Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-12-14 00:16:57 +00:00

Compare commits: v0.23.0 ... v0.26.1-cl (361 commits)
SHA1:

8058970523 3273fe0470 7468a5e96c a87faa0db7 b669a73432 833f7fbdbe 62ce8e0bda ddd25bfe01
19da45c53b 0026410c61 b57c59baa4 af8a5f2c21 d6400aef27 81fe65afed c2b58720d1 5515aa5045
15150db957 3b2e467ca6 4fbb83a34d ff6a7b6007 6312e7f1f3 bfb375ac87 21d277a0ef c3e3c900f2
05c8d81e65 cd6276eef9 7bcaa2fd13 67ecd7c147 5890600101 e2a9414c7a 216965e9d9 d0ddbcc2b2
41db2601a6 42cb94e1f4 6e7a0cc65d 23eba82038 001b9acb63 af65ccfd6a 0c7251475d 1a87b2f37d
752a0e13ad ccaca33446 2a90e805a2 c4a2d70d19 f7e4a0177d cca65499de 80fa7dbbfa c24b1e5250
78cf8f1f9f 1da7277817 c71c95feb0 3bee31e6c7 9448ca58aa c9a236b0af 622c15e825 054598734a
7ca647f0d0 aa50fcb1f0 b408de0761 72d9c5ee5c 2b7440d4b5 3bc6a18bcd db56d6cb11 a5759139bf
8a959da120 0a78750465 372f4fc924 ae5b401e74 c562655be7 c8bb54cd94 8c80326dd5 bad4bed439
7828da15c3 7e2f6063ae 2b766a2f26 8ae504bfb0 5981e6c57c 1be3a1e945 629b897845 9f5fee404b
40bf98711c f9f075bca2 0c1a3d59eb 523fb5cd56 436f61a7f4 3fab5869fa 0515c6e844 38176181ac
a7e634bd4f 78a381a30b 343bce6a29 d263f762bf dfaeb19566 010dcc3e80 d0aa5f747c f6d53e03f1
3ecebd15ee db83e39a7f 5d48f72ade 1818026a84 0ad7d38eec b17ad5c2be 1824b3c07b c9c7da3626
030a90523d 56d223a51d f558ff826a 5fb4ed60e7 0d2a358cc2 595250c93e c636988935 eea483c470
d53c61a6d0 c0d4f71a34 f56989e46e c0251eb680 450b81ca13 2f3faadcbf 5986a2d126 d75e84f625
c221277fd2 3b30fadb55 d7df4d6b84 fd854035c1 4d1c138842 7649239b08 0e2f6ba1b6 f529c46598
1ba49d2ddb 1b5ca88231 37329e0784 eaff393c76 a845cd8880 b28a465304 845d3114ea 287fa7ca74
80ed9654e1 7ddab7ef31 d534a7f7c8 5af51c852c ee7970f603 5453877ca7 ea0a5271f7 879cc4ec26
6ac2475aba 47d5f659e0 8c9e51e94f 0da5aca9f6 9906db9e64 8096b568f0 2934a77832 cf6cb938a6
8ff6b1b540 a938a9ab0f ae73386723 34c8a859eb 80d039042b 5606e22d97 23e35fa526 82033f935e
ae2b0e7aa7 948615537b a0e129304c 8d72d538de ffefd0caf2 fa196986c2 a30e02c18c c9f3726447
8363200fd7 37548eb720 dadce6032d 0a1d2ce231 9d01c5d882 53fc2edab3 b7c5b78a61 f081dc2001
2cf7daa227 9d75fbc619 3b1b9a277b 40e87b9544 ded7922be5 11ef64ee43 5e6d7b7649 5fd9616b5f
a1227648ba 919f4173cf 7c5aad4073 d47ccd9199 cc5e884b34 ac5535055f 15cb4dafa9 8ca76d9fdf
f62e52ec68 bf01c674ea e9b6a05b75 6bbc1b4316 3c696da274 d9d6dee550 cc6306c0e1 b59145385e
3f4e0ec971 ec0716ddd1 6d6725b3b8 6660be2cb7 847fcb570b 4095ec462e f7f2421e71 b664a46e91
06e6eaa7b4 30a094cbb2 904bae98f8 c32f13a909 519093ea65 bd49d1c4b5 2665c0099d d65f055030
66d87761b7 ba69ad672a 7934e3956b 68fe93b7db efd0ea9e1e 6ef73eb226 fc2f23d36c 7c39fab453
c5164c01c0 351ad32d77 3ad8311bdd ea5ae2bae5 72e3adc55e b250392e8d d8b0d68840 c4737749ab
a1ab02f9fb bba64b32ca 9abd2aa9d7 de35a9a605 ed750e8792 37ca50832c 31c7a0105b ddab9eafa1
76a4f86e0c 6b34318274 5508c6c154 9a62ac0c94 01737ef847 3144b572c4 10de92987a c752c14c46
87a8bf5e96 ba14ea1243 9be90011c6 f9b14ca149 6591acfdfa e64ba122e1 a9523146a3 392ee86714
1d73f484f0 cfcd3ae048 5395041dcb 40eabd50d1 35ffd0ec3a d3d76bf97a 595ae42e94 0667d940f9
75d1272325 8e2d6cf87d 9e1bba40f7 f7bb499c28 b33b1ef3dd 30aeda7a1a 22d9d660cc 7524bfc07f
bda7472880 1ed05c6c07 0b3e0a59cb 0616f68eb0 6b8e5a4c92 d72c887422 664d09e86a e226b1a87f
b227666271 6fea050813 cf67964133 f8d04b11d5 3a29cbf0ae 66f5de9703 cbaca2b579 a76d9b15c9
59636fa688 ff0908d3fa 21f35762ca 7464720426 6e57c40c37 c8518f4ab2 b9c061ab3d d905bbf961
6641e7aa50 61c15b69fb 8ec0c4c913 0a9d6e8210 0ed800b612 4ac005b094 5e3a53b576 e87146b0d9
5caa79df67 d519e1036f 19eebc0b0a 5585020753 ef7e7a8f11 eb91f27b65 24eef577c5 e7e4ccf74f
017ecf76e3 1c9ceadd8d 36ab7b3ebd b4038597ba 79817bd465 93ad8f04b5 e4cb7ed30f b9e060423f
ead1ec3396 306a8cd059 4c50deb4b7 be75426e64 23458de588 9fd849d48b 2b28bc9510 d107b3f46c
44149bec60 f80b4fdedd fd4a90549b b602a0836a 7349fca607 4bacc8e47d 7141f89c5f 893654fb15
c9e1d054c7 2e2eeb0a42 0f342ac46e 29ac324e90 23f11e355d f09016b2bc 1fa3aeceeb 443afdc412
776befc1f0 3edbc74430 b969f34317 6c46fbbc57 87115b02d9 c614520405 3756f5a0ca 5b4e4bb858
602a327aa8 14c6ae4735 493a0e377d 02dd1ea29d eaddee9fe2 d9165c7f77 2ef58ccce9 4009804221
168a1315de
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 2 changes)
@@ -23,7 +23,7 @@ A clear and concise description of what you expected to happen.
 **Screenshots**
 If applicable, add screenshots to help explain your problem.
 
-**MeiliSearch version:** [e.g. v0.20.0]
+**Meilisearch version:** [e.g. v0.20.0]
 
 **Additional context**
 Additional information that may be relevant to the issue.
.github/ISSUE_TEMPLATE/config.yml (vendored, 2 changes)
@@ -6,5 +6,5 @@ contact_links:
   url: https://github.com/meilisearch/documentation/issues/new
   about: For documentation issues, open an issue or a PR in the documentation repository
 - name: Support questions & other
-  url: https://github.com/meilisearch/MeiliSearch/discussions/new
+  url: https://github.com/meilisearch/meilisearch/discussions/new
   about: For any other question, open a discussion in this repository
.github/is-latest-release.sh (vendored, 2 changes)
@@ -74,7 +74,7 @@ semverLT() {
 # Returns the tag of the latest stable release (in terms of semver and not of release date)
 get_latest() {
     temp_file='temp_file' # temp_file needed because the grep would start before the download is over
-    curl -s 'https://api.github.com/repos/meilisearch/MeiliSearch/releases' > "$temp_file"
+    curl -s 'https://api.github.com/repos/meilisearch/meilisearch/releases' > "$temp_file"
     releases=$(cat "$temp_file" | \
         grep -E "tag_name|draft|prerelease" \
         | tr -d ',"' | cut -d ':' -f2 | tr -d ' ')
.github/release-draft-template.yml (vendored, 13 changes, file deleted)
@@ -1,13 +0,0 @@
-name-template: 'v$RESOLVED_VERSION'
-tag-template: 'v$RESOLVED_VERSION'
-version-template: '0.21.0-alpha.$PATCH'
-exclude-labels:
-  - 'skip-changelog'
-template: |
-  ## Changes
-
-  $CHANGES
-no-changes-template: 'Changes are coming soon 😎'
-sort-direction: 'ascending'
-version-resolver:
-  default: patch
.github/workflows/README.md (vendored, 2 changes)
@@ -1,4 +1,4 @@
-# GitHub Actions Workflow for MeiliSearch
+# GitHub Actions Workflow for Meilisearch
 
 > **Note:**
 
.github/workflows/publish-binaries.yml (vendored, 76 changes)
@@ -9,6 +9,7 @@ jobs:
     name: Publish for ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     strategy:
+      fail-fast: false
       matrix:
         os: [ubuntu-18.04, macos-latest, windows-latest]
         include:
@@ -37,28 +38,69 @@ jobs:
           asset_name: ${{ matrix.asset_name }}
           tag: ${{ github.ref }}
 
-  publish-armv8:
-    name: Publish for ARMv8
-    runs-on: ubuntu-18.04
+  publish-aarch64:
+    name: Publish to GitHub
+    runs-on: ${{ matrix.os }}
+    continue-on-error: false
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - build: aarch64
+            os: ubuntu-18.04
+            target: aarch64-unknown-linux-gnu
+            linker: gcc-aarch64-linux-gnu
+            use-cross: true
+            asset_name: meilisearch-linux-aarch64
+
     steps:
-      - uses: actions/checkout@v2
-      - uses: uraimo/run-on-arch-action@v2.1.1
-        id: runcmd
+      - name: Checkout repository
+        uses: actions/checkout@v2
+
+      - name: Installing Rust toolchain
+        uses: actions-rs/toolchain@v1
         with:
-          arch: aarch64 # aka ARMv8
-          distro: ubuntu18.04
-          env: |
-            JEMALLOC_SYS_WITH_LG_PAGE: 16
-          run: |
-            apt update
-            apt install -y curl gcc make
-            curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal --default-toolchain stable
-            source $HOME/.cargo/env
-            cargo build --release --locked
+          toolchain: stable
+          profile: minimal
+          target: ${{ matrix.target }}
+          override: true
+
+      - name: APT update
+        run: |
+          sudo apt update
+
+      - name: Install target specific tools
+        if: matrix.use-cross
+        run: |
+          sudo apt-get install -y ${{ matrix.linker }}
+
+      - name: Configure target aarch64 GNU
+        if: matrix.target == 'aarch64-unknown-linux-gnu'
+        ## Environment variable is not passed using env:
+        ## LD gold won't work with MUSL
+        # env:
+        #   JEMALLOC_SYS_WITH_LG_PAGE: 16
+        #   RUSTFLAGS: '-Clink-arg=-fuse-ld=gold'
+        run: |
+          echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config
+          echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config
+          echo 'JEMALLOC_SYS_WITH_LG_PAGE=16' >> $GITHUB_ENV
+          echo RUSTFLAGS="-Clink-arg=-fuse-ld=gold" >> $GITHUB_ENV
+
+      - name: Cargo build
+        uses: actions-rs/cargo@v1
+        with:
+          command: build
+          use-cross: ${{ matrix.use-cross }}
+          args: --release --target ${{ matrix.target }}
+
+      - name: List target output files
+        run: ls -lR ./target
+
       - name: Upload the binary to release
         uses: svenstaro/upload-release-action@v1-release
         with:
           repo_token: ${{ secrets.PUBLISH_TOKEN }}
-          file: target/release/meilisearch
-          asset_name: meilisearch-linux-armv8
+          file: target/${{ matrix.target }}/release/meilisearch
+          asset_name: ${{ matrix.asset_name }}
           tag: ${{ github.ref }}
.github/workflows/publish-docker-latest.yml (vendored, 28 changes)
@@ -6,17 +6,25 @@ on:
 name: Publish latest image to Docker Hub
 
 jobs:
-  build:
-    runs-on: ubuntu-18.04
+  docker-latest:
+    runs-on: docker
     steps:
-      - uses: actions/checkout@v2
-      - name: Check if current release is latest
-        run: echo "##[set-output name=is_latest;]$(sh .github/is-latest-release.sh)"
-        id: release
-      - name: Publish to Registry
-        if: steps.release.outputs.is_latest == 'true'
-        uses: elgohr/Publish-Docker-Github-Action@master
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
         with:
-          name: getmeili/meilisearch
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          push: true
+          platforms: linux/amd64,linux/arm64
+          tags: getmeili/meilisearch:latest
.github/workflows/publish-docker-tag.yml (vendored, 35 changes)
@@ -7,16 +7,33 @@ on:
 name: Publish tagged image to Docker Hub
 
 jobs:
-  build:
-    runs-on: ubuntu-18.04
+  docker-tag:
+    runs-on: docker
    steps:
-      - uses: actions/checkout@v2
-      - name: Publish to Registry
-        uses: elgohr/Publish-Docker-Github-Action@master
-        env:
-          COMMIT_SHA: ${{ github.sha }}
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
         with:
-          name: getmeili/meilisearch
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}
-          tag_names: true
+
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v3
+        with:
+          images: getmeili/meilisearch
+          flavor: latest=false
+          tags: type=ref,event=tag
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          push: true
+          platforms: linux/amd64,linux/arm64
+          tags: ${{ steps.meta.outputs.tags }}
.github/workflows/release-drafter.yml (vendored, 16 changes, file deleted)
@@ -1,16 +0,0 @@
-name: Release Drafter
-
-on:
-  push:
-    branches:
-      - main
-
-jobs:
-  update_release_draft:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: release-drafter/release-drafter@v5
-        with:
-          config-name: release-draft-template.yml
-        env:
-          GITHUB_TOKEN: ${{ secrets.RELEASE_DRAFTER_TOKEN }}
.github/workflows/rust.yml (vendored, 1 change)
@@ -11,6 +11,7 @@ on:
 
 env:
   CARGO_TERM_COLOR: always
+  RUST_BACKTRACE: 1
 
 jobs:
   tests:
CONTRIBUTING.md
@@ -1,28 +1,20 @@
 # Contributing
 
-First, thank you for contributing to MeiliSearch! The goal of this document is to provide everything you need to start contributing to MeiliSearch.
+First, thank you for contributing to Meilisearch! The goal of this document is to provide everything you need to start contributing to Meilisearch.
 
-- [Hacktoberfest](#hacktoberfest)
+Remember that there are many ways to contribute other than writing code: writing [tutorials or blog posts](https://github.com/meilisearch/awesome-meilisearch), improving [the documentation](https://github.com/meilisearch/documentation), submitting [bug reports](https://github.com/meilisearch/meilisearch/issues/new?assignees=&labels=&template=bug_report.md&title=) and [feature requests](https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal)...
 
+## Table of Contents
 - [Assumptions](#assumptions)
 - [How to Contribute](#how-to-contribute)
 - [Development Workflow](#development-workflow)
 - [Git Guidelines](#git-guidelines)
 
-## Hacktoberfest
-
-It's [Hacktoberfest month](https://blog.meilisearch.com/contribute-hacktoberfest-2021/)! 🥳
-
-🚀 If your PR gets accepted it will count into your participation to Hacktoberfest!
-
-✅ To be accepted it has either to have been merged, approved or tagged with the `hacktoberest-accepted` label.
-
-🧐 Don't forget to check the [quality standards](https://hacktoberfest.digitalocean.com/resources/qualitystandards), otherwise your PR could be marked as `spam` or `invalid`, and it will not be counted toward your participation in Hacktoberfest.
-
 ## Assumptions
 
 1. **You're familiar with [Github](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
-2. **You've read the MeiliSearch [documentation](https://docs.meilisearch.com).**
-3. **You know about the [MeiliSearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
+2. **You've read the Meilisearch [documentation](https://docs.meilisearch.com).**
+3. **You know about the [Meilisearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
 Please use this for help.**
 
 ## How to Contribute
@@ -30,21 +22,21 @@ It's [Hacktoberfest month](https://blog.meilisearch.com/contribute-hacktoberfest
 1. Ensure your change has an issue! Find an
    [existing issue](https://github.com/meilisearch/meilisearch/issues/) or [open a new issue](https://github.com/meilisearch/meilisearch/issues/new).
    * This is where you can get a feel if the change will be accepted or not.
-2. Once approved, [fork the MeiliSearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own Github account.
+2. Once approved, [fork the Meilisearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own Github account.
 3. [Create a new Git branch](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-and-deleting-branches-within-your-repository)
 4. Review the [Development Workflow](#development-workflow) section that describes the steps to maintain the repository.
 5. Make your changes on your branch.
-6. [Submit the branch as a Pull Request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork) pointing to the `main` branch of the MeiliSearch repository. A maintainer should comment and/or review your Pull Request within a few days. Although depending on the circumstances, it may take longer.
+6. [Submit the branch as a Pull Request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork) pointing to the `main` branch of the Meilisearch repository. A maintainer should comment and/or review your Pull Request within a few days. Although depending on the circumstances, it may take longer.
 
 ## Development Workflow
 
-### Setup and run MeiliSearch
+### Setup and run Meilisearch
 
 ```bash
 cargo run --release
 ```
 
-We recommend using the `--release` flag to test the full performance of MeiliSearch.
+We recommend using the `--release` flag to test the full performance of Meilisearch.
 
 ### Test
 
Cargo.lock (generated, 1781 changes)
File diff suppressed because it is too large.
Cargo.toml
@@ -3,11 +3,5 @@ members = [
   "meilisearch-http",
   "meilisearch-error",
   "meilisearch-lib",
+  "meilisearch-auth",
 ]
-resolver = "2"
-
-[profile.release]
-debug = true
-
-[patch.crates-io]
-pest = { git = "https://github.com/pest-parser/pest.git", rev = "51fd1d49f1041f7839975664ef71fe15c7dcaf67" }
Cross.toml (new file, 7 lines)
@@ -0,0 +1,7 @@
+[build.env]
+passthrough = [
+    "RUST_BACKTRACE",
+    "CARGO_TERM_COLOR",
+    "RUSTFLAGS",
+    "JEMALLOC_SYS_WITH_LG_PAGE"
+]
Dockerfile (23 changes)
@@ -1,9 +1,8 @@
 # Compile
 FROM alpine:3.14 AS compiler
 
-RUN apk update --quiet
-RUN apk add curl
-RUN apk add build-base
+RUN apk update --quiet \
+    && apk add -q --no-cache curl build-base
 
 RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
 
@@ -12,6 +11,7 @@ WORKDIR /meilisearch
 COPY Cargo.lock .
 COPY Cargo.toml .
 
+COPY meilisearch-auth/Cargo.toml meilisearch-auth/
 COPY meilisearch-error/Cargo.toml meilisearch-error/
 COPY meilisearch-http/Cargo.toml meilisearch-http/
 COPY meilisearch-lib/Cargo.toml meilisearch-lib/
@@ -21,7 +21,10 @@ ENV RUSTFLAGS="-C target-feature=-crt-static"
 # Create dummy main.rs files for each workspace member to be able to compile all the dependencies
 RUN find . -type d -name "meilisearch-*" | xargs -I{} sh -c 'mkdir {}/src; echo "fn main() { }" > {}/src/main.rs;'
 # Use `cargo build` instead of `cargo vendor` because we need to not only download but compile dependencies too
-RUN $HOME/.cargo/bin/cargo build --release
+RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
+        export JEMALLOC_SYS_WITH_LG_PAGE=16; \
+    fi && \
+    $HOME/.cargo/bin/cargo build --release
 # Cleanup dummy main.rs files
 RUN find . -path "*/src/main.rs" -delete
 
@@ -30,16 +33,22 @@ ARG COMMIT_DATE
 ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE}
 
 COPY . .
-RUN $HOME/.cargo/bin/cargo build --release
+RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
+        export JEMALLOC_SYS_WITH_LG_PAGE=16; \
+    fi && \
+    $HOME/.cargo/bin/cargo build --release
 
 # Run
 FROM alpine:3.14
 
-RUN apk add -q --no-cache libgcc tini curl
+ENV MEILI_HTTP_ADDR 0.0.0.0:7700
+ENV MEILI_SERVER_PROVIDER docker
+
+RUN apk update --quiet \
+    && apk add -q --no-cache libgcc tini curl
 
 COPY --from=compiler /meilisearch/target/release/meilisearch .
 
-ENV MEILI_HTTP_ADDR 0.0.0.0:7700
 EXPOSE 7700/tcp
 
 ENTRYPOINT ["tini", "--"]
LICENSE (2 changes)
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2019-2021 Meili SAS
+Copyright (c) 2019-2022 Meilisearch
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
README.md (44 changes)
@@ -1,8 +1,8 @@
 <p align="center">
-  <img src="assets/logo.svg" alt="MeiliSearch" width="200" height="200" />
+  <img src="assets/logo.svg" alt="Meilisearch" width="200" height="200" />
 </p>
 
-<h1 align="center">MeiliSearch</h1>
+<h1 align="center">Meilisearch</h1>
 
 <h4 align="center">
   <a href="https://www.meilisearch.com">Website</a> |
@@ -15,17 +15,17 @@
 </h4>
 
 <p align="center">
-  <a href="https://github.com/meilisearch/MeiliSearch/actions"><img src="https://github.com/meilisearch/MeiliSearch/workflows/Cargo%20test/badge.svg" alt="Build Status"></a>
-  <a href="https://deps.rs/repo/github/meilisearch/MeiliSearch"><img src="https://deps.rs/repo/github/meilisearch/MeiliSearch/status.svg" alt="Dependency status"></a>
-  <a href="https://github.com/meilisearch/MeiliSearch/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
-  <a href="https://slack.meilisearch.com"><img src="https://img.shields.io/badge/slack-MeiliSearch-blue.svg?logo=slack" alt="Slack"></a>
-  <a href="https://github.com/meilisearch/MeiliSearch/discussions" alt="Discussions"><img src="https://img.shields.io/badge/github-discussions-red" /></a>
+  <a href="https://github.com/meilisearch/meilisearch/actions"><img src="https://github.com/meilisearch/meilisearch/workflows/Cargo%20test/badge.svg" alt="Build Status"></a>
+  <a href="https://deps.rs/repo/github/meilisearch/meilisearch"><img src="https://deps.rs/repo/github/meilisearch/meilisearch/status.svg" alt="Dependency status"></a>
+  <a href="https://github.com/meilisearch/meilisearch/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
+  <a href="https://slack.meilisearch.com"><img src="https://img.shields.io/badge/slack-meilisearch-blue.svg?logo=slack" alt="Slack"></a>
+  <a href="https://github.com/meilisearch/meilisearch/discussions" alt="Discussions"><img src="https://img.shields.io/badge/github-discussions-red" /></a>
   <a href="https://app.bors.tech/repositories/26457"><img src="https://bors.tech/images/badge_small.svg" alt="Bors enabled"></a>
 </p>
 
 <p align="center">⚡ Lightning Fast, Ultra Relevant, and Typo-Tolerant Search Engine 🔍</p>
 
-**MeiliSearch** is a powerful, fast, open-source, easy to use and deploy search engine. Both searching and indexing are highly customizable. Features such as typo-tolerance, filters, and synonyms are provided out-of-the-box.
+**Meilisearch** is a powerful, fast, open-source, easy to use and deploy search engine. Both searching and indexing are highly customizable. Features such as typo-tolerance, filters, and synonyms are provided out-of-the-box.
 For more information about features go to [our documentation](https://docs.meilisearch.com/).
 
 <p align="center">
@@ -61,13 +61,13 @@ meilisearch
 docker run -p 7700:7700 -v "$(pwd)/data.ms:/data.ms" getmeili/meilisearch
 ```
 
-#### Announcing a cloud-hosted MeiliSearch
+#### Announcing a cloud-hosted Meilisearch
 
 Join the closed beta by filling out this [form](https://meilisearch.typeform.com/to/FtnzvZfh).
 
-#### Try MeiliSearch in our Sandbox
+#### Try Meilisearch in our Sandbox
 
-Create a MeiliSearch instance in [MeiliSearch Sandbox](https://sandbox.meilisearch.com/). This instance is free, and will be active for 48 hours.
+Create a Meilisearch instance in [Meilisearch Sandbox](https://sandbox.meilisearch.com/). This instance is free, and will be active for 48 hours.
 
 #### Run on Digital Ocean
 
@@ -99,8 +99,8 @@ curl -L https://install.meilisearch.com | sh
 If you have the latest stable Rust toolchain installed on your local system, clone the repository and change it to your working directory.
 
 ```bash
-git clone https://github.com/meilisearch/MeiliSearch.git
-cd MeiliSearch
+git clone https://github.com/meilisearch/meilisearch.git
+cd meilisearch
 cargo run --release
 ```
 
@@ -161,19 +161,19 @@ curl 'http://127.0.0.1:7700/indexes/movies/search?q=botman+robin&limit=2' | jq
 
 #### Use the Web Interface
 
-We also deliver an **out-of-the-box [web interface](https://github.com/meilisearch/mini-dashboard)** in which you can test MeiliSearch interactively.
+We also deliver an **out-of-the-box [web interface](https://github.com/meilisearch/mini-dashboard)** in which you can test Meilisearch interactively.
 
-You can access the web interface in your web browser at the root of the server. The default URL is [http://127.0.0.1:7700](http://127.0.0.1:7700). All you need to do is open your web browser and enter MeiliSearch’s address to visit it. This will lead you to a web page with a search bar that will allow you to search in the selected index.
+You can access the web interface in your web browser at the root of the server. The default URL is [http://127.0.0.1:7700](http://127.0.0.1:7700). All you need to do is open your web browser and enter Meilisearch’s address to visit it. This will lead you to a web page with a search bar that will allow you to search in the selected index.
 
 | [See the gif above](#demo)
 
 ## Documentation
 
-Now that your MeiliSearch server is up and running, you can learn more about how to tune your search engine in [the documentation](https://docs.meilisearch.com).
+Now that your Meilisearch server is up and running, you can learn more about how to tune your search engine in [the documentation](https://docs.meilisearch.com).
 
 ## Contributing
 
-Hey! We're glad you're thinking about contributing to MeiliSearch! Feel free to pick an [issue labeled as `good first issue`](https://github.com/meilisearch/MeiliSearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22), and to ask any question you need. Some points might not be clear and we are available to help you!
+Hey! We're glad you're thinking about contributing to Meilisearch! Feel free to pick an [issue labeled as `good first issue`](https://github.com/meilisearch/meilisearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22), and to ask any question you need. Some points might not be clear and we are available to help you!
 
 Also, we recommend following the [CONTRIBUTING](./CONTRIBUTING.md) to create your PR.
 
@@ -184,16 +184,16 @@ The code in this repository is only concerned with managing multiple indexes, ha
 Search and indexation are the domain of our core engine, [`milli`](https://github.com/meilisearch/milli), while tokenization is handled by [our `tokenizer` library](https://github.com/meilisearch/tokenizer/).
 ## Telemetry
 
-MeiliSearch collects anonymous data regarding general usage.
-This helps us better understand developers' usage of MeiliSearch features.
+Meilisearch collects anonymous data regarding general usage.
+This helps us better understand developers' usage of Meilisearch features.
 
-To see what information we're retrieving, please see the complete list [on the dedicated issue](https://github.com/meilisearch/MeiliSearch/issues/720).
+To find out more on what information we're retrieving, please see our documentation on [Telemetry](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html).
 
 This program is optional, you can disable these analytics by using the `MEILI_NO_ANALYTICS` env variable.
 
 ## Feature request
 
-The feature requests are not managed in this repository. Please visit our [dedicated repository](https://github.com/meilisearch/product) to see our work about the MeiliSearch product.
+The feature requests are not managed in this repository. Please visit our [dedicated repository](https://github.com/meilisearch/product) to see our work about the Meilisearch product.
 
 If you have a feature request or any feedback about an existing feature, please open [a discussion](https://github.com/meilisearch/product/discussions).
 Also, feel free to participate in the current discussions, we are looking forward to reading your comments.
@@ -202,4 +202,4 @@ Also, feel free to participate in the current discussions, we are looking forwar
 
 Please visit [this page](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html#contact-us).
 
-MeiliSearch is developed by [Meili](https://www.meilisearch.com), a young company. To know more about us, you can [read our blog](https://blog.meilisearch.com). Any suggestion or feedback is highly appreciated. Thank you for your support!
+Meilisearch is developed by [Meili](https://www.meilisearch.com), a young company. To know more about us, you can [read our blog](https://blog.meilisearch.com). Any suggestion or feedback is highly appreciated. Thank you for your support!
SECURITY.md (new file, 33 lines)
@@ -0,0 +1,33 @@
+# Security
+
+Meilisearch takes the security of our software products and services seriously.
+
+If you believe you have found a security vulnerability in any Meilisearch-owned repository, please report it to us as described below.
+
+## Suported versions
+
+As long as we are pre-v1.0, only the latest version of Meilisearch will be supported with security updates.
+
+## Reporting security issues
+
+⚠️ Please do not report security vulnerabilities through public GitHub issues. ⚠️
+
+Instead, please kindly email us at security@meilisearch.com
+
+Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
+
+- Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
+- Full paths of source file(s) related to the manifestation of the issue
+- The location of the affected source code (tag/branch/commit or direct URL)
+- Any special configuration required to reproduce the issue
+- Step-by-step instructions to reproduce the issue
+- Proof-of-concept or exploit code (if possible)
+- Impact of the issue, including how an attacker might exploit the issue
+
+This information will help us triage your report more quickly.
+
+You will receive a response from us within 72 hours. If the issue is confirmed, we will release a patch as soon as possible depending on complexity.
+
+## Preferred languages
+
+We prefer all communications to be in English.
assets/logo.svg (Before: 2.0 KiB, After: 1.3 KiB)
@@ -1,17 +1,19 @@
-<svg width="360" height="360" viewBox="0 0 360 360" fill="none" xmlns="http://www.w3.org/2000/svg">
-<g id="logo_main">
-<rect id="Rectangle" x="107.333" y="0.150146" width="274.315" height="274.315" rx="98.8334" transform="rotate(23 107.333 0.150146)" fill="url(#paint0_linear)"/>
-<path id="Rectangle_2" fill-rule="evenodd" clip-rule="evenodd" d="M61.3296 230.199C46.2224 194.608 38.6688 176.813 38.208 160.329C37.5286 136.025 47.0175 112.539 64.3891 95.5282C76.1718 83.9904 93.9669 76.4368 129.557 61.3296C165.147 46.2224 182.943 38.6688 199.427 38.208C223.731 37.5286 247.217 47.0175 264.228 64.3891C275.766 76.1718 283.319 93.9669 298.426 129.557C313.534 165.147 321.087 182.943 321.548 199.427C322.227 223.731 312.738 247.217 295.367 264.228C283.584 275.766 265.789 283.319 230.199 298.426C194.608 313.534 176.813 321.087 160.329 321.548C136.025 322.227 112.539 312.738 95.5282 295.367C83.9903 283.584 76.4368 265.789 61.3296 230.199Z" fill="url(#paint1_linear)"/>
-<path id="m" fill-rule="evenodd" clip-rule="evenodd" d="M219.568 130.748C242.363 130.748 259.263 147.451 259.263 174.569V229.001H227.232V179.678C227.232 166.119 220.747 159.634 210.136 159.634C205.223 159.634 200.311 161.796 195.595 167.494C195.791 169.852 195.988 172.21 195.988 174.569V229.001H164.154V179.678C164.154 166.119 157.472 159.634 147.057 159.634C142.145 159.634 137.429 161.992 132.712 168.084V229.001H100.878V133.695H132.712V139.394C139.197 133.892 145.878 130.748 156.49 130.748C168.477 130.748 178.695 135.267 185.769 143.52C195.791 134.678 205.42 130.748 219.568 130.748Z" fill="white"/>
-</g>
+<svg width="300" height="300" viewBox="0 0 300 300" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M0 237L55.426 96.7678C63.2367 77.0063 82.499 64 103.955 64H137.371L81.9447 204.232C74.1341 223.993 54.8717 237 33.4156 237H0Z" fill="url(#paint0_linear_1_898)"/>
+<path d="M81.3123 237L136.738 96.7682C144.549 77.0067 163.811 64.0004 185.267 64.0004H218.683L163.257 204.232C155.446 223.994 136.184 237 114.728 237H81.3123Z" fill="url(#paint1_linear_1_898)"/>
+<path d="M162.629 237L218.055 96.7682C225.866 77.0067 245.128 64.0004 266.584 64.0004H300L244.574 204.232C236.763 223.994 217.501 237 196.045 237H162.629Z" fill="url(#paint2_linear_1_898)"/>
 <defs>
-<linearGradient id="paint0_linear" x1="-13.6248" y1="129.208" x2="244.49" y2="403.522" gradientUnits="userSpaceOnUse">
-<stop stop-color="#E41359"/>
-<stop offset="1" stop-color="#F23C79"/>
+<linearGradient id="paint0_linear_1_898" x1="300.001" y1="50.7858" x2="1.63474" y2="221.244" gradientUnits="userSpaceOnUse">
+<stop stop-color="#FF5CAA"/>
+<stop offset="1" stop-color="#FF4E62"/>
 </linearGradient>
-<linearGradient id="paint1_linear" x1="11.0088" y1="111.65" x2="111.65" y2="348.747" gradientUnits="userSpaceOnUse">
-<stop stop-color="#24222F"/>
-<stop offset="1" stop-color="#2B2937"/>
+<linearGradient id="paint1_linear_1_898" x1="300.001" y1="50.7858" x2="1.63474" y2="221.244" gradientUnits="userSpaceOnUse">
+<stop stop-color="#FF5CAA"/>
+<stop offset="1" stop-color="#FF4E62"/>
+</linearGradient>
+<linearGradient id="paint2_linear_1_898" x1="300.001" y1="50.7858" x2="1.63474" y2="221.244" gradientUnits="userSpaceOnUse">
+<stop stop-color="#FF5CAA"/>
+<stop offset="1" stop-color="#FF4E62"/>
 </linearGradient>
 </defs>
 </svg>
bors.toml
@@ -5,5 +5,6 @@ status = [
   'Run Clippy',
   'Run Rustfmt'
 ]
+pr_status = ['Milestone Check']
 # 3 hours timeout
 timeout-sec = 10800
Deleted file (1 line):
@@ -1 +0,0 @@
-_datas in movies.json are from https://www.themoviedb.org/_

File diff suppressed because it is too large.
download-latest.sh
@@ -51,13 +51,13 @@ semverLT() {
     if [ $MAJOR_A -le $MAJOR_B ] && [ $MINOR_A -le $MINOR_B ] && [ $PATCH_A -lt $PATCH_B ]; then
         return 0
     fi
-    if [ "_$SPECIAL_A" == "_" ] && [ "_$SPECIAL_B" == "_" ] ; then
+    if [ "_$SPECIAL_A" == '_' ] && [ "_$SPECIAL_B" == '_' ] ; then
         return 1
     fi
-    if [ "_$SPECIAL_A" == "_" ] && [ "_$SPECIAL_B" != "_" ] ; then
+    if [ "_$SPECIAL_A" == '_' ] && [ "_$SPECIAL_B" != '_' ] ; then
         return 1
     fi
-    if [ "_$SPECIAL_A" != "_" ] && [ "_$SPECIAL_B" == "_" ] ; then
+    if [ "_$SPECIAL_A" != '_' ] && [ "_$SPECIAL_B" == '_' ] ; then
         return 0
     fi
     if [ "_$SPECIAL_A" < "_$SPECIAL_B" ]; then
@@ -67,39 +67,47 @@ semverLT() {
     return 1
 }
 
+# Get a token from https://github.com/settings/tokens to increasae rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo'
+# Create GITHUB_PAT enviroment variable once you aquired the token to start using it
+
 # Returns the tag of the latest stable release (in terms of semver and not of release date)
 get_latest() {
     temp_file='temp_file' # temp_file needed because the grep would start before the download is over
-    curl -s 'https://api.github.com/repos/meilisearch/MeiliSearch/releases' > "$temp_file" || return 1
+
+    if [ -z "$GITHUB_PAT" ]; then
+        curl -s 'https://api.github.com/repos/meilisearch/meilisearch/releases' > "$temp_file" || return 1
+    else
+        curl -H "Authorization: token $GITHUB_PAT" -s 'https://api.github.com/repos/meilisearch/meilisearch/releases' > "$temp_file" || return 1
+    fi
+
     releases=$(cat "$temp_file" | \
-        grep -E "tag_name|draft|prerelease" \
+        grep -E '"tag_name":|"draft":|"prerelease":' \
         | tr -d ',"' | cut -d ':' -f2 | tr -d ' ')
     # Returns a list of [tag_name draft_boolean prerelease_boolean ...]
     # Ex: v0.10.1 false false v0.9.1-rc.1 false true v0.9.0 false false...
 
     i=0
-    latest=""
-    current_tag=""
+    latest=''
+    current_tag=''
    for release_info in $releases; do
        if [ $i -eq 0 ]; then # Cheking tag_name
            if echo "$release_info" | grep -q "$GREP_SEMVER_REGEXP"; then # If it's not an alpha or beta release
                current_tag=$release_info
            else
-                current_tag=""
+                current_tag=''
            fi
            i=1
        elif [ $i -eq 1 ]; then # Checking draft boolean
-            if [ "$release_info" = "true" ]; then
-                current_tag=""
+            if [ "$release_info" = 'true' ]; then
+                current_tag=''
            fi
            i=2
        elif [ $i -eq 2 ]; then # Checking prerelease boolean
-            if [ "$release_info" = "true" ]; then
-                current_tag=""
+            if [ "$release_info" = 'true' ]; then
+                current_tag=''
            fi
            i=0
-            if [ "$current_tag" != "" ]; then # If the current_tag is valid
-                if [ "$latest" = "" ]; then # If there is no latest yet
+            if [ "$current_tag" != '' ]; then # If the current_tag is valid
+                if [ "$latest" = '' ]; then # If there is no latest yet
                    latest="$current_tag"
                else
                    semverLT $current_tag $latest # Comparing latest and the current tag
@@ -112,7 +120,7 @@ get_latest() {
     done
 
     rm -f "$temp_file"
-    echo $latest
+    return 0
 }
 
 # Gets the OS by setting the $os variable
@@ -140,11 +148,18 @@ get_os() {
 get_archi() {
     architecture=$(uname -m)
     case "$architecture" in
-    'x86_64' | 'amd64' | 'arm64')
+    'x86_64' | 'amd64' )
         archi='amd64'
         ;;
+    'arm64')
+        if [ $os = 'macos' ]; then # MacOS M1
+            archi='amd64'
+        else
+            archi='aarch64'
+        fi
+        ;;
     'aarch64')
-        archi='armv8'
+        archi='aarch64'
         ;;
     *)
         return 1
@@ -153,7 +168,7 @@ get_archi() {
 }
 
 success_usage() {
-    printf "$GREEN%s\n$DEFAULT" "MeiliSearch binary successfully downloaded as '$BINARY_NAME' file."
+    printf "$GREEN%s\n$DEFAULT" "Meilisearch $latest binary successfully downloaded as '$binary_name' file."
     echo ''
     echo 'Run it:'
     echo '    $ ./meilisearch'
@@ -161,40 +176,65 @@ success_usage() {
     echo '    $ ./meilisearch --help'
 }
 
-failure_usage() {
-    printf "$RED%s\n$DEFAULT" 'ERROR: MeiliSearch binary is not available for your OS distribution or your architecture yet.'
+not_available_failure_usage() {
+    printf "$RED%s\n$DEFAULT" 'ERROR: Meilisearch binary is not available for your OS distribution or your architecture yet.'
     echo ''
     echo 'However, you can easily compile the binary from the source files.'
     echo 'Follow the steps at the page ("Source" tab): https://docs.meilisearch.com/learn/getting_started/installation.html'
 }
 
+fetch_release_failure_usage() {
+    echo ''
+    printf "$RED%s\n$DEFAULT" 'ERROR: Impossible to get the latest stable version of Meilisearch.'
+    echo 'Please let us know about this issue: https://github.com/meilisearch/meilisearch/issues/new/choose'
+}
+
 # MAIN
-latest="$(get_latest)"
+
+# Fill $latest variable
+if ! get_latest; then
+    fetch_release_failure_usage # TO CHANGE
+    exit 1
+fi
+
+if [ "$latest" = '' ]; then
+    fetch_release_failure_usage
+    exit 1
+fi
 
+# Fill $os variable
 if ! get_os; then
-    failure_usage
+    not_available_failure_usage
     exit 1
 fi
 
+# Fill $archi variable
 if ! get_archi; then
-    failure_usage
+    not_available_failure_usage
     exit 1
 fi
 
-echo "Downloading MeiliSearch binary $latest for $os, architecture $archi..."
+echo "Downloading Meilisearch binary $latest for $os, architecture $archi..."
 case "$os" in
     'windows')
         release_file="meilisearch-$os-$archi.exe"
-        BINARY_NAME='meilisearch.exe'
+        binary_name='meilisearch.exe'
 
         ;;
     *)
         release_file="meilisearch-$os-$archi"
-        BINARY_NAME='meilisearch'
+        binary_name='meilisearch'
 
 esac
-link="https://github.com/meilisearch/MeiliSearch/releases/download/$latest/$release_file"
-curl -OL "$link"
-mv "$release_file" "$BINARY_NAME"
-chmod 744 "$BINARY_NAME"
+
+# Fetch the Meilisearch binary
+link="https://github.com/meilisearch/meilisearch/releases/download/$latest/$release_file"
+curl --fail -OL "$link"
+if [ $? -ne 0 ]; then
+    fetch_release_failure_usage
+    exit 1
+fi
+
+mv "$release_file" "$binary_name"
+chmod 744 "$binary_name"
 success_usage
meilisearch-auth/Cargo.toml (new file, 15 lines)
@@ -0,0 +1,15 @@
+[package]
+name = "meilisearch-auth"
+version = "0.26.1"
+edition = "2021"
+
+[dependencies]
+enum-iterator = "0.7.0"
+heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
+sha2 = "0.9.6"
+meilisearch-error = { path = "../meilisearch-error" }
+serde_json = { version = "1.0.67", features = ["preserve_order"] }
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+rand = "0.8.4"
+serde = { version = "1.0.130", features = ["derive"] }
+thiserror = "1.0.28"

meilisearch-auth/src/action.rs (new file)
@@ -0,0 +1,104 @@
use enum_iterator::IntoEnumIterator;
use serde::{Deserialize, Serialize};

#[derive(IntoEnumIterator, Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[repr(u8)]
pub enum Action {
    #[serde(rename = "*")]
    All = 0,
    #[serde(rename = "search")]
    Search = actions::SEARCH,
    #[serde(rename = "documents.add")]
    DocumentsAdd = actions::DOCUMENTS_ADD,
    #[serde(rename = "documents.get")]
    DocumentsGet = actions::DOCUMENTS_GET,
    #[serde(rename = "documents.delete")]
    DocumentsDelete = actions::DOCUMENTS_DELETE,
    #[serde(rename = "indexes.create")]
    IndexesAdd = actions::INDEXES_CREATE,
    #[serde(rename = "indexes.get")]
    IndexesGet = actions::INDEXES_GET,
    #[serde(rename = "indexes.update")]
    IndexesUpdate = actions::INDEXES_UPDATE,
    #[serde(rename = "indexes.delete")]
    IndexesDelete = actions::INDEXES_DELETE,
    #[serde(rename = "tasks.get")]
    TasksGet = actions::TASKS_GET,
    #[serde(rename = "settings.get")]
    SettingsGet = actions::SETTINGS_GET,
    #[serde(rename = "settings.update")]
    SettingsUpdate = actions::SETTINGS_UPDATE,
    #[serde(rename = "stats.get")]
    StatsGet = actions::STATS_GET,
    #[serde(rename = "dumps.create")]
    DumpsCreate = actions::DUMPS_CREATE,
    #[serde(rename = "dumps.get")]
    DumpsGet = actions::DUMPS_GET,
    #[serde(rename = "version")]
    Version = actions::VERSION,
}

impl Action {
    pub fn from_repr(repr: u8) -> Option<Self> {
        use actions::*;
        match repr {
            0 => Some(Self::All),
            SEARCH => Some(Self::Search),
            DOCUMENTS_ADD => Some(Self::DocumentsAdd),
            DOCUMENTS_GET => Some(Self::DocumentsGet),
            DOCUMENTS_DELETE => Some(Self::DocumentsDelete),
            INDEXES_CREATE => Some(Self::IndexesAdd),
            INDEXES_GET => Some(Self::IndexesGet),
            INDEXES_UPDATE => Some(Self::IndexesUpdate),
            INDEXES_DELETE => Some(Self::IndexesDelete),
            TASKS_GET => Some(Self::TasksGet),
            SETTINGS_GET => Some(Self::SettingsGet),
            SETTINGS_UPDATE => Some(Self::SettingsUpdate),
            STATS_GET => Some(Self::StatsGet),
            DUMPS_CREATE => Some(Self::DumpsCreate),
            DUMPS_GET => Some(Self::DumpsGet),
            VERSION => Some(Self::Version),
            _otherwise => None,
        }
    }

    pub fn repr(&self) -> u8 {
        use actions::*;
        match self {
            Self::All => 0,
            Self::Search => SEARCH,
            Self::DocumentsAdd => DOCUMENTS_ADD,
            Self::DocumentsGet => DOCUMENTS_GET,
            Self::DocumentsDelete => DOCUMENTS_DELETE,
            Self::IndexesAdd => INDEXES_CREATE,
            Self::IndexesGet => INDEXES_GET,
            Self::IndexesUpdate => INDEXES_UPDATE,
            Self::IndexesDelete => INDEXES_DELETE,
            Self::TasksGet => TASKS_GET,
            Self::SettingsGet => SETTINGS_GET,
            Self::SettingsUpdate => SETTINGS_UPDATE,
            Self::StatsGet => STATS_GET,
            Self::DumpsCreate => DUMPS_CREATE,
            Self::DumpsGet => DUMPS_GET,
            Self::Version => VERSION,
        }
    }
}

pub mod actions {
    pub const SEARCH: u8 = 1;
    pub const DOCUMENTS_ADD: u8 = 2;
    pub const DOCUMENTS_GET: u8 = 3;
    pub const DOCUMENTS_DELETE: u8 = 4;
    pub const INDEXES_CREATE: u8 = 5;
    pub const INDEXES_GET: u8 = 6;
    pub const INDEXES_UPDATE: u8 = 7;
    pub const INDEXES_DELETE: u8 = 8;
    pub const TASKS_GET: u8 = 9;
    pub const SETTINGS_GET: u8 = 10;
    pub const SETTINGS_UPDATE: u8 = 11;
    pub const STATS_GET: u8 = 12;
    pub const DUMPS_CREATE: u8 = 13;
    pub const DUMPS_GET: u8 = 14;
    pub const VERSION: u8 = 15;
}
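
The `from_repr`/`repr` pair above is maintained by hand, so the two `match` arms must stay mirror images of each other. A standalone sketch of the same const-discriminant pattern (illustrative names only, not the crate's API) makes the round-trip property explicit:

```rust
// Sketch of the repr <-> enum pattern used by `Action` above.
mod actions {
    pub const SEARCH: u8 = 1;
    pub const DOCUMENTS_ADD: u8 = 2;
}

#[derive(Copy, Clone, Debug, PartialEq)]
#[repr(u8)]
enum Action {
    All = 0,
    Search = actions::SEARCH,
    DocumentsAdd = actions::DOCUMENTS_ADD,
}

impl Action {
    fn from_repr(repr: u8) -> Option<Self> {
        use actions::*;
        match repr {
            0 => Some(Self::All),
            SEARCH => Some(Self::Search),
            DOCUMENTS_ADD => Some(Self::DocumentsAdd),
            _ => None,
        }
    }

    fn repr(&self) -> u8 {
        // Fieldless #[repr(u8)] enums can be cast to their discriminant.
        *self as u8
    }
}

fn main() {
    // Round-trip: every known discriminant maps back to the same variant.
    for repr in 0..=2u8 {
        let action = Action::from_repr(repr).unwrap();
        assert_eq!(action.repr(), repr);
    }
    println!("repr round-trip holds");
}
```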

meilisearch-auth/src/dump.rs (new file)
@@ -0,0 +1,47 @@
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Write;
use std::path::Path;

use crate::{AuthController, HeedAuthStore, Result};

const KEYS_PATH: &str = "keys";

impl AuthController {
    pub fn dump(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
        let mut store = HeedAuthStore::new(&src)?;

        // do not attempt to close the database on drop!
        store.set_drop_on_close(false);

        let keys_file_path = dst.as_ref().join(KEYS_PATH);

        let keys = store.list_api_keys()?;
        let mut keys_file = File::create(&keys_file_path)?;
        for key in keys {
            serde_json::to_writer(&mut keys_file, &key)?;
            keys_file.write_all(b"\n")?;
        }

        Ok(())
    }

    pub fn load_dump(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
        let store = HeedAuthStore::new(&dst)?;

        let keys_file_path = src.as_ref().join(KEYS_PATH);

        if !keys_file_path.exists() {
            return Ok(());
        }

        let mut reader = BufReader::new(File::open(&keys_file_path)?).lines();
        while let Some(key) = reader.next().transpose()? {
            let key = serde_json::from_str(&key)?;
            store.put_api_key(key)?;
        }

        Ok(())
    }
}
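
`dump` and `load_dump` use a newline-delimited JSON file: one serialized key per line, written with `serde_json::to_writer` and read back with `BufReader::lines`. A minimal sketch of the same NDJSON scheme, with a stand-in record instead of the crate's `Key`:

```rust
// Sketch of the "keys" file format used by AuthController::dump/load_dump.
// The record shape here is illustrative, not the crate's Key struct.
use std::io::{BufRead, BufReader, Cursor, Write};

fn main() -> serde_json::Result<()> {
    let records = vec![
        serde_json::json!({ "id": "abc", "actions": ["search"] }),
        serde_json::json!({ "id": "def", "actions": ["*"] }),
    ];

    // dump: one JSON object per line.
    let mut buf = Vec::new();
    for record in &records {
        serde_json::to_writer(&mut buf, record)?;
        buf.write_all(b"\n").unwrap();
    }

    // load_dump: read back line by line, like `BufReader::lines` above.
    let reader = BufReader::new(Cursor::new(buf));
    for line in reader.lines() {
        let value: serde_json::Value = serde_json::from_str(&line.unwrap())?;
        assert!(value.get("id").is_some());
    }
    Ok(())
}
```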

meilisearch-auth/src/error.rs (new file)
@@ -0,0 +1,46 @@
use std::error::Error;

use meilisearch_error::ErrorCode;
use meilisearch_error::{internal_error, Code};
use serde_json::Value;

pub type Result<T> = std::result::Result<T, AuthControllerError>;

#[derive(Debug, thiserror::Error)]
pub enum AuthControllerError {
    #[error("`{0}` field is mandatory.")]
    MissingParameter(&'static str),
    #[error("`actions` field value `{0}` is invalid. It should be an array of string representing action names.")]
    InvalidApiKeyActions(Value),
    #[error("`indexes` field value `{0}` is invalid. It should be an array of string representing index names.")]
    InvalidApiKeyIndexes(Value),
    #[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")]
    InvalidApiKeyExpiresAt(Value),
    #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
    InvalidApiKeyDescription(Value),
    #[error("API key `{0}` not found.")]
    ApiKeyNotFound(String),
    #[error("Internal error: {0}")]
    Internal(Box<dyn Error + Send + Sync + 'static>),
}

internal_error!(
    AuthControllerError: heed::Error,
    std::io::Error,
    serde_json::Error,
    std::str::Utf8Error
);

impl ErrorCode for AuthControllerError {
    fn error_code(&self) -> Code {
        match self {
            Self::MissingParameter(_) => Code::MissingParameter,
            Self::InvalidApiKeyActions(_) => Code::InvalidApiKeyActions,
            Self::InvalidApiKeyIndexes(_) => Code::InvalidApiKeyIndexes,
            Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt,
            Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription,
            Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound,
            Self::Internal(_) => Code::Internal,
        }
    }
}

meilisearch-auth/src/key.rs (new file)
@@ -0,0 +1,181 @@
use crate::action::Action;
use crate::error::{AuthControllerError, Result};
use crate::store::{KeyId, KEY_ID_LENGTH};
use rand::Rng;
use serde::{Deserialize, Serialize};
use serde_json::{from_value, Value};
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};

#[derive(Debug, Deserialize, Serialize)]
pub struct Key {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    pub id: KeyId,
    pub actions: Vec<Action>,
    pub indexes: Vec<String>,
    #[serde(with = "time::serde::rfc3339::option")]
    pub expires_at: Option<OffsetDateTime>,
    #[serde(with = "time::serde::rfc3339")]
    pub created_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub updated_at: OffsetDateTime,
}

impl Key {
    pub fn create_from_value(value: Value) -> Result<Self> {
        let description = match value.get("description") {
            Some(Value::Null) => None,
            Some(des) => Some(
                from_value(des.clone())
                    .map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?,
            ),
            None => None,
        };

        let id = generate_id();

        let actions = value
            .get("actions")
            .map(|act| {
                from_value(act.clone())
                    .map_err(|_| AuthControllerError::InvalidApiKeyActions(act.clone()))
            })
            .ok_or(AuthControllerError::MissingParameter("actions"))??;

        let indexes = value
            .get("indexes")
            .map(|ind| {
                from_value(ind.clone())
                    .map_err(|_| AuthControllerError::InvalidApiKeyIndexes(ind.clone()))
            })
            .ok_or(AuthControllerError::MissingParameter("indexes"))??;

        let expires_at = value
            .get("expiresAt")
            .map(parse_expiration_date)
            .ok_or(AuthControllerError::MissingParameter("expiresAt"))??;

        let created_at = OffsetDateTime::now_utc();
        let updated_at = created_at;

        Ok(Self {
            description,
            id,
            actions,
            indexes,
            expires_at,
            created_at,
            updated_at,
        })
    }

    pub fn update_from_value(&mut self, value: Value) -> Result<()> {
        if let Some(des) = value.get("description") {
            let des = from_value(des.clone())
                .map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()));
            self.description = des?;
        }

        if let Some(act) = value.get("actions") {
            let act = from_value(act.clone())
                .map_err(|_| AuthControllerError::InvalidApiKeyActions(act.clone()));
            self.actions = act?;
        }

        if let Some(ind) = value.get("indexes") {
            let ind = from_value(ind.clone())
                .map_err(|_| AuthControllerError::InvalidApiKeyIndexes(ind.clone()));
            self.indexes = ind?;
        }

        if let Some(exp) = value.get("expiresAt") {
            self.expires_at = parse_expiration_date(exp)?;
        }

        self.updated_at = OffsetDateTime::now_utc();

        Ok(())
    }

    pub(crate) fn default_admin() -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()),
            id: generate_id(),
            actions: vec![Action::All],
            indexes: vec!["*".to_string()],
            expires_at: None,
            created_at: now,
            updated_at: now,
        }
    }

    pub(crate) fn default_search() -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            description: Some(
                "Default Search API Key (Use it to search from the frontend)".to_string(),
            ),
            id: generate_id(),
            actions: vec![Action::Search],
            indexes: vec!["*".to_string()],
            expires_at: None,
            created_at: now,
            updated_at: now,
        }
    }
}

/// Generate a printable key of 64 characters using thread_rng.
fn generate_id() -> [u8; KEY_ID_LENGTH] {
    const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";

    let mut rng = rand::thread_rng();
    let mut bytes = [0; KEY_ID_LENGTH];
    for byte in bytes.iter_mut() {
        *byte = CHARSET[rng.gen_range(0..CHARSET.len())];
    }

    bytes
}

fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
    match value {
        Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
            .or_else(|_| {
                PrimitiveDateTime::parse(
                    string,
                    format_description!(
                        "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
                    ),
                ).map(|datetime| datetime.assume_utc())
            })
            .or_else(|_| {
                PrimitiveDateTime::parse(
                    string,
                    format_description!(
                        "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
                    ),
                ).map(|datetime| datetime.assume_utc())
            })
            .or_else(|_| {
                Date::parse(string, format_description!(
                    "[year repr:full base:calendar]-[month repr:numerical]-[day]"
                )).map(|date| PrimitiveDateTime::new(date, time!(00:00)).assume_utc())
            })
            .map_err(|_| AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
            // check if the key is already expired.
            .and_then(|d| {
                if d > OffsetDateTime::now_utc() {
                    Ok(d)
                } else {
                    Err(AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
                }
            })
            .map(Option::Some),
        Value::Null => Ok(None),
        _otherwise => Err(AuthControllerError::InvalidApiKeyExpiresAt(value.clone())),
    }
}
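
`parse_expiration_date` tries RFC 3339 first, then progressively looser formats, assumes UTC for naive datetimes, and finally rejects dates in the past. A trimmed sketch of the same fallback chain with the `time` crate, covering two of the accepted shapes only (and omitting the past-date check):

```rust
// Sketch of the fallback parsing chain in `parse_expiration_date` above.
use time::format_description::well_known::Rfc3339;
use time::macros::format_description;
use time::{OffsetDateTime, PrimitiveDateTime};

fn parse(s: &str) -> Option<OffsetDateTime> {
    OffsetDateTime::parse(s, &Rfc3339)
        .or_else(|_| {
            // Datetimes without an offset are assumed to be UTC.
            PrimitiveDateTime::parse(
                s,
                format_description!("[year]-[month]-[day] [hour]:[minute]:[second]"),
            )
            .map(|dt| dt.assume_utc())
        })
        .ok()
}

fn main() {
    assert!(parse("2030-01-01T00:00:00Z").is_some()); // RFC 3339
    assert!(parse("2030-01-01 00:00:00").is_some()); // naive datetime, assumed UTC
    assert!(parse("not a date").is_none());
}
```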

meilisearch-auth/src/lib.rs (new file)
@@ -0,0 +1,272 @@
mod action;
mod dump;
pub mod error;
mod key;
mod store;

use std::collections::{HashMap, HashSet};
use std::path::Path;
use std::str::from_utf8;
use std::sync::Arc;

use serde::{Deserialize, Serialize};
use serde_json::Value;
use sha2::{Digest, Sha256};
use time::OffsetDateTime;

pub use action::{actions, Action};
use error::{AuthControllerError, Result};
pub use key::Key;
pub use store::open_auth_store_env;
use store::HeedAuthStore;

#[derive(Clone)]
pub struct AuthController {
    store: Arc<HeedAuthStore>,
    master_key: Option<String>,
}

impl AuthController {
    pub fn new(db_path: impl AsRef<Path>, master_key: &Option<String>) -> Result<Self> {
        let store = HeedAuthStore::new(db_path)?;

        if store.is_empty()? {
            generate_default_keys(&store)?;
        }

        Ok(Self {
            store: Arc::new(store),
            master_key: master_key.clone(),
        })
    }

    pub fn create_key(&self, value: Value) -> Result<Key> {
        let key = Key::create_from_value(value)?;
        self.store.put_api_key(key)
    }

    pub fn update_key(&self, key: impl AsRef<str>, value: Value) -> Result<Key> {
        let mut key = self.get_key(key)?;
        key.update_from_value(value)?;
        self.store.put_api_key(key)
    }

    pub fn get_key(&self, key: impl AsRef<str>) -> Result<Key> {
        self.store
            .get_api_key(&key)?
            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))
    }

    pub fn get_key_filters(
        &self,
        key: impl AsRef<str>,
        search_rules: Option<SearchRules>,
    ) -> Result<AuthFilter> {
        let mut filters = AuthFilter::default();
        if self
            .master_key
            .as_ref()
            .map_or(false, |master_key| master_key != key.as_ref())
        {
            let key = self
                .store
                .get_api_key(&key)?
                .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))?;

            if !key.indexes.iter().any(|i| i.as_str() == "*") {
                filters.search_rules = match search_rules {
                    // Intersect search_rules with parent key authorized indexes.
                    Some(search_rules) => SearchRules::Map(
                        key.indexes
                            .into_iter()
                            .filter_map(|index| {
                                search_rules
                                    .get_index_search_rules(&index)
                                    .map(|index_search_rules| (index, Some(index_search_rules)))
                            })
                            .collect(),
                    ),
                    None => SearchRules::Set(key.indexes.into_iter().collect()),
                };
            } else if let Some(search_rules) = search_rules {
                filters.search_rules = search_rules;
            }

            filters.allow_index_creation = key
                .actions
                .iter()
                .any(|&action| action == Action::IndexesAdd || action == Action::All);
        }

        Ok(filters)
    }

    pub fn list_keys(&self) -> Result<Vec<Key>> {
        self.store.list_api_keys()
    }

    pub fn delete_key(&self, key: impl AsRef<str>) -> Result<()> {
        if self.store.delete_api_key(&key)? {
            Ok(())
        } else {
            Err(AuthControllerError::ApiKeyNotFound(
                key.as_ref().to_string(),
            ))
        }
    }

    pub fn get_master_key(&self) -> Option<&String> {
        self.master_key.as_ref()
    }

    /// Generate a valid key from a key id using the current master key.
    /// Returns None if no master key has been set.
    pub fn generate_key(&self, id: &str) -> Option<String> {
        self.master_key
            .as_ref()
            .map(|master_key| generate_key(master_key.as_bytes(), id))
    }

    /// Check if the provided key is authorized to make a specific action
    /// without checking if the key is valid.
    pub fn is_key_authorized(
        &self,
        key: &[u8],
        action: Action,
        index: Option<&str>,
    ) -> Result<bool> {
        match self
            .store
            // check if the key has access to all indexes.
            .get_expiration_date(key, action, None)?
            .or(match index {
                // else check if the key has access to the requested index.
                Some(index) => {
                    self.store
                        .get_expiration_date(key, action, Some(index.as_bytes()))?
                }
                // or to any index if no index has been requested.
                None => self.store.prefix_first_expiration_date(key, action)?,
            }) {
            // check expiration date.
            Some(Some(exp)) => Ok(OffsetDateTime::now_utc() < exp),
            // no expiration date.
            Some(None) => Ok(true),
            // action or index forbidden.
            None => Ok(false),
        }
    }

    /// Check if the provided key is valid
    /// without checking if the key is authorized to make a specific action.
    pub fn is_key_valid(&self, key: &[u8]) -> Result<bool> {
        if let Some(id) = self.store.get_key_id(key) {
            let id = from_utf8(&id)?;
            if let Some(generated) = self.generate_key(id) {
                return Ok(generated.as_bytes() == key);
            }
        }

        Ok(false)
    }

    /// Check if the provided key is valid
    /// and is authorized to make a specific action.
    pub fn authenticate(&self, key: &[u8], action: Action, index: Option<&str>) -> Result<bool> {
        if self.is_key_authorized(key, action, index)? {
            self.is_key_valid(key)
        } else {
            Ok(false)
        }
    }
}

pub struct AuthFilter {
    pub search_rules: SearchRules,
    pub allow_index_creation: bool,
}

impl Default for AuthFilter {
    fn default() -> Self {
        Self {
            search_rules: SearchRules::default(),
            allow_index_creation: true,
        }
    }
}

/// Transparent wrapper around a list of allowed indexes with the search rules to apply for each.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(untagged)]
pub enum SearchRules {
    Set(HashSet<String>),
    Map(HashMap<String, Option<IndexSearchRules>>),
}

impl Default for SearchRules {
    fn default() -> Self {
        Self::Set(Some("*".to_string()).into_iter().collect())
    }
}

impl SearchRules {
    pub fn is_index_authorized(&self, index: &str) -> bool {
        match self {
            Self::Set(set) => set.contains("*") || set.contains(index),
            Self::Map(map) => map.contains_key("*") || map.contains_key(index),
        }
    }

    pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
        match self {
            Self::Set(set) => {
                if set.contains("*") || set.contains(index) {
                    Some(IndexSearchRules::default())
                } else {
                    None
                }
            }
            Self::Map(map) => map
                .get(index)
                .or_else(|| map.get("*"))
                .map(|isr| isr.clone().unwrap_or_default()),
        }
    }
}

impl IntoIterator for SearchRules {
    type Item = (String, IndexSearchRules);
    type IntoIter = Box<dyn Iterator<Item = Self::Item>>;

    fn into_iter(self) -> Self::IntoIter {
        match self {
            Self::Set(array) => {
                Box::new(array.into_iter().map(|i| (i, IndexSearchRules::default())))
            }
            Self::Map(map) => {
                Box::new(map.into_iter().map(|(i, isr)| (i, isr.unwrap_or_default())))
            }
        }
    }
}

/// Contains the rules to apply on the top of the search query for a specific index.
///
/// filter: search filter to apply in addition to query filters.
#[derive(Debug, Serialize, Deserialize, Default, Clone)]
pub struct IndexSearchRules {
    pub filter: Option<serde_json::Value>,
}

fn generate_key(master_key: &[u8], keyid: &str) -> String {
    let key = [keyid.as_bytes(), master_key].concat();
    let sha = Sha256::digest(&key);
    format!("{}{:x}", keyid, sha)
}

fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
    store.put_api_key(Key::default_admin())?;
    store.put_api_key(Key::default_search())?;

    Ok(())
}
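
The key scheme in `generate_key`/`is_key_valid` above is deterministic: an API key is its 8-character id followed by the hex SHA-256 of `id || master_key`, so validity is checked by re-deriving the key from its own prefix rather than by storing the full key. A self-contained sketch (assuming `sha2` 0.9 as pinned in this diff; the sample id and master key are made up):

```rust
// Sketch of the derive-and-compare key scheme from lib.rs above.
use sha2::{Digest, Sha256};

fn generate_key(master_key: &[u8], keyid: &str) -> String {
    // Same construction as the crate's private `generate_key` helper.
    let key = [keyid.as_bytes(), master_key].concat();
    let sha = Sha256::digest(&key);
    format!("{}{:x}", keyid, sha)
}

fn main() {
    let master_key = b"my-master-key"; // illustrative value
    let key = generate_key(master_key, "AbCd1234");

    // Validation re-derives from the KEY_ID_LENGTH (8 byte) prefix.
    let (id, _) = key.split_at(8);
    assert_eq!(generate_key(master_key, id), key);
    println!("{key}");
}
```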

meilisearch-auth/src/store.rs (new file)
@@ -0,0 +1,256 @@
use enum_iterator::IntoEnumIterator;
use std::borrow::Cow;
use std::cmp::Reverse;
use std::convert::TryFrom;
use std::convert::TryInto;
use std::fs::create_dir_all;
use std::path::Path;
use std::str;
use std::sync::Arc;

use heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
use heed::{Database, Env, EnvOpenOptions, RwTxn};
use time::OffsetDateTime;

use super::error::Result;
use super::{Action, Key};

const AUTH_STORE_SIZE: usize = 1_073_741_824; //1GiB
pub const KEY_ID_LENGTH: usize = 8;
const AUTH_DB_PATH: &str = "auth";
const KEY_DB_NAME: &str = "api-keys";
const KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME: &str = "keyid-action-index-expiration";

pub type KeyId = [u8; KEY_ID_LENGTH];

#[derive(Clone)]
pub struct HeedAuthStore {
    env: Arc<Env>,
    keys: Database<ByteSlice, SerdeJson<Key>>,
    action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<OffsetDateTime>>>,
    should_close_on_drop: bool,
}

impl Drop for HeedAuthStore {
    fn drop(&mut self) {
        if self.should_close_on_drop && Arc::strong_count(&self.env) == 1 {
            self.env.as_ref().clone().prepare_for_closing();
        }
    }
}

pub fn open_auth_store_env(path: &Path) -> heed::Result<heed::Env> {
    let mut options = EnvOpenOptions::new();
    options.map_size(AUTH_STORE_SIZE); // 1GB
    options.max_dbs(2);
    options.open(path)
}

impl HeedAuthStore {
    pub fn new(path: impl AsRef<Path>) -> Result<Self> {
        let path = path.as_ref().join(AUTH_DB_PATH);
        create_dir_all(&path)?;
        let env = Arc::new(open_auth_store_env(path.as_ref())?);
        let keys = env.create_database(Some(KEY_DB_NAME))?;
        let action_keyid_index_expiration =
            env.create_database(Some(KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME))?;
        Ok(Self {
            env,
            keys,
            action_keyid_index_expiration,
            should_close_on_drop: true,
        })
    }

    pub fn set_drop_on_close(&mut self, v: bool) {
        self.should_close_on_drop = v;
    }

    pub fn is_empty(&self) -> Result<bool> {
        let rtxn = self.env.read_txn()?;

        Ok(self.keys.len(&rtxn)? == 0)
    }

    pub fn put_api_key(&self, key: Key) -> Result<Key> {
        let mut wtxn = self.env.write_txn()?;
        self.keys.put(&mut wtxn, &key.id, &key)?;

        let id = key.id;
        // delete key from inverted database before refilling it.
        self.delete_key_from_inverted_db(&mut wtxn, &id)?;
        // create inverted database.
        let db = self.action_keyid_index_expiration;

        let actions = if key.actions.contains(&Action::All) {
            // if key.actions contains All, we iterate over all actions.
            Action::into_enum_iter().collect()
        } else {
            key.actions.clone()
        };

        let no_index_restriction = key.indexes.contains(&"*".to_owned());
        for action in actions {
            if no_index_restriction {
                // If there is no index restriction we put None.
                db.put(&mut wtxn, &(&id, &action, None), &key.expires_at)?;
            } else {
                // else we create a key for each index.
                for index in key.indexes.iter() {
                    db.put(
                        &mut wtxn,
                        &(&id, &action, Some(index.as_bytes())),
                        &key.expires_at,
                    )?;
                }
            }
        }

        wtxn.commit()?;

        Ok(key)
    }

    pub fn get_api_key(&self, key: impl AsRef<str>) -> Result<Option<Key>> {
        let rtxn = self.env.read_txn()?;
        match self.get_key_id(key.as_ref().as_bytes()) {
            Some(id) => self.keys.get(&rtxn, &id).map_err(|e| e.into()),
            None => Ok(None),
        }
    }

    pub fn delete_api_key(&self, key: impl AsRef<str>) -> Result<bool> {
        let mut wtxn = self.env.write_txn()?;
        let existing = match self.get_key_id(key.as_ref().as_bytes()) {
            Some(id) => {
                let existing = self.keys.delete(&mut wtxn, &id)?;
                self.delete_key_from_inverted_db(&mut wtxn, &id)?;
                existing
            }
            None => false,
        };
        wtxn.commit()?;

        Ok(existing)
    }

    pub fn list_api_keys(&self) -> Result<Vec<Key>> {
        let mut list = Vec::new();
        let rtxn = self.env.read_txn()?;
        for result in self.keys.remap_key_type::<DecodeIgnore>().iter(&rtxn)? {
            let (_, content) = result?;
            list.push(content);
        }
        list.sort_unstable_by_key(|k| Reverse(k.created_at));
        Ok(list)
    }

    pub fn get_expiration_date(
        &self,
        key: &[u8],
        action: Action,
        index: Option<&[u8]>,
    ) -> Result<Option<Option<OffsetDateTime>>> {
        let rtxn = self.env.read_txn()?;
        match self.get_key_id(key) {
            Some(id) => {
                let tuple = (&id, &action, index);
                Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
            }
            None => Ok(None),
        }
    }

    pub fn prefix_first_expiration_date(
        &self,
        key: &[u8],
        action: Action,
    ) -> Result<Option<Option<OffsetDateTime>>> {
        let rtxn = self.env.read_txn()?;
        match self.get_key_id(key) {
            Some(id) => {
                let tuple = (&id, &action, None);
                Ok(self
                    .action_keyid_index_expiration
                    .prefix_iter(&rtxn, &tuple)?
                    .next()
                    .transpose()?
                    .map(|(_, expiration)| expiration))
            }
            None => Ok(None),
        }
    }

    pub fn get_key_id(&self, key: &[u8]) -> Option<KeyId> {
        try_split_array_at::<_, KEY_ID_LENGTH>(key).map(|(id, _)| *id)
    }

    fn delete_key_from_inverted_db(&self, wtxn: &mut RwTxn, key: &KeyId) -> Result<()> {
        let mut iter = self
            .action_keyid_index_expiration
            .remap_types::<ByteSlice, DecodeIgnore>()
            .prefix_iter_mut(wtxn, key)?;
        while iter.next().transpose()?.is_some() {
            // safety: we don't keep references from inside the LMDB database.
            unsafe { iter.del_current()? };
        }

        Ok(())
    }
}

/// Codec allowing to retrieve the expiration date of an action,
/// optionally on a specific index, for a given key.
pub struct KeyIdActionCodec;

impl<'a> heed::BytesDecode<'a> for KeyIdActionCodec {
    type DItem = (KeyId, Action, Option<&'a [u8]>);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (key_id, action_bytes) = try_split_array_at(bytes)?;
        let (action_bytes, index) = match try_split_array_at(action_bytes)? {
            (action, []) => (action, None),
            (action, index) => (action, Some(index)),
        };
        let action = Action::from_repr(u8::from_be_bytes(*action_bytes))?;

        Some((*key_id, action, index))
    }
}

impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
    type EItem = (&'a KeyId, &'a Action, Option<&'a [u8]>);

    fn bytes_encode((key_id, action, index): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::new();

        bytes.extend_from_slice(*key_id);
        let action_bytes = u8::to_be_bytes(action.repr());
        bytes.extend_from_slice(&action_bytes);
        if let Some(index) = index {
            bytes.extend_from_slice(index);
        }

        Some(Cow::Owned(bytes))
    }
}

/// Divides one slice into two at an index, returns `None` if mid is out of bounds.
pub fn try_split_at<T>(slice: &[T], mid: usize) -> Option<(&[T], &[T])> {
    if mid <= slice.len() {
        Some(slice.split_at(mid))
    } else {
        None
    }
}

/// Divides one slice into an array and the tail at an index,
/// returns `None` if `N` is out of bounds.
pub fn try_split_array_at<T, const N: usize>(slice: &[T]) -> Option<(&[T; N], &[T])>
where
    [T; N]: for<'a> TryFrom<&'a [T]>,
{
    let (head, tail) = try_split_at(slice, N)?;
    let head = head.try_into().ok()?;
    Some((head, tail))
}
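
`KeyIdActionCodec` lays the LMDB key out as `key_id (8 bytes) || action repr (1 byte) || optional index bytes`, which is what lets `prefix_iter` scan every entry for a given `(key_id, action)` pair. A dependency-free sketch of the encoding side:

```rust
// Sketch of the byte layout produced by KeyIdActionCodec::bytes_encode above.
const KEY_ID_LENGTH: usize = 8;

fn encode(key_id: &[u8; KEY_ID_LENGTH], action: u8, index: Option<&[u8]>) -> Vec<u8> {
    let mut bytes = Vec::new();
    bytes.extend_from_slice(key_id);
    bytes.extend_from_slice(&action.to_be_bytes());
    if let Some(index) = index {
        bytes.extend_from_slice(index);
    }
    bytes
}

fn main() {
    let id = *b"AbCd1234"; // illustrative 8-byte key id
    let with_index = encode(&id, 1, Some(&b"movies"[..]));
    let prefix = encode(&id, 1, None);
    // The index-restricted entry starts with the (key_id, action) prefix,
    // so a prefix scan over `prefix` finds it.
    assert!(with_index.starts_with(&prefix));
}
```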

meilisearch-error/Cargo.toml
@@ -1,9 +1,15 @@
 [package]
 name = "meilisearch-error"
-version = "0.23.0"
+version = "0.26.1"
 authors = ["marin <postma.marin@protonmail.com>"]
-edition = "2018"
+edition = "2021"

 [dependencies]
-actix-http = "=3.0.0-beta.10"
+actix-web = { version = "4", default-features = false }
+proptest = { version = "1.0.0", optional = true }
+proptest-derive = { version = "0.3.0", optional = true }
 serde = { version = "1.0.130", features = ["derive"] }
+serde_json = "1.0.69"
+
+[features]
+test-traits = ["proptest", "proptest-derive"]

meilisearch-error/src/lib.rs
@@ -1,8 +1,75 @@
 use std::fmt;

-use actix_http::http::StatusCode;
+use actix_web::{self as aweb, http::StatusCode, HttpResponseBuilder};
 use serde::{Deserialize, Serialize};

+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
+pub struct ResponseError {
+    #[serde(skip)]
+    #[cfg_attr(
+        feature = "test-traits",
+        proptest(strategy = "strategy::status_code_strategy()")
+    )]
+    code: StatusCode,
+    message: String,
+    #[serde(rename = "code")]
+    error_code: String,
+    #[serde(rename = "type")]
+    error_type: String,
+    #[serde(rename = "link")]
+    error_link: String,
+}
+
+impl ResponseError {
+    pub fn from_msg(message: String, code: Code) -> Self {
+        Self {
+            code: code.http(),
+            message,
+            error_code: code.err_code().error_name.to_string(),
+            error_type: code.type_(),
+            error_link: code.url(),
+        }
+    }
+}
+
+impl fmt::Display for ResponseError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.message.fmt(f)
+    }
+}
+
+impl std::error::Error for ResponseError {}
+
+impl<T> From<T> for ResponseError
+where
+    T: ErrorCode,
+{
+    fn from(other: T) -> Self {
+        Self {
+            code: other.http_status(),
+            message: other.to_string(),
+            error_code: other.error_name(),
+            error_type: other.error_type(),
+            error_link: other.error_url(),
+        }
+    }
+}
+
+impl aweb::error::ResponseError for ResponseError {
+    fn error_response(&self) -> aweb::HttpResponse {
+        let json = serde_json::to_vec(self).unwrap();
+        HttpResponseBuilder::new(self.status_code())
+            .content_type("application/json")
+            .body(json)
+    }
+
+    fn status_code(&self) -> StatusCode {
+        self.code
+    }
+}
+
 pub trait ErrorCode: std::error::Error {
     fn error_code(&self) -> Code;

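
With the serde attributes above, the internal `StatusCode` is skipped and the remaining fields surface as `message`/`code`/`type`/`link` in every error body. A sketch of the resulting wire shape (the concrete values below are illustrative, not taken from this diff):

```rust
// Sketch of a serialized ResponseError body; values are made up for display.
fn main() {
    let body = serde_json::json!({
        "message": "Index `movies` not found.",
        "code": "index_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#index_not_found"
    });
    println!("{}", serde_json::to_string_pretty(&body).unwrap());
}
```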
@@ -39,9 +106,9 @@ impl fmt::Display for ErrorType {
         use ErrorType::*;

         match self {
-            InternalError => write!(f, "internal_error"),
-            InvalidRequestError => write!(f, "invalid_request_error"),
-            AuthenticationError => write!(f, "authentication_error"),
+            InternalError => write!(f, "internal"),
+            InvalidRequestError => write!(f, "invalid_request"),
+            AuthenticationError => write!(f, "auth"),
         }
     }
 }
@@ -53,7 +120,6 @@ pub enum Code {
     IndexAlreadyExists,
     IndexNotFound,
     InvalidIndexUid,
-    OpenIndex,

     // invalid state error
     InvalidState,
@@ -62,20 +128,24 @@ pub enum Code {

     MaxFieldsLimitExceeded,
     MissingDocumentId,
+    InvalidDocumentId,

-    Facet,
     Filter,
     Sort,

     BadParameter,
     BadRequest,
+    DatabaseSizeLimitReached,
     DocumentNotFound,
     Internal,
     InvalidGeoField,
     InvalidRankingRule,
+    InvalidStore,
     InvalidToken,
     MissingAuthorizationHeader,
-    NotFound,
+    NoSpaceLeftOnDevice,
+    DumpNotFound,
+    TaskNotFound,
     PayloadTooLarge,
     RetrieveDocument,
     SearchDocuments,
@@ -88,6 +158,13 @@ pub enum Code {
     MissingContentType,
     MalformedPayload,
     MissingPayload,
+
+    ApiKeyNotFound,
+    MissingParameter,
+    InvalidApiKeyActions,
+    InvalidApiKeyIndexes,
+    InvalidApiKeyExpiresAt,
+    InvalidApiKeyDescription,
 }

 impl Code {
@@ -98,34 +175,39 @@ impl Code {
         match self {
             // index related errors
             // create index is thrown on internal error while creating an index.
-            CreateIndex => ErrCode::internal("index_creation_failed", StatusCode::BAD_REQUEST),
-            IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::BAD_REQUEST),
+            CreateIndex => {
+                ErrCode::internal("index_creation_failed", StatusCode::INTERNAL_SERVER_ERROR)
+            }
+            IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::CONFLICT),
             // thrown when requesting an unexisting index
             IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
             InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
-            OpenIndex => {
-                ErrCode::internal("index_not_accessible", StatusCode::INTERNAL_SERVER_ERROR)
-            }

             // invalid state error
             InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
             // thrown when no primary key has been set
-            MissingPrimaryKey => ErrCode::invalid("missing_primary_key", StatusCode::BAD_REQUEST),
+            MissingPrimaryKey => {
+                ErrCode::invalid("primary_key_inference_failed", StatusCode::BAD_REQUEST)
+            }
             // error thrown when trying to set an already existing primary key
             PrimaryKeyAlreadyPresent => {
-                ErrCode::invalid("primary_key_already_present", StatusCode::BAD_REQUEST)
+                ErrCode::invalid("index_primary_key_already_exists", StatusCode::BAD_REQUEST)
             }
             // invalid ranking rule
-            InvalidRankingRule => ErrCode::invalid("invalid_request", StatusCode::BAD_REQUEST),
+            InvalidRankingRule => ErrCode::invalid("invalid_ranking_rule", StatusCode::BAD_REQUEST),
+
+            // invalid database
+            InvalidStore => {
+                ErrCode::internal("invalid_store_file", StatusCode::INTERNAL_SERVER_ERROR)
+            }

             // invalid document
             MaxFieldsLimitExceeded => {
                 ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST)
             }
             MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST),
+            InvalidDocumentId => ErrCode::invalid("invalid_document_id", StatusCode::BAD_REQUEST),

-            // error related to facets
-            Facet => ErrCode::invalid("invalid_facet", StatusCode::BAD_REQUEST),
             // error related to filters
             Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST),
             // error related to sorts
@@ -133,16 +215,22 @@ impl Code {

             BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
             BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
+            DatabaseSizeLimitReached => ErrCode::internal(
+                "database_size_limit_reached",
+                StatusCode::INTERNAL_SERVER_ERROR,
+            ),
             DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
             Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
-            InvalidGeoField => {
-                ErrCode::authentication("invalid_geo_field", StatusCode::BAD_REQUEST)
-            }
-            InvalidToken => ErrCode::authentication("invalid_token", StatusCode::FORBIDDEN),
+            InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
+            InvalidToken => ErrCode::authentication("invalid_api_key", StatusCode::FORBIDDEN),
             MissingAuthorizationHeader => {
                 ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED)
             }
-            NotFound => ErrCode::invalid("not_found", StatusCode::NOT_FOUND),
+            TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND),
+            DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND),
+            NoSpaceLeftOnDevice => {
+                ErrCode::internal("no_space_left_on_device", StatusCode::INTERNAL_SERVER_ERROR)
+            }
             PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
             RetrieveDocument => {
                 ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST)
@@ -154,7 +242,7 @@ impl Code {

             // error related to dump
             DumpAlreadyInProgress => {
-                ErrCode::invalid("dump_already_in_progress", StatusCode::CONFLICT)
+                ErrCode::invalid("dump_already_processing", StatusCode::CONFLICT)
             }
             DumpProcessFailed => {
                 ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR)
@@ -167,6 +255,22 @@ impl Code {
                 ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
             }
             MissingPayload => ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST),
+
+            // error related to keys
+            ApiKeyNotFound => ErrCode::invalid("api_key_not_found", StatusCode::NOT_FOUND),
+            MissingParameter => ErrCode::invalid("missing_parameter", StatusCode::BAD_REQUEST),
+            InvalidApiKeyActions => {
+                ErrCode::invalid("invalid_api_key_actions", StatusCode::BAD_REQUEST)
+            }
+            InvalidApiKeyIndexes => {
+                ErrCode::invalid("invalid_api_key_indexes", StatusCode::BAD_REQUEST)
+            }
+            InvalidApiKeyExpiresAt => {
+                ErrCode::invalid("invalid_api_key_expires_at", StatusCode::BAD_REQUEST)
+            }
+            InvalidApiKeyDescription => {
+                ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST)
+            }
         }
     }

@@ -223,3 +327,27 @@ impl ErrCode {
         }
     }
 }
+
+#[cfg(feature = "test-traits")]
+mod strategy {
+    use proptest::strategy::Strategy;
+
+    use super::*;
+
+    pub(super) fn status_code_strategy() -> impl Strategy<Value = StatusCode> {
+        (100..999u16).prop_map(|i| StatusCode::from_u16(i).unwrap())
+    }
+}
+
+#[macro_export]
+macro_rules! internal_error {
+    ($target:ty : $($other:path), *) => {
+        $(
+            impl From<$other> for $target {
+                fn from(other: $other) -> Self {
+                    Self::Internal(Box::new(other))
+                }
+            }
+        )*
+    }
+}
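
The `internal_error!` macro generates one `From` impl per listed error type, funnelling them all into an `Internal` variant; this is what lets `?` convert `heed::Error`, `std::io::Error`, and friends in `meilisearch-auth` above. A standalone sketch with a stand-in enum showing the expansion:

```rust
// Sketch of what `internal_error!(AuthControllerError: ...)` expands to.
// `MyError` is a minimal stand-in, not the crate's type.
#[derive(Debug)]
enum MyError {
    Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
}

macro_rules! internal_error {
    ($target:ty : $($other:path), *) => {
        $(
            impl From<$other> for $target {
                fn from(other: $other) -> Self {
                    Self::Internal(Box::new(other))
                }
            }
        )*
    }
}

internal_error!(MyError: std::io::Error, std::str::Utf8Error);

fn main() {
    let io_err = std::io::Error::new(std::io::ErrorKind::Other, "boom");
    let err: MyError = io_err.into(); // provided by the generated From impl
    match err {
        MyError::Internal(inner) => println!("internal error: {inner}"),
    }
}
```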

meilisearch-http/Cargo.toml
@@ -1,17 +1,17 @@
 [package]
 authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
-description = "MeiliSearch HTTP server"
+description = "Meilisearch HTTP server"
-edition = "2018"
+edition = "2021"
 license = "MIT"
 name = "meilisearch-http"
-version = "0.23.0"
+version = "0.26.1"

 [[bin]]
 name = "meilisearch"
 path = "src/main.rs"

 [build-dependencies]
-actix-web-static-files = { git = "https://github.com/MarinPostma/actix-web-static-files.git", rev = "39d8006", optional = true }
+static-files = { version = "0.2.1", optional = true }
 anyhow = { version = "1.0.43", optional = true }
 cargo_toml = { version = "0.9", optional = true }
 hex = { version = "0.4.3", optional = true }
@@ -22,16 +22,16 @@ vergen = { version = "5.1.15", default-features = false, features = ["git"] }
 zip = { version = "0.5.13", optional = true }

 [dependencies]
-actix-cors = { git = "https://github.com/MarinPostma/actix-extras.git", rev = "963ac94d" }
+actix-cors = "0.6"
-actix-web = { version = "4.0.0-beta.9", features = ["rustls"] }
+actix-web = { version = "4", features = ["rustls"] }
-actix-web-static-files = { git = "https://github.com/MarinPostma/actix-web-static-files.git", rev = "39d8006", optional = true }
+actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
 anyhow = { version = "1.0.43", features = ["backtrace"] }
+arc-swap = "1.3.2"
 async-stream = "0.3.2"
 async-trait = "0.1.51"
-arc-swap = "1.3.2"
+bstr = "0.2.17"
-byte-unit = { version = "4.0.12", default-features = false, features = ["std"] }
+byte-unit = { version = "4.0.12", default-features = false, features = ["std", "serde"] }
 bytes = "1.1.0"
-chrono = { version = "0.4.19", features = ["serde"] }
 crossbeam-channel = "0.5.1"
 either = "1.6.1"
 env_logger = "0.9.0"
@@ -42,41 +42,47 @@ futures-util = "0.3.17"
 heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
 http = "0.2.4"
 indexmap = { version = "1.7.0", features = ["serde-1"] }
+iso8601-duration = "0.1.0"
 itertools = "0.10.1"
+jsonwebtoken = "7"
 log = "0.4.14"
-meilisearch-lib = { path = "../meilisearch-lib" }
+meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-error = { path = "../meilisearch-error" }
-meilisearch-tokenizer = { git = "https://github.com/meilisearch/tokenizer.git", tag = "v0.2.5" }
+meilisearch-lib = { path = "../meilisearch-lib" }
-memmap = "0.7.0"
 mime = "0.3.16"
 num_cpus = "1.13.0"
+obkv = "0.2.0"
 once_cell = "1.8.0"
 parking_lot = "0.11.2"
+platform-dirs = "0.3.0"
 rand = "0.8.4"
 rayon = "1.5.1"
 regex = "1.5.4"
-rustls = "0.19.1"
+rustls = "0.20.2"
+rustls-pemfile = "0.2"
+segment = { version = "0.2.0", optional = true }
 serde = { version = "1.0.130", features = ["derive"] }
 serde_json = { version = "1.0.67", features = ["preserve_order"] }
 sha2 = "0.9.6"
 siphasher = "0.3.7"
 slice-group-by = "0.2.6"
-structopt = "0.3.23"
+static-files = { version = "0.2.1", optional = true }
+clap = { version = "3.0", features = ["derive", "env"] }
+sysinfo = "0.20.2"
 tar = "0.4.37"
 tempfile = "3.2.0"
 thiserror = "1.0.28"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tokio = { version = "1.11.0", features = ["full"] }
+tokio-stream = "0.1.7"
 uuid = { version = "0.8.2", features = ["serde"] }
 walkdir = "2.3.2"
-obkv = "0.2.0"
+pin-project-lite = "0.2.8"
-pin-project = "1.0.8"
-whoami = { version = "1.1.3", optional = true }
-reqwest = { version = "0.11.4", features = ["json", "rustls-tls"], default-features = false, optional = true }
-sysinfo = "0.20.2"
-tokio-stream = "0.1.7"

 [dev-dependencies]
 actix-rt = "2.2.0"
+assert-json-diff = "2.0.1"
+maplit = "1.0.2"
 paste = "1.0.5"
 serde_url_params = "0.2.1"
 urlencoding = "2.1.0"
@@ -84,6 +90,7 @@ urlencoding = "2.1.0"
 [features]
 mini-dashboard = [
     "actix-web-static-files",
+    "static-files",
     "anyhow",
     "cargo_toml",
     "hex",
@@ -92,12 +99,12 @@ mini-dashboard = [
     "tempfile",
     "zip",
 ]
-analytics = ["whoami", "reqwest"]
+analytics = ["segment"]
 default = ["analytics", "mini-dashboard"]

 [target.'cfg(target_os = "linux")'.dependencies]
 tikv-jemallocator = "0.4.1"

 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.4/build.zip"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.9/build.zip"
-sha1 = "750e8a8e56cfa61fbf9ead14b08a5f17ad3f3d37"
+sha1 = "b1833c3e5dc6b5d9d519ae4834935ae6c8a47024"

meilisearch-http/build.rs
@@ -16,11 +16,11 @@ mod mini_dashboard {
     use std::io::{Cursor, Read, Write};
     use std::path::PathBuf;

-    use actix_web_static_files::resource_dir;
     use anyhow::Context;
     use cargo_toml::Manifest;
     use reqwest::blocking::get;
     use sha1::{Digest, Sha1};
+    use static_files::resource_dir;

     pub fn setup_mini_dashboard() -> anyhow::Result<()> {
         let cargo_manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
|
@@ -1,126 +0,0 @@
-use std::hash::{Hash, Hasher};
-use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};
-
-use log::debug;
-use meilisearch_lib::MeiliSearch;
-use serde::Serialize;
-use siphasher::sip::SipHasher;
-
-use crate::Opt;
-
-const AMPLITUDE_API_KEY: &str = "f7fba398780e06d8fe6666a9be7e3d47";
-
-#[derive(Debug, Serialize)]
-struct EventProperties {
-    database_size: u64,
-    last_update_timestamp: Option<i64>, //timestamp
-    number_of_documents: Vec<u64>,
-}
-
-impl EventProperties {
-    async fn from(data: MeiliSearch) -> anyhow::Result<EventProperties> {
-        let stats = data.get_all_stats().await?;
-
-        let database_size = stats.database_size;
-        let last_update_timestamp = stats.last_update.map(|u| u.timestamp());
-        let number_of_documents = stats
-            .indexes
-            .values()
-            .map(|index| index.number_of_documents)
-            .collect();
-
-        Ok(EventProperties {
-            database_size,
-            last_update_timestamp,
-            number_of_documents,
-        })
-    }
-}
-
-#[derive(Debug, Serialize)]
-struct UserProperties<'a> {
-    env: &'a str,
-    start_since_days: u64,
-    user_email: Option<String>,
-    server_provider: Option<String>,
-}
-
-#[derive(Debug, Serialize)]
-struct Event<'a> {
-    user_id: &'a str,
-    event_type: &'a str,
-    device_id: &'a str,
-    time: u64,
-    app_version: &'a str,
-    user_properties: UserProperties<'a>,
-    event_properties: Option<EventProperties>,
-}
-
-#[derive(Debug, Serialize)]
-struct AmplitudeRequest<'a> {
-    api_key: &'a str,
-    events: Vec<Event<'a>>,
-}
-
-pub async fn analytics_sender(data: MeiliSearch, opt: Opt) {
-    let username = whoami::username();
-    let hostname = whoami::hostname();
-    let platform = whoami::platform();
-
-    let uid = username + &hostname + &platform.to_string();
-
-    let mut hasher = SipHasher::new();
-    uid.hash(&mut hasher);
-    let hash = hasher.finish();
-
-    let uid = format!("{:X}", hash);
-    let platform = platform.to_string();
-    let first_start = Instant::now();
-
-    loop {
-        let n = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
-        let user_id = &uid;
-        let device_id = &platform;
-        let time = n.as_secs();
-        let event_type = "runtime_tick";
-        let elapsed_since_start = first_start.elapsed().as_secs() / 86_400; // One day
-        let event_properties = EventProperties::from(data.clone()).await.ok();
-        let app_version = env!("CARGO_PKG_VERSION").to_string();
-        let app_version = app_version.as_str();
-        let user_email = std::env::var("MEILI_USER_EMAIL").ok();
-        let server_provider = std::env::var("MEILI_SERVER_PROVIDER").ok();
-        let user_properties = UserProperties {
-            env: &opt.env,
-            start_since_days: elapsed_since_start,
-            user_email,
-            server_provider,
-        };
-
-        let event = Event {
-            user_id,
-            event_type,
-            device_id,
-            time,
-            app_version,
-            user_properties,
-            event_properties,
-        };
-
-        let request = AmplitudeRequest {
-            api_key: AMPLITUDE_API_KEY,
-            events: vec![event],
-        };
-
-        let response = reqwest::Client::new()
-            .post("https://api2.amplitude.com/2/httpapi")
-            .timeout(Duration::from_secs(60)) // 1 minute max
-            .json(&request)
-            .send()
-            .await;
-        if let Err(e) = response {
-            debug!("Unsuccessful call to Amplitude: {}", e);
-        }
-
-        tokio::time::sleep(Duration::from_secs(3600)).await;
-    }
-}
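
The deleted module derived its anonymous user id by hashing username, hostname, and platform with SipHash, so no raw machine identifier ever left the host. A self-contained sketch of that scheme (crate assumptions: whoami 1.x, siphasher 0.3):

use std::hash::{Hash, Hasher};

use siphasher::sip::SipHasher;

fn anonymous_uid() -> String {
    // Concatenate the machine identifiers, then keep only their hash.
    let uid = whoami::username() + &whoami::hostname() + &whoami::platform().to_string();
    let mut hasher = SipHasher::new();
    uid.hash(&mut hasher);
    format!("{:X}", hasher.finish())
}

fn main() {
    println!("{}", anonymous_uid());
}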
meilisearch-http/src/analytics/mock_analytics.rs (new file, 51 lines)
@@ -0,0 +1,51 @@
+use std::{any::Any, sync::Arc};
+
+use actix_web::HttpRequest;
+use serde_json::Value;
+
+use crate::{routes::indexes::documents::UpdateDocumentsQuery, Opt};
+
+use super::{find_user_id, Analytics};
+
+pub struct MockAnalytics;
+
+#[derive(Default)]
+pub struct SearchAggregator {}
+
+#[allow(dead_code)]
+impl SearchAggregator {
+    pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
+        Self::default()
+    }
+
+    pub fn succeed(&mut self, _: &dyn Any) {}
+}
+
+impl MockAnalytics {
+    #[allow(clippy::new_ret_no_self)]
+    pub fn new(opt: &Opt) -> (Arc<dyn Analytics>, String) {
+        let user = find_user_id(&opt.db_path).unwrap_or_default();
+        (Arc::new(Self), user)
+    }
+}
+
+impl Analytics for MockAnalytics {
+    // These methods are noop and should be optimized out
+    fn publish(&self, _event_name: String, _send: Value, _request: Option<&HttpRequest>) {}
+    fn get_search(&self, _aggregate: super::SearchAggregator) {}
+    fn post_search(&self, _aggregate: super::SearchAggregator) {}
+    fn add_documents(
+        &self,
+        _documents_query: &UpdateDocumentsQuery,
+        _index_creation: bool,
+        _request: &HttpRequest,
+    ) {
+    }
+    fn update_documents(
+        &self,
+        _documents_query: &UpdateDocumentsQuery,
+        _index_creation: bool,
+        _request: &HttpRequest,
+    ) {
+    }
+}
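
`MockAnalytics` is a textbook null object: callers keep an `Arc<dyn Analytics>` and stay identical whether telemetry is compiled in or not. A self-contained sketch of the pattern (the trait and names here are illustrative stand-ins for the crate's `Analytics`):

use std::sync::Arc;

trait Analytics: Send + Sync {
    // Default no-op body, mirroring the mock's empty methods.
    fn publish(&self, _event: &str) {}
}

struct MockAnalytics;
impl Analytics for MockAnalytics {}

fn main() {
    let analytics: Arc<dyn Analytics> = Arc::new(MockAnalytics);
    // The call site is kept unconditionally; in release builds it optimizes away.
    analytics.publish("Documents Added");
}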
meilisearch-http/src/analytics/mod.rs (new file, 84 lines)
@@ -0,0 +1,84 @@
+mod mock_analytics;
+// if we are in release mode and the analytics feature was enabled
+#[cfg(all(not(debug_assertions), feature = "analytics"))]
+mod segment_analytics;
+
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use actix_web::HttpRequest;
+use once_cell::sync::Lazy;
+use platform_dirs::AppDirs;
+use serde_json::Value;
+
+use crate::routes::indexes::documents::UpdateDocumentsQuery;
+
+pub use mock_analytics::MockAnalytics;
+
+// if we are in debug mode OR the analytics feature is disabled
+// the `SegmentAnalytics` name points to the mock instead of the real analytics
+#[cfg(any(debug_assertions, not(feature = "analytics")))]
+pub type SegmentAnalytics = mock_analytics::MockAnalytics;
+#[cfg(any(debug_assertions, not(feature = "analytics")))]
+pub type SearchAggregator = mock_analytics::SearchAggregator;
+
+// if we are in release mode and the analytics feature was enabled
+// we use the real analytics
+#[cfg(all(not(debug_assertions), feature = "analytics"))]
+pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
+#[cfg(all(not(debug_assertions), feature = "analytics"))]
+pub type SearchAggregator = segment_analytics::SearchAggregator;
+
+/// The Meilisearch config dir:
+/// `~/.config/Meilisearch` on *NIX or *BSD.
+/// `~/Library/ApplicationSupport` on macOS.
+/// `%APPDATA%` (= `C:\Users\%USERNAME%\AppData\Roaming`) on Windows.
+static MEILISEARCH_CONFIG_PATH: Lazy<Option<PathBuf>> =
+    Lazy::new(|| AppDirs::new(Some("Meilisearch"), false).map(|appdir| appdir.config_dir));
+
+fn config_user_id_path(db_path: &Path) -> Option<PathBuf> {
+    db_path
+        .canonicalize()
+        .ok()
+        .map(|path| {
+            path.join("instance-uid")
+                .display()
+                .to_string()
+                .replace('/', "-")
+        })
+        .zip(MEILISEARCH_CONFIG_PATH.as_ref())
+        .map(|(filename, config_path)| config_path.join(filename.trim_start_matches('-')))
+}
+
+/// Look for the instance-uid in the `data.ms` or in `~/.config/Meilisearch/path-to-db-instance-uid`
+fn find_user_id(db_path: &Path) -> Option<String> {
+    fs::read_to_string(db_path.join("instance-uid"))
+        .ok()
+        .or_else(|| fs::read_to_string(&config_user_id_path(db_path)?).ok())
+}
+
+pub trait Analytics: Sync + Send {
+    /// The method used to publish most analytics that do not need to be batched every hour
+    fn publish(&self, event_name: String, send: Value, request: Option<&HttpRequest>);
+
+    /// This method should be called to aggregate a get search
+    fn get_search(&self, aggregate: SearchAggregator);
+
+    /// This method should be called to aggregate a post search
+    fn post_search(&self, aggregate: SearchAggregator);
+
+    // this method should be called to aggregate an add documents request
+    fn add_documents(
+        &self,
+        documents_query: &UpdateDocumentsQuery,
+        index_creation: bool,
+        request: &HttpRequest,
+    );
+    // this method should be called to batch an update documents request
+    fn update_documents(
+        &self,
+        documents_query: &UpdateDocumentsQuery,
+        index_creation: bool,
+        request: &HttpRequest,
+    );
+}
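
The pair of `cfg` attributes above selects an implementation at compile time under one shared name, so call sites never need conditional compilation of their own. A self-contained sketch of the same trick (module and type names are illustrative; the `analytics` feature would have to be declared in Cargo.toml for the feature test to mean anything):

#![allow(dead_code)]

mod mock {
    pub struct Analytics;
}
mod segment {
    pub struct Analytics;
}

// Debug build, or feature disabled: the name resolves to the mock.
#[cfg(any(debug_assertions, not(feature = "analytics")))]
pub type SelectedAnalytics = mock::Analytics;

// Release build with the feature enabled: the real implementation.
#[cfg(all(not(debug_assertions), feature = "analytics"))]
pub type SelectedAnalytics = segment::Analytics;

fn main() {
    // Call sites never mention either module directly.
    let _analytics: SelectedAnalytics = SelectedAnalytics;
}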
meilisearch-http/src/analytics/segment_analytics.rs (new file, 586 lines)
@@ -0,0 +1,586 @@
+use std::collections::{BinaryHeap, HashMap, HashSet};
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+use std::time::{Duration, Instant};
+
+use actix_web::http::header::USER_AGENT;
+use actix_web::HttpRequest;
+use http::header::CONTENT_TYPE;
+use meilisearch_auth::SearchRules;
+use meilisearch_lib::index::{SearchQuery, SearchResult};
+use meilisearch_lib::index_controller::Stats;
+use meilisearch_lib::MeiliSearch;
+use once_cell::sync::Lazy;
+use regex::Regex;
+use segment::message::{Identify, Track, User};
+use segment::{AutoBatcher, Batcher, HttpClient};
+use serde_json::{json, Value};
+use sysinfo::{DiskExt, System, SystemExt};
+use time::OffsetDateTime;
+use tokio::select;
+use tokio::sync::mpsc::{self, Receiver, Sender};
+use uuid::Uuid;
+
+use crate::analytics::Analytics;
+use crate::routes::indexes::documents::UpdateDocumentsQuery;
+use crate::Opt;
+
+use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};
+
+/// Write the instance-uid in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-instance-uid`. Ignore the errors.
+fn write_user_id(db_path: &Path, user_id: &str) {
+    let _ = fs::write(db_path.join("instance-uid"), user_id.as_bytes());
+    if let Some((meilisearch_config_path, user_id_path)) = MEILISEARCH_CONFIG_PATH
+        .as_ref()
+        .zip(config_user_id_path(db_path))
+    {
+        let _ = fs::create_dir_all(&meilisearch_config_path);
+        let _ = fs::write(user_id_path, user_id.as_bytes());
+    }
+}
+
+const SEGMENT_API_KEY: &str = "P3FWhhEsJiEDCuEHpmcN9DHcK4hVfBvb";
+
+pub fn extract_user_agents(request: &HttpRequest) -> Vec<String> {
+    request
+        .headers()
+        .get(USER_AGENT)
+        .map(|header| header.to_str().ok())
+        .flatten()
+        .unwrap_or("unknown")
+        .split(';')
+        .map(str::trim)
+        .map(ToString::to_string)
+        .collect()
+}
+
+pub enum AnalyticsMsg {
+    BatchMessage(Track),
+    AggregateGetSearch(SearchAggregator),
+    AggregatePostSearch(SearchAggregator),
+    AggregateAddDocuments(DocumentsAggregator),
+    AggregateUpdateDocuments(DocumentsAggregator),
+}
+
+pub struct SegmentAnalytics {
+    sender: Sender<AnalyticsMsg>,
+    user: User,
+}
+
+impl SegmentAnalytics {
+    pub async fn new(opt: &Opt, meilisearch: &MeiliSearch) -> (Arc<dyn Analytics>, String) {
+        let user_id = super::find_user_id(&opt.db_path);
+        let first_time_run = user_id.is_none();
+        let user_id = user_id.unwrap_or_else(|| Uuid::new_v4().to_string());
+        write_user_id(&opt.db_path, &user_id);
+
+        let client = HttpClient::default();
+        let user = User::UserId { user_id };
+        let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
+
+        // If Meilisearch is launched for the first time:
+        // 1. Send a Launched event associated with the user `total_launch`.
+        // 2. Batch a Launched event with the real instance-id and send it in one hour.
+        if first_time_run {
+            let _ = batcher
+                .push(Track {
+                    user: User::UserId {
+                        user_id: "total_launch".to_string(),
+                    },
+                    event: "Launched".to_string(),
+                    ..Default::default()
+                })
+                .await;
+            let _ = batcher.flush().await;
+            let _ = batcher
+                .push(Track {
+                    user: user.clone(),
+                    event: "Launched".to_string(),
+                    ..Default::default()
+                })
+                .await;
+        }
+
+        let (sender, inbox) = mpsc::channel(100); // How many analytics messages we can buffer
+
+        let segment = Box::new(Segment {
+            inbox,
+            user: user.clone(),
+            opt: opt.clone(),
+            batcher,
+            post_search_aggregator: SearchAggregator::default(),
+            get_search_aggregator: SearchAggregator::default(),
+            add_documents_aggregator: DocumentsAggregator::default(),
+            update_documents_aggregator: DocumentsAggregator::default(),
+        });
+        tokio::spawn(segment.run(meilisearch.clone()));
+
+        let this = Self {
+            sender,
+            user: user.clone(),
+        };
+
+        (Arc::new(this), user.to_string())
+    }
+}
+
+impl super::Analytics for SegmentAnalytics {
+    fn publish(&self, event_name: String, mut send: Value, request: Option<&HttpRequest>) {
+        let user_agent = request
+            .map(|req| req.headers().get(USER_AGENT))
+            .flatten()
+            .map(|header| header.to_str().unwrap_or("unknown"))
+            .map(|s| s.split(';').map(str::trim).collect::<Vec<&str>>());
+
+        send["user-agent"] = json!(user_agent);
+        let event = Track {
+            user: self.user.clone(),
+            event: event_name.clone(),
+            properties: send,
+            ..Default::default()
+        };
+        let _ = self
+            .sender
+            .try_send(AnalyticsMsg::BatchMessage(event.into()));
+    }
+
+    fn get_search(&self, aggregate: SearchAggregator) {
+        let _ = self
+            .sender
+            .try_send(AnalyticsMsg::AggregateGetSearch(aggregate));
+    }
+
+    fn post_search(&self, aggregate: SearchAggregator) {
+        let _ = self
+            .sender
+            .try_send(AnalyticsMsg::AggregatePostSearch(aggregate));
+    }
+
+    fn add_documents(
+        &self,
+        documents_query: &UpdateDocumentsQuery,
+        index_creation: bool,
+        request: &HttpRequest,
+    ) {
+        let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request);
+        let _ = self
+            .sender
+            .try_send(AnalyticsMsg::AggregateAddDocuments(aggregate));
+    }
+
+    fn update_documents(
+        &self,
+        documents_query: &UpdateDocumentsQuery,
+        index_creation: bool,
+        request: &HttpRequest,
+    ) {
+        let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request);
+        let _ = self
+            .sender
+            .try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
+    }
+}
+
+pub struct Segment {
+    inbox: Receiver<AnalyticsMsg>,
+    user: User,
+    opt: Opt,
+    batcher: AutoBatcher,
+    get_search_aggregator: SearchAggregator,
+    post_search_aggregator: SearchAggregator,
+    add_documents_aggregator: DocumentsAggregator,
+    update_documents_aggregator: DocumentsAggregator,
+}
+
+impl Segment {
+    fn compute_traits(opt: &Opt, stats: Stats) -> Value {
+        static FIRST_START_TIMESTAMP: Lazy<Instant> = Lazy::new(Instant::now);
+        static SYSTEM: Lazy<Value> = Lazy::new(|| {
+            let mut sys = System::new_all();
+            sys.refresh_all();
+            let kernel_version = sys
+                .kernel_version()
+                .map(|k| k.split_once("-").map(|(k, _)| k.to_string()))
+                .flatten();
+            json!({
+                "distribution": sys.name(),
+                "kernel_version": kernel_version,
+                "cores": sys.processors().len(),
+                "ram_size": sys.total_memory(),
+                "disk_size": sys.disks().iter().map(|disk| disk.total_space()).max(),
+                "server_provider": std::env::var("MEILI_SERVER_PROVIDER").ok(),
+            })
+        });
+        // The infos are all CLI options, except for options containing sensitive information.
+        // We consider a piece of information sensitive if it contains a path, an address, or a key.
+        let infos = {
+            // First we see if any sensitive fields were used.
+            let db_path = opt.db_path != PathBuf::from("./data.ms");
+            let import_dump = opt.import_dump.is_some();
+            let dumps_dir = opt.dumps_dir != PathBuf::from("dumps/");
+            let import_snapshot = opt.import_snapshot.is_some();
+            let snapshots_dir = opt.snapshot_dir != PathBuf::from("snapshots/");
+            let http_addr = opt.http_addr != "127.0.0.1:7700";
+
+            let mut infos = serde_json::to_value(opt).unwrap();
+
+            // Then we overwrite all sensitive fields with a boolean representing whether
+            // the feature was used or not.
+            infos["db_path"] = json!(db_path);
+            infos["import_dump"] = json!(import_dump);
+            infos["dumps_dir"] = json!(dumps_dir);
+            infos["import_snapshot"] = json!(import_snapshot);
+            infos["snapshot_dir"] = json!(snapshots_dir);
+            infos["http_addr"] = json!(http_addr);
+
+            infos
+        };
+
+        let number_of_documents = stats
+            .indexes
+            .values()
+            .map(|index| index.number_of_documents)
+            .collect::<Vec<u64>>();
+
+        json!({
+            "start_since_days": FIRST_START_TIMESTAMP.elapsed().as_secs() / (60 * 60 * 24), // one day
+            "system": *SYSTEM,
+            "stats": {
+                "database_size": stats.database_size,
+                "indexes_number": stats.indexes.len(),
+                "documents_number": number_of_documents,
+            },
+            "infos": infos,
+        })
+    }
+
+    async fn run(mut self, meilisearch: MeiliSearch) {
+        const INTERVAL: Duration = Duration::from_secs(60 * 60); // one hour
+        // The first batch must be sent after one hour.
+        let mut interval =
+            tokio::time::interval_at(tokio::time::Instant::now() + INTERVAL, INTERVAL);
+
+        loop {
+            select! {
+                _ = interval.tick() => {
+                    self.tick(meilisearch.clone()).await;
+                },
+                msg = self.inbox.recv() => {
+                    match msg {
+                        Some(AnalyticsMsg::BatchMessage(msg)) => drop(self.batcher.push(msg).await),
+                        Some(AnalyticsMsg::AggregateGetSearch(agreg)) => self.get_search_aggregator.aggregate(agreg),
+                        Some(AnalyticsMsg::AggregatePostSearch(agreg)) => self.post_search_aggregator.aggregate(agreg),
+                        Some(AnalyticsMsg::AggregateAddDocuments(agreg)) => self.add_documents_aggregator.aggregate(agreg),
+                        Some(AnalyticsMsg::AggregateUpdateDocuments(agreg)) => self.update_documents_aggregator.aggregate(agreg),
+                        None => (),
+                    }
+                }
+            }
+        }
+    }
+
+    async fn tick(&mut self, meilisearch: MeiliSearch) {
+        if let Ok(stats) = meilisearch.get_all_stats(&SearchRules::default()).await {
+            let _ = self
+                .batcher
+                .push(Identify {
+                    context: Some(json!({
+                        "app": {
+                            "version": env!("CARGO_PKG_VERSION").to_string(),
+                        },
+                    })),
+                    user: self.user.clone(),
+                    traits: Self::compute_traits(&self.opt, stats),
+                    ..Default::default()
+                })
+                .await;
+        }
+        let get_search = std::mem::take(&mut self.get_search_aggregator)
+            .into_event(&self.user, "Documents Searched GET");
+        let post_search = std::mem::take(&mut self.post_search_aggregator)
+            .into_event(&self.user, "Documents Searched POST");
+        let add_documents = std::mem::take(&mut self.add_documents_aggregator)
+            .into_event(&self.user, "Documents Added");
+        let update_documents = std::mem::take(&mut self.update_documents_aggregator)
+            .into_event(&self.user, "Documents Updated");
+
+        if let Some(get_search) = get_search {
+            let _ = self.batcher.push(get_search).await;
+        }
+        if let Some(post_search) = post_search {
+            let _ = self.batcher.push(post_search).await;
+        }
+        if let Some(add_documents) = add_documents {
+            let _ = self.batcher.push(add_documents).await;
+        }
+        if let Some(update_documents) = update_documents {
+            let _ = self.batcher.push(update_documents).await;
+        }
+        let _ = self.batcher.flush().await;
+    }
+}
+
+#[derive(Default)]
+pub struct SearchAggregator {
+    timestamp: Option<OffsetDateTime>,
+
+    // context
+    user_agents: HashSet<String>,
+
+    // requests
+    total_received: usize,
+    total_succeeded: usize,
+    time_spent: BinaryHeap<usize>,
+
+    // sort
+    sort_with_geo_point: bool,
+    // every time a request has a sort, this field must be incremented by the number of terms it contains
+    sort_sum_of_criteria_terms: usize,
+    // every time a request has a sort, this field must be incremented by one
+    sort_total_number_of_criteria: usize,
+
+    // filter
+    filter_with_geo_radius: bool,
+    // every time a request has a filter, this field must be incremented by the number of terms it contains
+    filter_sum_of_criteria_terms: usize,
+    // every time a request has a filter, this field must be incremented by one
+    filter_total_number_of_criteria: usize,
+    used_syntax: HashMap<String, usize>,
+
+    // q
+    // The maximum number of terms in a q request
+    max_terms_number: usize,
+
+    // pagination
+    max_limit: usize,
+    max_offset: usize,
+}
+
+impl SearchAggregator {
+    pub fn from_query(query: &SearchQuery, request: &HttpRequest) -> Self {
+        let mut ret = Self::default();
+        ret.timestamp = Some(OffsetDateTime::now_utc());
+
+        ret.total_received = 1;
+        ret.user_agents = extract_user_agents(request).into_iter().collect();
+
+        if let Some(ref sort) = query.sort {
+            ret.sort_total_number_of_criteria = 1;
+            ret.sort_with_geo_point = sort.iter().any(|s| s.contains("_geoPoint("));
+            ret.sort_sum_of_criteria_terms = sort.len();
+        }
+
+        if let Some(ref filter) = query.filter {
+            static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
+            ret.filter_total_number_of_criteria = 1;
+
+            let syntax = match filter {
+                Value::String(_) => "string".to_string(),
+                Value::Array(values) => {
+                    if values
+                        .iter()
+                        .map(|v| v.to_string())
+                        .any(|s| RE.is_match(&s))
+                    {
+                        "mixed".to_string()
+                    } else {
+                        "array".to_string()
+                    }
+                }
+                _ => "none".to_string(),
+            };
+            // convert the string to a HashMap
+            ret.used_syntax.insert(syntax, 1);
+
+            let stringified_filters = filter.to_string();
+            ret.filter_with_geo_radius = stringified_filters.contains("_geoRadius(");
+            ret.filter_sum_of_criteria_terms = RE.split(&stringified_filters).count();
+        }
+
+        if let Some(ref q) = query.q {
+            ret.max_terms_number = q.split_whitespace().count();
+        }
+
+        ret.max_limit = query.limit;
+        ret.max_offset = query.offset.unwrap_or_default();
+
+        ret
+    }
+
+    pub fn succeed(&mut self, result: &SearchResult) {
+        self.total_succeeded = self.total_succeeded.saturating_add(1);
+        self.time_spent.push(result.processing_time_ms as usize);
+    }
+
+    /// Aggregate one [SearchAggregator] into another.
+    pub fn aggregate(&mut self, mut other: Self) {
+        if self.timestamp.is_none() {
+            self.timestamp = other.timestamp;
+        }
+
+        // context
+        for user_agent in other.user_agents.into_iter() {
+            self.user_agents.insert(user_agent);
+        }
+        // request
+        self.total_received = self.total_received.saturating_add(other.total_received);
+        self.total_succeeded = self.total_succeeded.saturating_add(other.total_succeeded);
+        self.time_spent.append(&mut other.time_spent);
+        // sort
+        self.sort_with_geo_point |= other.sort_with_geo_point;
+        self.sort_sum_of_criteria_terms = self
+            .sort_sum_of_criteria_terms
+            .saturating_add(other.sort_sum_of_criteria_terms);
+        self.sort_total_number_of_criteria = self
+            .sort_total_number_of_criteria
+            .saturating_add(other.sort_total_number_of_criteria);
+        // filter
+        self.filter_with_geo_radius |= other.filter_with_geo_radius;
+        self.filter_sum_of_criteria_terms = self
+            .filter_sum_of_criteria_terms
+            .saturating_add(other.filter_sum_of_criteria_terms);
+        self.filter_total_number_of_criteria = self
+            .filter_total_number_of_criteria
+            .saturating_add(other.filter_total_number_of_criteria);
+        for (key, value) in other.used_syntax.into_iter() {
+            let used_syntax = self.used_syntax.entry(key).or_insert(0);
+            *used_syntax = used_syntax.saturating_add(value);
+        }
+        // q
+        self.max_terms_number = self.max_terms_number.max(other.max_terms_number);
+        // pagination
+        self.max_limit = self.max_limit.max(other.max_limit);
+        self.max_offset = self.max_offset.max(other.max_offset);
+    }
+
+    pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> {
+        if self.total_received == 0 {
+            None
+        } else {
+            // the index of the 99th percentile value
+            let percentile_99th = 0.99 * (self.total_succeeded as f64 - 1.) + 1.;
+            // we get all the values in a sorted manner
+            let time_spent = self.time_spent.into_sorted_vec();
+            // We are only interested in the slowest value of the 99% fastest results
+            let time_spent = time_spent.get(percentile_99th as usize);
+
+            let properties = json!({
+                "user-agent": self.user_agents,
+                "requests": {
+                    "99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
+                    "total_succeeded": self.total_succeeded,
+                    "total_failed": self.total_received.saturating_sub(self.total_succeeded), // just to be sure we never panic
+                    "total_received": self.total_received,
+                },
+                "sort": {
+                    "with_geoPoint": self.sort_with_geo_point,
+                    "avg_criteria_number": format!("{:.2}", self.sort_sum_of_criteria_terms as f64 / self.sort_total_number_of_criteria as f64),
+                },
+                "filter": {
+                    "with_geoRadius": self.filter_with_geo_radius,
+                    "avg_criteria_number": format!("{:.2}", self.filter_sum_of_criteria_terms as f64 / self.filter_total_number_of_criteria as f64),
+                    "most_used_syntax": self.used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
+                },
+                "q": {
+                    "max_terms_number": self.max_terms_number,
+                },
+                "pagination": {
+                    "max_limit": self.max_limit,
+                    "max_offset": self.max_offset,
+                },
+            });
+
+            Some(Track {
+                timestamp: self.timestamp,
+                user: user.clone(),
+                event: event_name.to_string(),
+                properties,
+                ..Default::default()
+            })
+        }
+    }
+}
+
+#[derive(Default)]
+pub struct DocumentsAggregator {
+    timestamp: Option<OffsetDateTime>,
+
+    // set to true when at least one request was received
+    updated: bool,
+
+    // context
+    user_agents: HashSet<String>,
+
+    content_types: HashSet<String>,
+    primary_keys: HashSet<String>,
+    index_creation: bool,
+}
+
+impl DocumentsAggregator {
+    pub fn from_query(
+        documents_query: &UpdateDocumentsQuery,
+        index_creation: bool,
+        request: &HttpRequest,
+    ) -> Self {
+        let mut ret = Self::default();
+        ret.timestamp = Some(OffsetDateTime::now_utc());
+
+        ret.updated = true;
+        ret.user_agents = extract_user_agents(request).into_iter().collect();
+        if let Some(primary_key) = documents_query.primary_key.clone() {
+            ret.primary_keys.insert(primary_key);
+        }
+        let content_type = request
+            .headers()
+            .get(CONTENT_TYPE)
+            .map(|s| s.to_str().unwrap_or("unknown"))
+            .unwrap()
+            .to_string();
+        ret.content_types.insert(content_type);
+        ret.index_creation = index_creation;
+
+        ret
+    }
+
+    /// Aggregate one [DocumentsAggregator] into another.
+    pub fn aggregate(&mut self, other: Self) {
+        if self.timestamp.is_none() {
+            self.timestamp = other.timestamp;
+        }
+
+        self.updated |= other.updated;
+        // we can't create a union because there is no `into_union` method
+        for user_agent in other.user_agents.into_iter() {
+            self.user_agents.insert(user_agent);
+        }
+        for primary_key in other.primary_keys.into_iter() {
+            self.primary_keys.insert(primary_key);
+        }
+        for content_type in other.content_types.into_iter() {
+            self.content_types.insert(content_type);
+        }
+        self.index_creation |= other.index_creation;
+    }
+
+    pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> {
+        if !self.updated {
+            None
+        } else {
+            let properties = json!({
+                "user-agent": self.user_agents,
+                "payload_type": self.content_types,
+                "primary_key": self.primary_keys,
+                "index_creation": self.index_creation,
+            });
+
+            Some(Track {
+                timestamp: self.timestamp,
+                user: user.clone(),
+                event: event_name.to_string(),
+                properties,
+                ..Default::default()
+            })
+        }
+    }
+}
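
`SearchAggregator::into_event` reports a single latency number per batch: the value at the 99th-percentile rank of the sorted response times. A standalone sketch of that computation, using the same rank formula as the aggregator:

use std::collections::BinaryHeap;

fn percentile_99th(times_ms: BinaryHeap<usize>, total_succeeded: usize) -> Option<usize> {
    // Rank of the 99th percentile (same formula as the aggregator above).
    let rank = 0.99 * (total_succeeded as f64 - 1.) + 1.;
    // Sort once per batch, ascending, then index the rank.
    let sorted = times_ms.into_sorted_vec();
    sorted.get(rank as usize).copied()
}

fn main() {
    let heap: BinaryHeap<usize> = (1..=200).collect();
    println!("{:?}", percentile_99th(heap, 200)); // Some(199)
}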
@@ -1,22 +1,15 @@
-use std::error::Error;
-use std::fmt;
-
 use actix_web as aweb;
-use actix_web::body::Body;
-use actix_web::http::StatusCode;
-use actix_web::HttpResponseBuilder;
 use aweb::error::{JsonPayloadError, QueryPayloadError};
-use meilisearch_error::{Code, ErrorCode};
-use serde::{Deserialize, Serialize};
+use meilisearch_error::{Code, ErrorCode, ResponseError};

 #[derive(Debug, thiserror::Error)]
 pub enum MeilisearchHttpError {
-    #[error("A Content-Type header is missing. Accepted values for the Content-Type header are: {}",
-            .0.iter().map(|s| format!("\"{}\"", s)).collect::<Vec<_>>().join(", "))]
+    #[error("A Content-Type header is missing. Accepted values for the Content-Type header are: {}",
+            .0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", "))]
     MissingContentType(Vec<String>),
     #[error(
-        "The Content-Type \"{0}\" is invalid. Accepted values for the Content-Type header are: {}",
-        .1.iter().map(|s| format!("\"{}\"", s)).collect::<Vec<_>>().join(", ")
+        "The Content-Type `{0}` is invalid. Accepted values for the Content-Type header are: {}",
+        .1.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
     )]
     InvalidContentType(String, Vec<String>),
 }
@@ -36,68 +29,18 @@ impl From<MeilisearchHttpError> for aweb::Error {
     }
 }

-#[derive(Debug, Serialize, Deserialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct ResponseError {
-    #[serde(skip)]
-    code: StatusCode,
-    message: String,
-    error_code: String,
-    error_type: String,
-    error_link: String,
-}
-
-impl fmt::Display for ResponseError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.message.fmt(f)
-    }
-}
-
-impl<T> From<T> for ResponseError
-where
-    T: ErrorCode,
-{
-    fn from(other: T) -> Self {
-        Self {
-            code: other.http_status(),
-            message: other.to_string(),
-            error_code: other.error_name(),
-            error_type: other.error_type(),
-            error_link: other.error_url(),
-        }
-    }
-}
-
-impl aweb::error::ResponseError for ResponseError {
-    fn error_response(&self) -> aweb::HttpResponse<Body> {
-        let json = serde_json::to_vec(self).unwrap();
-        HttpResponseBuilder::new(self.status_code())
-            .content_type("application/json")
-            .body(json)
-    }
-
-    fn status_code(&self) -> StatusCode {
-        self.code
-    }
-}
-
-impl fmt::Display for PayloadError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            PayloadError::Json(e) => e.fmt(f),
-            PayloadError::Query(e) => e.fmt(f),
-        }
-    }
-}
-
-#[derive(Debug)]
+#[derive(Debug, thiserror::Error)]
 pub enum PayloadError {
+    #[error("{0}")]
     Json(JsonPayloadError),
+    #[error("{0}")]
     Query(QueryPayloadError),
+    #[error("The json payload provided is malformed. `{0}`.")]
+    MalformedPayload(serde_json::error::Error),
+    #[error("A json payload is missing.")]
+    MissingPayload,
 }

-impl Error for PayloadError {}
-
 impl ErrorCode for PayloadError {
     fn error_code(&self) -> Code {
         match self {
@@ -107,7 +50,8 @@ impl ErrorCode for PayloadError {
                 JsonPayloadError::Payload(aweb::error::PayloadError::Overflow) => {
                     Code::PayloadTooLarge
                 }
-                JsonPayloadError::Deserialize(_) | JsonPayloadError::Payload(_) => Code::BadRequest,
+                JsonPayloadError::Payload(_) => Code::BadRequest,
+                JsonPayloadError::Deserialize(_) => Code::BadRequest,
                 JsonPayloadError::Serialize(_) => Code::Internal,
                 _ => Code::Internal,
             },
@@ -115,13 +59,29 @@ impl ErrorCode for PayloadError {
                 QueryPayloadError::Deserialize(_) => Code::BadRequest,
                 _ => Code::Internal,
             },
+            PayloadError::MissingPayload => Code::MissingPayload,
+            PayloadError::MalformedPayload(_) => Code::MalformedPayload,
         }
     }
 }

 impl From<JsonPayloadError> for PayloadError {
     fn from(other: JsonPayloadError) -> Self {
-        Self::Json(other)
+        match other {
+            JsonPayloadError::Deserialize(e)
+                if e.classify() == serde_json::error::Category::Eof
+                    && e.line() == 1
+                    && e.column() == 0 =>
+            {
+                Self::MissingPayload
+            }
+            JsonPayloadError::Deserialize(e)
+                if e.classify() != serde_json::error::Category::Data =>
+            {
+                Self::MalformedPayload(e)
+            }
+            _ => Self::Json(other),
+        }
     }
 }
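
The new `From<JsonPayloadError>` impl tells an absent body apart from a malformed one purely via `serde_json`'s error metadata. This small demonstration shows why the `Eof`-at-line-1-column-0 test identifies an empty payload:

use serde_json::{error::Category, Value};

fn main() {
    // An empty body fails with an Eof error at line 1, column 0:
    // exactly the case mapped to `MissingPayload` above.
    let err = serde_json::from_str::<Value>("").unwrap_err();
    assert_eq!(err.classify(), Category::Eof);
    assert_eq!((err.line(), err.column()), (1, 0));

    // A truncated body is also Eof, but past column 0: mapped to `MalformedPayload`.
    let err = serde_json::from_str::<Value>("{\"truncated\":").unwrap_err();
    assert_eq!(err.classify(), Category::Eof);
}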
@@ -2,24 +2,21 @@ use meilisearch_error::{Code, ErrorCode};

 #[derive(Debug, thiserror::Error)]
 pub enum AuthenticationError {
-    #[error("You must have an authorization token")]
+    #[error("The Authorization header is missing. It must use the bearer authorization method.")]
     MissingAuthorizationHeader,
-    #[error("Invalid API key")]
-    InvalidToken(String),
+    #[error("The provided API key is invalid.")]
+    InvalidToken,
     // Triggered on configuration error.
-    #[error("Irretrievable state")]
+    #[error("An internal error has occurred. `Irretrievable state`.")]
     IrretrievableState,
-    #[error("Unknown authentication policy")]
-    UnknownPolicy,
 }

 impl ErrorCode for AuthenticationError {
     fn error_code(&self) -> Code {
         match self {
             AuthenticationError::MissingAuthorizationHeader => Code::MissingAuthorizationHeader,
-            AuthenticationError::InvalidToken(_) => Code::InvalidToken,
+            AuthenticationError::InvalidToken => Code::InvalidToken,
             AuthenticationError::IrretrievableState => Code::Internal,
-            AuthenticationError::UnknownPolicy => Code::Internal,
         }
     }
 }
@@ -1,84 +1,84 @@
 mod error;

-use std::any::{Any, TypeId};
-use std::collections::HashMap;
 use std::marker::PhantomData;
 use std::ops::Deref;
+use std::pin::Pin;

 use actix_web::FromRequest;
 use futures::future::err;
-use futures::future::{ok, Ready};
+use futures::Future;
+use meilisearch_error::{Code, ResponseError};

-use crate::error::ResponseError;
 use error::AuthenticationError;
+use meilisearch_auth::{AuthController, AuthFilter};

-macro_rules! create_policies {
-    ($($name:ident), *) => {
-        pub mod policies {
-            use std::collections::HashSet;
-            use crate::extractors::authentication::Policy;
-
-            $(
-                #[derive(Debug, Default)]
-                pub struct $name {
-                    inner: HashSet<Vec<u8>>
-                }
-
-                impl $name {
-                    pub fn new() -> Self {
-                        Self { inner: HashSet::new() }
-                    }
-
-                    pub fn add(&mut self, token: Vec<u8>) {
-                        self.inner.insert(token);
-                    }
-                }
-
-                impl Policy for $name {
-                    fn authenticate(&self, token: &[u8]) -> bool {
-                        self.inner.contains(token)
-                    }
-                }
-            )*
-        }
-    };
-}
-
-create_policies!(Public, Private, Admin);
-
-/// Instanciate a `Policies`, filled with the given policies.
-macro_rules! init_policies {
-    ($($name:ident), *) => {
-        {
-            let mut policies = crate::extractors::authentication::Policies::new();
-            $(
-                let policy = $name::new();
-                policies.insert(policy);
-            )*
-            policies
-        }
-    };
-}
-
-/// Adds user to all specified policies.
-macro_rules! create_users {
-    ($policies:ident, $($user:expr => { $($policy:ty), * }), *) => {
-        {
-            $(
-                $(
-                    $policies.get_mut::<$policy>().map(|p| p.add($user.to_owned()));
-                )*
-            )*
-        }
-    };
-}
-
-pub struct GuardedData<T, D> {
+pub struct GuardedData<P, D> {
     data: D,
-    _marker: PhantomData<T>,
+    filters: AuthFilter,
+    _marker: PhantomData<P>,
 }

-impl<T, D> Deref for GuardedData<T, D> {
+impl<P, D> GuardedData<P, D> {
+    pub fn filters(&self) -> &AuthFilter {
+        &self.filters
+    }
+
+    async fn auth_bearer(
+        auth: AuthController,
+        token: String,
+        index: Option<String>,
+        data: Option<D>,
+    ) -> Result<Self, ResponseError>
+    where
+        P: Policy + 'static,
+    {
+        match Self::authenticate(auth, token, index).await? {
+            Some(filters) => match data {
+                Some(data) => Ok(Self {
+                    data,
+                    filters,
+                    _marker: PhantomData,
+                }),
+                None => Err(AuthenticationError::IrretrievableState.into()),
+            },
+            None => Err(AuthenticationError::InvalidToken.into()),
+        }
+    }
+
+    async fn auth_token(auth: AuthController, data: Option<D>) -> Result<Self, ResponseError>
+    where
+        P: Policy + 'static,
+    {
+        match Self::authenticate(auth, String::new(), None).await? {
+            Some(filters) => match data {
+                Some(data) => Ok(Self {
+                    data,
+                    filters,
+                    _marker: PhantomData,
+                }),
+                None => Err(AuthenticationError::IrretrievableState.into()),
+            },
+            None => Err(AuthenticationError::MissingAuthorizationHeader.into()),
+        }
+    }
+
+    async fn authenticate(
+        auth: AuthController,
+        token: String,
+        index: Option<String>,
+    ) -> Result<Option<AuthFilter>, ResponseError>
+    where
+        P: Policy + 'static,
+    {
+        Ok(tokio::task::spawn_blocking(move || {
+            P::authenticate(auth, token.as_ref(), index.as_deref())
+        })
+        .await
+        .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))?)
+    }
+}
+
+impl<P, D> Deref for GuardedData<P, D> {
     type Target = D;

     fn deref(&self) -> &Self::Target {
@@ -86,98 +86,173 @@ impl<T, D> Deref for GuardedData<T, D> {
     }
 }

-pub trait Policy {
-    fn authenticate(&self, token: &[u8]) -> bool;
-}
-
-#[derive(Debug)]
-pub struct Policies {
-    inner: HashMap<TypeId, Box<dyn Any>>,
-}
-
-impl Policies {
-    pub fn new() -> Self {
-        Self {
-            inner: HashMap::new(),
-        }
-    }
-
-    pub fn insert<S: Policy + 'static>(&mut self, policy: S) {
-        self.inner.insert(TypeId::of::<S>(), Box::new(policy));
-    }
-
-    pub fn get<S: Policy + 'static>(&self) -> Option<&S> {
-        self.inner
-            .get(&TypeId::of::<S>())
-            .and_then(|p| p.downcast_ref::<S>())
-    }
-
-    pub fn get_mut<S: Policy + 'static>(&mut self) -> Option<&mut S> {
-        self.inner
-            .get_mut(&TypeId::of::<S>())
-            .and_then(|p| p.downcast_mut::<S>())
-    }
-}
-
-impl Default for Policies {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-
-pub enum AuthConfig {
-    NoAuth,
-    Auth(Policies),
-}
-
-impl Default for AuthConfig {
-    fn default() -> Self {
-        Self::NoAuth
-    }
-}
-
 impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D> {
-    type Config = AuthConfig;
-
     type Error = ResponseError;

-    type Future = Ready<Result<Self, Self::Error>>;
+    type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;

     fn from_request(
         req: &actix_web::HttpRequest,
         _payload: &mut actix_web::dev::Payload,
     ) -> Self::Future {
-        match req.app_data::<Self::Config>() {
-            Some(config) => match config {
-                AuthConfig::NoAuth => match req.app_data::<D>().cloned() {
-                    Some(data) => ok(Self {
-                        data,
-                        _marker: PhantomData,
-                    }),
-                    None => err(AuthenticationError::IrretrievableState.into()),
-                },
-                AuthConfig::Auth(policies) => match policies.get::<P>() {
-                    Some(policy) => match req.headers().get("x-meili-api-key") {
-                        Some(token) => {
-                            if policy.authenticate(token.as_bytes()) {
-                                match req.app_data::<D>().cloned() {
-                                    Some(data) => ok(Self {
-                                        data,
-                                        _marker: PhantomData,
-                                    }),
-                                    None => err(AuthenticationError::IrretrievableState.into()),
-                                }
-                            } else {
-                                let token = token.to_str().unwrap_or("unknown").to_string();
-                                err(AuthenticationError::InvalidToken(token).into())
-                            }
-                        }
-                        None => err(AuthenticationError::MissingAuthorizationHeader.into()),
-                    },
-                    None => err(AuthenticationError::UnknownPolicy.into()),
-                },
-            },
-            None => err(AuthenticationError::IrretrievableState.into()),
-        }
+        match req.app_data::<AuthController>().cloned() {
+            Some(auth) => match req
+                .headers()
+                .get("Authorization")
+                .map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' '))
+            {
+                Some(mut type_token) => match type_token.next() {
+                    Some("Bearer") => {
+                        // TODO: find a less hardcoded way?
+                        let index = req.match_info().get("index_uid");
+                        match type_token.next() {
+                            Some(token) => Box::pin(Self::auth_bearer(
+                                auth,
+                                token.to_string(),
+                                index.map(String::from),
+                                req.app_data::<D>().cloned(),
+                            )),
+                            None => Box::pin(err(AuthenticationError::InvalidToken.into())),
+                        }
+                    }
+                    _otherwise => {
+                        Box::pin(err(AuthenticationError::MissingAuthorizationHeader.into()))
+                    }
+                },
+                None => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
+            },
+            None => Box::pin(err(AuthenticationError::IrretrievableState.into())),
+        }
     }
 }

+pub trait Policy {
+    fn authenticate(auth: AuthController, token: &str, index: Option<&str>) -> Option<AuthFilter>;
+}
+
+pub mod policies {
+    use jsonwebtoken::{dangerous_insecure_decode, decode, Algorithm, DecodingKey, Validation};
+    use once_cell::sync::Lazy;
+    use serde::{Deserialize, Serialize};
+    use time::OffsetDateTime;
+
+    use crate::extractors::authentication::Policy;
+    use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules};
+    // reexport actions in policies in order to be used in routes configuration.
+    pub use meilisearch_auth::actions;
+
+    pub static TENANT_TOKEN_VALIDATION: Lazy<Validation> = Lazy::new(|| Validation {
+        validate_exp: false,
+        algorithms: vec![Algorithm::HS256, Algorithm::HS384, Algorithm::HS512],
+        ..Default::default()
+    });
+
+    pub struct MasterPolicy;
+
+    impl Policy for MasterPolicy {
+        fn authenticate(
+            auth: AuthController,
+            token: &str,
+            _index: Option<&str>,
+        ) -> Option<AuthFilter> {
+            if let Some(master_key) = auth.get_master_key() {
+                if master_key == token {
+                    return Some(AuthFilter::default());
+                }
+            }
+
+            None
+        }
+    }
+
+    pub struct ActionPolicy<const A: u8>;
+
+    impl<const A: u8> Policy for ActionPolicy<A> {
+        fn authenticate(
+            auth: AuthController,
+            token: &str,
+            index: Option<&str>,
+        ) -> Option<AuthFilter> {
+            // authenticate if token is the master key.
+            if auth.get_master_key().map_or(true, |mk| mk == token) {
+                return Some(AuthFilter::default());
+            }
+
+            // Tenant token
+            if let Some(filters) = ActionPolicy::<A>::authenticate_tenant_token(&auth, token, index)
+            {
+                return Some(filters);
+            } else if let Some(action) = Action::from_repr(A) {
+                // API key
+                if let Ok(true) = auth.authenticate(token.as_bytes(), action, index) {
+                    return auth.get_key_filters(token, None).ok();
+                }
+            }
+
+            None
+        }
+    }
+
+    impl<const A: u8> ActionPolicy<A> {
+        fn authenticate_tenant_token(
+            auth: &AuthController,
+            token: &str,
+            index: Option<&str>,
+        ) -> Option<AuthFilter> {
+            // Only the search action can be accessed by a tenant token.
+            if A != actions::SEARCH {
+                return None;
+            }
+
+            // get token fields without validating it.
+            let Claims {
+                search_rules,
+                exp,
+                api_key_prefix,
+            } = dangerous_insecure_decode::<Claims>(token).ok()?.claims;
+
+            // Check index access if an index restriction is provided.
+            if let Some(index) = index {
+                if !search_rules.is_index_authorized(index) {
+                    return None;
+                }
+            }
+
+            // Check if token is expired.
+            if let Some(exp) = exp {
+                if OffsetDateTime::now_utc().unix_timestamp() > exp {
+                    return None;
+                }
+            }
+
+            // check if parent key is authorized to do the action.
+            if auth
+                .is_key_authorized(api_key_prefix.as_bytes(), Action::Search, index)
+                .ok()?
+            {
+                // Check if tenant token is valid.
+                let key = auth.generate_key(&api_key_prefix)?;
+                decode::<Claims>(
+                    token,
+                    &DecodingKey::from_secret(key.as_bytes()),
+                    &TENANT_TOKEN_VALIDATION,
+                )
+                .ok()?;
+
+                return auth
+                    .get_key_filters(api_key_prefix, Some(search_rules))
+                    .ok();
+            }
+
+            None
+        }
+    }
+
+    #[derive(Debug, Serialize, Deserialize)]
+    #[serde(rename_all = "camelCase")]
+    struct Claims {
+        search_rules: SearchRules,
+        exp: Option<i64>,
+        api_key_prefix: String,
+    }
+}
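
`ActionPolicy` validates tenant tokens in two passes: an unverified decode to recover the key prefix, then a signed decode with the key derived from that prefix. A minimal sketch of that round trip against the same `jsonwebtoken` API surface (the 7.x series, which still exposes `dangerous_insecure_decode`); the secret and claims here are illustrative:

use jsonwebtoken::{
    dangerous_insecure_decode, decode, encode, DecodingKey, EncodingKey, Header, Validation,
};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    exp: Option<i64>,
    api_key_prefix: String,
}

fn main() -> Result<(), jsonwebtoken::errors::Error> {
    let secret = b"illustrative-api-key";
    let claims = Claims { exp: None, api_key_prefix: "ak1234".into() };

    let token = encode(&Header::default(), &claims, &EncodingKey::from_secret(secret))?;

    // First pass: read the prefix without checking the signature...
    let unverified = dangerous_insecure_decode::<Claims>(&token)?.claims;
    assert_eq!(unverified.api_key_prefix, "ak1234");

    // ...then verify the signature with the key looked up from that prefix.
    let validation = Validation { validate_exp: false, ..Default::default() };
    decode::<Claims>(&token, &DecodingKey::from_secret(secret), &validation)?;
    Ok(())
}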
@@ -1,3 +1,4 @@
 pub mod payload;
 #[macro_use]
 pub mod authentication;
+pub mod sequential_extractor;
@@ -28,8 +28,6 @@ impl Default for PayloadConfig {
     }
 }

 impl FromRequest for Payload {
-    type Config = PayloadConfig;
-
     type Error = PayloadError;

     type Future = Ready<Result<Payload, Self::Error>>;
@@ -39,7 +37,7 @@ impl FromRequest for Payload {
         let limit = req
             .app_data::<PayloadConfig>()
             .map(|c| c.limit)
-            .unwrap_or(Self::Config::default().limit);
+            .unwrap_or(PayloadConfig::default().limit);
         ready(Ok(Payload {
             payload: payload.take(),
             limit,
148
meilisearch-http/src/extractors/sequential_extractor.rs
Normal file
148
meilisearch-http/src/extractors/sequential_extractor.rs
Normal file
@@ -0,0 +1,148 @@
+#![allow(non_snake_case)]
+use std::{future::Future, pin::Pin, task::Poll};
+
+use actix_web::{dev::Payload, FromRequest, Handler, HttpRequest};
+use pin_project_lite::pin_project;
+
+/// `SeqHandler` is an actix `Handler` that enforces that extractors errors are returned in the
+/// same order as they are defined in the wrapped handler. This is needed because, by default, actix
+/// resolves the extractors concurrently, whereas we always need the authentication extractor to
+/// throw first.
+#[derive(Clone)]
+pub struct SeqHandler<H>(pub H);
+
+pub struct SeqFromRequest<T>(T);
+
+/// This macro implements `FromRequest` for arbitrary arity handler, except for one, which is
+/// useless anyway.
+macro_rules! gen_seq {
+    ($ty:ident; $($T:ident)+) => {
+        pin_project! {
+            pub struct $ty<$($T: FromRequest), +> {
+                $(
+                #[pin]
+                $T: ExtractFuture<$T::Future, $T, $T::Error>,
+                )+
+            }
+        }
+
+        impl<$($T: FromRequest), +> Future for $ty<$($T),+> {
+            type Output = Result<SeqFromRequest<($($T),+)>, actix_web::Error>;
+
+            fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {
+                let mut this = self.project();
+
+                let mut count_fut = 0;
+                let mut count_finished = 0;
+
+                $(
+                    count_fut += 1;
+                    match this.$T.as_mut().project() {
+                        ExtractProj::Future { fut } => match fut.poll(cx) {
+                            Poll::Ready(Ok(output)) => {
+                                count_finished += 1;
+                                let _ = this
+                                    .$T
+                                    .as_mut()
+                                    .project_replace(ExtractFuture::Done { output });
+                            }
+                            Poll::Ready(Err(error)) => {
+                                count_finished += 1;
+                                let _ = this
+                                    .$T
+                                    .as_mut()
+                                    .project_replace(ExtractFuture::Error { error });
+                            }
+                            Poll::Pending => (),
+                        },
+                        ExtractProj::Done { .. } => count_finished += 1,
+                        ExtractProj::Error { .. } => {
+                            // short circuit if all previous are finished and we had an error.
+                            if count_finished == count_fut {
+                                match this.$T.project_replace(ExtractFuture::Empty) {
+                                    ExtractReplaceProj::Error { error } => {
+                                        return Poll::Ready(Err(error.into()))
+                                    }
+                                    _ => unreachable!("Invalid future state"),
+                                }
+                            } else {
+                                count_finished += 1;
+                            }
+                        }
+                        ExtractProj::Empty => unreachable!("From request polled after being finished. {}", stringify!($T)),
+                    }
+                )+
+
+                if count_fut == count_finished {
+                    let result = (
+                        $(
+                            match this.$T.project_replace(ExtractFuture::Empty) {
+                                ExtractReplaceProj::Done { output } => output,
+                                ExtractReplaceProj::Error { error } => return Poll::Ready(Err(error.into())),
+                                _ => unreachable!("Invalid future state"),
+                            },
+                        )+
+                    );
+
+                    Poll::Ready(Ok(SeqFromRequest(result)))
+                } else {
+                    Poll::Pending
+                }
+            }
+        }
+
+        impl<$($T: FromRequest,)+> FromRequest for SeqFromRequest<($($T,)+)> {
+            type Error = actix_web::Error;
+
+            type Future = $ty<$($T),+>;
+
+            fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
+                $ty {
+                    $(
+                        $T: ExtractFuture::Future {
+                            fut: $T::from_request(req, payload),
+                        },
+                    )+
+                }
+            }
+        }
+
+        impl<Han, $($T: FromRequest),+> Handler<SeqFromRequest<($($T),+)>> for SeqHandler<Han>
+        where
+            Han: Handler<($($T),+)>,
+        {
+            type Output = Han::Output;
+            type Future = Han::Future;
+
+            fn call(&self, args: SeqFromRequest<($($T),+)>) -> Self::Future {
+                self.0.call(args.0)
+            }
+        }
+    };
+}
+
+// Not working for a single argument, but then, it is not really necessary.
+// gen_seq! { SeqFromRequestFut1; A }
+gen_seq! { SeqFromRequestFut2; A B }
+gen_seq! { SeqFromRequestFut3; A B C }
+gen_seq! { SeqFromRequestFut4; A B C D }
+gen_seq! { SeqFromRequestFut5; A B C D E }
+gen_seq! { SeqFromRequestFut6; A B C D E F }
+
+pin_project! {
+    #[project = ExtractProj]
+    #[project_replace = ExtractReplaceProj]
+    enum ExtractFuture<Fut, Res, Err> {
+        Future {
+            #[pin]
+            fut: Fut,
+        },
+        Done {
+            output: Res,
+        },
+        Error {
+            error: Err,
+        },
+        Empty,
+    }
+}
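As a quick orientation (not part of the diff): `SeqHandler` is applied at route registration time, the way the routes modules further down wrap their handlers, e.g. `SeqHandler(create_dump)`. Below is a minimal, self-contained sketch with a hypothetical two-extractor handler, so the generated arity-2 `Handler` impl applies; the real handlers put their `GuardedData` authentication guard first, which is exactly the extractor that must fail first.

```rust
use actix_web::{web, HttpRequest, HttpResponse};

// Hypothetical stand-in handler with two extractors.
async fn create_dump(_req: HttpRequest, _body: web::Bytes) -> HttpResponse {
    HttpResponse::Accepted().finish()
}

fn configure(cfg: &mut web::ServiceConfig) {
    // Wrapping in `SeqHandler` makes actix poll the two extractors in
    // declaration order instead of concurrently.
    cfg.service(web::resource("/dumps").route(web::post().to(SeqHandler(create_dump))));
}
```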
@@ -1,64 +1,53 @@
 #![allow(rustdoc::private_intra_doc_links)]
 #[macro_use]
 pub mod error;
+pub mod analytics;
+mod task;
 #[macro_use]
 pub mod extractors;
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
-pub mod analytics;
 pub mod helpers;
 pub mod option;
 pub mod routes;
-use std::path::Path;
+use std::sync::{atomic::AtomicBool, Arc};
 use std::time::Duration;
 
 use crate::error::MeilisearchHttpError;
-use crate::extractors::authentication::AuthConfig;
 use actix_web::error::JsonPayloadError;
+use analytics::Analytics;
 use error::PayloadError;
 use http::header::CONTENT_TYPE;
 pub use option::Opt;
 
 use actix_web::{web, HttpRequest};
 
-use extractors::authentication::policies::*;
 use extractors::payload::PayloadConfig;
+use meilisearch_auth::AuthController;
 use meilisearch_lib::MeiliSearch;
-use sha2::Digest;
 
-#[derive(Clone)]
-pub struct ApiKeys {
-    pub public: Option<String>,
-    pub private: Option<String>,
-    pub master: Option<String>,
-}
-
-impl ApiKeys {
-    pub fn generate_missing_api_keys(&mut self) {
-        if let Some(master_key) = &self.master {
-            if self.private.is_none() {
-                let key = format!("{}-private", master_key);
-                let sha = sha2::Sha256::digest(key.as_bytes());
-                self.private = Some(format!("{:x}", sha));
-            }
-            if self.public.is_none() {
-                let key = format!("{}-public", master_key);
-                let sha = sha2::Sha256::digest(key.as_bytes());
-                self.public = Some(format!("{:x}", sha));
-            }
-        }
-    }
-}
+pub static AUTOBATCHING_ENABLED: AtomicBool = AtomicBool::new(false);
 
 pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<MeiliSearch> {
     let mut meilisearch = MeiliSearch::builder();
 
+    // enable autobatching?
+    let _ = AUTOBATCHING_ENABLED.store(
+        opt.scheduler_options.enable_auto_batching,
+        std::sync::atomic::Ordering::Relaxed,
+    );
+
     meilisearch
         .set_max_index_size(opt.max_index_size.get_bytes() as usize)
-        .set_max_update_store_size(opt.max_udb_size.get_bytes() as usize)
+        .set_max_task_store_size(opt.max_task_db_size.get_bytes() as usize)
+        // snapshot
         .set_ignore_missing_snapshot(opt.ignore_missing_snapshot)
         .set_ignore_snapshot_if_db_exists(opt.ignore_snapshot_if_db_exists)
-        .set_dump_dst(opt.dumps_dir.clone())
         .set_snapshot_interval(Duration::from_secs(opt.snapshot_interval_sec))
-        .set_snapshot_dir(opt.snapshot_dir.clone());
+        .set_snapshot_dir(opt.snapshot_dir.clone())
+        // dump
+        .set_ignore_missing_dump(opt.ignore_missing_dump)
+        .set_ignore_dump_if_db_exists(opt.ignore_dump_if_db_exists)
+        .set_dump_dst(opt.dumps_dir.clone());
 
     if let Some(ref path) = opt.import_snapshot {
         meilisearch.set_import_snapshot(path.clone());
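The `AUTOBATCHING_ENABLED` static added above is written once during `setup_meilisearch`; consumers only ever load it. A one-liner sketch of a hypothetical read site (the actual readers are outside this diff):

```rust
use std::sync::atomic::Ordering;

fn autobatching_enabled() -> bool {
    // Relaxed ordering is enough: the flag is stored once, before the server starts.
    meilisearch_http::AUTOBATCHING_ENABLED.load(Ordering::Relaxed)
}
```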
@@ -72,34 +61,25 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<MeiliSearch> {
         meilisearch.set_schedule_snapshot();
     }
 
-    meilisearch.build(opt.db_path.clone(), opt.indexer_options.clone())
+    meilisearch.build(
+        opt.db_path.clone(),
+        opt.indexer_options.clone(),
+        opt.scheduler_options.clone(),
+    )
 }
 
-/// Cleans and setup the temporary file folder in the database directory. This must be done after
-/// the meilisearch instance has been created, to not interfere with the snapshot and dump loading.
-pub fn setup_temp_dir(db_path: impl AsRef<Path>) -> anyhow::Result<()> {
-    // Set the tempfile directory in the current db path, to avoid cross device references. Also
-    // remove the previous outstanding files found there
-    //
-    // TODO: if two processes open the same db, one might delete the other tmpdir. Need to make
-    // sure that no one is using it before deleting it.
-    let temp_path = db_path.as_ref().join("tmp");
-    // Ignore error if tempdir doesn't exist
-    let _ = std::fs::remove_dir_all(&temp_path);
-    std::fs::create_dir_all(&temp_path)?;
-    if cfg!(windows) {
-        std::env::set_var("TMP", temp_path);
-    } else {
-        std::env::set_var("TMPDIR", temp_path);
-    }
-
-    Ok(())
-}
-
-pub fn configure_data(config: &mut web::ServiceConfig, data: MeiliSearch, opt: &Opt) {
+pub fn configure_data(
+    config: &mut web::ServiceConfig,
+    data: MeiliSearch,
+    auth: AuthController,
+    opt: &Opt,
+    analytics: Arc<dyn Analytics>,
+) {
     let http_payload_size_limit = opt.http_payload_size_limit.get_bytes() as usize;
     config
         .app_data(data)
+        .app_data(auth)
+        .app_data(web::Data::from(analytics))
         .app_data(
             web::JsonConfig::default()
                 .content_type(|mime| mime == mime::APPLICATION_JSON)
@@ -124,37 +104,10 @@ pub fn configure_data(config: &mut web::ServiceConfig, data: MeiliSearch, opt: &
     );
 }
 
-pub fn configure_auth(config: &mut web::ServiceConfig, opts: &Opt) {
-    let mut keys = ApiKeys {
-        master: opts.master_key.clone(),
-        private: None,
-        public: None,
-    };
-
-    keys.generate_missing_api_keys();
-
-    let auth_config = if let Some(ref master_key) = keys.master {
-        let private_key = keys.private.as_ref().unwrap();
-        let public_key = keys.public.as_ref().unwrap();
-        let mut policies = init_policies!(Public, Private, Admin);
-        create_users!(
-            policies,
-            master_key.as_bytes() => { Admin, Private, Public },
-            private_key.as_bytes() => { Private, Public },
-            public_key.as_bytes() => { Public }
-        );
-        AuthConfig::Auth(policies)
-    } else {
-        AuthConfig::NoAuth
-    };
-
-    config.app_data(auth_config).app_data(keys);
-}
-
 #[cfg(feature = "mini-dashboard")]
 pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
     use actix_web::HttpResponse;
-    use actix_web_static_files::Resource;
+    use static_files::Resource;
 
     mod generated {
         include!(concat!(env!("OUT_DIR"), "/generated.rs"));
@@ -169,13 +122,13 @@ pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
             } = resource;
             // Redirect index.html to /
             if path == "index.html" {
-                config.service(web::resource("/").route(
-                    web::get().to(move || HttpResponse::Ok().content_type(mime_type).body(data)),
-                ));
+                config.service(web::resource("/").route(web::get().to(move || async move {
+                    HttpResponse::Ok().content_type(mime_type).body(data)
+                })));
             } else {
-                config.service(web::resource(path).route(
-                    web::get().to(move || HttpResponse::Ok().content_type(mime_type).body(data)),
-                ));
+                config.service(web::resource(path).route(web::get().to(move || async move {
+                    HttpResponse::Ok().content_type(mime_type).body(data)
+                })));
             }
         }
     } else {
@@ -190,24 +143,24 @@ pub fn dashboard(config: &mut web::ServiceConfig, _enable_frontend: bool) {
 
 #[macro_export]
 macro_rules! create_app {
-    ($data:expr, $enable_frontend:expr, $opt:expr) => {{
+    ($data:expr, $auth:expr, $enable_frontend:expr, $opt:expr, $analytics:expr) => {{
        use actix_cors::Cors;
        use actix_web::middleware::TrailingSlash;
        use actix_web::App;
        use actix_web::{middleware, web};
-        use meilisearch_http::error::{MeilisearchHttpError, ResponseError};
+        use meilisearch_error::ResponseError;
+        use meilisearch_http::error::MeilisearchHttpError;
        use meilisearch_http::routes;
-        use meilisearch_http::{configure_auth, configure_data, dashboard};
+        use meilisearch_http::{configure_data, dashboard};
 
        App::new()
-            .configure(|s| configure_data(s, $data.clone(), &$opt))
+            .configure(|s| configure_data(s, $data.clone(), $auth.clone(), &$opt, $analytics))
-            .configure(|s| configure_auth(s, &$opt))
            .configure(routes::configure)
            .configure(|s| dashboard(s, $enable_frontend))
            .wrap(
                Cors::default()
                    .send_wildcard()
-                    .allowed_headers(vec!["content-type", "x-meili-api-key"])
+                    .allow_any_header()
                    .allow_any_origin()
                    .allow_any_method()
                    .max_age(86_400), // 24h
@@ -1,12 +1,13 @@
 use std::env;
+use std::sync::Arc;
 
 use actix_web::HttpServer;
+use clap::Parser;
+use meilisearch_auth::AuthController;
+use meilisearch_http::analytics;
+use meilisearch_http::analytics::Analytics;
 use meilisearch_http::{create_app, setup_meilisearch, Opt};
 use meilisearch_lib::MeiliSearch;
-use structopt::StructOpt;
-
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
-use meilisearch_http::analytics;
 
 #[cfg(target_os = "linux")]
 #[global_allocator]
@@ -28,7 +29,7 @@ fn setup(opt: &Opt) -> anyhow::Result<()> {
 
 #[actix_web::main]
 async fn main() -> anyhow::Result<()> {
-    let opt = Opt::from_args();
+    let opt = Opt::parse();
 
     setup(&opt)?;
 
@@ -46,28 +47,41 @@ async fn main() -> anyhow::Result<()> {
 
     let meilisearch = setup_meilisearch(&opt)?;
 
-    // Setup the temp directory to be in the db folder. This is important, since temporary file
-    // don't support to be persisted accross filesystem boundaries.
-    meilisearch_http::setup_temp_dir(&opt.db_path)?;
+    let auth_controller = AuthController::new(&opt.db_path, &opt.master_key)?;
 
     #[cfg(all(not(debug_assertions), feature = "analytics"))]
-    if !opt.no_analytics {
-        let analytics_data = meilisearch.clone();
-        let analytics_opt = opt.clone();
-        tokio::task::spawn(analytics::analytics_sender(analytics_data, analytics_opt));
-    }
+    let (analytics, user) = if !opt.no_analytics {
+        analytics::SegmentAnalytics::new(&opt, &meilisearch).await
+    } else {
+        analytics::MockAnalytics::new(&opt)
+    };
+    #[cfg(any(debug_assertions, not(feature = "analytics")))]
+    let (analytics, user) = analytics::MockAnalytics::new(&opt);
 
-    print_launch_resume(&opt);
+    print_launch_resume(&opt, &user);
 
-    run_http(meilisearch, opt).await?;
+    run_http(meilisearch, auth_controller, opt, analytics).await?;
 
     Ok(())
 }
 
-async fn run_http(data: MeiliSearch, opt: Opt) -> anyhow::Result<()> {
+async fn run_http(
+    data: MeiliSearch,
+    auth_controller: AuthController,
+    opt: Opt,
+    analytics: Arc<dyn Analytics>,
+) -> anyhow::Result<()> {
     let _enable_dashboard = &opt.env == "development";
     let opt_clone = opt.clone();
-    let http_server = HttpServer::new(move || create_app!(data, _enable_dashboard, opt_clone))
+    let http_server = HttpServer::new(move || {
+        create_app!(
+            data,
+            auth_controller,
+            _enable_dashboard,
+            opt_clone,
+            analytics.clone()
+        )
+    })
     // Disable signals allows the server to terminate immediately when a user enter CTRL-C
     .disable_signals();
 
@@ -82,19 +96,19 @@ async fn run_http(data: MeiliSearch, opt: Opt) -> anyhow::Result<()> {
     Ok(())
 }
 
-pub fn print_launch_resume(opt: &Opt) {
+pub fn print_launch_resume(opt: &Opt, user: &str) {
     let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
     let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
 
     let ascii_name = r#"
-888b     d888          d8b 888 d8b  .d8888b.                                    888
-8888b   d8888          Y8P 888 Y8P d88P  Y88b                                   888
-88888b.d88888              888     Y88b.                                        888
-888Y88888P888  .d88b.  888 888 888  "Y888b.    .d88b.   8888b.  888d888 .d8888b 88888b.
-888 Y888P 888 d8P  Y8b 888 888 888     "Y88b. d8P  Y8b     "88b 888P"  d88P"    888 "88b
-888  Y8P  888 88888888 888 888 888       "888 88888888 .d888888 888    888      888  888
-888   "   888 Y8b.     888 888 888 Y88b  d88P Y8b.     888  888 888    Y88b.    888  888
-888       888  "Y8888  888 888 888  "Y8888P"   "Y8888  "Y888888 888     "Y8888P 888  888
+888b     d888          d8b 888 d8b                            888
+8888b   d8888          Y8P 888 Y8P                            888
+88888b.d88888              888                                888
+888Y88888P888  .d88b.  888 888 888 .d8888b   .d88b.   8888b.  888d888 .d8888b 88888b.
+888 Y888P 888 d8P  Y8b 888 888 888 88K      d8P  Y8b     "88b 888P"  d88P"    888 "88b
+888  Y8P  888 88888888 888 888 888 "Y8888b. 88888888 .d888888 888    888      888  888
+888   "   888 Y8b.     888 888 888      X88 Y8b.     888  888 888    Y88b.    888  888
+888       888  "Y8888  888 888 888  88888P'  "Y8888  "Y888888 888     "Y8888P 888  888
"#;
|
"#;
|
||||||
|
|
||||||
eprintln!("{}", ascii_name);
|
eprintln!("{}", ascii_name);
|
||||||
@@ -111,24 +125,28 @@ pub fn print_launch_resume(opt: &Opt) {
|
|||||||
|
|
||||||
#[cfg(all(not(debug_assertions), feature = "analytics"))]
|
#[cfg(all(not(debug_assertions), feature = "analytics"))]
|
||||||
{
|
{
|
||||||
if opt.no_analytics {
|
if !opt.no_analytics {
|
||||||
eprintln!("Anonymous telemetry:\t\"Disabled\"");
|
|
||||||
} else {
|
|
||||||
eprintln!(
|
eprintln!(
|
||||||
"
|
"
|
||||||
Thank you for using MeiliSearch!
|
Thank you for using Meilisearch!
|
||||||
|
|
||||||
We collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html
|
We collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html
|
||||||
|
|
||||||
Anonymous telemetry: \"Enabled\""
|
Anonymous telemetry:\t\"Enabled\""
|
||||||
);
|
);
|
||||||
|
} else {
|
||||||
|
eprintln!("Anonymous telemetry:\t\"Disabled\"");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if !user.is_empty() {
|
||||||
|
eprintln!("Instance UID:\t\t\"{}\"", user);
|
||||||
|
}
|
||||||
|
|
||||||
eprintln!();
|
eprintln!();
|
||||||
|
|
||||||
if opt.master_key.is_some() {
|
if opt.master_key.is_some() {
|
||||||
eprintln!("A Master Key has been set. Requests to MeiliSearch won't be authorized unless you provide an authentication key.");
|
eprintln!("A Master Key has been set. Requests to Meilisearch won't be authorized unless you provide an authentication key.");
|
||||||
} else {
|
} else {
|
||||||
eprintln!("No master key found; The server will accept unidentified requests. \
|
eprintln!("No master key found; The server will accept unidentified requests. \
|
||||||
If you need some protection in development mode, please export a key: export MEILI_MASTER_KEY=xxx");
|
If you need some protection in development mode, please export a key: export MEILI_MASTER_KEY=xxx");
|
||||||
@@ -137,6 +155,6 @@ Anonymous telemetry: \"Enabled\""
|
|||||||
eprintln!();
|
eprintln!();
|
||||||
eprintln!("Documentation:\t\thttps://docs.meilisearch.com");
|
eprintln!("Documentation:\t\thttps://docs.meilisearch.com");
|
||||||
eprintln!("Source code:\t\thttps://github.com/meilisearch/meilisearch");
|
eprintln!("Source code:\t\thttps://github.com/meilisearch/meilisearch");
|
||||||
eprintln!("Contact:\t\thttps://docs.meilisearch.com/resources/contact.html or bonjour@meilisearch.com");
|
eprintln!("Contact:\t\thttps://docs.meilisearch.com/resources/contact.html");
|
||||||
eprintln!();
|
eprintln!();
|
||||||
}
|
}
|
||||||
|
|||||||
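The `Opt` diff below is largely a mechanical migration from `structopt` to the clap v3 derive API: `#[derive(StructOpt)]` becomes `#[derive(Parser)]`, each `#[structopt(...)]` attribute becomes `#[clap(...)]`, and `Opt::from_args()` (seen in main.rs above) becomes `Opt::parse()`. A minimal sketch of the pattern, assuming clap 3 with its `env` feature enabled:

```rust
use std::path::PathBuf;

use clap::Parser;

#[derive(Debug, Parser)]
struct Opt {
    // Previously: #[structopt(long, env = "MEILI_DB_PATH", default_value = "./data.ms")]
    #[clap(long, env = "MEILI_DB_PATH", default_value = "./data.ms")]
    db_path: PathBuf,
}

fn main() {
    // Previously: Opt::from_args()
    let opt = Opt::parse();
    println!("{:?}", opt.db_path);
}
```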
@@ -4,134 +4,168 @@ use std::path::PathBuf;
 use std::sync::Arc;
 
 use byte_unit::Byte;
-use meilisearch_lib::options::IndexerOpts;
-use rustls::internal::pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
+use clap::Parser;
+use meilisearch_lib::options::{IndexerOpts, SchedulerConfig};
 use rustls::{
-    AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient, NoClientAuth,
+    server::{
+        AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient,
+        ServerSessionMemoryCache,
+    },
     RootCertStore,
 };
-use structopt::StructOpt;
+use rustls_pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
+use serde::Serialize;
 
 const POSSIBLE_ENV: [&str; 2] = ["development", "production"];
 
-#[derive(Debug, Clone, StructOpt)]
+#[derive(Debug, Clone, Parser, Serialize)]
 pub struct Opt {
     /// The destination where the database must be created.
-    #[structopt(long, env = "MEILI_DB_PATH", default_value = "./data.ms")]
+    #[clap(long, env = "MEILI_DB_PATH", default_value = "./data.ms")]
     pub db_path: PathBuf,
 
     /// The address on which the http server will listen.
-    #[structopt(long, env = "MEILI_HTTP_ADDR", default_value = "127.0.0.1:7700")]
+    #[clap(long, env = "MEILI_HTTP_ADDR", default_value = "127.0.0.1:7700")]
     pub http_addr: String,
 
     /// The master key allowing you to do everything on the server.
-    #[structopt(long, env = "MEILI_MASTER_KEY")]
+    #[serde(skip)]
+    #[clap(long, env = "MEILI_MASTER_KEY")]
     pub master_key: Option<String>,
 
     /// This environment variable must be set to `production` if you are running in production.
     /// If the server is running in development mode more logs will be displayed,
     /// and the master key can be avoided which implies that there is no security on the updates routes.
     /// This is useful to debug when integrating the engine with another service.
-    #[structopt(long, env = "MEILI_ENV", default_value = "development", possible_values = &POSSIBLE_ENV)]
+    #[clap(long, env = "MEILI_ENV", default_value = "development", possible_values = &POSSIBLE_ENV)]
     pub env: String,
 
     /// Do not send analytics to Meili.
     #[cfg(all(not(debug_assertions), feature = "analytics"))]
-    #[structopt(long, env = "MEILI_NO_ANALYTICS")]
+    #[serde(skip)] // we can't send true
+    #[clap(long, env = "MEILI_NO_ANALYTICS")]
     pub no_analytics: bool,
 
     /// The maximum size, in bytes, of the main lmdb database directory
-    #[structopt(long, env = "MEILI_MAX_INDEX_SIZE", default_value = "100 GiB")]
+    #[clap(long, env = "MEILI_MAX_INDEX_SIZE", default_value = "100 GiB")]
     pub max_index_size: Byte,
 
     /// The maximum size, in bytes, of the update lmdb database directory
-    #[structopt(long, env = "MEILI_MAX_UDB_SIZE", default_value = "100 GiB")]
-    pub max_udb_size: Byte,
+    #[clap(long, env = "MEILI_MAX_TASK_DB_SIZE", default_value = "100 GiB")]
+    pub max_task_db_size: Byte,
 
     /// The maximum size, in bytes, of accepted JSON payloads
-    #[structopt(long, env = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT", default_value = "100 MB")]
+    #[clap(long, env = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT", default_value = "100 MB")]
     pub http_payload_size_limit: Byte,
 
     /// Read server certificates from CERTFILE.
     /// This should contain PEM-format certificates
     /// in the right order (the first certificate should
     /// certify KEYFILE, the last should be a root CA).
-    #[structopt(long, env = "MEILI_SSL_CERT_PATH", parse(from_os_str))]
+    #[serde(skip)]
+    #[clap(long, env = "MEILI_SSL_CERT_PATH", parse(from_os_str))]
     pub ssl_cert_path: Option<PathBuf>,
 
     /// Read private key from KEYFILE. This should be a RSA
     /// private key or PKCS8-encoded private key, in PEM format.
-    #[structopt(long, env = "MEILI_SSL_KEY_PATH", parse(from_os_str))]
+    #[serde(skip)]
+    #[clap(long, env = "MEILI_SSL_KEY_PATH", parse(from_os_str))]
     pub ssl_key_path: Option<PathBuf>,
 
     /// Enable client authentication, and accept certificates
     /// signed by those roots provided in CERTFILE.
-    #[structopt(long, env = "MEILI_SSL_AUTH_PATH", parse(from_os_str))]
+    #[clap(long, env = "MEILI_SSL_AUTH_PATH", parse(from_os_str))]
+    #[serde(skip)]
     pub ssl_auth_path: Option<PathBuf>,
 
     /// Read DER-encoded OCSP response from OCSPFILE and staple to certificate.
     /// Optional
-    #[structopt(long, env = "MEILI_SSL_OCSP_PATH", parse(from_os_str))]
+    #[serde(skip)]
+    #[clap(long, env = "MEILI_SSL_OCSP_PATH", parse(from_os_str))]
     pub ssl_ocsp_path: Option<PathBuf>,
 
     /// Send a fatal alert if the client does not complete client authentication.
-    #[structopt(long, env = "MEILI_SSL_REQUIRE_AUTH")]
+    #[serde(skip)]
+    #[clap(long, env = "MEILI_SSL_REQUIRE_AUTH")]
     pub ssl_require_auth: bool,
 
     /// SSL support session resumption
-    #[structopt(long, env = "MEILI_SSL_RESUMPTION")]
+    #[serde(skip)]
+    #[clap(long, env = "MEILI_SSL_RESUMPTION")]
     pub ssl_resumption: bool,
 
     /// SSL support tickets.
-    #[structopt(long, env = "MEILI_SSL_TICKETS")]
+    #[serde(skip)]
+    #[clap(long, env = "MEILI_SSL_TICKETS")]
     pub ssl_tickets: bool,
 
     /// Defines the path of the snapshot file to import.
     /// This option will, by default, stop the process if a database already exist or if no snapshot exists at
     /// the given path. If this option is not specified no snapshot is imported.
-    #[structopt(long)]
+    #[clap(long)]
     pub import_snapshot: Option<PathBuf>,
 
     /// The engine will ignore a missing snapshot and not return an error in such case.
-    #[structopt(long, requires = "import-snapshot")]
+    #[clap(long, requires = "import-snapshot")]
     pub ignore_missing_snapshot: bool,
 
     /// The engine will skip snapshot importation and not return an error in such case.
-    #[structopt(long, requires = "import-snapshot")]
+    #[clap(long, requires = "import-snapshot")]
     pub ignore_snapshot_if_db_exists: bool,
 
     /// Defines the directory path where meilisearch will create snapshot each snapshot_time_gap.
-    #[structopt(long, env = "MEILI_SNAPSHOT_DIR", default_value = "snapshots/")]
+    #[clap(long, env = "MEILI_SNAPSHOT_DIR", default_value = "snapshots/")]
     pub snapshot_dir: PathBuf,
 
     /// Activate snapshot scheduling.
-    #[structopt(long, env = "MEILI_SCHEDULE_SNAPSHOT")]
+    #[clap(long, env = "MEILI_SCHEDULE_SNAPSHOT")]
     pub schedule_snapshot: bool,
 
     /// Defines time interval, in seconds, between each snapshot creation.
-    #[structopt(long, env = "MEILI_SNAPSHOT_INTERVAL_SEC", default_value = "86400")] // 24h
+    #[clap(long, env = "MEILI_SNAPSHOT_INTERVAL_SEC", default_value = "86400")] // 24h
     pub snapshot_interval_sec: u64,
 
-    /// Folder where dumps are created when the dump route is called.
-    #[structopt(long, env = "MEILI_DUMPS_DIR", default_value = "dumps/")]
-    pub dumps_dir: PathBuf,
-
     /// Import a dump from the specified path, must be a `.dump` file.
-    #[structopt(long, conflicts_with = "import-snapshot")]
+    #[clap(long, conflicts_with = "import-snapshot")]
     pub import_dump: Option<PathBuf>,
 
+    /// If the dump doesn't exists, load or create the database specified by `db-path` instead.
+    #[clap(long, requires = "import-dump")]
+    pub ignore_missing_dump: bool,
+
+    /// Ignore the dump if a database already exists, and load that database instead.
+    #[clap(long, requires = "import-dump")]
+    pub ignore_dump_if_db_exists: bool,
+
+    /// Folder where dumps are created when the dump route is called.
+    #[clap(long, env = "MEILI_DUMPS_DIR", default_value = "dumps/")]
+    pub dumps_dir: PathBuf,
+
     /// Set the log level
-    #[structopt(long, env = "MEILI_LOG_LEVEL", default_value = "info")]
+    #[clap(long, env = "MEILI_LOG_LEVEL", default_value = "info")]
     pub log_level: String,
 
-    #[structopt(skip)]
+    #[serde(skip)]
+    #[clap(flatten)]
     pub indexer_options: IndexerOpts,
+
+    #[serde(flatten)]
+    #[clap(flatten)]
+    pub scheduler_options: SchedulerConfig,
 }
 
 impl Opt {
+    /// Wether analytics should be enabled or not.
+    #[cfg(all(not(debug_assertions), feature = "analytics"))]
+    pub fn analytics(&self) -> bool {
+        !self.no_analytics
+    }
+
     pub fn get_ssl_config(&self) -> anyhow::Result<Option<rustls::ServerConfig>> {
         if let (Some(cert_path), Some(key_path)) = (&self.ssl_cert_path, &self.ssl_key_path) {
-            let client_auth = match &self.ssl_auth_path {
+            let config = rustls::ServerConfig::builder().with_safe_defaults();
+
+            let config = match &self.ssl_auth_path {
                 Some(auth_path) => {
                     let roots = load_certs(auth_path.to_path_buf())?;
                     let mut client_auth_roots = RootCertStore::empty();
@@ -139,30 +173,32 @@ impl Opt {
                         client_auth_roots.add(&root).unwrap();
                     }
                     if self.ssl_require_auth {
-                        AllowAnyAuthenticatedClient::new(client_auth_roots)
+                        let verifier = AllowAnyAuthenticatedClient::new(client_auth_roots);
+                        config.with_client_cert_verifier(verifier)
                     } else {
-                        AllowAnyAnonymousOrAuthenticatedClient::new(client_auth_roots)
+                        let verifier =
+                            AllowAnyAnonymousOrAuthenticatedClient::new(client_auth_roots);
+                        config.with_client_cert_verifier(verifier)
                    }
                }
-                None => NoClientAuth::new(),
+                None => config.with_no_client_auth(),
            };
 
-            let mut config = rustls::ServerConfig::new(client_auth);
-            config.key_log = Arc::new(rustls::KeyLogFile::new());
-
            let certs = load_certs(cert_path.to_path_buf())?;
            let privkey = load_private_key(key_path.to_path_buf())?;
            let ocsp = load_ocsp(&self.ssl_ocsp_path)?;
-            config
-                .set_single_cert_with_ocsp_and_sct(certs, privkey, ocsp, vec![])
+            let mut config = config
+                .with_single_cert_with_ocsp_and_sct(certs, privkey, ocsp, vec![])
                .map_err(|_| anyhow::anyhow!("bad certificates/private key"))?;
 
+            config.key_log = Arc::new(rustls::KeyLogFile::new());
+
            if self.ssl_resumption {
-                config.set_persistence(rustls::ServerSessionMemoryCache::new(256));
+                config.session_storage = ServerSessionMemoryCache::new(256);
            }
 
            if self.ssl_tickets {
-                config.ticketer = rustls::Ticketer::new();
+                config.ticketer = rustls::Ticketer::new().unwrap();
            }
 
            Ok(Some(config))
@@ -176,7 +212,9 @@ fn load_certs(filename: PathBuf) -> anyhow::Result<Vec<rustls::Certificate>> {
     let certfile =
         fs::File::open(filename).map_err(|_| anyhow::anyhow!("cannot open certificate file"))?;
     let mut reader = BufReader::new(certfile);
-    certs(&mut reader).map_err(|_| anyhow::anyhow!("cannot read certificate file"))
+    certs(&mut reader)
+        .map(|certs| certs.into_iter().map(rustls::Certificate).collect())
+        .map_err(|_| anyhow::anyhow!("cannot read certificate file"))
 }
 
 fn load_private_key(filename: PathBuf) -> anyhow::Result<rustls::PrivateKey> {
@@ -201,10 +239,10 @@ fn load_private_key(filename: PathBuf) -> anyhow::Result<rustls::PrivateKey> {
 
     // prefer to load pkcs8 keys
     if !pkcs8_keys.is_empty() {
-        Ok(pkcs8_keys[0].clone())
+        Ok(rustls::PrivateKey(pkcs8_keys[0].clone()))
     } else {
         assert!(!rsa_keys.is_empty());
-        Ok(rsa_keys[0].clone())
+        Ok(rustls::PrivateKey(rsa_keys[0].clone()))
     }
 }
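To see the rustls 0.20 migration from the hunks above in one place: the old mutable `ServerConfig::new(client_auth)` flow becomes a typed builder chain. A condensed sketch, using the plain `with_single_cert` instead of the OCSP/SCT variant the real code calls, and assuming `anyhow` as in the surrounding file:

```rust
use rustls::server::AllowAnyAuthenticatedClient;
use rustls::{Certificate, PrivateKey, RootCertStore, ServerConfig};

fn tls_config(
    roots: RootCertStore,
    certs: Vec<Certificate>,
    key: PrivateKey,
) -> anyhow::Result<ServerConfig> {
    // rustls 0.20: the client-auth policy and the certificate chain are fed
    // through the builder instead of being set on a mutable ServerConfig.
    let config = ServerConfig::builder()
        .with_safe_defaults()
        .with_client_cert_verifier(AllowAnyAuthenticatedClient::new(roots))
        .with_single_cert(certs, key)?;
    Ok(config)
}
```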
154  meilisearch-http/src/routes/api_key.rs  Normal file
@@ -0,0 +1,154 @@
+use std::str;
+
+use actix_web::{web, HttpRequest, HttpResponse};
+
+use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key};
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+use time::OffsetDateTime;
+
+use crate::extractors::{
+    authentication::{policies::*, GuardedData},
+    sequential_extractor::SeqHandler,
+};
+use meilisearch_error::{Code, ResponseError};
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(
+        web::resource("")
+            .route(web::post().to(SeqHandler(create_api_key)))
+            .route(web::get().to(SeqHandler(list_api_keys))),
+    )
+    .service(
+        web::resource("/{api_key}")
+            .route(web::get().to(SeqHandler(get_api_key)))
+            .route(web::patch().to(SeqHandler(patch_api_key)))
+            .route(web::delete().to(SeqHandler(delete_api_key))),
+    );
+}
+
+pub async fn create_api_key(
+    auth_controller: GuardedData<MasterPolicy, AuthController>,
+    body: web::Json<Value>,
+    _req: HttpRequest,
+) -> Result<HttpResponse, ResponseError> {
+    let v = body.into_inner();
+    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
+        let key = auth_controller.create_key(v)?;
+        Ok(KeyView::from_key(key, &auth_controller))
+    })
+    .await
+    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;
+
+    Ok(HttpResponse::Created().json(res))
+}
+
+pub async fn list_api_keys(
+    auth_controller: GuardedData<MasterPolicy, AuthController>,
+    _req: HttpRequest,
+) -> Result<HttpResponse, ResponseError> {
+    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
+        let keys = auth_controller.list_keys()?;
+        let res: Vec<_> = keys
+            .into_iter()
+            .map(|k| KeyView::from_key(k, &auth_controller))
+            .collect();
+        Ok(res)
+    })
+    .await
+    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;
+
+    Ok(HttpResponse::Ok().json(KeyListView::from(res)))
+}
+
+pub async fn get_api_key(
+    auth_controller: GuardedData<MasterPolicy, AuthController>,
+    path: web::Path<AuthParam>,
+) -> Result<HttpResponse, ResponseError> {
+    let api_key = path.into_inner().api_key;
+    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
+        let key = auth_controller.get_key(&api_key)?;
+        Ok(KeyView::from_key(key, &auth_controller))
+    })
+    .await
+    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;
+
+    Ok(HttpResponse::Ok().json(res))
+}
+
+pub async fn patch_api_key(
+    auth_controller: GuardedData<MasterPolicy, AuthController>,
+    body: web::Json<Value>,
+    path: web::Path<AuthParam>,
+) -> Result<HttpResponse, ResponseError> {
+    let api_key = path.into_inner().api_key;
+    let body = body.into_inner();
+    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
+        let key = auth_controller.update_key(&api_key, body)?;
+        Ok(KeyView::from_key(key, &auth_controller))
+    })
+    .await
+    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;
+
+    Ok(HttpResponse::Ok().json(res))
+}
+
+pub async fn delete_api_key(
+    auth_controller: GuardedData<MasterPolicy, AuthController>,
+    path: web::Path<AuthParam>,
+) -> Result<HttpResponse, ResponseError> {
+    let api_key = path.into_inner().api_key;
+    tokio::task::spawn_blocking(move || auth_controller.delete_key(&api_key))
+        .await
+        .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;
+
+    Ok(HttpResponse::NoContent().finish())
+}
+
+#[derive(Deserialize)]
+pub struct AuthParam {
+    api_key: String,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+struct KeyView {
+    description: Option<String>,
+    key: String,
+    actions: Vec<Action>,
+    indexes: Vec<String>,
+    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
+    expires_at: Option<OffsetDateTime>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    created_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    updated_at: OffsetDateTime,
+}
+
+impl KeyView {
+    fn from_key(key: Key, auth: &AuthController) -> Self {
+        let key_id = str::from_utf8(&key.id).unwrap();
+        let generated_key = auth.generate_key(key_id).unwrap_or_default();
+
+        KeyView {
+            description: key.description,
+            key: generated_key,
+            actions: key.actions,
+            indexes: key.indexes,
+            expires_at: key.expires_at,
+            created_at: key.created_at,
+            updated_at: key.updated_at,
+        }
+    }
+}
+
+#[derive(Debug, Serialize)]
+struct KeyListView {
+    results: Vec<KeyView>,
+}
+
+impl From<Vec<KeyView>> for KeyListView {
+    fn from(results: Vec<KeyView>) -> Self {
+        Self { results }
+    }
+}
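Given the `camelCase` rename and the RFC 3339 serializers above, a serialized `KeyView` comes out shaped roughly as follows (a sketch built with `serde_json`; every concrete value is made up):

```rust
use serde_json::json;

fn example_key_view() -> serde_json::Value {
    // Field names and the date format follow the Serialize impl above;
    // the values themselves are illustrative only.
    json!({
        "description": "add documents key",
        "key": "d0552b41...",
        "actions": ["documents.add"],
        "indexes": ["movies"],
        "expiresAt": "2022-11-12T10:00:00Z",
        "createdAt": "2021-11-12T10:00:00Z",
        "updatedAt": "2021-11-12T10:00:00Z"
    })
}
```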
@@ -1,19 +1,28 @@
-use actix_web::{web, HttpResponse};
+use actix_web::{web, HttpRequest, HttpResponse};
 use log::debug;
+use meilisearch_error::ResponseError;
 use meilisearch_lib::MeiliSearch;
 use serde::{Deserialize, Serialize};
+use serde_json::json;
 
-use crate::error::ResponseError;
+use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::post().to(create_dump)))
-        .service(web::resource("/{dump_uid}/status").route(web::get().to(get_dump_status)));
+    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))))
+        .service(
+            web::resource("/{dump_uid}/status").route(web::get().to(SeqHandler(get_dump_status))),
+        );
 }
 
 pub async fn create_dump(
-    meilisearch: GuardedData<Private, MeiliSearch>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::DUMPS_CREATE }>, MeiliSearch>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
+    analytics.publish("Dump Created".to_string(), json!({}), Some(&req));
+
     let res = meilisearch.create_dump().await?;
 
     debug!("returns: {:?}", res);
@@ -32,7 +41,7 @@ struct DumpParam {
 }
 
 async fn get_dump_status(
-    meilisearch: GuardedData<Private, MeiliSearch>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::DUMPS_GET }>, MeiliSearch>,
     path: web::Path<DumpParam>,
 ) -> Result<HttpResponse, ResponseError> {
     let res = meilisearch.dump_info(path.dump_uid.clone()).await?;
@@ -1,24 +1,39 @@
 use actix_web::error::PayloadError;
+use actix_web::http::header::CONTENT_TYPE;
 use actix_web::web::Bytes;
+use actix_web::HttpMessage;
 use actix_web::{web, HttpRequest, HttpResponse};
+use bstr::ByteSlice;
 use futures::{Stream, StreamExt};
 use log::debug;
+use meilisearch_error::ResponseError;
 use meilisearch_lib::index_controller::{DocumentAdditionFormat, Update};
 use meilisearch_lib::milli::update::IndexDocumentsMethod;
 use meilisearch_lib::MeiliSearch;
+use mime::Mime;
 use once_cell::sync::Lazy;
 use serde::Deserialize;
 use serde_json::Value;
 use tokio::sync::mpsc;
 
-use crate::error::{MeilisearchHttpError, ResponseError};
+use crate::analytics::Analytics;
+use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::payload::Payload;
-use crate::routes::IndexParam;
+use crate::extractors::sequential_extractor::SeqHandler;
+use crate::task::SummarizedTaskView;
 
 const DEFAULT_RETRIEVE_DOCUMENTS_OFFSET: usize = 0;
 const DEFAULT_RETRIEVE_DOCUMENTS_LIMIT: usize = 20;
 
+static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
+    vec![
+        "application/json".to_string(),
+        "application/x-ndjson".to_string(),
+        "text/csv".to_string(),
+    ]
+});
+
 /// This is required because Payload is not Sync nor Send
 fn payload_to_stream(mut payload: Payload) -> impl Stream<Item = Result<Bytes, PayloadError>> {
     let (snd, recv) = mpsc::channel(1);
@@ -30,6 +45,24 @@ fn payload_to_stream(mut payload: Payload) -> impl Stream<Item = Result<Bytes, P
     tokio_stream::wrappers::ReceiverStream::new(recv)
 }
 
+/// Extracts the mime type from the content type and return
+/// a meilisearch error if anyhthing bad happen.
+fn extract_mime_type(req: &HttpRequest) -> Result<Option<Mime>, MeilisearchHttpError> {
+    match req.mime_type() {
+        Ok(Some(mime)) => Ok(Some(mime)),
+        Ok(None) => Ok(None),
+        Err(_) => match req.headers().get(CONTENT_TYPE) {
+            Some(content_type) => Err(MeilisearchHttpError::InvalidContentType(
+                content_type.as_bytes().as_bstr().to_string(),
+                ACCEPTED_CONTENT_TYPE.clone(),
+            )),
+            None => Err(MeilisearchHttpError::MissingContentType(
+                ACCEPTED_CONTENT_TYPE.clone(),
+            )),
+        },
+    }
+}
+
 #[derive(Deserialize)]
 pub struct DocumentParam {
     index_uid: String,
@@ -39,22 +72,22 @@ pub struct DocumentParam {
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("")
-            .route(web::get().to(get_all_documents))
-            .route(web::post().to(add_documents))
-            .route(web::put().to(update_documents))
-            .route(web::delete().to(clear_all_documents)),
+            .route(web::get().to(SeqHandler(get_all_documents)))
+            .route(web::post().to(SeqHandler(add_documents)))
+            .route(web::put().to(SeqHandler(update_documents)))
+            .route(web::delete().to(SeqHandler(clear_all_documents))),
     )
     // this route needs to be before the /documents/{document_id} to match properly
-    .service(web::resource("/delete-batch").route(web::post().to(delete_documents)))
+    .service(web::resource("/delete-batch").route(web::post().to(SeqHandler(delete_documents))))
     .service(
         web::resource("/{document_id}")
-            .route(web::get().to(get_document))
-            .route(web::delete().to(delete_document)),
+            .route(web::get().to(SeqHandler(get_document)))
+            .route(web::delete().to(SeqHandler(delete_document))),
     );
 }
 
 pub async fn get_document(
-    meilisearch: GuardedData<Public, MeiliSearch>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, MeiliSearch>,
     path: web::Path<DocumentParam>,
 ) -> Result<HttpResponse, ResponseError> {
     let index = path.index_uid.clone();
@@ -67,7 +100,7 @@ pub async fn get_document(
 }
 
 pub async fn delete_document(
-    meilisearch: GuardedData<Private, MeiliSearch>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, MeiliSearch>,
     path: web::Path<DocumentParam>,
 ) -> Result<HttpResponse, ResponseError> {
     let DocumentParam {
@@ -75,11 +108,9 @@ pub async fn delete_document(
         index_uid,
     } = path.into_inner();
     let update = Update::DeleteDocuments(vec![document_id]);
-    let update_status = meilisearch
-        .register_update(index_uid, update, false)
-        .await?;
-    debug!("returns: {:?}", update_status);
-    Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
+    let task: SummarizedTaskView = meilisearch.register_update(index_uid, update).await?.into();
+    debug!("returns: {:?}", task);
+    Ok(HttpResponse::Accepted().json(task))
 }
 
 #[derive(Deserialize, Debug)]
@@ -91,8 +122,8 @@ pub struct BrowseQuery {
 }
 
 pub async fn get_all_documents(
-    meilisearch: GuardedData<Public, MeiliSearch>,
-    path: web::Path<IndexParam>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, MeiliSearch>,
+    path: web::Path<String>,
     params: web::Query<BrowseQuery>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
@@ -109,7 +140,7 @@ pub async fn get_all_documents(
 
     let documents = meilisearch
         .documents(
-            path.index_uid.clone(),
+            path.into_inner(),
             params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET),
             params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT),
             attributes_to_retrieve,
@@ -122,75 +153,93 @@ pub async fn get_all_documents(
 #[derive(Deserialize, Debug)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct UpdateDocumentsQuery {
-    primary_key: Option<String>,
+    pub primary_key: Option<String>,
 }
 
 pub async fn add_documents(
-    meilisearch: GuardedData<Private, MeiliSearch>,
-    path: web::Path<IndexParam>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, MeiliSearch>,
+    path: web::Path<String>,
     params: web::Query<UpdateDocumentsQuery>,
     body: Payload,
     req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
-    document_addition(
-        req.headers()
-            .get("Content-type")
-            .map(|s| s.to_str().unwrap_or("unkown")),
+    let params = params.into_inner();
+    let index_uid = path.into_inner();
+
+    analytics.add_documents(
+        &params,
+        meilisearch.get_index(index_uid.clone()).await.is_err(),
+        &req,
+    );
+
+    let allow_index_creation = meilisearch.filters().allow_index_creation;
+    let task = document_addition(
+        extract_mime_type(&req)?,
         meilisearch,
-        path.into_inner().index_uid,
-        params.into_inner().primary_key,
+        index_uid,
+        params.primary_key,
         body,
         IndexDocumentsMethod::ReplaceDocuments,
+        allow_index_creation,
     )
-    .await
+    .await?;
+
+    Ok(HttpResponse::Accepted().json(task))
 }
 
 pub async fn update_documents(
-    meilisearch: GuardedData<Private, MeiliSearch>,
-    path: web::Path<IndexParam>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, MeiliSearch>,
+    path: web::Path<String>,
     params: web::Query<UpdateDocumentsQuery>,
     body: Payload,
     req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
-    document_addition(
-        req.headers()
-            .get("Content-type")
-            .map(|s| s.to_str().unwrap_or("unkown")),
+    let index_uid = path.into_inner();
+
+    analytics.update_documents(
+        &params,
+        meilisearch.get_index(index_uid.clone()).await.is_err(),
+        &req,
+    );
+
+    let allow_index_creation = meilisearch.filters().allow_index_creation;
+    let task = document_addition(
+        extract_mime_type(&req)?,
         meilisearch,
-        path.into_inner().index_uid,
+        index_uid,
         params.into_inner().primary_key,
         body,
         IndexDocumentsMethod::UpdateDocuments,
+        allow_index_creation,
     )
-    .await
+    .await?;
+
+    Ok(HttpResponse::Accepted().json(task))
 }
 
-/// Route used when the payload type is "application/json"
-/// Used to add or replace documents
 async fn document_addition(
-    content_type: Option<&str>,
-    meilisearch: GuardedData<Private, MeiliSearch>,
+    mime_type: Option<Mime>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, MeiliSearch>,
     index_uid: String,
     primary_key: Option<String>,
     body: Payload,
     method: IndexDocumentsMethod,
-) -> Result<HttpResponse, ResponseError> {
-    static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
-        vec![
-            "application/json".to_string(),
-            "application/x-ndjson".to_string(),
-            "application/csv".to_string(),
-        ]
-    });
+    allow_index_creation: bool,
+) -> Result<SummarizedTaskView, ResponseError> {
+    let format = match mime_type
+        .as_ref()
+        .map(|m| (m.type_().as_str(), m.subtype().as_str()))
+    {
+        Some(("application", "json")) => DocumentAdditionFormat::Json,
+        Some(("application", "x-ndjson")) => DocumentAdditionFormat::Ndjson,
|
||||||
let format = match content_type {
|
Some(("text", "csv")) => DocumentAdditionFormat::Csv,
|
||||||
Some("application/json") => DocumentAdditionFormat::Json,
|
Some((type_, subtype)) => {
|
||||||
Some("application/x-ndjson") => DocumentAdditionFormat::Ndjson,
|
|
||||||
Some("text/csv") => DocumentAdditionFormat::Csv,
|
|
||||||
Some(other) => {
|
|
||||||
return Err(MeilisearchHttpError::InvalidContentType(
|
return Err(MeilisearchHttpError::InvalidContentType(
|
||||||
other.to_string(),
|
format!("{}/{}", type_, subtype),
|
||||||
ACCEPTED_CONTENT_TYPE.clone(),
|
ACCEPTED_CONTENT_TYPE.clone(),
|
||||||
)
|
)
|
||||||
.into())
|
.into())
|
||||||
@@ -207,17 +256,18 @@ async fn document_addition(
|
|||||||
primary_key,
|
primary_key,
|
||||||
method,
|
method,
|
||||||
format,
|
format,
|
||||||
|
allow_index_creation,
|
||||||
};
|
};
|
||||||
|
|
||||||
let update_status = meilisearch.register_update(index_uid, update, true).await?;
|
let task = meilisearch.register_update(index_uid, update).await?.into();
|
||||||
|
|
||||||
debug!("returns: {:?}", update_status);
|
debug!("returns: {:?}", task);
|
||||||
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
|
Ok(task)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn delete_documents(
|
pub async fn delete_documents(
|
||||||
meilisearch: GuardedData<Private, MeiliSearch>,
|
meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, MeiliSearch>,
|
||||||
path: web::Path<IndexParam>,
|
path: web::Path<String>,
|
||||||
body: web::Json<Vec<Value>>,
|
body: web::Json<Vec<Value>>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
debug!("called with params: {:?}", body);
|
debug!("called with params: {:?}", body);
|
||||||
@@ -231,21 +281,25 @@ pub async fn delete_documents(
|
|||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
let update = Update::DeleteDocuments(ids);
|
let update = Update::DeleteDocuments(ids);
|
||||||
let update_status = meilisearch
|
let task: SummarizedTaskView = meilisearch
|
||||||
.register_update(path.into_inner().index_uid, update, false)
|
.register_update(path.into_inner(), update)
|
||||||
.await?;
|
.await?
|
||||||
debug!("returns: {:?}", update_status);
|
.into();
|
||||||
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
|
|
||||||
|
debug!("returns: {:?}", task);
|
||||||
|
Ok(HttpResponse::Accepted().json(task))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn clear_all_documents(
|
pub async fn clear_all_documents(
|
||||||
meilisearch: GuardedData<Private, MeiliSearch>,
|
meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, MeiliSearch>,
|
||||||
path: web::Path<IndexParam>,
|
path: web::Path<String>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
let update = Update::ClearDocuments;
|
let update = Update::ClearDocuments;
|
||||||
let update_status = meilisearch
|
let task: SummarizedTaskView = meilisearch
|
||||||
.register_update(path.into_inner().index_uid, update, false)
|
.register_update(path.into_inner(), update)
|
||||||
.await?;
|
.await?
|
||||||
debug!("returns: {:?}", update_status);
|
.into();
|
||||||
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
|
|
||||||
|
debug!("returns: {:?}", task);
|
||||||
|
Ok(HttpResponse::Accepted().json(task))
|
||||||
}
|
}
|
||||||
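
Aside: the Content-Type dispatch above now matches on a parsed `Mime` value instead of a raw header string. A minimal standalone sketch of that pattern (assuming only the `mime` crate; the helper name is illustrative, not from the codebase):

    // Matching on (type, subtype) pairs ignores parameters such as charset,
    // which the old string comparison against "application/json" could not.
    use mime::Mime;

    fn format_name(mime_type: Option<&Mime>) -> Option<&'static str> {
        match mime_type.map(|m| (m.type_().as_str(), m.subtype().as_str())) {
            Some(("application", "json")) => Some("json"),
            Some(("application", "x-ndjson")) => Some("ndjson"),
            Some(("text", "csv")) => Some("csv"),
            _ => None,
        }
    }

    fn main() {
        let m: Mime = "application/json; charset=utf-8".parse().unwrap();
        assert_eq!(format_name(Some(&m)), Some("json"));
    }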
meilisearch-http/src/routes/indexes/mod.rs:

@@ -1,45 +1,55 @@
-use actix_web::{web, HttpResponse};
-use chrono::{DateTime, Utc};
+use actix_web::{web, HttpRequest, HttpResponse};
 use log::debug;
-use meilisearch_lib::index_controller::IndexSettings;
+use meilisearch_error::ResponseError;
+use meilisearch_lib::index_controller::Update;
 use meilisearch_lib::MeiliSearch;
 use serde::{Deserialize, Serialize};
+use serde_json::json;
+use time::OffsetDateTime;
 
-use crate::error::ResponseError;
+use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
-use crate::routes::IndexParam;
+use crate::extractors::sequential_extractor::SeqHandler;
+use crate::task::SummarizedTaskView;
 
 pub mod documents;
 pub mod search;
 pub mod settings;
-pub mod updates;
+pub mod tasks;
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("")
             .route(web::get().to(list_indexes))
-            .route(web::post().to(create_index)),
+            .route(web::post().to(SeqHandler(create_index))),
     )
     .service(
         web::scope("/{index_uid}")
             .service(
                 web::resource("")
-                    .route(web::get().to(get_index))
-                    .route(web::put().to(update_index))
-                    .route(web::delete().to(delete_index)),
+                    .route(web::get().to(SeqHandler(get_index)))
+                    .route(web::put().to(SeqHandler(update_index)))
+                    .route(web::delete().to(SeqHandler(delete_index))),
             )
-            .service(web::resource("/stats").route(web::get().to(get_index_stats)))
+            .service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats))))
            .service(web::scope("/documents").configure(documents::configure))
            .service(web::scope("/search").configure(search::configure))
-            .service(web::scope("/updates").configure(updates::configure))
+            .service(web::scope("/tasks").configure(tasks::configure))
            .service(web::scope("/settings").configure(settings::configure)),
    );
 }
 
 pub async fn list_indexes(
-    data: GuardedData<Private, MeiliSearch>,
+    data: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, MeiliSearch>,
 ) -> Result<HttpResponse, ResponseError> {
-    let indexes = data.list_indexes().await?;
+    let search_rules = &data.filters().search_rules;
+    let indexes: Vec<_> = data
+        .list_indexes()
+        .await?
+        .into_iter()
+        .filter(|i| search_rules.is_index_authorized(&i.uid))
+        .collect();
+
     debug!("returns: {:?}", indexes);
     Ok(HttpResponse::Ok().json(indexes))
 }
@@ -52,16 +62,30 @@ pub struct IndexCreateRequest {
 }
 
 pub async fn create_index(
-    meilisearch: GuardedData<Private, MeiliSearch>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, MeiliSearch>,
     body: web::Json<IndexCreateRequest>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let body = body.into_inner();
-    let meta = meilisearch.create_index(body.uid, body.primary_key).await?;
-    Ok(HttpResponse::Created().json(meta))
+    let IndexCreateRequest {
+        primary_key, uid, ..
+    } = body.into_inner();
+
+    analytics.publish(
+        "Index Created".to_string(),
+        json!({ "primary_key": primary_key }),
+        Some(&req),
+    );
+
+    let update = Update::CreateIndex { primary_key };
+    let task: SummarizedTaskView = meilisearch.register_update(uid, update).await?.into();
+
+    Ok(HttpResponse::Accepted().json(task))
 }
 
 #[derive(Debug, Deserialize)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
+#[allow(dead_code)]
 pub struct UpdateIndexRequest {
     uid: Option<String>,
     primary_key: Option<String>,
@@ -72,51 +96,67 @@ pub struct UpdateIndexRequest {
 pub struct UpdateIndexResponse {
     name: String,
     uid: String,
-    created_at: DateTime<Utc>,
-    updated_at: DateTime<Utc>,
-    primary_key: Option<String>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    created_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    updated_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    primary_key: OffsetDateTime,
 }
 
 pub async fn get_index(
-    meilisearch: GuardedData<Private, MeiliSearch>,
-    path: web::Path<IndexParam>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, MeiliSearch>,
+    path: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
-    let meta = meilisearch.get_index(path.index_uid.clone()).await?;
+    let meta = meilisearch.get_index(path.into_inner()).await?;
     debug!("returns: {:?}", meta);
     Ok(HttpResponse::Ok().json(meta))
 }
 
 pub async fn update_index(
-    meilisearch: GuardedData<Private, MeiliSearch>,
-    path: web::Path<IndexParam>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, MeiliSearch>,
+    path: web::Path<String>,
     body: web::Json<UpdateIndexRequest>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", body);
     let body = body.into_inner();
-    let settings = IndexSettings {
-        uid: body.uid,
+    analytics.publish(
+        "Index Updated".to_string(),
+        json!({ "primary_key": body.primary_key}),
+        Some(&req),
+    );
+
+    let update = Update::UpdateIndex {
         primary_key: body.primary_key,
     };
-    let meta = meilisearch
-        .update_index(path.into_inner().index_uid, settings)
-        .await?;
-    debug!("returns: {:?}", meta);
-    Ok(HttpResponse::Ok().json(meta))
+
+    let task: SummarizedTaskView = meilisearch
+        .register_update(path.into_inner(), update)
+        .await?
+        .into();
+
+    debug!("returns: {:?}", task);
+    Ok(HttpResponse::Accepted().json(task))
 }
 
 pub async fn delete_index(
-    meilisearch: GuardedData<Private, MeiliSearch>,
-    path: web::Path<IndexParam>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, MeiliSearch>,
+    path: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
-    meilisearch.delete_index(path.index_uid.clone()).await?;
-    Ok(HttpResponse::NoContent().finish())
+    let uid = path.into_inner();
+    let update = Update::DeleteIndex;
+    let task: SummarizedTaskView = meilisearch.register_update(uid, update).await?.into();
+
+    Ok(HttpResponse::Accepted().json(task))
 }
 
 pub async fn get_index_stats(
-    meilisearch: GuardedData<Private, MeiliSearch>,
-    path: web::Path<IndexParam>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::STATS_GET }>, MeiliSearch>,
+    path: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
-    let response = meilisearch.get_index_stats(path.index_uid.clone()).await?;
+    let response = meilisearch.get_index_stats(path.into_inner()).await?;
 
     debug!("returns: {:?}", response);
     Ok(HttpResponse::Ok().json(response))
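
Aside: several response structs above migrate from `chrono::DateTime<Utc>` to `time::OffsetDateTime` with explicit RFC 3339 serializers. A self-contained sketch of the serde pattern (assuming the `time` crate with its well-known-format serde support; the struct is illustrative, not from the codebase):

    use serde::Serialize;
    use time::OffsetDateTime;

    #[derive(Serialize)]
    struct Timestamped {
        // Same attribute style as UpdateIndexResponse above; requires the
        // `serde-well-known` feature of the `time` crate.
        #[serde(serialize_with = "time::serde::rfc3339::serialize")]
        created_at: OffsetDateTime,
    }

    fn main() {
        let t = Timestamped { created_at: OffsetDateTime::UNIX_EPOCH };
        // Prints {"created_at":"1970-01-01T00:00:00Z"}.
        println!("{}", serde_json::to_string(&t).unwrap());
    }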
meilisearch-http/src/routes/indexes/search.rs:

@@ -1,19 +1,21 @@
-use actix_web::{web, HttpResponse};
+use actix_web::{web, HttpRequest, HttpResponse};
 use log::debug;
+use meilisearch_auth::IndexSearchRules;
+use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{default_crop_length, SearchQuery, DEFAULT_SEARCH_LIMIT};
 use meilisearch_lib::MeiliSearch;
 use serde::Deserialize;
 use serde_json::Value;
 
-use crate::error::ResponseError;
+use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::{policies::*, GuardedData};
-use crate::routes::IndexParam;
+use crate::extractors::sequential_extractor::SeqHandler;
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("")
-            .route(web::get().to(search_with_url_query))
-            .route(web::post().to(search_with_post)),
+            .route(web::get().to(SeqHandler(search_with_url_query)))
+            .route(web::post().to(SeqHandler(search_with_post))),
     );
 }
 
@@ -79,6 +81,26 @@ impl From<SearchQueryGet> for SearchQuery {
     }
 }
 
+/// Incorporate search rules in search query
+fn add_search_rules(query: &mut SearchQuery, rules: IndexSearchRules) {
+    query.filter = match (query.filter.take(), rules.filter) {
+        (None, rules_filter) => rules_filter,
+        (filter, None) => filter,
+        (Some(filter), Some(rules_filter)) => {
+            let filter = match filter {
+                Value::Array(filter) => filter,
+                filter => vec![filter],
+            };
+            let rules_filter = match rules_filter {
+                Value::Array(rules_filter) => rules_filter,
+                rules_filter => vec![rules_filter],
+            };
+
+            Some(Value::Array([filter, rules_filter].concat()))
+        }
+    }
+}
+
 // TODO: TAMO: split on :asc, and :desc, instead of doing some weird things
 
 /// Transform the sort query parameter into something that matches the post expected format.
@@ -106,15 +128,34 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
 }
 
 pub async fn search_with_url_query(
-    meilisearch: GuardedData<Public, MeiliSearch>,
-    path: web::Path<IndexParam>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::SEARCH }>, MeiliSearch>,
+    path: web::Path<String>,
     params: web::Query<SearchQueryGet>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
-    let query = params.into_inner().into();
-    let search_result = meilisearch
-        .search(path.into_inner().index_uid, query)
-        .await?;
+    let mut query: SearchQuery = params.into_inner().into();
+
+    let index_uid = path.into_inner();
+    // Tenant token search_rules.
+    if let Some(search_rules) = meilisearch
+        .filters()
+        .search_rules
+        .get_index_search_rules(&index_uid)
+    {
+        add_search_rules(&mut query, search_rules);
+    }
+
+    let mut aggregate = SearchAggregator::from_query(&query, &req);
+
+    let search_result = meilisearch.search(index_uid, query).await;
+    if let Ok(ref search_result) = search_result {
+        aggregate.succeed(search_result);
+    }
+    analytics.get_search(aggregate);
+
+    let search_result = search_result?;
 
     // Tests that the nb_hits is always set to false
     #[cfg(test)]
@@ -125,14 +166,34 @@ pub async fn search_with_url_query(
 }
 
 pub async fn search_with_post(
-    meilisearch: GuardedData<Public, MeiliSearch>,
-    path: web::Path<IndexParam>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::SEARCH }>, MeiliSearch>,
+    path: web::Path<String>,
     params: web::Json<SearchQuery>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("search called with params: {:?}", params);
-    let search_result = meilisearch
-        .search(path.into_inner().index_uid, params.into_inner())
-        .await?;
+    let mut query = params.into_inner();
+    debug!("search called with params: {:?}", query);
+
+    let index_uid = path.into_inner();
+    // Tenant token search_rules.
+    if let Some(search_rules) = meilisearch
+        .filters()
+        .search_rules
+        .get_index_search_rules(&index_uid)
+    {
+        add_search_rules(&mut query, search_rules);
+    }
+
+    let mut aggregate = SearchAggregator::from_query(&query, &req);
+
+    let search_result = meilisearch.search(index_uid, query).await;
+    if let Ok(ref search_result) = search_result {
+        aggregate.succeed(search_result);
+    }
+    analytics.post_search(aggregate);
+
+    let search_result = search_result?;
 
     // Tests that the nb_hits is always set to false
     #[cfg(test)]
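
Aside: a standalone model of the filter merge that `add_search_rules` performs above. Both operands are normalized to arrays and concatenated, and Meilisearch treats the outer array as an AND of its elements. The free function here is illustrative, extracted from the logic shown in the hunk:

    use serde_json::{json, Value};

    fn merge_filters(filter: Option<Value>, rules_filter: Option<Value>) -> Option<Value> {
        match (filter, rules_filter) {
            (None, rules_filter) => rules_filter,
            (filter, None) => filter,
            (Some(filter), Some(rules_filter)) => {
                // Normalize both sides to arrays so scalars and arrays mix freely.
                let filter = match filter {
                    Value::Array(f) => f,
                    f => vec![f],
                };
                let rules_filter = match rules_filter {
                    Value::Array(r) => r,
                    r => vec![r],
                };
                Some(Value::Array([filter, rules_filter].concat()))
            }
        }
    }

    fn main() {
        // The user's filter and the tenant token's rule are ANDed together.
        let merged = merge_filters(Some(json!("genre = horror")), Some(json!("user_id = 1")));
        assert_eq!(merged, Some(json!(["genre = horror", "user_id = 1"])));
    }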
meilisearch-http/src/routes/indexes/settings.rs:

@@ -1,92 +1,157 @@
 use log::debug;
 
-use actix_web::{web, HttpResponse};
+use actix_web::{web, HttpRequest, HttpResponse};
+use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{Settings, Unchecked};
 use meilisearch_lib::index_controller::Update;
 use meilisearch_lib::MeiliSearch;
+use serde_json::json;
 
-use crate::error::ResponseError;
+use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::task::SummarizedTaskView;
 
 #[macro_export]
 macro_rules! make_setting_route {
-    ($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal) => {
+    ($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
         pub mod $attr {
+            use actix_web::{web, HttpRequest, HttpResponse, Resource};
             use log::debug;
-            use actix_web::{web, HttpResponse, Resource};
 
             use meilisearch_lib::milli::update::Setting;
-            use meilisearch_lib::{MeiliSearch, index::Settings, index_controller::Update};
+            use meilisearch_lib::{index::Settings, index_controller::Update, MeiliSearch};
 
-            use crate::error::ResponseError;
-            use crate::extractors::authentication::{GuardedData, policies::*};
+            use crate::analytics::Analytics;
+            use crate::extractors::authentication::{policies::*, GuardedData};
+            use crate::extractors::sequential_extractor::SeqHandler;
+            use crate::task::SummarizedTaskView;
+            use meilisearch_error::ResponseError;
 
             pub async fn delete(
-                meilisearch: GuardedData<Private, MeiliSearch>,
+                meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
                 index_uid: web::Path<String>,
             ) -> Result<HttpResponse, ResponseError> {
                 let settings = Settings {
                     $attr: Setting::Reset,
                     ..Default::default()
                 };
-                let update = Update::Settings(settings);
-                let update_status = meilisearch.register_update(index_uid.into_inner(), update, false).await?;
-                debug!("returns: {:?}", update_status);
-                Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
+
+                let allow_index_creation = meilisearch.filters().allow_index_creation;
+                let update = Update::Settings {
+                    settings,
+                    is_deletion: true,
+                    allow_index_creation,
+                };
+                let task: SummarizedTaskView = meilisearch
+                    .register_update(index_uid.into_inner(), update)
+                    .await?
+                    .into();
+
+                debug!("returns: {:?}", task);
+                Ok(HttpResponse::Accepted().json(task))
             }
 
             pub async fn update(
-                meilisearch: GuardedData<Private, MeiliSearch>,
+                meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
                 index_uid: actix_web::web::Path<String>,
                 body: actix_web::web::Json<Option<$type>>,
+                req: HttpRequest,
+                $analytics_var: web::Data<dyn Analytics>,
             ) -> std::result::Result<HttpResponse, ResponseError> {
+                let body = body.into_inner();
+
+                $analytics(&body, &req);
+
                 let settings = Settings {
-                    $attr: match body.into_inner() {
+                    $attr: match body {
                         Some(inner_body) => Setting::Set(inner_body),
-                        None => Setting::Reset
+                        None => Setting::Reset,
                     },
                     ..Default::default()
                 };
 
-                let update = Update::Settings(settings);
-                let update_status = meilisearch.register_update(index_uid.into_inner(), update, true).await?;
-                debug!("returns: {:?}", update_status);
-                Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
+                let allow_index_creation = meilisearch.filters().allow_index_creation;
+                let update = Update::Settings {
+                    settings,
+                    is_deletion: false,
+                    allow_index_creation,
+                };
+                let task: SummarizedTaskView = meilisearch
+                    .register_update(index_uid.into_inner(), update)
+                    .await?
+                    .into();
+
+                debug!("returns: {:?}", task);
+                Ok(HttpResponse::Accepted().json(task))
             }
 
             pub async fn get(
-                meilisearch: GuardedData<Private, MeiliSearch>,
+                meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_GET }>, MeiliSearch>,
                 index_uid: actix_web::web::Path<String>,
             ) -> std::result::Result<HttpResponse, ResponseError> {
                 let settings = meilisearch.settings(index_uid.into_inner()).await?;
                 debug!("returns: {:?}", settings);
                 let mut json = serde_json::json!(&settings);
                 let val = json[$camelcase_attr].take();
 
                 Ok(HttpResponse::Ok().json(val))
             }
 
             pub fn resources() -> Resource {
                 Resource::new($route)
-                    .route(web::get().to(get))
-                    .route(web::post().to(update))
-                    .route(web::delete().to(delete))
+                    .route(web::get().to(SeqHandler(get)))
+                    .route(web::post().to(SeqHandler(update)))
+                    .route(web::delete().to(SeqHandler(delete)))
             }
         }
     };
+    ($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal) => {
+        make_setting_route!($route, $type, $attr, $camelcase_attr, _analytics, |_, _| {});
+    };
 }
 
 make_setting_route!(
     "/filterable-attributes",
     std::collections::BTreeSet<String>,
     filterable_attributes,
-    "filterableAttributes"
+    "filterableAttributes",
+    analytics,
+    |setting: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "FilterableAttributes Updated".to_string(),
+            json!({
+                "filterable_attributes": {
+                    "total": setting.as_ref().map(|filter| filter.len()).unwrap_or(0),
+                    "has_geo": setting.as_ref().map(|filter| filter.contains("_geo")).unwrap_or(false),
+                }
+            }),
+            Some(req),
+        );
+    }
 );
 
 make_setting_route!(
     "/sortable-attributes",
     std::collections::BTreeSet<String>,
     sortable_attributes,
-    "sortableAttributes"
+    "sortableAttributes",
+    analytics,
+    |setting: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "SortableAttributes Updated".to_string(),
+            json!({
+                "sortable_attributes": {
+                    "total": setting.as_ref().map(|sort| sort.len()).unwrap_or(0),
+                    "has_geo": setting.as_ref().map(|sort| sort.contains("_geo")).unwrap_or(false),
+                },
+            }),
+            Some(req),
+        );
+    }
 );
 
 make_setting_route!(
@@ -100,7 +165,21 @@ make_setting_route!(
     "/searchable-attributes",
     Vec<String>,
     searchable_attributes,
-    "searchableAttributes"
+    "searchableAttributes",
+    analytics,
+    |setting: &Option<Vec<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "SearchableAttributes Updated".to_string(),
+            json!({
+                "searchable_attributes": {
+                    "total": setting.as_ref().map(|searchable| searchable.len()).unwrap_or(0),
+                },
+            }),
+            Some(req),
+        );
+    }
 );
 
 make_setting_route!(
@@ -124,16 +203,36 @@ make_setting_route!(
     "distinctAttribute"
 );
 
-make_setting_route!("/ranking-rules", Vec<String>, ranking_rules, "rankingRules");
+make_setting_route!(
+    "/ranking-rules",
+    Vec<String>,
+    ranking_rules,
+    "rankingRules",
+    analytics,
+    |setting: &Option<Vec<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "RankingRules Updated".to_string(),
+            json!({
+                "ranking_rules": {
+                    "sort_position": setting.as_ref().map(|sort| sort.iter().position(|s| s == "sort")),
+                }
+            }),
+            Some(req),
+        );
+    }
+);
 
 macro_rules! generate_configure {
     ($($mod:ident),*) => {
         pub fn configure(cfg: &mut web::ServiceConfig) {
+            use crate::extractors::sequential_extractor::SeqHandler;
             cfg.service(
                 web::resource("")
-                    .route(web::post().to(update_all))
-                    .route(web::get().to(get_all))
-                    .route(web::delete().to(delete_all)))
+                    .route(web::post().to(SeqHandler(update_all)))
+                    .route(web::get().to(SeqHandler(get_all)))
+                    .route(web::delete().to(SeqHandler(delete_all))))
                 $(.service($mod::resources()))*;
         }
     };
@@ -151,23 +250,52 @@ generate_configure!(
 );
 
 pub async fn update_all(
-    meilisearch: GuardedData<Private, MeiliSearch>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
     index_uid: web::Path<String>,
     body: web::Json<Settings<Unchecked>>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let settings = body.into_inner();
 
-    let update = Update::Settings(settings);
-    let update_result = meilisearch
-        .register_update(index_uid.into_inner(), update, true)
-        .await?;
-    let json = serde_json::json!({ "updateId": update_result.id() });
-    debug!("returns: {:?}", json);
-    Ok(HttpResponse::Accepted().json(json))
+    analytics.publish(
+        "Settings Updated".to_string(),
+        json!({
+            "ranking_rules": {
+                "sort_position": settings.ranking_rules.as_ref().set().map(|sort| sort.iter().position(|s| s == "sort")),
+            },
+            "searchable_attributes": {
+                "total": settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()).unwrap_or(0),
+            },
+            "sortable_attributes": {
+                "total": settings.sortable_attributes.as_ref().set().map(|sort| sort.len()).unwrap_or(0),
+                "has_geo": settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")).unwrap_or(false),
+            },
+            "filterable_attributes": {
+                "total": settings.filterable_attributes.as_ref().set().map(|filter| filter.len()).unwrap_or(0),
+                "has_geo": settings.filterable_attributes.as_ref().set().map(|filter| filter.iter().any(|s| s == "_geo")).unwrap_or(false),
+            },
+        }),
+        Some(&req),
+    );
+
+    let allow_index_creation = meilisearch.filters().allow_index_creation;
+    let update = Update::Settings {
+        settings,
+        is_deletion: false,
+        allow_index_creation,
+    };
+    let task: SummarizedTaskView = meilisearch
+        .register_update(index_uid.into_inner(), update)
+        .await?
+        .into();
+
+    debug!("returns: {:?}", task);
+    Ok(HttpResponse::Accepted().json(task))
 }
 
 pub async fn get_all(
-    data: GuardedData<Private, MeiliSearch>,
+    data: GuardedData<ActionPolicy<{ actions::SETTINGS_GET }>, MeiliSearch>,
     index_uid: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
     let settings = data.settings(index_uid.into_inner()).await?;
@@ -176,16 +304,22 @@ pub async fn get_all(
 }
 
 pub async fn delete_all(
-    data: GuardedData<Private, MeiliSearch>,
+    data: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
     index_uid: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
-    let settings = Settings::cleared();
+    let settings = Settings::cleared().into_unchecked();
 
-    let update = Update::Settings(settings.into_unchecked());
-    let update_result = data
-        .register_update(index_uid.into_inner(), update, false)
-        .await?;
-    let json = serde_json::json!({ "updateId": update_result.id() });
-    debug!("returns: {:?}", json);
-    Ok(HttpResponse::Accepted().json(json))
+    let allow_index_creation = data.filters().allow_index_creation;
+    let update = Update::Settings {
+        settings,
+        is_deletion: true,
+        allow_index_creation,
+    };
+    let task: SummarizedTaskView = data
+        .register_update(index_uid.into_inner(), update)
+        .await?
+        .into();
+
+    debug!("returns: {:?}", task);
+    Ok(HttpResponse::Accepted().json(task))
 }
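
Aside: the new second arm of `make_setting_route!` keeps the existing four-argument call sites (such as `"distinctAttribute"` above) compiling: they forward to the six-argument arm with a `_analytics` placeholder and a no-op closure. A reduced, self-contained model of that forwarding pattern (illustrative names, not the real macro):

    macro_rules! make_route {
        // Long form: takes an explicit callback, like the `$analytics:expr` arm.
        ($route:literal, $callback:expr) => {{
            let f: fn(&str) = $callback;
            ($route, f)
        }};
        // Short form: forwards with a do-nothing callback, mirroring `|_, _| {}`.
        ($route:literal) => {
            make_route!($route, |_setting| {})
        };
    }

    fn main() {
        let (route, callback) = make_route!("/ranking-rules");
        callback("updated");
        println!("configured {}", route);
    }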
meilisearch-http/src/routes/indexes/tasks.rs (new file, 80 lines):

@@ -0,0 +1,80 @@
+use actix_web::{web, HttpRequest, HttpResponse};
+use log::debug;
+use meilisearch_error::ResponseError;
+use meilisearch_lib::MeiliSearch;
+use serde::{Deserialize, Serialize};
+use serde_json::json;
+use time::OffsetDateTime;
+
+use crate::analytics::Analytics;
+use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;
+use crate::task::{TaskListView, TaskView};
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(web::resource("").route(web::get().to(SeqHandler(get_all_tasks_status))))
+        .service(web::resource("{task_id}").route(web::get().to(SeqHandler(get_task_status))));
+}
+
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct UpdateIndexResponse {
+    name: String,
+    uid: String,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    created_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    updated_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    primary_key: OffsetDateTime,
+}
+
+#[derive(Deserialize)]
+pub struct UpdateParam {
+    index_uid: String,
+    task_id: u64,
+}
+
+pub async fn get_task_status(
+    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
+    index_uid: web::Path<UpdateParam>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
+) -> Result<HttpResponse, ResponseError> {
+    analytics.publish(
+        "Index Tasks Seen".to_string(),
+        json!({ "per_task_uid": true }),
+        Some(&req),
+    );
+
+    let UpdateParam { index_uid, task_id } = index_uid.into_inner();
+
+    let task: TaskView = meilisearch.get_index_task(index_uid, task_id).await?.into();
+
+    debug!("returns: {:?}", task);
+    Ok(HttpResponse::Ok().json(task))
+}
+
+pub async fn get_all_tasks_status(
+    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
+    index_uid: web::Path<String>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
+) -> Result<HttpResponse, ResponseError> {
+    analytics.publish(
+        "Index Tasks Seen".to_string(),
+        json!({ "per_task_uid": false }),
+        Some(&req),
+    );
+
+    let tasks: TaskListView = meilisearch
+        .list_index_task(index_uid.into_inner(), None, None)
+        .await?
+        .into_iter()
+        .map(TaskView::from)
+        .collect::<Vec<_>>()
+        .into();
+
+    debug!("returns: {:?}", tasks);
+    Ok(HttpResponse::Ok().json(tasks))
+}
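
Aside: `get_all_tasks_status` ends with `.collect::<Vec<_>>().into()`, which presumes a `From<Vec<TaskView>>` impl for `TaskListView`. The real fields of `TaskListView` are not visible in this diff, so the stand-in below only illustrates the conversion shape that call chain relies on:

    #[derive(Debug)]
    struct TaskView(u64);

    #[derive(Debug)]
    struct TaskListView {
        results: Vec<TaskView>, // hypothetical field name
    }

    impl From<Vec<TaskView>> for TaskListView {
        fn from(results: Vec<TaskView>) -> Self {
            TaskListView { results }
        }
    }

    fn main() {
        let tasks: TaskListView = (0..3).map(TaskView).collect::<Vec<_>>().into();
        assert_eq!(tasks.results.len(), 3);
    }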
meilisearch-http/src/routes/indexes/updates.rs (file deleted, 66 lines):

@@ -1,66 +0,0 @@
-use actix_web::{web, HttpResponse};
-use chrono::{DateTime, Utc};
-use log::debug;
-use meilisearch_lib::MeiliSearch;
-use serde::{Deserialize, Serialize};
-
-use crate::error::ResponseError;
-use crate::extractors::authentication::{policies::*, GuardedData};
-use crate::routes::{IndexParam, UpdateStatusResponse};
-
-pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::get().to(get_all_updates_status)))
-        .service(web::resource("{update_id}").route(web::get().to(get_update_status)));
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
-struct UpdateIndexRequest {
-    uid: Option<String>,
-    primary_key: Option<String>,
-}
-
-#[derive(Debug, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct UpdateIndexResponse {
-    name: String,
-    uid: String,
-    created_at: DateTime<Utc>,
-    updated_at: DateTime<Utc>,
-    primary_key: Option<String>,
-}
-
-#[derive(Deserialize)]
-pub struct UpdateParam {
-    index_uid: String,
-    update_id: u64,
-}
-
-pub async fn get_update_status(
-    meilisearch: GuardedData<Private, MeiliSearch>,
-    path: web::Path<UpdateParam>,
-) -> Result<HttpResponse, ResponseError> {
-    let params = path.into_inner();
-    let meta = meilisearch
-        .update_status(params.index_uid, params.update_id)
-        .await?;
-    let meta = UpdateStatusResponse::from(meta);
-    debug!("returns: {:?}", meta);
-    Ok(HttpResponse::Ok().json(meta))
-}
-
-pub async fn get_all_updates_status(
-    meilisearch: GuardedData<Private, MeiliSearch>,
-    path: web::Path<IndexParam>,
-) -> Result<HttpResponse, ResponseError> {
-    let metas = meilisearch
-        .all_update_status(path.into_inner().index_uid)
-        .await?;
-    let metas = metas
-        .into_iter()
-        .map(UpdateStatusResponse::from)
-        .collect::<Vec<_>>();
-
-    debug!("returns: {:?}", metas);
-    Ok(HttpResponse::Ok().json(metas))
-}
|
|||||||
@@ -1,25 +1,24 @@
|
|||||||
use std::time::Duration;
|
|
||||||
|
|
||||||
use actix_web::{web, HttpResponse};
|
use actix_web::{web, HttpResponse};
|
||||||
use chrono::{DateTime, Utc};
|
|
||||||
use log::debug;
|
use log::debug;
|
||||||
use meilisearch_lib::index_controller::updates::status::{UpdateResult, UpdateStatus};
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use time::OffsetDateTime;
|
||||||
|
|
||||||
|
use meilisearch_error::ResponseError;
|
||||||
use meilisearch_lib::index::{Settings, Unchecked};
|
use meilisearch_lib::index::{Settings, Unchecked};
|
||||||
use meilisearch_lib::{MeiliSearch, Update};
|
use meilisearch_lib::MeiliSearch;
|
||||||
|
|
||||||
use crate::error::ResponseError;
|
|
||||||
use crate::extractors::authentication::{policies::*, GuardedData};
|
use crate::extractors::authentication::{policies::*, GuardedData};
|
||||||
use crate::ApiKeys;
|
|
||||||
|
|
||||||
|
mod api_key;
|
||||||
mod dump;
|
mod dump;
|
||||||
mod indexes;
|
pub mod indexes;
|
||||||
|
mod tasks;
|
||||||
|
|
||||||
pub fn configure(cfg: &mut web::ServiceConfig) {
|
pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||||
cfg.service(web::resource("/health").route(web::get().to(get_health)))
|
cfg.service(web::scope("/tasks").configure(tasks::configure))
|
||||||
|
.service(web::resource("/health").route(web::get().to(get_health)))
|
||||||
|
.service(web::scope("/keys").configure(api_key::configure))
|
||||||
.service(web::scope("/dumps").configure(dump::configure))
|
.service(web::scope("/dumps").configure(dump::configure))
|
||||||
.service(web::resource("/keys").route(web::get().to(list_keys)))
|
|
||||||
.service(web::resource("/stats").route(web::get().to(get_stats)))
|
.service(web::resource("/stats").route(web::get().to(get_stats)))
|
||||||
.service(web::resource("/version").route(web::get().to(get_version)))
|
.service(web::resource("/version").route(web::get().to(get_version)))
|
||||||
.service(web::scope("/indexes").configure(indexes::configure));
|
.service(web::scope("/indexes").configure(indexes::configure));
|
||||||
@@ -48,38 +47,6 @@ pub enum UpdateType {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<&UpdateStatus> for UpdateType {
|
|
||||||
fn from(other: &UpdateStatus) -> Self {
|
|
||||||
use meilisearch_lib::milli::update::IndexDocumentsMethod::*;
|
|
||||||
match other.meta() {
|
|
||||||
Update::DocumentAddition { method, .. } => {
|
|
||||||
let number = match other {
|
|
||||||
UpdateStatus::Processed(processed) => match processed.success {
|
|
||||||
UpdateResult::DocumentsAddition(ref addition) => {
|
|
||||||
Some(addition.nb_documents)
|
|
||||||
}
|
|
||||||
_ => None,
|
|
||||||
},
|
|
||||||
_ => None,
|
|
||||||
};
|
|
||||||
|
|
||||||
match method {
|
|
||||||
ReplaceDocuments => UpdateType::DocumentsAddition { number },
|
|
||||||
UpdateDocuments => UpdateType::DocumentsPartial { number },
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Update::Settings(settings) => UpdateType::Settings {
|
|
||||||
settings: settings.clone(),
|
|
||||||
},
|
|
||||||
Update::ClearDocuments => UpdateType::ClearAll,
|
|
||||||
Update::DeleteDocuments(ids) => UpdateType::DocumentsDeletion {
|
|
||||||
number: Some(ids.len()),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase")]
|
||||||
pub struct ProcessedUpdateResult {
|
pub struct ProcessedUpdateResult {
|
||||||
@@ -87,8 +54,10 @@ pub struct ProcessedUpdateResult {
|
|||||||
#[serde(rename = "type")]
|
#[serde(rename = "type")]
|
||||||
pub update_type: UpdateType,
|
pub update_type: UpdateType,
|
||||||
pub duration: f64, // in seconds
|
pub duration: f64, // in seconds
|
||||||
pub enqueued_at: DateTime<Utc>,
|
#[serde(with = "time::serde::rfc3339")]
|
||||||
pub processed_at: DateTime<Utc>,
|
pub enqueued_at: OffsetDateTime,
|
||||||
|
#[serde(with = "time::serde::rfc3339")]
|
||||||
|
pub processed_at: OffsetDateTime,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
@@ -97,11 +66,12 @@ pub struct FailedUpdateResult {
|
|||||||
pub update_id: u64,
|
pub update_id: u64,
|
||||||
#[serde(rename = "type")]
|
#[serde(rename = "type")]
|
||||||
pub update_type: UpdateType,
|
pub update_type: UpdateType,
|
||||||
#[serde(flatten)]
|
pub error: ResponseError,
|
||||||
pub response: ResponseError,
|
|
||||||
pub duration: f64, // in seconds
|
pub duration: f64, // in seconds
|
||||||
pub enqueued_at: DateTime<Utc>,
|
#[serde(with = "time::serde::rfc3339")]
|
||||||
pub processed_at: DateTime<Utc>,
|
pub enqueued_at: OffsetDateTime,
|
||||||
|
#[serde(with = "time::serde::rfc3339")]
|
||||||
|
pub processed_at: OffsetDateTime,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
@@ -110,9 +80,13 @@ pub struct EnqueuedUpdateResult {
|
|||||||
pub update_id: u64,
|
pub update_id: u64,
|
||||||
#[serde(rename = "type")]
|
#[serde(rename = "type")]
|
||||||
pub update_type: UpdateType,
|
pub update_type: UpdateType,
|
||||||
pub enqueued_at: DateTime<Utc>,
|
#[serde(with = "time::serde::rfc3339")]
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
pub enqueued_at: OffsetDateTime,
|
||||||
pub started_processing_at: Option<DateTime<Utc>>,
|
#[serde(
|
||||||
|
skip_serializing_if = "Option::is_none",
|
||||||
|
with = "time::serde::rfc3339::option"
|
||||||
|
)]
|
||||||
|
pub started_processing_at: Option<OffsetDateTime>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
@@ -136,81 +110,6 @@ pub enum UpdateStatusResponse {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<UpdateStatus> for UpdateStatusResponse {
|
|
||||||
fn from(other: UpdateStatus) -> Self {
|
|
||||||
let update_type = UpdateType::from(&other);
|
|
||||||
|
|
||||||
match other {
|
|
||||||
UpdateStatus::Processing(processing) => {
|
|
||||||
let content = EnqueuedUpdateResult {
|
|
||||||
update_id: processing.id(),
|
|
||||||
update_type,
|
|
||||||
enqueued_at: processing.from.enqueued_at,
|
|
||||||
started_processing_at: Some(processing.started_processing_at),
|
|
||||||
};
|
|
||||||
UpdateStatusResponse::Processing { content }
|
|
||||||
}
|
|
||||||
UpdateStatus::Enqueued(enqueued) => {
|
|
||||||
let content = EnqueuedUpdateResult {
|
|
||||||
update_id: enqueued.id(),
|
|
||||||
update_type,
|
|
||||||
enqueued_at: enqueued.enqueued_at,
|
|
||||||
started_processing_at: None,
|
|
||||||
};
|
|
||||||
UpdateStatusResponse::Enqueued { content }
|
|
||||||
}
|
|
||||||
UpdateStatus::Processed(processed) => {
|
|
||||||
let duration = processed
|
|
||||||
.processed_at
|
|
||||||
.signed_duration_since(processed.from.started_processing_at)
|
|
||||||
.num_milliseconds();
|
|
||||||
|
|
||||||
// necessary since chrono::duration don't expose a f64 secs method.
|
|
||||||
let duration = Duration::from_millis(duration as u64).as_secs_f64();
|
|
||||||
|
|
||||||
let content = ProcessedUpdateResult {
|
|
||||||
update_id: processed.id(),
|
|
||||||
update_type,
|
|
||||||
duration,
|
|
||||||
enqueued_at: processed.from.from.enqueued_at,
|
|
||||||
processed_at: processed.processed_at,
|
|
||||||
};
|
|
||||||
UpdateStatusResponse::Processed { content }
|
|
||||||
}
|
|
||||||
UpdateStatus::Aborted(_) => unreachable!(),
|
|
||||||
UpdateStatus::Failed(failed) => {
|
|
||||||
let duration = failed
|
|
||||||
.failed_at
|
|
||||||
.signed_duration_since(failed.from.started_processing_at)
|
|
||||||
.num_milliseconds();
|
|
||||||
|
|
||||||
// necessary since chrono::duration don't expose a f64 secs method.
|
|
||||||
let duration = Duration::from_millis(duration as u64).as_secs_f64();
|
|
||||||
|
|
||||||
let update_id = failed.id();
|
|
||||||
let processed_at = failed.failed_at;
|
|
||||||
let enqueued_at = failed.from.from.enqueued_at;
|
|
||||||
let response = failed.into();
|
|
||||||
|
|
||||||
let content = FailedUpdateResult {
|
|
||||||
update_id,
|
|
||||||
update_type,
|
|
||||||
response,
|
|
||||||
duration,
|
|
||||||
enqueued_at,
|
|
||||||
processed_at,
|
|
||||||
};
|
|
||||||
UpdateStatusResponse::Failed { content }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
|
||||||
pub struct IndexParam {
|
|
||||||
index_uid: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase")]
|
||||||
pub struct IndexUpdateResponse {
|
pub struct IndexUpdateResponse {
|
||||||
@@ -230,13 +129,14 @@ impl IndexUpdateResponse {
|
|||||||
/// }
|
/// }
|
||||||
/// ```
|
/// ```
|
||||||
pub async fn running() -> HttpResponse {
|
pub async fn running() -> HttpResponse {
|
||||||
HttpResponse::Ok().json(serde_json::json!({ "status": "MeiliSearch is running" }))
|
HttpResponse::Ok().json(serde_json::json!({ "status": "Meilisearch is running" }))
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn get_stats(
|
async fn get_stats(
|
||||||
meilisearch: GuardedData<Private, MeiliSearch>,
|
meilisearch: GuardedData<ActionPolicy<{ actions::STATS_GET }>, MeiliSearch>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
let response = meilisearch.get_all_stats().await?;
|
let search_rules = &meilisearch.filters().search_rules;
|
||||||
|
let response = meilisearch.get_all_stats(search_rules).await?;
|
||||||
|
|
||||||
debug!("returns: {:?}", response);
|
debug!("returns: {:?}", response);
|
||||||
Ok(HttpResponse::Ok().json(response))
|
Ok(HttpResponse::Ok().json(response))
|
||||||
@@ -250,7 +150,9 @@ struct VersionResponse {
|
|||||||
pkg_version: String,
|
pkg_version: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn get_version(_meilisearch: GuardedData<Private, MeiliSearch>) -> HttpResponse {
|
async fn get_version(
|
||||||
|
_meilisearch: GuardedData<ActionPolicy<{ actions::VERSION }>, MeiliSearch>,
|
||||||
|
) -> HttpResponse {
|
||||||
let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
|
let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
|
||||||
let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
|
let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
|
||||||
|
|
||||||
@@ -267,107 +169,6 @@ struct KeysResponse {
     public: Option<String>,
 }
 
-pub async fn list_keys(meilisearch: GuardedData<Admin, ApiKeys>) -> HttpResponse {
-    let api_keys = (*meilisearch).clone();
-    HttpResponse::Ok().json(&KeysResponse {
-        private: api_keys.private,
-        public: api_keys.public,
-    })
-}
-
 pub async fn get_health() -> Result<HttpResponse, ResponseError> {
     Ok(HttpResponse::Ok().json(serde_json::json!({ "status": "available" })))
 }
-
-#[cfg(test)]
-mod test {
-    use super::*;
-    use crate::extractors::authentication::GuardedData;
-
-    /// A type implemented for a route that uses a authentication policy `Policy`.
-    ///
-    /// This trait is used for regression testing of route authenticaton policies.
-    trait Is<Policy, Data, T> {}
-
-    macro_rules! impl_is_policy {
-        ($($param:ident)*) => {
-            impl<Policy, Func, Data, $($param,)* Res> Is<Policy, Data, (($($param,)*), Res)> for Func
-            where Func: Fn(GuardedData<Policy, Data>, $($param,)*) -> Res {}
-
-        };
-    }
-
-    impl_is_policy! {}
-    impl_is_policy! {A}
-    impl_is_policy! {A B}
-    impl_is_policy! {A B C}
-    impl_is_policy! {A B C D}
-
-    /// Emits a compile error if a route doesn't have the correct authentication policy.
-    ///
-    /// This works by trying to cast the route function into a Is<Policy, _> type, where Policy it
-    /// the authentication policy defined for the route.
-    macro_rules! test_auth_routes {
-        ($($policy:ident => { $($route:expr,)*})*) => {
-            #[test]
-            fn test_auth() {
-                $($(let _: &dyn Is<$policy, _, _> = &$route;)*)*
-            }
-        };
-    }
-
-    test_auth_routes! {
-        Public => {
-            indexes::search::search_with_url_query,
-            indexes::search::search_with_post,
-
-            indexes::documents::get_document,
-            indexes::documents::get_all_documents,
-        }
-        Private => {
-            get_stats,
-            get_version,
-
-            indexes::create_index,
-            indexes::list_indexes,
-            indexes::get_index_stats,
-            indexes::delete_index,
-            indexes::update_index,
-            indexes::get_index,
-
-            dump::create_dump,
-
-            indexes::settings::filterable_attributes::get,
-            indexes::settings::displayed_attributes::get,
-            indexes::settings::searchable_attributes::get,
-            indexes::settings::stop_words::get,
-            indexes::settings::synonyms::get,
-            indexes::settings::distinct_attribute::get,
-            indexes::settings::filterable_attributes::update,
-            indexes::settings::displayed_attributes::update,
-            indexes::settings::searchable_attributes::update,
-            indexes::settings::stop_words::update,
-            indexes::settings::synonyms::update,
-            indexes::settings::distinct_attribute::update,
-            indexes::settings::filterable_attributes::delete,
-            indexes::settings::displayed_attributes::delete,
-            indexes::settings::searchable_attributes::delete,
-            indexes::settings::stop_words::delete,
-            indexes::settings::synonyms::delete,
-            indexes::settings::distinct_attribute::delete,
-            indexes::settings::delete_all,
-            indexes::settings::get_all,
-            indexes::settings::update_all,
-
-            indexes::documents::clear_all_documents,
-            indexes::documents::delete_documents,
-            indexes::documents::update_documents,
-            indexes::documents::add_documents,
-            indexes::documents::delete_document,
-
-            indexes::updates::get_all_updates_status,
-            indexes::updates::get_update_status,
-        }
-        Admin => { list_keys, }
-    }
-}
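The removed `test_auth_routes!` machinery above relies on a type-level trick that is easy to miss on first read; here is a minimal self-contained sketch of the idea (an illustration, not part of the diff — `Public`, `Private`, `search`, and this cut-down `GuardedData` are stand-ins for the real types):

use std::marker::PhantomData;

struct Public;
#[allow(dead_code)]
struct Private;

// Cut-down stand-in for the real extractor: the policy is a type parameter.
struct GuardedData<P, D>(PhantomData<(P, D)>);

/// Implemented only by functions whose first argument is guarded by `Policy`.
trait Is<Policy, Data, T> {}
impl<Policy, Func, Data, Res> Is<Policy, Data, ((), Res)> for Func where
    Func: Fn(GuardedData<Policy, Data>) -> Res
{
}

fn search(_guard: GuardedData<Public, ()>) -> &'static str {
    "ok"
}

fn main() {
    // Compiles because `search` takes `GuardedData<Public, _>`;
    // `let _: &dyn Is<Private, _, _> = &search;` would be a compile error.
    let _: &dyn Is<Public, _, _> = &search;
}

Because the coercion only succeeds when the handler's guard policy matches, a route registered under the wrong policy breaks the build rather than the test suite.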
meilisearch-http/src/routes/tasks.rs (new file, 80 lines)
@@ -0,0 +1,80 @@
use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_error::ResponseError;
use meilisearch_lib::tasks::task::TaskId;
use meilisearch_lib::tasks::TaskFilter;
use meilisearch_lib::MeiliSearch;
use serde_json::json;

use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::{TaskListView, TaskView};

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks))))
        .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
}

async fn get_tasks(
    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.publish(
        "Tasks Seen".to_string(),
        json!({ "per_task_uid": false }),
        Some(&req),
    );

    let search_rules = &meilisearch.filters().search_rules;
    let filters = if search_rules.is_index_authorized("*") {
        None
    } else {
        let mut filters = TaskFilter::default();
        for (index, _policy) in search_rules.clone() {
            filters.filter_index(index);
        }
        Some(filters)
    };

    let tasks: TaskListView = meilisearch
        .list_tasks(filters, None, None)
        .await?
        .into_iter()
        .map(TaskView::from)
        .collect::<Vec<_>>()
        .into();

    Ok(HttpResponse::Ok().json(tasks))
}

async fn get_task(
    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
    task_id: web::Path<TaskId>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.publish(
        "Tasks Seen".to_string(),
        json!({ "per_task_uid": true }),
        Some(&req),
    );

    let search_rules = &meilisearch.filters().search_rules;
    let filters = if search_rules.is_index_authorized("*") {
        None
    } else {
        let mut filters = TaskFilter::default();
        for (index, _policy) in search_rules.clone() {
            filters.filter_index(index);
        }
        Some(filters)
    };

    let task: TaskView = meilisearch
        .get_task(task_id.into_inner(), filters)
        .await?
        .into();

    Ok(HttpResponse::Ok().json(task))
}
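Both handlers above derive the same optional `TaskFilter` from the key's search rules: a key authorized for `"*"` gets no filter, otherwise every index in its rules is added. A minimal standalone sketch of that decision (using a toy `TaskFilter`, not the meilisearch-lib type):

use std::collections::HashSet;

#[derive(Default)]
struct TaskFilter {
    indexes: HashSet<String>,
}

impl TaskFilter {
    fn filter_index(&mut self, index: String) {
        self.indexes.insert(index);
    }
}

fn filters_for(authorized: &[&str]) -> Option<TaskFilter> {
    if authorized.contains(&"*") {
        None // unrestricted key: list every task
    } else {
        // restricted key: only tasks of its authorized indexes
        let mut filters = TaskFilter::default();
        for index in authorized {
            filters.filter_index(index.to_string());
        }
        Some(filters)
    }
}

fn main() {
    assert!(filters_for(&["*"]).is_none());
    assert_eq!(filters_for(&["products"]).unwrap().indexes.len(), 1);
}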
meilisearch-http/src/task.rs (new file, 374 lines)
@@ -0,0 +1,374 @@
use std::fmt::Write;
use std::write;

use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::milli::update::IndexDocumentsMethod;
use meilisearch_lib::tasks::batch::BatchId;
use meilisearch_lib::tasks::task::{
    DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
};
use serde::{Serialize, Serializer};
use time::{Duration, OffsetDateTime};

use crate::AUTOBATCHING_ENABLED;

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
enum TaskType {
    IndexCreation,
    IndexUpdate,
    IndexDeletion,
    DocumentAddition,
    DocumentPartial,
    DocumentDeletion,
    SettingsUpdate,
    ClearAll,
}

impl From<TaskContent> for TaskType {
    fn from(other: TaskContent) -> Self {
        match other {
            TaskContent::DocumentAddition {
                merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
                ..
            } => TaskType::DocumentAddition,
            TaskContent::DocumentAddition {
                merge_strategy: IndexDocumentsMethod::UpdateDocuments,
                ..
            } => TaskType::DocumentPartial,
            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll,
            TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion,
            TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
            TaskContent::IndexDeletion => TaskType::IndexDeletion,
            TaskContent::IndexCreation { .. } => TaskType::IndexCreation,
            TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate,
            _ => unreachable!("unexpected task type"),
        }
    }
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
enum TaskStatus {
    Enqueued,
    Processing,
    Succeeded,
    Failed,
}

#[derive(Debug, Serialize)]
#[serde(untagged)]
#[allow(clippy::large_enum_variant)]
enum TaskDetails {
    #[serde(rename_all = "camelCase")]
    DocumentAddition {
        received_documents: usize,
        indexed_documents: Option<u64>,
    },
    #[serde(rename_all = "camelCase")]
    Settings {
        #[serde(flatten)]
        settings: Settings<Unchecked>,
    },
    #[serde(rename_all = "camelCase")]
    IndexInfo { primary_key: Option<String> },
    #[serde(rename_all = "camelCase")]
    DocumentDeletion {
        received_document_ids: usize,
        deleted_documents: Option<u64>,
    },
    #[serde(rename_all = "camelCase")]
    ClearAll { deleted_documents: Option<u64> },
}

/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for
/// https://github.com/time-rs/time/issues/378.
/// This code is a port of the old code of time that was removed in 0.2.
fn serialize_duration<S: Serializer>(
    duration: &Option<Duration>,
    serializer: S,
) -> Result<S::Ok, S::Error> {
    match duration {
        Some(duration) => {
            // technically speaking, negative duration is not valid ISO 8601
            if duration.is_negative() {
                return serializer.serialize_none();
            }

            const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds();
            let secs = duration.whole_seconds();
            let days = secs / SECS_PER_DAY;
            let secs = secs - days * SECS_PER_DAY;
            let hasdate = days != 0;
            let nanos = duration.subsec_nanoseconds();
            let hastime = (secs != 0 || nanos != 0) || !hasdate;

            // all the following unwrap can't fail
            let mut res = String::new();
            write!(&mut res, "P").unwrap();

            if hasdate {
                write!(&mut res, "{}D", days).unwrap();
            }

            const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds();
            const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds();

            if hastime {
                if nanos == 0 {
                    write!(&mut res, "T{}S", secs).unwrap();
                } else if nanos % NANOS_PER_MILLI == 0 {
                    write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap();
                } else if nanos % NANOS_PER_MICRO == 0 {
                    write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap();
                } else {
                    write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap();
                }
            }

            serializer.serialize_str(&res)
        }
        None => serializer.serialize_none(),
    }
}
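To make the formatting rules above concrete, here is a small self-contained sketch (not part of the diff) that reproduces the same day/second/nanosecond split with the `time` crate and checks a few expected outputs; the microsecond branch is omitted for brevity:

use time::Duration;

// Same split as `serialize_duration` above: whole days, leftover seconds,
// then sub-second nanoseconds printed at millisecond or nanosecond precision.
fn iso8601(duration: Duration) -> String {
    let secs = duration.whole_seconds();
    let days = secs / 86_400;
    let secs = secs - days * 86_400;
    let nanos = duration.subsec_nanoseconds();
    let mut res = String::from("P");
    if days != 0 {
        res.push_str(&format!("{}D", days));
    }
    if secs != 0 || nanos != 0 || days == 0 {
        if nanos == 0 {
            res.push_str(&format!("T{}S", secs));
        } else if nanos % 1_000_000 == 0 {
            res.push_str(&format!("T{}.{:03}S", secs, nanos / 1_000_000));
        } else {
            res.push_str(&format!("T{}.{:09}S", secs, nanos));
        }
    }
    res
}

fn main() {
    assert_eq!(iso8601(Duration::seconds(3)), "PT3S");
    assert_eq!(iso8601(Duration::new(86_402, 500_000_000)), "P1DT2.500S");
    assert_eq!(iso8601(Duration::ZERO), "PT0S");
}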
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskView {
    uid: TaskId,
    index_uid: String,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,
    #[serde(skip_serializing_if = "Option::is_none")]
    details: Option<TaskDetails>,
    #[serde(skip_serializing_if = "Option::is_none")]
    error: Option<ResponseError>,
    #[serde(serialize_with = "serialize_duration")]
    duration: Option<Duration>,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    enqueued_at: OffsetDateTime,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    started_at: Option<OffsetDateTime>,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    finished_at: Option<OffsetDateTime>,
    #[serde(skip_serializing_if = "Option::is_none")]
    batch_uid: Option<Option<BatchId>>,
}

impl From<Task> for TaskView {
    fn from(task: Task) -> Self {
        let Task {
            id,
            index_uid,
            content,
            events,
        } = task;

        let (task_type, mut details) = match content {
            TaskContent::DocumentAddition {
                merge_strategy,
                documents_count,
                ..
            } => {
                let details = TaskDetails::DocumentAddition {
                    received_documents: documents_count,
                    indexed_documents: None,
                };

                let task_type = match merge_strategy {
                    IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial,
                    IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition,
                    _ => unreachable!("Unexpected document merge strategy."),
                };

                (task_type, Some(details))
            }
            TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => (
                TaskType::DocumentDeletion,
                Some(TaskDetails::DocumentDeletion {
                    received_document_ids: ids.len(),
                    deleted_documents: None,
                }),
            ),
            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => (
                TaskType::ClearAll,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
                }),
            ),
            TaskContent::IndexDeletion => (
                TaskType::IndexDeletion,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
                }),
            ),
            TaskContent::SettingsUpdate { settings, .. } => (
                TaskType::SettingsUpdate,
                Some(TaskDetails::Settings { settings }),
            ),
            TaskContent::IndexCreation { primary_key } => (
                TaskType::IndexCreation,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
            TaskContent::IndexUpdate { primary_key } => (
                TaskType::IndexUpdate,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
        };

        // An event always has at least one event: "Created"
        let (status, error, finished_at) = match events.last().unwrap() {
            TaskEvent::Created(_) => (TaskStatus::Enqueued, None, None),
            TaskEvent::Batched { .. } => (TaskStatus::Enqueued, None, None),
            TaskEvent::Processing(_) => (TaskStatus::Processing, None, None),
            TaskEvent::Succeded { timestamp, result } => {
                match (result, &mut details) {
                    (
                        TaskResult::DocumentAddition {
                            indexed_documents: num,
                            ..
                        },
                        Some(TaskDetails::DocumentAddition {
                            ref mut indexed_documents,
                            ..
                        }),
                    ) => {
                        indexed_documents.replace(*num);
                    }
                    (
                        TaskResult::DocumentDeletion {
                            deleted_documents: docs,
                            ..
                        },
                        Some(TaskDetails::DocumentDeletion {
                            ref mut deleted_documents,
                            ..
                        }),
                    ) => {
                        deleted_documents.replace(*docs);
                    }
                    (
                        TaskResult::ClearAll {
                            deleted_documents: docs,
                        },
                        Some(TaskDetails::ClearAll {
                            ref mut deleted_documents,
                        }),
                    ) => {
                        deleted_documents.replace(*docs);
                    }
                    _ => (),
                }
                (TaskStatus::Succeeded, None, Some(*timestamp))
            }
            TaskEvent::Failed { timestamp, error } => {
                match details {
                    Some(TaskDetails::DocumentDeletion {
                        ref mut deleted_documents,
                        ..
                    }) => {
                        deleted_documents.replace(0);
                    }
                    Some(TaskDetails::ClearAll {
                        ref mut deleted_documents,
                        ..
                    }) => {
                        deleted_documents.replace(0);
                    }
                    Some(TaskDetails::DocumentAddition {
                        ref mut indexed_documents,
                        ..
                    }) => {
                        indexed_documents.replace(0);
                    }
                    _ => (),
                }
                (TaskStatus::Failed, Some(error.clone()), Some(*timestamp))
            }
        };

        let enqueued_at = match events.first() {
            Some(TaskEvent::Created(ts)) => *ts,
            _ => unreachable!("A task must always have a creation event."),
        };

        let started_at = events.iter().find_map(|e| match e {
            TaskEvent::Processing(ts) => Some(*ts),
            _ => None,
        });

        let duration = finished_at.zip(started_at).map(|(tf, ts)| (tf - ts));

        let batch_uid = if AUTOBATCHING_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {
            let id = events.iter().find_map(|e| match e {
                TaskEvent::Batched { batch_id, .. } => Some(*batch_id),
                _ => None,
            });
            Some(id)
        } else {
            None
        };

        Self {
            uid: id,
            index_uid: index_uid.into_inner(),
            status,
            task_type,
            details,
            error,
            duration,
            enqueued_at,
            started_at,
            finished_at,
            batch_uid,
        }
    }
}

#[derive(Debug, Serialize)]
pub struct TaskListView {
    results: Vec<TaskView>,
}

impl From<Vec<TaskView>> for TaskListView {
    fn from(results: Vec<TaskView>) -> Self {
        Self { results }
    }
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
    uid: TaskId,
    index_uid: String,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    enqueued_at: OffsetDateTime,
}

impl From<Task> for SummarizedTaskView {
    fn from(mut other: Task) -> Self {
        let created_event = other
            .events
            .drain(..1)
            .next()
            .expect("Task must have an enqueued event.");

        let enqueued_at = match created_event {
            TaskEvent::Created(ts) => ts,
            _ => unreachable!("The first event of a task must always be 'Created'"),
        };

        Self {
            uid: other.id,
            index_uid: other.index_uid.to_string(),
            status: TaskStatus::Enqueued,
            task_type: other.content.into(),
            enqueued_at,
        }
    }
}
meilisearch-http/tests/auth/api_keys.rs (new file, 1335 lines)
(diff suppressed because it is too large)
meilisearch-http/tests/auth/authorization.rs (new file, 620 lines)
@@ -0,0 +1,620 @@
use crate::common::Server;
use ::time::format_description::well_known::Rfc3339;
use maplit::{hashmap, hashset};
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use std::collections::{HashMap, HashSet};
use time::{Duration, OffsetDateTime};

pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
    Lazy::new(|| {
        hashmap! {
            ("POST", "/indexes/products/search") => hashset!{"search", "*"},
            ("GET", "/indexes/products/search") => hashset!{"search", "*"},
            ("POST", "/indexes/products/documents") => hashset!{"documents.add", "*"},
            ("GET", "/indexes/products/documents") => hashset!{"documents.get", "*"},
            ("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "*"},
            ("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "*"},
            ("GET", "/tasks") => hashset!{"tasks.get", "*"},
            ("GET", "/indexes/products/tasks") => hashset!{"tasks.get", "*"},
            ("GET", "/indexes/products/tasks/0") => hashset!{"tasks.get", "*"},
            ("PUT", "/indexes/products/") => hashset!{"indexes.update", "*"},
            ("GET", "/indexes/products/") => hashset!{"indexes.get", "*"},
            ("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "*"},
            ("POST", "/indexes") => hashset!{"indexes.create", "*"},
            ("GET", "/indexes") => hashset!{"indexes.get", "*"},
            ("GET", "/indexes/products/settings") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/ranking-rules") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/stop-words") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/synonyms") => hashset!{"settings.get", "*"},
            ("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "*"},
            ("GET", "/indexes/products/stats") => hashset!{"stats.get", "*"},
            ("GET", "/stats") => hashset!{"stats.get", "*"},
            ("POST", "/dumps") => hashset!{"dumps.create", "*"},
            ("GET", "/dumps/0/status") => hashset!{"dumps.get", "*"},
            ("GET", "/version") => hashset!{"version", "*"},
        }
    });

pub static ALL_ACTIONS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
    AUTHORIZATIONS
        .values()
        .cloned()
        .reduce(|l, r| l.union(&r).cloned().collect())
        .unwrap()
});

static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
    json!({"message": "The provided API key is invalid.",
        "code": "invalid_api_key",
        "type": "auth",
        "link": "https://docs.meilisearch.com/errors#invalid_api_key"
    })
});

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_expired_key() {
    use std::{thread, time};

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["products"],
        "actions": ALL_ACTIONS.clone(),
        "expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    // wait until the key is expired.
    thread::sleep(time::Duration::new(1, 0));

    for (method, route) in AUTHORIZATIONS.keys() {
        let (response, code) = server.dummy_request(method, route).await;

        assert_eq!(response, INVALID_RESPONSE.clone());
        assert_eq!(code, 403);
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_unauthorized_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["sales"],
        "actions": ALL_ACTIONS.clone(),
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    for (method, route) in AUTHORIZATIONS
        .keys()
        // filter `products` index routes
        .filter(|(_, route)| route.starts_with("/indexes/products"))
    {
        let (response, code) = server.dummy_request(method, route).await;

        assert_eq!(response, INVALID_RESPONSE.clone());
        assert_eq!(code, 403);
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_unauthorized_action() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["products"],
        "actions": [],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    for ((method, route), action) in AUTHORIZATIONS.iter() {
        server.use_api_key("MASTER_KEY");

        // Patch API key letting all rights but the needed one.
        let content = json!({
            "actions": ALL_ACTIONS.difference(action).collect::<Vec<_>>(),
        });
        let (_, code) = server.patch_api_key(&key, content).await;
        assert_eq!(code, 200);

        server.use_api_key(&key);
        let (response, code) = server.dummy_request(method, route).await;

        assert_eq!(response, INVALID_RESPONSE.clone());
        assert_eq!(code, 403);
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["products"],
        "actions": [],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        for action in actions {
            // Patch API key letting only the needed action.
            let content = json!({
                "actions": [action],
            });

            server.use_api_key("MASTER_KEY");
            let (_, code) = server.patch_api_key(&key, content).await;
            assert_eq!(code, 200);

            server.use_api_key(&key);
            let (response, code) = server.dummy_request(method, route).await;

            assert_ne!(response, INVALID_RESPONSE.clone());
            assert_ne!(code, 403);
        }
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": [],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        for action in actions {
            server.use_api_key("MASTER_KEY");

            // Patch API key letting only the needed action.
            let content = json!({
                "actions": [action],
            });
            let (_, code) = server.patch_api_key(&key, content).await;
            assert_eq!(code, 200);

            server.use_api_key(&key);
            let (response, code) = server.dummy_request(method, route).await;

            assert_ne!(response, INVALID_RESPONSE.clone());
            assert_ne!(code, 403);
        }
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on `products` index only.
    let content = json!({
        "indexes": ["products"],
        "actions": ["stats.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.stats().await;
    assert_eq!(code, 200);

    // key should have access on `products` index.
    assert!(response["indexes"].get("products").is_some());

    // key should not have access on `test` index.
    assert!(response["indexes"].get("test").is_none());
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["stats.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.stats().await;
    assert_eq!(code, 200);

    // key should have access on `products` index.
    assert!(response["indexes"].get("products").is_some());

    // key should have access on `test` index.
    assert!(response["indexes"].get("test").is_some());
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on `products` index only.
    let content = json!({
        "indexes": ["products"],
        "actions": ["indexes.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.list_indexes().await;
    assert_eq!(code, 200);

    let response = response.as_array().unwrap();
    // key should have access on `products` index.
    assert!(response.iter().any(|index| index["uid"] == "products"));

    // key should not have access on `test` index.
    assert!(!response.iter().any(|index| index["uid"] == "test"));
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["indexes.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.list_indexes().await;
    assert_eq!(code, 200);

    let response = response.as_array().unwrap();
    // key should have access on `products` index.
    assert!(response.iter().any(|index| index["uid"] == "products"));

    // key should have access on `test` index.
    assert!(response.iter().any(|index| index["uid"] == "test"));
}

#[actix_rt::test]
async fn list_authorized_tasks_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on `products` index only.
    let content = json!({
        "indexes": ["products"],
        "actions": ["tasks.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.service.get("/tasks").await;
    assert_eq!(code, 200);
    println!("{}", response);
    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
    assert!(response.iter().any(|task| task["indexUid"] == "products"));

    // key should not have access on `test` index.
    assert!(!response.iter().any(|task| task["indexUid"] == "test"));
}

#[actix_rt::test]
async fn list_authorized_tasks_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["tasks.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.service.get("/tasks").await;
    assert_eq!(code, 200);

    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
    assert!(response.iter().any(|task| task["indexUid"] == "products"));

    // key should have access on `test` index.
    assert!(response.iter().any(|task| task["indexUid"] == "test"));
}

#[actix_rt::test]
async fn error_creating_index_without_action() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create key with access on all indexes.
    let content = json!({
        "indexes": ["*"],
        // Give all action but the ones allowing to create an index.
        "actions": ALL_ACTIONS.iter().cloned().filter(|a| !AUTHORIZATIONS.get(&("POST","/indexes")).unwrap().contains(a)).collect::<Vec<_>>(),
        "expiresAt": "2050-11-13T00:00:00Z"
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let expected_error = json!({
        "message": "Index `test` not found.",
        "code": "index_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#index_not_found"
    });

    // try to create a index via add documents route
    let index = server.index("test");
    let documents = json!([
        {
            "id": 1,
            "content": "foo",
        }
    ]);

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202, "{:?}", response);
    let task_id = response["uid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;
    assert_eq!(response["status"], "failed");
    assert_eq!(response["error"], expected_error.clone());

    // try to create a index via add settings route
    let settings = json!({ "distinctAttribute": "test"});

    let (response, code) = index.update_settings(settings).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;

    assert_eq!(response["status"], "failed");
    assert_eq!(response["error"], expected_error.clone());

    // try to create a index via add specialized settings route
    let (response, code) = index.update_distinct_attribute(json!("test")).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;

    assert_eq!(response["status"], "failed");
    assert_eq!(response["error"], expected_error.clone());
}

#[actix_rt::test]
async fn lazy_create_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create key with access on all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": "2050-11-13T00:00:00Z"
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    // try to create a index via add documents route
    let index = server.index("test");
    let documents = json!([
        {
            "id": 1,
            "content": "foo",
        }
    ]);

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202, "{:?}", response);
    let task_id = response["uid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");

    // try to create a index via add settings route
    let index = server.index("test1");
    let settings = json!({ "distinctAttribute": "test"});

    let (response, code) = index.update_settings(settings).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");

    // try to create a index via add specialized settings route
    let index = server.index("test2");
    let (response, code) = index.update_distinct_attribute(json!("test")).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");
}
||||||
55
meilisearch-http/tests/auth/mod.rs
Normal file
55
meilisearch-http/tests/auth/mod.rs
Normal file
@@ -0,0 +1,55 @@
mod api_keys;
mod authorization;
mod payload;
mod tenant_token;

use crate::common::Server;
use actix_web::http::StatusCode;

use serde_json::{json, Value};

impl Server {
    pub fn use_api_key(&mut self, api_key: impl AsRef<str>) {
        self.service.api_key = Some(api_key.as_ref().to_string());
    }

    pub async fn add_api_key(&self, content: Value) -> (Value, StatusCode) {
        let url = "/keys";
        self.service.post(url, content).await
    }

    pub async fn get_api_key(&self, key: impl AsRef<str>) -> (Value, StatusCode) {
        let url = format!("/keys/{}", key.as_ref());
        self.service.get(url).await
    }

    pub async fn patch_api_key(&self, key: impl AsRef<str>, content: Value) -> (Value, StatusCode) {
        let url = format!("/keys/{}", key.as_ref());
        self.service.patch(url, content).await
    }

    pub async fn list_api_keys(&self) -> (Value, StatusCode) {
        let url = "/keys";
        self.service.get(url).await
    }

    pub async fn delete_api_key(&self, key: impl AsRef<str>) -> (Value, StatusCode) {
        let url = format!("/keys/{}", key.as_ref());
        self.service.delete(url).await
    }

    pub async fn dummy_request(
        &self,
        method: impl AsRef<str>,
        url: impl AsRef<str>,
    ) -> (Value, StatusCode) {
        match method.as_ref() {
            "POST" => self.service.post(url, json!({})).await,
            "PUT" => self.service.put(url, json!({})).await,
            "PATCH" => self.service.patch(url, json!({})).await,
            "GET" => self.service.get(url).await,
            "DELETE" => self.service.delete(url).await,
            _ => unreachable!(),
        }
    }
}
||||||
340
meilisearch-http/tests/auth/payload.rs
Normal file
340
meilisearch-http/tests/auth/payload.rs
Normal file
@@ -0,0 +1,340 @@
use crate::common::Server;
use actix_web::test;
use meilisearch_http::{analytics, create_app};
use serde_json::{json, Value};

#[actix_rt::test]
async fn error_api_key_bad_content_types() {
    let content = json!({
        "indexes": ["products"],
        "actions": [
            "documents.add"
        ],
        "expiresAt": "2050-11-13T00:00:00Z"
    });

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let app = test::init_service(create_app!(
        &server.service.meilisearch,
        &server.service.auth,
        true,
        &server.service.options,
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;

    // post
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content.to_string())
        .insert_header(("content-type", "text/plain"))
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"The Content-Type `text/plain` is invalid. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "invalid_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#invalid_content_type"
    );

    // patch
    let req = test::TestRequest::patch()
        .uri("/keys/d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4")
        .set_payload(content.to_string())
        .insert_header(("content-type", "text/plain"))
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"The Content-Type `text/plain` is invalid. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "invalid_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#invalid_content_type"
    );
}

#[actix_rt::test]
async fn error_api_key_empty_content_types() {
    let content = json!({
        "indexes": ["products"],
        "actions": [
            "documents.add"
        ],
        "expiresAt": "2050-11-13T00:00:00Z"
    });

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let app = test::init_service(create_app!(
        &server.service.meilisearch,
        &server.service.auth,
        true,
        &server.service.options,
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;

    // post
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content.to_string())
        .insert_header(("content-type", ""))
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"The Content-Type `` is invalid. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "invalid_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#invalid_content_type"
    );

    // patch
    let req = test::TestRequest::patch()
        .uri("/keys/d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4")
        .set_payload(content.to_string())
        .insert_header(("content-type", ""))
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"The Content-Type `` is invalid. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "invalid_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#invalid_content_type"
    );
}

#[actix_rt::test]
async fn error_api_key_missing_content_types() {
    let content = json!({
        "indexes": ["products"],
        "actions": [
            "documents.add"
        ],
        "expiresAt": "2050-11-13T00:00:00Z"
    });

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let app = test::init_service(create_app!(
        &server.service.meilisearch,
        &server.service.auth,
        true,
        &server.service.options,
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;

    // post
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content.to_string())
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"A Content-Type header is missing. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "missing_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#missing_content_type"
    );

    // patch
    let req = test::TestRequest::patch()
        .uri("/keys/d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4")
        .set_payload(content.to_string())
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"A Content-Type header is missing. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "missing_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#missing_content_type"
    );
}

#[actix_rt::test]
async fn error_api_key_empty_payload() {
    let content = "";

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let app = test::init_service(create_app!(
        &server.service.meilisearch,
        &server.service.auth,
        true,
        &server.service.options,
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;

    // post
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content)
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 400);
    assert_eq!(response["code"], json!("missing_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
    assert_eq!(
        response["link"],
        json!("https://docs.meilisearch.com/errors#missing_payload")
    );
    assert_eq!(response["message"], json!(r#"A json payload is missing."#));

    // patch
    let req = test::TestRequest::patch()
        .uri("/keys/d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4")
        .set_payload(content)
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 400);
    assert_eq!(response["code"], json!("missing_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
    assert_eq!(
        response["link"],
        json!("https://docs.meilisearch.com/errors#missing_payload")
    );
    assert_eq!(response["message"], json!(r#"A json payload is missing."#));
}

#[actix_rt::test]
async fn error_api_key_malformed_payload() {
    let content = r#"{"malormed": "payload""#;

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let app = test::init_service(create_app!(
        &server.service.meilisearch,
        &server.service.auth,
        true,
        &server.service.options,
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;

    // post
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content)
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 400);
    assert_eq!(response["code"], json!("malformed_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
    assert_eq!(
        response["link"],
        json!("https://docs.meilisearch.com/errors#malformed_payload")
    );
    assert_eq!(
        response["message"],
        json!(
            r#"The json payload provided is malformed. `EOF while parsing an object at line 1 column 22`."#
        )
    );

    // patch
    let req = test::TestRequest::patch()
        .uri("/keys/d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4")
        .set_payload(content)
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
|
||||||
|
let body = test::read_body(res).await;
|
||||||
|
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
|
||||||
|
assert_eq!(status_code, 400);
|
||||||
|
assert_eq!(response["code"], json!("malformed_payload"));
|
||||||
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
|
assert_eq!(
|
||||||
|
response["link"],
|
||||||
|
json!("https://docs.meilisearch.com/errors#malformed_payload")
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
response["message"],
|
||||||
|
json!(
|
||||||
|
r#"The json payload provided is malformed. `EOF while parsing an object at line 1 column 22`."#
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
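For contrast, the happy path differs from the failure cases above only in carrying the Content-Type header. A minimal sketch reusing the same harness (the 201 status is an assumption about successful key creation, not asserted by these tests):

    // hypothetical happy path: same `content` and `app` as in the tests above
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content.to_string())
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    assert_eq!(res.status(), 201);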
575
meilisearch-http/tests/auth/tenant_token.rs
Normal file
@@ -0,0 +1,575 @@
use crate::common::Server;
use ::time::format_description::well_known::Rfc3339;
use maplit::hashmap;
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use std::collections::HashMap;
use time::{Duration, OffsetDateTime};

use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};

fn generate_tenant_token(parent_key: impl AsRef<str>, mut body: HashMap<&str, Value>) -> String {
    use jsonwebtoken::{encode, EncodingKey, Header};

    let key_id = &parent_key.as_ref()[..8];
    body.insert("apiKeyPrefix", json!(key_id));
    encode(
        &Header::default(),
        &body,
        &EncodingKey::from_secret(parent_key.as_ref().as_bytes()),
    )
    .unwrap()
}

static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
            "title": "Shazam!",
            "id": "287947",
            "color": ["green", "blue"]
        },
        {
            "title": "Captain Marvel",
            "id": "299537",
            "color": ["yellow", "blue"]
        },
        {
            "title": "Escape Room",
            "id": "522681",
            "color": ["yellow", "red"]
        },
        {
            "title": "How to Train Your Dragon: The Hidden World",
            "id": "166428",
            "color": ["green", "red"]
        },
        {
            "title": "Glass",
            "id": "450465",
            "color": ["blue", "red"]
        }
    ])
});

static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
    json!({"message": "The provided API key is invalid.",
        "code": "invalid_api_key",
        "type": "auth",
        "link": "https://docs.meilisearch.com/errors#invalid_api_key"
    })
});

static ACCEPTED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
    vec![
        json!({
            "indexes": ["*"],
            "actions": ["*"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["*"],
            "actions": ["search"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["sales"],
            "actions": ["*"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["sales"],
            "actions": ["search"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
    ]
});

static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
    vec![
        // no search action
        json!({
            "indexes": ["*"],
            "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["sales"],
            "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        // bad index
        json!({
            "indexes": ["products"],
            "actions": ["*"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["products"],
            "actions": ["search"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
    ]
});

macro_rules! compute_autorized_search {
    ($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
        let mut server = Server::new_auth().await;
        server.use_api_key("MASTER_KEY");
        let index = server.index("sales");
        let documents = DOCUMENTS.clone();
        index.add_documents(documents, None).await;
        index.wait_task(0).await;
        index
            .update_settings(json!({"filterableAttributes": ["color"]}))
            .await;
        index.wait_task(1).await;
        drop(index);

        for key_content in ACCEPTED_KEYS.iter() {
            server.use_api_key("MASTER_KEY");
            let (response, code) = server.add_api_key(key_content.clone()).await;
            assert_eq!(code, 201);
            let key = response["key"].as_str().unwrap();

            for tenant_token in $tenant_tokens.iter() {
                let web_token = generate_tenant_token(&key, tenant_token.clone());
                server.use_api_key(&web_token);
                let index = server.index("sales");
                index
                    .search(json!({ "filter": $filter }), |response, code| {
                        assert_eq!(
                            code, 200,
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response, tenant_token, key_content
                        );
                        assert_eq!(
                            response["hits"].as_array().unwrap().len(),
                            $expected_count,
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response,
                            tenant_token,
                            key_content
                        );
                    })
                    .await;
            }
        }
    };
}

macro_rules! compute_forbidden_search {
    ($tenant_tokens:expr, $parent_keys:expr) => {
        let mut server = Server::new_auth().await;
        server.use_api_key("MASTER_KEY");
        let index = server.index("sales");
        let documents = DOCUMENTS.clone();
        index.add_documents(documents, None).await;
        index.wait_task(0).await;
        drop(index);

        for key_content in $parent_keys.iter() {
            server.use_api_key("MASTER_KEY");
            let (response, code) = server.add_api_key(key_content.clone()).await;
            assert_eq!(code, 201, "{:?}", response);
            let key = response["key"].as_str().unwrap();

            for tenant_token in $tenant_tokens.iter() {
                let web_token = generate_tenant_token(&key, tenant_token.clone());
                server.use_api_key(&web_token);
                let index = server.index("sales");
                index
                    .search(json!({}), |response, code| {
                        assert_eq!(
                            response,
                            INVALID_RESPONSE.clone(),
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response,
                            tenant_token,
                            key_content
                        );
                        assert_eq!(
                            code, 403,
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response, tenant_token, key_content
                        );
                    })
                    .await;
            }
        }
    };
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn search_authorized_simple_token() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"*": Value::Null}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"sales": Value::Null}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => Value::Null
        },
    ];

    compute_autorized_search!(tenant_tokens, {}, 5);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn search_authorized_filter_token() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {"filter": "color = blue"}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {"filter": "color = blue"}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": {"filter": ["color = blue"]}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        // filter on sales should override filters on *
        hashmap! {
            "searchRules" => json!({
                "*": {"filter": "color = green"},
                "sales": {"filter": "color = blue"}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {},
                "sales": {"filter": "color = blue"}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {"filter": "color = green"},
                "sales": {"filter": ["color = blue"]}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {},
                "sales": {"filter": ["color = blue"]}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_autorized_search!(tenant_tokens, {}, 3);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn filter_search_authorized_filter_token() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {"filter": "color = blue"}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {"filter": "color = blue"}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": {"filter": ["color = blue"]}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        // filter on sales should override filters on *
        hashmap! {
            "searchRules" => json!({
                "*": {"filter": "color = green"},
                "sales": {"filter": "color = blue"}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {},
                "sales": {"filter": "color = blue"}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {"filter": "color = green"},
                "sales": {"filter": ["color = blue"]}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {},
                "sales": {"filter": ["color = blue"]}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_autorized_search!(tenant_tokens, "color = yellow", 1);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_search_token_forbidden_parent_key() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_forbidden_search!(tenant_tokens, REFUSED_KEYS);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_search_forbidden_token() {
    let tenant_tokens = vec![
        // bad index
        hashmap! {
            "searchRules" => json!({"products": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["products"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"products": {}}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"products": Value::Null}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!(["products"]),
            "exp" => Value::Null
        },
        // expired token
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_forbidden_search!(tenant_tokens, ACCEPTED_KEYS);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_forbidden_routes() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
    let web_token = generate_tenant_token(&key, tenant_token);
    server.use_api_key(&web_token);

    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        if !actions.contains("search") {
            let (response, code) = server.dummy_request(method, route).await;
            assert_eq!(response, INVALID_RESPONSE.clone());
            assert_eq!(code, 403);
        }
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_expired_parent_key() {
    use std::{thread, time};
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
    let web_token = generate_tenant_token(&key, tenant_token);
    server.use_api_key(&web_token);

    // test search request while parent_key is not expired
    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_ne!(response, INVALID_RESPONSE.clone());
    assert_ne!(code, 403);

    // wait until the key is expired.
    thread::sleep(time::Duration::new(1, 0));

    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_eq!(response, INVALID_RESPONSE.clone());
    assert_eq!(code, 403);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_modified_token() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["products"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
    let web_token = generate_tenant_token(&key, tenant_token);
    server.use_api_key(&web_token);

    // test search request while web_token is valid
    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_ne!(response, INVALID_RESPONSE.clone());
    assert_ne!(code, 403);

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };

    let alt = generate_tenant_token(&key, tenant_token);
    let altered_token = [
        web_token.split('.').next().unwrap(),
        alt.split('.').nth(1).unwrap(),
        web_token.split('.').nth(2).unwrap(),
    ]
    .join(".");

    server.use_api_key(&altered_token);
    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_eq!(response, INVALID_RESPONSE.clone());
    assert_eq!(code, 403);
}
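The file above only exercises token generation; verification happens server-side. A minimal sketch of what the verifying side could look like with the same `jsonwebtoken` crate (the struct and function names here are hypothetical, not Meilisearch's actual implementation, and it assumes the token carries a numeric `exp`):

use jsonwebtoken::{decode, DecodingKey, Validation};
use serde::Deserialize;
use serde_json::Value;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct TenantClaims {
    api_key_prefix: String, // first 8 chars of the parent API key
    search_rules: Value,
    exp: i64,
}

fn verify_tenant_token(token: &str, parent_key: &str) -> Option<TenantClaims> {
    // The token is signed with the parent API key itself, so a server can
    // look the key up via `apiKeyPrefix` and re-derive the HMAC secret.
    let data = decode::<TenantClaims>(
        token,
        &DecodingKey::from_secret(parent_key.as_bytes()),
        &Validation::default(), // HS256; also rejects an expired `exp`
    )
    .ok()?;
    if data.claims.api_key_prefix == parent_key[..8] {
        Some(data.claims)
    } else {
        None
    }
}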
meilisearch-http/tests/common/index.rs
@@ -7,6 +7,7 @@ use actix_web::http::StatusCode;
 use paste::paste;
 use serde_json::{json, Value};
 use tokio::time::sleep;
+use urlencoding::encode;

 use super::service::Service;

@@ -14,12 +15,12 @@ macro_rules! make_settings_test_routes {
     ($($name:ident),+) => {
         $(paste! {
             pub async fn [<update_$name>](&self, value: Value) -> (Value, StatusCode) {
-                let url = format!("/indexes/{}/settings/{}", self.uid, stringify!($name).replace("_", "-"));
+                let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
                 self.service.post(url, value).await
             }

             pub async fn [<get_$name>](&self) -> (Value, StatusCode) {
-                let url = format!("/indexes/{}/settings/{}", self.uid, stringify!($name).replace("_", "-"));
+                let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
                 self.service.get(url).await
             }
         })*
@@ -34,19 +35,19 @@ pub struct Index<'a> {
 #[allow(dead_code)]
 impl Index<'_> {
     pub async fn get(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}", self.uid);
+        let url = format!("/indexes/{}", encode(self.uid.as_ref()));
         self.service.get(url).await
     }

     pub async fn load_test_set(&self) -> u64 {
-        let url = format!("/indexes/{}/documents", self.uid);
+        let url = format!("/indexes/{}/documents", encode(self.uid.as_ref()));
         let (response, code) = self
             .service
             .post_str(url, include_str!("../assets/test_set.json"))
             .await;
         assert_eq!(code, 202);
-        let update_id = response["updateId"].as_i64().unwrap();
-        self.wait_update_id(update_id as u64).await;
+        let update_id = response["uid"].as_i64().unwrap();
+        self.wait_task(update_id as u64).await;
         update_id as u64
     }

@@ -62,13 +63,13 @@ impl Index<'_> {
         let body = json!({
             "primaryKey": primary_key,
         });
-        let url = format!("/indexes/{}", self.uid);
+        let url = format!("/indexes/{}", encode(self.uid.as_ref()));

         self.service.put(url, body).await
     }

     pub async fn delete(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}", self.uid);
+        let url = format!("/indexes/{}", encode(self.uid.as_ref()));
         self.service.delete(url).await
     }

@@ -78,8 +79,12 @@ impl Index<'_> {
         primary_key: Option<&str>,
     ) -> (Value, StatusCode) {
         let url = match primary_key {
-            Some(key) => format!("/indexes/{}/documents?primaryKey={}", self.uid, key),
-            None => format!("/indexes/{}/documents", self.uid),
+            Some(key) => format!(
+                "/indexes/{}/documents?primaryKey={}",
+                encode(self.uid.as_ref()),
+                key
+            ),
+            None => format!("/indexes/{}/documents", encode(self.uid.as_ref())),
         };
         self.service.post(url, documents).await
     }
@@ -90,20 +95,24 @@ impl Index<'_> {
         primary_key: Option<&str>,
     ) -> (Value, StatusCode) {
         let url = match primary_key {
-            Some(key) => format!("/indexes/{}/documents?primaryKey={}", self.uid, key),
-            None => format!("/indexes/{}/documents", self.uid),
+            Some(key) => format!(
+                "/indexes/{}/documents?primaryKey={}",
+                encode(self.uid.as_ref()),
+                key
+            ),
+            None => format!("/indexes/{}/documents", encode(self.uid.as_ref())),
         };
         self.service.put(url, documents).await
     }

-    pub async fn wait_update_id(&self, update_id: u64) -> Value {
+    pub async fn wait_task(&self, update_id: u64) -> Value {
         // try 10 times to get status, or panic to not wait forever
-        let url = format!("/indexes/{}/updates/{}", self.uid, update_id);
+        let url = format!("/tasks/{}", update_id);
         for _ in 0..10 {
             let (response, status_code) = self.service.get(&url).await;
             assert_eq!(status_code, 200, "response: {}", response);

-            if response["status"] == "processed" || response["status"] == "failed" {
+            if response["status"] == "succeeded" || response["status"] == "failed" {
                 return response;
             }

@@ -112,13 +121,13 @@ impl Index<'_> {
         panic!("Timeout waiting for update id");
     }

-    pub async fn get_update(&self, update_id: u64) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/updates/{}", self.uid, update_id);
+    pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/tasks/{}", self.uid, update_id);
         self.service.get(url).await
     }

-    pub async fn list_updates(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/updates", self.uid);
+    pub async fn list_tasks(&self) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/tasks", self.uid);
         self.service.get(url).await
     }

@@ -127,12 +136,12 @@ impl Index<'_> {
         id: u64,
         _options: Option<GetDocumentOptions>,
     ) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/documents/{}", self.uid, id);
+        let url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
         self.service.get(url).await
     }

     pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) {
-        let mut url = format!("/indexes/{}/documents?", self.uid);
+        let mut url = format!("/indexes/{}/documents?", encode(self.uid.as_ref()));
         if let Some(limit) = options.limit {
             url.push_str(&format!("limit={}&", limit));
         }
@@ -152,39 +161,42 @@ impl Index<'_> {
     }

     pub async fn delete_document(&self, id: u64) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/documents/{}", self.uid, id);
+        let url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
         self.service.delete(url).await
     }

     pub async fn clear_all_documents(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/documents", self.uid);
+        let url = format!("/indexes/{}/documents", encode(self.uid.as_ref()));
         self.service.delete(url).await
     }

     pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/documents/delete-batch", self.uid);
+        let url = format!(
+            "/indexes/{}/documents/delete-batch",
+            encode(self.uid.as_ref())
+        );
         self.service
             .post(url, serde_json::to_value(&ids).unwrap())
             .await
     }

     pub async fn settings(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/settings", self.uid);
+        let url = format!("/indexes/{}/settings", encode(self.uid.as_ref()));
         self.service.get(url).await
     }

     pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/settings", self.uid);
+        let url = format!("/indexes/{}/settings", encode(self.uid.as_ref()));
         self.service.post(url, settings).await
     }

     pub async fn delete_settings(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/settings", self.uid);
+        let url = format!("/indexes/{}/settings", encode(self.uid.as_ref()));
         self.service.delete(url).await
     }

     pub async fn stats(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/stats", self.uid);
+        let url = format!("/indexes/{}/stats", encode(self.uid.as_ref()));
         self.service.get(url).await
     }

@@ -209,13 +221,13 @@ impl Index<'_> {
     }

     pub async fn search_post(&self, query: Value) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/search", self.uid);
+        let url = format!("/indexes/{}/search", encode(self.uid.as_ref()));
         self.service.post(url, query).await
     }

     pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
         let params = serde_url_params::to_string(&query).unwrap();
-        let url = format!("/indexes/{}/search?{}", self.uid, params);
+        let url = format!("/indexes/{}/search?{}", encode(self.uid.as_ref()), params);
         self.service.get(url).await
     }
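These changes track the move from the per-index `/indexes/{uid}/updates/{id}` endpoints to the global `/tasks/{uid}` route, with the terminal status renamed from `processed` to `succeeded`. A standalone poller doing the same job as `wait_task` could look like this (a sketch under assumptions these tests do not make: a live instance on localhost:7700 and the `reqwest` crate):

// Polls /tasks/{uid} until the task reaches a terminal status, like `wait_task`.
async fn wait_for_task(client: &reqwest::Client, uid: u64) -> serde_json::Value {
    for _ in 0..10 {
        let task: serde_json::Value = client
            .get(format!("http://localhost:7700/tasks/{}", uid))
            .send()
            .await
            .unwrap()
            .json()
            .await
            .unwrap();
        if task["status"] == "succeeded" || task["status"] == "failed" {
            return task;
        }
        tokio::time::sleep(std::time::Duration::from_millis(500)).await;
    }
    panic!("task {} never reached a terminal status", uid);
}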
meilisearch-http/tests/common/server.rs
@@ -1,13 +1,14 @@
+#![allow(dead_code)]
 use std::path::Path;

 use actix_web::http::StatusCode;
 use byte_unit::{Byte, ByteUnit};
+use meilisearch_auth::AuthController;
 use meilisearch_http::setup_meilisearch;
 use meilisearch_lib::options::{IndexerOpts, MaxMemory};
 use once_cell::sync::Lazy;
 use serde_json::Value;
 use tempfile::TempDir;
+use urlencoding::encode;

 use meilisearch_http::option::Opt;

@@ -20,7 +21,7 @@ pub struct Server {
     _dir: Option<TempDir>,
 }

-static TEST_TEMP_DIR: Lazy<TempDir> = Lazy::new(|| TempDir::new().unwrap());
+pub static TEST_TEMP_DIR: Lazy<TempDir> = Lazy::new(|| TempDir::new().unwrap());

 impl Server {
     pub async fn new() -> Self {
@@ -35,9 +36,39 @@ impl Server {
         let options = default_settings(dir.path());

         let meilisearch = setup_meilisearch(&options).unwrap();
+        let auth = AuthController::new(&options.db_path, &options.master_key).unwrap();
         let service = Service {
             meilisearch,
+            auth,
             options,
+            api_key: None,
+        };
+
+        Server {
+            service,
+            _dir: Some(dir),
+        }
+    }
+
+    pub async fn new_auth() -> Self {
+        let dir = TempDir::new().unwrap();
+
+        if cfg!(windows) {
+            std::env::set_var("TMP", TEST_TEMP_DIR.path());
+        } else {
+            std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
+        }
+
+        let mut options = default_settings(dir.path());
+        options.master_key = Some("MASTER_KEY".to_string());
+
+        let meilisearch = setup_meilisearch(&options).unwrap();
+        let auth = AuthController::new(&options.db_path, &options.master_key).unwrap();
+        let service = Service {
+            meilisearch,
+            auth,
+            options,
+            api_key: None,
         };

         Server {
@@ -48,9 +79,12 @@ impl Server {

     pub async fn new_with_options(options: Opt) -> Self {
         let meilisearch = setup_meilisearch(&options).unwrap();
+        let auth = AuthController::new(&options.db_path, &options.master_key).unwrap();
         let service = Service {
             meilisearch,
+            auth,
             options,
+            api_key: None,
         };

         Server {
@@ -62,7 +96,7 @@ impl Server {
     /// Returns a view to an index. There is no guarantee that the index exists.
     pub fn index(&self, uid: impl AsRef<str>) -> Index<'_> {
         Index {
-            uid: encode(uid.as_ref()).to_string(),
+            uid: uid.as_ref().to_string(),
             service: &self.service,
         }
     }
@@ -78,6 +112,14 @@ impl Server {
     pub async fn stats(&self) -> (Value, StatusCode) {
         self.service.get("/stats").await
     }
+
+    pub async fn tasks(&self) -> (Value, StatusCode) {
+        self.service.get("/tasks").await
+    }
+
+    pub async fn get_dump_status(&self, uid: &str) -> (Value, StatusCode) {
+        self.service.get(format!("/dumps/{}/status", uid)).await
+    }
 }

 pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
@@ -90,7 +132,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
     #[cfg(all(not(debug_assertions), feature = "analytics"))]
     no_analytics: true,
     max_index_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
-    max_udb_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
+    max_task_db_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
     http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(),
     ssl_cert_path: None,
     ssl_key_path: None,
@@ -106,11 +148,14 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
     schedule_snapshot: false,
     snapshot_interval_sec: 0,
     import_dump: None,
+    ignore_missing_dump: false,
+    ignore_dump_if_db_exists: false,
     indexer_options: IndexerOpts {
         // memory has to be unlimited because several meilisearch are running in test context.
         max_memory: MaxMemory::unlimited(),
         ..Default::default()
     },
     log_level: "off".into(),
+    scheduler_options: meilisearch_lib::options::SchedulerConfig::default(),
     }
 }
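`new_auth` is just `new` plus a master key on the default options, so a test that needs both custom options and auth can combine the pieces directly; a sketch using only items from the diffs above:

    let dir = tempfile::TempDir::new().unwrap();
    let mut options = default_settings(dir.path());
    options.master_key = Some("MASTER_KEY".to_string());
    let mut server = Server::new_with_options(options).await;
    server.use_api_key("MASTER_KEY");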
meilisearch-http/tests/common/service.rs
@@ -1,22 +1,33 @@
 use actix_web::{http::StatusCode, test};
+use meilisearch_auth::AuthController;
 use meilisearch_lib::MeiliSearch;
 use serde_json::Value;

-use meilisearch_http::{create_app, Opt};
+use meilisearch_http::{analytics, create_app, Opt};

 pub struct Service {
     pub meilisearch: MeiliSearch,
+    pub auth: AuthController,
     pub options: Opt,
+    pub api_key: Option<String>,
 }

 impl Service {
     pub async fn post(&self, url: impl AsRef<str>, body: Value) -> (Value, StatusCode) {
-        let app = test::init_service(create_app!(&self.meilisearch, true, &self.options)).await;
+        let app = test::init_service(create_app!(
+            &self.meilisearch,
+            &self.auth,
+            true,
+            &self.options,
+            analytics::MockAnalytics::new(&self.options).0
+        ))
+        .await;

-        let req = test::TestRequest::post()
-            .uri(url.as_ref())
-            .set_json(&body)
-            .to_request();
+        let mut req = test::TestRequest::post().uri(url.as_ref()).set_json(&body);
+        if let Some(api_key) = &self.api_key {
+            req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
+        }
+        let req = req.to_request();
         let res = test::call_service(&app, req).await;
         let status_code = res.status();

@@ -31,13 +42,23 @@ impl Service {
         url: impl AsRef<str>,
         body: impl AsRef<str>,
     ) -> (Value, StatusCode) {
-        let app = test::init_service(create_app!(&self.meilisearch, true, &self.options)).await;
+        let app = test::init_service(create_app!(
+            &self.meilisearch,
+            &self.auth,
+            true,
+            &self.options,
+            analytics::MockAnalytics::new(&self.options).0
+        ))
+        .await;

-        let req = test::TestRequest::post()
+        let mut req = test::TestRequest::post()
             .uri(url.as_ref())
             .set_payload(body.as_ref().to_string())
-            .insert_header(("content-type", "application/json"))
-            .to_request();
+            .insert_header(("content-type", "application/json"));
+        if let Some(api_key) = &self.api_key {
+            req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
+        }
+        let req = req.to_request();
         let res = test::call_service(&app, req).await;
         let status_code = res.status();

@@ -47,9 +68,20 @@ impl Service {
     }

     pub async fn get(&self, url: impl AsRef<str>) -> (Value, StatusCode) {
-        let app = test::init_service(create_app!(&self.meilisearch, true, &self.options)).await;
+        let app = test::init_service(create_app!(
+            &self.meilisearch,
+            &self.auth,
+            true,
+            &self.options,
+            analytics::MockAnalytics::new(&self.options).0
+        ))
+        .await;

-        let req = test::TestRequest::get().uri(url.as_ref()).to_request();
+        let mut req = test::TestRequest::get().uri(url.as_ref());
+        if let Some(api_key) = &self.api_key {
+            req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
+        }
+        let req = req.to_request();
         let res = test::call_service(&app, req).await;
         let status_code = res.status();

@@ -59,12 +91,43 @@ impl Service {
     }

     pub async fn put(&self, url: impl AsRef<str>, body: Value) -> (Value, StatusCode) {
-        let app = test::init_service(create_app!(&self.meilisearch, true, &self.options)).await;
+        let app = test::init_service(create_app!(
+            &self.meilisearch,
+            &self.auth,
+            true,
+            &self.options,
+            analytics::MockAnalytics::new(&self.options).0
+        ))
+        .await;

-        let req = test::TestRequest::put()
-            .uri(url.as_ref())
-            .set_json(&body)
-            .to_request();
+        let mut req = test::TestRequest::put().uri(url.as_ref()).set_json(&body);
+        if let Some(api_key) = &self.api_key {
+            req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
+        }
+        let req = req.to_request();
+        let res = test::call_service(&app, req).await;
+        let status_code = res.status();
+
+        let body = test::read_body(res).await;
+        let response = serde_json::from_slice(&body).unwrap_or_default();
+        (response, status_code)
+    }
+
+    pub async fn patch(&self, url: impl AsRef<str>, body: Value) -> (Value, StatusCode) {
+        let app = test::init_service(create_app!(
+            &self.meilisearch,
+            &self.auth,
+            true,
+            &self.options,
+            analytics::MockAnalytics::new(&self.options).0
+        ))
+        .await;
+
+        let mut req = test::TestRequest::patch().uri(url.as_ref()).set_json(&body);
+        if let Some(api_key) = &self.api_key {
+            req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
+        }
+        let req = req.to_request();
         let res = test::call_service(&app, req).await;
         let status_code = res.status();

@@ -74,9 +137,20 @@ impl Service {
     }

     pub async fn delete(&self, url: impl AsRef<str>) -> (Value, StatusCode) {
-        let app = test::init_service(create_app!(&self.meilisearch, true, &self.options)).await;
+        let app = test::init_service(create_app!(
+            &self.meilisearch,
+            &self.auth,
+            true,
+            &self.options,
+            analytics::MockAnalytics::new(&self.options).0
+        ))
+        .await;

-        let req = test::TestRequest::delete().uri(url.as_ref()).to_request();
+        let mut req = test::TestRequest::delete().uri(url.as_ref());
+        if let Some(api_key) = &self.api_key {
+            req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
+        }
+        let req = req.to_request();
         let res = test::call_service(&app, req).await;
         let status_code = res.status();
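With `auth` and `api_key` threaded through `Service`, auth tests no longer build authorized requests by hand; a usage sketch (hypothetical test body, names as in the diffs above):

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    // every Service helper now attaches `Authorization: Bearer MASTER_KEY`
    let (response, code) = server.service.get("/keys").await;
    assert_eq!(code, 200, "{}", response);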
@@ -4,11 +4,11 @@ mod common;
|
|||||||
|
|
||||||
use crate::common::Server;
|
use crate::common::Server;
|
||||||
use actix_web::test;
|
use actix_web::test;
|
||||||
use meilisearch_http::create_app;
|
use meilisearch_http::{analytics, create_app};
|
||||||
use serde_json::{json, Value};
|
use serde_json::{json, Value};
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn strict_json_bad_content_type() {
|
async fn error_json_bad_content_type() {
|
||||||
let routes = [
|
let routes = [
|
||||||
// all the POST routes except the dumps that can be created without any body or content-type
|
// all the POST routes except the dumps that can be created without any body or content-type
|
||||||
// and the search that is not a strict json
|
// and the search that is not a strict json
|
||||||
@@ -39,8 +39,10 @@ async fn strict_json_bad_content_type() {
|
|||||||
let server = Server::new().await;
|
let server = Server::new().await;
|
||||||
let app = test::init_service(create_app!(
|
let app = test::init_service(create_app!(
|
||||||
&server.service.meilisearch,
|
&server.service.meilisearch,
|
||||||
|
&server.service.auth,
|
||||||
true,
|
true,
|
||||||
&server.service.options
|
&server.service.options,
|
||||||
|
analytics::MockAnalytics::new(&server.service.options).0
|
||||||
))
|
))
|
||||||
.await;
|
.await;
|
||||||
for route in routes {
|
for route in routes {
|
||||||
@@ -69,10 +71,10 @@ async fn strict_json_bad_content_type() {
|
|||||||
assert_eq!(
|
assert_eq!(
|
||||||
response,
|
response,
|
||||||
json!({
|
json!({
|
||||||
"message": r#"A Content-Type header is missing. Accepted values for the Content-Type header are: "application/json""#,
+                "message": r#"A Content-Type header is missing. Accepted values for the Content-Type header are: `application/json`"#,
-                "errorCode": "missing_content_type",
+                "code": "missing_content_type",
-                "errorType": "invalid_request_error",
+                "type": "invalid_request",
-                "errorLink": "https://docs.meilisearch.com/errors#missing_content_type",
+                "link": "https://docs.meilisearch.com/errors#missing_content_type",
             }),
             "when calling the route `{}` with no content-type",
             route,
@@ -91,16 +93,16 @@ async fn strict_json_bad_content_type() {
         let response: Value = serde_json::from_slice(&body).unwrap_or_default();
         assert_eq!(status_code, 415);
         let expected_error_message = format!(
-            r#"The Content-Type "{}" is invalid. Accepted values for the Content-Type header are: "application/json""#,
+            r#"The Content-Type `{}` is invalid. Accepted values for the Content-Type header are: `application/json`"#,
             bad_content_type
         );
         assert_eq!(
             response,
             json!({
                 "message": expected_error_message,
-                "errorCode": "invalid_content_type",
+                "code": "invalid_content_type",
-                "errorType": "invalid_request_error",
+                "type": "invalid_request",
-                "errorLink": "https://docs.meilisearch.com/errors#invalid_content_type",
+                "link": "https://docs.meilisearch.com/errors#invalid_content_type",
             }),
             "when calling the route `{}` with a content-type of `{}`",
             route,
@@ -109,3 +111,40 @@ async fn strict_json_bad_content_type() {
         }
     }
 }
+
+#[actix_rt::test]
+async fn extract_actual_content_type() {
+    let route = "/indexes/doggo/documents";
+    let documents = "[{}]";
+    let server = Server::new().await;
+    let app = test::init_service(create_app!(
+        &server.service.meilisearch,
+        &server.service.auth,
+        true,
+        &server.service.options,
+        analytics::MockAnalytics::new(&server.service.options).0
+    ))
+    .await;
+
+    // Good content-type, we probably have an error since we didn't send anything in the json
+    // so we only ensure we didn't get a bad media type error.
+    let req = test::TestRequest::post()
+        .uri(route)
+        .set_payload(documents)
+        .insert_header(("content-type", "application/json; charset=utf-8"))
+        .to_request();
+    let res = test::call_service(&app, req).await;
+    let status_code = res.status();
+    assert_ne!(status_code, 415,
+        "calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);
+
+    let req = test::TestRequest::put()
+        .uri(route)
+        .set_payload(documents)
+        .insert_header(("content-type", "application/json; charset=latin-1"))
+        .to_request();
+    let res = test::call_service(&app, req).await;
+    let status_code = res.status();
+    assert_ne!(status_code, 415,
+        "calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);
+}
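The test above pins down the behaviour these assertions rely on: a Content-Type header with parameters (`charset=utf-8`, `charset=latin-1`) must still be recognised as JSON. A minimal sketch of such a check, assuming the `mime` crate — the diff does not show how meilisearch-http actually implements it, and the helper name here is hypothetical:

```rust
use mime::Mime;

/// Hypothetical helper: true when the header names JSON, ignoring
/// parameters such as `charset=...` (compares only the media-type essence).
fn is_json_content_type(header: &str) -> bool {
    header
        .parse::<Mime>()
        .map(|m| m.essence_str() == "application/json")
        .unwrap_or(false)
}

fn main() {
    assert!(is_json_content_type("application/json"));
    assert!(is_json_content_type("application/json; charset=utf-8"));
    assert!(is_json_content_type("application/json; charset=latin-1"));
    assert!(!is_json_content_type("text/plain"));
}
```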
meilisearch-http/tests/dashboard/mod.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
+use crate::common::Server;
+
+#[actix_rt::test]
+async fn dashboard_assets_load() {
+    let server = Server::new().await;
+
+    mod generated {
+        include!(concat!(env!("OUT_DIR"), "/generated.rs"));
+    }
+
+    let generated = generated::generate();
+
+    for (path, _) in generated.into_iter() {
+        let path = if path == "index.html" {
+            // "index.html" redirects to "/"
+            "/".to_owned()
+        } else {
+            "/".to_owned() + path
+        };
+
+        let (_, status_code) = server.service.get(&path).await;
+        assert_eq!(status_code, 200);
+    }
+}
(File diff suppressed because it is too large.)
@@ -5,8 +5,13 @@ use crate::common::{GetAllDocumentsOptions, Server};
 #[actix_rt::test]
 async fn delete_one_document_unexisting_index() {
     let server = Server::new().await;
-    let (_response, code) = server.index("test").delete_document(0).await;
-    assert_eq!(code, 404);
+    let index = server.index("test");
+    let (_response, code) = index.delete_document(0).await;
+    assert_eq!(code, 202);
+
+    let response = index.wait_task(0).await;
+
+    assert_eq!(response["status"], "failed");
 }
 
 #[actix_rt::test]
@@ -16,8 +21,8 @@ async fn delete_one_unexisting_document() {
     index.create(None).await;
     let (response, code) = index.delete_document(0).await;
     assert_eq!(code, 202, "{}", response);
-    let update = index.wait_update_id(0).await;
-    assert_eq!(update["status"], "processed");
+    let update = index.wait_task(0).await;
+    assert_eq!(update["status"], "succeeded");
 }
 
 #[actix_rt::test]
@@ -27,10 +32,10 @@ async fn delete_one_document() {
     index
         .add_documents(json!([{ "id": 0, "content": "foobar" }]), None)
         .await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
     let (_response, code) = server.index("test").delete_document(0).await;
     assert_eq!(code, 202);
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
 
     let (_response, code) = index.get_document(0, None).await;
     assert_eq!(code, 404);
@@ -39,8 +44,13 @@ async fn delete_one_document() {
 #[actix_rt::test]
 async fn clear_all_documents_unexisting_index() {
     let server = Server::new().await;
-    let (_response, code) = server.index("test").clear_all_documents().await;
-    assert_eq!(code, 404);
+    let index = server.index("test");
+    let (_response, code) = index.clear_all_documents().await;
+    assert_eq!(code, 202);
+
+    let response = index.wait_task(0).await;
+
+    assert_eq!(response["status"], "failed");
 }
 
 #[actix_rt::test]
@@ -53,11 +63,11 @@ async fn clear_all_documents() {
             None,
         )
         .await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
     let (_response, code) = index.clear_all_documents().await;
     assert_eq!(code, 202);
 
-    let _update = index.wait_update_id(1).await;
+    let _update = index.wait_task(1).await;
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
@@ -74,7 +84,7 @@ async fn clear_all_documents_empty_index() {
     let (_response, code) = index.clear_all_documents().await;
     assert_eq!(code, 202);
 
-    let _update = index.wait_update_id(0).await;
+    let _update = index.wait_task(0).await;
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
@@ -83,10 +93,22 @@ async fn clear_all_documents_empty_index() {
 }
 
 #[actix_rt::test]
-async fn delete_batch_unexisting_index() {
+async fn error_delete_batch_unexisting_index() {
     let server = Server::new().await;
-    let (response, code) = server.index("test").delete_batch(vec![]).await;
-    assert_eq!(code, 404, "{}", response);
+    let index = server.index("test");
+    let (_, code) = index.delete_batch(vec![]).await;
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+    assert_eq!(code, 202);
+
+    let response = index.wait_task(0).await;
+
+    assert_eq!(response["status"], "failed");
+    assert_eq!(response["error"], expected_response);
 }
 
 #[actix_rt::test]
@@ -94,11 +116,11 @@ async fn delete_batch() {
     let server = Server::new().await;
     let index = server.index("test");
     index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
     let (_response, code) = index.delete_batch(vec![1, 0]).await;
     assert_eq!(code, 202);
 
-    let _update = index.wait_update_id(1).await;
+    let _update = index.wait_task(1).await;
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
@@ -112,11 +134,11 @@ async fn delete_no_document_batch() {
     let server = Server::new().await;
     let index = server.index("test");
     index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
     let (_response, code) = index.delete_batch(vec![]).await;
     assert_eq!(code, 202, "{}", _response);
 
-    let _update = index.wait_update_id(1).await;
+    let _update = index.wait_task(1).await;
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
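Every mutation in these rewritten tests follows the same enqueue-then-poll shape: the write route now answers `202 Accepted` immediately, and the outcome (`succeeded` or `failed`, plus an `error` object) only becomes visible once `wait_task` resolves. A minimal sketch of that flow, reusing the `Server`/index test helpers that appear throughout this diff (the test name is illustrative, and the `index_not_found` error code is inferred from the `error_delete_batch_unexisting_index` test above):

```rust
use crate::common::Server;

#[actix_rt::test]
async fn enqueue_then_poll_sketch() {
    let server = Server::new().await;
    let index = server.index("sketch");

    // Writes no longer fail synchronously: even against a missing index,
    // the route answers 202 Accepted and enqueues a task...
    let (_, code) = index.delete_document(0).await;
    assert_eq!(code, 202);

    // ...and the real outcome is only visible on the task itself.
    let response = index.wait_task(0).await;
    assert_eq!(response["status"], "failed");
    assert_eq!(response["error"]["code"], "index_not_found");
}
```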
@@ -13,11 +13,21 @@ async fn get_unexisting_index_single_document() {
 }
 
 #[actix_rt::test]
-async fn get_unexisting_document() {
+async fn error_get_unexisting_document() {
     let server = Server::new().await;
     let index = server.index("test");
     index.create(None).await;
-    let (_response, code) = index.get_document(1, None).await;
+    index.wait_task(0).await;
+    let (response, code) = index.get_document(1, None).await;
+
+    let expected_response = json!({
+        "message": "Document `1` not found.",
+        "code": "document_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#document_not_found"
+    });
+
+    assert_eq!(response, expected_response);
     assert_eq!(code, 404);
 }
 
@@ -34,7 +44,7 @@ async fn get_document() {
     ]);
     let (_, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
     let (response, code) = index.get_document(0, None).await;
     assert_eq!(code, 200);
     assert_eq!(
@@ -47,21 +57,32 @@ async fn get_document() {
 }
 
 #[actix_rt::test]
-async fn get_unexisting_index_all_documents() {
+async fn error_get_unexisting_index_all_documents() {
     let server = Server::new().await;
-    let (_response, code) = server
+    let (response, code) = server
         .index("test")
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
+
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
+    assert_eq!(response, expected_response);
     assert_eq!(code, 404);
 }
 
 #[actix_rt::test]
-async fn get_no_documents() {
+async fn get_no_document() {
     let server = Server::new().await;
     let index = server.index("test");
     let (_, code) = index.create(None).await;
-    assert_eq!(code, 201);
+    assert_eq!(code, 202);
+
+    index.wait_task(0).await;
+
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions::default())
meilisearch-http/tests/dumps.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
+#![allow(dead_code)]
+mod common;
+
+use crate::common::Server;
+use serde_json::json;
+
+#[actix_rt::test]
+async fn get_unexisting_dump_status() {
+    let server = Server::new().await;
+
+    let (response, code) = server.get_dump_status("foobar").await;
+    assert_eq!(code, 404);
+
+    let expected_response = json!({
+        "message": "Dump `foobar` not found.",
+        "code": "dump_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#dump_not_found"
+    });
+
+    assert_eq!(response, expected_response);
+}
@@ -7,14 +7,15 @@ async fn create_index_no_primary_key() {
     let index = server.index("test");
     let (response, code) = index.create(None).await;
 
-    assert_eq!(code, 201);
-    assert_eq!(response["uid"], "test");
-    assert_eq!(response["name"], "test");
-    assert!(response.get("createdAt").is_some());
-    assert!(response.get("updatedAt").is_some());
-    assert_eq!(response["createdAt"], response["updatedAt"]);
-    assert_eq!(response["primaryKey"], Value::Null);
-    assert_eq!(response.as_object().unwrap().len(), 5);
+    assert_eq!(code, 202);
+    assert_eq!(response["status"], "enqueued");
+
+    let response = index.wait_task(0).await;
+
+    assert_eq!(response["status"], "succeeded");
+    assert_eq!(response["type"], "indexCreation");
+    assert_eq!(response["details"]["primaryKey"], Value::Null);
 }
 
 #[actix_rt::test]
@@ -23,14 +24,15 @@ async fn create_index_with_primary_key() {
     let index = server.index("test");
     let (response, code) = index.create(Some("primary")).await;
 
-    assert_eq!(code, 201);
-    assert_eq!(response["uid"], "test");
-    assert_eq!(response["name"], "test");
-    assert!(response.get("createdAt").is_some());
-    assert!(response.get("updatedAt").is_some());
-    //assert_eq!(response["createdAt"], response["updatedAt"]);
-    assert_eq!(response["primaryKey"], "primary");
-    assert_eq!(response.as_object().unwrap().len(), 5);
+    assert_eq!(code, 202);
+    assert_eq!(response["status"], "enqueued");
+
+    let response = index.wait_task(0).await;
+
+    assert_eq!(response["status"], "succeeded");
+    assert_eq!(response["type"], "indexCreation");
+    assert_eq!(response["details"]["primaryKey"], "primary");
 }
 
 #[actix_rt::test]
@@ -42,35 +44,13 @@ async fn create_index_with_invalid_primary_key() {
     let (_response, code) = index.add_documents(document, Some("title")).await;
     assert_eq!(code, 202);
 
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
 
     let (response, code) = index.get().await;
     assert_eq!(code, 200);
     assert_eq!(response["primaryKey"], Value::Null);
 }
 
-// TODO: partial test since we are testing error, amd error is not yet fully implemented in
-// transplant
-#[actix_rt::test]
-async fn create_existing_index() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    let (_, code) = index.create(Some("primary")).await;
-
-    assert_eq!(code, 201);
-
-    let (_response, code) = index.create(Some("primary")).await;
-    assert_eq!(code, 400);
-}
-
-#[actix_rt::test]
-async fn create_with_invalid_index_uid() {
-    let server = Server::new().await;
-    let index = server.index("test test#!");
-    let (_, code) = index.create(None).await;
-    assert_eq!(code, 400);
-}
-
 #[actix_rt::test]
 async fn test_create_multiple_indexes() {
     let server = Server::new().await;
@@ -83,8 +63,51 @@ async fn test_create_multiple_indexes() {
     index2.create(None).await;
     index3.create(None).await;
+
+    index1.wait_task(0).await;
+    index1.wait_task(1).await;
+    index1.wait_task(2).await;
+
     assert_eq!(index1.get().await.1, 200);
     assert_eq!(index2.get().await.1, 200);
     assert_eq!(index3.get().await.1, 200);
     assert_eq!(index4.get().await.1, 404);
 }
+
+#[actix_rt::test]
+async fn error_create_existing_index() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    let (_, code) = index.create(Some("primary")).await;
+
+    assert_eq!(code, 202);
+
+    index.create(Some("primary")).await;
+
+    let response = index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "Index `test` already exists.",
+        "code": "index_already_exists",
+        "type": "invalid_request",
+        "link":"https://docs.meilisearch.com/errors#index_already_exists"
+    });
+
+    assert_eq!(response["error"], expected_response);
+}
+
+#[actix_rt::test]
+async fn error_create_with_invalid_index_uid() {
+    let server = Server::new().await;
+    let index = server.index("test test#!");
+    let (response, code) = index.create(None).await;
+
+    let expected_response = json!({
+        "message": "`test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+        "code": "invalid_index_uid",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
+    });
+
+    assert_eq!(response, expected_response);
+    assert_eq!(code, 400);
+}
@@ -8,33 +8,59 @@ async fn create_and_delete_index() {
     let index = server.index("test");
     let (_response, code) = index.create(None).await;
 
-    assert_eq!(code, 201);
+    assert_eq!(code, 202);
+
+    index.wait_task(0).await;
+
+    assert_eq!(index.get().await.1, 200);
 
     let (_response, code) = index.delete().await;
 
-    assert_eq!(code, 204);
+    assert_eq!(code, 202);
+
+    index.wait_task(1).await;
+
     assert_eq!(index.get().await.1, 404);
 }
 
 #[actix_rt::test]
-async fn delete_unexisting_index() {
+async fn error_delete_unexisting_index() {
     let server = Server::new().await;
     let index = server.index("test");
-    let (_response, code) = index.delete().await;
+    let (_, code) = index.delete().await;
 
-    assert_eq!(code, 404);
+    assert_eq!(code, 202);
+
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
+    let response = index.wait_task(0).await;
+    assert_eq!(response["status"], "failed");
+    assert_eq!(response["error"], expected_response);
 }
 
 #[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
 async fn loop_delete_add_documents() {
     let server = Server::new().await;
     let index = server.index("test");
     let documents = json!([{"id": 1, "field1": "hello"}]);
+    let mut tasks = Vec::new();
     for _ in 0..50 {
         let (response, code) = index.add_documents(documents.clone(), None).await;
+        tasks.push(response["uid"].as_u64().unwrap());
         assert_eq!(code, 202, "{}", response);
         let (response, code) = index.delete().await;
-        assert_eq!(code, 204, "{}", response);
+        tasks.push(response["uid"].as_u64().unwrap());
+        assert_eq!(code, 202, "{}", response);
+    }
+
+    for task in tasks {
+        let response = index.wait_task(task).await;
+        assert_eq!(response["status"], "succeeded", "{}", response);
     }
 }
@@ -1,4 +1,5 @@
 use crate::common::Server;
+use serde_json::json;
 use serde_json::Value;
 
 #[actix_rt::test]
@@ -7,7 +8,9 @@ async fn create_and_get_index() {
     let index = server.index("test");
     let (_, code) = index.create(None).await;
 
-    assert_eq!(code, 201);
+    assert_eq!(code, 202);
+
+    index.wait_task(0).await;
 
     let (response, code) = index.get().await;
 
@@ -21,15 +24,21 @@ async fn create_and_get_index() {
     assert_eq!(response.as_object().unwrap().len(), 5);
 }
 
-// TODO: partial test since we are testing error, and error is not yet fully implemented in
-// transplant
 #[actix_rt::test]
-async fn get_unexisting_index() {
+async fn error_get_unexisting_index() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    let (_response, code) = index.get().await;
+    let (response, code) = index.get().await;
+
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
+    assert_eq!(response, expected_response);
     assert_eq!(code, 404);
 }
 
@@ -48,6 +57,8 @@ async fn list_multiple_indexes() {
     server.index("test").create(None).await;
     server.index("test1").create(Some("key")).await;
+
+    server.index("test").wait_task(1).await;
 
     let (response, code) = server.list_indexes().await;
     assert_eq!(code, 200);
     assert!(response.is_array());
@@ -60,3 +71,22 @@ async fn list_multiple_indexes() {
         .iter()
         .any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
 }
+
+#[actix_rt::test]
+async fn get_invalid_index_uid() {
+    let server = Server::new().await;
+    let index = server.index("this is not a valid index name");
+    let (response, code) = index.get().await;
+
+    assert_eq!(code, 404);
+    assert_eq!(
+        response,
+        json!(
+        {
+            "message": "Index `this is not a valid index name` not found.",
+            "code": "index_not_found",
+            "type": "invalid_request",
+            "link": "https://docs.meilisearch.com/errors#index_not_found"
+        })
+    );
+}
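The same four-field error object — `message`, `code`, `type`, `link` — recurs in every `expected_response` asserted across these files. A client wanting a typed view of it could deserialize it with serde; the struct below is an illustrative sketch mirroring the payload in this diff, not a type taken from the meilisearch codebase:

```rust
use serde::Deserialize;

// Illustrative mirror of the error payload asserted throughout these tests.
#[derive(Debug, Deserialize)]
struct ResponseError {
    message: String,
    code: String, // e.g. "index_not_found"
    #[serde(rename = "type")]
    kind: String, // e.g. "invalid_request"
    link: String, // docs URL for the error code
}

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"{
        "message": "Index `test` not found.",
        "code": "index_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#index_not_found"
    }"#;
    let err: ResponseError = serde_json::from_str(raw)?;
    assert_eq!(err.code, "index_not_found");
    Ok(())
}
```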
@@ -8,7 +8,9 @@ async fn stats() {
     let index = server.index("test");
     let (_, code) = index.create(Some("id")).await;
 
-    assert_eq!(code, 201);
+    assert_eq!(code, 202);
+
+    index.wait_task(0).await;
 
     let (response, code) = index.stats().await;
 
@@ -33,9 +35,9 @@ async fn stats() {
 
     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    assert_eq!(response["updateId"], 0);
+    assert_eq!(response["uid"], 1);
 
-    index.wait_update_id(0).await;
+    index.wait_task(1).await;
 
     let (response, code) = index.stats().await;
 
@@ -46,3 +48,19 @@ async fn stats() {
     assert_eq!(response["fieldDistribution"]["name"], 1);
     assert_eq!(response["fieldDistribution"]["age"], 1);
 }
+
+#[actix_rt::test]
+async fn error_get_stats_unexisting_index() {
+    let server = Server::new().await;
+    let (response, code) = server.index("test").stats().await;
+
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
+    assert_eq!(response, expected_response);
+    assert_eq!(code, 404);
+}
@@ -1,5 +1,6 @@
 use crate::common::Server;
-use chrono::DateTime;
+use serde_json::json;
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};
 
 #[actix_rt::test]
 async fn update_primary_key() {
@@ -7,18 +8,27 @@ async fn update_primary_key() {
     let index = server.index("test");
     let (_, code) = index.create(None).await;
 
-    assert_eq!(code, 201);
+    assert_eq!(code, 202);
 
-    let (response, code) = index.update(Some("primary")).await;
+    index.update(Some("primary")).await;
+
+    let response = index.wait_task(1).await;
+
+    assert_eq!(response["status"], "succeeded");
+
+    let (response, code) = index.get().await;
 
     assert_eq!(code, 200);
 
     assert_eq!(response["uid"], "test");
     assert_eq!(response["name"], "test");
     assert!(response.get("createdAt").is_some());
     assert!(response.get("updatedAt").is_some());
 
-    let created_at = DateTime::parse_from_rfc3339(response["createdAt"].as_str().unwrap()).unwrap();
-    let updated_at = DateTime::parse_from_rfc3339(response["updatedAt"].as_str().unwrap()).unwrap();
+    let created_at =
+        OffsetDateTime::parse(response["createdAt"].as_str().unwrap(), &Rfc3339).unwrap();
+    let updated_at =
+        OffsetDateTime::parse(response["updatedAt"].as_str().unwrap(), &Rfc3339).unwrap();
     assert!(created_at < updated_at);
 
     assert_eq!(response["primaryKey"], "primary");
@@ -29,36 +39,68 @@ async fn update_primary_key() {
 async fn update_nothing() {
     let server = Server::new().await;
     let index = server.index("test");
-    let (response, code) = index.create(None).await;
+    let (_, code) = index.create(None).await;
 
-    assert_eq!(code, 201);
+    assert_eq!(code, 202);
 
-    let (update, code) = index.update(None).await;
-    assert_eq!(code, 200);
-    assert_eq!(response, update);
+    index.wait_task(0).await;
+
+    let (_, code) = index.update(None).await;
+
+    assert_eq!(code, 202);
+
+    let response = index.wait_task(1).await;
+
+    assert_eq!(response["status"], "succeeded");
 }
 
-// TODO: partial test since we are testing error, amd error is not yet fully implemented in
-// transplant
 #[actix_rt::test]
-async fn update_existing_primary_key() {
+async fn error_update_existing_primary_key() {
     let server = Server::new().await;
     let index = server.index("test");
-    let (_response, code) = index.create(Some("primary")).await;
+    let (_response, code) = index.create(Some("id")).await;
 
-    assert_eq!(code, 201);
+    assert_eq!(code, 202);
 
-    let (_update, code) = index.update(Some("primary2")).await;
-    assert_eq!(code, 400);
+    let documents = json!([
+        {
+            "id": "11",
+            "content": "foobar"
+        }
+    ]);
+    index.add_documents(documents, None).await;
+
+    let (_, code) = index.update(Some("primary")).await;
+
+    assert_eq!(code, 202);
+
+    let response = index.wait_task(2).await;
+
+    let expected_response = json!({
+        "message": "Index already has a primary key: `id`.",
+        "code": "index_primary_key_already_exists",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists"
+    });
+
+    assert_eq!(response["error"], expected_response);
 }
 
-// TODO: partial test since we are testing error, amd error is not yet fully implemented in
-// transplant
 #[actix_rt::test]
-async fn test_unexisting_index() {
+async fn error_update_unexisting_index() {
     let server = Server::new().await;
-    let (_response, code) = server.index("test").update(None).await;
-    assert_eq!(code, 404);
+    let (_, code) = server.index("test").update(None).await;
+
+    assert_eq!(code, 202);
+
+    let response = server.index("test").wait_task(0).await;
+
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
+    assert_eq!(response["error"], expected_response);
 }
@@ -1,11 +1,13 @@
+mod auth;
 mod common;
+mod dashboard;
 mod documents;
 mod index;
 mod search;
 mod settings;
 mod snapshot;
 mod stats;
-mod updates;
+mod tasks;
 
 // Tests are isolated by features in different modules to allow better readability, test
 // targetability, and improved incremental compilation times.
@@ -1,15 +1,24 @@
 use crate::common::Server;
 use serde_json::json;
 
+use super::DOCUMENTS;
+
 #[actix_rt::test]
 async fn search_unexisting_index() {
     let server = Server::new().await;
     let index = server.index("test");
+
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
     index
         .search(json!({"q": "hello"}), |response, code| {
-            assert_eq!(code, 404, "{}", response);
-            assert_eq!(response["errorCode"], "index_not_found");
+            assert_eq!(code, 404);
+            assert_eq!(response, expected_response);
         })
         .await;
 }
@@ -22,7 +31,421 @@ async fn search_unexisting_parameter() {
     index
         .search(json!({"marin": "hello"}), |response, code| {
             assert_eq!(code, 400, "{}", response);
-            assert_eq!(response["errorCode"], "bad_request");
+            assert_eq!(response["code"], "bad_request");
         })
         .await;
 }
+
+#[actix_rt::test]
+async fn filter_invalid_syntax_object() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"filterableAttributes": ["title"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO` or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
+        "code": "invalid_filter",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_filter"
+    });
+    index
+        .search(json!({"filter": "title & Glass"}), |response, code| {
+            assert_eq!(response, expected_response);
+            assert_eq!(code, 400);
+        })
+        .await;
+}
+
+#[actix_rt::test]
+async fn filter_invalid_syntax_array() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"filterableAttributes": ["title"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO` or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
+        "code": "invalid_filter",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_filter"
+    });
+    index
+        .search(json!({"filter": [["title & Glass"]]}), |response, code| {
+            assert_eq!(response, expected_response);
+            assert_eq!(code, 400);
+        })
+        .await;
+}
+
+#[actix_rt::test]
+async fn filter_invalid_syntax_string() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"filterableAttributes": ["title"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "Found unexpected characters at the end of the filter: `XOR title = Glass`. You probably forgot an `OR` or an `AND` rule.\n15:32 title = Glass XOR title = Glass",
+        "code": "invalid_filter",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_filter"
+    });
+    index
+        .search(
+            json!({"filter": "title = Glass XOR title = Glass"}),
+            |response, code| {
+                assert_eq!(response, expected_response);
+                assert_eq!(code, 400);
+            },
+        )
+        .await;
+}
+
+#[actix_rt::test]
+async fn filter_invalid_attribute_array() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"filterableAttributes": ["title"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass",
+        "code": "invalid_filter",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_filter"
+    });
+    index
+        .search(json!({"filter": [["many = Glass"]]}), |response, code| {
+            assert_eq!(response, expected_response);
+            assert_eq!(code, 400);
+        })
+        .await;
+}
+
+#[actix_rt::test]
+async fn filter_invalid_attribute_string() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"filterableAttributes": ["title"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass",
+        "code": "invalid_filter",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_filter"
+    });
+    index
+        .search(json!({"filter": "many = Glass"}), |response, code| {
+            assert_eq!(response, expected_response);
+            assert_eq!(code, 400);
+        })
+        .await;
+}
+
+#[actix_rt::test]
+async fn filter_reserved_geo_attribute_array() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"filterableAttributes": ["title"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass",
+        "code": "invalid_filter",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_filter"
+    });
+    index
+        .search(json!({"filter": [["_geo = Glass"]]}), |response, code| {
+            assert_eq!(response, expected_response);
+            assert_eq!(code, 400);
+        })
+        .await;
+}
+
+#[actix_rt::test]
+async fn filter_reserved_geo_attribute_string() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"filterableAttributes": ["title"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass",
+        "code": "invalid_filter",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_filter"
+    });
+    index
+        .search(json!({"filter": "_geo = Glass"}), |response, code| {
+            assert_eq!(response, expected_response);
+            assert_eq!(code, 400);
+        })
+        .await;
+}
+
+#[actix_rt::test]
+async fn filter_reserved_attribute_array() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"filterableAttributes": ["title"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.\n1:13 _geoDistance = Glass",
+        "code": "invalid_filter",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_filter"
+    });
+    index
+        .search(
+            json!({"filter": [["_geoDistance = Glass"]]}),
+            |response, code| {
+                assert_eq!(response, expected_response);
+                assert_eq!(code, 400);
+            },
+        )
+        .await;
+}
+
+#[actix_rt::test]
+async fn filter_reserved_attribute_string() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"filterableAttributes": ["title"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.\n1:13 _geoDistance = Glass",
+        "code": "invalid_filter",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_filter"
+    });
+    index
+        .search(
+            json!({"filter": "_geoDistance = Glass"}),
+            |response, code| {
+                assert_eq!(response, expected_response);
+                assert_eq!(code, 400);
+            },
+        )
+        .await;
+}
+
+#[actix_rt::test]
+async fn sort_geo_reserved_attribute() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"sortableAttributes": ["id"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "`_geo` is a reserved keyword and thus can't be used as a sort expression. Use the _geoPoint(latitude, longitude) built-in rule to sort on _geo field coordinates.",
+        "code": "invalid_sort",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_sort"
+    });
+    index
+        .search(
+            json!({
+                "sort": ["_geo:asc"]
+            }),
+            |response, code| {
+                assert_eq!(response, expected_response);
+                assert_eq!(code, 400);
+            },
+        )
+        .await;
+}
+
+#[actix_rt::test]
+async fn sort_reserved_attribute() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"sortableAttributes": ["id"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "`_geoDistance` is a reserved keyword and thus can't be used as a sort expression.",
+        "code": "invalid_sort",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_sort"
+    });
+    index
+        .search(
+            json!({
+                "sort": ["_geoDistance:asc"]
+            }),
+            |response, code| {
+                assert_eq!(response, expected_response);
+                assert_eq!(code, 400);
+            },
+        )
+        .await;
+}
+
+#[actix_rt::test]
+async fn sort_unsortable_attribute() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"sortableAttributes": ["id"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "Attribute `title` is not sortable. Available sortable attributes are: `id`.",
+        "code": "invalid_sort",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_sort"
+    });
+    index
+        .search(
+            json!({
+                "sort": ["title:asc"]
+            }),
+            |response, code| {
+                assert_eq!(response, expected_response);
+                assert_eq!(code, 400);
+            },
+        )
+        .await;
+}
+
+#[actix_rt::test]
+async fn sort_invalid_syntax() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({"sortableAttributes": ["id"]}))
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "Invalid syntax for the sort parameter: expected expression ending by `:asc` or `:desc`, found `title`.",
+        "code": "invalid_sort",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_sort"
+    });
+    index
+        .search(
+            json!({
+                "sort": ["title"]
+            }),
+            |response, code| {
+                assert_eq!(response, expected_response);
+                assert_eq!(code, 400);
+            },
+        )
+        .await;
+}
+
+#[actix_rt::test]
+async fn sort_unset_ranking_rule() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(
+            json!({"sortableAttributes": ["title"], "rankingRules": ["proximity", "exactness"]}),
+        )
+        .await;
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    let expected_response = json!({
+        "message": "The sort ranking rule must be specified in the ranking rules settings to use the sort parameter at search time.",
+        "code": "invalid_sort",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_sort"
+    });
+    index
+        .search(
+            json!({
+                "sort": ["title:asc"]
+            }),
+            |response, code| {
+                assert_eq!(response, expected_response);
+                assert_eq!(code, 400);
+            },
+        )
+        .await;
+}
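Read together, the expected messages above pin down the filter and sort grammar: comparisons (`=`, `!=`, `>=`, `>`, `<=`, `<`, `TO`, `_geoRadius`) combined with `AND`/`OR` (there is no `XOR`), `_geo`/`_geoDistance` reserved, and sorting allowed only on declared `sortableAttributes` with a `field:asc|desc` suffix while the `sort` ranking rule is present. A request body that satisfies all of those constraints might look like this sketch (values are illustrative; `title` and `id` mirror the settings used in the tests):

```rust
use serde_json::json;

fn main() {
    // A search body the grammar above accepts: an OR of two comparisons
    // on a filterable field, plus a sort on a declared sortable field.
    let body = json!({
        "q": "glass",
        "filter": "title = Glass OR title = Pride",
        "sort": ["id:asc"]
    });
    println!("{}", body);
}
```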
@@ -37,7 +37,7 @@ async fn simple_placeholder_search() {
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
 
     index
         .search(json!({}), |response, code| {
@@ -54,7 +54,7 @@ async fn simple_search() {
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
 
     index
         .search(json!({"q": "glass"}), |response, code| {
@@ -71,7 +71,7 @@ async fn search_multiple_params() {
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
 
     index
         .search(
@@ -101,7 +101,7 @@ async fn search_with_filter_string_notation() {
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
 
     index
         .search(
@@ -127,7 +127,7 @@ async fn search_with_filter_array_notation() {
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
 
     let (response, code) = index
         .search_post(json!({
@@ -157,7 +157,7 @@ async fn search_with_sort_on_numbers() {
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
 
     index
         .search(
@@ -183,7 +183,7 @@ async fn search_with_sort_on_strings() {
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
 
     index
         .search(
@@ -209,7 +209,7 @@ async fn search_with_multiple_sort() {
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
 
     let (response, code) = index
         .search_post(json!({
@@ -231,7 +231,7 @@ async fn search_facet_distribution() {
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
 
     index
         .search(
@@ -259,7 +259,7 @@ async fn displayed_attributes() {
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
 
     let (response, code) = index
         .search_post(json!({ "attributesToRetrieve": ["title", "id"] }))
@@ -9,7 +9,7 @@ async fn set_and_reset_distinct_attribute() {
     let (_response, _code) = index
         .update_settings(json!({ "distinctAttribute": "test"}))
         .await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
 
     let (response, _) = index.settings().await;
 
@@ -19,7 +19,7 @@ async fn set_and_reset_distinct_attribute() {
         .update_settings(json!({ "distinctAttribute": null }))
         .await;
 
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
 
     let (response, _) = index.settings().await;
 
@@ -32,7 +32,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
     let index = server.index("test");
 
     let (_response, _code) = index.update_distinct_attribute(json!("test")).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
 
     let (response, _) = index.get_distinct_attribute().await;
 
@@ -40,7 +40,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
 
     index.update_distinct_attribute(json!(null)).await;
 
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
 
     let (response, _) = index.get_distinct_attribute().await;
 
@@ -39,6 +39,7 @@ async fn get_settings() {
     let server = Server::new().await;
     let index = server.index("test");
     index.create(None).await;
+    index.wait_task(0).await;
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     let settings = response.as_object().unwrap();
@@ -63,7 +64,7 @@ async fn get_settings() {
 }

 #[actix_rt::test]
-async fn update_settings_unknown_field() {
+async fn error_update_settings_unknown_field() {
     let server = Server::new().await;
     let index = server.index("test");
     let (_response, code) = index.update_settings(json!({"foo": 12})).await;
@@ -77,7 +78,7 @@ async fn test_partial_update() {
     let (_response, _code) = index
         .update_settings(json!({"displayedAttributes": ["foo"]}))
         .await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     assert_eq!(response["displayedAttributes"], json!(["foo"]));
@@ -86,7 +87,7 @@ async fn test_partial_update() {
     let (_response, _) = index
         .update_settings(json!({"searchableAttributes": ["bar"]}))
         .await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
@@ -95,11 +96,15 @@ async fn test_partial_update() {
 }

 #[actix_rt::test]
-async fn delete_settings_unexisting_index() {
+async fn error_delete_settings_unexisting_index() {
     let server = Server::new().await;
     let index = server.index("test");
     let (_response, code) = index.delete_settings().await;
-    assert_eq!(code, 404);
+    assert_eq!(code, 202);
+
+    let response = index.wait_task(0).await;
+
+    assert_eq!(response["status"], "failed");
 }

 #[actix_rt::test]
@@ -117,13 +122,13 @@ async fn reset_all_settings() {

     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    assert_eq!(response["updateId"], 0);
-    index.wait_update_id(0).await;
+    assert_eq!(response["uid"], 0);
+    index.wait_task(0).await;

     index
         .update_settings(json!({"displayedAttributes": ["name", "age"], "searchableAttributes": ["name"], "stopWords": ["the"], "filterableAttributes": ["age"], "synonyms": {"puppy": ["dog", "doggo", "potat"] }}))
         .await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     assert_eq!(response["displayedAttributes"], json!(["name", "age"]));
@@ -136,7 +141,7 @@ async fn reset_all_settings() {
     assert_eq!(response["filterableAttributes"], json!(["age"]));

     index.delete_settings().await;
-    index.wait_update_id(2).await;
+    index.wait_task(2).await;

     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
@@ -157,18 +162,29 @@ async fn update_setting_unexisting_index() {
     let index = server.index("test");
     let (_response, code) = index.update_settings(json!({})).await;
     assert_eq!(code, 202);
+    let response = index.wait_task(0).await;
+    assert_eq!(response["status"], "succeeded");
     let (_response, code) = index.get().await;
     assert_eq!(code, 200);
-    let (_response, code) = index.delete_settings().await;
-    assert_eq!(code, 202);
+    index.delete_settings().await;
+    let response = index.wait_task(1).await;
+    assert_eq!(response["status"], "succeeded");
 }

 #[actix_rt::test]
-async fn update_setting_unexisting_index_invalid_uid() {
+async fn error_update_setting_unexisting_index_invalid_uid() {
     let server = Server::new().await;
     let index = server.index("test##! ");
     let (response, code) = index.update_settings(json!({})).await;
-    assert_eq!(code, 400, "{}", response);
+    assert_eq!(code, 400);
+
+    let expected = json!({
+        "message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+        "code": "invalid_index_uid",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"});
+
+    assert_eq!(response, expected);
 }

 macro_rules! test_setting_routes {
@@ -200,6 +216,7 @@ macro_rules! test_setting_routes {
                 .collect::<String>());
             let (response, code) = server.service.post(url, serde_json::Value::Null).await;
             assert_eq!(code, 202, "{}", response);
+            server.index("").wait_task(0).await;
             let (response, code) = server.index("test").get().await;
             assert_eq!(code, 200, "{}", response);
         }
@@ -212,8 +229,10 @@ macro_rules! test_setting_routes {
                 .chars()
                 .map(|c| if c == '_' { '-' } else { c })
                 .collect::<String>());
-            let (response, code) = server.service.delete(url).await;
-            assert_eq!(code, 404, "{}", response);
+            let (_, code) = server.service.delete(url).await;
+            assert_eq!(code, 202);
+            let response = server.index("").wait_task(0).await;
+            assert_eq!(response["status"], "failed");
         }

         #[actix_rt::test]
@@ -221,7 +240,8 @@ macro_rules! test_setting_routes {
             let server = Server::new().await;
             let index = server.index("test");
             let (response, code) = index.create(None).await;
-            assert_eq!(code, 201, "{}", response);
+            assert_eq!(code, 202, "{}", response);
+            index.wait_task(0).await;
             let url = format!("/indexes/test/settings/{}",
                 stringify!($setting)
                 .chars()
@@ -246,3 +266,49 @@ test_setting_routes!(
     ranking_rules,
     synonyms
 );
+
+#[actix_rt::test]
+async fn error_set_invalid_ranking_rules() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+
+    let (_response, _code) = index
+        .update_settings(json!({ "rankingRules": [ "manyTheFish"]}))
+        .await;
+    index.wait_task(1).await;
+    let (response, code) = index.get_task(1).await;
+
+    assert_eq!(code, 200);
+    assert_eq!(response["status"], "failed");
+
+    let expected_error = json!({
+        "message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are Words, Typo, Sort, Proximity, Attribute, Exactness and custom ranking rules."#,
+        "code": "invalid_ranking_rule",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_ranking_rule"
+    });
+
+    assert_eq!(response["error"], expected_error);
+}
+
+#[actix_rt::test]
+async fn set_and_reset_distinct_attribute_with_dedicated_route() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let (_response, _code) = index.update_distinct_attribute(json!("test")).await;
+    index.wait_task(0).await;
+
+    let (response, _) = index.get_distinct_attribute().await;
+
+    assert_eq!(response, "test");
+
+    index.update_distinct_attribute(json!(null)).await;
+
+    index.wait_task(1).await;
+
+    let (response, _) = index.get_distinct_attribute().await;
+
+    assert_eq!(response, json!(null));
+}
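The `error_update_setting_unexisting_index_invalid_uid` test above pins down the index-uid rule quoted in the error message: an integer, or a string of alphanumeric characters, hyphens, and underscores. A minimal stand-alone checker for that rule, written here purely as an illustration (this is not Meilisearch's actual implementation, and it assumes "alphanumeric" means ASCII), might be:

    fn is_valid_index_uid(uid: &str) -> bool {
        // integers are alphanumeric strings, so one check covers both cases
        uid.chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
    }

    assert!(is_valid_index_uid("movies_2022"));
    assert!(!is_valid_index_uid("test##! "));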
@@ -7,6 +7,28 @@ use tokio::time::sleep;

 use meilisearch_http::Opt;

+macro_rules! verify_snapshot {
+    (
+        $orig:expr,
+        $snapshot: expr,
+        |$server:ident| =>
+        $($e:expr,)+) => {
+        use std::sync::Arc;
+        let snapshot = Arc::new($snapshot);
+        let orig = Arc::new($orig);
+        $(
+            {
+                let test = |$server: Arc<Server>| async move {
+                    $e.await
+                };
+                let (snapshot, _) = test(snapshot.clone()).await;
+                let (orig, _) = test(orig.clone()).await;
+                assert_eq!(snapshot, orig);
+            }
+        )*
+    };
+}
+
 #[actix_rt::test]
 async fn perform_snapshot() {
     let temp = tempfile::tempdir().unwrap();
@@ -20,33 +42,42 @@ async fn perform_snapshot() {
     };

     let server = Server::new_with_options(options).await;

     let index = server.index("test");
+    index
+        .update_settings(serde_json::json! ({
+            "searchableAttributes": [],
+        }))
+        .await;
+
     index.load_test_set().await;

-    let (response, _) = index
-        .get_all_documents(GetAllDocumentsOptions::default())
-        .await;
+    server.index("test1").create(Some("prim")).await;
+
+    index.wait_task(2).await;

     sleep(Duration::from_secs(2)).await;

     let temp = tempfile::tempdir().unwrap();

-    let snapshot_path = snapshot_dir
-        .path()
-        .to_owned()
-        .join("db.snapshot".to_string());
+    let snapshot_path = snapshot_dir.path().to_owned().join("db.snapshot");

     let options = Opt {
         import_snapshot: Some(snapshot_path),
         ..default_settings(temp.path())
     };

-    let server = Server::new_with_options(options).await;
-    let index = server.index("test");
-
-    let (response_from_snapshot, _) = index
-        .get_all_documents(GetAllDocumentsOptions::default())
-        .await;
-
-    assert_eq!(response, response_from_snapshot);
+    let snapshot_server = Server::new_with_options(options).await;
+
+    verify_snapshot!(server, snapshot_server, |server| =>
+        server.list_indexes(),
+        // for some reason the db sizes differ. this may be due to the compaction options we have
+        // set when performing the snapshot
+        //server.stats(),
+        server.tasks(),
+        server.index("test").get_all_documents(GetAllDocumentsOptions::default()),
+        server.index("test").settings(),
+        server.index("test1").get_all_documents(GetAllDocumentsOptions::default()),
+        server.index("test1").settings(),
+    );
 }
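The `verify_snapshot!` macro added above replaces the single hand-written document comparison: it wraps both servers in an `Arc`, runs each listed request closure against the original server and the one restored from the snapshot, and asserts the responses are equal. Expanded by hand for a single arm such as `server.list_indexes()`, it is equivalent to:

    // one expansion step of verify_snapshot!, taken from the macro body above
    let test = |server: Arc<Server>| async move { server.list_indexes().await };
    let (snapshot, _) = test(snapshot.clone()).await;
    let (orig, _) = test(orig.clone()).await;
    assert_eq!(snapshot, orig);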
@@ -1,4 +1,5 @@
 use serde_json::json;
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};

 use crate::common::Server;

@@ -28,7 +29,8 @@ async fn stats() {
     let index = server.index("test");
     let (_, code) = index.create(Some("id")).await;

-    assert_eq!(code, 201);
+    assert_eq!(code, 202);
+    index.wait_task(0).await;

     let (response, code) = server.stats().await;

@@ -52,16 +54,19 @@ async fn stats() {

     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202, "{}", response);
-    assert_eq!(response["updateId"], 0);
+    assert_eq!(response["uid"], 1);

-    let response = index.wait_update_id(0).await;
-    println!("response: {}", response);
+    index.wait_task(1).await;

+    let timestamp = OffsetDateTime::now_utc();
     let (response, code) = server.stats().await;

     assert_eq!(code, 200);
     assert!(response["databaseSize"].as_u64().unwrap() > 0);
-    assert!(response.get("lastUpdate").is_some());
+    let last_update =
+        OffsetDateTime::parse(response["lastUpdate"].as_str().unwrap(), &Rfc3339).unwrap();
+    assert!(last_update - timestamp < time::Duration::SECOND);

     assert_eq!(response["indexes"]["test"]["numberOfDocuments"], 2);
     assert!(response["indexes"]["test"]["isIndexing"] == false);
     assert_eq!(response["indexes"]["test"]["fieldDistribution"]["id"], 2);
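The stats test now validates `lastUpdate` as a real RFC 3339 timestamp instead of merely checking the field exists, using the `time` crate that replaces `chrono` throughout this changeset. The parsing API used above works like this (the literal timestamp is just an example value):

    use time::{format_description::well_known::Rfc3339, OffsetDateTime};

    // parse an RFC 3339 string into an OffsetDateTime, as the test does
    let ts = OffsetDateTime::parse("2022-02-15T10:00:00Z", &Rfc3339).unwrap();
    assert_eq!(ts.year(), 2022);
    // subtracting two OffsetDateTimes yields a time::Duration
    assert!(OffsetDateTime::now_utc() - ts > time::Duration::SECOND);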
meilisearch-http/tests/tasks/mod.rs (new file, 135 lines)
@@ -0,0 +1,135 @@
+use crate::common::Server;
+use serde_json::json;
+use time::format_description::well_known::Rfc3339;
+use time::OffsetDateTime;
+
+#[actix_rt::test]
+async fn error_get_task_unexisting_index() {
+    let server = Server::new().await;
+    let (response, code) = server.service.get("/indexes/test/tasks").await;
+
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
+    assert_eq!(response, expected_response);
+    assert_eq!(code, 404);
+}
+
+#[actix_rt::test]
+async fn error_get_unexisting_task_status() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    let (response, code) = index.get_task(1).await;
+
+    let expected_response = json!({
+        "message": "Task `1` not found.",
+        "code": "task_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#task_not_found"
+    });
+
+    assert_eq!(response, expected_response);
+    assert_eq!(code, 404);
+}
+
+#[actix_rt::test]
+async fn get_task_status() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index
+        .add_documents(
+            serde_json::json!([{
+                "id": 1,
+                "content": "foobar",
+            }]),
+            None,
+        )
+        .await;
+    index.wait_task(0).await;
+    let (_response, code) = index.get_task(1).await;
+    assert_eq!(code, 200);
+    // TODO check resonse format, as per #48
+}
+
+#[actix_rt::test]
+async fn error_list_tasks_unexisting_index() {
+    let server = Server::new().await;
+    let (response, code) = server.index("test").list_tasks().await;
+
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
+    assert_eq!(response, expected_response);
+    assert_eq!(code, 404);
+}
+
+#[actix_rt::test]
+async fn list_tasks() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    index
+        .add_documents(
+            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
+            None,
+        )
+        .await;
+    let (response, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+}
+
+macro_rules! assert_valid_summarized_task {
+    ($response:expr, $task_type:literal, $index:literal) => {{
+        assert_eq!($response.as_object().unwrap().len(), 5);
+        assert!($response["uid"].as_u64().is_some());
+        assert_eq!($response["indexUid"], $index);
+        assert_eq!($response["status"], "enqueued");
+        assert_eq!($response["type"], $task_type);
+        let date = $response["enqueuedAt"].as_str().expect("missing date");
+
+        OffsetDateTime::parse(date, &Rfc3339).unwrap();
+    }};
+}
+
+#[actix_web::test]
+async fn test_summarized_task_view() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let (response, _) = index.create(None).await;
+    assert_valid_summarized_task!(response, "indexCreation", "test");
+
+    let (response, _) = index.update(None).await;
+    assert_valid_summarized_task!(response, "indexUpdate", "test");
+
+    let (response, _) = index.update_settings(json!({})).await;
+    assert_valid_summarized_task!(response, "settingsUpdate", "test");
+
+    let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
+    assert_valid_summarized_task!(response, "documentPartial", "test");
+
+    let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
+    assert_valid_summarized_task!(response, "documentAddition", "test");
+
+    let (response, _) = index.delete_document(1).await;
+    assert_valid_summarized_task!(response, "documentDeletion", "test");
+
+    let (response, _) = index.clear_all_documents().await;
+    assert_valid_summarized_task!(response, "clearAll", "test");
+
+    let (response, _) = index.delete().await;
+    assert_valid_summarized_task!(response, "indexDeletion", "test");
+}
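Per `assert_valid_summarized_task!` above, the summarized task body returned by every write route carries exactly five fields: `uid`, `indexUid`, `status`, `type`, and `enqueuedAt`. An illustrative payload matching those assertions (the values themselves are invented) would be:

    use serde_json::json;

    // sample summarized task, shaped to satisfy assert_valid_summarized_task!
    let summarized = json!({
        "uid": 0,
        "indexUid": "test",
        "status": "enqueued",
        "type": "indexCreation",
        "enqueuedAt": "2022-02-15T10:00:00Z"
    });
    assert_eq!(summarized.as_object().unwrap().len(), 5);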
@@ -1,69 +0,0 @@
-use crate::common::Server;
-
-#[actix_rt::test]
-async fn get_update_unexisting_index() {
-    let server = Server::new().await;
-    let (_response, code) = server.index("test").get_update(0).await;
-    assert_eq!(code, 404);
-}
-
-#[actix_rt::test]
-async fn get_unexisting_update_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    index.create(None).await;
-    let (_response, code) = index.get_update(0).await;
-    assert_eq!(code, 404);
-}
-
-#[actix_rt::test]
-async fn get_update_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    index.create(None).await;
-    index
-        .add_documents(
-            serde_json::json!([{
-                "id": 1,
-                "content": "foobar",
-            }]),
-            None,
-        )
-        .await;
-    let (_response, code) = index.get_update(0).await;
-    assert_eq!(code, 200);
-    // TODO check resonse format, as per #48
-}
-
-#[actix_rt::test]
-async fn list_updates_unexisting_index() {
-    let server = Server::new().await;
-    let (_response, code) = server.index("test").list_updates().await;
-    assert_eq!(code, 404);
-}
-
-#[actix_rt::test]
-async fn list_no_updates() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    index.create(None).await;
-    let (response, code) = index.list_updates().await;
-    assert_eq!(code, 200);
-    assert!(response.as_array().unwrap().is_empty());
-}
-
-#[actix_rt::test]
-async fn list_updates() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    index.create(None).await;
-    index
-        .add_documents(
-            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
-            None,
-        )
-        .await;
-    let (response, code) = index.list_updates().await;
-    assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 1);
-}
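The file deleted above is the v0.23 update-status test suite; its coverage moved to the new tasks suite earlier in this diff. Schematically, the helper calls map one-to-one (signatures assumed from the tests on both sides):

    // v0.23 update API (removed above)
    let (_response, code) = server.index("test").get_update(0).await;
    // v0.26 task API (tests/tasks/mod.rs above)
    let (_response, code) = server.index("test").get_task(0).await;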
meilisearch-lib/Cargo.toml
@@ -1,20 +1,17 @@
 [package]
 name = "meilisearch-lib"
-version = "0.23.0"
-edition = "2018"
+version = "0.26.1"
+edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-actix-web = { version = "4.0.0-beta.9", features = ["rustls"] }
-actix-web-static-files = { git = "https://github.com/MarinPostma/actix-web-static-files.git", rev = "39d8006", optional = true }
+actix-web = { version = "4", default-features = false }
 anyhow = { version = "1.0.43", features = ["backtrace"] }
 async-stream = "0.3.2"
 async-trait = "0.1.51"
-arc-swap = "1.3.2"
 byte-unit = { version = "4.0.12", default-features = false, features = ["std"] }
 bytes = "1.1.0"
-chrono = { version = "0.4.19", features = ["serde"] }
 csv = "1.1.6"
 crossbeam-channel = "0.5.1"
 either = "1.6.1"
@@ -29,9 +26,8 @@ itertools = "0.10.1"
 lazy_static = "1.4.0"
 log = "0.4.14"
 meilisearch-error = { path = "../meilisearch-error" }
-meilisearch-tokenizer = { git = "https://github.com/meilisearch/tokenizer.git", tag = "v0.2.5" }
-memmap = "0.7.0"
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.17.2" }
+meilisearch-auth = { path = "../meilisearch-auth" }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.22.2" }
 mime = "0.3.16"
 num_cpus = "1.13.0"
 once_cell = "1.8.0"
@@ -44,10 +40,11 @@ serde = { version = "1.0.130", features = ["derive"] }
 serde_json = { version = "1.0.67", features = ["preserve_order"] }
 siphasher = "0.3.7"
 slice-group-by = "0.2.6"
-structopt = "0.3.23"
+clap = { version = "3.0", features = ["derive", "env"] }
 tar = "0.4.37"
 tempfile = "3.2.0"
 thiserror = "1.0.28"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tokio = { version = "1.11.0", features = ["full"] }
 uuid = { version = "0.8.2", features = ["serde"] }
 walkdir = "2.3.2"
@@ -57,8 +54,14 @@ whoami = { version = "1.1.3", optional = true }
 reqwest = { version = "0.11.4", features = ["json", "rustls-tls"], default-features = false, optional = true }
 sysinfo = "0.20.2"
 derivative = "2.2.0"
+fs_extra = "1.2.0"
+atomic_refcell = "0.1.8"

 [dev-dependencies]
 actix-rt = "2.2.0"
 mockall = "0.10.2"
 paste = "1.0.5"
+nelson = { git = "https://github.com/MarinPostma/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
+meilisearch-error = { path = "../meilisearch-error", features = ["test-traits"] }
+proptest = "1.0.0"
+proptest-derive = "0.3.0"
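Two migrations are visible in this manifest: `chrono` gives way to `time` 0.3 (with RFC 3339 serde support, as used in the test diffs above) and `structopt` gives way to clap 3's built-in derive. Assuming the option struct previously derived `StructOpt`, the port is roughly the following; the `db_path` field and `MEILI_DB_PATH` variable are illustrative, not quoted from this diff:

    use std::path::PathBuf;

    // Before, with structopt 0.3:
    // #[derive(Debug, StructOpt)]
    // pub struct Opt {
    //     #[structopt(long, env = "MEILI_DB_PATH")]
    //     pub db_path: PathBuf,
    // }

    // After, with clap 3 and its "derive" + "env" features:
    #[derive(Debug, clap::Parser)]
    pub struct Opt {
        #[clap(long, env = "MEILI_DB_PATH")]
        pub db_path: PathBuf,
    }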
meilisearch-lib/proptest-regressions/index_resolver/mod.txt (new file, 19 lines)
@@ -0,0 +1,19 @@
+# Seeds for failure cases proptest has generated in the past. It is
+# automatically read and these particular cases re-run before any
+# novel cases are generated.
+#
+# It is recommended to check this file in to source control so that
+# everyone who runs the test benefits from these saved cases.
+cc 6f3ae3cba934ba3e328e2306218c32f27a46ce2d54a1258b05fef65663208662 # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: DocumentAddition { content_uuid: 37bc137d-2038-47f0-819f-b133233daadc, merge_strategy: ReplaceDocuments, primary_key: None, documents_count: 0 }, events: [] }
+cc b726f7d9f44a9216aad302ddba0f04e7108817e741d656a4759aea8562de4d63 # shrinks to task = Task { id: 0, index_uid: IndexUid("_"), content: IndexDeletion, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
+cc 427ec2dde3260b1ab334207bdc22adef28a5b8532b9902c84b55fd2c017ea7e1 # shrinks to task = Task { id: 0, index_uid: IndexUid("A"), content: IndexDeletion, events: [] }, index_exists = true, index_op_fails = false, any_int = 0
+cc c24f3d42f0f36fbdbf4e9d4327e75529b163ac580d63a5934ca05e9b5bd23a65 # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: IndexDeletion, events: [] }, index_exists = true, index_op_fails = true, any_int = 0
+cc 8084e2410801b997533b0bcbad75cd212873cfc2677f26847f68c568ead1604c # shrinks to task = Task { id: 0, index_uid: IndexUid("A"), content: SettingsUpdate { settings: Settings { displayed_attributes: NotSet, searchable_attributes: NotSet, filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, synonyms: NotSet, distinct_attribute: NotSet, _kind: PhantomData }, is_deletion: false }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
+cc 330085e0200a9a2ddfdd764a03d768aa95c431bcaafbd530c8c949425beed18b # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: CreateIndex { primary_key: None }, events: [] }, index_exists = false, index_op_fails = true, any_int = 0
+cc c70e901576ef2fb9622e814bdecd11e4747cd70d71a9a6ce771b5b7256a187c0 # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: SettingsUpdate { settings: Settings { displayed_attributes: NotSet, searchable_attributes: NotSet, filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, synonyms: NotSet, distinct_attribute: NotSet, _kind: PhantomData }, is_deletion: true }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
+cc 3fe2c38cbc2cca34ecde321472141d386056f0cd332cbf700773657715a382b5 # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: UpdateIndex { primary_key: None }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
+cc c31cf86692968483f1ab08a6a9d4667ccb9635c306998551bf1eb1f135ef0d4b # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: UpdateIndex { primary_key: Some("") }, events: [] }, index_exists = true, index_op_fails = false, any_int = 0
+cc 3a01c78db082434b8a4f8914abf0d1059d39f4426d16df20d72e1bd7ebb94a6a # shrinks to task = Task { id: 0, index_uid: IndexUid("0"), content: UpdateIndex { primary_key: None }, events: [] }, index_exists = true, index_op_fails = true, any_int = 0
+cc c450806df3921d1e6fe9b6af93d999e8196d0175b69b64f1810802582421e94a # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: CreateIndex { primary_key: Some("") }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
+cc fb6b98947cbdbdee05ed3c0bf2923aad2c311edc276253642eb43a0c0ec4888a # shrinks to task = Task { id: 0, index_uid: IndexUid("A"), content: CreateIndex { primary_key: Some("") }, events: [] }, index_exists = false, index_op_fails = true, any_int = 0
+cc 1aa59d8e22484e9915efbb5818e1e1ab684aa61b166dc82130d6221663ba00bf # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: DocumentDeletion(Clear), events: [] }, index_exists = true, index_op_fails = false, any_int = 0

(second new proptest-regressions file; its path is not shown in this capture)
@@ -0,0 +1,7 @@
+# Seeds for failure cases proptest has generated in the past. It is
+# automatically read and these particular cases re-run before any
+# novel cases are generated.
+#
+# It is recommended to check this file in to source control so that
+# everyone who runs the test benefits from these saved cases.
+cc 8cbd6c45ce8c5611ec3f2f94fd485f6a8eeccc470fa426e59bdfd4d9e7fce0e1 # shrinks to bytes = []
meilisearch-lib/src/analytics.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
+use std::{fs, path::Path};
+
+/// Copy the `instance-uid` contained in one db to another. Ignore all errors.
+pub fn copy_user_id(src: &Path, dst: &Path) {
+    if let Ok(user_id) = fs::read_to_string(src.join("instance-uid")) {
+        let _ = fs::write(dst.join("instance-uid"), &user_id);
+    }
+}
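`copy_user_id` preserves the analytics `instance-uid` file when one database directory is turned into another (for example when a dump or snapshot is imported), and deliberately swallows every I/O error so a missing uid never aborts the operation. An in-crate usage sketch, with hypothetical paths:

    use std::path::Path;

    // carry the instance-uid over from the source db to the freshly created one;
    // if either file is absent, this is a silent no-op
    copy_user_id(Path::new("/tmp/old.ms"), Path::new("/tmp/new.ms"));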
@@ -1,10 +1,8 @@
 use std::fmt;
-use std::io::{self, Read, Result as IoResult, Seek, Write};
+use std::io::{self, BufRead, BufReader, BufWriter, Cursor, Read, Seek, Write};

-use csv::{Reader as CsvReader, StringRecordsIntoIter};
-use meilisearch_error::{Code, ErrorCode};
+use meilisearch_error::{internal_error, Code, ErrorCode};
 use milli::documents::DocumentBatchBuilder;
-use serde_json::{Deserializer, Map, Value};

 type Result<T> = std::result::Result<T, DocumentFormatError>;

@@ -27,15 +25,24 @@ impl fmt::Display for PayloadType {

 #[derive(thiserror::Error, Debug)]
 pub enum DocumentFormatError {
-    #[error("Internal error: {0}")]
+    #[error("An internal error has occurred. `{0}`.")]
     Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
-    #[error("{0}. The {1} payload provided is malformed.")]
+    #[error("The `{1}` payload provided is malformed. `{0}`.")]
     MalformedPayload(
         Box<dyn std::error::Error + Send + Sync + 'static>,
         PayloadType,
     ),
 }
+
+impl From<(PayloadType, milli::documents::Error)> for DocumentFormatError {
+    fn from((ty, error): (PayloadType, milli::documents::Error)) -> Self {
+        match error {
+            milli::documents::Error::Io(e) => Self::Internal(Box::new(e)),
+            e => Self::MalformedPayload(Box::new(e), ty),
+        }
+    }
+}

 impl ErrorCode for DocumentFormatError {
     fn error_code(&self) -> Code {
         match self {
@@ -45,330 +52,53 @@ impl ErrorCode for DocumentFormatError {
         }
     }
 }

-internal_error!(DocumentFormatError: milli::documents::Error, io::Error);
+internal_error!(DocumentFormatError: io::Error);

-macro_rules! malformed {
-    ($type:path, $e:expr) => {
-        $e.map_err(|e| DocumentFormatError::MalformedPayload(Box::new(e), $type))
-    };
-}
-
-pub fn read_csv(input: impl Read, writer: impl Write + Seek) -> Result<()> {
-    let mut builder = DocumentBatchBuilder::new(writer).unwrap();
-
-    let iter = CsvDocumentIter::from_reader(input)?;
-    for doc in iter {
-        let doc = doc?;
-        builder.add_documents(doc).unwrap();
-    }
-    builder.finish().unwrap();
-
-    Ok(())
-}
-
-/// read jsonl from input and write an obkv batch to writer.
-pub fn read_ndjson(input: impl Read, writer: impl Write + Seek) -> Result<()> {
-    let mut builder = DocumentBatchBuilder::new(writer)?;
-    let stream = Deserializer::from_reader(input).into_iter::<Map<String, Value>>();
-
-    for value in stream {
-        let value = malformed!(PayloadType::Ndjson, value)?;
-        builder.add_documents(&value)?;
-    }
-
-    builder.finish()?;
-
-    Ok(())
-}
-
-/// read json from input and write an obkv batch to writer.
-pub fn read_json(input: impl Read, writer: impl Write + Seek) -> Result<()> {
-    let mut builder = DocumentBatchBuilder::new(writer).unwrap();
-
-    let documents: Vec<Map<String, Value>> =
-        malformed!(PayloadType::Json, serde_json::from_reader(input))?;
-    builder.add_documents(documents).unwrap();
-    builder.finish().unwrap();
-
-    Ok(())
-}
-
-enum AllowedType {
-    String,
-    Number,
-}
-
-fn parse_csv_header(header: &str) -> (String, AllowedType) {
-    // if there are several separators we only split on the last one.
-    match header.rsplit_once(':') {
-        Some((field_name, field_type)) => match field_type {
-            "string" => (field_name.to_string(), AllowedType::String),
-            "number" => (field_name.to_string(), AllowedType::Number),
-            // if the pattern isn't reconized, we keep the whole field.
-            _otherwise => (header.to_string(), AllowedType::String),
-        },
-        None => (header.to_string(), AllowedType::String),
-    }
-}
-
-pub struct CsvDocumentIter<R>
-where
-    R: Read,
-{
-    documents: StringRecordsIntoIter<R>,
-    headers: Vec<(String, AllowedType)>,
-}
-
-impl<R: Read> CsvDocumentIter<R> {
-    pub fn from_reader(reader: R) -> IoResult<Self> {
-        let mut records = CsvReader::from_reader(reader);
-
-        let headers = records
-            .headers()?
-            .into_iter()
-            .map(parse_csv_header)
-            .collect();
-
-        Ok(Self {
-            documents: records.into_records(),
-            headers,
-        })
-    }
-}
-
-impl<R: Read> Iterator for CsvDocumentIter<R> {
-    type Item = Result<Map<String, Value>>;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        let csv_document = self.documents.next()?;
-
-        match csv_document {
-            Ok(csv_document) => {
-                let mut document = Map::new();
-
-                for ((field_name, field_type), value) in
-                    self.headers.iter().zip(csv_document.into_iter())
-                {
-                    let parsed_value = match field_type {
-                        AllowedType::Number => {
-                            malformed!(PayloadType::Csv, value.parse::<f64>().map(Value::from))
-                        }
-                        AllowedType::String => Ok(Value::String(value.to_string())),
-                    };
-
-                    match parsed_value {
-                        Ok(value) => drop(document.insert(field_name.to_string(), value)),
-                        Err(e) => return Some(Err(e)),
-                    }
-                }
-
-                Some(Ok(document))
-            }
-            Err(e) => Some(Err(DocumentFormatError::MalformedPayload(
-                Box::new(e),
-                PayloadType::Csv,
-            ))),
-        }
-    }
-}
-
-#[cfg(test)]
-mod test {
-    use serde_json::json;
-
-    use super::*;
-
-    #[test]
-    fn simple_csv_document() {
-        let documents = r#"city,country,pop
-"Boston","United States","4628910""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert_eq!(
-            Value::Object(csv_iter.next().unwrap().unwrap()),
-            json!({
-                "city": "Boston",
-                "country": "United States",
-                "pop": "4628910",
-            })
-        );
-    }
-
-    #[test]
-    fn coma_in_field() {
-        let documents = r#"city,country,pop
-"Boston","United, States","4628910""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert_eq!(
-            Value::Object(csv_iter.next().unwrap().unwrap()),
-            json!({
-                "city": "Boston",
-                "country": "United, States",
-                "pop": "4628910",
-            })
-        );
-    }
-
-    #[test]
-    fn quote_in_field() {
-        let documents = r#"city,country,pop
-"Boston","United"" States","4628910""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert_eq!(
-            Value::Object(csv_iter.next().unwrap().unwrap()),
-            json!({
-                "city": "Boston",
-                "country": "United\" States",
-                "pop": "4628910",
-            })
-        );
-    }
-
-    #[test]
-    fn integer_in_field() {
-        let documents = r#"city,country,pop:number
-"Boston","United States","4628910""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert_eq!(
-            Value::Object(csv_iter.next().unwrap().unwrap()),
-            json!({
-                "city": "Boston",
-                "country": "United States",
-                "pop": 4628910.0,
-            })
-        );
-    }
-
-    #[test]
-    fn float_in_field() {
-        let documents = r#"city,country,pop:number
-"Boston","United States","4628910.01""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert_eq!(
-            Value::Object(csv_iter.next().unwrap().unwrap()),
-            json!({
-                "city": "Boston",
-                "country": "United States",
-                "pop": 4628910.01,
-            })
-        );
-    }
-
-    #[test]
-    fn several_colon_in_header() {
-        let documents = r#"city:love:string,country:state,pop
-"Boston","United States","4628910""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert_eq!(
-            Value::Object(csv_iter.next().unwrap().unwrap()),
-            json!({
-                "city:love": "Boston",
-                "country:state": "United States",
-                "pop": "4628910",
-            })
-        );
-    }
-
-    #[test]
-    fn ending_by_colon_in_header() {
-        let documents = r#"city:,country,pop
-"Boston","United States","4628910""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert_eq!(
-            Value::Object(csv_iter.next().unwrap().unwrap()),
-            json!({
-                "city:": "Boston",
-                "country": "United States",
-                "pop": "4628910",
-            })
-        );
-    }
-
-    #[test]
-    fn starting_by_colon_in_header() {
-        let documents = r#":city,country,pop
-"Boston","United States","4628910""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert_eq!(
-            Value::Object(csv_iter.next().unwrap().unwrap()),
-            json!({
-                ":city": "Boston",
-                "country": "United States",
-                "pop": "4628910",
-            })
-        );
-    }
-
-    #[ignore]
-    #[test]
-    fn starting_by_colon_in_header2() {
-        let documents = r#":string,country,pop
-"Boston","United States","4628910""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert!(csv_iter.next().unwrap().is_err());
-    }
-
-    #[test]
-    fn double_colon_in_header() {
-        let documents = r#"city::string,country,pop
-"Boston","United States","4628910""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert_eq!(
-            Value::Object(csv_iter.next().unwrap().unwrap()),
-            json!({
-                "city:": "Boston",
-                "country": "United States",
-                "pop": "4628910",
-            })
-        );
-    }
-
-    #[test]
-    fn bad_type_in_header() {
-        let documents = r#"city,country:number,pop
-"Boston","United States","4628910""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert!(csv_iter.next().unwrap().is_err());
-    }
-
-    #[test]
-    fn bad_column_count1() {
-        let documents = r#"city,country,pop
-"Boston","United States","4628910", "too much""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert!(csv_iter.next().unwrap().is_err());
-    }
-
-    #[test]
-    fn bad_column_count2() {
-        let documents = r#"city,country,pop
-"Boston","United States""#;
-
-        let mut csv_iter = CsvDocumentIter::from_reader(documents.as_bytes()).unwrap();
-
-        assert!(csv_iter.next().unwrap().is_err());
-    }
-}
+/// reads csv from input and write an obkv batch to writer.
+pub fn read_csv(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
+    let writer = BufWriter::new(writer);
+    let builder =
+        DocumentBatchBuilder::from_csv(input, writer).map_err(|e| (PayloadType::Csv, e))?;
+
+    let count = builder.finish().map_err(|e| (PayloadType::Csv, e))?;
+
+    Ok(count)
+}
+
+/// reads jsonl from input and write an obkv batch to writer.
+pub fn read_ndjson(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
+    let mut reader = BufReader::new(input);
+    let writer = BufWriter::new(writer);
+
+    let mut builder = DocumentBatchBuilder::new(writer).map_err(|e| (PayloadType::Ndjson, e))?;
+    let mut buf = String::new();
+
+    while reader.read_line(&mut buf)? > 0 {
+        // skip empty lines
+        if buf == "\n" {
+            buf.clear();
+            continue;
+        }
+        builder
+            .extend_from_json(Cursor::new(&buf.as_bytes()))
+            .map_err(|e| (PayloadType::Ndjson, e))?;
+        buf.clear();
+    }
+
+    let count = builder.finish().map_err(|e| (PayloadType::Ndjson, e))?;
+
+    Ok(count)
+}
+
+/// reads json from input and write an obkv batch to writer.
+pub fn read_json(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
+    let writer = BufWriter::new(writer);
+    let mut builder = DocumentBatchBuilder::new(writer).map_err(|e| (PayloadType::Json, e))?;
+    builder
+        .extend_from_json(input)
+        .map_err(|e| (PayloadType::Json, e))?;
+
+    let count = builder.finish().map_err(|e| (PayloadType::Json, e))?;
+
+    Ok(count)
+}
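The hand-rolled CSV iterator and its test module are replaced by milli's own `DocumentBatchBuilder::from_csv`, and all three readers now return the number of documents they wrote, which lets callers detect an empty payload (the dump import below relies on `Ok(0)`). A usage sketch of the new `read_ndjson`, assuming milli's builder counts one document per JSON object as the code above implies:

    use std::io::Cursor;

    // two documents separated by an empty line, which read_ndjson skips
    let input = Cursor::new("{\"id\": 1}\n\n{\"id\": 2}\n");
    let mut batch = tempfile::tempfile().unwrap();
    let count = read_ndjson(input, &mut batch).unwrap();
    assert_eq!(count, 2);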
@@ -4,18 +4,6 @@ use std::fmt;
 use meilisearch_error::{Code, ErrorCode};
 use milli::UserError;

-macro_rules! internal_error {
-    ($target:ty : $($other:path), *) => {
-        $(
-            impl From<$other> for $target {
-                fn from(other: $other) -> Self {
-                    Self::Internal(Box::new(other))
-                }
-            }
-        )*
-    }
-}
-
 #[derive(Debug)]
 pub struct MilliError<'a>(pub &'a milli::Error);

@@ -36,20 +24,18 @@ impl ErrorCode for MilliError<'_> {
         match error {
             // TODO: wait for spec for new error codes.
             UserError::SerdeJson(_)
-            | UserError::MaxDatabaseSizeReached
-            | UserError::InvalidDocumentId { .. }
-            | UserError::InvalidStoreFile
-            | UserError::NoSpaceLeftOnDevice
-            | UserError::DocumentLimitReached => Code::Internal,
+            | UserError::DocumentLimitReached
+            | UserError::UnknownInternalDocumentId { .. } => Code::Internal,
+            UserError::InvalidStoreFile => Code::InvalidStore,
+            UserError::NoSpaceLeftOnDevice => Code::NoSpaceLeftOnDevice,
+            UserError::MaxDatabaseSizeReached => Code::DatabaseSizeLimitReached,
             UserError::AttributeLimitReached => Code::MaxFieldsLimitExceeded,
             UserError::InvalidFilter(_) => Code::Filter,
-            UserError::InvalidFilterAttribute(_) => Code::Filter,
             UserError::MissingDocumentId { .. } => Code::MissingDocumentId,
+            UserError::InvalidDocumentId { .. } => Code::InvalidDocumentId,
             UserError::MissingPrimaryKey => Code::MissingPrimaryKey,
-            UserError::PrimaryKeyCannotBeChanged => Code::PrimaryKeyAlreadyPresent,
-            UserError::PrimaryKeyCannotBeReset => Code::PrimaryKeyAlreadyPresent,
+            UserError::PrimaryKeyCannotBeChanged(_) => Code::PrimaryKeyAlreadyPresent,
             UserError::SortRankingRuleMissing => Code::Sort,
-            UserError::UnknownInternalDocumentId { .. } => Code::DocumentNotFound,
             UserError::InvalidFacetsDistribution { .. } => Code::BadRequest,
             UserError::InvalidSortableAttribute { .. } => Code::Sort,
             UserError::CriterionError(_) => Code::InvalidRankingRule,
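The `internal_error!` macro deleted here is not gone: other hunks in this changeset now import it from the meilisearch-error crate (`use meilisearch_error::{internal_error, Code, ErrorCode};`), so the definition was simply hoisted into that shared crate. For reference, an invocation like `internal_error!(IndexError: heed::Error)` expands, per the removed macro body, to:

    // expansion of internal_error!(IndexError: heed::Error)
    impl From<heed::Error> for IndexError {
        fn from(other: heed::Error) -> Self {
            Self::Internal(Box::new(other))
        }
    }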
@@ -6,10 +6,10 @@ use anyhow::Context;
 use heed::{EnvOpenOptions, RoTxn};
 use indexmap::IndexMap;
 use milli::documents::DocumentBatchReader;
+use milli::update::{IndexDocumentsConfig, IndexerConfig};
 use serde::{Deserialize, Serialize};

 use crate::document_formats::read_ndjson;
-use crate::index::update_handler::UpdateHandler;
 use crate::index::updates::apply_settings_to_builder;

 use super::error::Result;
@@ -28,9 +28,7 @@ impl Index {
     pub fn dump(&self, path: impl AsRef<Path>) -> Result<()> {
         // acquire write txn make sure any ongoing write is finished before we start.
         let txn = self.env.write_txn()?;
-        let path = path
-            .as_ref()
-            .join(format!("indexes/{}", self.uuid.to_string()));
+        let path = path.as_ref().join(format!("indexes/{}", self.uuid));

         create_dir_all(&path)?;

@@ -87,7 +85,7 @@ impl Index {
         src: impl AsRef<Path>,
         dst: impl AsRef<Path>,
         size: usize,
-        update_handler: &UpdateHandler,
+        indexer_config: &IndexerConfig,
     ) -> anyhow::Result<()> {
         let dir_name = src
             .as_ref()
@@ -112,8 +110,7 @@ impl Index {
         let mut txn = index.write_txn()?;

         // Apply settings first
-        let builder = update_handler.update_builder(0);
-        let mut builder = builder.settings(&mut txn, &index);
+        let mut builder = milli::update::Settings::new(&mut txn, &index, indexer_config);

         if let Some(primary_key) = primary_key {
             builder.set_primary_key(primary_key);
@@ -121,30 +118,40 @@ impl Index {

         apply_settings_to_builder(&settings, &mut builder);

-        builder.execute(|_, _| ())?;
+        builder.execute(|_| ())?;

         let document_file_path = src.as_ref().join(DATA_FILE_NAME);
         let reader = BufReader::new(File::open(&document_file_path)?);

         let mut tmp_doc_file = tempfile::tempfile()?;

-        read_ndjson(reader, &mut tmp_doc_file)?;
+        let empty = match read_ndjson(reader, &mut tmp_doc_file) {
+            // if there was no document in the file it's because the index was empty
+            Ok(0) => true,
+            Ok(_) => false,
+            Err(e) => return Err(e.into()),
+        };

-        tmp_doc_file.seek(SeekFrom::Start(0))?;
-
-        let documents_reader = DocumentBatchReader::from_reader(tmp_doc_file)?;
-
-        //If the document file is empty, we don't perform the document addition, to prevent
-        //a primary key error to be thrown.
-        if !documents_reader.is_empty() {
-            let builder = update_handler
-                .update_builder(0)
-                .index_documents(&mut txn, &index);
-            builder.execute(documents_reader, |_, _| ())?;
+        if !empty {
+            tmp_doc_file.seek(SeekFrom::Start(0))?;
+
+            let documents_reader = DocumentBatchReader::from_reader(tmp_doc_file)?;
+
+            //If the document file is empty, we don't perform the document addition, to prevent
+            //a primary key error to be thrown.
+            let config = IndexDocumentsConfig::default();
+            let mut builder = milli::update::IndexDocuments::new(
+                &mut txn,
+                &index,
+                indexer_config,
+                config,
+                |_| (),
+            );
+            builder.add_documents(documents_reader)?;
+            builder.execute()?;
         }

         txn.commit()?;

         index.prepare_for_closing().wait();

         Ok(())
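The dump import now drives milli's config-based builders directly instead of going through the old `UpdateHandler`. Condensed from the hunk above, the new calling pattern for settings and document indexing is (the `"id"` primary key is only an example value):

    // settings: one builder per write transaction, parameterized by IndexerConfig
    let mut builder = milli::update::Settings::new(&mut txn, &index, indexer_config);
    builder.set_primary_key("id".to_string());
    builder.execute(|_| ())?;

    // documents: feed a DocumentBatchReader, then run the pipeline
    let config = IndexDocumentsConfig::default();
    let mut builder =
        milli::update::IndexDocuments::new(&mut txn, &index, indexer_config, config, |_| ());
    builder.add_documents(documents_reader)?;
    builder.execute()?;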
@@ -1,31 +1,31 @@
|
|||||||
use std::error::Error;
|
use std::error::Error;
|
||||||
|
|
||||||
use meilisearch_error::{Code, ErrorCode};
|
use meilisearch_error::{internal_error, Code, ErrorCode};
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
|
|
||||||
use crate::error::MilliError;
|
use crate::{error::MilliError, update_file_store};
|
||||||
|
|
||||||
pub type Result<T> = std::result::Result<T, IndexError>;
|
pub type Result<T> = std::result::Result<T, IndexError>;
|
||||||
|
|
||||||
#[derive(Debug, thiserror::Error)]
|
#[derive(Debug, thiserror::Error)]
|
||||||
pub enum IndexError {
|
pub enum IndexError {
|
||||||
#[error("Internal error: {0}")]
|
#[error("An internal error has occurred. `{0}`.")]
|
||||||
Internal(Box<dyn Error + Send + Sync + 'static>),
|
Internal(Box<dyn Error + Send + Sync + 'static>),
|
||||||
#[error("Document with id {0} not found.")]
|
#[error("Document `{0}` not found.")]
|
||||||
DocumentNotFound(String),
|
DocumentNotFound(String),
|
||||||
#[error("{0}")]
|
#[error("{0}")]
|
||||||
Facet(#[from] FacetError),
|
Facet(#[from] FacetError),
|
||||||
#[error("{0}")]
|
#[error("{0}")]
|
||||||
Milli(#[from] milli::Error),
|
Milli(#[from] milli::Error),
|
||||||
#[error("A primary key is already present. It's impossible to update it")]
|
|
||||||
ExistingPrimaryKey,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
internal_error!(
|
internal_error!(
|
||||||
IndexError: std::io::Error,
|
IndexError: std::io::Error,
|
||||||
heed::Error,
|
heed::Error,
|
||||||
fst::Error,
|
fst::Error,
|
||||||
serde_json::Error
|
serde_json::Error,
|
||||||
|
update_file_store::UpdateFileStoreError,
|
||||||
|
milli::documents::Error
|
||||||
);
|
);
|
||||||
|
|
||||||
impl ErrorCode for IndexError {
|
impl ErrorCode for IndexError {
|
||||||
@@ -35,21 +35,20 @@ impl ErrorCode for IndexError {
|
|||||||
IndexError::DocumentNotFound(_) => Code::DocumentNotFound,
|
IndexError::DocumentNotFound(_) => Code::DocumentNotFound,
|
||||||
IndexError::Facet(e) => e.error_code(),
|
IndexError::Facet(e) => e.error_code(),
|
||||||
IndexError::Milli(e) => MilliError(e).error_code(),
|
IndexError::Milli(e) => MilliError(e).error_code(),
|
||||||
IndexError::ExistingPrimaryKey => Code::PrimaryKeyAlreadyPresent,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, thiserror::Error)]
|
#[derive(Debug, thiserror::Error)]
|
||||||
pub enum FacetError {
|
pub enum FacetError {
|
||||||
#[error("Invalid facet expression, expected {}, found: {1}", .0.join(", "))]
|
#[error("Invalid syntax for the filter parameter: `expected {}, found: {1}`.", .0.join(", "))]
|
||||||
InvalidExpression(&'static [&'static str], Value),
|
InvalidExpression(&'static [&'static str], Value),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ErrorCode for FacetError {
|
impl ErrorCode for FacetError {
|
||||||
fn error_code(&self) -> Code {
|
fn error_code(&self) -> Code {
|
||||||
match self {
|
match self {
|
||||||
FacetError::InvalidExpression(_, _) => Code::Facet,
|
FacetError::InvalidExpression(_, _) => Code::Filter,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
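Note: the `internal_error!` invocation above folds each listed error type into `IndexError::Internal`. The macro lives in `meilisearch_error`; a plausible expansion for one of the listed types looks like this, where the expansion itself is an assumption and only the call syntax comes from the diff:

    // Assumed expansion: one From impl per listed type, boxing the source
    // error so it lands in the catch-all Internal variant.
    impl From<std::io::Error> for IndexError {
        fn from(error: std::io::Error) -> IndexError {
            IndexError::Internal(Box::new(error))
        }
    }

Adding `update_file_store::UpdateFileStoreError` and `milli::documents::Error` to the list is what lets the new document-update code paths use `?` directly on those operations.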
@@ -5,20 +5,18 @@ use std::ops::Deref;
 use std::path::Path;
 use std::sync::Arc;

-use chrono::{DateTime, Utc};
 use heed::{EnvOpenOptions, RoTxn};
-use milli::update::Setting;
+use milli::update::{IndexerConfig, Setting};
 use milli::{obkv_to_json, FieldDistribution, FieldId};
 use serde::{Deserialize, Serialize};
 use serde_json::{Map, Value};
+use time::OffsetDateTime;
 use uuid::Uuid;

-use crate::index_controller::update_file_store::UpdateFileStore;
 use crate::EnvSizer;

 use super::error::IndexError;
 use super::error::Result;
-use super::update_handler::UpdateHandler;
 use super::{Checked, Settings};

 pub type Document = Map<String, Value>;
@@ -26,8 +24,10 @@ pub type Document = Map<String, Value>;
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct IndexMeta {
-    created_at: DateTime<Utc>,
-    pub updated_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub created_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub updated_at: OffsetDateTime,
     pub primary_key: Option<String>,
 }
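Note: `chrono::DateTime<Utc>` serializes out of the box, but `time::OffsetDateTime` needs an explicit representation, hence the `#[serde(with = "time::serde::rfc3339")]` attributes. A standalone sketch, assuming the `time` crate with its `serde-well-known` feature enabled:

    use serde::{Deserialize, Serialize};
    use time::OffsetDateTime;

    #[derive(Serialize, Deserialize)]
    struct Meta {
        // Round-trips as an RFC 3339 string such as "2022-02-15T10:00:00Z".
        #[serde(with = "time::serde::rfc3339")]
        created_at: OffsetDateTime,
    }

This keeps the JSON wire format of `IndexMeta` in the RFC 3339 shape the chrono-based serializer emitted.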
@@ -69,9 +69,7 @@ pub struct Index {
     #[derivative(Debug = "ignore")]
     pub inner: Arc<milli::Index>,
     #[derivative(Debug = "ignore")]
-    pub update_file_store: Arc<UpdateFileStore>,
-    #[derivative(Debug = "ignore")]
-    pub update_handler: Arc<UpdateHandler>,
+    pub indexer_config: Arc<IndexerConfig>,
 }

 impl Deref for Index {
@@ -86,24 +84,24 @@ impl Index {
     pub fn open(
         path: impl AsRef<Path>,
         size: usize,
-        update_file_store: Arc<UpdateFileStore>,
         uuid: Uuid,
-        update_handler: Arc<UpdateHandler>,
+        update_handler: Arc<IndexerConfig>,
     ) -> Result<Self> {
+        log::debug!("opening index in {}", path.as_ref().display());
         create_dir_all(&path)?;
         let mut options = EnvOpenOptions::new();
         options.map_size(size);
         let inner = Arc::new(milli::Index::new(options, &path)?);
         Ok(Index {
             inner,
-            update_file_store,
             uuid,
-            update_handler,
+            indexer_config: update_handler,
         })
     }

-    pub fn inner(&self) -> &milli::Index {
-        &self.inner
+    /// Asynchronously close the underlying index
+    pub fn close(self) {
+        self.inner.as_ref().clone().prepare_for_closing();
     }

     pub fn stats(&self) -> Result<IndexStats> {
@@ -154,7 +152,7 @@ impl Index {
                 Ok(stop_words.stream().into_strs()?.into_iter().collect())
             })
             .transpose()?
-            .unwrap_or_else(BTreeSet::new);
+            .unwrap_or_default();
         let distinct_field = self.distinct_field(txn)?.map(String::from);

         // in milli each word in the synonyms map were split on their separator. Since we lost
@@ -284,3 +282,17 @@ impl Index {
         Ok(())
     }
 }
+
+/// When running tests, when a server instance is dropped, the environment is not actually closed,
+/// leaving a lot of open file descriptors.
+impl Drop for Index {
+    fn drop(&mut self) {
+        // When dropping the last instance of an index, we want to close the index
+        // Note that the close is actually performed only if all the instances a effectively
+        // dropped
+
+        if Arc::strong_count(&self.inner) == 1 {
+            self.inner.as_ref().clone().prepare_for_closing();
+        }
+    }
+}
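Note: `milli::Index` is backed by an LMDB environment that stays open until `prepare_for_closing` is called, and the handle is shared through an `Arc`, so the `Drop` impl above only triggers the close when it is the last owner. The pattern in isolation, where the `Arc<String>` is a stand-in for `Arc<milli::Index>`:

    use std::sync::Arc;

    struct Handle {
        inner: Arc<String>, // stand-in for the shared index environment
    }

    impl Drop for Handle {
        fn drop(&mut self) {
            // Every handle runs this, but only the final owner sees a
            // strong count of 1 and performs the cleanup.
            if Arc::strong_count(&self.inner) == 1 {
                println!("last handle dropped, closing {}", self.inner);
            }
        }
    }

As the doc comment says, this mostly matters in tests, where dropped server instances would otherwise leak file descriptors.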
@@ -4,8 +4,7 @@ pub use updates::{apply_settings_to_builder, Checked, Facets, Settings, Unchecke
 mod dump;
 pub mod error;
 mod search;
-pub mod update_handler;
-mod updates;
+pub mod updates;

 #[allow(clippy::module_inception)]
 mod index;
@@ -22,234 +21,75 @@ pub use test::MockIndex as Index;
 /// code for unit testing, in places where an index would normally be used.
 #[cfg(test)]
 pub mod test {
-    use std::any::Any;
-    use std::collections::HashMap;
-    use std::panic::{RefUnwindSafe, UnwindSafe};
     use std::path::Path;
     use std::path::PathBuf;
-    use std::sync::atomic::{AtomicBool, Ordering};
-    use std::sync::{Arc, Mutex};
+    use std::sync::Arc;

+    use milli::update::IndexerConfig;
+    use milli::update::{DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsMethod};
+    use nelson::Mocker;
     use serde_json::{Map, Value};
     use uuid::Uuid;

-    use crate::index_controller::update_file_store::UpdateFileStore;
-    use crate::index_controller::updates::status::{Failed, Processed, Processing};
-
     use super::error::Result;
     use super::index::Index;
-    use super::update_handler::UpdateHandler;
     use super::{Checked, IndexMeta, IndexStats, SearchQuery, SearchResult, Settings};
+    use crate::update_file_store::UpdateFileStore;

-    pub struct Stub<A, R> {
-        name: String,
-        times: Mutex<Option<usize>>,
-        stub: Box<dyn Fn(A) -> R + Sync + Send>,
-        invalidated: AtomicBool,
-    }
-
-    impl<A, R> Drop for Stub<A, R> {
-        fn drop(&mut self) {
-            if !self.invalidated.load(Ordering::Relaxed) {
-                let lock = self.times.lock().unwrap();
-                if let Some(n) = *lock {
-                    assert_eq!(n, 0, "{} not called enough times", self.name);
-                }
-            }
-        }
-    }
-
-    impl<A, R> Stub<A, R> {
-        fn invalidate(&self) {
-            self.invalidated.store(true, Ordering::Relaxed);
-        }
-    }
-
-    impl<A: UnwindSafe, R> Stub<A, R> {
-        fn call(&self, args: A) -> R {
-            let mut lock = self.times.lock().unwrap();
-            match *lock {
-                Some(0) => panic!("{} called to many times", self.name),
-                Some(ref mut times) => {
-                    *times -= 1;
-                }
-                None => (),
-            }
-
-            // Since we add assertions in the drop implementation for Stub, a panic can occur in a
-            // panic, causing a hard abort of the program. To handle that, we catch the panic, and
-            // set the stub as invalidated so the assertions aren't run during the drop.
-            impl<'a, A, R> RefUnwindSafe for StubHolder<'a, A, R> {}
-            struct StubHolder<'a, A, R>(&'a (dyn Fn(A) -> R + Sync + Send));
-
-            let stub = StubHolder(self.stub.as_ref());
-
-            match std::panic::catch_unwind(|| (stub.0)(args)) {
-                Ok(r) => r,
-                Err(panic) => {
-                    self.invalidate();
-                    std::panic::resume_unwind(panic);
-                }
-            }
-        }
-    }
-
-    #[derive(Debug, Default)]
-    struct StubStore {
-        inner: Arc<Mutex<HashMap<String, Box<dyn Any + Sync + Send>>>>,
-    }
-
-    impl StubStore {
-        pub fn insert<A: 'static, R: 'static>(&self, name: String, stub: Stub<A, R>) {
-            let mut lock = self.inner.lock().unwrap();
-            lock.insert(name, Box::new(stub));
-        }
-
-        pub fn get<A, B>(&self, name: &str) -> Option<&Stub<A, B>> {
-            let mut lock = self.inner.lock().unwrap();
-            match lock.get_mut(name) {
-                Some(s) => {
-                    let s = s.as_mut() as *mut dyn Any as *mut Stub<A, B>;
-                    Some(unsafe { &mut *s })
-                }
-                None => None,
-            }
-        }
-    }
-
-    pub struct StubBuilder<'a, A, R> {
-        name: String,
-        store: &'a StubStore,
-        times: Option<usize>,
-        _f: std::marker::PhantomData<fn(A) -> R>,
-    }
-
-    impl<'a, A: 'static, R: 'static> StubBuilder<'a, A, R> {
-        /// Asserts the stub has been called exactly `times` times.
-        #[must_use]
-        pub fn times(mut self, times: usize) -> Self {
-            self.times = Some(times);
-            self
-        }
-
-        /// Asserts the stub has been called exactly once.
-        #[must_use]
-        pub fn once(mut self) -> Self {
-            self.times = Some(1);
-            self
-        }
-
-        /// The function that will be called when the stub is called. This needs to be called to
-        /// actually build the stub and register it to the stub store.
-        pub fn then(self, f: impl Fn(A) -> R + Sync + Send + 'static) {
-            let times = Mutex::new(self.times);
-            let stub = Stub {
-                stub: Box::new(f),
-                times,
-                name: self.name.clone(),
-                invalidated: AtomicBool::new(false),
-            };
-
-            self.store.insert(self.name, stub);
-        }
-    }
-
-    /// Mocker allows to stub metod call on any struct. you can register stubs by calling
-    /// `Mocker::when` and retrieve it in the proxy implementation when with `Mocker::get`.
-    #[derive(Debug, Default)]
-    pub struct Mocker {
-        store: StubStore,
-    }
-
-    impl Mocker {
-        pub fn when<A, R>(&self, name: &str) -> StubBuilder<A, R> {
-            StubBuilder {
-                name: name.to_string(),
-                store: &self.store,
-                times: None,
-                _f: std::marker::PhantomData,
-            }
-        }
-
-        pub fn get<A, R>(&self, name: &str) -> &Stub<A, R> {
-            match self.store.get(name) {
-                Some(stub) => stub,
-                None => {
-                    // panic here causes the stubs to get dropped, and panic in turn. To prevent
-                    // that, we forget them, and let them be cleaned by the os later. This is not
-                    // optimal, but is still better than nested panicks.
-                    let mut stubs = self.store.inner.lock().unwrap();
-                    let stubs = std::mem::take(&mut *stubs);
-                    std::mem::forget(stubs);
-                    panic!("unexpected call to {}", name)
-                }
-            }
-        }
-    }
-
-    #[derive(Debug, Clone)]
+    #[derive(Clone)]
     pub enum MockIndex {
-        Vrai(Index),
-        Faux(Arc<Mocker>),
+        Real(Index),
+        Mock(Arc<Mocker>),
     }

     impl MockIndex {
-        pub fn faux(faux: Mocker) -> Self {
-            Self::Faux(Arc::new(faux))
+        pub fn mock(mocker: Mocker) -> Self {
+            Self::Mock(Arc::new(mocker))
         }

         pub fn open(
             path: impl AsRef<Path>,
             size: usize,
-            update_file_store: Arc<UpdateFileStore>,
             uuid: Uuid,
-            update_handler: Arc<UpdateHandler>,
+            update_handler: Arc<IndexerConfig>,
         ) -> Result<Self> {
-            let index = Index::open(path, size, update_file_store, uuid, update_handler)?;
-            Ok(Self::Vrai(index))
+            let index = Index::open(path, size, uuid, update_handler)?;
+            Ok(Self::Real(index))
         }

         pub fn load_dump(
             src: impl AsRef<Path>,
             dst: impl AsRef<Path>,
             size: usize,
-            update_handler: &UpdateHandler,
+            update_handler: &IndexerConfig,
         ) -> anyhow::Result<()> {
-            Index::load_dump(src, dst, size, update_handler)?;
-            Ok(())
-        }
-
-        pub fn handle_update(&self, update: Processing) -> std::result::Result<Processed, Failed> {
-            match self {
-                MockIndex::Vrai(index) => index.handle_update(update),
-                MockIndex::Faux(faux) => faux.get("handle_update").call(update),
-            }
+            Index::load_dump(src, dst, size, update_handler)
         }

         pub fn uuid(&self) -> Uuid {
             match self {
-                MockIndex::Vrai(index) => index.uuid(),
-                MockIndex::Faux(faux) => faux.get("uuid").call(()),
+                MockIndex::Real(index) => index.uuid(),
+                MockIndex::Mock(m) => unsafe { m.get("uuid").call(()) },
             }
         }

         pub fn stats(&self) -> Result<IndexStats> {
             match self {
-                MockIndex::Vrai(index) => index.stats(),
-                MockIndex::Faux(_) => todo!(),
+                MockIndex::Real(index) => index.stats(),
+                MockIndex::Mock(m) => unsafe { m.get("stats").call(()) },
             }
         }

         pub fn meta(&self) -> Result<IndexMeta> {
             match self {
-                MockIndex::Vrai(index) => index.meta(),
-                MockIndex::Faux(_) => todo!(),
+                MockIndex::Real(index) => index.meta(),
+                MockIndex::Mock(_) => todo!(),
             }
         }
         pub fn settings(&self) -> Result<Settings<Checked>> {
             match self {
-                MockIndex::Vrai(index) => index.settings(),
-                MockIndex::Faux(_) => todo!(),
+                MockIndex::Real(index) => index.settings(),
+                MockIndex::Mock(_) => todo!(),
             }
         }

@@ -260,10 +100,10 @@ pub mod test {
             attributes_to_retrieve: Option<Vec<S>>,
         ) -> Result<Vec<Map<String, Value>>> {
             match self {
-                MockIndex::Vrai(index) => {
+                MockIndex::Real(index) => {
                     index.retrieve_documents(offset, limit, attributes_to_retrieve)
                 }
-                MockIndex::Faux(_) => todo!(),
+                MockIndex::Mock(_) => todo!(),
             }
         }

@@ -273,49 +113,90 @@ pub mod test {
             attributes_to_retrieve: Option<Vec<S>>,
         ) -> Result<Map<String, Value>> {
             match self {
-                MockIndex::Vrai(index) => index.retrieve_document(doc_id, attributes_to_retrieve),
-                MockIndex::Faux(_) => todo!(),
+                MockIndex::Real(index) => index.retrieve_document(doc_id, attributes_to_retrieve),
+                MockIndex::Mock(_) => todo!(),
             }
         }

         pub fn size(&self) -> u64 {
             match self {
-                MockIndex::Vrai(index) => index.size(),
-                MockIndex::Faux(_) => todo!(),
+                MockIndex::Real(index) => index.size(),
+                MockIndex::Mock(_) => todo!(),
             }
         }

         pub fn snapshot(&self, path: impl AsRef<Path>) -> Result<()> {
             match self {
-                MockIndex::Vrai(index) => index.snapshot(path),
-                MockIndex::Faux(faux) => faux.get("snapshot").call(path.as_ref()),
+                MockIndex::Real(index) => index.snapshot(path),
+                MockIndex::Mock(m) => unsafe { m.get("snapshot").call(path.as_ref()) },
             }
         }

-        pub fn inner(&self) -> &milli::Index {
+        pub fn close(self) {
             match self {
-                MockIndex::Vrai(index) => index.inner(),
-                MockIndex::Faux(_) => todo!(),
+                MockIndex::Real(index) => index.close(),
+                MockIndex::Mock(m) => unsafe { m.get("close").call(()) },
             }
         }

-        pub fn update_primary_key(&self, primary_key: Option<String>) -> Result<IndexMeta> {
-            match self {
-                MockIndex::Vrai(index) => index.update_primary_key(primary_key),
-                MockIndex::Faux(_) => todo!(),
-            }
-        }
         pub fn perform_search(&self, query: SearchQuery) -> Result<SearchResult> {
             match self {
-                MockIndex::Vrai(index) => index.perform_search(query),
-                MockIndex::Faux(faux) => faux.get("perform_search").call(query),
+                MockIndex::Real(index) => index.perform_search(query),
+                MockIndex::Mock(m) => unsafe { m.get("perform_search").call(query) },
             }
         }

         pub fn dump(&self, path: impl AsRef<Path>) -> Result<()> {
             match self {
-                MockIndex::Vrai(index) => index.dump(path),
-                MockIndex::Faux(faux) => faux.get("dump").call(path.as_ref()),
+                MockIndex::Real(index) => index.dump(path),
+                MockIndex::Mock(m) => unsafe { m.get("dump").call(path.as_ref()) },
+            }
+        }
+
+        pub fn update_documents(
+            &self,
+            method: IndexDocumentsMethod,
+            primary_key: Option<String>,
+            file_store: UpdateFileStore,
+            contents: impl Iterator<Item = Uuid>,
+        ) -> Result<DocumentAdditionResult> {
+            match self {
+                MockIndex::Real(index) => {
+                    index.update_documents(method, primary_key, file_store, contents)
+                }
+                MockIndex::Mock(mocker) => unsafe {
+                    mocker
+                        .get("update_documents")
+                        .call((method, primary_key, file_store, contents))
+                },
+            }
+        }
+
+        pub fn update_settings(&self, settings: &Settings<Checked>) -> Result<()> {
+            match self {
+                MockIndex::Real(index) => index.update_settings(settings),
+                MockIndex::Mock(m) => unsafe { m.get("update_settings").call(settings) },
+            }
+        }
+
+        pub fn update_primary_key(&self, primary_key: String) -> Result<IndexMeta> {
+            match self {
+                MockIndex::Real(index) => index.update_primary_key(primary_key),
+                MockIndex::Mock(m) => unsafe { m.get("update_primary_key").call(primary_key) },
+            }
+        }
+
+        pub fn delete_documents(&self, ids: &[String]) -> Result<DocumentDeletionResult> {
+            match self {
+                MockIndex::Real(index) => index.delete_documents(ids),
+                MockIndex::Mock(m) => unsafe { m.get("delete_documents").call(ids) },
+            }
+        }
+
+        pub fn clear_documents(&self) -> Result<()> {
+            match self {
+                MockIndex::Real(index) => index.clear_documents(),
+                MockIndex::Mock(m) => unsafe { m.get("clear_documents").call(()) },
             }
         }
     }
@@ -327,7 +208,7 @@ pub mod test {
             .times(2)
             .then(|_: &Path| -> Result<()> { Ok(()) });

-        let index = MockIndex::faux(faux);
+        let index = MockIndex::mock(faux);

         let path = PathBuf::from("hello");
         index.snapshot(&path).unwrap();
@@ -339,7 +220,7 @@ pub mod test {
     fn test_faux_unexisting_method_stub() {
         let faux = Mocker::default();

-        let index = MockIndex::faux(faux);
+        let index = MockIndex::mock(faux);

         let path = PathBuf::from("hello");
         index.snapshot(&path).unwrap();
@@ -356,7 +237,7 @@ pub mod test {
             panic!();
         });

-        let index = MockIndex::faux(faux);
+        let index = MockIndex::mock(faux);

         let path = PathBuf::from("hello");
         index.snapshot(&path).unwrap();
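Note: all of the deleted `Stub`/`StubStore`/`Mocker` machinery above now comes from the `nelson` crate, and the shim methods simply dispatch between a real index and a mocked one. Registration keeps the same shape the old builder had, as the tests further down show; retrieval goes through `unsafe { m.get(name).call(args) }` because the stored closure is type-erased and the caller vouches for the argument and return types. A usage sketch assembled from the calls visible in this diff, with the exact `nelson` signatures assumed:

    // Expect `snapshot` to be called exactly twice, returning Ok(()) each time.
    let mocker = Mocker::default();
    mocker
        .when("snapshot")
        .times(2)
        .then(|_: &Path| -> Result<()> { Ok(()) });
    let index = MockIndex::mock(mocker);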
@@ -3,10 +3,9 @@ use std::str::FromStr;
 use std::time::Instant;

 use either::Either;
-use heed::RoTxn;
 use indexmap::IndexMap;
-use meilisearch_tokenizer::{Analyzer, AnalyzerConfig, Token};
-use milli::{AscDesc, FieldId, FieldsIdsMap, FilterCondition, MatchingWords, SortError};
+use milli::tokenizer::{Analyzer, AnalyzerConfig, Token};
+use milli::{AscDesc, FieldId, FieldsIdsMap, Filter, MatchingWords, SortError};
 use regex::Regex;
 use serde::{Deserialize, Serialize};
 use serde_json::{json, Value};
@@ -102,7 +101,7 @@ impl Index {
         search.offset(query.offset.unwrap_or_default());

         if let Some(ref filter) = query.filter {
-            if let Some(facets) = parse_filter(filter, self, &rtxn)? {
+            if let Some(facets) = parse_filter(filter)? {
                 search.filter(facets);
             }
         }
@@ -296,7 +295,7 @@ fn compute_value_matches<'a, A: AsRef<[u8]>>(
     let mut start = 0;
     for (word, token) in analyzed.reconstruct() {
         if token.is_word() {
-            if let Some(length) = matcher.matches(token.text()) {
+            if let Some(length) = matcher.matches(&token) {
                 infos.push(MatchInfo { start, length });
             }
         }
@@ -310,6 +309,9 @@ fn compute_value_matches<'a, A: AsRef<[u8]>>(
         Value::Object(vals) => vals
             .values()
            .for_each(|val| compute_value_matches(infos, val, matcher, analyzer)),
+        Value::Number(number) => {
+            compute_value_matches(infos, &Value::String(number.to_string()), matcher, analyzer)
+        }
         _ => (),
     }
 }
@@ -484,18 +486,18 @@ fn format_fields<A: AsRef<[u8]>>(

 /// trait to allow unit testing of `format_fields`
 trait Matcher {
-    fn matches(&self, w: &str) -> Option<usize>;
+    fn matches(&self, w: &Token) -> Option<usize>;
 }

 #[cfg(test)]
 impl Matcher for BTreeMap<&str, Option<usize>> {
-    fn matches(&self, w: &str) -> Option<usize> {
-        self.get(w).cloned().flatten()
+    fn matches(&self, w: &Token) -> Option<usize> {
+        self.get(w.text()).cloned().flatten()
     }
 }

 impl Matcher for MatchingWords {
-    fn matches(&self, w: &str) -> Option<usize> {
+    fn matches(&self, w: &Token) -> Option<usize> {
         self.matching_bytes(w)
     }
 }
@@ -554,6 +556,11 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
                 })
                 .collect(),
             ),
+            Value::Number(number) => {
+                let number_string_value =
+                    self.format_string(number.to_string(), matcher, format_options);
+                Value::String(number_string_value)
+            }
             value => value,
         }
     }
@@ -572,7 +579,7 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
         let mut tokens = analyzed.reconstruct().peekable();

         while let Some((word, token)) =
-            tokens.next_if(|(_, token)| matcher.matches(token.text()).is_none())
+            tokens.next_if(|(_, token)| matcher.matches(token).is_none())
         {
             buffer.push((word, token));
         }
@@ -616,7 +623,7 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
         // Check if we need to do highlighting or computed matches before calling
         // Matcher::match since the call is expensive.
         if format_options.highlight && token.is_word() {
-            if let Some(length) = matcher.matches(token.text()) {
+            if let Some(length) = matcher.matches(&token) {
                 match word.get(..length).zip(word.get(length..)) {
                     Some((head, tail)) => {
                         out.push_str(&self.marks.0);
@@ -642,31 +649,27 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
         }
     }

-fn parse_filter(facets: &Value, index: &Index, txn: &RoTxn) -> Result<Option<FilterCondition>> {
+fn parse_filter(facets: &Value) -> Result<Option<Filter>> {
     match facets {
         Value::String(expr) => {
-            let condition = FilterCondition::from_str(txn, index, expr)?;
-            Ok(Some(condition))
+            let condition = Filter::from_str(expr)?;
+            Ok(condition)
         }
-        Value::Array(arr) => parse_filter_array(txn, index, arr),
+        Value::Array(arr) => parse_filter_array(arr),
         v => Err(FacetError::InvalidExpression(&["Array"], v.clone()).into()),
     }
 }

-fn parse_filter_array(
-    txn: &RoTxn,
-    index: &Index,
-    arr: &[Value],
-) -> Result<Option<FilterCondition>> {
+fn parse_filter_array(arr: &[Value]) -> Result<Option<Filter>> {
     let mut ands = Vec::new();
     for value in arr {
         match value {
-            Value::String(s) => ands.push(Either::Right(s.clone())),
+            Value::String(s) => ands.push(Either::Right(s.as_str())),
             Value::Array(arr) => {
                 let mut ors = Vec::new();
                 for value in arr {
                     match value {
-                        Value::String(s) => ors.push(s.clone()),
+                        Value::String(s) => ors.push(s.as_str()),
                         v => {
                             return Err(FacetError::InvalidExpression(&["String"], v.clone()).into())
                         }
@@ -682,7 +685,7 @@ fn parse_filter_array(
         }
     }

-    Ok(FilterCondition::from_array(txn, index, ands)?)
+    Ok(Filter::from_array(ands)?)
 }
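Note: `parse_filter_array` keeps Meilisearch's array-filter semantics: the outer array is an AND of clauses and each inner array is an OR, carried to milli as `Either` values (`Either::Right` for a single clause, as visible above). Switching from `s.clone()` to `s.as_str()` just borrows the strings for the duration of the call instead of copying them. A worked example of the accepted JSON shape:

    use serde_json::json;

    // Means: (genre = horror OR genre = comedy) AND director = 'Jordan Peele'
    let filter = json!([
        ["genre = horror", "genre = comedy"],
        "director = 'Jordan Peele'",
    ]);

The other visible change is that `Filter::from_str` no longer takes a transaction and an index: parsing the new filter syntax is purely syntactic, so no database access is needed at parse time.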
 #[cfg(test)]
@@ -788,7 +791,93 @@ mod test {
         assert_eq!(value["author"], "J. R. R. Tolkien");
     }

-    /// https://github.com/meilisearch/MeiliSearch/issues/1368
+    #[test]
+    fn formatted_with_highlight_in_number() {
+        let stop_words = fst::Set::default();
+        let mut config = AnalyzerConfig::default();
+        config.stop_words(&stop_words);
+        let analyzer = Analyzer::new(config);
+        let formatter = Formatter::new(&analyzer, (String::from("<em>"), String::from("</em>")));
+
+        let mut fields = FieldsIdsMap::new();
+        let title = fields.insert("title").unwrap();
+        let author = fields.insert("author").unwrap();
+        let publication_year = fields.insert("publication_year").unwrap();
+
+        let mut buf = Vec::new();
+        let mut obkv = obkv::KvWriter::new(&mut buf);
+
+        obkv.insert(
+            title,
+            Value::String("The Hobbit".into()).to_string().as_bytes(),
+        )
+        .unwrap();
+
+        obkv.finish().unwrap();
+        obkv = obkv::KvWriter::new(&mut buf);
+
+        obkv.insert(
+            author,
+            Value::String("J. R. R. Tolkien".into())
+                .to_string()
+                .as_bytes(),
+        )
+        .unwrap();
+        obkv.finish().unwrap();
+
+        obkv = obkv::KvWriter::new(&mut buf);
+
+        obkv.insert(
+            publication_year,
+            Value::Number(1937.into()).to_string().as_bytes(),
+        )
+        .unwrap();
+
+        obkv.finish().unwrap();
+
+        let obkv = obkv::KvReader::new(&buf);
+
+        let mut formatted_options = BTreeMap::new();
+        formatted_options.insert(
+            title,
+            FormatOptions {
+                highlight: false,
+                crop: None,
+            },
+        );
+        formatted_options.insert(
+            author,
+            FormatOptions {
+                highlight: false,
+                crop: None,
+            },
+        );
+        formatted_options.insert(
+            publication_year,
+            FormatOptions {
+                highlight: true,
+                crop: None,
+            },
+        );
+
+        let mut matching_words = BTreeMap::new();
+        matching_words.insert("1937", Some(4));
+
+        let value = format_fields(
+            &fields,
+            obkv,
+            &formatter,
+            &matching_words,
+            &formatted_options,
+        )
+        .unwrap();
+
+        assert_eq!(value["title"], "The Hobbit");
+        assert_eq!(value["author"], "J. R. R. Tolkien");
+        assert_eq!(value["publication_year"], "<em>1937</em>");
+    }
+
+    /// https://github.com/meilisearch/meilisearch/issues/1368
     #[test]
     fn formatted_with_highlight_emoji() {
         let stop_words = fst::Set::default();
@@ -1333,13 +1422,15 @@ mod test {
     "color": "Green",
     "name": "Lucas Hess",
     "gender": "male",
+    "price": 3.5,
     "address": "412 Losee Terrace, Blairstown, Georgia, 2825",
     "about": "Mollit ad in exercitation quis Laboris . Anim est ut consequat fugiat duis magna aliquip velit nisi. Commodo eiusmod est consequat proident consectetur aliqua enim fugiat. Aliqua adipisicing laboris elit proident enim veniam laboris mollit. Incididunt fugiat minim ad nostrud deserunt tempor in. Id irure officia labore qui est labore nulla nisi. Magna sit quis tempor esse consectetur amet labore duis aliqua consequat.\r\n"
     }"#).unwrap();
         let mut matcher = BTreeMap::new();
-        matcher.insert("green", Some(3));
+        matcher.insert("green", Some(5));
         matcher.insert("mollit", Some(6));
         matcher.insert("laboris", Some(7));
+        matcher.insert("3", Some(1));

         let stop_words = fst::Set::default();
         let mut config = AnalyzerConfig::default();
@@ -1349,7 +1440,7 @@ mod test {
         let matches = compute_matches(&matcher, &value, &analyzer);
         assert_eq!(
             format!("{:?}", matches),
-            r##"{"about": [MatchInfo { start: 0, length: 6 }, MatchInfo { start: 31, length: 7 }, MatchInfo { start: 191, length: 7 }, MatchInfo { start: 225, length: 7 }, MatchInfo { start: 233, length: 6 }], "color": [MatchInfo { start: 0, length: 3 }]}"##
+            r##"{"about": [MatchInfo { start: 0, length: 6 }, MatchInfo { start: 31, length: 7 }, MatchInfo { start: 191, length: 7 }, MatchInfo { start: 225, length: 7 }, MatchInfo { start: 233, length: 6 }], "color": [MatchInfo { start: 0, length: 5 }], "price": [MatchInfo { start: 0, length: 1 }]}"##
         );
     }
@@ -1,49 +0,0 @@
-use milli::update::UpdateBuilder;
-use milli::CompressionType;
-use rayon::ThreadPool;
-
-use crate::options::IndexerOpts;
-
-pub struct UpdateHandler {
-    max_nb_chunks: Option<usize>,
-    chunk_compression_level: Option<u32>,
-    thread_pool: ThreadPool,
-    log_frequency: usize,
-    max_memory: Option<usize>,
-    chunk_compression_type: CompressionType,
-}
-
-impl UpdateHandler {
-    pub fn new(opt: &IndexerOpts) -> anyhow::Result<Self> {
-        let thread_pool = rayon::ThreadPoolBuilder::new()
-            .num_threads(opt.indexing_jobs.unwrap_or(num_cpus::get() / 2))
-            .build()?;
-
-        Ok(Self {
-            max_nb_chunks: opt.max_nb_chunks,
-            chunk_compression_level: opt.chunk_compression_level,
-            thread_pool,
-            log_frequency: opt.log_every_n,
-            max_memory: opt.max_memory.map(|m| m.get_bytes() as usize),
-            chunk_compression_type: opt.chunk_compression_type,
-        })
-    }
-
-    pub fn update_builder(&self, update_id: u64) -> UpdateBuilder {
-        // We prepare the update by using the update builder.
-        let mut update_builder = UpdateBuilder::new(update_id);
-        if let Some(max_nb_chunks) = self.max_nb_chunks {
-            update_builder.max_nb_chunks(max_nb_chunks);
-        }
-        if let Some(chunk_compression_level) = self.chunk_compression_level {
-            update_builder.chunk_compression_level(chunk_compression_level);
-        }
-        update_builder.thread_pool(&self.thread_pool);
-        update_builder.log_every_n(self.log_frequency);
-        if let Some(max_memory) = self.max_memory {
-            update_builder.max_memory(max_memory);
-        }
-        update_builder.chunk_compression_type(self.chunk_compression_type);
-        update_builder
-    }
-}
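Note: this file is deleted outright. The per-update `UpdateBuilder` configuration it assembled now lives in milli's `IndexerConfig`, built once from the indexer options and handed to every index as `Arc<IndexerConfig>` (see the `indexer_config` field earlier in this diff). A hedged sketch of the replacement wiring, where the exact `IndexerConfig` field set is an assumption based on the options the deleted code forwarded:

    use std::sync::Arc;
    use milli::update::IndexerConfig;

    // One shared config instead of a fresh UpdateBuilder per update.
    let config = IndexerConfig {
        max_nb_chunks: opt.max_nb_chunks,
        max_memory: opt.max_memory.map(|m| m.get_bytes() as usize),
        chunk_compression_type: opt.chunk_compression_type,
        chunk_compression_level: opt.chunk_compression_level,
        ..Default::default()
    };
    let indexer_config = Arc::new(config);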
@@ -4,15 +4,16 @@ use std::num::NonZeroUsize;

 use log::{debug, info, trace};
 use milli::documents::DocumentBatchReader;
-use milli::update::{IndexDocumentsMethod, Setting, UpdateBuilder};
+use milli::update::{
+    DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsConfig, IndexDocumentsMethod,
+    Setting,
+};
 use serde::{Deserialize, Serialize, Serializer};
 use uuid::Uuid;

-use crate::index_controller::updates::status::{Failed, Processed, Processing, UpdateResult};
-use crate::Update;
-
-use super::error::{IndexError, Result};
+use super::error::Result;
 use super::index::{Index, IndexMeta};
+use crate::update_file_store::UpdateFileStore;

 fn serialize_with_wildcard<S>(
     field: &Setting<Vec<String>>,
@@ -30,25 +31,27 @@ where
     .serialize(s)
 }

-#[derive(Clone, Default, Debug, Serialize)]
+#[derive(Clone, Default, Debug, Serialize, PartialEq)]
 pub struct Checked;

-#[derive(Clone, Default, Debug, Serialize, Deserialize)]
+#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq)]
 pub struct Unchecked;

 /// Holds all the settings for an index. `T` can either be `Checked` if they represents settings
 /// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the later case, a
 /// call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
-#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
 #[serde(deny_unknown_fields)]
 #[serde(rename_all = "camelCase")]
 #[serde(bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>"))]
+#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
 pub struct Settings<T> {
     #[serde(
         default,
         serialize_with = "serialize_with_wildcard",
         skip_serializing_if = "Setting::is_not_set"
     )]
+    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
     pub displayed_attributes: Setting<Vec<String>>,

     #[serde(
@@ -56,19 +59,26 @@ pub struct Settings<T> {
         serialize_with = "serialize_with_wildcard",
         skip_serializing_if = "Setting::is_not_set"
     )]
+    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
     pub searchable_attributes: Setting<Vec<String>>,

     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
     pub filterable_attributes: Setting<BTreeSet<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
     pub sortable_attributes: Setting<BTreeSet<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
     pub ranking_rules: Setting<Vec<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
     pub stop_words: Setting<BTreeSet<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
     pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
     pub distinct_attribute: Setting<String>,

     #[serde(skip)]
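Note: the `#[cfg_attr(test, ...)]` lines make `Settings<T>` generatable by proptest in test builds only; each `Setting` field draws from the `setting_strategy()` helper added near the end of this diff, which picks among `NotSet`, `Reset`, and an arbitrary `Set` value. A sketch of the kind of property test this enables; the property itself is illustrative, not from the diff:

    proptest! {
        #[test]
        // Any generated unchecked settings should validate without panicking.
        fn settings_check_never_panics(settings: Settings<Unchecked>) {
            let _ = settings.check();
        }
    }

The added `PartialEq` derives serve the same purpose: they let tests compare whole `Settings` values directly with `assert_eq!`.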
@@ -164,34 +174,31 @@ pub struct Facets {
 }

 impl Index {
-    pub fn handle_update(&self, update: Processing) -> std::result::Result<Processed, Failed> {
-        let update_id = update.id();
-        let update_builder = self.update_handler.update_builder(update_id);
-        let result = (|| {
+    fn update_primary_key_txn<'a, 'b>(
+        &'a self,
+        txn: &mut heed::RwTxn<'a, 'b>,
+        primary_key: String,
+    ) -> Result<IndexMeta> {
+        let mut builder = milli::update::Settings::new(txn, self, self.indexer_config.as_ref());
+        builder.set_primary_key(primary_key);
+        builder.execute(|_| ())?;
+        let meta = IndexMeta::new_txn(self, txn)?;
+
+        Ok(meta)
+    }
+
+    pub fn update_primary_key(&self, primary_key: String) -> Result<IndexMeta> {
         let mut txn = self.write_txn()?;
-        let result = match update.meta() {
-            Update::DocumentAddition {
-                primary_key,
-                content_uuid,
-                method,
-            } => self.update_documents(
-                &mut txn,
-                *method,
-                *content_uuid,
-                update_builder,
-                primary_key.as_deref(),
-            ),
-            Update::Settings(settings) => {
-                let settings = settings.clone().check();
-                self.update_settings(&mut txn, &settings, update_builder)
-            }
+        let res = self.update_primary_key_txn(&mut txn, primary_key)?;
+        txn.commit()?;
+
+        Ok(res)
     }
-            Update::ClearDocuments => {
-                let builder = update_builder.clear_documents(&mut txn, self);
-                let _count = builder.execute()?;
-                Ok(UpdateResult::Other)
-            }
-            Update::DeleteDocuments(ids) => {
-                let mut builder = update_builder.delete_documents(&mut txn, self)?;
+
+    /// Deletes `ids` from the index, and returns how many documents were deleted.
+    pub fn delete_documents(&self, ids: &[String]) -> Result<DocumentDeletionResult> {
+        let mut txn = self.write_txn()?;
+        let mut builder = milli::update::DeleteDocuments::new(&mut txn, self)?;

         // We ignore unexisting document ids
         ids.iter().for_each(|id| {
@@ -199,94 +206,78 @@ impl Index {
         });

         let deleted = builder.execute()?;
-                Ok(UpdateResult::DocumentDeletion { deleted })
-            }
-        };
-        if result.is_ok() {
         txn.commit()?;
-        }
-            result
-        })();

-        if let Update::DocumentAddition { content_uuid, .. } = update.from.meta() {
-            let _ = self.update_file_store.delete(*content_uuid);
+        Ok(deleted)
     }

-        match result {
-            Ok(result) => Ok(update.process(result)),
-            Err(e) => Err(update.fail(e)),
-        }
-    }
-
-    pub fn update_primary_key(&self, primary_key: Option<String>) -> Result<IndexMeta> {
-        match primary_key {
-            Some(primary_key) => {
+    pub fn clear_documents(&self) -> Result<()> {
         let mut txn = self.write_txn()?;
-                if self.primary_key(&txn)?.is_some() {
-                    return Err(IndexError::ExistingPrimaryKey);
-                }
-                let mut builder = UpdateBuilder::new(0).settings(&mut txn, self);
-                builder.set_primary_key(primary_key);
-                builder.execute(|_, _| ())?;
-                let meta = IndexMeta::new_txn(self, &txn)?;
+        milli::update::ClearDocuments::new(&mut txn, self).execute()?;
         txn.commit()?;
-                Ok(meta)
-            }
-            None => {
-                let meta = IndexMeta::new(self)?;
-                Ok(meta)
-            }
-        }
+
+        Ok(())
     }

-    fn update_documents<'a, 'b>(
-        &'a self,
-        txn: &mut heed::RwTxn<'a, 'b>,
+    pub fn update_documents(
+        &self,
         method: IndexDocumentsMethod,
-        content_uuid: Uuid,
-        update_builder: UpdateBuilder,
-        primary_key: Option<&str>,
-    ) -> Result<UpdateResult> {
+        primary_key: Option<String>,
+        file_store: UpdateFileStore,
+        contents: impl IntoIterator<Item = Uuid>,
+    ) -> Result<DocumentAdditionResult> {
         trace!("performing document addition");
+        let mut txn = self.write_txn()?;

-        // Set the primary key if not set already, ignore if already set.
-        if let (None, Some(primary_key)) = (self.primary_key(txn)?, primary_key) {
-            let mut builder = UpdateBuilder::new(0).settings(txn, self);
-            builder.set_primary_key(primary_key.to_string());
-            builder.execute(|_, _| ())?;
+        if let Some(primary_key) = primary_key {
+            if self.primary_key(&txn)?.is_none() {
+                self.update_primary_key_txn(&mut txn, primary_key)?;
+            }
         }

-        let indexing_callback =
-            |indexing_step, update_id| debug!("update {}: {:?}", update_id, indexing_step);
+        let config = IndexDocumentsConfig {
+            update_method: method,
+            ..Default::default()
+        };

-        let content_file = self.update_file_store.get_update(content_uuid).unwrap();
-        let reader = DocumentBatchReader::from_reader(content_file).unwrap();
+        let indexing_callback = |indexing_step| debug!("update: {:?}", indexing_step);
+        let mut builder = milli::update::IndexDocuments::new(
+            &mut txn,
+            self,
+            self.indexer_config.as_ref(),
+            config,
+            indexing_callback,
+        );

-        let mut builder = update_builder.index_documents(txn, self);
-        builder.index_documents_method(method);
-        let addition = builder.execute(reader, indexing_callback)?;
+        for content_uuid in contents.into_iter() {
+            let content_file = file_store.get_update(content_uuid)?;
+            let reader = DocumentBatchReader::from_reader(content_file)?;
+            builder.add_documents(reader)?;
+        }
+
+        let addition = builder.execute()?;
+
+        txn.commit()?;

         info!("document addition done: {:?}", addition);

-        Ok(UpdateResult::DocumentsAddition(addition))
+        Ok(addition)
     }

-    fn update_settings<'a, 'b>(
-        &'a self,
-        txn: &mut heed::RwTxn<'a, 'b>,
-        settings: &Settings<Checked>,
-        update_builder: UpdateBuilder,
-    ) -> Result<UpdateResult> {
+    pub fn update_settings(&self, settings: &Settings<Checked>) -> Result<()> {
         // We must use the write transaction of the update here.
-        let mut builder = update_builder.settings(txn, self);
+        let mut txn = self.write_txn()?;
+        let mut builder =
+            milli::update::Settings::new(&mut txn, self, self.indexer_config.as_ref());

         apply_settings_to_builder(settings, &mut builder);

-        builder.execute(|indexing_step, update_id| {
-            debug!("update {}: {:?}", update_id, indexing_step)
-        })?;
+        builder.execute(|indexing_step| debug!("update: {:?}", indexing_step))?;

-        Ok(UpdateResult::Other)
+        txn.commit()?;
+
+        Ok(())
     }
 }

@@ -346,9 +337,19 @@ pub fn apply_settings_to_builder(
 }

 #[cfg(test)]
-mod test {
+pub(crate) mod test {
+    use proptest::prelude::*;
+
     use super::*;

+    pub(super) fn setting_strategy<T: Arbitrary + Clone>() -> impl Strategy<Value = Setting<T>> {
+        prop_oneof![
+            Just(Setting::NotSet),
+            Just(Setting::Reset),
+            any::<T>().prop_map(Setting::Set)
+        ]
+    }
+
     #[test]
     fn test_setting_check() {
         // test no changes
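Note: `update_documents` now owns its write transaction end to end: it registers every content file with a single `IndexDocuments` builder, runs one `execute()`, and commits once, so a batch of payloads is indexed atomically instead of one transaction per update. A call-shape sketch using the signature above, with the uuids and file store as placeholders:

    // All three payloads land in one milli indexing pass and one commit.
    let addition = index.update_documents(
        IndexDocumentsMethod::ReplaceDocuments,
        None, // keep whatever primary key is already set
        file_store.clone(),
        vec![uuid_a, uuid_b, uuid_c],
    )?;
    info!("document addition done: {:?}", addition);

It also returns milli's `DocumentAdditionResult` directly rather than wrapping it in the now-deleted `UpdateResult` enum.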
@@ -3,25 +3,25 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;

 use async_stream::stream;
-use chrono::Utc;
 use futures::{lock::Mutex, stream::StreamExt};
 use log::{error, trace};
+use time::macros::format_description;
+use time::OffsetDateTime;
 use tokio::sync::{mpsc, oneshot, RwLock};

 use super::error::{DumpActorError, Result};
-use super::{DumpInfo, DumpMsg, DumpStatus, DumpTask};
-use crate::index_controller::index_resolver::index_store::IndexStore;
-use crate::index_controller::index_resolver::uuid_store::UuidStore;
-use crate::index_controller::index_resolver::IndexResolver;
-use crate::index_controller::updates::UpdateSender;
+use super::{DumpInfo, DumpJob, DumpMsg, DumpStatus};
+use crate::tasks::Scheduler;
+use crate::update_file_store::UpdateFileStore;

 pub const CONCURRENT_DUMP_MSG: usize = 10;

-pub struct DumpActor<U, I> {
+pub struct DumpActor {
     inbox: Option<mpsc::Receiver<DumpMsg>>,
-    index_resolver: Arc<IndexResolver<U, I>>,
-    update: UpdateSender,
+    update_file_store: UpdateFileStore,
+    scheduler: Arc<RwLock<Scheduler>>,
     dump_path: PathBuf,
+    analytics_path: PathBuf,
     lock: Arc<Mutex<()>>,
     dump_infos: Arc<RwLock<HashMap<String, DumpInfo>>>,
     update_db_size: usize,
@@ -30,19 +30,20 @@ pub struct DumpActor<U, I> {

 /// Generate uid from creation date
 fn generate_uid() -> String {
-    Utc::now().format("%Y%m%d-%H%M%S%3f").to_string()
+    OffsetDateTime::now_utc()
+        .format(format_description!(
+            "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
+        ))
+        .unwrap()
 }
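Note: the chrono format string `"%Y%m%d-%H%M%S%3f"` becomes a `format_description!` that the `time` crate validates at compile time; only the `.format(...)` call itself can still fail, hence the `.unwrap()`. The `test_generate_uid` test added at the bottom of this diff parses a uid back with the same two component descriptions to prove the round trip. Standalone equivalent of the new formatting code:

    use time::macros::format_description;
    use time::OffsetDateTime;

    // Produces e.g. "20220214-093015123": date, a dash, then time with
    // millisecond precision, always in UTC.
    let uid = OffsetDateTime::now_utc()
        .format(format_description!(
            "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
        ))
        .unwrap();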
-impl<U, I> DumpActor<U, I>
-where
-    U: UuidStore + Sync + Send + 'static,
-    I: IndexStore + Sync + Send + 'static,
-{
+impl DumpActor {
     pub fn new(
         inbox: mpsc::Receiver<DumpMsg>,
-        index_resolver: Arc<IndexResolver<U, I>>,
-        update: UpdateSender,
+        update_file_store: UpdateFileStore,
+        scheduler: Arc<RwLock<Scheduler>>,
         dump_path: impl AsRef<Path>,
+        analytics_path: impl AsRef<Path>,
         index_db_size: usize,
         update_db_size: usize,
     ) -> Self {
@@ -50,9 +51,10 @@ where
         let lock = Arc::new(Mutex::new(()));
         Self {
             inbox: Some(inbox),
-            index_resolver,
-            update,
+            scheduler,
+            update_file_store,
             dump_path: dump_path.as_ref().into(),
+            analytics_path: analytics_path.as_ref().into(),
             dump_infos,
             lock,
             index_db_size,
@@ -117,16 +119,17 @@ where

         ret.send(Ok(info)).expect("Dump actor is dead");

-        let task = DumpTask {
-            path: self.dump_path.clone(),
-            index_resolver: self.index_resolver.clone(),
-            update_sender: self.update.clone(),
+        let task = DumpJob {
+            dump_path: self.dump_path.clone(),
+            db_path: self.analytics_path.clone(),
+            update_file_store: self.update_file_store.clone(),
+            scheduler: self.scheduler.clone(),
             uid: uid.clone(),
             update_db_size: self.update_db_size,
             index_db_size: self.index_db_size,
         };

-        let task_result = tokio::task::spawn(task.run()).await;
+        let task_result = tokio::task::spawn_local(task.run()).await;

         let mut dump_infos = self.dump_infos.write().await;
         let dump_infos = dump_infos
@@ -156,3 +159,33 @@ where
         }
     }
 }
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_generate_uid() {
+        let current = OffsetDateTime::now_utc();
+
+        let uid = generate_uid();
+        let (date, time) = uid.split_once('-').unwrap();
+
+        let date = time::Date::parse(
+            date,
+            &format_description!("[year repr:full][month repr:numerical][day padding:zero]"),
+        )
+        .unwrap();
+        let time = time::Time::parse(
+            time,
+            &format_description!(
+                "[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
+            ),
+        )
+        .unwrap();
+        let datetime = time::PrimitiveDateTime::new(date, time);
+        let datetime = datetime.assume_utc();
+
+        assert!(current - datetime < time::Duration::SECOND);
+    }
+}
Some files were not shown because too many files have changed in this diff.