Mirror of https://github.com/meilisearch/meilisearch.git
Synced 2025-11-28 00:40:31 +00:00

Compare commits: v0.27.2 ... filter/fie
240 commits
Commit SHA1s:

016afc8c07, d833e62282, a733271ced, 38b85ec547, ed185fb636, f7b47b43f4, 8e703fbabe, 7ced5c2cc7,
ef95d1d545, f98c8d7f8b, 4862993482, bf865f51bb, f8aa21bc16, ba839a909f, 8b98303191, 54cd9976d7,
5ae5b06018, 6f910f89eb, 9a8fb6c55a, 4016161035, 3340af1ba9, e46b853fbf, 44e004d895, 71bf9b5b9b,
053071d866, 0f6e650ba2, 97daea5a66, 8990b12609, 07a35c6445, 4fc73195e6, 1425d62a31, 87d4bf672c,
2063fbd985, de356061db, 9a6841c7ce, 354f7fb2bf, 0333bad057, 0e416b4bcd, 20dd259f23, 18095fa4e1,
b8745420da, 36cb09eb25, 8fc3b7d3b0, 64e3096790, b594d49def, fbba67fbe9, 232b2baaa3, 02c5c193a2,
b9b32d65a8, 680606fd82, 4a494ad2fa, 2b2e571c76, 6f0d3472b1, 5cd13cc303, 1e3dcbea3f, b96399d24b,
5450b5ced3, c924614527, 96d4fd54bb, 3e5d6be86b, 2b944ecd89, db42268888, 108b3520de, f64b824c45,
fc4990b968, 39a1dcb32c, afcc493480, 6171f17f1d, 55169ff914, 0a16f71563, bd280f75e7, 1a7631c807,
17f30c2b2d, 09938c9b6f, f5306eb5b0, 173eea06e1, 8d09772334, 987a7f8926, 0928f3d41c, 09ec8e9fca,
1968950b0f, 6ffa222218, 79e67df73d, 72be296852, a7bff35e49, 3b01ed4fe8, cbd27d313c, 6ac8675c6d,
df61ca9cae, bbd685af5e, 9b9cbc815b, fd11903920, c3003065e8, c6ce3452cf, e5b760c59a, 277a0a7967,
64b5b2e1f8, 10d3b367dc, ba55905377, 0e7e16ae72, 80c156df3f, 4b6c3e72ff, 3e46543060, b83455f345,
953a209f02, 0c5352fc22, 8ac8fcb0c9, 4667c9fe1a, 12b5eabd5d, cf2d8de48a, 419922e475, c9cd1738a5,
0258659278, ce37f53a16, bcb51905d7, 10a71fdb10, f8d3f739ad, bb405aa729, 7e3d5ebc8e, dfce9ba468,
9eea142e2b, 8b8c3e32f0, 08d72e32a4, ac9e7bdbe3, 4512eed8f5, e769043576, df721b2e9e, 0656df3a6d,
7652295d2c, 94b32cce01, b2e2dc8558, 1816db8c1f, c295924ea2, 1f62e83267, b3c8915702, 151f494110,
96152a3d32, 84f52ac175, 70916d6596, b9a79eb858, a57b2d9538, 34c8888f56, d54643455c, 96a5791e39,
e2c204cf86, d80e8b64af, c11d21879a, d6dd234914, 4970525541, 461b91fd13, 004c8b6be3, 9d5cc88cd5,
d22f07f5b2, e81c7aa2e6, 39db6ea42b, 47007fa71b, 627f13df85, 97c14f6fcc, 446f1f31e0, ddad6cc069,
ab39df9693, 1465b5e0ff, 8800b348f0, 082d6b89ff, b82c86c8f5, 36d94257d8, 3f80468f18, 8509243e68,
3684c822f1, 80f7d87356, d2f457a076, e5ef5a6f9c, 5450fecaef, deba0cc096, 26e7bdf702, 3441cc6c36,
c7711c7816, d47b997120, 1e310ecc7d, 4cb2c6ef1e, a9ef399a6b, 5a2972fc19, ba51ca83ec, 1647ca3c1f,
74a1f88d88, f58507379a, 6b2016b350, 3015265bde, 49d8fadb52, 127171c812, 67b6f4340a, 986a99296d,
92d86ce6aa, 3c85b29865, 8349f38197, 64654ef7c3, 0f9c134114, 7b47e4e87a, 8743d73973, f0aceb4fba,
61035a3ea4, 4778884105, 57fde30b91, 56eb2907c9, 414d0907ce, 60a8249de6, 46cdc17701, 6a0231cb28,
7fa3eb1003, 2f0625a984, 737b891a41, 5a5066023b, aa50acb031, 9935db86c7, f65116b208, 341756a0eb,
5f0e9b63d2, ca9ba2d90c, 2c248a68a4, 641ca5a857, 6bf4db0bca, 4e9accdeb7, ae4e419db4, 50763aac82,
3517eae47f, 0250ea9157, 6d221058f1, b9866d8df2, b9b9cba154, 348af6cfbf, c07f3b44b7, 38d681c230,
e85377e725, 6ff8bf823d, 4d25229df9, f1cd6b6ee8, 63f75bd187, acf3357cf3, 202d6105b2, 0714551101
.github/dependabot.yml (vendored, new file, 9 lines)

```diff
@@ -0,0 +1,9 @@
+# Set update schedule for GitHub Actions only
+
+version: 2
+updates:
+
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
```
.github/workflows/README.md (vendored, 20 lines deleted)

```diff
@@ -1,20 +0,0 @@
-# GitHub Actions Workflow for Meilisearch
-
-> **Note:**
-
-> - We do not use [cache](https://github.com/actions/cache) yet but we could use it to speed up CI
-
-## Workflow
-
-- On each pull request, we trigger `cargo test`.
-- On each tag, we build:
-  - the tagged Docker image and publish it to Docker Hub
-  - the binaries for MacOS, Ubuntu, and Windows
-  - the Debian package
-- On each stable release (`v*.*.*` tag):
-  - we build the `latest` Docker image and publish it to Docker Hub
-  - we publish the binary to Hombrew and Gemfury
-
-## Problems
-
-- We do not test on Windows because we are unable to make it work, there is a disk space problem.
```
.github/workflows/coverage.yml (vendored, 4 lines changed)

```diff
@@ -8,7 +8,7 @@ jobs:
   nightly-coverage:
     runs-on: ubuntu-18.04
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: actions-rs/toolchain@v1
         with:
           toolchain: nightly
@@ -25,7 +25,7 @@ jobs:
         RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=unwind -Zpanic_abort_tests"
       - uses: actions-rs/grcov@v0.1
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v1
+        uses: codecov/codecov-action@v3
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           file: ${{ steps.coverage.outputs.report }}
```
.github/workflows/flaky.yml (vendored, 2 lines changed)

```diff
@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-18.04

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: Install cargo-flaky
        run: cargo install cargo-flaky
      - name: Run cargo flaky 100 times
```
.github/workflows/publish-binaries.yml (vendored, 4 lines changed)

```diff
@@ -27,7 +27,7 @@ jobs:
       - uses: hecrj/setup-rust-action@master
         with:
           rust-version: stable
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: Build
         run: cargo build --release --locked
       - name: Upload binaries to release
@@ -55,7 +55,7 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: Installing Rust toolchain
         uses: actions-rs/toolchain@v1
```
.github/workflows/publish-deb-brew-pkg.yml (vendored, 2 lines changed)

```diff
@@ -14,7 +14,7 @@ jobs:
           rust-version: stable
       - name: Install cargo-deb
         run: cargo install cargo-deb
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: Build deb package
         run: cargo deb -p meilisearch-http -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
```
.github/workflows/publish-docker-images.yml (vendored, new file, 64 lines)

```diff
@@ -0,0 +1,64 @@
+---
+on:
+  schedule:
+    - cron: '0 4 * * *' # Every day at 4:00am
+  push:
+    tags:
+      - '*'
+  release:
+    types: [released]
+
+name: Publish tagged images to Docker Hub
+
+jobs:
+  docker:
+    runs-on: docker
+    steps:
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Login to DockerHub
+        if: github.event_name != 'schedule'
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Check tag format
+        id: check-tag-format
+        run: |
+          # Escape submitted tag name
+          escaped_tag=$(printf "%q" ${{ github.ref_name }})
+
+          # Check if tag has format v<number>.<number>.<number> and set output.match
+          # to create a vX.Y (without patch version) Docker tag
+          if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+            echo ::set-output name=match::true
+          else
+            echo ::set-output name=match::false
+          fi
+
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: getmeili/meilisearch
+          # The latest tag is only pushed for the official Meilisearch release
+          # See https://github.com/docker/metadata-action#latest-tag
+          flavor: latest=false
+          tags: |
+            type=ref,event=tag
+            type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.match }}
+            type=raw,value=latest,enable=${{ github.event_name == 'release' }}
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          # We do not push tags for the cron jobs, this is only for test purposes
+          push: ${{ github.event_name != 'schedule' }}
+          platforms: linux/amd64,linux/arm64
+          tags: ${{ steps.meta.outputs.tags }}
```
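The `check-tag-format` step exists to drive the extra `vX.Y` Docker tag: only tags of the exact form `v<number>.<number>.<number>` (so not release candidates) set `match` to `true`, which the `type=semver` rule in the `Docker meta` step consumes. Below is a Rust sketch of the same predicate, for clarity only (the workflow itself uses a bash `[[ =~ ]]` test; `is_stable_tag` is an illustrative name, not part of the repository):

```rust
// Sketch: a tag qualifies for the extra vX.Y Docker tag only when it is a
// plain stable semver tag, i.e. "v" followed by three dot-separated numbers.
fn is_stable_tag(tag: &str) -> bool {
    let parts: Vec<&str> = tag
        .strip_prefix('v')
        .map(|rest| rest.split('.').collect())
        .unwrap_or_default();
    parts.len() == 3
        && parts
            .iter()
            .all(|p| !p.is_empty() && p.chars().all(|c| c.is_ascii_digit()))
}

fn main() {
    assert!(is_stable_tag("v0.28.0"));
    assert!(!is_stable_tag("v0.28.0-rc.1")); // release candidates get no vX.Y tag
    assert!(!is_stable_tag("latest"));
}
```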
.github/workflows/publish-docker-latest.yml (vendored, 30 lines deleted)

```diff
@@ -1,30 +0,0 @@
----
-on:
-  release:
-    types: [released]
-
-name: Publish latest image to Docker Hub
-
-jobs:
-  docker-latest:
-    runs-on: docker
-    steps:
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
-
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-
-      - name: Build and push
-        id: docker_build
-        uses: docker/build-push-action@v2
-        with:
-          push: true
-          platforms: linux/amd64,linux/arm64
-          tags: getmeili/meilisearch:latest
```
.github/workflows/publish-docker-tag.yml (vendored, 39 lines deleted)

```diff
@@ -1,39 +0,0 @@
----
-on:
-  push:
-    tags:
-      - '*'
-
-name: Publish tagged image to Docker Hub
-
-jobs:
-  docker-tag:
-    runs-on: docker
-    steps:
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
-
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-
-      - name: Docker meta
-        id: meta
-        uses: docker/metadata-action@v3
-        with:
-          images: getmeili/meilisearch
-          flavor: latest=false
-          tags: type=ref,event=tag
-
-      - name: Build and push
-        id: docker_build
-        uses: docker/build-push-action@v2
-        with:
-          push: true
-          platforms: linux/amd64,linux/arm64
-          tags: ${{ steps.meta.outputs.tags }}
```
.github/workflows/rust.yml (vendored, 11 lines changed)

```diff
@@ -12,6 +12,7 @@ on:
 env:
   CARGO_TERM_COLOR: always
   RUST_BACKTRACE: 1
+  RUSTFLAGS: "-D warnings"

 jobs:
   tests:
@@ -22,7 +23,7 @@ jobs:
       matrix:
         os: [ubuntu-18.04, macos-latest, windows-latest]
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v1.3.0
       - name: Run cargo check without any default features
@@ -41,7 +42,7 @@ jobs:
     name: Run tests in debug
     runs-on: ubuntu-18.04
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
@@ -59,7 +60,7 @@ jobs:
     name: Run Clippy
     runs-on: ubuntu-18.04
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
@@ -78,11 +79,11 @@ jobs:
     name: Run Rustfmt
     runs-on: ubuntu-18.04
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: nightly
+          toolchain: stable
           override: true
           components: rustfmt
       - name: Cache dependencies
```
CONTRIBUTING.md

````diff
@@ -5,14 +5,16 @@ First, thank you for contributing to Meilisearch! The goal of this document is t
 Remember that there are many ways to contribute other than writing code: writing [tutorials or blog posts](https://github.com/meilisearch/awesome-meilisearch), improving [the documentation](https://github.com/meilisearch/documentation), submitting [bug reports](https://github.com/meilisearch/meilisearch/issues/new?assignees=&labels=&template=bug_report.md&title=) and [feature requests](https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal)...

 ## Table of Contents

 - [Assumptions](#assumptions)
 - [How to Contribute](#how-to-contribute)
 - [Development Workflow](#development-workflow)
 - [Git Guidelines](#git-guidelines)
+- [Release Process (for internal team only)](#release-process-for-internal-team-only)

 ## Assumptions

-1. **You're familiar with [Github](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
+1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
 2. **You've read the Meilisearch [documentation](https://docs.meilisearch.com).**
 3. **You know about the [Meilisearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
    Please use this for help.**
@@ -22,7 +24,7 @@ Remember that there are many ways to contribute other than writing code: writing
 1. Ensure your change has an issue! Find an
    [existing issue](https://github.com/meilisearch/meilisearch/issues/) or [open a new issue](https://github.com/meilisearch/meilisearch/issues/new).
    * This is where you can get a feel if the change will be accepted or not.
-2. Once approved, [fork the Meilisearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own Github account.
+2. Once approved, [fork the Meilisearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own GitHub account.
 3. [Create a new Git branch](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-and-deleting-branches-within-your-repository)
 4. Review the [Development Workflow](#development-workflow) section that describes the steps to maintain the repository.
 5. Make your changes on your branch.
@@ -44,6 +46,8 @@ We recommend using the `--release` flag to test the full performance of Meilisea
 cargo test
 ```

+This command will be triggered on each PR as a requirement for merging it.
+
 If you get a "Too many open files" error you might want to increase the open file limit using this command:

 ```bash
@@ -68,7 +72,7 @@ As minimal requirements, your commit message should:

 We don't follow any other convention, but if you want to use one, we recommend [the Chris Beams one](https://chris.beams.io/posts/git-commit/).

-### Github Pull Requests
+### GitHub Pull Requests

 Some notes on GitHub PRs:
@@ -78,6 +82,29 @@ Some notes on GitHub PRs:
   The draft PRs are recommended when you want to show that you are working on something and make your work visible.
 - The branch related to the PR must be **up-to-date with `main`** before merging. Fortunately, this project uses [Bors](https://github.com/bors-ng/bors-ng) to automatically enforce this requirement without the PR author having to rebase manually.

+## Release Process (for internal team only)
+
+Meilisearch tools follow the [Semantic Versioning Convention](https://semver.org/).
+
+### Automation to rebase and Merge the PRs
+
+This project integrates a bot that helps us manage pull requests merging.<br>
+_[Read more about this](https://github.com/meilisearch/integration-guides/blob/main/resources/bors.md)._
+
+### How to Publish a new Release
+
+The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/core-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release.
+
+### Release assets
+
+For each release, the following assets are created:
+- Binaries for different platforms (Linux, MacOS, Windows and ARM architectures) are attached to the GitHub release
+- Binaries are pushed to HomeBrew and APT (not published for RC)
+- Docker tags are created/updated:
+  - `vX.Y.Z`
+  - `vX.Y` (not published for RC)
+  - `latest` (not published for RC)
+
 <hr>

 Thank you again for reading this through, we can not wait to begin to work with you if you made your way through this contributing guide ❤️
````
Cargo.lock (generated, 770 lines changed)

File diff suppressed because it is too large.
Cargo.toml

```diff
@@ -2,8 +2,14 @@
 resolver = "2"
 members = [
     "meilisearch-http",
-    "meilisearch-error",
+    "meilisearch-types",
     "meilisearch-lib",
     "meilisearch-auth",
+    "permissive-json-pointer",
 ]

+[profile.dev.package.flate2]
+opt-level = 3
+
+[profile.dev.package.milli]
+opt-level = 3
```
README.md

````diff
@@ -58,7 +58,7 @@ meilisearch
 #### Docker

 ```bash
-docker run -p 7700:7700 -v "$(pwd)/data.ms:/data.ms" getmeili/meilisearch
+docker run -p 7700:7700 -v "$(pwd)/meili_data:/meili_data" getmeili/meilisearch
 ```

 #### Announcing a cloud-hosted Meilisearch
@@ -109,7 +109,7 @@ cargo run --release
 Let's create an index! If you need a sample dataset, use [this movie database](https://www.notion.so/meilisearch/A-movies-dataset-to-test-Meili-1cbf7c9cfa4247249c40edfa22d7ca87#b5ae399b81834705ba5420ac70358a65). You can also find it in the `datasets/` directory.

 ```bash
-curl -L 'https://bit.ly/2PAcw9l' -o movies.json
+curl -L https://docs.meilisearch.com/movies.json -o movies.json
 ```

 Now, you're ready to index some data.
````
SECURITY.md

```diff
@@ -4,7 +4,7 @@ Meilisearch takes the security of our software products and services seriously.

 If you believe you have found a security vulnerability in any Meilisearch-owned repository, please report it to us as described below.

-## Suported versions
+## Supported versions

 As long as we are pre-v1.0, only the latest version of Meilisearch will be supported with security updates.
```
bors.toml

```diff
@@ -2,7 +2,7 @@ status = [
 'Tests on ubuntu-18.04',
 'Tests on macos-latest',
 'Tests on windows-latest',
-# 'Run Clippy',
+'Run Clippy',
 'Run Rustfmt',
 'Run tests in debug',
 ]
```
download-latest.sh

```diff
@@ -67,8 +67,8 @@ semverLT() {
     return 1
 }

-# Get a token from https://github.com/settings/tokens to increasae rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo'
-# Create GITHUB_PAT enviroment variable once you aquired the token to start using it
+# Get a token from https://github.com/settings/tokens to increase rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo'
+# Create GITHUB_PAT environment variable once you acquired the token to start using it
 # Returns the tag of the latest stable release (in terms of semver and not of release date)
 get_latest() {
     temp_file='temp_file' # temp_file needed because the grep would start before the download is over
@@ -89,7 +89,7 @@ get_latest() {
     latest=''
     current_tag=''
     for release_info in $releases; do
-        if [ $i -eq 0 ]; then # Cheking tag_name
+        if [ $i -eq 0 ]; then # Checking tag_name
             if echo "$release_info" | grep -q "$GREP_SEMVER_REGEXP"; then # If it's not an alpha or beta release
                 current_tag=$release_info
             else
```
meilisearch-auth/Cargo.toml

```diff
@@ -1,15 +1,18 @@
 [package]
 name = "meilisearch-auth"
-version = "0.27.1"
+version = "0.28.0"
 edition = "2021"

 [dependencies]
 base64 = "0.13.0"
 enum-iterator = "0.7.0"
-meilisearch-error = { path = "../meilisearch-error" }
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.26.5" }
+hmac = "0.12.1"
+meilisearch-types = { path = "../meilisearch-types" }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.3-filter.beta.0" }
 rand = "0.8.4"
 serde = { version = "1.0.136", features = ["derive"] }
 serde_json = { version = "1.0.79", features = ["preserve_order"] }
 sha2 = "0.10.2"
 thiserror = "1.0.30"
 time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 uuid = { version = "0.8.2", features = ["serde", "v4"] }
```
|
||||
@@ -5,9 +5,11 @@ use serde::{Deserialize, Serialize};
|
||||
#[repr(u8)]
|
||||
pub enum Action {
|
||||
#[serde(rename = "*")]
|
||||
All = 0,
|
||||
All = actions::ALL,
|
||||
#[serde(rename = "search")]
|
||||
Search = actions::SEARCH,
|
||||
#[serde(rename = "documents.*")]
|
||||
DocumentsAll = actions::DOCUMENTS_ALL,
|
||||
#[serde(rename = "documents.add")]
|
||||
DocumentsAdd = actions::DOCUMENTS_ADD,
|
||||
#[serde(rename = "documents.get")]
|
||||
@@ -32,18 +34,25 @@ pub enum Action {
|
||||
StatsGet = actions::STATS_GET,
|
||||
#[serde(rename = "dumps.create")]
|
||||
DumpsCreate = actions::DUMPS_CREATE,
|
||||
#[serde(rename = "dumps.get")]
|
||||
DumpsGet = actions::DUMPS_GET,
|
||||
#[serde(rename = "version")]
|
||||
Version = actions::VERSION,
|
||||
#[serde(rename = "keys.create")]
|
||||
KeysAdd = actions::KEYS_CREATE,
|
||||
#[serde(rename = "keys.get")]
|
||||
KeysGet = actions::KEYS_GET,
|
||||
#[serde(rename = "keys.update")]
|
||||
KeysUpdate = actions::KEYS_UPDATE,
|
||||
#[serde(rename = "keys.delete")]
|
||||
KeysDelete = actions::KEYS_DELETE,
|
||||
}
|
||||
|
||||
impl Action {
|
||||
pub fn from_repr(repr: u8) -> Option<Self> {
|
||||
use actions::*;
|
||||
match repr {
|
||||
0 => Some(Self::All),
|
||||
ALL => Some(Self::All),
|
||||
SEARCH => Some(Self::Search),
|
||||
DOCUMENTS_ALL => Some(Self::DocumentsAll),
|
||||
DOCUMENTS_ADD => Some(Self::DocumentsAdd),
|
||||
DOCUMENTS_GET => Some(Self::DocumentsGet),
|
||||
DOCUMENTS_DELETE => Some(Self::DocumentsDelete),
|
||||
@@ -56,8 +65,11 @@ impl Action {
|
||||
SETTINGS_UPDATE => Some(Self::SettingsUpdate),
|
||||
STATS_GET => Some(Self::StatsGet),
|
||||
DUMPS_CREATE => Some(Self::DumpsCreate),
|
||||
DUMPS_GET => Some(Self::DumpsGet),
|
||||
VERSION => Some(Self::Version),
|
||||
KEYS_CREATE => Some(Self::KeysAdd),
|
||||
KEYS_GET => Some(Self::KeysGet),
|
||||
KEYS_UPDATE => Some(Self::KeysUpdate),
|
||||
KEYS_DELETE => Some(Self::KeysDelete),
|
||||
_otherwise => None,
|
||||
}
|
||||
}
|
||||
@@ -65,8 +77,9 @@ impl Action {
|
||||
pub fn repr(&self) -> u8 {
|
||||
use actions::*;
|
||||
match self {
|
||||
Self::All => 0,
|
||||
Self::All => ALL,
|
||||
Self::Search => SEARCH,
|
||||
Self::DocumentsAll => DOCUMENTS_ALL,
|
||||
Self::DocumentsAdd => DOCUMENTS_ADD,
|
||||
Self::DocumentsGet => DOCUMENTS_GET,
|
||||
Self::DocumentsDelete => DOCUMENTS_DELETE,
|
||||
@@ -79,26 +92,34 @@ impl Action {
|
||||
Self::SettingsUpdate => SETTINGS_UPDATE,
|
||||
Self::StatsGet => STATS_GET,
|
||||
Self::DumpsCreate => DUMPS_CREATE,
|
||||
Self::DumpsGet => DUMPS_GET,
|
||||
Self::Version => VERSION,
|
||||
Self::KeysAdd => KEYS_CREATE,
|
||||
Self::KeysGet => KEYS_GET,
|
||||
Self::KeysUpdate => KEYS_UPDATE,
|
||||
Self::KeysDelete => KEYS_DELETE,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub mod actions {
|
||||
pub(crate) const ALL: u8 = 0;
|
||||
pub const SEARCH: u8 = 1;
|
||||
pub const DOCUMENTS_ADD: u8 = 2;
|
||||
pub const DOCUMENTS_GET: u8 = 3;
|
||||
pub const DOCUMENTS_DELETE: u8 = 4;
|
||||
pub const INDEXES_CREATE: u8 = 5;
|
||||
pub const INDEXES_GET: u8 = 6;
|
||||
pub const INDEXES_UPDATE: u8 = 7;
|
||||
pub const INDEXES_DELETE: u8 = 8;
|
||||
pub const TASKS_GET: u8 = 9;
|
||||
pub const SETTINGS_GET: u8 = 10;
|
||||
pub const SETTINGS_UPDATE: u8 = 11;
|
||||
pub const STATS_GET: u8 = 12;
|
||||
pub const DUMPS_CREATE: u8 = 13;
|
||||
pub const DUMPS_GET: u8 = 14;
|
||||
pub const DOCUMENTS_ALL: u8 = 2;
|
||||
pub const DOCUMENTS_ADD: u8 = 3;
|
||||
pub const DOCUMENTS_GET: u8 = 4;
|
||||
pub const DOCUMENTS_DELETE: u8 = 5;
|
||||
pub const INDEXES_CREATE: u8 = 6;
|
||||
pub const INDEXES_GET: u8 = 7;
|
||||
pub const INDEXES_UPDATE: u8 = 8;
|
||||
pub const INDEXES_DELETE: u8 = 9;
|
||||
pub const TASKS_GET: u8 = 10;
|
||||
pub const SETTINGS_GET: u8 = 11;
|
||||
pub const SETTINGS_UPDATE: u8 = 12;
|
||||
pub const STATS_GET: u8 = 13;
|
||||
pub const DUMPS_CREATE: u8 = 14;
|
||||
pub const VERSION: u8 = 15;
|
||||
pub const KEYS_CREATE: u8 = 16;
|
||||
pub const KEYS_GET: u8 = 17;
|
||||
pub const KEYS_UPDATE: u8 = 18;
|
||||
pub const KEYS_DELETE: u8 = 19;
|
||||
}
|
||||
|
||||
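Because the discriminants are renumbered (for example `DOCUMENTS_ADD` moves from 2 to 3 to make room for `DOCUMENTS_ALL`), a `u8` persisted by an older version no longer means the same action, and `from_repr`/`repr` must stay exact inverses over the new constants. A scaled-down, self-contained sketch of that invariant (the three-variant enum is illustrative, not the real `Action`):

```rust
// Miniature of the pattern above: each variant carries a stable u8
// discriminant, and `from_repr` must invert `repr` for every variant.
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
enum Action {
    All = 0,
    Search = 1,
    DocumentsAll = 2,
}

impl Action {
    fn repr(self) -> u8 {
        self as u8
    }

    fn from_repr(repr: u8) -> Option<Self> {
        match repr {
            0 => Some(Self::All),
            1 => Some(Self::Search),
            2 => Some(Self::DocumentsAll),
            _ => None,
        }
    }
}

fn main() {
    // Round-trip invariant: decoding what we encoded yields the same action.
    for action in [Action::All, Action::Search, Action::DocumentsAll] {
        assert_eq!(Action::from_repr(action.repr()), Some(action));
    }
}
```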
meilisearch-auth/src/dump.rs

```diff
@@ -1,5 +1,6 @@
+use serde_json::Deserializer;
+
 use std::fs::File;
-use std::io::BufRead;
 use std::io::BufReader;
 use std::io::Write;
 use std::path::Path;
@@ -36,10 +37,9 @@ impl AuthController {
             return Ok(());
         }

-        let mut reader = BufReader::new(File::open(&keys_file_path)?).lines();
-        while let Some(key) = reader.next().transpose()? {
-            let key = serde_json::from_str(&key)?;
-            store.put_api_key(key)?;
+        let reader = BufReader::new(File::open(&keys_file_path)?);
+        for key in Deserializer::from_reader(reader).into_iter() {
+            store.put_api_key(key?)?;
         }

         Ok(())
```
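The rewritten loop swaps line-oriented parsing for `serde_json`'s streaming deserializer, which reads one JSON value after another regardless of how each value is split across lines, so a pretty-printed key in the dump no longer breaks the import. A minimal standalone sketch of that behavior (the `ApiKey` struct is a stand-in, not the crate's `Key` type):

```rust
use serde::Deserialize;
use serde_json::Deserializer;

#[derive(Debug, Deserialize)]
struct ApiKey {
    uid: String,
    actions: Vec<String>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Two concatenated JSON values; the second is pretty-printed across
    // several lines, which a line-by-line `from_str` would mis-split.
    let dump = r#"{"uid":"a","actions":["search"]}
{
  "uid": "b",
  "actions": ["*"]
}"#;

    for key in Deserializer::from_reader(dump.as_bytes()).into_iter::<ApiKey>() {
        println!("{:?}", key?);
    }
    Ok(())
}
```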
meilisearch-auth/src/error.rs

```diff
@@ -1,7 +1,7 @@
 use std::error::Error;

-use meilisearch_error::ErrorCode;
-use meilisearch_error::{internal_error, Code};
+use meilisearch_types::error::{Code, ErrorCode};
+use meilisearch_types::internal_error;
 use serde_json::Value;

 pub type Result<T> = std::result::Result<T, AuthControllerError>;
@@ -18,8 +18,18 @@
     InvalidApiKeyExpiresAt(Value),
     #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
     InvalidApiKeyDescription(Value),
+    #[error(
+        "`name` field value `{0}` is invalid. It should be a string or specified as a null value."
+    )]
+    InvalidApiKeyName(Value),
+    #[error("`uid` field value `{0}` is invalid. It should be a valid UUID v4 string or omitted.")]
+    InvalidApiKeyUid(Value),
     #[error("API key `{0}` not found.")]
     ApiKeyNotFound(String),
+    #[error("`uid` field value `{0}` is already an existing API key.")]
+    ApiKeyAlreadyExists(String),
+    #[error("The `{0}` field cannot be modified for the given resource.")]
+    ImmutableField(String),
     #[error("Internal error: {0}")]
     Internal(Box<dyn Error + Send + Sync + 'static>),
 }
@@ -39,7 +49,11 @@ impl ErrorCode for AuthControllerError {
             Self::InvalidApiKeyIndexes(_) => Code::InvalidApiKeyIndexes,
             Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt,
             Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription,
+            Self::InvalidApiKeyName(_) => Code::InvalidApiKeyName,
             Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound,
+            Self::InvalidApiKeyUid(_) => Code::InvalidApiKeyUid,
+            Self::ApiKeyAlreadyExists(_) => Code::ApiKeyAlreadyExists,
+            Self::ImmutableField(_) => Code::ImmutableField,
             Self::Internal(_) => Code::Internal,
         }
     }
```
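The new variants follow the same `thiserror` pattern as the existing ones: the `#[error(...)]` attribute defines the `Display` message and `{0}` interpolates the first tuple field, which is what ends up in the HTTP error body. A miniature of the pattern (assuming `thiserror` 1.x; `DemoError` is illustrative, not the real enum):

```rust
use thiserror::Error;

// Two of the messages introduced above, reproduced on a tiny demo enum.
#[derive(Debug, Error)]
enum DemoError {
    #[error("`uid` field value `{0}` is invalid. It should be a valid UUID v4 string or omitted.")]
    InvalidApiKeyUid(serde_json::Value),
    #[error("The `{0}` field cannot be modified for the given resource.")]
    ImmutableField(String),
}

fn main() {
    let err = DemoError::ImmutableField("expiresAt".to_string());
    assert_eq!(
        err.to_string(),
        "The `expiresAt` field cannot be modified for the given resource."
    );
    let err = DemoError::InvalidApiKeyUid(serde_json::json!("not-a-uuid"));
    assert!(err.to_string().contains("not-a-uuid"));
}
```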
meilisearch-auth/src/key.rs

```diff
@@ -1,20 +1,25 @@
 use crate::action::Action;
 use crate::error::{AuthControllerError, Result};
-use crate::store::{KeyId, KEY_ID_LENGTH};
-use rand::Rng;
+use crate::store::KeyId;
+
+use meilisearch_types::index_uid::IndexUid;
+use meilisearch_types::star_or::StarOr;
 use serde::{Deserialize, Serialize};
 use serde_json::{from_value, Value};
 use time::format_description::well_known::Rfc3339;
 use time::macros::{format_description, time};
 use time::{Date, OffsetDateTime, PrimitiveDateTime};
+use uuid::Uuid;

 #[derive(Debug, Deserialize, Serialize)]
 pub struct Key {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub description: Option<String>,
-    pub id: KeyId,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub name: Option<String>,
+    pub uid: KeyId,
     pub actions: Vec<Action>,
-    pub indexes: Vec<String>,
+    pub indexes: Vec<StarOr<IndexUid>>,
     #[serde(with = "time::serde::rfc3339::option")]
     pub expires_at: Option<OffsetDateTime>,
     #[serde(with = "time::serde::rfc3339")]
@@ -25,16 +30,27 @@ pub struct Key {

 impl Key {
     pub fn create_from_value(value: Value) -> Result<Self> {
-        let description = match value.get("description") {
-            Some(Value::Null) => None,
-            Some(des) => Some(
-                from_value(des.clone())
-                    .map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?,
-            ),
-            None => None,
+        let name = match value.get("name") {
+            None | Some(Value::Null) => None,
+            Some(des) => from_value(des.clone())
+                .map(Some)
+                .map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone()))?,
         };

-        let id = generate_id();
+        let description = match value.get("description") {
+            None | Some(Value::Null) => None,
+            Some(des) => from_value(des.clone())
+                .map(Some)
+                .map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?,
+        };
+
+        let uid = value.get("uid").map_or_else(
+            || Ok(Uuid::new_v4()),
+            |uid| {
+                from_value(uid.clone())
+                    .map_err(|_| AuthControllerError::InvalidApiKeyUid(uid.clone()))
+            },
+        )?;

         let actions = value
             .get("actions")
@@ -61,8 +77,9 @@ impl Key {
         let updated_at = created_at;

         Ok(Self {
+            name,
             description,
-            id,
+            uid,
             actions,
             indexes,
             expires_at,
@@ -78,20 +95,34 @@ impl Key {
             self.description = des?;
         }

-        if let Some(act) = value.get("actions") {
-            let act = from_value(act.clone())
-                .map_err(|_| AuthControllerError::InvalidApiKeyActions(act.clone()));
-            self.actions = act?;
+        if let Some(des) = value.get("name") {
+            let des = from_value(des.clone())
+                .map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone()));
+            self.name = des?;
         }

-        if let Some(ind) = value.get("indexes") {
-            let ind = from_value(ind.clone())
-                .map_err(|_| AuthControllerError::InvalidApiKeyIndexes(ind.clone()));
-            self.indexes = ind?;
+        if value.get("uid").is_some() {
+            return Err(AuthControllerError::ImmutableField("uid".to_string()));
         }

-        if let Some(exp) = value.get("expiresAt") {
-            self.expires_at = parse_expiration_date(exp)?;
+        if value.get("actions").is_some() {
+            return Err(AuthControllerError::ImmutableField("actions".to_string()));
         }

+        if value.get("indexes").is_some() {
+            return Err(AuthControllerError::ImmutableField("indexes".to_string()));
+        }
+
+        if value.get("expiresAt").is_some() {
+            return Err(AuthControllerError::ImmutableField("expiresAt".to_string()));
+        }
+
+        if value.get("createdAt").is_some() {
+            return Err(AuthControllerError::ImmutableField("createdAt".to_string()));
+        }
+
+        if value.get("updatedAt").is_some() {
+            return Err(AuthControllerError::ImmutableField("updatedAt".to_string()));
+        }
+
         self.updated_at = OffsetDateTime::now_utc();
@@ -101,11 +132,13 @@ impl Key {

     pub(crate) fn default_admin() -> Self {
         let now = OffsetDateTime::now_utc();
+        let uid = Uuid::new_v4();
         Self {
-            description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()),
-            id: generate_id(),
+            name: Some("Default Admin API Key".to_string()),
+            description: Some("Use it for anything that is not a search operation. Caution! Do not expose it on a public frontend".to_string()),
+            uid,
             actions: vec![Action::All],
-            indexes: vec!["*".to_string()],
+            indexes: vec![StarOr::Star],
             expires_at: None,
             created_at: now,
             updated_at: now,
@@ -114,13 +147,13 @@ impl Key {

     pub(crate) fn default_search() -> Self {
         let now = OffsetDateTime::now_utc();
+        let uid = Uuid::new_v4();
         Self {
-            description: Some(
-                "Default Search API Key (Use it to search from the frontend)".to_string(),
-            ),
-            id: generate_id(),
+            name: Some("Default Search API Key".to_string()),
+            description: Some("Use it to search from the frontend".to_string()),
+            uid,
             actions: vec![Action::Search],
-            indexes: vec!["*".to_string()],
+            indexes: vec![StarOr::Star],
             expires_at: None,
             created_at: now,
             updated_at: now,
@@ -128,19 +161,6 @@ impl Key {
     }
 }

-/// Generate a printable key of 64 characters using thread_rng.
-fn generate_id() -> [u8; KEY_ID_LENGTH] {
-    const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
-
-    let mut rng = rand::thread_rng();
-    let mut bytes = [0; KEY_ID_LENGTH];
-    for byte in bytes.iter_mut() {
-        *byte = CHARSET[rng.gen_range(0..CHARSET.len())];
-    }
-
-    bytes
-}
-
 fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
     match value {
         Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
```
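`Key::indexes` changes from `Vec<String>` to `Vec<StarOr<IndexUid>>`, turning the `"*"` wildcard into a dedicated variant instead of a magic string compared all over the codebase. The real `StarOr` lives in `meilisearch-types` and is not shown in this diff; the following is a hypothetical miniature of the idea, only to illustrate the serde behavior:

```rust
use serde::{Deserialize, Deserializer};

// Hypothetical miniature: "*" deserializes to the wildcard variant,
// anything else to a plain value (the real type is generic over T).
#[derive(Debug, PartialEq)]
enum StarOr<T> {
    Star,
    Other(T),
}

impl<'de> Deserialize<'de> for StarOr<String> {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let s = String::deserialize(deserializer)?;
        Ok(if s == "*" { StarOr::Star } else { StarOr::Other(s) })
    }
}

fn main() {
    let indexes: Vec<StarOr<String>> = serde_json::from_str(r#"["*", "movies"]"#).unwrap();
    assert_eq!(indexes[0], StarOr::Star);
    assert_eq!(indexes[1], StarOr::Other("movies".to_string()));
}
```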
meilisearch-auth/src/lib.rs

```diff
@@ -5,18 +5,20 @@ mod key;
 mod store;

 use std::collections::{HashMap, HashSet};
+use std::ops::Deref;
 use std::path::Path;
-use std::str::from_utf8;
 use std::sync::Arc;

 use serde::{Deserialize, Serialize};
 use serde_json::Value;
-use sha2::{Digest, Sha256};
 use time::OffsetDateTime;
+use uuid::Uuid;

 pub use action::{actions, Action};
 use error::{AuthControllerError, Result};
 pub use key::Key;
+use meilisearch_types::star_or::StarOr;
+use store::generate_key_as_base64;
 pub use store::open_auth_store_env;
 use store::HeedAuthStore;
@@ -42,62 +44,77 @@ impl AuthController {

     pub fn create_key(&self, value: Value) -> Result<Key> {
         let key = Key::create_from_value(value)?;
-        self.store.put_api_key(key)
+        match self.store.get_api_key(key.uid)? {
+            Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(
+                key.uid.to_string(),
+            )),
+            None => self.store.put_api_key(key),
+        }
     }

-    pub fn update_key(&self, key: impl AsRef<str>, value: Value) -> Result<Key> {
-        let mut key = self.get_key(key)?;
+    pub fn update_key(&self, uid: Uuid, value: Value) -> Result<Key> {
+        let mut key = self.get_key(uid)?;
         key.update_from_value(value)?;
         self.store.put_api_key(key)
     }

-    pub fn get_key(&self, key: impl AsRef<str>) -> Result<Key> {
+    pub fn get_key(&self, uid: Uuid) -> Result<Key> {
         self.store
-            .get_api_key(&key)?
-            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))
+            .get_api_key(uid)?
+            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))
     }

+    pub fn get_optional_uid_from_encoded_key(&self, encoded_key: &[u8]) -> Result<Option<Uuid>> {
+        match &self.master_key {
+            Some(master_key) => self
+                .store
+                .get_uid_from_encoded_key(encoded_key, master_key.as_bytes()),
+            None => Ok(None),
+        }
+    }
+
+    pub fn get_uid_from_encoded_key(&self, encoded_key: &str) -> Result<Uuid> {
+        self.get_optional_uid_from_encoded_key(encoded_key.as_bytes())?
+            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(encoded_key.to_string()))
+    }
+
     pub fn get_key_filters(
         &self,
-        key: impl AsRef<str>,
+        uid: Uuid,
         search_rules: Option<SearchRules>,
     ) -> Result<AuthFilter> {
         let mut filters = AuthFilter::default();
-        if self
-            .master_key
-            .as_ref()
-            .map_or(false, |master_key| master_key != key.as_ref())
-        {
-            let key = self
-                .store
-                .get_api_key(&key)?
-                .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))?;
+        let key = self
+            .store
+            .get_api_key(uid)?
+            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))?;

-            if !key.indexes.iter().any(|i| i.as_str() == "*") {
-                filters.search_rules = match search_rules {
-                    // Intersect search_rules with parent key authorized indexes.
-                    Some(search_rules) => SearchRules::Map(
-                        key.indexes
-                            .into_iter()
-                            .filter_map(|index| {
-                                search_rules
-                                    .get_index_search_rules(&index)
-                                    .map(|index_search_rules| (index, Some(index_search_rules)))
-                            })
-                            .collect(),
-                    ),
-                    None => SearchRules::Set(key.indexes.into_iter().collect()),
-                };
-            } else if let Some(search_rules) = search_rules {
-                filters.search_rules = search_rules;
-            }
-
-            filters.allow_index_creation = key
-                .actions
-                .iter()
-                .any(|&action| action == Action::IndexesAdd || action == Action::All);
+        if !key.indexes.iter().any(|i| i == &StarOr::Star) {
+            filters.search_rules = match search_rules {
+                // Intersect search_rules with parent key authorized indexes.
+                Some(search_rules) => SearchRules::Map(
+                    key.indexes
+                        .into_iter()
+                        .filter_map(|index| {
+                            search_rules.get_index_search_rules(index.deref()).map(
+                                |index_search_rules| {
+                                    (String::from(index), Some(index_search_rules))
+                                },
+                            )
+                        })
+                        .collect(),
+                ),
+                None => SearchRules::Set(key.indexes.into_iter().map(String::from).collect()),
+            };
+        } else if let Some(search_rules) = search_rules {
+            filters.search_rules = search_rules;
+        }
+
+        filters.allow_index_creation = key
+            .actions
+            .iter()
+            .any(|&action| action == Action::IndexesAdd || action == Action::All);

         Ok(filters)
     }
@@ -105,13 +122,11 @@ impl AuthController {
         self.store.list_api_keys()
     }

-    pub fn delete_key(&self, key: impl AsRef<str>) -> Result<()> {
-        if self.store.delete_api_key(&key)? {
+    pub fn delete_key(&self, uid: Uuid) -> Result<()> {
+        if self.store.delete_api_key(uid)? {
             Ok(())
         } else {
-            Err(AuthControllerError::ApiKeyNotFound(
-                key.as_ref().to_string(),
-            ))
+            Err(AuthControllerError::ApiKeyNotFound(uid.to_string()))
         }
     }
@@ -121,32 +136,32 @@ impl AuthController {

     /// Generate a valid key from a key id using the current master key.
     /// Returns None if no master key has been set.
-    pub fn generate_key(&self, id: &str) -> Option<String> {
+    pub fn generate_key(&self, uid: Uuid) -> Option<String> {
         self.master_key
             .as_ref()
-            .map(|master_key| generate_key(master_key.as_bytes(), id))
+            .map(|master_key| generate_key_as_base64(uid.as_bytes(), master_key.as_bytes()))
     }

     /// Check if the provided key is authorized to make a specific action
     /// without checking if the key is valid.
     pub fn is_key_authorized(
         &self,
-        key: &[u8],
+        uid: Uuid,
         action: Action,
         index: Option<&str>,
     ) -> Result<bool> {
         match self
             .store
             // check if the key has access to all indexes.
-            .get_expiration_date(key, action, None)?
+            .get_expiration_date(uid, action, None)?
             .or(match index {
                 // else check if the key has access to the requested index.
                 Some(index) => {
                     self.store
-                        .get_expiration_date(key, action, Some(index.as_bytes()))?
+                        .get_expiration_date(uid, action, Some(index.as_bytes()))?
                 }
                 // or to any index if no index has been requested.
-                None => self.store.prefix_first_expiration_date(key, action)?,
+                None => self.store.prefix_first_expiration_date(uid, action)?,
             }) {
             // check expiration date.
             Some(Some(exp)) => Ok(OffsetDateTime::now_utc() < exp),
@@ -156,29 +171,6 @@ impl AuthController {
             None => Ok(false),
         }
     }
-
-    /// Check if the provided key is valid
-    /// without checking if the key is authorized to make a specific action.
-    pub fn is_key_valid(&self, key: &[u8]) -> Result<bool> {
-        if let Some(id) = self.store.get_key_id(key) {
-            let id = from_utf8(&id)?;
-            if let Some(generated) = self.generate_key(id) {
-                return Ok(generated.as_bytes() == key);
-            }
-        }
-
-        Ok(false)
-    }
-
-    /// Check if the provided key is valid
-    /// and is authorized to make a specific action.
-    pub fn authenticate(&self, key: &[u8], action: Action, index: Option<&str>) -> Result<bool> {
-        if self.is_key_authorized(key, action, index)? {
-            self.is_key_valid(key)
-        } else {
-            Ok(false)
-        }
-    }
 }

 pub struct AuthFilter {
@@ -258,12 +250,6 @@ pub struct IndexSearchRules {
     pub filter: Option<serde_json::Value>,
 }

-fn generate_key(master_key: &[u8], keyid: &str) -> String {
-    let key = [keyid.as_bytes(), master_key].concat();
-    let sha = Sha256::digest(&key);
-    format!("{}{:x}", keyid, sha)
-}
-
 fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
     store.put_api_key(Key::default_admin())?;
     store.put_api_key(Key::default_search())?;
```
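`create_key` now checks the store before inserting and fails with `ApiKeyAlreadyExists` instead of silently overwriting a key that has the same `uid`. A sketch of those semantics against an assumed in-memory map (the real store is LMDB-backed; everything below is illustrative):

```rust
use std::collections::HashMap;

#[derive(Debug)]
enum CreateError {
    ApiKeyAlreadyExists(String),
}

// Check-then-insert: refuse to create a key whose uid is already taken.
fn create_key(
    store: &mut HashMap<String, Vec<String>>,
    uid: &str,
    actions: Vec<String>,
) -> Result<(), CreateError> {
    if store.contains_key(uid) {
        return Err(CreateError::ApiKeyAlreadyExists(uid.to_string()));
    }
    store.insert(uid.to_string(), actions);
    Ok(())
}

fn main() {
    let mut store = HashMap::new();
    assert!(create_key(&mut store, "9b4bb3c1", vec!["search".into()]).is_ok());
    // A second creation with the same uid is rejected rather than overwriting.
    assert!(matches!(
        create_key(&mut store, "9b4bb3c1", vec!["*".into()]),
        Err(CreateError::ApiKeyAlreadyExists(_))
    ));
}
```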
meilisearch-auth/src/store.rs

```diff
@@ -1,27 +1,31 @@
-use enum_iterator::IntoEnumIterator;
 use std::borrow::Cow;
 use std::cmp::Reverse;
 use std::convert::TryFrom;
 use std::convert::TryInto;
 use std::fs::create_dir_all;
+use std::ops::Deref;
 use std::path::Path;
 use std::str;
 use std::sync::Arc;

+use enum_iterator::IntoEnumIterator;
+use hmac::{Hmac, Mac};
+use meilisearch_types::star_or::StarOr;
 use milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
 use milli::heed::{Database, Env, EnvOpenOptions, RwTxn};
+use sha2::{Digest, Sha256};
 use time::OffsetDateTime;
+use uuid::Uuid;

 use super::error::Result;
 use super::{Action, Key};

 const AUTH_STORE_SIZE: usize = 1_073_741_824; //1GiB
-pub const KEY_ID_LENGTH: usize = 8;
 const AUTH_DB_PATH: &str = "auth";
 const KEY_DB_NAME: &str = "api-keys";
 const KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME: &str = "keyid-action-index-expiration";

-pub type KeyId = [u8; KEY_ID_LENGTH];
+pub type KeyId = Uuid;
@@ -73,33 +77,43 @@ impl HeedAuthStore {
     }

     pub fn put_api_key(&self, key: Key) -> Result<Key> {
+        let uid = key.uid;
         let mut wtxn = self.env.write_txn()?;
-        self.keys.put(&mut wtxn, &key.id, &key)?;

-        let id = key.id;
+        self.keys.put(&mut wtxn, uid.as_bytes(), &key)?;
+
         // delete key from inverted database before refilling it.
-        self.delete_key_from_inverted_db(&mut wtxn, &id)?;
+        self.delete_key_from_inverted_db(&mut wtxn, &uid)?;
         // create inverted database.
         let db = self.action_keyid_index_expiration;

         let actions = if key.actions.contains(&Action::All) {
             // if key.actions contains All, we iterate over all actions.
             Action::into_enum_iter().collect()
+        } else if key.actions.contains(&Action::DocumentsAll) {
+            // if key.actions contains DocumentsAll, add all actions related to documents.
+            let mut actions = key.actions.clone();
+            actions.append(&mut vec![
+                Action::DocumentsAdd,
+                Action::DocumentsGet,
+                Action::DocumentsDelete,
+            ]);
+            actions
         } else {
             key.actions.clone()
         };

-        let no_index_restriction = key.indexes.contains(&"*".to_owned());
+        let no_index_restriction = key.indexes.contains(&StarOr::Star);
         for action in actions {
             if no_index_restriction {
                 // If there is no index restriction we put None.
-                db.put(&mut wtxn, &(&id, &action, None), &key.expires_at)?;
+                db.put(&mut wtxn, &(&uid, &action, None), &key.expires_at)?;
             } else {
                 // else we create a key for each index.
                 for index in key.indexes.iter() {
                     db.put(
                         &mut wtxn,
-                        &(&id, &action, Some(index.as_bytes())),
+                        &(&uid, &action, Some(index.deref().as_bytes())),
                         &key.expires_at,
                     )?;
                 }
@@ -111,24 +125,39 @@ impl HeedAuthStore {
         Ok(key)
     }

-    pub fn get_api_key(&self, key: impl AsRef<str>) -> Result<Option<Key>> {
+    pub fn get_api_key(&self, uid: Uuid) -> Result<Option<Key>> {
         let rtxn = self.env.read_txn()?;
-        match self.get_key_id(key.as_ref().as_bytes()) {
-            Some(id) => self.keys.get(&rtxn, &id).map_err(|e| e.into()),
-            None => Ok(None),
-        }
+        self.keys.get(&rtxn, uid.as_bytes()).map_err(|e| e.into())
     }

-    pub fn delete_api_key(&self, key: impl AsRef<str>) -> Result<bool> {
+    pub fn get_uid_from_encoded_key(
+        &self,
+        encoded_key: &[u8],
+        master_key: &[u8],
+    ) -> Result<Option<Uuid>> {
+        let rtxn = self.env.read_txn()?;
+        let uid = self
+            .keys
+            .remap_data_type::<DecodeIgnore>()
+            .iter(&rtxn)?
+            .filter_map(|res| match res {
+                Ok((uid, _))
+                    if generate_key_as_base64(uid, master_key).as_bytes() == encoded_key =>
+                {
+                    let (uid, _) = try_split_array_at(uid)?;
+                    Some(Uuid::from_bytes(*uid))
+                }
+                _ => None,
+            })
+            .next();
+
+        Ok(uid)
+    }
+
+    pub fn delete_api_key(&self, uid: Uuid) -> Result<bool> {
         let mut wtxn = self.env.write_txn()?;
-        let existing = match self.get_key_id(key.as_ref().as_bytes()) {
-            Some(id) => {
-                let existing = self.keys.delete(&mut wtxn, &id)?;
-                self.delete_key_from_inverted_db(&mut wtxn, &id)?;
-                existing
-            }
-            None => false,
-        };
+        let existing = self.keys.delete(&mut wtxn, uid.as_bytes())?;
+        self.delete_key_from_inverted_db(&mut wtxn, &uid)?;
         wtxn.commit()?;

         Ok(existing)
@@ -147,49 +176,37 @@ impl HeedAuthStore {

     pub fn get_expiration_date(
         &self,
-        key: &[u8],
+        uid: Uuid,
         action: Action,
         index: Option<&[u8]>,
     ) -> Result<Option<Option<OffsetDateTime>>> {
         let rtxn = self.env.read_txn()?;
-        match self.get_key_id(key) {
-            Some(id) => {
-                let tuple = (&id, &action, index);
-                Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
-            }
-            None => Ok(None),
-        }
+        let tuple = (&uid, &action, index);
+        Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
     }

     pub fn prefix_first_expiration_date(
         &self,
-        key: &[u8],
+        uid: Uuid,
         action: Action,
     ) -> Result<Option<Option<OffsetDateTime>>> {
         let rtxn = self.env.read_txn()?;
-        match self.get_key_id(key) {
-            Some(id) => {
-                let tuple = (&id, &action, None);
-                Ok(self
-                    .action_keyid_index_expiration
-                    .prefix_iter(&rtxn, &tuple)?
-                    .next()
-                    .transpose()?
-                    .map(|(_, expiration)| expiration))
-            }
-            None => Ok(None),
-        }
-    }
+        let tuple = (&uid, &action, None);
+        let exp = self
+            .action_keyid_index_expiration
+            .prefix_iter(&rtxn, &tuple)?
+            .next()
+            .transpose()?
+            .map(|(_, expiration)| expiration);

-    pub fn get_key_id(&self, key: &[u8]) -> Option<KeyId> {
-        try_split_array_at::<_, KEY_ID_LENGTH>(key).map(|(id, _)| *id)
+        Ok(exp)
     }

     fn delete_key_from_inverted_db(&self, wtxn: &mut RwTxn, key: &KeyId) -> Result<()> {
         let mut iter = self
             .action_keyid_index_expiration
             .remap_types::<ByteSlice, DecodeIgnore>()
-            .prefix_iter_mut(wtxn, key)?;
+            .prefix_iter_mut(wtxn, key.as_bytes())?;
         while iter.next().transpose()?.is_some() {
             // safety: we don't keep references from inside the LMDB database.
             unsafe { iter.del_current()? };
@@ -200,21 +217,22 @@ impl HeedAuthStore {
 }

 /// Codec allowing to retrieve the expiration date of an action,
-/// optionnally on a spcific index, for a given key.
+/// optionally on a specific index, for a given key.
 pub struct KeyIdActionCodec;

 impl<'a> milli::heed::BytesDecode<'a> for KeyIdActionCodec {
     type DItem = (KeyId, Action, Option<&'a [u8]>);

     fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
-        let (key_id, action_bytes) = try_split_array_at(bytes)?;
+        let (key_id_bytes, action_bytes) = try_split_array_at(bytes)?;
         let (action_bytes, index) = match try_split_array_at(action_bytes)? {
             (action, []) => (action, None),
             (action, index) => (action, Some(index)),
         };
+        let key_id = Uuid::from_bytes(*key_id_bytes);
         let action = Action::from_repr(u8::from_be_bytes(*action_bytes))?;

-        Some((*key_id, action, index))
+        Some((key_id, action, index))
     }
 }
@@ -224,7 +242,7 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec {
     fn bytes_encode((key_id, action, index): &Self::EItem) -> Option<Cow<[u8]>> {
         let mut bytes = Vec::new();

-        bytes.extend_from_slice(*key_id);
+        bytes.extend_from_slice(key_id.as_bytes());
         let action_bytes = u8::to_be_bytes(action.repr());
         bytes.extend_from_slice(&action_bytes);
         if let Some(index) = index {
@@ -235,6 +253,15 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec {
     }
 }

+pub fn generate_key_as_base64(uid: &[u8], master_key: &[u8]) -> String {
+    let master_key_sha = Sha256::digest(master_key);
+    let mut mac = Hmac::<Sha256>::new_from_slice(master_key_sha.as_slice()).unwrap();
+    mac.update(uid);
+
+    let result = mac.finalize();
+    base64::encode_config(result.into_bytes(), base64::URL_SAFE_NO_PAD)
+}
+
 /// Divides one slice into two at an index, returns `None` if mid is out of bounds.
 pub fn try_split_at<T>(slice: &[T], mid: usize) -> Option<(&[T], &[T])> {
     if mid <= slice.len() {
```
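`generate_key_as_base64` is the heart of the new scheme: the visible API key is `base64url(HMAC-SHA256(SHA256(master_key), uid))`, so keys never need to be stored in clear, and `get_uid_from_encoded_key` can recover a `uid` by re-deriving the key for every stored `uid` and comparing. A self-contained sketch reusing the function above (assumed crate versions: `hmac` 0.12, `sha2` 0.10, `base64` 0.13, `uuid` 0.8 with the `v4` feature):

```rust
use hmac::{Hmac, Mac};
use sha2::{Digest, Sha256};
use uuid::Uuid;

// Same derivation as in the diff: HMAC-SHA256 keyed with the SHA-256 of the
// master key, applied to the raw uid bytes, then URL-safe base64 (no pad).
pub fn generate_key_as_base64(uid: &[u8], master_key: &[u8]) -> String {
    let master_key_sha = Sha256::digest(master_key);
    let mut mac = Hmac::<Sha256>::new_from_slice(master_key_sha.as_slice()).unwrap();
    mac.update(uid);
    base64::encode_config(mac.finalize().into_bytes(), base64::URL_SAFE_NO_PAD)
}

fn main() {
    let uid = Uuid::new_v4();
    let k1 = generate_key_as_base64(uid.as_bytes(), b"master");
    let k2 = generate_key_as_base64(uid.as_bytes(), b"master");
    // Deterministic: the same uid and master key always yield the same key,
    // which is what makes uid recovery by re-derivation possible.
    assert_eq!(k1, k2);
    // Changing the master key invalidates every derived key at once.
    assert_ne!(k1, generate_key_as_base64(uid.as_bytes(), b"other"));
}
```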
meilisearch-http/Cargo.toml

```diff
@@ -4,7 +4,7 @@ description = "Meilisearch HTTP server"
 edition = "2021"
 license = "MIT"
 name = "meilisearch-http"
-version = "0.27.1"
+version = "0.28.0"

 [[bin]]
 name = "meilisearch"
@@ -45,7 +45,7 @@ itertools = "0.10.3"
 jsonwebtoken = "8.0.1"
 log = "0.4.14"
 meilisearch-auth = { path = "../meilisearch-auth" }
-meilisearch-error = { path = "../meilisearch-error" }
+meilisearch-types = { path = "../meilisearch-types" }
 meilisearch-lib = { path = "../meilisearch-lib" }
 mime = "0.3.16"
 num_cpus = "1.13.1"
@@ -57,10 +57,12 @@ platform-dirs = "0.3.0"
 rand = "0.8.5"
 rayon = "1.5.1"
 regex = "1.5.5"
+reqwest = { version = "0.11.4", features = ["rustls-tls", "json"], default-features = false }
 rustls = "0.20.4"
 rustls-pemfile = "0.3.0"
 segment = { version = "0.2.0", optional = true }
 serde = { version = "1.0.136", features = ["derive"] }
+serde-cs = "0.2.3"
 serde_json = { version = "1.0.79", features = ["preserve_order"] }
 sha2 = "0.10.2"
 siphasher = "0.3.10"
@@ -73,14 +75,14 @@ thiserror = "1.0.30"
 time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tokio = { version = "1.17.0", features = ["full"] }
 tokio-stream = "0.1.8"
-uuid = { version = "0.8.2", features = ["serde"] }
+uuid = { version = "0.8.2", features = ["serde", "v4"] }
 walkdir = "2.3.2"

 [dev-dependencies]
 actix-rt = "2.7.0"
 assert-json-diff = "2.0.1"
 manifest-dir-macros = "0.1.14"
 maplit = "1.0.2"
 paste = "1.0.6"
 serde_url_params = "0.2.1"
 urlencoding = "2.1.0"
```
meilisearch-http/src/analytics/mod.rs

```diff
@@ -61,7 +61,7 @@
     /// The method used to publish most analytics that do not need to be batched every hours
     fn publish(&self, event_name: String, send: Value, request: Option<&HttpRequest>);

-    /// This method should be called to aggergate a get search
+    /// This method should be called to aggregate a get search
     fn get_search(&self, aggregate: SearchAggregator);

     /// This method should be called to aggregate a post search
```
meilisearch-http/src/analytics/segment_analytics.rs

```diff
@@ -31,6 +31,8 @@ use crate::Opt;

 use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};

+const ANALYTICS_HEADER: &str = "X-Meilisearch-Client";
+
 /// Write the instance-uid in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-instance-uid`. Ignore the errors.
 fn write_user_id(db_path: &Path, user_id: &str) {
     let _ = fs::write(db_path.join("instance-uid"), user_id.as_bytes());
@@ -48,7 +50,8 @@ const SEGMENT_API_KEY: &str = "P3FWhhEsJiEDCuEHpmcN9DHcK4hVfBvb";
 pub fn extract_user_agents(request: &HttpRequest) -> Vec<String> {
     request
         .headers()
-        .get(USER_AGENT)
+        .get(ANALYTICS_HEADER)
+        .or_else(|| request.headers().get(USER_AGENT))
         .map(|header| header.to_str().ok())
         .flatten()
         .unwrap_or("unknown")
@@ -78,7 +81,19 @@ impl SegmentAnalytics {
         let user_id = user_id.unwrap_or_else(|| Uuid::new_v4().to_string());
         write_user_id(&opt.db_path, &user_id);

-        let client = HttpClient::default();
+        let client = reqwest::Client::builder()
+            .connect_timeout(Duration::from_secs(10))
+            .build();
+
+        // if reqwest throws an error we won't be able to send analytics
+        if client.is_err() {
+            return super::MockAnalytics::new(opt);
+        }
+
+        let client = HttpClient::new(
+            client.unwrap(),
+            "https://telemetry.meilisearch.com".to_string(),
+        );
         let user = User::UserId { user_id };
         let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
@@ -130,11 +145,7 @@ impl SegmentAnalytics {
 impl super::Analytics for SegmentAnalytics {
     fn publish(&self, event_name: String, mut send: Value, request: Option<&HttpRequest>) {
-        let user_agent = request
-            .map(|req| req.headers().get(USER_AGENT))
-            .flatten()
-            .map(|header| header.to_str().unwrap_or("unknown"))
-            .map(|s| s.split(';').map(str::trim).collect::<Vec<&str>>());
+        let user_agent = request.map(|req| extract_user_agents(req));

         send["user-agent"] = json!(user_agent);
         let event = Track {
@@ -363,7 +374,7 @@ pub struct SearchAggregator {
     highlight_pre_tag: bool,
     highlight_post_tag: bool,
     crop_marker: bool,
-    matches: bool,
+    show_matches_position: bool,
     crop_length: bool,
 }
@@ -415,11 +426,11 @@ impl SearchAggregator {
         ret.max_limit = query.limit;
         ret.max_offset = query.offset.unwrap_or_default();

-        ret.highlight_pre_tag = query.highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG;
-        ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG;
-        ret.crop_marker = query.crop_marker != DEFAULT_CROP_MARKER;
-        ret.crop_length = query.crop_length != DEFAULT_CROP_LENGTH;
-        ret.matches = query.matches;
+        ret.highlight_pre_tag = query.highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG();
+        ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG();
+        ret.crop_marker = query.crop_marker != DEFAULT_CROP_MARKER();
+        ret.crop_length = query.crop_length != DEFAULT_CROP_LENGTH();
+        ret.show_matches_position = query.show_matches_position;

         ret
     }
@@ -472,7 +483,7 @@ impl SearchAggregator {
         self.highlight_pre_tag |= other.highlight_pre_tag;
         self.highlight_post_tag |= other.highlight_post_tag;
         self.crop_marker |= other.crop_marker;
-        self.matches |= other.matches;
+        self.show_matches_position |= other.show_matches_position;
         self.crop_length |= other.crop_length;
     }
@@ -484,7 +495,7 @@ impl SearchAggregator {
         let percentile_99th = 0.99 * (self.total_succeeded as f64 - 1.) + 1.;
         // we get all the values in a sorted manner
         let time_spent = self.time_spent.into_sorted_vec();
-        // We are only intersted by the slowest value of the 99th fastest results
+        // We are only interested by the slowest value of the 99th fastest results
         let time_spent = time_spent.get(percentile_99th as usize);

         let properties = json!({
```
@@ -515,7 +526,7 @@ impl SearchAggregator {
|
||||
"highlight_pre_tag": self.highlight_pre_tag,
|
||||
"highlight_post_tag": self.highlight_post_tag,
|
||||
"crop_marker": self.crop_marker,
|
||||
"matches": self.matches,
|
||||
"show_matches_position": self.show_matches_position,
|
||||
"crop_length": self.crop_length,
|
||||
},
|
||||
});
|
||||
@@ -563,8 +574,8 @@ impl DocumentsAggregator {
|
||||
let content_type = request
|
||||
.headers()
|
||||
.get(CONTENT_TYPE)
|
||||
.map(|s| s.to_str().unwrap_or("unkown"))
|
||||
.unwrap_or("unkown")
|
||||
.map(|s| s.to_str().unwrap_or("unknown"))
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
ret.content_types.insert(content_type);
|
||||
ret.index_creation = index_creation;
|
||||
|
||||
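(Aside, not part of the diff: a test-style sketch of the new header fallback in `extract_user_agents`. It assumes the actix-web `TestRequest` helper; the module and test names are made up for illustration.)

    #[cfg(test)]
    mod user_agent_sketch {
        use actix_web::http::header::USER_AGENT;
        use actix_web::test::TestRequest;

        use super::extract_user_agents;

        #[test]
        fn custom_header_wins_over_user_agent() {
            // Both headers are present; the X-Meilisearch-Client header is
            // looked up first and should win.
            let req = TestRequest::default()
                .insert_header(("X-Meilisearch-Client", "Meilisearch Dart (v0.5.0)"))
                .insert_header((USER_AGENT, "curl/7.81.0"))
                .to_http_request();
            assert_eq!(
                extract_user_agents(&req),
                vec!["Meilisearch Dart (v0.5.0)".to_string()]
            );
        }
    }
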
@@ -1,6 +1,6 @@
 use actix_web as aweb;
 use aweb::error::{JsonPayloadError, QueryPayloadError};
-use meilisearch_error::{Code, ErrorCode, ResponseError};
+use meilisearch_types::error::{Code, ErrorCode, ResponseError};

 #[derive(Debug, thiserror::Error)]
 pub enum MeilisearchHttpError {

@@ -1,4 +1,4 @@
-use meilisearch_error::{Code, ErrorCode};
+use meilisearch_types::error::{Code, ErrorCode};

 #[derive(Debug, thiserror::Error)]
 pub enum AuthenticationError {

@@ -5,12 +5,11 @@ use std::ops::Deref;
 use std::pin::Pin;

 use actix_web::FromRequest;
-use error::AuthenticationError;
 use futures::future::err;
 use futures::Future;
-use meilisearch_error::{Code, ResponseError};

+use error::AuthenticationError;
 use meilisearch_auth::{AuthController, AuthFilter};
+use meilisearch_types::error::{Code, ResponseError};

 pub struct GuardedData<P, D> {
     data: D,
@@ -132,6 +131,7 @@ pub mod policies {
     use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
     use serde::{Deserialize, Serialize};
     use time::OffsetDateTime;
+    use uuid::Uuid;

     use crate::extractors::authentication::Policy;
     use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules};
@@ -146,34 +146,21 @@ pub mod policies {
         validation
     }

-    /// Extracts the key prefix used to sign the payload from the payload, without performing any validation.
-    fn extract_key_prefix(token: &str) -> Option<String> {
+    /// Extracts the key id used to sign the payload, without performing any validation.
+    fn extract_key_id(token: &str) -> Option<Uuid> {
         let mut validation = tenant_token_validation();
         validation.insecure_disable_signature_validation();
         let dummy_key = DecodingKey::from_secret(b"secret");
         let token_data = decode::<Claims>(token, &dummy_key, &validation).ok()?;

         // get token fields without validating it.
-        let Claims { api_key_prefix, .. } = token_data.claims;
-        Some(api_key_prefix)
+        let Claims { api_key_uid, .. } = token_data.claims;
+        Some(api_key_uid)
     }

-    pub struct MasterPolicy;
-
-    impl Policy for MasterPolicy {
-        fn authenticate(
-            auth: AuthController,
-            token: &str,
-            _index: Option<&str>,
-        ) -> Option<AuthFilter> {
-            if let Some(master_key) = auth.get_master_key() {
-                if master_key == token {
-                    return Some(AuthFilter::default());
-                }
-            }
-
-            None
-        }
+    fn is_keys_action(action: u8) -> bool {
+        use actions::*;
+        matches!(action, KEYS_GET | KEYS_CREATE | KEYS_UPDATE | KEYS_DELETE)
     }

     pub struct ActionPolicy<const A: u8>;

@@ -185,7 +172,12 @@ pub mod policies {
             index: Option<&str>,
         ) -> Option<AuthFilter> {
             // authenticate if token is the master key.
-            if auth.get_master_key().map_or(true, |mk| mk == token) {
+            // master key can only have access to keys routes.
+            // if master key is None only keys routes are inaccessible.
+            if auth
+                .get_master_key()
+                .map_or_else(|| !is_keys_action(A), |mk| mk == token)
+            {
                 return Some(AuthFilter::default());
             }

@@ -195,8 +187,10 @@ pub mod policies {
                 return Some(filters);
             } else if let Some(action) = Action::from_repr(A) {
                 // API key
-                if let Ok(true) = auth.authenticate(token.as_bytes(), action, index) {
-                    return auth.get_key_filters(token, None).ok();
+                if let Ok(Some(uid)) = auth.get_optional_uid_from_encoded_key(token.as_bytes()) {
+                    if let Ok(true) = auth.is_key_authorized(uid, action, index) {
+                        return auth.get_key_filters(uid, None).ok();
+                    }
                 }
             }

@@ -215,14 +209,11 @@ pub mod policies {
                 return None;
             }

-            let api_key_prefix = extract_key_prefix(token)?;
+            let uid = extract_key_id(token)?;
             // check if parent key is authorized to do the action.
-            if auth
-                .is_key_authorized(api_key_prefix.as_bytes(), Action::Search, index)
-                .ok()?
-            {
+            if auth.is_key_authorized(uid, Action::Search, index).ok()? {
                 // Check if tenant token is valid.
-                let key = auth.generate_key(&api_key_prefix)?;
+                let key = auth.generate_key(uid)?;
                 let data = decode::<Claims>(
                     token,
                     &DecodingKey::from_secret(key.as_bytes()),
@@ -245,7 +236,7 @@ pub mod policies {
                 }

                 return auth
-                    .get_key_filters(api_key_prefix, Some(data.claims.search_rules))
+                    .get_key_filters(uid, Some(data.claims.search_rules))
                    .ok();
             }

@@ -258,6 +249,6 @@ pub mod policies {
     struct Claims {
         search_rules: SearchRules,
         exp: Option<i64>,
-        api_key_prefix: String,
+        api_key_uid: Uuid,
     }
 }

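(Aside, not part of the diff: to see the new `api_key_uid` claim from the producer side, a hedged sketch of signing a tenant token that carries it. `TenantClaims` and `sign_tenant_token` are illustrative stand-ins, not Meilisearch APIs.)

    use jsonwebtoken::{encode, EncodingKey, Header};
    use serde::Serialize;
    use uuid::Uuid;

    // Illustrative mirror of the `Claims` struct above.
    #[derive(Serialize)]
    struct TenantClaims {
        search_rules: serde_json::Value,
        exp: Option<i64>,
        api_key_uid: Uuid,
    }

    // The token is signed with the generated key of the API key identified by
    // `api_key_uid`; `extract_key_id` later reads that uid back without
    // verifying the signature, so the server knows which key to verify against.
    fn sign_tenant_token(
        generated_key: &str,
        api_key_uid: Uuid,
    ) -> jsonwebtoken::errors::Result<String> {
        let claims = TenantClaims {
            search_rules: serde_json::json!(["*"]),
            exp: None,
            api_key_uid,
        };
        encode(
            &Header::default(),
            &claims,
            &EncodingKey::from_secret(generated_key.as_bytes()),
        )
    }
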
@@ -2,7 +2,7 @@
 #[macro_use]
 pub mod error;
 pub mod analytics;
-mod task;
+pub mod task;
 #[macro_use]
 pub mod extractors;
 pub mod helpers;
@@ -148,10 +148,10 @@ macro_rules! create_app {
         use actix_web::middleware::TrailingSlash;
         use actix_web::App;
         use actix_web::{middleware, web};
-        use meilisearch_error::ResponseError;
         use meilisearch_http::error::MeilisearchHttpError;
         use meilisearch_http::routes;
         use meilisearch_http::{configure_data, dashboard};
+        use meilisearch_types::error::ResponseError;

         App::new()
             .configure(|s| configure_data(s, $data.clone(), $auth.clone(), &$opt, $analytics))

@@ -1,6 +1,7 @@
 use std::env;
 use std::sync::Arc;

+use actix_web::http::KeepAlive;
 use actix_web::HttpServer;
 use clap::Parser;
 use meilisearch_auth::AuthController;
@@ -83,7 +84,8 @@ async fn run_http(
         )
     })
     // Disable signals allows the server to terminate immediately when a user enter CTRL-C
-    .disable_signals();
+    .disable_signals()
+    .keep_alive(KeepAlive::Os);

     if let Some(config) = opt.get_ssl_config()? {
         http_server

@@ -1,17 +1,19 @@
 use std::str;

 use actix_web::{web, HttpRequest, HttpResponse};
-
-use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use time::OffsetDateTime;
+use uuid::Uuid;

+use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key};
+use meilisearch_types::error::{Code, ResponseError};
+
 use crate::extractors::{
     authentication::{policies::*, GuardedData},
     sequential_extractor::SeqHandler,
 };
-use meilisearch_error::{Code, ResponseError};
+use crate::routes::Pagination;

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
@@ -20,7 +22,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
             .route(web::get().to(SeqHandler(list_api_keys))),
     )
     .service(
-        web::resource("/{api_key}")
+        web::resource("/{key}")
             .route(web::get().to(SeqHandler(get_api_key)))
             .route(web::patch().to(SeqHandler(patch_api_key)))
             .route(web::delete().to(SeqHandler(delete_api_key))),
@@ -28,7 +30,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
 }

 pub async fn create_api_key(
-    auth_controller: GuardedData<MasterPolicy, AuthController>,
+    auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, AuthController>,
     body: web::Json<Value>,
     _req: HttpRequest,
 ) -> Result<HttpResponse, ResponseError> {
@@ -44,30 +46,35 @@ pub async fn create_api_key(
 }

 pub async fn list_api_keys(
-    auth_controller: GuardedData<MasterPolicy, AuthController>,
-    _req: HttpRequest,
+    auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
+    paginate: web::Query<Pagination>,
 ) -> Result<HttpResponse, ResponseError> {
-    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
+    let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
         let keys = auth_controller.list_keys()?;
-        let res: Vec<_> = keys
-            .into_iter()
-            .map(|k| KeyView::from_key(k, &auth_controller))
-            .collect();
-        Ok(res)
+        let page_view = paginate.auto_paginate_sized(
+            keys.into_iter()
+                .map(|k| KeyView::from_key(k, &auth_controller)),
+        );
+
+        Ok(page_view)
     })
     .await
     .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

-    Ok(HttpResponse::Ok().json(KeyListView::from(res)))
+    Ok(HttpResponse::Ok().json(page_view))
 }

 pub async fn get_api_key(
-    auth_controller: GuardedData<MasterPolicy, AuthController>,
+    auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
     path: web::Path<AuthParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let api_key = path.into_inner().api_key;
+    let key = path.into_inner().key;

     let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
-        let key = auth_controller.get_key(&api_key)?;
+        let uid =
+            Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
+        let key = auth_controller.get_key(uid)?;

         Ok(KeyView::from_key(key, &auth_controller))
     })
     .await
@@ -77,14 +84,17 @@ pub async fn get_api_key(
 }

 pub async fn patch_api_key(
-    auth_controller: GuardedData<MasterPolicy, AuthController>,
+    auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, AuthController>,
     body: web::Json<Value>,
     path: web::Path<AuthParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let api_key = path.into_inner().api_key;
+    let key = path.into_inner().key;
     let body = body.into_inner();
     let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
-        let key = auth_controller.update_key(&api_key, body)?;
+        let uid =
+            Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
+        let key = auth_controller.update_key(uid, body)?;

         Ok(KeyView::from_key(key, &auth_controller))
     })
     .await
@@ -94,27 +104,33 @@ pub async fn patch_api_key(
 }

 pub async fn delete_api_key(
-    auth_controller: GuardedData<MasterPolicy, AuthController>,
+    auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_DELETE }>, AuthController>,
     path: web::Path<AuthParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let api_key = path.into_inner().api_key;
-    tokio::task::spawn_blocking(move || auth_controller.delete_key(&api_key))
-        .await
-        .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;
+    let key = path.into_inner().key;
+    tokio::task::spawn_blocking(move || {
+        let uid =
+            Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
+        auth_controller.delete_key(uid)
+    })
+    .await
+    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

     Ok(HttpResponse::NoContent().finish())
 }

 #[derive(Deserialize)]
 pub struct AuthParam {
-    api_key: String,
+    key: String,
 }

 #[derive(Debug, Serialize)]
 #[serde(rename_all = "camelCase")]
 struct KeyView {
     name: Option<String>,
     description: Option<String>,
     key: String,
+    uid: Uuid,
     actions: Vec<Action>,
     indexes: Vec<String>,
     #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
@@ -127,28 +143,18 @@ struct KeyView {

 impl KeyView {
     fn from_key(key: Key, auth: &AuthController) -> Self {
-        let key_id = str::from_utf8(&key.id).unwrap();
-        let generated_key = auth.generate_key(key_id).unwrap_or_default();
+        let generated_key = auth.generate_key(key.uid).unwrap_or_default();

         KeyView {
             name: key.name,
             description: key.description,
             key: generated_key,
+            uid: key.uid,
             actions: key.actions,
-            indexes: key.indexes,
+            indexes: key.indexes.into_iter().map(String::from).collect(),
             expires_at: key.expires_at,
             created_at: key.created_at,
             updated_at: key.updated_at,
         }
     }
 }
-
-#[derive(Debug, Serialize)]
-struct KeyListView {
-    results: Vec<KeyView>,
-}
-
-impl From<Vec<KeyView>> for KeyListView {
-    fn from(results: Vec<KeyView>) -> Self {
-        Self { results }
-    }
-}

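(Aside, not part of the diff: every `/keys/{key}` handler above repeats the same resolution step, since the path parameter may be either the key's `uid` or the full generated key. Distilled into a standalone sketch, with a closure standing in for `AuthController::get_uid_from_encoded_key`:)

    use uuid::Uuid;

    fn resolve_key_param(
        key: &str,
        uid_from_encoded_key: impl FnOnce(&str) -> Result<Uuid, String>,
    ) -> Result<Uuid, String> {
        // Try the parameter as a uid first; otherwise treat it as the
        // generated key and look the uid up.
        Uuid::parse_str(key).or_else(|_| uid_from_encoded_key(key))
    }
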
@@ -1,19 +1,16 @@
 use actix_web::{web, HttpRequest, HttpResponse};
 use log::debug;
-use meilisearch_error::ResponseError;
 use meilisearch_lib::MeiliSearch;
-use serde::{Deserialize, Serialize};
+use meilisearch_types::error::ResponseError;
 use serde_json::json;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
+use crate::task::SummarizedTaskView;

 pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))))
-        .service(
-            web::resource("/{dump_uid}/status").route(web::get().to(SeqHandler(get_dump_status))),
-        );
+    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))));
 }

 pub async fn create_dump(
@@ -23,29 +20,8 @@ pub async fn create_dump(
 ) -> Result<HttpResponse, ResponseError> {
     analytics.publish("Dump Created".to_string(), json!({}), Some(&req));

-    let res = meilisearch.create_dump().await?;
+    let res: SummarizedTaskView = meilisearch.register_dump_task().await?.into();

     debug!("returns: {:?}", res);
     Ok(HttpResponse::Accepted().json(res))
 }
-
-#[derive(Debug, Serialize)]
-#[serde(rename_all = "camelCase")]
-struct DumpStatusResponse {
-    status: String,
-}
-
-#[derive(Deserialize)]
-struct DumpParam {
-    dump_uid: String,
-}
-
-async fn get_dump_status(
-    meilisearch: GuardedData<ActionPolicy<{ actions::DUMPS_GET }>, MeiliSearch>,
-    path: web::Path<DumpParam>,
-) -> Result<HttpResponse, ResponseError> {
-    let res = meilisearch.dump_info(path.dump_uid.clone()).await?;
-
-    debug!("returns: {:?}", res);
-    Ok(HttpResponse::Ok().json(res))
-}

@@ -6,13 +6,15 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use bstr::ByteSlice;
 use futures::{Stream, StreamExt};
 use log::debug;
-use meilisearch_error::ResponseError;
 use meilisearch_lib::index_controller::{DocumentAdditionFormat, Update};
 use meilisearch_lib::milli::update::IndexDocumentsMethod;
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::star_or::StarOr;
 use mime::Mime;
 use once_cell::sync::Lazy;
 use serde::Deserialize;
+use serde_cs::vec::CS;
 use serde_json::Value;
 use tokio::sync::mpsc;

@@ -21,11 +23,9 @@ use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::payload::Payload;
 use crate::extractors::sequential_extractor::SeqHandler;
+use crate::routes::{fold_star_or, PaginationView};
 use crate::task::SummarizedTaskView;

-const DEFAULT_RETRIEVE_DOCUMENTS_OFFSET: usize = 0;
-const DEFAULT_RETRIEVE_DOCUMENTS_LIMIT: usize = 20;
-
 static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
     vec![
         "application/json".to_string(),
@@ -86,14 +86,24 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     );
 }

+#[derive(Deserialize, Debug)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct GetDocument {
+    fields: Option<CS<StarOr<String>>>,
+}
+
 pub async fn get_document(
     meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, MeiliSearch>,
     path: web::Path<DocumentParam>,
+    params: web::Query<GetDocument>,
 ) -> Result<HttpResponse, ResponseError> {
     let index = path.index_uid.clone();
     let id = path.document_id.clone();
+    let GetDocument { fields } = params.into_inner();
+    let attributes_to_retrieve = fields.and_then(fold_star_or);
+
     let document = meilisearch
-        .document(index, id, None as Option<Vec<String>>)
+        .document(index, id, attributes_to_retrieve)
         .await?;
     debug!("returns: {:?}", document);
     Ok(HttpResponse::Ok().json(document))
@@ -116,9 +126,11 @@ pub async fn delete_document(
 #[derive(Deserialize, Debug)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct BrowseQuery {
-    offset: Option<usize>,
-    limit: Option<usize>,
-    attributes_to_retrieve: Option<String>,
+    #[serde(default)]
+    offset: usize,
+    #[serde(default = "crate::routes::PAGINATION_DEFAULT_LIMIT")]
+    limit: usize,
+    fields: Option<CS<StarOr<String>>>,
 }

 pub async fn get_all_documents(
@@ -127,27 +139,21 @@ pub async fn get_all_documents(
     params: web::Query<BrowseQuery>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
-    let attributes_to_retrieve = params.attributes_to_retrieve.as_ref().and_then(|attrs| {
-        let mut names = Vec::new();
-        for name in attrs.split(',').map(String::from) {
-            if name == "*" {
-                return None;
-            }
-            names.push(name);
-        }
-        Some(names)
-    });
+    let BrowseQuery {
+        limit,
+        offset,
+        fields,
+    } = params.into_inner();
+    let attributes_to_retrieve = fields.and_then(fold_star_or);

-    let documents = meilisearch
-        .documents(
-            path.into_inner(),
-            params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET),
-            params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT),
-            attributes_to_retrieve,
-        )
+    let (total, documents) = meilisearch
+        .documents(path.into_inner(), offset, limit, attributes_to_retrieve)
         .await?;
-    debug!("returns: {:?}", documents);
-    Ok(HttpResponse::Ok().json(documents))
+
+    let ret = PaginationView::new(offset, limit, total as usize, documents);
+
+    debug!("returns: {:?}", ret);
+    Ok(HttpResponse::Ok().json(ret))
 }

 #[derive(Deserialize, Debug)]

@@ -1,8 +1,8 @@
 use actix_web::{web, HttpRequest, HttpResponse};
 use log::debug;
-use meilisearch_error::ResponseError;
 use meilisearch_lib::index_controller::Update;
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;
@@ -12,10 +12,11 @@ use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::task::SummarizedTaskView;

+use super::Pagination;
+
 pub mod documents;
 pub mod search;
 pub mod settings;
-pub mod tasks;

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
@@ -28,30 +29,32 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(
             web::resource("")
                 .route(web::get().to(SeqHandler(get_index)))
-                .route(web::put().to(SeqHandler(update_index)))
+                .route(web::patch().to(SeqHandler(update_index)))
                 .route(web::delete().to(SeqHandler(delete_index))),
         )
         .service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats))))
         .service(web::scope("/documents").configure(documents::configure))
         .service(web::scope("/search").configure(search::configure))
-        .service(web::scope("/tasks").configure(tasks::configure))
         .service(web::scope("/settings").configure(settings::configure)),
     );
 }

 pub async fn list_indexes(
     data: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, MeiliSearch>,
+    paginate: web::Query<Pagination>,
 ) -> Result<HttpResponse, ResponseError> {
     let search_rules = &data.filters().search_rules;
-    let indexes: Vec<_> = data
-        .list_indexes()
-        .await?
+    let indexes: Vec<_> = data.list_indexes().await?;
+    let nb_indexes = indexes.len();
+    let iter = indexes
         .into_iter()
-        .filter(|i| search_rules.is_index_authorized(&i.uid))
-        .collect();
+        .filter(|i| search_rules.is_index_authorized(&i.uid));
+    let ret = paginate
+        .into_inner()
+        .auto_paginate_unsized(nb_indexes, iter);

-    debug!("returns: {:?}", indexes);
-    Ok(HttpResponse::Ok().json(indexes))
+    debug!("returns: {:?}", ret);
+    Ok(HttpResponse::Ok().json(ret))
 }

 #[derive(Debug, Deserialize)]

@@ -1,18 +1,20 @@
 use actix_web::{web, HttpRequest, HttpResponse};
 use log::debug;
 use meilisearch_auth::IndexSearchRules;
-use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{
-    default_crop_length, default_crop_marker, default_highlight_post_tag,
-    default_highlight_pre_tag, SearchQuery, DEFAULT_SEARCH_LIMIT,
+    SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG,
+    DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
 };
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
 use serde::Deserialize;
+use serde_cs::vec::CS;
 use serde_json::Value;

 use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
+use crate::routes::{fold_star_or, StarOr};

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
@@ -28,42 +30,26 @@ pub struct SearchQueryGet {
     q: Option<String>,
     offset: Option<usize>,
     limit: Option<usize>,
-    attributes_to_retrieve: Option<String>,
-    attributes_to_crop: Option<String>,
-    #[serde(default = "default_crop_length")]
+    attributes_to_retrieve: Option<CS<StarOr<String>>>,
+    attributes_to_crop: Option<CS<StarOr<String>>>,
+    #[serde(default = "DEFAULT_CROP_LENGTH")]
     crop_length: usize,
-    attributes_to_highlight: Option<String>,
+    attributes_to_highlight: Option<CS<StarOr<String>>>,
     filter: Option<String>,
     sort: Option<String>,
     #[serde(default = "Default::default")]
-    matches: bool,
-    facets_distribution: Option<String>,
-    #[serde(default = "default_highlight_pre_tag")]
+    show_matches_position: bool,
+    facets: Option<CS<StarOr<String>>>,
+    #[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")]
     highlight_pre_tag: String,
-    #[serde(default = "default_highlight_post_tag")]
+    #[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")]
     highlight_post_tag: String,
-    #[serde(default = "default_crop_marker")]
+    #[serde(default = "DEFAULT_CROP_MARKER")]
     crop_marker: String,
 }

 impl From<SearchQueryGet> for SearchQuery {
     fn from(other: SearchQueryGet) -> Self {
-        let attributes_to_retrieve = other
-            .attributes_to_retrieve
-            .map(|attrs| attrs.split(',').map(String::from).collect());
-
-        let attributes_to_crop = other
-            .attributes_to_crop
-            .map(|attrs| attrs.split(',').map(String::from).collect());
-
-        let attributes_to_highlight = other
-            .attributes_to_highlight
-            .map(|attrs| attrs.split(',').map(String::from).collect());
-
-        let facets_distribution = other
-            .facets_distribution
-            .map(|attrs| attrs.split(',').map(String::from).collect());
-
         let filter = match other.filter {
             Some(f) => match serde_json::from_str(&f) {
                 Ok(v) => Some(v),
@@ -72,20 +58,18 @@ impl From<SearchQueryGet> for SearchQuery {
             None => None,
         };

-        let sort = other.sort.map(|attr| fix_sort_query_parameters(&attr));
-
         Self {
             q: other.q,
             offset: other.offset,
-            limit: other.limit.unwrap_or(DEFAULT_SEARCH_LIMIT),
-            attributes_to_retrieve,
-            attributes_to_crop,
+            limit: other.limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT),
+            attributes_to_retrieve: other.attributes_to_retrieve.and_then(fold_star_or),
+            attributes_to_crop: other.attributes_to_crop.and_then(fold_star_or),
             crop_length: other.crop_length,
-            attributes_to_highlight,
+            attributes_to_highlight: other.attributes_to_highlight.and_then(fold_star_or),
             filter,
-            sort,
-            matches: other.matches,
-            facets_distribution,
+            sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
+            show_matches_position: other.show_matches_position,
+            facets: other.facets.and_then(fold_star_or),
             highlight_pre_tag: other.highlight_pre_tag,
             highlight_post_tag: other.highlight_post_tag,
             crop_marker: other.crop_marker,
@@ -169,10 +153,6 @@ pub async fn search_with_url_query(

     let search_result = search_result?;

-    // Tests that the nb_hits is always set to false
-    #[cfg(test)]
-    assert!(!search_result.exhaustive_nb_hits);
-
     debug!("returns: {:?}", search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
@@ -207,10 +187,6 @@ pub async fn search_with_post(

     let search_result = search_result?;

-    // Tests that the nb_hits is always set to false
-    #[cfg(test)]
-    assert!(!search_result.exhaustive_nb_hits);
-
     debug!("returns: {:?}", search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }

@@ -1,10 +1,10 @@
 use log::debug;

 use actix_web::{web, HttpRequest, HttpResponse};
-use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{Settings, Unchecked};
 use meilisearch_lib::index_controller::Update;
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
 use serde_json::json;

 use crate::analytics::Analytics;
@@ -13,7 +13,7 @@ use crate::task::SummarizedTaskView;

 #[macro_export]
 macro_rules! make_setting_route {
-    ($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
+    ($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
         pub mod $attr {
             use actix_web::{web, HttpRequest, HttpResponse, Resource};
             use log::debug;
@@ -21,7 +21,7 @@ macro_rules! make_setting_route {
             use meilisearch_lib::milli::update::Setting;
             use meilisearch_lib::{index::Settings, index_controller::Update, MeiliSearch};

-            use meilisearch_error::ResponseError;
+            use meilisearch_types::error::ResponseError;
             use $crate::analytics::Analytics;
             use $crate::extractors::authentication::{policies::*, GuardedData};
             use $crate::extractors::sequential_extractor::SeqHandler;
@@ -100,18 +100,27 @@ macro_rules! make_setting_route {
             pub fn resources() -> Resource {
                 Resource::new($route)
                     .route(web::get().to(SeqHandler(get)))
-                    .route(web::post().to(SeqHandler(update)))
+                    .route(web::$update_verb().to(SeqHandler(update)))
                     .route(web::delete().to(SeqHandler(delete)))
             }
         }
     };
-    ($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal) => {
-        make_setting_route!($route, $type, $attr, $camelcase_attr, _analytics, |_, _| {});
+    ($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal) => {
+        make_setting_route!(
+            $route,
+            $update_verb,
+            $type,
+            $attr,
+            $camelcase_attr,
+            _analytics,
+            |_, _| {}
+        );
     };
 }

 make_setting_route!(
     "/filterable-attributes",
+    put,
     std::collections::BTreeSet<String>,
     filterable_attributes,
     "filterableAttributes",
@@ -134,6 +143,7 @@ make_setting_route!(

 make_setting_route!(
     "/sortable-attributes",
+    put,
     std::collections::BTreeSet<String>,
     sortable_attributes,
     "sortableAttributes",
@@ -156,6 +166,7 @@ make_setting_route!(

 make_setting_route!(
     "/displayed-attributes",
+    put,
     Vec<String>,
     displayed_attributes,
     "displayedAttributes"
@@ -163,6 +174,7 @@ make_setting_route!(

 make_setting_route!(
     "/typo-tolerance",
+    patch,
     meilisearch_lib::index::updates::TypoSettings,
     typo_tolerance,
     "typoTolerance",
@@ -204,6 +216,7 @@ make_setting_route!(

 make_setting_route!(
     "/searchable-attributes",
+    put,
     Vec<String>,
     searchable_attributes,
     "searchableAttributes",
@@ -225,6 +238,7 @@ make_setting_route!(

 make_setting_route!(
     "/stop-words",
+    put,
     std::collections::BTreeSet<String>,
     stop_words,
     "stopWords"
@@ -232,6 +246,7 @@ make_setting_route!(

 make_setting_route!(
     "/synonyms",
+    put,
     std::collections::BTreeMap<String, Vec<String>>,
     synonyms,
     "synonyms"
@@ -239,6 +254,7 @@ make_setting_route!(

 make_setting_route!(
     "/distinct-attribute",
+    put,
     String,
     distinct_attribute,
     "distinctAttribute"
@@ -246,6 +262,7 @@ make_setting_route!(

 make_setting_route!(
     "/ranking-rules",
+    put,
     Vec<String>,
     ranking_rules,
     "rankingRules",
@@ -265,13 +282,57 @@ make_setting_route!(
     }
 );

+make_setting_route!(
+    "/faceting",
+    patch,
+    meilisearch_lib::index::updates::FacetingSettings,
+    faceting,
+    "faceting",
+    analytics,
+    |setting: &Option<meilisearch_lib::index::updates::FacetingSettings>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "Faceting Updated".to_string(),
+            json!({
+                "faceting": {
+                    "max_values_per_facet": setting.as_ref().and_then(|s| s.max_values_per_facet.set()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
+make_setting_route!(
+    "/pagination",
+    patch,
+    meilisearch_lib::index::updates::PaginationSettings,
+    pagination,
+    "pagination",
+    analytics,
+    |setting: &Option<meilisearch_lib::index::updates::PaginationSettings>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "Pagination Updated".to_string(),
+            json!({
+                "pagination": {
+                    "limited_to": setting.as_ref().and_then(|s| s.limited_to.set()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
 macro_rules! generate_configure {
     ($($mod:ident),*) => {
         pub fn configure(cfg: &mut web::ServiceConfig) {
             use crate::extractors::sequential_extractor::SeqHandler;
             cfg.service(
                 web::resource("")
-                    .route(web::post().to(SeqHandler(update_all)))
+                    .route(web::patch().to(SeqHandler(update_all)))
                     .route(web::get().to(SeqHandler(get_all)))
                     .route(web::delete().to(SeqHandler(delete_all))))
                 $(.service($mod::resources()))*;

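(Aside, not part of the diff: roughly what one invocation of the reworked macro wires up. This expansion is hand-written for illustration, with stub handlers; the new `$update_verb` argument picks the HTTP method of the update route, so whole-value settings use PUT while partially-updatable ones such as typoTolerance, faceting, and pagination use PATCH.)

    use actix_web::{web, HttpResponse, Resource};

    async fn get_stop_words() -> HttpResponse { HttpResponse::Ok().finish() }
    async fn update_stop_words() -> HttpResponse { HttpResponse::Ok().finish() }
    async fn delete_stop_words() -> HttpResponse { HttpResponse::Ok().finish() }

    // Sketch of what `make_setting_route!("/stop-words", put, ...)` generates.
    fn stop_words_resources() -> Resource {
        web::resource("/stop-words")
            .route(web::get().to(get_stop_words))
            .route(web::put().to(update_stop_words)) // was web::post() before this change
            .route(web::delete().to(delete_stop_words))
    }
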
@@ -1,80 +0,0 @@
-use actix_web::{web, HttpRequest, HttpResponse};
-use log::debug;
-use meilisearch_error::ResponseError;
-use meilisearch_lib::MeiliSearch;
-use serde::{Deserialize, Serialize};
-use serde_json::json;
-use time::OffsetDateTime;
-
-use crate::analytics::Analytics;
-use crate::extractors::authentication::{policies::*, GuardedData};
-use crate::extractors::sequential_extractor::SeqHandler;
-use crate::task::{TaskListView, TaskView};
-
-pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::get().to(SeqHandler(get_all_tasks_status))))
-        .service(web::resource("{task_id}").route(web::get().to(SeqHandler(get_task_status))));
-}
-
-#[derive(Debug, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct UpdateIndexResponse {
-    name: String,
-    uid: String,
-    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
-    created_at: OffsetDateTime,
-    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
-    updated_at: OffsetDateTime,
-    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
-    primary_key: OffsetDateTime,
-}
-
-#[derive(Deserialize)]
-pub struct UpdateParam {
-    index_uid: String,
-    task_id: u64,
-}
-
-pub async fn get_task_status(
-    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
-    index_uid: web::Path<UpdateParam>,
-    req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    analytics.publish(
-        "Index Tasks Seen".to_string(),
-        json!({ "per_task_uid": true }),
-        Some(&req),
-    );
-
-    let UpdateParam { index_uid, task_id } = index_uid.into_inner();
-
-    let task: TaskView = meilisearch.get_index_task(index_uid, task_id).await?.into();
-
-    debug!("returns: {:?}", task);
-    Ok(HttpResponse::Ok().json(task))
-}
-
-pub async fn get_all_tasks_status(
-    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
-    index_uid: web::Path<String>,
-    req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    analytics.publish(
-        "Index Tasks Seen".to_string(),
-        json!({ "per_task_uid": false }),
-        Some(&req),
-    );
-
-    let tasks: TaskListView = meilisearch
-        .list_index_task(index_uid.into_inner(), None, None)
-        .await?
-        .into_iter()
-        .map(TaskView::from)
-        .collect::<Vec<_>>()
-        .into();
-
-    debug!("returns: {:?}", tasks);
-    Ok(HttpResponse::Ok().json(tasks))
-}

@@ -1,11 +1,13 @@
 use actix_web::{web, HttpResponse};
 use log::debug;
 use serde::{Deserialize, Serialize};

 use time::OffsetDateTime;

-use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{Settings, Unchecked};
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::star_or::StarOr;

 use crate::extractors::authentication::{policies::*, GuardedData};

@@ -24,6 +26,101 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::scope("/indexes").configure(indexes::configure));
 }

+/// Extracts the raw values from the `StarOr` types and
+/// return None if a `StarOr::Star` is encountered.
+pub fn fold_star_or<T, O>(content: impl IntoIterator<Item = StarOr<T>>) -> Option<O>
+where
+    O: FromIterator<T>,
+{
+    content
+        .into_iter()
+        .map(|value| match value {
+            StarOr::Star => None,
+            StarOr::Other(val) => Some(val),
+        })
+        .collect()
+}
+
+const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;
+
+#[derive(Debug, Clone, Copy, Deserialize)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct Pagination {
+    #[serde(default)]
+    pub offset: usize,
+    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
+    pub limit: usize,
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct PaginationView<T> {
+    pub results: Vec<T>,
+    pub offset: usize,
+    pub limit: usize,
+    pub total: usize,
+}
+
+impl Pagination {
+    /// Given the full data to paginate, returns the selected section.
+    pub fn auto_paginate_sized<T>(
+        self,
+        content: impl IntoIterator<Item = T> + ExactSizeIterator,
+    ) -> PaginationView<T>
+    where
+        T: Serialize,
+    {
+        let total = content.len();
+        let content: Vec<_> = content
+            .into_iter()
+            .skip(self.offset)
+            .take(self.limit)
+            .collect();
+        self.format_with(total, content)
+    }
+
+    /// Given an iterator and the total number of elements, returns the selected section.
+    pub fn auto_paginate_unsized<T>(
+        self,
+        total: usize,
+        content: impl IntoIterator<Item = T>,
+    ) -> PaginationView<T>
+    where
+        T: Serialize,
+    {
+        let content: Vec<_> = content
+            .into_iter()
+            .skip(self.offset)
+            .take(self.limit)
+            .collect();
+        self.format_with(total, content)
+    }
+
+    /// Given the data already paginated + the total number of elements, it stores
+    /// everything in a [PaginationResult].
+    pub fn format_with<T>(self, total: usize, results: Vec<T>) -> PaginationView<T>
+    where
+        T: Serialize,
+    {
+        PaginationView {
+            results,
+            offset: self.offset,
+            limit: self.limit,
+            total,
+        }
+    }
+}
+
+impl<T> PaginationView<T> {
+    pub fn new(offset: usize, limit: usize, total: usize, results: Vec<T>) -> Self {
+        Self {
+            offset,
+            limit,
+            results,
+            total,
+        }
+    }
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[allow(clippy::large_enum_variant)]
 #[serde(tag = "name")]

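(Aside, not part of the diff: a test-style sketch of the two helpers above, assuming the `StarOr` enum from meilisearch_types is constructible as shown.)

    #[cfg(test)]
    mod pagination_sketch {
        use meilisearch_types::star_or::StarOr;

        use super::{fold_star_or, Pagination};

        #[test]
        fn fold_and_paginate() {
            // A plain list of values folds into Some(collection).
            let fields = vec![StarOr::Other("title"), StarOr::Other("overview")];
            assert_eq!(
                fold_star_or::<_, Vec<&str>>(fields),
                Some(vec!["title", "overview"])
            );

            // Any `*` collapses the whole selection into None, i.e. "everything".
            let fields = vec![StarOr::Other("title"), StarOr::Star];
            assert_eq!(fold_star_or::<_, Vec<&str>>(fields), None);

            // Pagination keeps `limit` items starting at `offset`, recording the total.
            let page = Pagination { offset: 1, limit: 2 }.auto_paginate_sized(0..5);
            assert_eq!(page.results, vec![1, 2]);
            assert_eq!((page.offset, page.limit, page.total), (1, 2, 5));
        }
    }
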
@@ -1,22 +1,67 @@
 use actix_web::{web, HttpRequest, HttpResponse};
-use meilisearch_error::ResponseError;
-use meilisearch_lib::tasks::task::TaskId;
+use meilisearch_lib::tasks::task::{TaskContent, TaskEvent, TaskId};
 use meilisearch_lib::tasks::TaskFilter;
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::index_uid::IndexUid;
+use meilisearch_types::star_or::StarOr;
 use serde::Deserialize;
+use serde_cs::vec::CS;
 use serde_json::json;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::task::{TaskListView, TaskView};
+use crate::task::{TaskListView, TaskStatus, TaskType, TaskView};
+
+use super::fold_star_or;
+
+const DEFAULT_LIMIT: fn() -> usize = || 20;

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks))))
         .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
 }

+#[derive(Deserialize, Debug)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct TaskFilterQuery {
+    #[serde(rename = "type")]
+    type_: Option<CS<StarOr<TaskType>>>,
+    status: Option<CS<StarOr<TaskStatus>>>,
+    index_uid: Option<CS<StarOr<IndexUid>>>,
+    #[serde(default = "DEFAULT_LIMIT")]
+    limit: usize,
+    from: Option<TaskId>,
+}
+
+#[rustfmt::skip]
+fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool {
+    matches!((type_, content),
+          (TaskType::IndexCreation, TaskContent::IndexCreation { .. })
+        | (TaskType::IndexUpdate, TaskContent::IndexUpdate { .. })
+        | (TaskType::IndexDeletion, TaskContent::IndexDeletion { .. })
+        | (TaskType::DocumentAdditionOrUpdate, TaskContent::DocumentAddition { .. })
+        | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion{ .. })
+        | (TaskType::SettingsUpdate, TaskContent::SettingsUpdate { .. })
+    )
+}
+
+#[rustfmt::skip]
+fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> bool {
+    events.last().map_or(false, |event| {
+        matches!((status, event),
+              (TaskStatus::Enqueued, TaskEvent::Created(_))
+            | (TaskStatus::Processing, TaskEvent::Processing(_) | TaskEvent::Batched { .. })
+            | (TaskStatus::Succeeded, TaskEvent::Succeeded { .. })
+            | (TaskStatus::Failed, TaskEvent::Failed { .. }),
+        )
+    })
+}
+
 async fn get_tasks(
     meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
+    params: web::Query<TaskFilterQuery>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -26,24 +71,98 @@ async fn get_tasks(
         Some(&req),
     );

+    let TaskFilterQuery {
+        type_,
+        status,
+        index_uid,
+        limit,
+        from,
+    } = params.into_inner();
+
     let search_rules = &meilisearch.filters().search_rules;
-    let filters = if search_rules.is_index_authorized("*") {
-        None
-    } else {
-        let mut filters = TaskFilter::default();
-        for (index, _policy) in search_rules.clone() {
-            filters.filter_index(index);
+
+    // We first transform a potential indexUid=* into a "not specified indexUid filter"
+    // for every one of the filters: type, status, and indexUid.
+    let type_: Option<Vec<_>> = type_.and_then(fold_star_or);
+    let status: Option<Vec<_>> = status.and_then(fold_star_or);
+    let index_uid: Option<Vec<_>> = index_uid.and_then(fold_star_or);
+
+    // Then we filter on potential indexes and make sure that the search filter
+    // restrictions are also applied.
+    let indexes_filters = match index_uid {
+        Some(indexes) => {
+            let mut filters = TaskFilter::default();
+            for name in indexes {
+                if search_rules.is_index_authorized(&name) {
+                    filters.filter_index(name.to_string());
+                }
+            }
+            Some(filters)
+        }
+        None => {
+            if search_rules.is_index_authorized("*") {
+                None
+            } else {
+                let mut filters = TaskFilter::default();
+                for (index, _policy) in search_rules.clone() {
+                    filters.filter_index(index);
+                }
+                Some(filters)
+            }
         }
-        Some(filters)
     };

-    let tasks: TaskListView = meilisearch
-        .list_tasks(filters, None, None)
+    // Then we complete the task filter with other potential status and types filters.
+    let filters = if type_.is_some() || status.is_some() {
+        let mut filters = indexes_filters.unwrap_or_default();
+        filters.filter_fn(move |task| {
+            let matches_type = match &type_ {
+                Some(types) => types
+                    .iter()
+                    .any(|t| task_type_matches_content(t, &task.content)),
+                None => true,
+            };
+
+            let matches_status = match &status {
+                Some(statuses) => statuses
+                    .iter()
+                    .any(|t| task_status_matches_events(t, &task.events)),
+                None => true,
+            };
+
+            matches_type && matches_status
+        });
+        Some(filters)
+    } else {
+        indexes_filters
+    };
+
+    // We +1 just to know if there is more after this "page" or not.
+    let limit = limit.saturating_add(1);
+
+    let mut tasks_results: Vec<_> = meilisearch
+        .list_tasks(filters, Some(limit), from)
         .await?
         .into_iter()
         .map(TaskView::from)
-        .collect::<Vec<_>>()
-        .into();
+        .collect();
+
+    // If we were able to fetch the number +1 tasks we asked
+    // it means that there is more to come.
+    let next = if tasks_results.len() == limit {
+        tasks_results.pop().map(|t| t.uid)
+    } else {
+        None
+    };
+
+    let from = tasks_results.first().map(|t| t.uid);
+
+    let tasks = TaskListView {
+        results: tasks_results,
+        limit: limit.saturating_sub(1),
+        from,
+        next,
+    };

     Ok(HttpResponse::Ok().json(tasks))
 }

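(Aside, not part of the diff: the handler above relies on a small keyset-pagination trick: it fetches `limit + 1` tasks, and the presence of the extra element is what proves there is a next page. The same idea in isolation:)

    // Returns the current page plus the uid to resume from, if any.
    // `fetched` is assumed to have been requested with `limit + 1` elements.
    fn paginate_uids(mut fetched: Vec<u64>, limit: usize) -> (Vec<u64>, Option<u64>) {
        let next = if fetched.len() == limit + 1 {
            // The extra element is not part of this page; its uid is where
            // the next request should start from.
            fetched.pop()
        } else {
            None
        };
        (fetched, next)
    }
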
@@ -1,62 +1,137 @@
|
||||
use std::fmt::Write;
|
||||
use std::error::Error;
|
||||
use std::fmt::{self, Write};
|
||||
use std::str::FromStr;
|
||||
use std::write;
|
||||
|
||||
use meilisearch_error::ResponseError;
|
||||
use meilisearch_lib::index::{Settings, Unchecked};
|
||||
use meilisearch_lib::milli::update::IndexDocumentsMethod;
|
||||
use meilisearch_lib::tasks::batch::BatchId;
|
||||
use meilisearch_lib::tasks::task::{
|
||||
DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
|
||||
};
|
||||
use serde::{Serialize, Serializer};
|
||||
use meilisearch_types::error::ResponseError;
|
||||
use serde::{Deserialize, Serialize, Serializer};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
use crate::AUTOBATCHING_ENABLED;
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
enum TaskType {
|
||||
pub enum TaskType {
|
||||
IndexCreation,
|
||||
IndexUpdate,
|
||||
IndexDeletion,
|
||||
DocumentAddition,
|
||||
DocumentPartial,
|
||||
DocumentAdditionOrUpdate,
|
||||
DocumentDeletion,
|
||||
SettingsUpdate,
|
||||
ClearAll,
|
||||
DumpCreation,
|
||||
}
|
||||
|
||||
impl From<TaskContent> for TaskType {
|
||||
fn from(other: TaskContent) -> Self {
|
||||
match other {
|
||||
TaskContent::DocumentAddition {
|
||||
merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
|
||||
..
|
||||
} => TaskType::DocumentAddition,
|
||||
TaskContent::DocumentAddition {
|
||||
merge_strategy: IndexDocumentsMethod::UpdateDocuments,
|
||||
..
|
||||
} => TaskType::DocumentPartial,
|
||||
TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll,
|
||||
TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion,
|
||||
TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
|
||||
TaskContent::IndexDeletion => TaskType::IndexDeletion,
|
||||
TaskContent::IndexCreation { .. } => TaskType::IndexCreation,
|
||||
TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate,
|
||||
_ => unreachable!("unexpected task type"),
|
||||
TaskContent::IndexDeletion { .. } => TaskType::IndexDeletion,
|
||||
TaskContent::DocumentAddition { .. } => TaskType::DocumentAdditionOrUpdate,
|
||||
TaskContent::DocumentDeletion { .. } => TaskType::DocumentDeletion,
|
||||
TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
|
||||
TaskContent::Dump { .. } => TaskType::DumpCreation,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[derive(Debug)]
|
||||
pub struct TaskTypeError {
|
||||
invalid_type: String,
|
||||
}
|
||||
|
||||
impl fmt::Display for TaskTypeError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"invalid task type `{}`, expecting one of: \
|
||||
indexCreation, indexUpdate, indexDeletion, documentAdditionOrUpdate, \
|
||||
documentDeletion, settingsUpdate, dumpCreation",
|
||||
self.invalid_type
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for TaskTypeError {}
|
||||
|
||||
impl FromStr for TaskType {
|
||||
type Err = TaskTypeError;
|
||||
|
||||
fn from_str(type_: &str) -> Result<Self, TaskTypeError> {
|
||||
if type_.eq_ignore_ascii_case("indexCreation") {
|
||||
Ok(TaskType::IndexCreation)
|
||||
} else if type_.eq_ignore_ascii_case("indexUpdate") {
|
||||
Ok(TaskType::IndexUpdate)
|
||||
} else if type_.eq_ignore_ascii_case("indexDeletion") {
|
||||
Ok(TaskType::IndexDeletion)
|
||||
} else if type_.eq_ignore_ascii_case("documentAdditionOrUpdate") {
|
||||
Ok(TaskType::DocumentAdditionOrUpdate)
|
||||
} else if type_.eq_ignore_ascii_case("documentDeletion") {
|
||||
Ok(TaskType::DocumentDeletion)
|
||||
} else if type_.eq_ignore_ascii_case("settingsUpdate") {
|
||||
Ok(TaskType::SettingsUpdate)
|
||||
} else if type_.eq_ignore_ascii_case("dumpCreation") {
|
||||
Ok(TaskType::DumpCreation)
|
||||
} else {
|
||||
Err(TaskTypeError {
|
||||
invalid_type: type_.to_string(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
enum TaskStatus {
|
||||
pub enum TaskStatus {
|
||||
Enqueued,
|
||||
Processing,
|
||||
Succeeded,
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TaskStatusError {
|
||||
invalid_status: String,
|
||||
}
|
||||
|
||||
impl fmt::Display for TaskStatusError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"invalid task status `{}`, expecting one of: \
|
||||
enqueued, processing, succeeded, or failed",
|
||||
self.invalid_status,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for TaskStatusError {}
|
||||
|
||||
impl FromStr for TaskStatus {
|
||||
type Err = TaskStatusError;
|
||||
|
||||
fn from_str(status: &str) -> Result<Self, TaskStatusError> {
|
||||
if status.eq_ignore_ascii_case("enqueued") {
|
||||
Ok(TaskStatus::Enqueued)
|
||||
} else if status.eq_ignore_ascii_case("processing") {
|
||||
Ok(TaskStatus::Processing)
|
||||
} else if status.eq_ignore_ascii_case("succeeded") {
|
||||
Ok(TaskStatus::Succeeded)
|
||||
} else if status.eq_ignore_ascii_case("failed") {
|
||||
Ok(TaskStatus::Failed)
|
||||
} else {
|
||||
Err(TaskStatusError {
|
||||
invalid_status: status.to_string(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(untagged)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
@@ -80,6 +155,8 @@ enum TaskDetails {
|
||||
},
|
||||
#[serde(rename_all = "camelCase")]
|
||||
ClearAll { deleted_documents: Option<u64> },
|
||||
#[serde(rename_all = "camelCase")]
|
||||
Dump { dump_uid: String },
|
||||
}
|
||||
|
||||
/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for
|
||||
@@ -136,8 +213,8 @@ fn serialize_duration<S: Serializer>(
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct TaskView {
|
||||
uid: TaskId,
|
||||
index_uid: String,
|
||||
pub uid: TaskId,
|
||||
index_uid: Option<String>,
|
||||
status: TaskStatus,
|
||||
#[serde(rename = "type")]
|
||||
task_type: TaskType,
|
||||
@@ -159,46 +236,44 @@ pub struct TaskView {

impl From<Task> for TaskView {
    fn from(task: Task) -> Self {
+        let index_uid = task.index_uid().map(String::from);
        let Task {
            id,
-            index_uid,
            content,
            events,
        } = task;

        let (task_type, mut details) = match content {
            TaskContent::DocumentAddition {
-                merge_strategy,
-                documents_count,
-                ..
+                documents_count, ..
            } => {
                let details = TaskDetails::DocumentAddition {
                    received_documents: documents_count,
                    indexed_documents: None,
                };

-                let task_type = match merge_strategy {
-                    IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial,
-                    IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition,
-                    _ => unreachable!("Unexpected document merge strategy."),
-                };
-
-                (task_type, Some(details))
+                (TaskType::DocumentAdditionOrUpdate, Some(details))
            }
-            TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => (
+            TaskContent::DocumentDeletion {
+                deletion: DocumentDeletion::Ids(ids),
+                ..
+            } => (
                TaskType::DocumentDeletion,
                Some(TaskDetails::DocumentDeletion {
                    received_document_ids: ids.len(),
                    deleted_documents: None,
                }),
            ),
-            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => (
-                TaskType::ClearAll,
+            TaskContent::DocumentDeletion {
+                deletion: DocumentDeletion::Clear,
+                ..
+            } => (
+                TaskType::DocumentDeletion,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
                }),
            ),
-            TaskContent::IndexDeletion => (
+            TaskContent::IndexDeletion { .. } => (
                TaskType::IndexDeletion,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,

@@ -208,14 +283,18 @@ impl From<Task> for TaskView {
                TaskType::SettingsUpdate,
                Some(TaskDetails::Settings { settings }),
            ),
-            TaskContent::IndexCreation { primary_key } => (
+            TaskContent::IndexCreation { primary_key, .. } => (
                TaskType::IndexCreation,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
-            TaskContent::IndexUpdate { primary_key } => (
+            TaskContent::IndexUpdate { primary_key, .. } => (
                TaskType::IndexUpdate,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
+            TaskContent::Dump { uid } => (
+                TaskType::DumpCreation,
+                Some(TaskDetails::Dump { dump_uid: uid }),
+            ),
        };

        // An event always has at least one event: "Created"

@@ -223,7 +302,7 @@ impl From<Task> for TaskView {
            TaskEvent::Created(_) => (TaskStatus::Enqueued, None, None),
            TaskEvent::Batched { .. } => (TaskStatus::Enqueued, None, None),
            TaskEvent::Processing(_) => (TaskStatus::Processing, None, None),
-            TaskEvent::Succeded { timestamp, result } => {
+            TaskEvent::Succeeded { timestamp, result } => {
                match (result, &mut details) {
                    (
                        TaskResult::DocumentAddition {

@@ -313,7 +392,7 @@ impl From<Task> for TaskView {

        Self {
            uid: id,
-            index_uid: index_uid.into_inner(),
+            index_uid,
            status,
            task_type,
            details,

@@ -329,20 +408,17 @@ impl From<Task> for TaskView {

#[derive(Debug, Serialize)]
pub struct TaskListView {
-    results: Vec<TaskView>,
-}
-
-impl From<Vec<TaskView>> for TaskListView {
-    fn from(results: Vec<TaskView>) -> Self {
-        Self { results }
-    }
+    pub results: Vec<TaskView>,
+    pub limit: usize,
+    pub from: Option<TaskId>,
+    pub next: Option<TaskId>,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
-    uid: TaskId,
-    index_uid: String,
+    task_uid: TaskId,
+    index_uid: Option<String>,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,

@@ -364,8 +440,8 @@ impl From<Task> for SummarizedTaskView {
    };

    Self {
-        uid: other.id,
-        index_uid: other.index_uid.to_string(),
+        task_uid: other.id,
+        index_uid: other.index_uid().map(String::from),
        status: TaskStatus::Enqueued,
        task_type: other.content.into(),
        enqueued_at,
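With `limit`, `from` and `next` now carried by `TaskListView`, the `GET /tasks` payload becomes keyset-paginated. A sketch of the serialized shape this implies, with invented values (shape only, not captured output):

use serde_json::json;

fn main() {
    // Invented example of a paginated task list response.
    let sample = json!({
        "results": [
            { "uid": 0, "indexUid": "movies", "status": "succeeded", "type": "documentAdditionOrUpdate" }
        ],
        "limit": 20,
        "from": 0,
        "next": null
    });
    println!("{sample}");
}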
BIN  meilisearch-http/tests/assets/v1_v0.20.0_movies.dump (new file, binary file not shown)
BIN  meilisearch-http/tests/assets/v2_v0.21.1_movies.dump (new file, binary file not shown)
BIN  meilisearch-http/tests/assets/v3_v0.24.0_movies.dump (new file, binary file not shown)
BIN  meilisearch-http/tests/assets/v4_v0.25.2_movies.dump (new file, binary file not shown)
BIN  meilisearch-http/tests/assets/v5_v0.28.0_test_dump.dump (new file, binary file not shown)
File diff suppressed because it is too large.
@@ -11,14 +11,14 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
hashmap! {
    ("POST", "/indexes/products/search") => hashset!{"search", "*"},
    ("GET", "/indexes/products/search") => hashset!{"search", "*"},
-    ("POST", "/indexes/products/documents") => hashset!{"documents.add", "*"},
-    ("GET", "/indexes/products/documents") => hashset!{"documents.get", "*"},
-    ("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "*"},
-    ("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "*"},
+    ("POST", "/indexes/products/documents") => hashset!{"documents.add", "documents.*", "*"},
+    ("GET", "/indexes/products/documents") => hashset!{"documents.get", "documents.*", "*"},
+    ("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "documents.*", "*"},
+    ("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "documents.*", "*"},
    ("GET", "/tasks") => hashset!{"tasks.get", "*"},
-    ("GET", "/indexes/products/tasks") => hashset!{"tasks.get", "*"},
-    ("GET", "/indexes/products/tasks/0") => hashset!{"tasks.get", "*"},
-    ("PUT", "/indexes/products/") => hashset!{"indexes.update", "*"},
+    ("GET", "/tasks?indexUid=products") => hashset!{"tasks.get", "*"},
+    ("GET", "/tasks/0") => hashset!{"tasks.get", "*"},
+    ("PATCH", "/indexes/products/") => hashset!{"indexes.update", "*"},
    ("GET", "/indexes/products/") => hashset!{"indexes.get", "*"},
    ("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "*"},
    ("POST", "/indexes") => hashset!{"indexes.create", "*"},

@@ -33,20 +33,25 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
    ("GET", "/indexes/products/settings/stop-words") => hashset!{"settings.get", "*"},
    ("GET", "/indexes/products/settings/synonyms") => hashset!{"settings.get", "*"},
    ("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "*"},
-    ("POST", "/indexes/products/settings") => hashset!{"settings.update", "*"},
-    ("POST", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"},
-    ("POST", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"},
-    ("POST", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"},
-    ("POST", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "*"},
-    ("POST", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "*"},
-    ("POST", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "*"},
-    ("POST", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "*"},
-    ("POST", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "*"},
+    ("PATCH", "/indexes/products/settings") => hashset!{"settings.update", "*"},
+    ("PATCH", "/indexes/products/settings/typo-tolerance") => hashset!{"settings.update", "*"},
+    ("PUT", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"},
+    ("PUT", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"},
+    ("PUT", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"},
+    ("PUT", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "*"},
+    ("PUT", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "*"},
+    ("PUT", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "*"},
+    ("PUT", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "*"},
+    ("PUT", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "*"},
    ("GET", "/indexes/products/stats") => hashset!{"stats.get", "*"},
    ("GET", "/stats") => hashset!{"stats.get", "*"},
    ("POST", "/dumps") => hashset!{"dumps.create", "*"},
    ("GET", "/dumps/0/status") => hashset!{"dumps.get", "*"},
    ("GET", "/version") => hashset!{"version", "*"},
    ("PATCH", "/keys/mykey/") => hashset!{"keys.update", "*"},
    ("GET", "/keys/mykey/") => hashset!{"keys.get", "*"},
    ("DELETE", "/keys/mykey/") => hashset!{"keys.delete", "*"},
    ("POST", "/keys") => hashset!{"keys.create", "*"},
    ("GET", "/keys") => hashset!{"keys.get", "*"},
}
});
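The new `documents.*`-style entries mean an action can now be granted either exactly or through a wildcard covering a whole namespace. A minimal sketch of such matching; the real meilisearch-auth implementation may differ, and `action_allows` is an invented name:

// Sketch only: match a granted action pattern against a requested action.
fn action_allows(granted: &str, requested: &str) -> bool {
    granted == "*"
        || granted == requested
        || granted
            .strip_suffix(".*")
            .map_or(false, |prefix| requested.split('.').next() == Some(prefix))
}

fn main() {
    assert!(action_allows("documents.*", "documents.add"));
    assert!(!action_allows("documents.*", "tasks.get"));
}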
@@ -81,7 +86,7 @@ async fn error_access_expired_key() {
    });

    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
+    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();

@@ -93,8 +98,14 @@ async fn error_access_expired_key() {
    for (method, route) in AUTHORIZATIONS.keys() {
        let (response, code) = server.dummy_request(method, route).await;

-        assert_eq!(response, INVALID_RESPONSE.clone());
-        assert_eq!(code, 403);
+        assert_eq!(
+            response,
+            INVALID_RESPONSE.clone(),
+            "on route: {:?} - {:?}",
+            method,
+            route
+        );
+        assert_eq!(403, code, "{:?}", &response);
    }
}

@@ -111,7 +122,7 @@ async fn error_access_unauthorized_index() {
    });

    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
+    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();

@@ -124,8 +135,14 @@ async fn error_access_unauthorized_index() {
    {
        let (response, code) = server.dummy_request(method, route).await;

-        assert_eq!(response, INVALID_RESPONSE.clone());
-        assert_eq!(code, 403);
+        assert_eq!(
+            response,
+            INVALID_RESPONSE.clone(),
+            "on route: {:?} - {:?}",
+            method,
+            route
+        );
+        assert_eq!(403, code, "{:?}", &response);
    }
}

@@ -133,36 +150,54 @@
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_unauthorized_action() {
    let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
-
-    let content = json!({
-        "indexes": ["products"],
-        "actions": [],
-        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
-    });
-
-    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
-    assert!(response["key"].is_string());
-
-    let key = response["key"].as_str().unwrap();
-    server.use_api_key(&key);

    for ((method, route), action) in AUTHORIZATIONS.iter() {
+        // create a new API key letting only the needed action.
+        server.use_api_key("MASTER_KEY");

-        // Patch API key letting all rights but the needed one.
        let content = json!({
            "indexes": ["products"],
            "actions": ALL_ACTIONS.difference(action).collect::<Vec<_>>(),
            "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
        });
-        let (_, code) = server.patch_api_key(&key, content).await;
-        assert_eq!(code, 200);

+        let (response, code) = server.add_api_key(content).await;
+        assert_eq!(201, code, "{:?}", &response);
+        assert!(response["key"].is_string());

+        let key = response["key"].as_str().unwrap();
+        server.use_api_key(&key);
        let (response, code) = server.dummy_request(method, route).await;

-        assert_eq!(response, INVALID_RESPONSE.clone());
-        assert_eq!(code, 403);
+        assert_eq!(
+            response,
+            INVALID_RESPONSE.clone(),
+            "on route: {:?} - {:?}",
+            method,
+            route
+        );
+        assert_eq!(403, code, "{:?}", &response);
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_master_key() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // master key must have access to all routes.
    for ((method, route), _) in AUTHORIZATIONS.iter() {
        let (response, code) = server.dummy_request(method, route).await;

        assert_ne!(
            response,
            INVALID_RESPONSE.clone(),
            "on route: {:?} - {:?}",
            method,
            route
        );
        assert_ne!(code, 403);
    }
}
@@ -170,36 +205,34 @@ async fn error_access_unauthorized_action() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_restricted_index() {
    let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
-
-    let content = json!({
-        "indexes": ["products"],
-        "actions": [],
-        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
-    });
-
-    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
-    assert!(response["key"].is_string());
-
-    let key = response["key"].as_str().unwrap();
-    server.use_api_key(&key);

    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        for action in actions {
-            // Patch API key letting only the needed action.
+            // create a new API key letting only the needed action.
+            server.use_api_key("MASTER_KEY");

            let content = json!({
                "indexes": ["products"],
                "actions": [action],
                "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
            });

-            server.use_api_key("MASTER_KEY");
-            let (_, code) = server.patch_api_key(&key, content).await;
-            assert_eq!(code, 200);
+            let (response, code) = server.add_api_key(content).await;
+            assert_eq!(201, code, "{:?}", &response);
+            assert!(response["key"].is_string());

+            let key = response["key"].as_str().unwrap();
+            server.use_api_key(&key);

            let (response, code) = server.dummy_request(method, route).await;

-            assert_ne!(response, INVALID_RESPONSE.clone());
+            assert_ne!(
+                response,
+                INVALID_RESPONSE.clone(),
+                "on route: {:?} - {:?} with action: {:?}",
+                method,
+                route,
+                action
+            );
            assert_ne!(code, 403);
        }
    }

@@ -209,36 +242,35 @@ async fn access_authorized_restricted_index() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_no_index_restriction() {
    let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
-
-    let content = json!({
-        "indexes": ["*"],
-        "actions": [],
-        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
-    });
-
-    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
-    assert!(response["key"].is_string());
-
-    let key = response["key"].as_str().unwrap();
-    server.use_api_key(&key);

    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        for action in actions {
-            // Patch API key letting only the needed action.
+            // create a new API key letting only the needed action.
+            server.use_api_key("MASTER_KEY");

            let content = json!({
                "indexes": ["*"],
                "actions": [action],
                "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
            });
-            let (_, code) = server.patch_api_key(&key, content).await;
-            assert_eq!(code, 200);

+            let (response, code) = server.add_api_key(content).await;
+            assert_eq!(201, code, "{:?}", &response);
+            assert!(response["key"].is_string());

+            let key = response["key"].as_str().unwrap();
+            server.use_api_key(&key);

            let (response, code) = server.dummy_request(method, route).await;

-            assert_ne!(response, INVALID_RESPONSE.clone());
+            assert_ne!(
+                response,
+                INVALID_RESPONSE.clone(),
+                "on route: {:?} - {:?} with action: {:?}",
+                method,
+                route,
+                action
+            );
            assert_ne!(code, 403);
        }
    }
@@ -248,16 +280,16 @@ async fn access_authorized_no_index_restriction() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_restricted_index() {
    let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
-    let (_, code) = index.create(Some("id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
-    let (_, code) = index.create(Some("product_id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("product_id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on `products` index only.

@@ -267,7 +299,7 @@ async fn access_authorized_stats_restricted_index() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
+    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.

@@ -275,7 +307,7 @@ async fn access_authorized_stats_restricted_index() {
    server.use_api_key(&key);

    let (response, code) = server.stats().await;
-    assert_eq!(code, 200);
+    assert_eq!(200, code, "{:?}", &response);

    // key should have access on `products` index.
    assert!(response["indexes"].get("products").is_some());

@@ -288,16 +320,16 @@ async fn access_authorized_stats_restricted_index() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_no_index_restriction() {
    let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
-    let (_, code) = index.create(Some("id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
-    let (_, code) = index.create(Some("product_id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("product_id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on all indexes.

@@ -307,7 +339,7 @@ async fn access_authorized_stats_no_index_restriction() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
+    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.

@@ -315,7 +347,7 @@ async fn access_authorized_stats_no_index_restriction() {
    server.use_api_key(&key);

    let (response, code) = server.stats().await;
-    assert_eq!(code, 200);
+    assert_eq!(200, code, "{:?}", &response);

    // key should have access on `products` index.
    assert!(response["indexes"].get("products").is_some());

@@ -328,16 +360,16 @@ async fn access_authorized_stats_no_index_restriction() {
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_restricted_index() {
    let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
-    let (_, code) = index.create(Some("id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
-    let (_, code) = index.create(Some("product_id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("product_id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on `products` index only.

@@ -347,17 +379,17 @@ async fn list_authorized_indexes_restricted_index() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
+    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

-    let (response, code) = server.list_indexes().await;
-    assert_eq!(code, 200);
+    let (response, code) = server.list_indexes(None, None).await;
+    assert_eq!(200, code, "{:?}", &response);

-    let response = response.as_array().unwrap();
+    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
    assert!(response.iter().any(|index| index["uid"] == "products"));
@@ -369,16 +401,16 @@ async fn list_authorized_indexes_restricted_index() {
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_no_index_restriction() {
    let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
-    let (_, code) = index.create(Some("id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
-    let (_, code) = index.create(Some("product_id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("product_id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on all indexes.

@@ -388,17 +420,17 @@ async fn list_authorized_indexes_no_index_restriction() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
+    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

-    let (response, code) = server.list_indexes().await;
-    assert_eq!(code, 200);
+    let (response, code) = server.list_indexes(None, None).await;
+    assert_eq!(200, code, "{:?}", &response);

-    let response = response.as_array().unwrap();
+    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
    assert!(response.iter().any(|index| index["uid"] == "products"));

@@ -409,16 +441,16 @@ async fn list_authorized_indexes_no_index_restriction() {
#[actix_rt::test]
async fn list_authorized_tasks_restricted_index() {
    let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
-    let (_, code) = index.create(Some("id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
-    let (_, code) = index.create(Some("product_id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("product_id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on `products` index only.

@@ -428,7 +460,7 @@ async fn list_authorized_tasks_restricted_index() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
+    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.

@@ -436,7 +468,7 @@ async fn list_authorized_tasks_restricted_index() {
    server.use_api_key(&key);

    let (response, code) = server.service.get("/tasks").await;
-    assert_eq!(code, 200);
+    assert_eq!(200, code, "{:?}", &response);
    println!("{}", response);
    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.

@@ -449,16 +481,16 @@ async fn list_authorized_tasks_restricted_index() {
#[actix_rt::test]
async fn list_authorized_tasks_no_index_restriction() {
    let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
-    let (_, code) = index.create(Some("id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
-    let (_, code) = index.create(Some("product_id")).await;
-    assert_eq!(code, 202);
+    let (response, code) = index.create(Some("product_id")).await;
+    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on all indexes.

@@ -468,7 +500,7 @@ async fn list_authorized_tasks_no_index_restriction() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
+    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.

@@ -476,7 +508,7 @@ async fn list_authorized_tasks_no_index_restriction() {
    server.use_api_key(&key);

    let (response, code) = server.service.get("/tasks").await;
-    assert_eq!(code, 200);
+    assert_eq!(200, code, "{:?}", &response);

    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
@@ -499,7 +531,7 @@ async fn error_creating_index_without_action() {
        "expiresAt": "2050-11-13T00:00:00Z"
    });
    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
+    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.

@@ -523,8 +555,8 @@ async fn error_creating_index_without_action() {
    ]);

    let (response, code) = index.add_documents(documents, None).await;
-    assert_eq!(code, 202, "{:?}", response);
-    let task_id = response["uid"].as_u64().unwrap();
+    assert_eq!(202, code, "{:?}", &response);
+    let task_id = response["taskUid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;
    assert_eq!(response["status"], "failed");

@@ -534,8 +566,8 @@ async fn error_creating_index_without_action() {
    let settings = json!({ "distinctAttribute": "test"});

    let (response, code) = index.update_settings(settings).await;
-    assert_eq!(code, 202);
-    let task_id = response["uid"].as_u64().unwrap();
+    assert_eq!(202, code, "{:?}", &response);
+    let task_id = response["taskUid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;

@@ -544,8 +576,8 @@ async fn error_creating_index_without_action() {

    // try to create a index via add specialized settings route
    let (response, code) = index.update_distinct_attribute(json!("test")).await;
-    assert_eq!(code, 202);
-    let task_id = response["uid"].as_u64().unwrap();
+    assert_eq!(202, code, "{:?}", &response);
+    let task_id = response["taskUid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;

@@ -566,7 +598,7 @@ async fn lazy_create_index() {
    });

    let (response, code) = server.add_api_key(content).await;
-    assert_eq!(code, 201);
+    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.

@@ -583,13 +615,13 @@ async fn lazy_create_index() {
    ]);

    let (response, code) = index.add_documents(documents, None).await;
-    assert_eq!(code, 202, "{:?}", response);
-    let task_id = response["uid"].as_u64().unwrap();
+    assert_eq!(202, code, "{:?}", &response);
+    let task_id = response["taskUid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
-    assert_eq!(code, 200);
+    assert_eq!(200, code, "{:?}", &response);
    assert_eq!(response["status"], "succeeded");

    // try to create a index via add settings route

@@ -597,24 +629,24 @@ async fn lazy_create_index() {
    let settings = json!({ "distinctAttribute": "test"});

    let (response, code) = index.update_settings(settings).await;
-    assert_eq!(code, 202);
-    let task_id = response["uid"].as_u64().unwrap();
+    assert_eq!(202, code, "{:?}", &response);
+    let task_id = response["taskUid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
-    assert_eq!(code, 200);
+    assert_eq!(200, code, "{:?}", &response);
    assert_eq!(response["status"], "succeeded");

    // try to create a index via add specialized settings route
    let index = server.index("test2");
    let (response, code) = index.update_distinct_attribute(json!("test")).await;
-    assert_eq!(code, 202);
-    let task_id = response["uid"].as_u64().unwrap();
+    assert_eq!(202, code, "{:?}", &response);
+    let task_id = response["taskUid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
-    assert_eq!(code, 200);
+    assert_eq!(200, code, "{:?}", &response);
    assert_eq!(response["status"], "succeeded");
}
@@ -13,6 +13,15 @@ impl Server {
        self.service.api_key = Some(api_key.as_ref().to_string());
    }

+    /// Fetch and use the default admin key for next http requests.
+    pub async fn use_admin_key(&mut self, master_key: impl AsRef<str>) {
+        self.use_api_key(master_key);
+        let (response, code) = self.list_api_keys().await;
+        assert_eq!(200, code, "{:?}", response);
+        let admin_key = &response["results"][1]["key"];
+        self.use_api_key(admin_key.as_str().unwrap());
+    }

    pub async fn add_api_key(&self, content: Value) -> (Value, StatusCode) {
        let url = "/keys";
        self.service.post(url, content).await
@@ -8,11 +8,15 @@ use time::{Duration, OffsetDateTime};

use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};

-fn generate_tenant_token(parent_key: impl AsRef<str>, mut body: HashMap<&str, Value>) -> String {
+fn generate_tenant_token(
+    parent_uid: impl AsRef<str>,
+    parent_key: impl AsRef<str>,
+    mut body: HashMap<&str, Value>,
+) -> String {
    use jsonwebtoken::{encode, EncodingKey, Header};

-    let key_id = &parent_key.as_ref()[..8];
-    body.insert("apiKeyPrefix", json!(key_id));
+    let parent_uid = parent_uid.as_ref();
+    body.insert("apiKeyUid", json!(parent_uid));
    encode(
        &Header::default(),
        &body,

@@ -114,7 +118,7 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
macro_rules! compute_autorized_search {
    ($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
        let mut server = Server::new_auth().await;
-        server.use_api_key("MASTER_KEY");
+        server.use_admin_key("MASTER_KEY").await;
        let index = server.index("sales");
        let documents = DOCUMENTS.clone();
        index.add_documents(documents, None).await;

@@ -130,9 +134,10 @@ macro_rules! compute_autorized_search {
        let (response, code) = server.add_api_key(key_content.clone()).await;
        assert_eq!(code, 201);
        let key = response["key"].as_str().unwrap();
+        let uid = response["uid"].as_str().unwrap();

        for tenant_token in $tenant_tokens.iter() {
-            let web_token = generate_tenant_token(&key, tenant_token.clone());
+            let web_token = generate_tenant_token(&uid, &key, tenant_token.clone());
            server.use_api_key(&web_token);
            let index = server.index("sales");
            index

@@ -160,7 +165,7 @@ macro_rules! compute_autorized_search {
macro_rules! compute_forbidden_search {
    ($tenant_tokens:expr, $parent_keys:expr) => {
        let mut server = Server::new_auth().await;
-        server.use_api_key("MASTER_KEY");
+        server.use_admin_key("MASTER_KEY").await;
        let index = server.index("sales");
        let documents = DOCUMENTS.clone();
        index.add_documents(documents, None).await;

@@ -172,9 +177,10 @@ macro_rules! compute_forbidden_search {
        let (response, code) = server.add_api_key(key_content.clone()).await;
        assert_eq!(code, 201, "{:?}", response);
        let key = response["key"].as_str().unwrap();
+        let uid = response["uid"].as_str().unwrap();

        for tenant_token in $tenant_tokens.iter() {
-            let web_token = generate_tenant_token(&key, tenant_token.clone());
+            let web_token = generate_tenant_token(&uid, &key, tenant_token.clone());
            server.use_api_key(&web_token);
            let index = server.index("sales");
            index

@@ -461,12 +467,13 @@ async fn error_access_forbidden_routes() {
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
+    let uid = response["uid"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
-    let web_token = generate_tenant_token(&key, tenant_token);
+    let web_token = generate_tenant_token(&uid, &key, tenant_token);
    server.use_api_key(&web_token);

    for ((method, route), actions) in AUTHORIZATIONS.iter() {

@@ -496,12 +503,13 @@ async fn error_access_expired_parent_key() {
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
+    let uid = response["uid"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
-    let web_token = generate_tenant_token(&key, tenant_token);
+    let web_token = generate_tenant_token(&uid, &key, tenant_token);
    server.use_api_key(&web_token);

    // test search request while parent_key is not expired

@@ -538,12 +546,13 @@ async fn error_access_modified_token() {
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
+    let uid = response["uid"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["products"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
-    let web_token = generate_tenant_token(&key, tenant_token);
+    let web_token = generate_tenant_token(&uid, &key, tenant_token);
    server.use_api_key(&web_token);

    // test search request while web_token is valid

@@ -558,7 +567,7 @@ async fn error_access_modified_token() {
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };

-    let alt = generate_tenant_token(&key, tenant_token);
+    let alt = generate_tenant_token(&uid, &key, tenant_token);
    let altered_token = [
        web_token.split('.').next().unwrap(),
        alt.split('.').nth(1).unwrap(),
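This change replaces the old `apiKeyPrefix` claim (the first 8 characters of the parent key) with an `apiKeyUid` claim carrying the parent key's `uid`. A sketch of building such a token with the `jsonwebtoken` crate; the key and uid values below are invented for illustration:

use std::collections::HashMap;

use jsonwebtoken::{encode, EncodingKey, Header};
use serde_json::{json, Value};

// Sketch only: sign a tenant-token body with the parent API key, embedding
// the parent key's uid as the `apiKeyUid` claim, as the tests above do.
fn sketch_tenant_token() -> String {
    let parent_uid = "76cf8b87-fd12-4688-ad34-260d930ca4d4"; // invented
    let parent_key = "a-parent-api-key"; // invented
    let mut body: HashMap<&str, Value> = HashMap::new();
    body.insert("searchRules", json!(["*"]));
    body.insert("apiKeyUid", json!(parent_uid));
    encode(
        &Header::default(),
        &body,
        &EncodingKey::from_secret(parent_key.as_bytes()),
    )
    .unwrap()
}

fn main() {
    println!("{}", sketch_tenant_token());
}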
@@ -4,29 +4,12 @@ use std::{
};

use actix_web::http::StatusCode;
-use paste::paste;
use serde_json::{json, Value};
use tokio::time::sleep;
use urlencoding::encode;

use super::service::Service;

-macro_rules! make_settings_test_routes {
-    ($($name:ident),+) => {
-        $(paste! {
-            pub async fn [<update_$name>](&self, value: Value) -> (Value, StatusCode) {
-                let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
-                self.service.post(url, value).await
-            }
-
-            pub async fn [<get_$name>](&self) -> (Value, StatusCode) {
-                let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
-                self.service.get(url).await
-            }
-        })*
-    };
-}
-
pub struct Index<'a> {
    pub uid: String,
    pub service: &'a Service,

@@ -46,7 +29,7 @@ impl Index<'_> {
        .post_str(url, include_str!("../assets/test_set.json"))
        .await;
    assert_eq!(code, 202);
-    let update_id = response["uid"].as_i64().unwrap();
+    let update_id = response["taskUid"].as_i64().unwrap();
    self.wait_task(update_id as u64).await;
    update_id as u64
}

@@ -65,7 +48,7 @@ impl Index<'_> {
        });
        let url = format!("/indexes/{}", encode(self.uid.as_ref()));

-        self.service.put(url, body).await
+        self.service.patch(url, body).await
    }

    pub async fn delete(&self) -> (Value, StatusCode) {

@@ -106,37 +89,52 @@ impl Index<'_> {
    }

    pub async fn wait_task(&self, update_id: u64) -> Value {
-        // try 10 times to get status, or panic to not wait forever
+        // try several times to get status, or panic to not wait forever
        let url = format!("/tasks/{}", update_id);
-        for _ in 0..10 {
+        for _ in 0..100 {
            let (response, status_code) = self.service.get(&url).await;
-            assert_eq!(status_code, 200, "response: {}", response);
+            assert_eq!(200, status_code, "response: {}", response);

            if response["status"] == "succeeded" || response["status"] == "failed" {
                return response;
            }

-            sleep(Duration::from_secs(1)).await;
+            // wait 0.5 second.
+            sleep(Duration::from_millis(500)).await;
        }
        panic!("Timeout waiting for update id");
    }

    pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/tasks/{}", self.uid, update_id);
+        let url = format!("/tasks/{}", update_id);
        self.service.get(url).await
    }

    pub async fn list_tasks(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/tasks", self.uid);
+        let url = format!("/tasks?indexUid={}", self.uid);
        self.service.get(url).await
    }

+    pub async fn filtered_tasks(&self, type_: &[&str], status: &[&str]) -> (Value, StatusCode) {
+        let mut url = format!("/tasks?indexUid={}", self.uid);
+        if !type_.is_empty() {
+            url += &format!("&type={}", type_.join(","));
+        }
+        if !status.is_empty() {
+            url += &format!("&status={}", status.join(","));
+        }
+        self.service.get(url).await
+    }
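A sketch of how the new `filtered_tasks` helper could be called from an integration test; the index name and filter values are examples only, and the surrounding test scaffolding comes from the suite's common module:

// Hypothetical test, for illustration: list only the succeeded
// document-addition tasks of the `products` index.
#[actix_rt::test]
async fn filtered_tasks_usage_sketch() {
    let server = Server::new().await;
    let index = server.index("products");
    let (response, code) = index
        .filtered_tasks(&["documentAdditionOrUpdate"], &["succeeded"])
        .await;
    assert_eq!(200, code, "{:?}", response);
}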

    pub async fn get_document(
        &self,
        id: u64,
-        _options: Option<GetDocumentOptions>,
+        options: Option<GetDocumentOptions>,
    ) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
+        let mut url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
+        if let Some(fields) = options.and_then(|o| o.fields) {
+            url.push_str(&format!("?fields={}", fields.join(",")));
+        }
        self.service.get(url).await
    }

@@ -151,10 +149,7 @@ impl Index<'_> {
        }

        if let Some(attributes_to_retrieve) = options.attributes_to_retrieve {
-            url.push_str(&format!(
-                "attributesToRetrieve={}&",
-                attributes_to_retrieve.join(",")
-            ));
+            url.push_str(&format!("fields={}&", attributes_to_retrieve.join(",")));
        }

        self.service.get(url).await

@@ -187,7 +182,7 @@ impl Index<'_> {

    pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
        let url = format!("/indexes/{}/settings", encode(self.uid.as_ref()));
-        self.service.post(url, settings).await
+        self.service.patch(url, settings).await
    }

    pub async fn delete_settings(&self) -> (Value, StatusCode) {

@@ -231,10 +226,28 @@ impl Index<'_> {
        self.service.get(url).await
    }

-    make_settings_test_routes!(distinct_attribute);
+    pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
+        let url = format!(
+            "/indexes/{}/settings/{}",
+            encode(self.uid.as_ref()),
+            "distinct-attribute"
+        );
+        self.service.put(url, value).await
+    }
+
+    pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) {
+        let url = format!(
+            "/indexes/{}/settings/{}",
+            encode(self.uid.as_ref()),
+            "distinct-attribute"
+        );
+        self.service.get(url).await
+    }
}

-pub struct GetDocumentOptions;
+pub struct GetDocumentOptions {
+    pub fields: Option<Vec<&'static str>>,
+}

#[derive(Debug, Default)]
pub struct GetAllDocumentsOptions {
@@ -3,7 +3,7 @@ pub mod server;
pub mod service;

pub use index::{GetAllDocumentsOptions, GetDocumentOptions};
-pub use server::Server;
+pub use server::{default_settings, Server};

/// Performs a search test on both post and get routes
#[macro_export]
@@ -52,16 +52,13 @@ impl Server {
        }
    }

-    pub async fn new_auth() -> Self {
-        let dir = TempDir::new().unwrap();
-
+    pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
        if cfg!(windows) {
            std::env::set_var("TMP", TEST_TEMP_DIR.path());
        } else {
            std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
        }

-        let mut options = default_settings(dir.path());
        options.master_key = Some("MASTER_KEY".to_string());

        let meilisearch = setup_meilisearch(&options).unwrap();

@@ -79,9 +76,15 @@ impl Server {
        }
    }

-    pub async fn new_with_options(options: Opt) -> Self {
-        let meilisearch = setup_meilisearch(&options).unwrap();
-        let auth = AuthController::new(&options.db_path, &options.master_key).unwrap();
+    pub async fn new_auth() -> Self {
+        let dir = TempDir::new().unwrap();
+        let options = default_settings(dir.path());
+        Self::new_auth_with_options(options, dir).await
+    }
+
+    pub async fn new_with_options(options: Opt) -> Result<Self, anyhow::Error> {
+        let meilisearch = setup_meilisearch(&options)?;
+        let auth = AuthController::new(&options.db_path, &options.master_key)?;
        let service = Service {
            meilisearch,
            auth,

@@ -89,10 +92,10 @@ impl Server {
            api_key: None,
        };

-        Server {
+        Ok(Server {
            service,
            _dir: None,
-        }
+        })
    }

    /// Returns a view to an index. There is no guarantee that the index exists.

@@ -103,8 +106,27 @@ impl Server {
        }
    }

-    pub async fn list_indexes(&self) -> (Value, StatusCode) {
-        self.service.get("/indexes").await
+    pub async fn list_indexes(
+        &self,
+        offset: Option<usize>,
+        limit: Option<usize>,
+    ) -> (Value, StatusCode) {
+        let (offset, limit) = (
+            offset.map(|offset| format!("offset={offset}")),
+            limit.map(|limit| format!("limit={limit}")),
+        );
+        let query_parameter = offset
+            .as_ref()
+            .zip(limit.as_ref())
+            .map(|(offset, limit)| format!("{offset}&{limit}"))
+            .or_else(|| offset.xor(limit));
+        if let Some(query_parameter) = query_parameter {
+            self.service
+                .get(format!("/indexes?{query_parameter}"))
+                .await
+        } else {
+            self.service.get("/indexes").await
+        }
    }

    pub async fn version(&self) -> (Value, StatusCode) {

@@ -131,8 +153,8 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
    env: "development".to_owned(),
    #[cfg(all(not(debug_assertions), feature = "analytics"))]
    no_analytics: true,
-    max_index_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
-    max_task_db_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
+    max_index_size: Byte::from_unit(100.0, ByteUnit::MiB).unwrap(),
+    max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(),
    http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(),
    snapshot_dir: ".".into(),
    indexer_options: IndexerOpts {
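The `zip`/`xor` combination above builds the query string for every combination of the two optional parameters: both present, exactly one present, or neither. A usage sketch, assumed to run inside an async test with a `server` from `Server::new().await`:

// Each call maps to the query string shown in the comment.
let (response, code) = server.list_indexes(Some(2), Some(5)).await; // GET /indexes?offset=2&limit=5
assert_eq!(200, code, "{:?}", response);
let (response, code) = server.list_indexes(None, Some(5)).await; // GET /indexes?limit=5
assert_eq!(200, code, "{:?}", response);
let (response, code) = server.list_indexes(None, None).await; // GET /indexes
assert_eq!(200, code, "{:?}", response);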
@@ -7,23 +7,45 @@ use actix_web::test;
use meilisearch_http::{analytics, create_app};
use serde_json::{json, Value};

+enum HttpVerb {
+    Put,
+    Patch,
+    Post,
+    Get,
+    Delete,
+}
+
+impl HttpVerb {
+    fn test_request(&self) -> test::TestRequest {
+        match self {
+            HttpVerb::Put => test::TestRequest::put(),
+            HttpVerb::Patch => test::TestRequest::patch(),
+            HttpVerb::Post => test::TestRequest::post(),
+            HttpVerb::Get => test::TestRequest::get(),
+            HttpVerb::Delete => test::TestRequest::delete(),
+        }
+    }
+}
+
#[actix_rt::test]
async fn error_json_bad_content_type() {
+    use HttpVerb::{Patch, Post, Put};
+
    let routes = [
-        // all the POST routes except the dumps that can be created without any body or content-type
+        // all the routes except the dumps that can be created without any body or content-type
        // and the search that is not a strict json
-        "/indexes",
-        "/indexes/doggo/documents/delete-batch",
-        "/indexes/doggo/search",
-        "/indexes/doggo/settings",
-        "/indexes/doggo/settings/displayed-attributes",
-        "/indexes/doggo/settings/distinct-attribute",
-        "/indexes/doggo/settings/filterable-attributes",
-        "/indexes/doggo/settings/ranking-rules",
-        "/indexes/doggo/settings/searchable-attributes",
-        "/indexes/doggo/settings/sortable-attributes",
-        "/indexes/doggo/settings/stop-words",
-        "/indexes/doggo/settings/synonyms",
+        (Post, "/indexes"),
+        (Post, "/indexes/doggo/documents/delete-batch"),
+        (Post, "/indexes/doggo/search"),
+        (Patch, "/indexes/doggo/settings"),
+        (Put, "/indexes/doggo/settings/displayed-attributes"),
+        (Put, "/indexes/doggo/settings/distinct-attribute"),
+        (Put, "/indexes/doggo/settings/filterable-attributes"),
+        (Put, "/indexes/doggo/settings/ranking-rules"),
+        (Put, "/indexes/doggo/settings/searchable-attributes"),
+        (Put, "/indexes/doggo/settings/sortable-attributes"),
+        (Put, "/indexes/doggo/settings/stop-words"),
+        (Put, "/indexes/doggo/settings/synonyms"),
    ];
    let bad_content_types = [
        "application/csv",

@@ -45,10 +67,11 @@ async fn error_json_bad_content_type() {
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;
-    for route in routes {
+    for (verb, route) in routes {
        // Good content-type, we probably have an error since we didn't send anything in the json
        // so we only ensure we didn't get a bad media type error.
-        let req = test::TestRequest::post()
+        let req = verb
+            .test_request()
            .uri(route)
            .set_payload(document)
            .insert_header(("content-type", "application/json"))

@@ -59,7 +82,8 @@ async fn error_json_bad_content_type() {
        "calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);

        // No content-type.
-        let req = test::TestRequest::post()
+        let req = verb
+            .test_request()
            .uri(route)
            .set_payload(document)
            .to_request();

@@ -82,7 +106,8 @@ async fn error_json_bad_content_type() {

        for bad_content_type in bad_content_types {
            // Always bad content-type
-            let req = test::TestRequest::post()
+            let req = verb
+                .test_request()
                .uri(route)
                .set_payload(document.to_string())
                .insert_header(("content-type", bad_content_type))
@@ -35,7 +35,7 @@ async fn add_documents_test_json_content_types() {
|
||||
let body = test::read_body(res).await;
|
||||
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
|
||||
assert_eq!(status_code, 202);
|
||||
assert_eq!(response["uid"], 0);
|
||||
assert_eq!(response["taskUid"], 0);
|
||||
|
||||
// put
|
||||
let req = test::TestRequest::put()
|
||||
@@ -48,7 +48,7 @@ async fn add_documents_test_json_content_types() {
|
||||
let body = test::read_body(res).await;
|
||||
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
|
||||
assert_eq!(status_code, 202);
|
||||
assert_eq!(response["uid"], 1);
|
||||
assert_eq!(response["taskUid"], 1);
|
||||
}
|
||||
|
||||
/// any other content-type is must be refused
|
||||
@@ -599,7 +599,7 @@ async fn add_documents_no_index_creation() {
|
||||
|
||||
let (response, code) = index.add_documents(documents, None).await;
|
||||
assert_eq!(code, 202);
|
||||
assert_eq!(response["uid"], 0);
|
||||
assert_eq!(response["taskUid"], 0);
|
||||
/*
|
||||
* currently we don’t check these field to stay ISO with meilisearch
|
||||
* assert_eq!(response["status"], "pending");
|
||||
@@ -615,7 +615,7 @@ async fn add_documents_no_index_creation() {
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["status"], "succeeded");
|
||||
assert_eq!(response["uid"], 0);
|
||||
assert_eq!(response["type"], "documentAddition");
|
||||
assert_eq!(response["type"], "documentAdditionOrUpdate");
|
||||
assert_eq!(response["details"]["receivedDocuments"], 1);
|
||||
assert_eq!(response["details"]["indexedDocuments"], 1);
|
||||
|
||||
@@ -638,7 +638,7 @@ async fn error_document_add_create_index_bad_uid() {
|
||||
let (response, code) = index.add_documents(json!([{"id": 1}]), None).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
|
||||
"message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
|
||||
"code": "invalid_index_uid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
|
||||
@@ -655,7 +655,7 @@ async fn error_document_update_create_index_bad_uid() {
|
||||
let (response, code) = index.update_documents(json!([{"id": 1}]), None).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
|
||||
"message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
|
||||
"code": "invalid_index_uid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
|
||||
@@ -685,7 +685,7 @@ async fn document_addition_with_primary_key() {
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["status"], "succeeded");
|
||||
assert_eq!(response["uid"], 0);
|
||||
assert_eq!(response["type"], "documentAddition");
|
||||
assert_eq!(response["type"], "documentAdditionOrUpdate");
|
||||
assert_eq!(response["details"]["receivedDocuments"], 1);
|
||||
assert_eq!(response["details"]["indexedDocuments"], 1);
|
||||
|
||||
@@ -714,7 +714,7 @@ async fn document_update_with_primary_key() {
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["status"], "succeeded");
|
||||
assert_eq!(response["uid"], 0);
|
||||
assert_eq!(response["type"], "documentPartial");
|
||||
assert_eq!(response["type"], "documentAdditionOrUpdate");
|
||||
assert_eq!(response["details"]["indexedDocuments"], 1);
|
||||
assert_eq!(response["details"]["receivedDocuments"], 1);
|
||||
|
||||
@@ -818,7 +818,7 @@ async fn add_larger_dataset() {
|
||||
let (response, code) = index.get_task(update_id).await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["status"], "succeeded");
|
||||
assert_eq!(response["type"], "documentAddition");
|
||||
assert_eq!(response["type"], "documentAdditionOrUpdate");
|
||||
assert_eq!(response["details"]["indexedDocuments"], 77);
|
||||
assert_eq!(response["details"]["receivedDocuments"], 77);
|
||||
let (response, code) = index
|
||||
@@ -827,8 +827,8 @@ async fn add_larger_dataset() {
|
||||
..Default::default()
|
||||
})
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response.as_array().unwrap().len(), 77);
|
||||
assert_eq!(code, 200, "failed with `{}`", response);
|
||||
assert_eq!(response["results"].as_array().unwrap().len(), 77);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -840,7 +840,7 @@ async fn update_larger_dataset() {
|
||||
index.wait_task(0).await;
|
||||
let (response, code) = index.get_task(0).await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["type"], "documentPartial");
|
||||
assert_eq!(response["type"], "documentAdditionOrUpdate");
|
||||
assert_eq!(response["details"]["indexedDocuments"], 77);
|
||||
let (response, code) = index
|
||||
.get_all_documents(GetAllDocumentsOptions {
|
||||
@@ -849,7 +849,7 @@ async fn update_larger_dataset() {
|
||||
})
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response.as_array().unwrap().len(), 77);
|
||||
assert_eq!(response["results"].as_array().unwrap().len(), 77);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -868,7 +868,12 @@ async fn error_add_documents_bad_document_id() {
     let (response, code) = index.get_task(1).await;
     assert_eq!(code, 200);
     assert_eq!(response["status"], json!("failed"));
-    assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
+    assert_eq!(
+        response["error"]["message"],
+        json!(
+            r#"Document identifier `"foo & bar"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."#
+        )
+    );
     assert_eq!(response["error"]["code"], json!("invalid_document_id"));
     assert_eq!(response["error"]["type"], json!("invalid_request"));
     assert_eq!(

@@ -891,7 +896,12 @@ async fn error_update_documents_bad_document_id() {
     index.update_documents(documents, None).await;
     let response = index.wait_task(1).await;
     assert_eq!(response["status"], json!("failed"));
-    assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
+    assert_eq!(
+        response["error"]["message"],
+        json!(
+            r#"Document identifier `"foo & bar"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."#
+        )
+    );
     assert_eq!(response["error"]["code"], json!("invalid_document_id"));
     assert_eq!(response["error"]["type"], json!("invalid_request"));
     assert_eq!(

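Both bad-document-id hunks change the expected message in the same way: the offending identifier is now shown in its JSON-serialized form, so the string `foo & bar` gains surrounding quotes inside the backticks. One plausible way to produce that rendering, assuming the message embeds the value through its JSON representation (the actual formatting code is not part of this diff):

    // serde_json::Value's Display prints the value as JSON,
    // which is what adds the quotes around a string id.
    let id = serde_json::json!("foo & bar");
    let msg = format!("Document identifier `{}` is invalid.", id);
    assert_eq!(msg, r#"Document identifier `"foo & bar"` is invalid."#);
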
@@ -72,7 +72,7 @@ async fn clear_all_documents() {
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
     assert_eq!(code, 200);
-    assert!(response.as_array().unwrap().is_empty());
+    assert!(response["results"].as_array().unwrap().is_empty());
 }

 #[actix_rt::test]

@@ -89,7 +89,7 @@ async fn clear_all_documents_empty_index() {
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
     assert_eq!(code, 200);
-    assert!(response.as_array().unwrap().is_empty());
+    assert!(response["results"].as_array().unwrap().is_empty());
 }

 #[actix_rt::test]

@@ -125,8 +125,8 @@ async fn delete_batch() {
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 1);
-    assert_eq!(response.as_array().unwrap()[0]["id"], 3);
+    assert_eq!(response["results"].as_array().unwrap().len(), 1);
+    assert_eq!(response["results"][0]["id"], json!(3));
 }

 #[actix_rt::test]

@@ -143,5 +143,5 @@ async fn delete_no_document_batch() {
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 3);
+    assert_eq!(response["results"].as_array().unwrap().len(), 3);
 }

@@ -1,5 +1,4 @@
-use crate::common::GetAllDocumentsOptions;
-use crate::common::Server;
+use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};

 use serde_json::json;

@@ -39,19 +38,51 @@ async fn get_document() {
     let documents = serde_json::json!([
         {
             "id": 0,
             "content": "foobar",
+            "nested": { "content": "foobar" },
         }
     ]);
     let (_, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    index.wait_task(0).await;
+    index.wait_task(1).await;
     let (response, code) = index.get_document(0, None).await;
     assert_eq!(code, 200);
     assert_eq!(
         response,
-        serde_json::json!( {
+        serde_json::json!({
             "id": 0,
             "content": "foobar",
+            "nested": { "content": "foobar" },
         })
     );
+
+    let (response, code) = index
+        .get_document(
+            0,
+            Some(GetDocumentOptions {
+                fields: Some(vec!["id"]),
+            }),
+        )
+        .await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        response,
+        serde_json::json!({
+            "id": 0,
+        })
+    );
+
+    let (response, code) = index
+        .get_document(
+            0,
+            Some(GetDocumentOptions {
+                fields: Some(vec!["nested.content"]),
+            }),
+        )
+        .await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        response,
+        serde_json::json!({
+            "nested": { "content": "foobar" },
+        })
+    );
 }

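The rewritten `get_document` test introduces a `fields` option on single-document retrieval, including dot-separated selectors such as `nested.content` that pick one key out of a nested object. The helper type lives in the tests' `common` module rather than in this diff; a sketch of its assumed shape:

    // Assumed definition of the test helper's options struct,
    // presumably serialized as a `?fields=id,nested.content` query.
    #[derive(Debug, Default)]
    pub struct GetDocumentOptions {
        pub fields: Option<Vec<&'static str>>,
    }
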
@@ -88,7 +119,7 @@ async fn get_no_document() {
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
     assert_eq!(code, 200);
-    assert!(response.as_array().unwrap().is_empty());
+    assert!(response["results"].as_array().unwrap().is_empty());
 }

 #[actix_rt::test]

@@ -101,7 +132,7 @@ async fn get_all_documents_no_options() {
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
     assert_eq!(code, 200);
-    let arr = response.as_array().unwrap();
+    let arr = response["results"].as_array().unwrap();
     assert_eq!(arr.len(), 20);
     let first = serde_json::json!({
         "id":0,

@@ -137,8 +168,11 @@ async fn test_get_all_documents_limit() {
         })
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 5);
-    assert_eq!(response.as_array().unwrap()[0]["id"], 0);
+    assert_eq!(response["results"].as_array().unwrap().len(), 5);
+    assert_eq!(response["results"][0]["id"], json!(0));
+    assert_eq!(response["offset"], json!(0));
+    assert_eq!(response["limit"], json!(5));
+    assert_eq!(response["total"], json!(77));
 }

 #[actix_rt::test]

@@ -154,8 +188,11 @@ async fn test_get_all_documents_offset() {
         })
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 20);
-    assert_eq!(response.as_array().unwrap()[0]["id"], 5);
+    assert_eq!(response["results"].as_array().unwrap().len(), 20);
+    assert_eq!(response["results"][0]["id"], json!(5));
+    assert_eq!(response["offset"], json!(5));
+    assert_eq!(response["limit"], json!(20));
+    assert_eq!(response["total"], json!(77));
 }

 #[actix_rt::test]

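This hunk and the `limit` one above pin down the three pagination fields echoed back by the route: `offset` and `limit` mirror the request, while `total` reports the full document count (77 in this fixture). A usage sketch built on the same helper, with option field names assumed from the surrounding code:

    // Request the second page of 20 documents and check the envelope.
    let (response, code) = index
        .get_all_documents(GetAllDocumentsOptions {
            offset: Some(20),
            limit: Some(20),
            ..Default::default()
        })
        .await;
    assert_eq!(code, 200);
    assert_eq!(response["results"].as_array().unwrap().len(), 20);
    assert_eq!(response["offset"], serde_json::json!(20));
    assert_eq!(response["total"], serde_json::json!(77));
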
@@ -171,20 +208,14 @@ async fn test_get_all_documents_attributes_to_retrieve() {
         })
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 20);
-    assert_eq!(
-        response.as_array().unwrap()[0]
-            .as_object()
-            .unwrap()
-            .keys()
-            .count(),
-        1
-    );
-    assert!(response.as_array().unwrap()[0]
-        .as_object()
-        .unwrap()
-        .get("name")
-        .is_some());
+    assert_eq!(response["results"].as_array().unwrap().len(), 20);
+    for results in response["results"].as_array().unwrap() {
+        assert_eq!(results.as_object().unwrap().keys().count(), 1);
+        assert!(results["name"] != json!(null));
+    }
+    assert_eq!(response["offset"], json!(0));
+    assert_eq!(response["limit"], json!(20));
+    assert_eq!(response["total"], json!(77));

     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {

@@ -193,15 +224,13 @@ async fn test_get_all_documents_attributes_to_retrieve() {
         })
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 20);
-    assert_eq!(
-        response.as_array().unwrap()[0]
-            .as_object()
-            .unwrap()
-            .keys()
-            .count(),
-        0
-    );
+    assert_eq!(response["results"].as_array().unwrap().len(), 20);
+    for results in response["results"].as_array().unwrap() {
+        assert_eq!(results.as_object().unwrap().keys().count(), 0);
+    }
+    assert_eq!(response["offset"], json!(0));
+    assert_eq!(response["limit"], json!(20));
+    assert_eq!(response["total"], json!(77));

     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {

@@ -210,15 +239,13 @@ async fn test_get_all_documents_attributes_to_retrieve() {
         })
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 20);
-    assert_eq!(
-        response.as_array().unwrap()[0]
-            .as_object()
-            .unwrap()
-            .keys()
-            .count(),
-        0
-    );
+    assert_eq!(response["results"].as_array().unwrap().len(), 20);
+    for results in response["results"].as_array().unwrap() {
+        assert_eq!(results.as_object().unwrap().keys().count(), 0);
+    }
+    assert_eq!(response["offset"], json!(0));
+    assert_eq!(response["limit"], json!(20));
+    assert_eq!(response["total"], json!(77));

     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {

@@ -227,15 +254,12 @@ async fn test_get_all_documents_attributes_to_retrieve() {
         })
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 20);
-    assert_eq!(
-        response.as_array().unwrap()[0]
-            .as_object()
-            .unwrap()
-            .keys()
-            .count(),
-        2
-    );
+    assert_eq!(response["results"].as_array().unwrap().len(), 20);
+    for results in response["results"].as_array().unwrap() {
+        assert_eq!(results.as_object().unwrap().keys().count(), 2);
+        assert!(results["name"] != json!(null));
+        assert!(results["tags"] != json!(null));
+    }

     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {

@@ -244,15 +268,10 @@ async fn test_get_all_documents_attributes_to_retrieve() {
         })
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 20);
-    assert_eq!(
-        response.as_array().unwrap()[0]
-            .as_object()
-            .unwrap()
-            .keys()
-            .count(),
-        16
-    );
+    assert_eq!(response["results"].as_array().unwrap().len(), 20);
+    for results in response["results"].as_array().unwrap() {
+        assert_eq!(results.as_object().unwrap().keys().count(), 16);
+    }

     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {

@@ -261,19 +280,99 @@ async fn test_get_all_documents_attributes_to_retrieve() {
         })
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 20);
-    assert_eq!(
-        response.as_array().unwrap()[0]
-            .as_object()
-            .unwrap()
-            .keys()
-            .count(),
-        16
-    );
+    assert_eq!(response["results"].as_array().unwrap().len(), 20);
+    for results in response["results"].as_array().unwrap() {
+        assert_eq!(results.as_object().unwrap().keys().count(), 16);
+    }
 }

+#[actix_rt::test]
+async fn get_document_s_nested_attributes_to_retrieve() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    let documents = json!([
+        {
+            "id": 0,
+            "content.truc": "foobar",
+        },
+        {
+            "id": 1,
+            "content": {
+                "truc": "foobar",
+                "machin": "bidule",
+            },
+        },
+    ]);
+    let (_, code) = index.add_documents(documents, None).await;
+    assert_eq!(code, 202);
+    index.wait_task(1).await;
+
+    let (response, code) = index
+        .get_document(
+            0,
+            Some(GetDocumentOptions {
+                fields: Some(vec!["content"]),
+            }),
+        )
+        .await;
+    assert_eq!(code, 200);
+    assert_eq!(response, json!({}));
+
+    let (response, code) = index
+        .get_document(
+            1,
+            Some(GetDocumentOptions {
+                fields: Some(vec!["content"]),
+            }),
+        )
+        .await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        response,
+        json!({
+            "content": {
+                "truc": "foobar",
+                "machin": "bidule",
+            },
+        })
+    );
+
+    let (response, code) = index
+        .get_document(
+            0,
+            Some(GetDocumentOptions {
+                fields: Some(vec!["content.truc"]),
+            }),
+        )
+        .await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        response,
+        json!({
+            "content.truc": "foobar",
+        })
+    );
+
+    let (response, code) = index
+        .get_document(
+            1,
+            Some(GetDocumentOptions {
+                fields: Some(vec!["content.truc"]),
+            }),
+        )
+        .await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        response,
+        json!({
+            "content": {
+                "truc": "foobar",
+            },
+        })
+    );
+}
+
 #[actix_rt::test]
-async fn get_documents_displayed_attributes() {
+async fn get_documents_displayed_attributes_is_ignored() {
     let server = Server::new().await;
     let index = server.index("test");
     index

@@ -285,23 +384,19 @@ async fn get_documents_displayed_attributes_is_ignored() {
         .get_all_documents(GetAllDocumentsOptions::default())
         .await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 20);
+    assert_eq!(response["results"].as_array().unwrap().len(), 20);
     assert_eq!(
-        response.as_array().unwrap()[0]
-            .as_object()
-            .unwrap()
-            .keys()
-            .count(),
-        1
+        response["results"][0].as_object().unwrap().keys().count(),
+        16
     );
-    assert!(response.as_array().unwrap()[0]
-        .as_object()
-        .unwrap()
-        .get("gender")
-        .is_some());
+    assert!(response["results"][0]["gender"] != json!(null));
+
+    assert_eq!(response["offset"], json!(0));
+    assert_eq!(response["limit"], json!(20));
+    assert_eq!(response["total"], json!(77));

     let (response, code) = index.get_document(0, None).await;
     assert_eq!(code, 200);
-    assert_eq!(response.as_object().unwrap().keys().count(), 1);
+    assert_eq!(response.as_object().unwrap().keys().count(), 16);
     assert!(response.as_object().unwrap().get("gender").is_some());
 }

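The rename to `get_documents_displayed_attributes_is_ignored` states the behavior change outright: even with `displayedAttributes` restricted (here, apparently, to `gender` alone), the documents endpoints now return all 16 stored fields, so the setting keeps affecting search results only. The new contract, restated as a standalone check:

    // displayedAttributes no longer filters the /documents routes:
    // a raw document keeps every stored field, "gender" included.
    let (doc, code) = index.get_document(0, None).await;
    assert_eq!(code, 200);
    assert_eq!(doc.as_object().unwrap().keys().count(), 16);
    assert!(doc.as_object().unwrap().contains_key("gender"));
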
@@ -1,22 +0,0 @@
-#![allow(dead_code)]
-mod common;
-
-use crate::common::Server;
-use serde_json::json;
-
-#[actix_rt::test]
-async fn get_unexisting_dump_status() {
-    let server = Server::new().await;
-
-    let (response, code) = server.get_dump_status("foobar").await;
-    assert_eq!(code, 404);
-
-    let expected_response = json!({
-        "message": "Dump `foobar` not found.",
-        "code": "dump_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#dump_not_found"
-    });
-
-    assert_eq!(response, expected_response);
-}

meilisearch-http/tests/dumps/data.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
+use std::path::PathBuf;
+
+use manifest_dir_macros::exist_relative_path;
+
+pub enum GetDump {
+    MoviesRawV1,
+    MoviesWithSettingsV1,
+    RubyGemsWithSettingsV1,
+
+    MoviesRawV2,
+    MoviesWithSettingsV2,
+    RubyGemsWithSettingsV2,
+
+    MoviesRawV3,
+    MoviesWithSettingsV3,
+    RubyGemsWithSettingsV3,
+
+    MoviesRawV4,
+    MoviesWithSettingsV4,
+    RubyGemsWithSettingsV4,
+
+    TestV5,
+}
+
+impl GetDump {
+    pub fn path(&self) -> PathBuf {
+        match self {
+            GetDump::MoviesRawV1 => {
+                exist_relative_path!("tests/assets/v1_v0.20.0_movies.dump").into()
+            }
+            GetDump::MoviesWithSettingsV1 => {
+                exist_relative_path!("tests/assets/v1_v0.20.0_movies_with_settings.dump").into()
+            }
+            GetDump::RubyGemsWithSettingsV1 => {
+                exist_relative_path!("tests/assets/v1_v0.20.0_rubygems_with_settings.dump").into()
+            }
+
+            GetDump::MoviesRawV2 => {
+                exist_relative_path!("tests/assets/v2_v0.21.1_movies.dump").into()
+            }
+            GetDump::MoviesWithSettingsV2 => {
+                exist_relative_path!("tests/assets/v2_v0.21.1_movies_with_settings.dump").into()
+            }
+
+            GetDump::RubyGemsWithSettingsV2 => {
+                exist_relative_path!("tests/assets/v2_v0.21.1_rubygems_with_settings.dump").into()
+            }
+
+            GetDump::MoviesRawV3 => {
+                exist_relative_path!("tests/assets/v3_v0.24.0_movies.dump").into()
+            }
+            GetDump::MoviesWithSettingsV3 => {
+                exist_relative_path!("tests/assets/v3_v0.24.0_movies_with_settings.dump").into()
+            }
+            GetDump::RubyGemsWithSettingsV3 => {
+                exist_relative_path!("tests/assets/v3_v0.24.0_rubygems_with_settings.dump").into()
+            }
+
+            GetDump::MoviesRawV4 => {
+                exist_relative_path!("tests/assets/v4_v0.25.2_movies.dump").into()
+            }
+            GetDump::MoviesWithSettingsV4 => {
+                exist_relative_path!("tests/assets/v4_v0.25.2_movies_with_settings.dump").into()
+            }
+            GetDump::RubyGemsWithSettingsV4 => {
+                exist_relative_path!("tests/assets/v4_v0.25.2_rubygems_with_settings.dump").into()
+            }
+            GetDump::TestV5 => {
+                exist_relative_path!("tests/assets/v5_v0.28.0_test_dump.dump").into()
+            }
+        }
+    }
+}

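This new `data.rs` maps each dump fixture to its file under `tests/assets/`. `exist_relative_path!` comes from the `manifest_dir_macros` crate and, as the name suggests, should refuse to compile if the file is missing, so a misplaced `.dump` fixture surfaces at build time rather than as a confusing runtime failure. Typical use, as in the tests that follow:

    // Resolve a fixture path at compile time and boot a server from it.
    let options = Opt {
        import_dump: Some(GetDump::MoviesRawV2.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();
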
meilisearch-http/tests/dumps/mod.rs (new file, 677 lines)
@@ -0,0 +1,677 @@
+mod data;
+
+use crate::common::{default_settings, GetAllDocumentsOptions, Server};
+use meilisearch_http::Opt;
+use serde_json::json;
+
+use self::data::GetDump;
+
+// All the following tests are ignored on Windows. See #2364.
+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v1() {
+    let temp = tempfile::tempdir().unwrap();
+
+    for path in [
+        GetDump::MoviesRawV1.path(),
+        GetDump::MoviesWithSettingsV1.path(),
+        GetDump::RubyGemsWithSettingsV1.path(),
+    ] {
+        let options = Opt {
+            import_dump: Some(path),
+            ..default_settings(temp.path())
+        };
+        let error = Server::new_with_options(options)
+            .await
+            .map(|_| ())
+            .unwrap_err();
+
+        assert_eq!(error.to_string(), "The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.");
+    }
+}

+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v2_movie_raw() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options = Opt {
+        import_dump: Some(GetDump::MoviesRawV2.path()),
+        ..default_settings(temp.path())
+    };
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200);
+
+    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
+    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
+    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
+
+    let index = server.index("indexUID");
+
+    let (stats, code) = index.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        stats,
+        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
+    );
+
+    let (settings, code) = index.settings().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        settings,
+        json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } })
+    );
+
+    let (tasks, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        tasks,
+        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
+    );
+
+    // finally we're just going to check that we can still get a few documents by id
+    let (document, code) = index.get_document(100, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({"id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
+    );
+
+    let (document, code) = index.get_document(500, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({"id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
+    );
+
+    let (document, code) = index.get_document(10006, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({"id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
+    );
+}

+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v2_movie_with_settings() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options = Opt {
+        import_dump: Some(GetDump::MoviesWithSettingsV2.path()),
+        ..default_settings(temp.path())
+    };
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200);
+
+    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
+    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
+    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
+
+    let index = server.index("indexUID");
+
+    let (stats, code) = index.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        stats,
+        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
+    );
+
+    let (settings, code) = index.settings().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        settings,
+        json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } })
+    );
+
+    let (tasks, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        tasks,
+        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
+    );
+
+    // finally we're just going to check that we can still get a few documents by id
+    let (document, code) = index.get_document(100, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
+    );
+
+    let (document, code) = index.get_document(500, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
+    );
+
+    let (document, code) = index.get_document(10006, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
+    );
+}

+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v2_rubygems_with_settings() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options = Opt {
+        import_dump: Some(GetDump::RubyGemsWithSettingsV2.path()),
+        ..default_settings(temp.path())
+    };
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200);
+
+    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
+    assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
+    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
+
+    let index = server.index("rubygems");
+
+    let (stats, code) = index.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        stats,
+        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
+    );
+
+    let (settings, code) = index.settings().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        settings,
+        json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 }})
+    );
+
+    let (tasks, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        tasks["results"][0],
+        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
+    );
+
+    // finally we're just going to check that we can still get a few documents by id
+    let (document, code) = index.get_document(188040, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
+    );
+
+    let (document, code) = index.get_document(191940, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
+    );
+
+    let (document, code) = index.get_document(159227, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
+    );
+}

+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v3_movie_raw() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options = Opt {
+        import_dump: Some(GetDump::MoviesRawV3.path()),
+        ..default_settings(temp.path())
+    };
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200);
+
+    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
+    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
+    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
+
+    let index = server.index("indexUID");
+
+    let (stats, code) = index.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        stats,
+        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
+    );
+
+    let (settings, code) = index.settings().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        settings,
+        json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } })
+    );
+
+    let (tasks, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        tasks,
+        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
+    );
+
+    // finally we're just going to check that we can still get a few documents by id
+    let (document, code) = index.get_document(100, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({"id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
+    );
+
+    let (document, code) = index.get_document(500, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({"id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
+    );
+
+    let (document, code) = index.get_document(10006, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({"id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
+    );
+}

+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v3_movie_with_settings() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options = Opt {
+        import_dump: Some(GetDump::MoviesWithSettingsV3.path()),
+        ..default_settings(temp.path())
+    };
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200);
+
+    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
+    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
+    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
+
+    let index = server.index("indexUID");
+
+    let (stats, code) = index.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        stats,
+        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
+    );
+
+    let (settings, code) = index.settings().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        settings,
+        json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } })
+    );
+
+    let (tasks, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        tasks,
+        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
+    );
+
+    // finally we're just going to check that we can still get a few documents by id
+    let (document, code) = index.get_document(100, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
+    );
+
+    let (document, code) = index.get_document(500, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
+    );
+
+    let (document, code) = index.get_document(10006, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
+    );
+}

+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v3_rubygems_with_settings() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options = Opt {
+        import_dump: Some(GetDump::RubyGemsWithSettingsV3.path()),
+        ..default_settings(temp.path())
+    };
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200);
+
+    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
+    assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
+    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
+
+    let index = server.index("rubygems");
+
+    let (stats, code) = index.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        stats,
+        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
+    );
+
+    let (settings, code) = index.settings().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        settings,
+        json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } })
+    );
+
+    let (tasks, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        tasks["results"][0],
+        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
+    );
+
+    // finally we're just going to check that we can still get a few documents by id
+    let (document, code) = index.get_document(188040, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
+    );
+
+    let (document, code) = index.get_document(191940, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
+    );
+
+    let (document, code) = index.get_document(159227, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
+    );
+}

+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v4_movie_raw() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options = Opt {
+        import_dump: Some(GetDump::MoviesRawV4.path()),
+        ..default_settings(temp.path())
+    };
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200);
+
+    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
+    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
+    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
+
+    let index = server.index("indexUID");
+
+    let (stats, code) = index.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        stats,
+        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
+    );
+
+    let (settings, code) = index.settings().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        settings,
+        json!({ "displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } })
+    );
+
+    let (tasks, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        tasks,
+        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
+    );
+
+    // finally we're just going to check that we can still get a few documents by id
+    let (document, code) = index.get_document(100, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
+    );
+
+    let (document, code) = index.get_document(500, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
+    );
+
+    let (document, code) = index.get_document(10006, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
+    );
+}

+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v4_movie_with_settings() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options = Opt {
+        import_dump: Some(GetDump::MoviesWithSettingsV4.path()),
+        ..default_settings(temp.path())
+    };
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200);
+
+    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
+    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
+    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
+
+    let index = server.index("indexUID");
+
+    let (stats, code) = index.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        stats,
+        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
+    );
+
+    let (settings, code) = index.settings().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        settings,
+        json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } })
+    );
+
+    let (tasks, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        tasks,
+        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
+    );
+
+    // finally we're just going to check that we can still get a few documents by id
+    let (document, code) = index.get_document(100, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
+    );
+
+    let (document, code) = index.get_document(500, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
+    );
+
+    let (document, code) = index.get_document(10006, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
+    );
+}

+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v4_rubygems_with_settings() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options = Opt {
+        import_dump: Some(GetDump::RubyGemsWithSettingsV4.path()),
+        ..default_settings(temp.path())
+    };
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200);
+
+    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
+    assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
+    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
+
+    let index = server.index("rubygems");
+
+    let (stats, code) = index.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        stats,
+        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
+    );
+
+    let (settings, code) = index.settings().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        settings,
+        json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } })
+    );
+
+    let (tasks, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        tasks["results"][0],
+        json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
+    );
+
+    // finally we're just going to check that we can still get a few documents by id
+    let (document, code) = index.get_document(188040, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
+    );
+
+    let (document, code) = index.get_document(191940, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
+    );
+
+    let (document, code) = index.get_document(159227, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(
+        document,
+        json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
+    );
+}

+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn import_dump_v5() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options = Opt {
+        import_dump: Some(GetDump::TestV5.path()),
+        ..default_settings(temp.path())
+    };
+    let mut server = Server::new_auth_with_options(options, temp).await;
+    server.use_api_key("MASTER_KEY");
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200, "{indexes}");
+
+    assert_eq!(indexes["results"].as_array().unwrap().len(), 2);
+    assert_eq!(indexes["results"][0]["uid"], json!("test"));
+    assert_eq!(indexes["results"][1]["uid"], json!("test2"));
+    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
+
+    let expected_stats = json!({
+        "numberOfDocuments": 10,
+        "isIndexing": false,
+        "fieldDistribution": {
+            "cast": 10,
+            "director": 10,
+            "genres": 10,
+            "id": 10,
+            "overview": 10,
+            "popularity": 10,
+            "poster_path": 10,
+            "producer": 10,
+            "production_companies": 10,
+            "release_date": 10,
+            "tagline": 10,
+            "title": 10,
+            "vote_average": 10,
+            "vote_count": 10
+        }
+    });
+
+    let index1 = server.index("test");
+    let index2 = server.index("test2");
+
+    let (stats, code) = index1.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(stats, expected_stats);
+
+    let (docs, code) = index2
+        .get_all_documents(GetAllDocumentsOptions::default())
+        .await;
+    assert_eq!(code, 200);
+    assert_eq!(docs["results"].as_array().unwrap().len(), 10);
+    let (docs, code) = index1
+        .get_all_documents(GetAllDocumentsOptions::default())
+        .await;
+    assert_eq!(code, 200);
+    assert_eq!(docs["results"].as_array().unwrap().len(), 10);
+
+    let (stats, code) = index2.stats().await;
+    assert_eq!(code, 200);
+    assert_eq!(stats, expected_stats);
+
+    let (keys, code) = server.list_api_keys().await;
+    assert_eq!(code, 200);
+    let key = &keys["results"][0];
+
+    assert_eq!(key["name"], "my key");
+}

@@ -102,7 +102,7 @@ async fn error_create_with_invalid_index_uid() {
    let (response, code) = index.create(None).await;

    let expected_response = json!({
-        "message": "`test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+        "message": "invalid index uid `test test#!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
        "code": "invalid_index_uid",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"

@@ -52,10 +52,10 @@ async fn loop_delete_add_documents() {
    let mut tasks = Vec::new();
    for _ in 0..50 {
        let (response, code) = index.add_documents(documents.clone(), None).await;
-        tasks.push(response["uid"].as_u64().unwrap());
+        tasks.push(response["taskUid"].as_u64().unwrap());
        assert_eq!(code, 202, "{}", response);
        let (response, code) = index.delete().await;
-        tasks.push(response["uid"].as_u64().unwrap());
+        tasks.push(response["taskUid"].as_u64().unwrap());
        assert_eq!(code, 202, "{}", response);
    }

@@ -16,12 +16,11 @@ async fn create_and_get_index() {
    assert_eq!(code, 200);
    assert_eq!(response["uid"], "test");
-    assert_eq!(response["name"], "test");
    assert!(response.get("createdAt").is_some());
    assert!(response.get("updatedAt").is_some());
    assert_eq!(response["createdAt"], response["updatedAt"]);
    assert_eq!(response["primaryKey"], Value::Null);
-    assert_eq!(response.as_object().unwrap().len(), 5);
+    assert_eq!(response.as_object().unwrap().len(), 4);
}

#[actix_rt::test]
@@ -45,10 +44,10 @@ async fn error_get_unexisting_index() {
#[actix_rt::test]
async fn no_index_return_empty_list() {
    let server = Server::new().await;
-    let (response, code) = server.list_indexes().await;
+    let (response, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);
-    assert!(response.is_array());
-    assert!(response.as_array().unwrap().is_empty());
+    assert!(response["results"].is_array());
+    assert!(response["results"].as_array().unwrap().is_empty());
}

#[actix_rt::test]
@@ -59,10 +58,10 @@ async fn list_multiple_indexes() {
    server.index("test").wait_task(1).await;

-    let (response, code) = server.list_indexes().await;
+    let (response, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);
-    assert!(response.is_array());
-    let arr = response.as_array().unwrap();
+    assert!(response["results"].is_array());
+    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 2);
    assert!(arr
        .iter()
@@ -72,6 +71,118 @@ async fn list_multiple_indexes() {
        .any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
}

+#[actix_rt::test]
+async fn get_and_paginate_indexes() {
+    let server = Server::new().await;
+    const NB_INDEXES: usize = 50;
+    for i in 0..NB_INDEXES {
+        server.index(&format!("test_{i:02}")).create(None).await;
+        server
+            .index(&format!("test_{i:02}"))
+            .wait_task(i as u64)
+            .await;
+    }
+
+    // basic
+    let (response, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200);
+    assert_eq!(response["limit"], json!(20));
+    assert_eq!(response["offset"], json!(0));
+    assert_eq!(response["total"], json!(NB_INDEXES));
+    assert!(response["results"].is_array());
+    let arr = response["results"].as_array().unwrap();
+    assert_eq!(arr.len(), 20);
+    // ensure we get all the indexes in alphabetical order
+    assert!((0..20)
+        .map(|idx| format!("test_{idx:02}"))
+        .zip(arr)
+        .all(|(expected, entry)| entry["uid"] == expected));
+
+    // with an offset
+    let (response, code) = server.list_indexes(Some(15), None).await;
+    assert_eq!(code, 200);
+    assert_eq!(response["limit"], json!(20));
+    assert_eq!(response["offset"], json!(15));
+    assert_eq!(response["total"], json!(NB_INDEXES));
+    assert!(response["results"].is_array());
+    let arr = response["results"].as_array().unwrap();
+    assert_eq!(arr.len(), 20);
+    assert!((15..35)
+        .map(|idx| format!("test_{idx:02}"))
+        .zip(arr)
+        .all(|(expected, entry)| entry["uid"] == expected));
+
+    // with an offset and not enough elements
+    let (response, code) = server.list_indexes(Some(45), None).await;
+    assert_eq!(code, 200);
+    assert_eq!(response["limit"], json!(20));
+    assert_eq!(response["offset"], json!(45));
+    assert_eq!(response["total"], json!(NB_INDEXES));
+    assert!(response["results"].is_array());
+    let arr = response["results"].as_array().unwrap();
+    assert_eq!(arr.len(), 5);
+    assert!((45..50)
+        .map(|idx| format!("test_{idx:02}"))
+        .zip(arr)
+        .all(|(expected, entry)| entry["uid"] == expected));
+
+    // with a limit lower than the default
+    let (response, code) = server.list_indexes(None, Some(5)).await;
+    assert_eq!(code, 200);
+    assert_eq!(response["limit"], json!(5));
+    assert_eq!(response["offset"], json!(0));
+    assert_eq!(response["total"], json!(NB_INDEXES));
+    assert!(response["results"].is_array());
+    let arr = response["results"].as_array().unwrap();
+    assert_eq!(arr.len(), 5);
+    assert!((0..5)
+        .map(|idx| format!("test_{idx:02}"))
+        .zip(arr)
+        .all(|(expected, entry)| entry["uid"] == expected));
+
+    // with a limit higher than the default
+    let (response, code) = server.list_indexes(None, Some(40)).await;
+    assert_eq!(code, 200);
+    assert_eq!(response["limit"], json!(40));
+    assert_eq!(response["offset"], json!(0));
+    assert_eq!(response["total"], json!(NB_INDEXES));
+    assert!(response["results"].is_array());
+    let arr = response["results"].as_array().unwrap();
+    assert_eq!(arr.len(), 40);
+    assert!((0..40)
+        .map(|idx| format!("test_{idx:02}"))
+        .zip(arr)
+        .all(|(expected, entry)| entry["uid"] == expected));
+
+    // with a limit higher than the number of indexes
+    let (response, code) = server.list_indexes(None, Some(80)).await;
+    assert_eq!(code, 200);
+    assert_eq!(response["limit"], json!(80));
+    assert_eq!(response["offset"], json!(0));
+    assert_eq!(response["total"], json!(NB_INDEXES));
+    assert!(response["results"].is_array());
+    let arr = response["results"].as_array().unwrap();
+    assert_eq!(arr.len(), 50);
+    assert!((0..50)
+        .map(|idx| format!("test_{idx:02}"))
+        .zip(arr)
+        .all(|(expected, entry)| entry["uid"] == expected));
+
+    // with a limit and an offset
+    let (response, code) = server.list_indexes(Some(20), Some(10)).await;
+    assert_eq!(code, 200);
+    assert_eq!(response["limit"], json!(10));
+    assert_eq!(response["offset"], json!(20));
+    assert_eq!(response["total"], json!(NB_INDEXES));
+    assert!(response["results"].is_array());
+    let arr = response["results"].as_array().unwrap();
+    assert_eq!(arr.len(), 10);
+    assert!((20..30)
+        .map(|idx| format!("test_{idx:02}"))
+        .zip(arr)
+        .all(|(expected, entry)| entry["uid"] == expected));
+}

#[actix_rt::test]
async fn get_invalid_index_uid() {
    let server = Server::new().await;

@@ -35,7 +35,7 @@ async fn stats() {
    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202);
-    assert_eq!(response["uid"], 1);
+    assert_eq!(response["taskUid"], 1);

    index.wait_task(1).await;

@@ -21,7 +21,6 @@ async fn update_primary_key() {
    assert_eq!(code, 200);

    assert_eq!(response["uid"], "test");
-    assert_eq!(response["name"], "test");
    assert!(response.get("createdAt").is_some());
    assert!(response.get("updatedAt").is_some());

@@ -32,7 +31,7 @@ async fn update_primary_key() {
    assert!(created_at < updated_at);

    assert_eq!(response["primaryKey"], "primary");
-    assert_eq!(response.as_object().unwrap().len(), 5);
+    assert_eq!(response.as_object().unwrap().len(), 4);
}

#[actix_rt::test]

@@ -2,6 +2,7 @@ mod auth;
mod common;
mod dashboard;
mod documents;
mod dumps;
mod index;
mod search;
mod settings;

@@ -82,7 +82,7 @@ async fn filter_invalid_syntax_object() {
    index.wait_task(1).await;

    let expected_response = json!({
-        "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO` or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
+        "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
        "code": "invalid_filter",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_filter"

@@ -109,7 +109,7 @@ async fn filter_invalid_syntax_array() {
    index.wait_task(1).await;

    let expected_response = json!({
-        "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO` or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
+        "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
        "code": "invalid_filter",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_filter"

@@ -16,7 +16,7 @@ async fn formatted_contain_wildcard() {
    index.wait_task(1).await;

    let (response, code) = index
-        .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"] }))
+        .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true }))
        .await;
    assert_eq!(code, 200, "{}", response);
    assert_eq!(
@@ -25,7 +25,8 @@ async fn formatted_contain_wildcard() {
            "_formatted": {
                "id": "852",
                "cattos": "<em>pesti</em>",
            }
        },
+        "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]},
        })
    );

@@ -43,7 +44,7 @@ async fn formatted_contain_wildcard() {

    let (response, code) = index
        .search_post(
-            json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"] }),
+            json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"], "showMatchesPosition": true }),
        )
        .await;
    assert_eq!(code, 200, "{}", response);
@@ -55,7 +56,8 @@ async fn formatted_contain_wildcard() {
            "_formatted": {
                "id": "852",
                "cattos": "pesti",
            }
        },
+        "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]},
        })
    );

@@ -141,6 +143,27 @@ async fn format_nested() {
        })
    );

+    let (response, code) = index
+        .search_post(
+            json!({ "q": "bobby", "attributesToRetrieve": ["doggos.name"], "showMatchesPosition": true }),
+        )
+        .await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(
+        response["hits"][0],
+        json!({
+            "doggos": [
+                {
+                    "name": "bobby",
+                },
+                {
+                    "name": "buddy",
+                },
+            ],
+            "_matchesPosition": {"doggos.name": [{"start": 0, "length": 5}]},
+        })
+    );
+
    let (response, code) = index
        .search_post(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.name"] }))
        .await;

@@ -420,11 +420,11 @@ async fn search_facet_distribution() {
    index
        .search(
            json!({
-                "facetsDistribution": ["title"]
+                "facets": ["title"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
-                let dist = response["facetsDistribution"].as_object().unwrap();
+                let dist = response["facetDistribution"].as_object().unwrap();
                assert_eq!(dist.len(), 1);
                assert!(dist.get("title").is_some());
            },
@@ -445,12 +445,12 @@ async fn search_facet_distribution() {
    index
        .search(
            json!({
-                // "facetsDistribution": ["father", "doggos.name"]
-                "facetsDistribution": ["father"]
+                // "facets": ["father", "doggos.name"]
+                "facets": ["father"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
-                let dist = response["facetsDistribution"].as_object().unwrap();
+                let dist = response["facetDistribution"].as_object().unwrap();
                assert_eq!(dist.len(), 1);
                assert_eq!(
                    dist["father"],
@@ -474,11 +474,11 @@ async fn search_facet_distribution() {
    index
        .search(
            json!({
-                "facetsDistribution": ["doggos.name"]
+                "facets": ["doggos.name"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
-                let dist = response["facetsDistribution"].as_object().unwrap();
+                let dist = response["facetDistribution"].as_object().unwrap();
                assert_eq!(dist.len(), 1);
                assert_eq!(
                    dist["doggos.name"],
@@ -491,12 -491,11 @@ async fn search_facet_distribution() {
    index
        .search(
            json!({
-                "facetsDistribution": ["doggos"]
+                "facets": ["doggos"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
-                let dist = response["facetsDistribution"].as_object().unwrap();
-                dbg!(&dist);
+                let dist = response["facetDistribution"].as_object().unwrap();
                assert_eq!(dist.len(), 3);
                assert_eq!(
                    dist["doggos.name"],
@@ -566,6 +565,36 @@ async fn placeholder_search_is_hard_limited() {
            },
        )
        .await;

+    index
+        .update_settings(json!({ "pagination": { "limitedTo": 10_000 } }))
+        .await;
+    index.wait_task(1).await;
+
+    index
+        .search(
+            json!({
+                "limit": 1500,
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                assert_eq!(response["hits"].as_array().unwrap().len(), 1200);
+            },
+        )
+        .await;
+
+    index
+        .search(
+            json!({
+                "offset": 1000,
+                "limit": 400,
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
+            },
+        )
+        .await;
}
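
(Editor's note: the arithmetic here follows from the 1200 documents this test indexes. Once `limitedTo` is raised to 10_000, the document count becomes the effective ceiling, so `limit: 1500` returns all 1200 hits, while `offset: 1000, limit: 400` can only return the 200 documents remaining past the offset.)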

#[actix_rt::test]
@@ -605,4 +634,85 @@ async fn search_is_hard_limited() {
            },
        )
        .await;

+    index
+        .update_settings(json!({ "pagination": { "limitedTo": 10_000 } }))
+        .await;
+    index.wait_task(1).await;
+
+    index
+        .search(
+            json!({
+                "q": "unique",
+                "limit": 1500,
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                assert_eq!(response["hits"].as_array().unwrap().len(), 1200);
+            },
+        )
+        .await;
+
+    index
+        .search(
+            json!({
+                "q": "unique",
+                "offset": 1000,
+                "limit": 400,
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
+            },
+        )
+        .await;
+}
+
+#[actix_rt::test]
+async fn faceting_max_values_per_facet() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index
+        .update_settings(json!({ "filterableAttributes": ["number"] }))
+        .await;
+
+    let documents: Vec<_> = (0..10_000)
+        .map(|id| json!({ "id": id, "number": id * 10 }))
+        .collect();
+    index.add_documents(json!(documents), None).await;
+    index.wait_task(1).await;
+
+    index
+        .search(
+            json!({
+                "facets": ["number"]
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                let numbers = response["facetDistribution"]["number"].as_object().unwrap();
+                assert_eq!(numbers.len(), 100);
+            },
+        )
+        .await;
+
+    index
+        .update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } }))
+        .await;
+    index.wait_task(2).await;
+
+    index
+        .search(
+            json!({
+                "facets": ["number"]
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                let numbers = dbg!(&response)["facetDistribution"]["number"]
+                    .as_object()
+                    .unwrap();
+                assert_eq!(numbers.len(), 10_000);
+            },
+        )
+        .await;
+}

@@ -24,6 +24,12 @@ static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|
    );
    map.insert("stop_words", json!([]));
    map.insert("synonyms", json!({}));
+    map.insert(
+        "faceting",
+        json!({
+            "maxValuesByFacet": json!(100),
+        }),
+    );
    map
});

@@ -43,7 +49,7 @@ async fn get_settings() {
    let (response, code) = index.settings().await;
    assert_eq!(code, 200);
    let settings = response.as_object().unwrap();
-    assert_eq!(settings.keys().len(), 9);
+    assert_eq!(settings.keys().len(), 11);
    assert_eq!(settings["displayedAttributes"], json!(["*"]));
    assert_eq!(settings["searchableAttributes"], json!(["*"]));
    assert_eq!(settings["filterableAttributes"], json!([]));
@@ -61,6 +67,18 @@ async fn get_settings() {
        ])
    );
    assert_eq!(settings["stopWords"], json!([]));
+    assert_eq!(
+        settings["faceting"],
+        json!({
+            "maxValuesPerFacet": 100,
+        })
+    );
+    assert_eq!(
+        settings["pagination"],
+        json!({
+            "limitedTo": 1000,
+        })
+    );
}

#[actix_rt::test]
@@ -122,7 +140,7 @@ async fn reset_all_settings() {

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202);
-    assert_eq!(response["uid"], 0);
+    assert_eq!(response["taskUid"], 0);
    index.wait_task(0).await;

    index

@@ -179,7 +197,7 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
    assert_eq!(code, 400);

    let expected = json!({
-        "message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+        "message": "invalid index uid `test##! `, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
        "code": "invalid_index_uid",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"});
@@ -214,7 +232,7 @@ macro_rules! test_setting_routes {
                .chars()
                .map(|c| if c == '_' { '-' } else { c })
                .collect::<String>());
-            let (response, code) = server.service.post(url, serde_json::Value::Null).await;
+            let (response, code) = server.service.put(url, serde_json::Value::Null).await;
            assert_eq!(code, 202, "{}", response);
            server.index("").wait_task(0).await;
            let (response, code) = server.index("test").get().await;
@@ -283,7 +301,7 @@ async fn error_set_invalid_ranking_rules() {
    assert_eq!(response["status"], "failed");

    let expected_error = json!({
-        "message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are Words, Typo, Sort, Proximity, Attribute, Exactness and custom ranking rules."#,
+        "message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules."#,
        "code": "invalid_ranking_rule",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_ranking_rule"

@@ -41,7 +41,7 @@ async fn perform_snapshot() {
        ..default_settings(temp.path())
    };

-    let server = Server::new_with_options(options).await;
+    let server = Server::new_with_options(options).await.unwrap();

    let index = server.index("test");
    index
@@ -67,10 +67,10 @@ async fn perform_snapshot() {
        ..default_settings(temp.path())
    };

-    let snapshot_server = Server::new_with_options(options).await;
+    let snapshot_server = Server::new_with_options(options).await.unwrap();

    verify_snapshot!(server, snapshot_server, |server| =>
-        server.list_indexes(),
+        server.list_indexes(None, None),
        // for some reason the db sizes differ. this may be due to the compaction options we have
        // set when performing the snapshot
        //server.stats(),

@@ -54,7 +54,7 @@ async fn stats() {

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202, "{}", response);
-    assert_eq!(response["uid"], 1);
+    assert_eq!(response["taskUid"], 1);

    index.wait_task(1).await;

@@ -3,22 +3,6 @@ use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

-#[actix_rt::test]
-async fn error_get_task_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.service.get("/indexes/test/tasks").await;
-
-    let expected_response = json!({
-        "message": "Index `test` not found.",
-        "code": "index_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index_not_found"
-    });
-
-    assert_eq!(response, expected_response);
-    assert_eq!(code, 404);
-}

#[actix_rt::test]
async fn error_get_unexisting_task_status() {
    let server = Server::new().await;
@@ -58,22 +42,6 @@ async fn get_task_status() {
    // TODO check response format, as per #48
}

-#[actix_rt::test]
-async fn error_list_tasks_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.index("test").list_tasks().await;
-
-    let expected_response = json!({
-        "message": "Index `test` not found.",
-        "code": "index_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index_not_found"
-    });
-
-    assert_eq!(response, expected_response);
-    assert_eq!(code, 404);
-}

#[actix_rt::test]
async fn list_tasks() {
    let server = Server::new().await;
@@ -91,10 +59,140 @@ async fn list_tasks() {
    assert_eq!(response["results"].as_array().unwrap().len(), 2);
}

+#[actix_rt::test]
+async fn list_tasks_with_star_filters() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    index
+        .add_documents(
+            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
+            None,
+        )
+        .await;
+    let (response, code) = index.service.get("/tasks?indexUid=test").await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index.service.get("/tasks?indexUid=*").await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index.service.get("/tasks?indexUid=*,pasteque").await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index.service.get("/tasks?type=*").await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index
+        .service
+        .get("/tasks?type=*,documentAdditionOrUpdate&status=*")
+        .await;
+    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index
+        .service
+        .get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test")
+        .await;
+    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index
+        .service
+        .get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test,*")
+        .await;
+    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+}
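
(Editor's note: `*` acts as a match-everything wildcard in these task filters, so every combination above, even mixed with concrete values such as `indexUid=*,pasteque` or `status=*,failed`, still returns both tasks.)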

+#[actix_rt::test]
+async fn list_tasks_status_filtered() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    index
+        .add_documents(
+            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
+            None,
+        )
+        .await;
+
+    let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 1);
+
+    // We can't be sure that the update isn't already processed so we can't test this
+    // let (response, code) = index.filtered_tasks(&[], &["processing"]).await;
+    // assert_eq!(code, 200, "{}", response);
+    // assert_eq!(response["results"].as_array().unwrap().len(), 1);
+
+    index.wait_task(1).await;
+
+    let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+}
+
+#[actix_rt::test]
+async fn list_tasks_type_filtered() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    index
+        .add_documents(
+            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
+            None,
+        )
+        .await;
+
+    let (response, code) = index.filtered_tasks(&["indexCreation"], &[]).await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 1);
+
+    let (response, code) = index
+        .filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[])
+        .await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+}
+
+#[actix_rt::test]
+async fn list_tasks_status_and_type_filtered() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    index
+        .add_documents(
+            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
+            None,
+        )
+        .await;
+
+    let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"]).await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 0);
+
+    let (response, code) = index
+        .filtered_tasks(
+            &["indexCreation", "documentAdditionOrUpdate"],
+            &["succeeded", "processing", "enqueued"],
+        )
+        .await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+}

macro_rules! assert_valid_summarized_task {
    ($response:expr, $task_type:literal, $index:literal) => {{
        assert_eq!($response.as_object().unwrap().len(), 5);
-        assert!($response["uid"].as_u64().is_some());
+        assert!($response["taskUid"].as_u64().is_some());
        assert_eq!($response["indexUid"], $index);
        assert_eq!($response["status"], "enqueued");
        assert_eq!($response["type"], $task_type);
@@ -119,16 +217,16 @@ async fn test_summarized_task_view() {
    assert_valid_summarized_task!(response, "settingsUpdate", "test");

    let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentPartial", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");

    let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentAddition", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");

    let (response, _) = index.delete_document(1).await;
    assert_valid_summarized_task!(response, "documentDeletion", "test");

    let (response, _) = index.clear_all_documents().await;
-    assert_valid_summarized_task!(response, "clearAll", "test");
+    assert_valid_summarized_task!(response, "documentDeletion", "test");

    let (response, _) = index.delete().await;
    assert_valid_summarized_task!(response, "indexDeletion", "test");

@@ -1,6 +1,6 @@
[package]
name = "meilisearch-lib"
-version = "0.27.1"
+version = "0.28.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -29,8 +29,8 @@ itertools = "0.10.3"
lazy_static = "1.4.0"
log = "0.4.14"
meilisearch-auth = { path = "../meilisearch-auth" }
-meilisearch-error = { path = "../meilisearch-error" }
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.26.5" }
+meilisearch-types = { path = "../meilisearch-types" }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.3-filter.beta.0" }
mime = "0.3.16"
num_cpus = "1.13.1"
obkv = "0.2.0"
@@ -41,6 +41,7 @@ rand = "0.8.5"
rayon = "1.5.1"
regex = "1.5.5"
reqwest = { version = "0.11.9", features = ["json", "rustls-tls"], default-features = false, optional = true }
roaring = "0.9.0"
rustls = "0.20.4"
serde = { version = "1.0.136", features = ["derive"] }
serde_json = { version = "1.0.79", features = ["preserve_order"] }
@@ -52,15 +53,15 @@ tempfile = "3.3.0"
thiserror = "1.0.30"
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
tokio = { version = "1.17.0", features = ["full"] }
-uuid = { version = "0.8.2", features = ["serde"] }
+uuid = { version = "0.8.2", features = ["serde", "v4"] }
walkdir = "2.3.2"
whoami = { version = "1.2.1", optional = true }

[dev-dependencies]
actix-rt = "2.7.0"
-meilisearch-error = { path = "../meilisearch-error", features = ["test-traits"] }
+meilisearch-types = { path = "../meilisearch-types", features = ["test-traits"] }
mockall = "0.11.0"
-nelson = { git = "https://github.com/MarinPostma/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
+nelson = { git = "https://github.com/meilisearch/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
paste = "1.0.6"
proptest = "1.0.0"
proptest-derive = "0.3.0"

@@ -2,7 +2,8 @@ use std::borrow::Borrow;
use std::fmt::{self, Debug, Display};
use std::io::{self, BufRead, BufReader, BufWriter, Cursor, Read, Seek, Write};

-use meilisearch_error::{internal_error, Code, ErrorCode};
+use meilisearch_types::error::{Code, ErrorCode};
+use meilisearch_types::internal_error;
use milli::documents::DocumentBatchBuilder;

type Result<T> = std::result::Result<T, DocumentFormatError>;

@@ -1,5 +1,6 @@
pub mod v2;
pub mod v3;
+pub mod v4;

/// Parses the v1 version of the Asc ranking rules `asc(price)` and returns the field name.
pub fn asc_ranking_rule(text: &str) -> Option<&str> {
@@ -1,5 +1,5 @@
use anyhow::bail;
-use meilisearch_error::Code;
+use meilisearch_types::error::Code;
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
@@ -1,12 +1,13 @@
-use meilisearch_error::{Code, ResponseError};
+use meilisearch_types::error::{Code, ResponseError};
+use meilisearch_types::index_uid::IndexUid;
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;

+use super::v4::{Task, TaskContent, TaskEvent};
use crate::index::{Settings, Unchecked};
-use crate::index_resolver::IndexUid;
-use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult};
+use crate::tasks::task::{DocumentDeletion, TaskId, TaskResult};

use super::v2;

@@ -58,9 +59,9 @@ pub enum Update {
    ClearDocuments,
}

-impl From<Update> for TaskContent {
-    fn from(other: Update) -> Self {
-        match other {
+impl From<Update> for super::v4::TaskContent {
+    fn from(update: Update) -> Self {
+        match update {
            Update::DeleteDocuments(ids) => {
                TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids))
            }
@@ -185,10 +186,10 @@ impl Failed {
impl From<(UpdateStatus, String, TaskId)> for Task {
    fn from((update, uid, task_id): (UpdateStatus, String, TaskId)) -> Self {
        // Dummy task
-        let mut task = Task {
+        let mut task = super::v4::Task {
            id: task_id,
-            index_uid: IndexUid::new(uid).unwrap(),
-            content: TaskContent::IndexDeletion,
+            index_uid: IndexUid::new_unchecked(uid),
+            content: super::v4::TaskContent::IndexDeletion,
            events: Vec::new(),
        };

meilisearch-lib/src/dump/compat/v4.rs (new file, 145 lines)
@@ -0,0 +1,145 @@
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;

use crate::index::{Settings, Unchecked};
use crate::tasks::batch::BatchId;
use crate::tasks::task::{
    DocumentDeletion, TaskContent as NewTaskContent, TaskEvent as NewTaskEvent, TaskId, TaskResult,
};

#[derive(Debug, Serialize, Deserialize)]
pub struct Task {
    pub id: TaskId,
    pub index_uid: IndexUid,
    pub content: TaskContent,
    pub events: Vec<TaskEvent>,
}

impl From<Task> for crate::tasks::task::Task {
    fn from(other: Task) -> Self {
        Self {
            id: other.id,
            content: NewTaskContent::from((other.index_uid, other.content)),
            events: other.events.into_iter().map(Into::into).collect(),
        }
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub enum TaskEvent {
    Created(#[serde(with = "time::serde::rfc3339")] OffsetDateTime),
    Batched {
        #[serde(with = "time::serde::rfc3339")]
        timestamp: OffsetDateTime,
        batch_id: BatchId,
    },
    Processing(#[serde(with = "time::serde::rfc3339")] OffsetDateTime),
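    // NB: `Succeded` is misspelled in the on-disk v4 format; the variant name must stay
    // as-is so serde keeps deserializing old dumps. It is mapped to the corrected
    // `Succeeded` spelling in the conversion below.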
    Succeded {
        result: TaskResult,
        #[serde(with = "time::serde::rfc3339")]
        timestamp: OffsetDateTime,
    },
    Failed {
        error: ResponseError,
        #[serde(with = "time::serde::rfc3339")]
        timestamp: OffsetDateTime,
    },
}

impl From<TaskEvent> for NewTaskEvent {
    fn from(other: TaskEvent) -> Self {
        match other {
            TaskEvent::Created(x) => NewTaskEvent::Created(x),
            TaskEvent::Batched {
                timestamp,
                batch_id,
            } => NewTaskEvent::Batched {
                timestamp,
                batch_id,
            },
            TaskEvent::Processing(x) => NewTaskEvent::Processing(x),
            TaskEvent::Succeded { result, timestamp } => {
                NewTaskEvent::Succeeded { result, timestamp }
            }
            TaskEvent::Failed { error, timestamp } => NewTaskEvent::Failed { error, timestamp },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
#[allow(clippy::large_enum_variant)]
pub enum TaskContent {
    DocumentAddition {
        content_uuid: Uuid,
        merge_strategy: IndexDocumentsMethod,
        primary_key: Option<String>,
        documents_count: usize,
        allow_index_creation: bool,
    },
    DocumentDeletion(DocumentDeletion),
    SettingsUpdate {
        settings: Settings<Unchecked>,
        /// Indicates whether the task was a deletion
        is_deletion: bool,
        allow_index_creation: bool,
    },
    IndexDeletion,
    IndexCreation {
        primary_key: Option<String>,
    },
    IndexUpdate {
        primary_key: Option<String>,
    },
    Dump {
        uid: String,
    },
}

impl From<(IndexUid, TaskContent)> for NewTaskContent {
    fn from((index_uid, content): (IndexUid, TaskContent)) -> Self {
        match content {
            TaskContent::DocumentAddition {
                content_uuid,
                merge_strategy,
                primary_key,
                documents_count,
                allow_index_creation,
            } => NewTaskContent::DocumentAddition {
                index_uid,
                content_uuid,
                merge_strategy,
                primary_key,
                documents_count,
                allow_index_creation,
            },
            TaskContent::DocumentDeletion(deletion) => NewTaskContent::DocumentDeletion {
                index_uid,
                deletion,
            },
            TaskContent::SettingsUpdate {
                settings,
                is_deletion,
                allow_index_creation,
            } => NewTaskContent::SettingsUpdate {
                index_uid,
                settings,
                is_deletion,
                allow_index_creation,
            },
            TaskContent::IndexDeletion => NewTaskContent::IndexDeletion { index_uid },
            TaskContent::IndexCreation { primary_key } => NewTaskContent::IndexCreation {
                index_uid,
                primary_key,
            },
            TaskContent::IndexUpdate { primary_key } => NewTaskContent::IndexUpdate {
                index_uid,
                primary_key,
            },
            TaskContent::Dump { uid } => NewTaskContent::Dump { uid },
        }
    }
}
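
// Editor's note: the visible v4 -> v5 shape change is that `index_uid` moves off the task
// itself and into every `TaskContent` variant, which is why this conversion takes the
// `(IndexUid, TaskContent)` pair rather than the content alone.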

meilisearch-lib/src/dump/error.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
use meilisearch_auth::error::AuthControllerError;
use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::internal_error;

use crate::{index_resolver::error::IndexResolverError, tasks::error::TaskError};

pub type Result<T> = std::result::Result<T, DumpError>;

#[derive(thiserror::Error, Debug)]
pub enum DumpError {
    #[error("An internal error has occurred. `{0}`.")]
    Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
    #[error("{0}")]
    IndexResolver(#[from] IndexResolverError),
}

internal_error!(
    DumpError: milli::heed::Error,
    std::io::Error,
    tokio::task::JoinError,
    tokio::sync::oneshot::error::RecvError,
    serde_json::error::Error,
    tempfile::PersistError,
    fs_extra::error::Error,
    AuthControllerError,
    TaskError
);

impl ErrorCode for DumpError {
    fn error_code(&self) -> Code {
        match self {
            DumpError::Internal(_) => Code::Internal,
            DumpError::IndexResolver(e) => e.error_code(),
        }
    }
}

meilisearch-lib/src/dump/handler.rs (new file, 188 lines)
@@ -0,0 +1,188 @@
#[cfg(not(test))]
pub use real::DumpHandler;

#[cfg(test)]
pub use test::MockDumpHandler as DumpHandler;

use time::{macros::format_description, OffsetDateTime};

/// Generate uid from creation date
pub fn generate_uid() -> String {
    OffsetDateTime::now_utc()
        .format(format_description!(
            "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
        ))
        .unwrap()
}
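
// Editor's note: given the format description above, a dump created at
// 2022-06-02 11:22:33.456 UTC should be named "20220602-112233456":
// date, a dash, then the time down to milliseconds with no separators.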

mod real {
    use std::path::PathBuf;
    use std::sync::Arc;

    use log::{info, trace};
    use meilisearch_auth::AuthController;
    use milli::heed::Env;
    use tokio::fs::create_dir_all;
    use tokio::io::AsyncWriteExt;

    use crate::analytics;
    use crate::compression::to_tar_gz;
    use crate::dump::error::{DumpError, Result};
    use crate::dump::{MetadataVersion, META_FILE_NAME};
    use crate::index_resolver::{
        index_store::IndexStore, meta_store::IndexMetaStore, IndexResolver,
    };
    use crate::tasks::TaskStore;
    use crate::update_file_store::UpdateFileStore;

    pub struct DumpHandler<U, I> {
        dump_path: PathBuf,
        db_path: PathBuf,
        update_file_store: UpdateFileStore,
        task_store_size: usize,
        index_db_size: usize,
        env: Arc<Env>,
        index_resolver: Arc<IndexResolver<U, I>>,
    }

    impl<U, I> DumpHandler<U, I>
    where
        U: IndexMetaStore + Sync + Send + 'static,
        I: IndexStore + Sync + Send + 'static,
    {
        pub fn new(
            dump_path: PathBuf,
            db_path: PathBuf,
            update_file_store: UpdateFileStore,
            task_store_size: usize,
            index_db_size: usize,
            env: Arc<Env>,
            index_resolver: Arc<IndexResolver<U, I>>,
        ) -> Self {
            Self {
                dump_path,
                db_path,
                update_file_store,
                task_store_size,
                index_db_size,
                env,
                index_resolver,
            }
        }

        pub async fn run(&self, uid: String) -> Result<()> {
            trace!("Performing dump.");

            create_dir_all(&self.dump_path).await?;

            let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??;
            let temp_dump_path = temp_dump_dir.path().to_owned();

            let meta = MetadataVersion::new_v5(self.index_db_size, self.task_store_size);
            let meta_path = temp_dump_path.join(META_FILE_NAME);

            let meta_bytes = serde_json::to_vec(&meta)?;
            let mut meta_file = tokio::fs::File::create(&meta_path).await?;
            meta_file.write_all(&meta_bytes).await?;

            analytics::copy_user_id(&self.db_path, &temp_dump_path);

            create_dir_all(&temp_dump_path.join("indexes")).await?;

            let db_path = self.db_path.clone();
            let temp_dump_path_clone = temp_dump_path.clone();
            tokio::task::spawn_blocking(move || -> Result<()> {
                AuthController::dump(db_path, temp_dump_path_clone)?;
                Ok(())
            })
            .await??;
            TaskStore::dump(
                self.env.clone(),
                &temp_dump_path,
                self.update_file_store.clone(),
            )
            .await?;
            self.index_resolver.dump(&temp_dump_path).await?;

            let dump_path = self.dump_path.clone();
            let dump_path = tokio::task::spawn_blocking(move || -> Result<PathBuf> {
                // for now we simply copy the updates/updates_files
                // FIXME: We may copy more files than necessary, if new files are added while we are
                // performing the dump. We need a way to filter them out.

                let temp_dump_file = tempfile::NamedTempFile::new_in(&dump_path)?;
                to_tar_gz(temp_dump_path, temp_dump_file.path())
                    .map_err(|e| DumpError::Internal(e.into()))?;

                let dump_path = dump_path.join(uid).with_extension("dump");
                temp_dump_file.persist(&dump_path)?;

                Ok(dump_path)
            })
            .await??;

            info!("Created dump in {:?}.", dump_path);

            Ok(())
        }
    }
}
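
// Editor's note: `run` stages everything (metadata.json, the analytics user id, the auth
// keys, the task store, then every index) in a temporary directory, tars it, and only then
// persists the archive as `<dump_path>/<uid>.dump`, so a partially written dump never
// appears under its final name.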

#[cfg(test)]
mod test {
    use std::path::PathBuf;
    use std::sync::Arc;

    use milli::heed::Env;
    use nelson::Mocker;

    use crate::dump::error::Result;
    use crate::index_resolver::IndexResolver;
    use crate::index_resolver::{index_store::IndexStore, meta_store::IndexMetaStore};
    use crate::update_file_store::UpdateFileStore;

    use super::*;

    pub enum MockDumpHandler<U, I> {
        Real(super::real::DumpHandler<U, I>),
        Mock(Mocker),
    }

    impl<U, I> MockDumpHandler<U, I> {
        pub fn mock(mocker: Mocker) -> Self {
            Self::Mock(mocker)
        }
    }

    impl<U, I> MockDumpHandler<U, I>
    where
        U: IndexMetaStore + Sync + Send + 'static,
        I: IndexStore + Sync + Send + 'static,
    {
        pub fn new(
            dump_path: PathBuf,
            db_path: PathBuf,
            update_file_store: UpdateFileStore,
            task_store_size: usize,
            index_db_size: usize,
            env: Arc<Env>,
            index_resolver: Arc<IndexResolver<U, I>>,
        ) -> Self {
            Self::Real(super::real::DumpHandler::new(
                dump_path,
                db_path,
                update_file_store,
                task_store_size,
                index_db_size,
                env,
                index_resolver,
            ))
        }
        pub async fn run(&self, uid: String) -> Result<()> {
            match self {
                DumpHandler::Real(real) => real.run(uid).await,
                DumpHandler::Mock(mocker) => unsafe { mocker.get("run").call(uid) },
            }
        }
    }
}

@@ -1,3 +1,4 @@
pub mod v2;
pub mod v3;
pub mod v4;
+pub mod v5;

@@ -5,8 +5,8 @@ use std::path::{Path, PathBuf};
use serde_json::{Deserializer, Value};
use tempfile::NamedTempFile;

-use crate::index_controller::dump_actor::compat::{self, v2, v3};
-use crate::index_controller::dump_actor::Metadata;
+use crate::dump::compat::{self, v2, v3};
+use crate::dump::Metadata;
use crate::options::IndexerOpts;

/// The dump v2 reads the dump folder and patches all the needed files to make it compatible with a
@@ -9,11 +9,11 @@ use log::info;
use tempfile::tempdir;
use uuid::Uuid;

-use crate::index_controller::dump_actor::compat::v3;
-use crate::index_controller::dump_actor::Metadata;
+use crate::dump::compat::{self, v3};
+use crate::dump::Metadata;
use crate::index_resolver::meta_store::{DumpEntry, IndexMeta};
use crate::options::IndexerOpts;
-use crate::tasks::task::{Task, TaskId};
+use crate::tasks::task::TaskId;

/// dump structure for V3:
/// .
@@ -124,7 +124,7 @@ fn patch_updates(
        .clone();
    serde_json::to_writer(
        &mut dst_file,
-        &Task::from((entry.update, name, task_id as TaskId)),
+        &compat::v4::Task::from((entry.update, name, task_id as TaskId)),
    )?;
    dst_file.write_all(b"\n")?;
    Ok(())

meilisearch-lib/src/dump/loaders/v4.rs (new file, 103 lines)
@@ -0,0 +1,103 @@
use std::fs::{self, create_dir_all, File};
use std::io::{BufReader, Write};
use std::path::Path;

use fs_extra::dir::{self, CopyOptions};
use log::info;
use serde_json::{Deserializer, Map, Value};
use tempfile::tempdir;
use uuid::Uuid;

use crate::dump::{compat, Metadata};
use crate::options::IndexerOpts;
use crate::tasks::task::Task;

pub fn load_dump(
    meta: Metadata,
    src: impl AsRef<Path>,
    dst: impl AsRef<Path>,
    index_db_size: usize,
    meta_env_size: usize,
    indexing_options: &IndexerOpts,
) -> anyhow::Result<()> {
    info!("Patching dump V4 to dump V5...");

    let patched_dir = tempdir()?;
    let options = CopyOptions::default();

    // Indexes
    dir::copy(src.as_ref().join("indexes"), &patched_dir, &options)?;

    // Index uuids
    dir::copy(src.as_ref().join("index_uuids"), &patched_dir, &options)?;

    // Metadata
    fs::copy(
        src.as_ref().join("metadata.json"),
        patched_dir.path().join("metadata.json"),
    )?;

    // Updates
    patch_updates(&src, &patched_dir)?;

    // Keys
    patch_keys(&src, &patched_dir)?;

    super::v5::load_dump(
        meta,
        &patched_dir,
        dst,
        index_db_size,
        meta_env_size,
        indexing_options,
    )
}

fn patch_updates(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> anyhow::Result<()> {
    let updates_path = src.as_ref().join("updates/data.jsonl");
    let output_updates_path = dst.as_ref().join("updates/data.jsonl");
    create_dir_all(output_updates_path.parent().unwrap())?;
    let updates_file = File::open(updates_path)?;
    let mut output_update_file = File::create(output_updates_path)?;

    serde_json::Deserializer::from_reader(updates_file)
        .into_iter::<compat::v4::Task>()
        .try_for_each(|task| -> anyhow::Result<()> {
            let task: Task = task?.into();

            serde_json::to_writer(&mut output_update_file, &task)?;
            output_update_file.write_all(b"\n")?;

            Ok(())
        })?;

    output_update_file.flush()?;

    Ok(())
}

fn patch_keys(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> anyhow::Result<()> {
    let keys_file_src = src.as_ref().join("keys");

    if !keys_file_src.exists() {
        return Ok(());
    }

    fs::create_dir_all(&dst)?;
    let keys_file_dst = dst.as_ref().join("keys");
    let mut writer = File::create(&keys_file_dst)?;

    let reader = BufReader::new(File::open(&keys_file_src)?);
    for key in Deserializer::from_reader(reader).into_iter() {
        let mut key: Map<String, Value> = key?;

        // generate a new uuid v4 and insert it in the key.
        let uid = serde_json::to_value(Uuid::new_v4()).unwrap();
        key.insert("uid".to_string(), uid);

        serde_json::to_writer(&mut writer, &key)?;
        writer.write_all(b"\n")?;
    }

    Ok(())
}

@@ -1,12 +1,11 @@
-use std::path::Path;
-use std::sync::Arc;
+use std::{path::Path, sync::Arc};

use log::info;
use meilisearch_auth::AuthController;
use milli::heed::EnvOpenOptions;

use crate::analytics;
-use crate::index_controller::dump_actor::Metadata;
+use crate::dump::Metadata;
use crate::index_resolver::IndexResolver;
use crate::options::IndexerOpts;
use crate::tasks::TaskStore;
@@ -21,7 +20,7 @@ pub fn load_dump(
    indexing_options: &IndexerOpts,
) -> anyhow::Result<()> {
    info!(
-        "Loading dump from {}, dump database version: {}, dump version: V4",
+        "Loading dump from {}, dump database version: {}, dump version: V5",
        meta.dump_date, meta.db_version
    );

meilisearch-lib/src/dump/mod.rs (new file, 262 lines)
@@ -0,0 +1,262 @@
use std::fs::File;
use std::path::Path;

use anyhow::bail;
use log::info;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

use tempfile::TempDir;

use crate::compression::from_tar_gz;
use crate::options::IndexerOpts;

use self::loaders::{v2, v3, v4, v5};

pub use handler::{generate_uid, DumpHandler};

mod compat;
pub mod error;
mod handler;
mod loaders;

const META_FILE_NAME: &str = "metadata.json";

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
    db_version: String,
    index_db_size: usize,
    update_db_size: usize,
    #[serde(with = "time::serde::rfc3339")]
    dump_date: OffsetDateTime,
}

impl Metadata {
    pub fn new(index_db_size: usize, update_db_size: usize) -> Self {
        Self {
            db_version: env!("CARGO_PKG_VERSION").to_string(),
            index_db_size,
            update_db_size,
            dump_date: OffsetDateTime::now_utc(),
        }
    }
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MetadataV1 {
    pub db_version: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "dumpVersion")]
pub enum MetadataVersion {
    V1(MetadataV1),
    V2(Metadata),
    V3(Metadata),
    V4(Metadata),
    // V5 is forward compatible with V4 but not backward compatible.
    V5(Metadata),
}
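
// Editor's note: only MetadataV1 carries no database sizes and no dump date, which is
// why dump_date() below returns None for V1 and why loading a V1 dump bails outright.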
|
||||
|
||||
impl MetadataVersion {
    pub fn load_dump(
        self,
        src: impl AsRef<Path>,
        dst: impl AsRef<Path>,
        index_db_size: usize,
        meta_env_size: usize,
        indexing_options: &IndexerOpts,
    ) -> anyhow::Result<()> {
        match self {
            MetadataVersion::V1(_meta) => {
                anyhow::bail!("The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.")
            }
            MetadataVersion::V2(meta) => v2::load_dump(
                meta,
                src,
                dst,
                index_db_size,
                meta_env_size,
                indexing_options,
            )?,
            MetadataVersion::V3(meta) => v3::load_dump(
                meta,
                src,
                dst,
                index_db_size,
                meta_env_size,
                indexing_options,
            )?,
            MetadataVersion::V4(meta) => v4::load_dump(
                meta,
                src,
                dst,
                index_db_size,
                meta_env_size,
                indexing_options,
            )?,
            MetadataVersion::V5(meta) => v5::load_dump(
                meta,
                src,
                dst,
                index_db_size,
                meta_env_size,
                indexing_options,
            )?,
        }

        Ok(())
    }

    pub fn new_v5(index_db_size: usize, update_db_size: usize) -> Self {
        let meta = Metadata::new(index_db_size, update_db_size);
        Self::V5(meta)
    }

    pub fn db_version(&self) -> &str {
        match self {
            Self::V1(meta) => &meta.db_version,
            Self::V2(meta) | Self::V3(meta) | Self::V4(meta) | Self::V5(meta) => &meta.db_version,
        }
    }

    pub fn version(&self) -> &'static str {
        match self {
            MetadataVersion::V1(_) => "V1",
            MetadataVersion::V2(_) => "V2",
            MetadataVersion::V3(_) => "V3",
            MetadataVersion::V4(_) => "V4",
            MetadataVersion::V5(_) => "V5",
        }
    }

    pub fn dump_date(&self) -> Option<&OffsetDateTime> {
        match self {
            MetadataVersion::V1(_) => None,
            MetadataVersion::V2(meta)
            | MetadataVersion::V3(meta)
            | MetadataVersion::V4(meta)
            | MetadataVersion::V5(meta) => Some(&meta.dump_date),
        }
    }
}
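Putting the pieces together: a caller deserializes `metadata.json` into the right variant and lets `load_dump` route to the matching loader. A hedged sketch of that flow; the paths and sizes are illustrative and `dispatch_example` is a hypothetical helper.

```rust
// Sketch: from a metadata.json on disk to the version-specific loader.
fn dispatch_example(indexer_opts: &IndexerOpts) -> anyhow::Result<()> {
    let mut file = File::open("/tmp/dump-src/metadata.json")?; // illustrative path
    let meta: MetadataVersion = serde_json::from_reader(&mut file)?;
    // V1 bails with a re-export hint; V2..V5 dispatch to loaders::{v2..v5}.
    meta.load_dump(
        "/tmp/dump-src",   // src: the extracted dump
        "/tmp/dump-dst",   // dst: where the loaded database is built
        100 * 1024 * 1024, // index_db_size (illustrative)
        10 * 1024 * 1024,  // meta_env_size (illustrative)
        indexer_opts,
    )
}
```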
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum DumpStatus {
    Done,
    InProgress,
    Failed,
}
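With the `snake_case` rename, the three variants serialize as the strings `"done"`, `"in_progress"`, and `"failed"`; a one-line sketch:

```rust
// Sketch: DumpStatus::InProgress goes over the wire as "in_progress".
assert_eq!(
    serde_json::to_string(&DumpStatus::InProgress).unwrap(),
    r#""in_progress""#
);
```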
pub fn load_dump(
    dst_path: impl AsRef<Path>,
    src_path: impl AsRef<Path>,
    ignore_dump_if_db_exists: bool,
    ignore_missing_dump: bool,
    index_db_size: usize,
    update_db_size: usize,
    indexer_opts: &IndexerOpts,
) -> anyhow::Result<()> {
    let empty_db = crate::is_empty_db(&dst_path);
    let src_path_exists = src_path.as_ref().exists();

    if empty_db && src_path_exists {
        let (tmp_src, tmp_dst, meta) = extract_dump(&dst_path, &src_path)?;
        meta.load_dump(
            tmp_src.path(),
            tmp_dst.path(),
            index_db_size,
            update_db_size,
            indexer_opts,
        )?;
        persist_dump(&dst_path, tmp_dst)?;
        Ok(())
    } else if !empty_db && !ignore_dump_if_db_exists {
        bail!(
            "database already exists at {:?}, try to delete it or rename it",
            dst_path
                .as_ref()
                .canonicalize()
                .unwrap_or_else(|_| dst_path.as_ref().to_owned())
        )
    } else if !src_path_exists && !ignore_missing_dump {
        bail!("dump doesn't exist at {:?}", src_path.as_ref())
    } else {
        // there is nothing to do
        Ok(())
    }
}
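The two `ignore_*` flags make the function safe to call unconditionally at startup: it either imports the dump, errors out, or does nothing. A usage sketch, assuming a surrounding fallible function with an `indexer_opts: IndexerOpts` in scope; the paths and sizes are illustrative:

```rust
// Sketch of the four outcomes, with illustrative arguments:
// - empty ./data.ms and the dump exists      -> extract, load, persist
// - non-empty ./data.ms, flag left at false  -> "database already exists" error
// - missing dump file, flag left at false    -> "dump doesn't exist" error
// - any other combination                    -> Ok(()) without touching anything
load_dump(
    "./data.ms",           // dst_path
    "./dumps/latest.dump", // src_path
    false,                 // ignore_dump_if_db_exists
    false,                 // ignore_missing_dump
    100 * 1024 * 1024,     // index_db_size (illustrative)
    10 * 1024 * 1024,      // update_db_size (illustrative)
    &indexer_opts,
)?;
```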
fn extract_dump(
    dst_path: impl AsRef<Path>,
    src_path: impl AsRef<Path>,
) -> anyhow::Result<(TempDir, TempDir, MetadataVersion)> {
    // Set up the temp directory in the same parent as the database to prevent
    // cross-device references when the dump is later moved into place.
    let temp_path = dst_path
        .as_ref()
        .parent()
        .map(ToOwned::to_owned)
        .unwrap_or_else(|| ".".into());

    let tmp_src = tempfile::tempdir_in(temp_path)?;
    let tmp_src_path = tmp_src.path();

    from_tar_gz(&src_path, tmp_src_path)?;

    let meta_path = tmp_src_path.join(META_FILE_NAME);
    let mut meta_file = File::open(&meta_path)?;
    let meta: MetadataVersion = serde_json::from_reader(&mut meta_file)?;

    if !dst_path.as_ref().exists() {
        std::fs::create_dir_all(dst_path.as_ref())?;
    }

    let tmp_dst = tempfile::tempdir_in(dst_path.as_ref())?;

    info!(
        "Loading dump {}, dump database version: {}, dump version: {}",
        meta.dump_date()
            .map(|t| format!("from {}", t))
            .unwrap_or_else(String::new),
        meta.db_version(),
        meta.version()
    );

    Ok((tmp_src, tmp_dst, meta))
}
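Creating the tempdirs next to their final destinations is what keeps the later `std::fs::rename` calls cheap and atomic: `rename(2)` cannot cross filesystems and fails with `EXDEV` instead of copying. A minimal illustration, assuming `/tmp` and `./data.ms` live on different mounts; `cross_device_example` is a hypothetical helper:

```rust
// Sketch: why the tempdirs are created in the same parent as the database.
// A rename across mount points does not copy; it fails with EXDEV.
fn cross_device_example() {
    if let Err(e) = std::fs::rename("/tmp/extracted-dump", "./data.ms/extracted-dump") {
        eprintln!("rename failed (likely EXDEV across filesystems): {e}");
    }
}
```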
fn persist_dump(dst_path: impl AsRef<Path>, tmp_dst: TempDir) -> anyhow::Result<()> {
    let persisted_dump = tmp_dst.into_path();

    // Delete everything in the `data.ms` except the tempdir.
    if dst_path.as_ref().exists() {
        for file in dst_path.as_ref().read_dir().unwrap() {
            let file = file.unwrap().path();
            if file.file_name() == persisted_dump.file_name() {
                continue;
            }

            if file.is_file() {
                std::fs::remove_file(&file)?;
            } else {
                std::fs::remove_dir_all(&file)?;
            }
        }
    }

    // Move the whole content of the tempdir into the `data.ms`.
    for file in persisted_dump.read_dir().unwrap() {
        let file = file.unwrap().path();

        std::fs::rename(&file, &dst_path.as_ref().join(file.file_name().unwrap()))?;
    }

    // Delete the empty tempdir.
    std::fs::remove_dir_all(&persisted_dump)?;

    Ok(())
}
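Note that `tmp_dst.into_path()` consumes the `TempDir` guard, so the directory stops being deleted automatically on drop; once its contents have been renamed into `data.ms`, the now-empty directory is removed by the explicit `remove_dir_all` at the end.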
@@ -1,7 +1,7 @@
 use std::error::Error;
 use std::fmt;

-use meilisearch_error::{Code, ErrorCode};
+use meilisearch_types::error::{Code, ErrorCode};
 use milli::UserError;

 #[derive(Debug)]
@@ -1,6 +1,7 @@
 use std::error::Error;

-use meilisearch_error::{internal_error, Code, ErrorCode};
+use meilisearch_types::error::{Code, ErrorCode};
+use meilisearch_types::internal_error;
 use serde_json::Value;

 use crate::{error::MilliError, update_file_store};
Some files were not shown because too many files have changed in this diff.