Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-12-16 01:16:56 +00:00

Compare commits: v0.24.0rc3 ... v0.26.1-cl (190 commits)
Commit SHA1s, in the order listed (the author and date columns of the original table were empty in this snapshot):

```text
8058970523 3273fe0470 7468a5e96c a87faa0db7 b669a73432 833f7fbdbe 62ce8e0bda ddd25bfe01
19da45c53b 0026410c61 b57c59baa4 af8a5f2c21 d6400aef27 81fe65afed c2b58720d1 5515aa5045
15150db957 3b2e467ca6 4fbb83a34d ff6a7b6007 6312e7f1f3 bfb375ac87 21d277a0ef c3e3c900f2
05c8d81e65 cd6276eef9 7bcaa2fd13 67ecd7c147 5890600101 e2a9414c7a 216965e9d9 d0ddbcc2b2
41db2601a6 42cb94e1f4 6e7a0cc65d 23eba82038 001b9acb63 af65ccfd6a 0c7251475d 1a87b2f37d
752a0e13ad ccaca33446 2a90e805a2 c4a2d70d19 f7e4a0177d cca65499de 80fa7dbbfa c24b1e5250
78cf8f1f9f 1da7277817 c71c95feb0 3bee31e6c7 9448ca58aa c9a236b0af 622c15e825 054598734a
7ca647f0d0 aa50fcb1f0 b408de0761 72d9c5ee5c 2b7440d4b5 3bc6a18bcd db56d6cb11 a5759139bf
8a959da120 0a78750465 372f4fc924 ae5b401e74 c562655be7 c8bb54cd94 8c80326dd5 bad4bed439
7828da15c3 7e2f6063ae 2b766a2f26 8ae504bfb0 5981e6c57c 1be3a1e945 629b897845 9f5fee404b
40bf98711c f9f075bca2 0c1a3d59eb 523fb5cd56 436f61a7f4 3fab5869fa 0515c6e844 38176181ac
a7e634bd4f 78a381a30b 343bce6a29 d263f762bf dfaeb19566 010dcc3e80 d0aa5f747c f6d53e03f1
3ecebd15ee db83e39a7f 5d48f72ade 1818026a84 0ad7d38eec b17ad5c2be 1824b3c07b c9c7da3626
030a90523d 56d223a51d f558ff826a 5fb4ed60e7 0d2a358cc2 595250c93e c636988935 eea483c470
d53c61a6d0 c0d4f71a34 f56989e46e c0251eb680 450b81ca13 2f3faadcbf 5986a2d126 d75e84f625
c221277fd2 3b30fadb55 d7df4d6b84 fd854035c1 4d1c138842 7649239b08 0e2f6ba1b6 f529c46598
1ba49d2ddb 1b5ca88231 37329e0784 eaff393c76 a845cd8880 b28a465304 845d3114ea 287fa7ca74
80ed9654e1 7ddab7ef31 d534a7f7c8 5af51c852c ee7970f603 5453877ca7 ea0a5271f7 879cc4ec26
6ac2475aba 47d5f659e0 8c9e51e94f 0da5aca9f6 9906db9e64 8096b568f0 2934a77832 cf6cb938a6
8ff6b1b540 a938a9ab0f ae73386723 34c8a859eb 80d039042b 5606e22d97 23e35fa526 82033f935e
ae2b0e7aa7 948615537b a0e129304c 8d72d538de ffefd0caf2 fa196986c2 a30e02c18c c9f3726447
8363200fd7 37548eb720 dadce6032d 53fc2edab3 b7c5b78a61 f081dc2001 2cf7daa227 9d75fbc619
3b1b9a277b 40e87b9544 ded7922be5 11ef64ee43 5e6d7b7649 5fd9616b5f a1227648ba ac5535055f
15cb4dafa9 8ca76d9fdf f62e52ec68 bf01c674ea ec0716ddd1 f7f2421e71
```
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 4 changes)

```diff
@@ -23,8 +23,8 @@ A clear and concise description of what you expected to happen.
 **Screenshots**
 If applicable, add screenshots to help explain your problem.
 
-**MeiliSearch version:** [e.g. v0.20.0]
+**Meilisearch version:** [e.g. v0.20.0]
 
 **Additional context**
 Additional information that may be relevant to the issue.
-[e.g. architecture, device, OS, browser]
+[e.g. architecture, device, OS, browser]
```
.github/ISSUE_TEMPLATE/config.yml (vendored, 2 changes)

```diff
@@ -6,5 +6,5 @@ contact_links:
     url: https://github.com/meilisearch/documentation/issues/new
     about: For documentation issues, open an issue or a PR in the documentation repository
   - name: Support questions & other
-    url: https://github.com/meilisearch/MeiliSearch/discussions/new
+    url: https://github.com/meilisearch/meilisearch/discussions/new
     about: For any other question, open a discussion in this repository
```
.github/is-latest-release.sh (vendored, 2 changes)

```diff
@@ -74,7 +74,7 @@ semverLT() {
 # Returns the tag of the latest stable release (in terms of semver and not of release date)
 get_latest() {
     temp_file='temp_file' # temp_file needed because the grep would start before the download is over
-    curl -s 'https://api.github.com/repos/meilisearch/MeiliSearch/releases' > "$temp_file"
+    curl -s 'https://api.github.com/repos/meilisearch/meilisearch/releases' > "$temp_file"
     releases=$(cat "$temp_file" | \
         grep -E "tag_name|draft|prerelease" \
         | tr -d ',"' | cut -d ':' -f2 | tr -d ' ')
```
.github/release-draft-template.yml (vendored, file deleted, 13 lines)

```diff
@@ -1,13 +0,0 @@
-name-template: 'v$RESOLVED_VERSION'
-tag-template: 'v$RESOLVED_VERSION'
-version-template: '0.21.0-alpha.$PATCH'
-exclude-labels:
-  - 'skip-changelog'
-template: |
-  ## Changes
-
-  $CHANGES
-no-changes-template: 'Changes are coming soon 😎'
-sort-direction: 'ascending'
-version-resolver:
-  default: patch
```
.github/workflows/README.md (vendored, 2 changes)

```diff
@@ -1,4 +1,4 @@
-# GitHub Actions Workflow for MeiliSearch
+# GitHub Actions Workflow for Meilisearch
 
 > **Note:**
 
```
.github/workflows/publish-binaries.yml (vendored, 78 changes)

```diff
@@ -9,6 +9,7 @@ jobs:
     name: Publish for ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     strategy:
+      fail-fast: false
       matrix:
         os: [ubuntu-18.04, macos-latest, windows-latest]
         include:
@@ -37,28 +38,69 @@ jobs:
           asset_name: ${{ matrix.asset_name }}
           tag: ${{ github.ref }}
 
-  publish-armv8:
-    name: Publish for ARMv8
-    runs-on: ubuntu-18.04
+  publish-aarch64:
+    name: Publish to GitHub
+    runs-on: ${{ matrix.os }}
+    continue-on-error: false
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - build: aarch64
+            os: ubuntu-18.04
+            target: aarch64-unknown-linux-gnu
+            linker: gcc-aarch64-linux-gnu
+            use-cross: true
+            asset_name: meilisearch-linux-aarch64
+
     steps:
-      - uses: actions/checkout@v2
-      - uses: uraimo/run-on-arch-action@v2.1.1
-        id: runcmd
+      - name: Checkout repository
+        uses: actions/checkout@v2
+
+      - name: Installing Rust toolchain
+        uses: actions-rs/toolchain@v1
         with:
-          arch: aarch64 # aka ARMv8
-          distro: ubuntu18.04
-          env: |
-            JEMALLOC_SYS_WITH_LG_PAGE: 16
-          run: |
-            apt update
-            apt install -y curl gcc make
-            curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal --default-toolchain stable
-            source $HOME/.cargo/env
-            cargo build --release --locked
+          toolchain: stable
+          profile: minimal
+          target: ${{ matrix.target }}
+          override: true
+
+      - name: APT update
+        run: |
+          sudo apt update
+
+      - name: Install target specific tools
+        if: matrix.use-cross
+        run: |
+          sudo apt-get install -y ${{ matrix.linker }}
+
+      - name: Configure target aarch64 GNU
+        if: matrix.target == 'aarch64-unknown-linux-gnu'
+        ## Environment variable is not passed using env:
+        ## LD gold won't work with MUSL
+        # env:
+        #   JEMALLOC_SYS_WITH_LG_PAGE: 16
+        #   RUSTFLAGS: '-Clink-arg=-fuse-ld=gold'
+        run: |
+          echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config
+          echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config
+          echo 'JEMALLOC_SYS_WITH_LG_PAGE=16' >> $GITHUB_ENV
+          echo RUSTFLAGS="-Clink-arg=-fuse-ld=gold" >> $GITHUB_ENV
+
+      - name: Cargo build
+        uses: actions-rs/cargo@v1
+        with:
+          command: build
+          use-cross: ${{ matrix.use-cross }}
+          args: --release --target ${{ matrix.target }}
+
+      - name: List target output files
+        run: ls -lR ./target
+
       - name: Upload the binary to release
         uses: svenstaro/upload-release-action@v1-release
         with:
           repo_token: ${{ secrets.PUBLISH_TOKEN }}
-          file: target/release/meilisearch
-          asset_name: meilisearch-linux-armv8
+          file: target/${{ matrix.target }}/release/meilisearch
+          asset_name: ${{ matrix.asset_name }}
           tag: ${{ github.ref }}
```
.github/workflows/publish-crossbuild.yml (vendored, file deleted, 76 lines)

```diff
@@ -1,76 +0,0 @@
-name: Publish aarch64 binary
-
-on:
-  release:
-    types: [published]
-
-env:
-  CARGO_TERM_COLOR: always
-
-jobs:
-  publish-aarch64:
-    name: Publish to Github
-    runs-on: ${{ matrix.os }}
-    continue-on-error: false
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - build: aarch64
-            os: ubuntu-18.04
-            target: aarch64-unknown-linux-gnu
-            linker: gcc-aarch64-linux-gnu
-            use-cross: true
-            asset_name: meilisearch-linux-aarch64
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v2
-
-      - name: Installing Rust toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          profile: minimal
-          target: ${{ matrix.target }}
-          override: true
-
-      - name: APT update
-        run: |
-          sudo apt update
-
-      - name: Install target specific tools
-        if: matrix.use-cross
-        run: |
-          sudo apt-get install -y ${{ matrix.linker }}
-
-      - name: Configure target aarch64 GNU
-        if: matrix.target == 'aarch64-unknown-linux-gnu'
-        ## Environment variable is not passed using env:
-        ## LD gold won't work with MUSL
-        # env:
-        #   JEMALLOC_SYS_WITH_LG_PAGE: 16
-        #   RUSTFLAGS: '-Clink-arg=-fuse-ld=gold'
-        run: |
-          echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config
-          echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config
-          echo 'JEMALLOC_SYS_WITH_LG_PAGE=16' >> $GITHUB_ENV
-          echo RUSTFLAGS="-Clink-arg=-fuse-ld=gold" >> $GITHUB_ENV
-
-      - name: Cargo build
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          use-cross: ${{ matrix.use-cross }}
-          args: --release --target ${{ matrix.target }}
-
-      - name: List target output files
-        run: ls -lR ./target
-
-      - name: Upload the binary to release
-        uses: svenstaro/upload-release-action@v1-release
-        with:
-          repo_token: ${{ secrets.PUBLISH_TOKEN }}
-          file: target/${{ matrix.target }}/release/meilisearch
-          asset_name: ${{ matrix.asset_name }}
-          tag: ${{ github.ref }}
```
.github/workflows/publish-docker-latest.yml (vendored, 28 changes)

```diff
@@ -6,17 +6,25 @@ on:
 name: Publish latest image to Docker Hub
 
 jobs:
-  build:
-    runs-on: ubuntu-18.04
+  docker-latest:
+    runs-on: docker
     steps:
       - uses: actions/checkout@v2
-      - name: Check if current release is latest
-        run: echo "##[set-output name=is_latest;]$(sh .github/is-latest-release.sh)"
-        id: release
-      - name: Publish to Registry
-        if: steps.release.outputs.is_latest == 'true'
-        uses: elgohr/Publish-Docker-Github-Action@master
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
         with:
-          name: getmeili/meilisearch
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          push: true
+          platforms: linux/amd64,linux/arm64
+          tags: getmeili/meilisearch:latest
```
.github/workflows/publish-docker-tag.yml (vendored, 35 changes)

```diff
@@ -7,16 +7,33 @@ on:
 name: Publish tagged image to Docker Hub
 
 jobs:
-  build:
-    runs-on: ubuntu-18.04
+  docker-tag:
+    runs-on: docker
     steps:
       - uses: actions/checkout@v2
-      - name: Publish to Registry
-        uses: elgohr/Publish-Docker-Github-Action@master
-        env:
-          COMMIT_SHA: ${{ github.sha }}
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
         with:
-          name: getmeili/meilisearch
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}
-          tag_names: true
+
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v3
+        with:
+          images: getmeili/meilisearch
+          flavor: latest=false
+          tags: type=ref,event=tag
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          push: true
+          platforms: linux/amd64,linux/arm64
+          tags: ${{ steps.meta.outputs.tags }}
```
.github/workflows/release-drafter.yml (vendored, file deleted, 16 lines)

```diff
@@ -1,16 +0,0 @@
-name: Release Drafter
-
-on:
-  push:
-    branches:
-      - main
-
-jobs:
-  update_release_draft:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: release-drafter/release-drafter@v5
-        with:
-          config-name: release-draft-template.yml
-        env:
-          GITHUB_TOKEN: ${{ secrets.RELEASE_DRAFTER_TOKEN }}
```
.github/workflows/rust.yml (vendored, 1 change)

```diff
@@ -11,6 +11,7 @@ on:
 
 env:
   CARGO_TERM_COLOR: always
+  RUST_BACKTRACE: 1
 
 jobs:
   tests:
```
CONTRIBUTING.md

````diff
@@ -1,28 +1,20 @@
 # Contributing
 
-First, thank you for contributing to MeiliSearch! The goal of this document is to provide everything you need to start contributing to MeiliSearch.
+First, thank you for contributing to Meilisearch! The goal of this document is to provide everything you need to start contributing to Meilisearch.
 
-- [Hacktoberfest](#hacktoberfest)
+Remember that there are many ways to contribute other than writing code: writing [tutorials or blog posts](https://github.com/meilisearch/awesome-meilisearch), improving [the documentation](https://github.com/meilisearch/documentation), submitting [bug reports](https://github.com/meilisearch/meilisearch/issues/new?assignees=&labels=&template=bug_report.md&title=) and [feature requests](https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal)...
+
+## Table of Contents
 - [Assumptions](#assumptions)
 - [How to Contribute](#how-to-contribute)
 - [Development Workflow](#development-workflow)
 - [Git Guidelines](#git-guidelines)
 
-## Hacktoberfest
-
-It's [Hacktoberfest month](https://blog.meilisearch.com/contribute-hacktoberfest-2021/)! 🥳
-
-🚀 If your PR gets accepted it will count into your participation to Hacktoberfest!
-
-✅ To be accepted it has either to have been merged, approved or tagged with the `hacktoberfest-accepted` label.
-
-🧐 Don't forget to check the [quality standards](https://hacktoberfest.digitalocean.com/resources/qualitystandards)! Low-quality PRs might get marked as `spam` or `invalid`, and will not count toward your participation in Hacktoberfest.
-
 ## Assumptions
 
 1. **You're familiar with [Github](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
-2. **You've read the MeiliSearch [documentation](https://docs.meilisearch.com).**
-3. **You know about the [MeiliSearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
+2. **You've read the Meilisearch [documentation](https://docs.meilisearch.com).**
+3. **You know about the [Meilisearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
    Please use this for help.**
 
 ## How to Contribute
@@ -30,21 +22,21 @@ It's [Hacktoberfest month](https://blog.meilisearch.com/contribute-hacktoberfest
 1. Ensure your change has an issue! Find an
    [existing issue](https://github.com/meilisearch/meilisearch/issues/) or [open a new issue](https://github.com/meilisearch/meilisearch/issues/new).
    * This is where you can get a feel if the change will be accepted or not.
-2. Once approved, [fork the MeiliSearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own Github account.
+2. Once approved, [fork the Meilisearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own Github account.
 3. [Create a new Git branch](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-and-deleting-branches-within-your-repository)
 4. Review the [Development Workflow](#development-workflow) section that describes the steps to maintain the repository.
 5. Make your changes on your branch.
-6. [Submit the branch as a Pull Request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork) pointing to the `main` branch of the MeiliSearch repository. A maintainer should comment and/or review your Pull Request within a few days. Although depending on the circumstances, it may take longer.
+6. [Submit the branch as a Pull Request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork) pointing to the `main` branch of the Meilisearch repository. A maintainer should comment and/or review your Pull Request within a few days. Although depending on the circumstances, it may take longer.
 
 ## Development Workflow
 
-### Setup and run MeiliSearch
+### Setup and run Meilisearch
 
 ```bash
 cargo run --release
 ```
 
-We recommend using the `--release` flag to test the full performance of MeiliSearch.
+We recommend using the `--release` flag to test the full performance of Meilisearch.
 
 ### Test
 
````
Cargo.lock (generated, 1740 changes): file diff suppressed because it is too large.
Cargo.toml

```diff
@@ -3,8 +3,5 @@ members = [
     "meilisearch-http",
     "meilisearch-error",
     "meilisearch-lib",
+    "meilisearch-auth",
 ]
 resolver = "2"
 
+[patch.crates-io]
+pest = { git = "https://github.com/pest-parser/pest.git", rev = "51fd1d49f1041f7839975664ef71fe15c7dcaf67" }
```
Dockerfile (21 changes)

```diff
@@ -11,6 +11,7 @@ WORKDIR /meilisearch
 COPY Cargo.lock .
 COPY Cargo.toml .
 
+COPY meilisearch-auth/Cargo.toml meilisearch-auth/
 COPY meilisearch-error/Cargo.toml meilisearch-error/
 COPY meilisearch-http/Cargo.toml meilisearch-http/
 COPY meilisearch-lib/Cargo.toml meilisearch-lib/
@@ -20,7 +21,10 @@ ENV RUSTFLAGS="-C target-feature=-crt-static"
 # Create dummy main.rs files for each workspace member to be able to compile all the dependencies
 RUN find . -type d -name "meilisearch-*" | xargs -I{} sh -c 'mkdir {}/src; echo "fn main() { }" > {}/src/main.rs;'
 # Use `cargo build` instead of `cargo vendor` because we need to not only download but compile dependencies too
-RUN $HOME/.cargo/bin/cargo build --release
+RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
+        export JEMALLOC_SYS_WITH_LG_PAGE=16; \
+    fi && \
+    $HOME/.cargo/bin/cargo build --release
 # Cleanup dummy main.rs files
 RUN find . -path "*/src/main.rs" -delete
 
@@ -29,23 +33,20 @@ ARG COMMIT_DATE
 ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE}
 
 COPY . .
-RUN $HOME/.cargo/bin/cargo build --release
+RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
+        export JEMALLOC_SYS_WITH_LG_PAGE=16; \
+    fi && \
+    $HOME/.cargo/bin/cargo build --release
 
 # Run
 FROM alpine:3.14
 
-ARG USER=meili
-ENV HOME /home/${USER}
 ENV MEILI_HTTP_ADDR 0.0.0.0:7700
 ENV MEILI_SERVER_PROVIDER docker
 
-# download runtime deps as root and create ${USER}
 RUN apk update --quiet \
-    && apk add -q --no-cache libgcc tini curl \
-    && adduser -D ${USER}
-WORKDIR ${HOME}
-USER ${USER}
-# copy file as ${USER} to ${HOME}
+    && apk add -q --no-cache libgcc tini curl
 
 COPY --from=compiler /meilisearch/target/release/meilisearch .
 
 EXPOSE 7700/tcp
```
LICENSE (2 changes)

```diff
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2019-2021 Meili SAS
+Copyright (c) 2019-2022 Meilisearch
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
```
README.md (42 changes)

````diff
@@ -1,8 +1,8 @@
 <p align="center">
-  <img src="assets/logo.svg" alt="MeiliSearch" width="200" height="200" />
+  <img src="assets/logo.svg" alt="Meilisearch" width="200" height="200" />
 </p>
 
-<h1 align="center">MeiliSearch</h1>
+<h1 align="center">Meilisearch</h1>
 
 <h4 align="center">
   <a href="https://www.meilisearch.com">Website</a> |
@@ -15,17 +15,17 @@
 </h4>
 
 <p align="center">
-  <a href="https://github.com/meilisearch/MeiliSearch/actions"><img src="https://github.com/meilisearch/MeiliSearch/workflows/Cargo%20test/badge.svg" alt="Build Status"></a>
-  <a href="https://deps.rs/repo/github/meilisearch/MeiliSearch"><img src="https://deps.rs/repo/github/meilisearch/MeiliSearch/status.svg" alt="Dependency status"></a>
-  <a href="https://github.com/meilisearch/MeiliSearch/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
-  <a href="https://slack.meilisearch.com"><img src="https://img.shields.io/badge/slack-MeiliSearch-blue.svg?logo=slack" alt="Slack"></a>
-  <a href="https://github.com/meilisearch/MeiliSearch/discussions" alt="Discussions"><img src="https://img.shields.io/badge/github-discussions-red" /></a>
+  <a href="https://github.com/meilisearch/meilisearch/actions"><img src="https://github.com/meilisearch/meilisearch/workflows/Cargo%20test/badge.svg" alt="Build Status"></a>
+  <a href="https://deps.rs/repo/github/meilisearch/meilisearch"><img src="https://deps.rs/repo/github/meilisearch/meilisearch/status.svg" alt="Dependency status"></a>
+  <a href="https://github.com/meilisearch/meilisearch/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
+  <a href="https://slack.meilisearch.com"><img src="https://img.shields.io/badge/slack-meilisearch-blue.svg?logo=slack" alt="Slack"></a>
+  <a href="https://github.com/meilisearch/meilisearch/discussions" alt="Discussions"><img src="https://img.shields.io/badge/github-discussions-red" /></a>
   <a href="https://app.bors.tech/repositories/26457"><img src="https://bors.tech/images/badge_small.svg" alt="Bors enabled"></a>
 </p>
 
 <p align="center">⚡ Lightning Fast, Ultra Relevant, and Typo-Tolerant Search Engine 🔍</p>
 
-**MeiliSearch** is a powerful, fast, open-source, easy to use and deploy search engine. Both searching and indexing are highly customizable. Features such as typo-tolerance, filters, and synonyms are provided out-of-the-box.
+**Meilisearch** is a powerful, fast, open-source, easy to use and deploy search engine. Both searching and indexing are highly customizable. Features such as typo-tolerance, filters, and synonyms are provided out-of-the-box.
 For more information about features go to [our documentation](https://docs.meilisearch.com/).
 
 <p align="center">
@@ -61,13 +61,13 @@ meilisearch
 docker run -p 7700:7700 -v "$(pwd)/data.ms:/data.ms" getmeili/meilisearch
 ```
 
-#### Announcing a cloud-hosted MeiliSearch
+#### Announcing a cloud-hosted Meilisearch
 
 Join the closed beta by filling out this [form](https://meilisearch.typeform.com/to/FtnzvZfh).
 
-#### Try MeiliSearch in our Sandbox
+#### Try Meilisearch in our Sandbox
 
-Create a MeiliSearch instance in [MeiliSearch Sandbox](https://sandbox.meilisearch.com/). This instance is free, and will be active for 48 hours.
+Create a Meilisearch instance in [Meilisearch Sandbox](https://sandbox.meilisearch.com/). This instance is free, and will be active for 48 hours.
 
 #### Run on Digital Ocean
 
@@ -99,8 +99,8 @@ curl -L https://install.meilisearch.com | sh
 If you have the latest stable Rust toolchain installed on your local system, clone the repository and change it to your working directory.
 
 ```bash
-git clone https://github.com/meilisearch/MeiliSearch.git
-cd MeiliSearch
+git clone https://github.com/meilisearch/meilisearch.git
+cd meilisearch
 cargo run --release
 ```
 
@@ -161,19 +161,19 @@ curl 'http://127.0.0.1:7700/indexes/movies/search?q=botman+robin&limit=2' | jq
 
 #### Use the Web Interface
 
-We also deliver an **out-of-the-box [web interface](https://github.com/meilisearch/mini-dashboard)** in which you can test MeiliSearch interactively.
+We also deliver an **out-of-the-box [web interface](https://github.com/meilisearch/mini-dashboard)** in which you can test Meilisearch interactively.
 
-You can access the web interface in your web browser at the root of the server. The default URL is [http://127.0.0.1:7700](http://127.0.0.1:7700). All you need to do is open your web browser and enter MeiliSearch’s address to visit it. This will lead you to a web page with a search bar that will allow you to search in the selected index.
+You can access the web interface in your web browser at the root of the server. The default URL is [http://127.0.0.1:7700](http://127.0.0.1:7700). All you need to do is open your web browser and enter Meilisearch’s address to visit it. This will lead you to a web page with a search bar that will allow you to search in the selected index.
 
 [See the gif above](#demo)
 
 ## Documentation
 
-Now that your MeiliSearch server is up and running, you can learn more about how to tune your search engine in [the documentation](https://docs.meilisearch.com).
+Now that your Meilisearch server is up and running, you can learn more about how to tune your search engine in [the documentation](https://docs.meilisearch.com).
 
 ## Contributing
 
-Hey! We're glad you're thinking about contributing to MeiliSearch! Feel free to pick an [issue labeled as `good first issue`](https://github.com/meilisearch/MeiliSearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22), and to ask any question you need. Some points might not be clear and we are available to help you!
+Hey! We're glad you're thinking about contributing to Meilisearch! Feel free to pick an [issue labeled as `good first issue`](https://github.com/meilisearch/meilisearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22), and to ask any question you need. Some points might not be clear and we are available to help you!
 
 Also, we recommend following the [CONTRIBUTING](./CONTRIBUTING.md) to create your PR.
 
@@ -184,8 +184,8 @@ The code in this repository is only concerned with managing multiple indexes, ha
 Search and indexation are the domain of our core engine, [`milli`](https://github.com/meilisearch/milli), while tokenization is handled by [our `tokenizer` library](https://github.com/meilisearch/tokenizer/).
 ## Telemetry
 
-MeiliSearch collects anonymous data regarding general usage.
-This helps us better understand developers' usage of MeiliSearch features.
+Meilisearch collects anonymous data regarding general usage.
+This helps us better understand developers' usage of Meilisearch features.
 
 To find out more on what information we're retrieving, please see our documentation on [Telemetry](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html).
 
@@ -193,7 +193,7 @@ This program is optional, you can disable these analytics by using the `MEILI_NO
 
 ## Feature request
 
-The feature requests are not managed in this repository. Please visit our [dedicated repository](https://github.com/meilisearch/product) to see our work about the MeiliSearch product.
+The feature requests are not managed in this repository. Please visit our [dedicated repository](https://github.com/meilisearch/product) to see our work about the Meilisearch product.
 
 If you have a feature request or any feedback about an existing feature, please open [a discussion](https://github.com/meilisearch/product/discussions).
 Also, feel free to participate in the current discussions, we are looking forward to reading your comments.
@@ -202,4 +202,4 @@ Also, feel free to participate in the current discussions, we are looking forwar
 
 Please visit [this page](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html#contact-us).
 
-MeiliSearch is developed by [Meili](https://www.meilisearch.com), a young company. To know more about us, you can [read our blog](https://blog.meilisearch.com). Any suggestion or feedback is highly appreciated. Thank you for your support!
+Meilisearch is developed by [Meili](https://www.meilisearch.com), a young company. To know more about us, you can [read our blog](https://blog.meilisearch.com). Any suggestion or feedback is highly appreciated. Thank you for your support!
````
SECURITY.md

```diff
@@ -1,16 +1,16 @@
 # Security
 
-MeiliSearch takes the security of our software products and services seriously.
+Meilisearch takes the security of our software products and services seriously.
 
-If you believe you have found a security vulnerability in any MeiliSearch-owned repository, please report it to us as described below.
+If you believe you have found a security vulnerability in any Meilisearch-owned repository, please report it to us as described below.
 
 ## Suported versions
 
-As long as we are pre-v1.0, only the latest version of MeiliSearch will be supported with security updates.
+As long as we are pre-v1.0, only the latest version of Meilisearch will be supported with security updates.
 
 ## Reporting security issues
 
-⚠️ Please do not report security vulnerabilities through public GitHub issues. ⚠️
+⚠️ Please do not report security vulnerabilities through public GitHub issues. ⚠️
 
 Instead, please kindly email us at security@meilisearch.com
```
assets/logo.svg (Before: 2.0 KiB, After: 1.3 KiB)

```diff
@@ -1,17 +1,19 @@
-<svg width="360" height="360" viewBox="0 0 360 360" fill="none" xmlns="http://www.w3.org/2000/svg">
-<g id="logo_main">
-<rect id="Rectangle" x="107.333" y="0.150146" width="274.315" height="274.315" rx="98.8334" transform="rotate(23 107.333 0.150146)" fill="url(#paint0_linear)"/>
-<path id="Rectangle_2" fill-rule="evenodd" clip-rule="evenodd" d="M61.3296 230.199C46.2224 194.608 38.6688 176.813 38.208 160.329C37.5286 136.025 47.0175 112.539 64.3891 95.5282C76.1718 83.9904 93.9669 76.4368 129.557 61.3296C165.147 46.2224 182.943 38.6688 199.427 38.208C223.731 37.5286 247.217 47.0175 264.228 64.3891C275.766 76.1718 283.319 93.9669 298.426 129.557C313.534 165.147 321.087 182.943 321.548 199.427C322.227 223.731 312.738 247.217 295.367 264.228C283.584 275.766 265.789 283.319 230.199 298.426C194.608 313.534 176.813 321.087 160.329 321.548C136.025 322.227 112.539 312.738 95.5282 295.367C83.9903 283.584 76.4368 265.789 61.3296 230.199Z" fill="url(#paint1_linear)"/>
-<path id="m" fill-rule="evenodd" clip-rule="evenodd" d="M219.568 130.748C242.363 130.748 259.263 147.451 259.263 174.569V229.001H227.232V179.678C227.232 166.119 220.747 159.634 210.136 159.634C205.223 159.634 200.311 161.796 195.595 167.494C195.791 169.852 195.988 172.21 195.988 174.569V229.001H164.154V179.678C164.154 166.119 157.472 159.634 147.057 159.634C142.145 159.634 137.429 161.992 132.712 168.084V229.001H100.878V133.695H132.712V139.394C139.197 133.892 145.878 130.748 156.49 130.748C168.477 130.748 178.695 135.267 185.769 143.52C195.791 134.678 205.42 130.748 219.568 130.748Z" fill="white"/>
-</g>
+<svg width="300" height="300" viewBox="0 0 300 300" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M0 237L55.426 96.7678C63.2367 77.0063 82.499 64 103.955 64H137.371L81.9447 204.232C74.1341 223.993 54.8717 237 33.4156 237H0Z" fill="url(#paint0_linear_1_898)"/>
+<path d="M81.3123 237L136.738 96.7682C144.549 77.0067 163.811 64.0004 185.267 64.0004H218.683L163.257 204.232C155.446 223.994 136.184 237 114.728 237H81.3123Z" fill="url(#paint1_linear_1_898)"/>
+<path d="M162.629 237L218.055 96.7682C225.866 77.0067 245.128 64.0004 266.584 64.0004H300L244.574 204.232C236.763 223.994 217.501 237 196.045 237H162.629Z" fill="url(#paint2_linear_1_898)"/>
 <defs>
-<linearGradient id="paint0_linear" x1="-13.6248" y1="129.208" x2="244.49" y2="403.522" gradientUnits="userSpaceOnUse">
-<stop stop-color="#E41359"/>
-<stop offset="1" stop-color="#F23C79"/>
+<linearGradient id="paint0_linear_1_898" x1="300.001" y1="50.7858" x2="1.63474" y2="221.244" gradientUnits="userSpaceOnUse">
+<stop stop-color="#FF5CAA"/>
+<stop offset="1" stop-color="#FF4E62"/>
 </linearGradient>
-<linearGradient id="paint1_linear" x1="11.0088" y1="111.65" x2="111.65" y2="348.747" gradientUnits="userSpaceOnUse">
-<stop stop-color="#24222F"/>
-<stop offset="1" stop-color="#2B2937"/>
+<linearGradient id="paint1_linear_1_898" x1="300.001" y1="50.7858" x2="1.63474" y2="221.244" gradientUnits="userSpaceOnUse">
+<stop stop-color="#FF5CAA"/>
+<stop offset="1" stop-color="#FF4E62"/>
 </linearGradient>
+<linearGradient id="paint2_linear_1_898" x1="300.001" y1="50.7858" x2="1.63474" y2="221.244" gradientUnits="userSpaceOnUse">
+<stop stop-color="#FF5CAA"/>
+<stop offset="1" stop-color="#FF4E62"/>
+</linearGradient>
 </defs>
 </svg>
```
download-latest.sh

```diff
@@ -73,14 +73,14 @@ semverLT() {
 get_latest() {
     temp_file='temp_file' # temp_file needed because the grep would start before the download is over
 
-    if [ -z "$GITHUB_PAT" ]; then
-        curl -s 'https://api.github.com/repos/meilisearch/MeiliSearch/releases' > "$temp_file" || return 1
+    if [ -z "$GITHUB_PAT" ]; then
+        curl -s 'https://api.github.com/repos/meilisearch/meilisearch/releases' > "$temp_file" || return 1
     else
-        curl -H "Authorization: token $GITHUB_PAT" -s 'https://api.github.com/repos/meilisearch/MeiliSearch/releases' > "$temp_file" || return 1
+        curl -H "Authorization: token $GITHUB_PAT" -s 'https://api.github.com/repos/meilisearch/meilisearch/releases' > "$temp_file" || return 1
     fi
 
     releases=$(cat "$temp_file" | \
-        grep -E "tag_name|draft|prerelease" \
+        grep -E '"tag_name":|"draft":|"prerelease":' \
         | tr -d ',"' | cut -d ':' -f2 | tr -d ' ')
     # Returns a list of [tag_name draft_boolean prerelease_boolean ...]
     # Ex: v0.10.1 false false v0.9.1-rc.1 false true v0.9.0 false false...
@@ -120,7 +120,7 @@ get_latest() {
     done
 
     rm -f "$temp_file"
-    echo $latest
+    return 0
 }
 
 # Gets the OS by setting the $os variable
@@ -148,11 +148,18 @@ get_os() {
 get_archi() {
     architecture=$(uname -m)
     case "$architecture" in
-    'x86_64' | 'amd64' | 'arm64')
+    'x86_64' | 'amd64' )
         archi='amd64'
         ;;
+    'arm64')
+        if [ $os = 'macos' ]; then # MacOS M1
+            archi='amd64'
+        else
+            archi='aarch64'
+        fi
+        ;;
     'aarch64')
-        archi='armv8'
+        archi='aarch64'
         ;;
     *)
         return 1
@@ -161,7 +168,7 @@ get_archi() {
 }
 
 success_usage() {
-    printf "$GREEN%s\n$DEFAULT" "MeiliSearch $latest binary successfully downloaded as '$binary_name' file."
+    printf "$GREEN%s\n$DEFAULT" "Meilisearch $latest binary successfully downloaded as '$binary_name' file."
     echo ''
     echo 'Run it:'
     echo ' $ ./meilisearch'
@@ -169,47 +176,65 @@ success_usage() {
     echo ' $ ./meilisearch --help'
 }
 
-failure_usage() {
-    printf "$RED%s\n$DEFAULT" 'ERROR: MeiliSearch binary is not available for your OS distribution or your architecture yet.'
+not_available_failure_usage() {
+    printf "$RED%s\n$DEFAULT" 'ERROR: Meilisearch binary is not available for your OS distribution or your architecture yet.'
     echo ''
     echo 'However, you can easily compile the binary from the source files.'
     echo 'Follow the steps at the page ("Source" tab): https://docs.meilisearch.com/learn/getting_started/installation.html'
 }
 
+fetch_release_failure_usage() {
+    echo ''
+    printf "$RED%s\n$DEFAULT" 'ERROR: Impossible to get the latest stable version of Meilisearch.'
+    echo 'Please let us know about this issue: https://github.com/meilisearch/meilisearch/issues/new/choose'
+}
+
 # MAIN
-latest="$(get_latest)"
+
+# Fill $latest variable
+if ! get_latest; then
+    fetch_release_failure_usage # TO CHANGE
+    exit 1
+fi
 
 if [ "$latest" = '' ]; then
-    echo ''
-    echo 'Impossible to get the latest stable version of MeiliSearch.'
-    echo 'Please let us know about this issue: https://github.com/meilisearch/meilisearch-swift/issues/new/choose'
+    fetch_release_failure_usage
     exit 1
 fi
 
+# Fill $os variable
 if ! get_os; then
-    failure_usage
+    not_available_failure_usage
     exit 1
 fi
 
+# Fill $archi variable
 if ! get_archi; then
-    failure_usage
+    not_available_failure_usage
     exit 1
 fi
 
-echo "Downloading MeiliSearch binary $latest for $os, architecture $archi..."
+echo "Downloading Meilisearch binary $latest for $os, architecture $archi..."
 case "$os" in
     'windows')
         release_file="meilisearch-$os-$archi.exe"
-        binary_name='meilisearch.exe'
+        binary_name='meilisearch.exe'
         ;;
     *)
-        release_file="meilisearch-$os-$archi"
-        binary_name='meilisearch'
+        release_file="meilisearch-$os-$archi"
+        binary_name='meilisearch'
         ;;
 esac
-link="https://github.com/meilisearch/MeiliSearch/releases/download/$latest/$release_file"
-curl -OL "$link"
+
+# Fetch the Meilisearch binary
+link="https://github.com/meilisearch/meilisearch/releases/download/$latest/$release_file"
+curl --fail -OL "$link"
+if [ $? -ne 0 ]; then
+    fetch_release_failure_usage
+    exit 1
+fi
+
 mv "$release_file" "$binary_name"
 chmod 744 "$binary_name"
 success_usage
```
meilisearch-auth/Cargo.toml (new file, 15 lines)

```toml
[package]
name = "meilisearch-auth"
version = "0.26.1"
edition = "2021"

[dependencies]
enum-iterator = "0.7.0"
heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
sha2 = "0.9.6"
meilisearch-error = { path = "../meilisearch-error" }
serde_json = { version = "1.0.67", features = ["preserve_order"] }
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
rand = "0.8.4"
serde = { version = "1.0.130", features = ["derive"] }
thiserror = "1.0.28"
```
meilisearch-auth/src/action.rs (new file, 104 lines)

```rust
use enum_iterator::IntoEnumIterator;
use serde::{Deserialize, Serialize};

#[derive(IntoEnumIterator, Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[repr(u8)]
pub enum Action {
    #[serde(rename = "*")]
    All = 0,
    #[serde(rename = "search")]
    Search = actions::SEARCH,
    #[serde(rename = "documents.add")]
    DocumentsAdd = actions::DOCUMENTS_ADD,
    #[serde(rename = "documents.get")]
    DocumentsGet = actions::DOCUMENTS_GET,
    #[serde(rename = "documents.delete")]
    DocumentsDelete = actions::DOCUMENTS_DELETE,
    #[serde(rename = "indexes.create")]
    IndexesAdd = actions::INDEXES_CREATE,
    #[serde(rename = "indexes.get")]
    IndexesGet = actions::INDEXES_GET,
    #[serde(rename = "indexes.update")]
    IndexesUpdate = actions::INDEXES_UPDATE,
    #[serde(rename = "indexes.delete")]
    IndexesDelete = actions::INDEXES_DELETE,
    #[serde(rename = "tasks.get")]
    TasksGet = actions::TASKS_GET,
    #[serde(rename = "settings.get")]
    SettingsGet = actions::SETTINGS_GET,
    #[serde(rename = "settings.update")]
    SettingsUpdate = actions::SETTINGS_UPDATE,
    #[serde(rename = "stats.get")]
    StatsGet = actions::STATS_GET,
    #[serde(rename = "dumps.create")]
    DumpsCreate = actions::DUMPS_CREATE,
    #[serde(rename = "dumps.get")]
    DumpsGet = actions::DUMPS_GET,
    #[serde(rename = "version")]
    Version = actions::VERSION,
}

impl Action {
    pub fn from_repr(repr: u8) -> Option<Self> {
        use actions::*;
        match repr {
            0 => Some(Self::All),
            SEARCH => Some(Self::Search),
            DOCUMENTS_ADD => Some(Self::DocumentsAdd),
            DOCUMENTS_GET => Some(Self::DocumentsGet),
            DOCUMENTS_DELETE => Some(Self::DocumentsDelete),
            INDEXES_CREATE => Some(Self::IndexesAdd),
            INDEXES_GET => Some(Self::IndexesGet),
            INDEXES_UPDATE => Some(Self::IndexesUpdate),
            INDEXES_DELETE => Some(Self::IndexesDelete),
            TASKS_GET => Some(Self::TasksGet),
            SETTINGS_GET => Some(Self::SettingsGet),
            SETTINGS_UPDATE => Some(Self::SettingsUpdate),
            STATS_GET => Some(Self::StatsGet),
            DUMPS_CREATE => Some(Self::DumpsCreate),
            DUMPS_GET => Some(Self::DumpsGet),
            VERSION => Some(Self::Version),
            _otherwise => None,
        }
    }

    pub fn repr(&self) -> u8 {
        use actions::*;
        match self {
            Self::All => 0,
            Self::Search => SEARCH,
            Self::DocumentsAdd => DOCUMENTS_ADD,
            Self::DocumentsGet => DOCUMENTS_GET,
            Self::DocumentsDelete => DOCUMENTS_DELETE,
            Self::IndexesAdd => INDEXES_CREATE,
            Self::IndexesGet => INDEXES_GET,
            Self::IndexesUpdate => INDEXES_UPDATE,
            Self::IndexesDelete => INDEXES_DELETE,
            Self::TasksGet => TASKS_GET,
            Self::SettingsGet => SETTINGS_GET,
            Self::SettingsUpdate => SETTINGS_UPDATE,
            Self::StatsGet => STATS_GET,
            Self::DumpsCreate => DUMPS_CREATE,
            Self::DumpsGet => DUMPS_GET,
            Self::Version => VERSION,
        }
    }
}

pub mod actions {
    pub const SEARCH: u8 = 1;
    pub const DOCUMENTS_ADD: u8 = 2;
    pub const DOCUMENTS_GET: u8 = 3;
    pub const DOCUMENTS_DELETE: u8 = 4;
    pub const INDEXES_CREATE: u8 = 5;
    pub const INDEXES_GET: u8 = 6;
    pub const INDEXES_UPDATE: u8 = 7;
    pub const INDEXES_DELETE: u8 = 8;
    pub const TASKS_GET: u8 = 9;
    pub const SETTINGS_GET: u8 = 10;
    pub const SETTINGS_UPDATE: u8 = 11;
    pub const STATS_GET: u8 = 12;
    pub const DUMPS_CREATE: u8 = 13;
    pub const DUMPS_GET: u8 = 14;
    pub const VERSION: u8 = 15;
}
```
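`Action` stores its discriminant as a `u8` and renames every variant for serialization, so `repr`/`from_repr` and the serde names must stay in sync. A minimal round-trip sketch, assuming the crate is used as a dependency and `serde_json` is available; the `main` harness is illustrative, not part of the repository:

```rust
use meilisearch_auth::Action;

fn main() {
    // u8 round-trip: repr() and from_repr() are inverses for every variant.
    let action = Action::DocumentsAdd;
    let repr = action.repr(); // 2, per the `actions` constants above
    assert_eq!(Action::from_repr(repr), Some(action));

    // Unknown discriminants are rejected rather than panicking.
    assert_eq!(Action::from_repr(200), None);

    // serde renaming: variants serialize to their dotted API names.
    let json = serde_json::to_string(&Action::Search).unwrap();
    assert_eq!(json, "\"search\"");
}
```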
meilisearch-auth/src/dump.rs (new file, 47 lines)

```rust
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Write;
use std::path::Path;

use crate::{AuthController, HeedAuthStore, Result};

const KEYS_PATH: &str = "keys";

impl AuthController {
    pub fn dump(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
        let mut store = HeedAuthStore::new(&src)?;

        // do not attempt to close the database on drop!
        store.set_drop_on_close(false);

        let keys_file_path = dst.as_ref().join(KEYS_PATH);

        let keys = store.list_api_keys()?;
        let mut keys_file = File::create(&keys_file_path)?;
        for key in keys {
            serde_json::to_writer(&mut keys_file, &key)?;
            keys_file.write_all(b"\n")?;
        }

        Ok(())
    }

    pub fn load_dump(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
        let store = HeedAuthStore::new(&dst)?;

        let keys_file_path = src.as_ref().join(KEYS_PATH);

        if !keys_file_path.exists() {
            return Ok(());
        }

        let mut reader = BufReader::new(File::open(&keys_file_path)?).lines();
        while let Some(key) = reader.next().transpose()? {
            let key = serde_json::from_str(&key)?;
            store.put_api_key(key)?;
        }

        Ok(())
    }
}
```
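The `keys` file written above is line-delimited JSON: one serialized key per line. A self-contained sketch of that round trip, using a stand-in `KeyRecord` struct in place of the real `Key` type (the struct and field names here are illustrative assumptions, and `serde`/`serde_json` are assumed available):

```rust
use std::io::{BufRead, BufReader, Write};

// Stand-in for the real `Key` type; only illustrates the file format.
#[derive(serde::Serialize, serde::Deserialize, Debug)]
struct KeyRecord {
    description: Option<String>,
    indexes: Vec<String>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let keys = vec![
        KeyRecord { description: Some("admin".into()), indexes: vec!["*".into()] },
        KeyRecord { description: None, indexes: vec!["movies".into()] },
    ];

    // Write: mirrors AuthController::dump, one JSON document per line.
    let mut buf = Vec::new();
    for key in &keys {
        serde_json::to_writer(&mut buf, key)?;
        buf.write_all(b"\n")?;
    }

    // Read back: mirrors AuthController::load_dump, parsing line by line.
    let reader = BufReader::new(&buf[..]);
    for line in reader.lines() {
        let key: KeyRecord = serde_json::from_str(&line?)?;
        println!("{key:?}");
    }
    Ok(())
}
```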
meilisearch-auth/src/error.rs (new file, 46 lines)

```rust
use std::error::Error;

use meilisearch_error::ErrorCode;
use meilisearch_error::{internal_error, Code};
use serde_json::Value;

pub type Result<T> = std::result::Result<T, AuthControllerError>;

#[derive(Debug, thiserror::Error)]
pub enum AuthControllerError {
    #[error("`{0}` field is mandatory.")]
    MissingParameter(&'static str),
    #[error("`actions` field value `{0}` is invalid. It should be an array of string representing action names.")]
    InvalidApiKeyActions(Value),
    #[error("`indexes` field value `{0}` is invalid. It should be an array of string representing index names.")]
    InvalidApiKeyIndexes(Value),
    #[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")]
    InvalidApiKeyExpiresAt(Value),
    #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
    InvalidApiKeyDescription(Value),
    #[error("API key `{0}` not found.")]
    ApiKeyNotFound(String),
    #[error("Internal error: {0}")]
    Internal(Box<dyn Error + Send + Sync + 'static>),
}

internal_error!(
    AuthControllerError: heed::Error,
    std::io::Error,
    serde_json::Error,
    std::str::Utf8Error
);

impl ErrorCode for AuthControllerError {
    fn error_code(&self) -> Code {
        match self {
            Self::MissingParameter(_) => Code::MissingParameter,
            Self::InvalidApiKeyActions(_) => Code::InvalidApiKeyActions,
            Self::InvalidApiKeyIndexes(_) => Code::InvalidApiKeyIndexes,
            Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt,
            Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription,
            Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound,
            Self::Internal(_) => Code::Internal,
        }
    }
}
```
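The `internal_error!` macro comes from `meilisearch-error`. The sketch below shows the kind of `From` impl it is assumed to generate for each listed source type: a conversion funneling the source error into the `Internal` variant. This is a hand-written, self-contained stand-in, not the macro's verbatim expansion:

```rust
use std::error::Error;

// Stand-in error enum; `Internal` boxes any source error, like
// AuthControllerError::Internal above.
#[derive(Debug)]
enum DemoError {
    Internal(Box<dyn Error + Send + Sync + 'static>),
}

// Hand-written version of the From impl the macro is assumed to emit
// per source type (here std::io::Error), enabling `?` conversions.
impl From<std::io::Error> for DemoError {
    fn from(error: std::io::Error) -> Self {
        Self::Internal(Box::new(error))
    }
}

fn main() {
    let io_err = std::io::Error::new(std::io::ErrorKind::Other, "disk full");
    let err: DemoError = io_err.into();
    match err {
        DemoError::Internal(source) => println!("internal error: {source}"),
    }
}
```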
meilisearch-auth/src/key.rs (new file, 181 lines)

```rust
use crate::action::Action;
use crate::error::{AuthControllerError, Result};
use crate::store::{KeyId, KEY_ID_LENGTH};
use rand::Rng;
use serde::{Deserialize, Serialize};
use serde_json::{from_value, Value};
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};

#[derive(Debug, Deserialize, Serialize)]
pub struct Key {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    pub id: KeyId,
    pub actions: Vec<Action>,
    pub indexes: Vec<String>,
    #[serde(with = "time::serde::rfc3339::option")]
    pub expires_at: Option<OffsetDateTime>,
    #[serde(with = "time::serde::rfc3339")]
    pub created_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub updated_at: OffsetDateTime,
}

impl Key {
    pub fn create_from_value(value: Value) -> Result<Self> {
        let description = match value.get("description") {
            Some(Value::Null) => None,
            Some(des) => Some(
                from_value(des.clone())
                    .map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?,
            ),
            None => None,
        };

        let id = generate_id();

        let actions = value
            .get("actions")
            .map(|act| {
                from_value(act.clone())
                    .map_err(|_| AuthControllerError::InvalidApiKeyActions(act.clone()))
            })
            .ok_or(AuthControllerError::MissingParameter("actions"))??;

        let indexes = value
            .get("indexes")
            .map(|ind| {
                from_value(ind.clone())
                    .map_err(|_| AuthControllerError::InvalidApiKeyIndexes(ind.clone()))
            })
            .ok_or(AuthControllerError::MissingParameter("indexes"))??;

        let expires_at = value
            .get("expiresAt")
            .map(parse_expiration_date)
            .ok_or(AuthControllerError::MissingParameter("expiresAt"))??;

        let created_at = OffsetDateTime::now_utc();
        let updated_at = created_at;

        Ok(Self {
            description,
            id,
            actions,
            indexes,
            expires_at,
            created_at,
            updated_at,
        })
    }

    pub fn update_from_value(&mut self, value: Value) -> Result<()> {
        if let Some(des) = value.get("description") {
            let des = from_value(des.clone())
                .map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()));
            self.description = des?;
        }

        if let Some(act) = value.get("actions") {
            let act = from_value(act.clone())
                .map_err(|_| AuthControllerError::InvalidApiKeyActions(act.clone()));
            self.actions = act?;
        }

        if let Some(ind) = value.get("indexes") {
            let ind = from_value(ind.clone())
                .map_err(|_| AuthControllerError::InvalidApiKeyIndexes(ind.clone()));
            self.indexes = ind?;
        }

        if let Some(exp) = value.get("expiresAt") {
            self.expires_at = parse_expiration_date(exp)?;
        }

        self.updated_at = OffsetDateTime::now_utc();

        Ok(())
    }

    pub(crate) fn default_admin() -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()),
            id: generate_id(),
            actions: vec![Action::All],
            indexes: vec!["*".to_string()],
            expires_at: None,
            created_at: now,
            updated_at: now,
        }
    }

    pub(crate) fn default_search() -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            description: Some(
                "Default Search API Key (Use it to search from the frontend)".to_string(),
            ),
            id: generate_id(),
            actions: vec![Action::Search],
            indexes: vec!["*".to_string()],
            expires_at: None,
            created_at: now,
            updated_at: now,
        }
    }
}

/// Generate a printable key of 64 characters using thread_rng.
fn generate_id() -> [u8; KEY_ID_LENGTH] {
    const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";

    let mut rng = rand::thread_rng();
    let mut bytes = [0; KEY_ID_LENGTH];
    for byte in bytes.iter_mut() {
        *byte = CHARSET[rng.gen_range(0..CHARSET.len())];
    }

    bytes
}

fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
    match value {
        Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
            .or_else(|_| {
                PrimitiveDateTime::parse(
                    string,
                    format_description!(
                        "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
                    ),
                ).map(|datetime| datetime.assume_utc())
            })
            .or_else(|_| {
                PrimitiveDateTime::parse(
                    string,
                    format_description!(
                        "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
                    ),
                ).map(|datetime| datetime.assume_utc())
            })
            .or_else(|_| {
                Date::parse(string, format_description!(
                    "[year repr:full base:calendar]-[month repr:numerical]-[day]"
                )).map(|date| PrimitiveDateTime::new(date, time!(00:00)).assume_utc())
            })
            .map_err(|_| AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
            // check if the key is already expired.
            .and_then(|d| {
                if d > OffsetDateTime::now_utc() {
                    Ok(d)
                } else {
                    Err(AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
                }
            })
            .map(Option::Some),
        Value::Null => Ok(None),
        _otherwise => Err(AuthControllerError::InvalidApiKeyExpiresAt(value.clone())),
    }
}
```
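`parse_expiration_date` above accepts three textual forms for `expiresAt`: a full RFC 3339 datetime, a space-separated date and time assumed to be UTC, and a bare date expanded to midnight UTC. A minimal sketch exercising the same `time`-crate APIs directly, with a slightly shorter (but equivalent in effect) format description; the date literals are arbitrary and the `main` harness is illustrative:

```rust
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};

fn main() {
    // 1. Full RFC 3339 datetime.
    let rfc = OffsetDateTime::parse("2025-12-31T00:00:00Z", &Rfc3339).unwrap();

    // 2. "YYYY-MM-DD HH:MM:SS", parsed as a naive datetime then assumed UTC.
    let dt = PrimitiveDateTime::parse(
        "2025-12-31 00:00:00",
        format_description!("[year]-[month]-[day] [hour]:[minute]:[second]"),
    )
    .unwrap()
    .assume_utc();

    // 3. Bare "YYYY-MM-DD", expanded to midnight UTC.
    let d = Date::parse("2025-12-31", format_description!("[year]-[month]-[day]")).unwrap();
    let midnight = PrimitiveDateTime::new(d, time!(00:00)).assume_utc();

    // All three forms denote the same instant.
    assert_eq!(rfc, dt);
    assert_eq!(rfc, midnight);
}
```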
272
meilisearch-auth/src/lib.rs
Normal file
272
meilisearch-auth/src/lib.rs
Normal file
@@ -0,0 +1,272 @@
|
||||
mod action;
|
||||
mod dump;
|
||||
pub mod error;
|
||||
mod key;
|
||||
mod store;
|
||||
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::path::Path;
|
||||
use std::str::from_utf8;
|
||||
use std::sync::Arc;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use sha2::{Digest, Sha256};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
pub use action::{actions, Action};
|
||||
use error::{AuthControllerError, Result};
|
||||
pub use key::Key;
|
||||
pub use store::open_auth_store_env;
|
||||
use store::HeedAuthStore;
|
||||
|
||||
#[derive(Clone)]
pub struct AuthController {
    store: Arc<HeedAuthStore>,
    master_key: Option<String>,
}

impl AuthController {
    pub fn new(db_path: impl AsRef<Path>, master_key: &Option<String>) -> Result<Self> {
        let store = HeedAuthStore::new(db_path)?;

        if store.is_empty()? {
            generate_default_keys(&store)?;
        }

        Ok(Self {
            store: Arc::new(store),
            master_key: master_key.clone(),
        })
    }

    pub fn create_key(&self, value: Value) -> Result<Key> {
        let key = Key::create_from_value(value)?;
        self.store.put_api_key(key)
    }

    pub fn update_key(&self, key: impl AsRef<str>, value: Value) -> Result<Key> {
        let mut key = self.get_key(key)?;
        key.update_from_value(value)?;
        self.store.put_api_key(key)
    }

    pub fn get_key(&self, key: impl AsRef<str>) -> Result<Key> {
        self.store
            .get_api_key(&key)?
            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))
    }

    pub fn get_key_filters(
        &self,
        key: impl AsRef<str>,
        search_rules: Option<SearchRules>,
    ) -> Result<AuthFilter> {
        let mut filters = AuthFilter::default();
        if self
            .master_key
            .as_ref()
            .map_or(false, |master_key| master_key != key.as_ref())
        {
            let key = self
                .store
                .get_api_key(&key)?
                .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))?;

            if !key.indexes.iter().any(|i| i.as_str() == "*") {
                filters.search_rules = match search_rules {
                    // Intersect search_rules with parent key authorized indexes.
                    Some(search_rules) => SearchRules::Map(
                        key.indexes
                            .into_iter()
                            .filter_map(|index| {
                                search_rules
                                    .get_index_search_rules(&index)
                                    .map(|index_search_rules| (index, Some(index_search_rules)))
                            })
                            .collect(),
                    ),
                    None => SearchRules::Set(key.indexes.into_iter().collect()),
                };
            } else if let Some(search_rules) = search_rules {
                filters.search_rules = search_rules;
            }

            filters.allow_index_creation = key
                .actions
                .iter()
                .any(|&action| action == Action::IndexesAdd || action == Action::All);
        }

        Ok(filters)
    }

    pub fn list_keys(&self) -> Result<Vec<Key>> {
        self.store.list_api_keys()
    }

    pub fn delete_key(&self, key: impl AsRef<str>) -> Result<()> {
        if self.store.delete_api_key(&key)? {
            Ok(())
        } else {
            Err(AuthControllerError::ApiKeyNotFound(
                key.as_ref().to_string(),
            ))
        }
    }

    pub fn get_master_key(&self) -> Option<&String> {
        self.master_key.as_ref()
    }

    /// Generate a valid key from a key id using the current master key.
    /// Returns None if no master key has been set.
    pub fn generate_key(&self, id: &str) -> Option<String> {
        self.master_key
            .as_ref()
            .map(|master_key| generate_key(master_key.as_bytes(), id))
    }

    /// Check if the provided key is authorized to make a specific action
    /// without checking if the key is valid.
    pub fn is_key_authorized(
        &self,
        key: &[u8],
        action: Action,
        index: Option<&str>,
    ) -> Result<bool> {
        match self
            .store
            // check if the key has access to all indexes.
            .get_expiration_date(key, action, None)?
            .or(match index {
                // else check if the key has access to the requested index.
                Some(index) => {
                    self.store
                        .get_expiration_date(key, action, Some(index.as_bytes()))?
                }
                // or to any index if no index has been requested.
                None => self.store.prefix_first_expiration_date(key, action)?,
            }) {
            // check expiration date.
            Some(Some(exp)) => Ok(OffsetDateTime::now_utc() < exp),
            // no expiration date.
            Some(None) => Ok(true),
            // action or index forbidden.
            None => Ok(false),
        }
    }

    /// Check if the provided key is valid
    /// without checking if the key is authorized to make a specific action.
    pub fn is_key_valid(&self, key: &[u8]) -> Result<bool> {
        if let Some(id) = self.store.get_key_id(key) {
            let id = from_utf8(&id)?;
            if let Some(generated) = self.generate_key(id) {
                return Ok(generated.as_bytes() == key);
            }
        }

        Ok(false)
    }

    /// Check if the provided key is valid
    /// and is authorized to make a specific action.
    pub fn authenticate(&self, key: &[u8], action: Action, index: Option<&str>) -> Result<bool> {
        if self.is_key_authorized(key, action, index)? {
            self.is_key_valid(key)
        } else {
            Ok(false)
        }
    }
}

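Taken together, `is_key_authorized` and `is_key_valid` compose into `authenticate`. A hedged end-to-end sketch follows; the master key, database path, and the JSON shape accepted by `create_key` are assumptions here, inferred from the surrounding code rather than confirmed by it:

fn demo() -> error::Result<()> {
    // Hypothetical master key and database path.
    let auth = AuthController::new("./data.ms", &Some("MASTER_KEY".to_string()))?;

    // Assumed create_key payload shape (camelCase fields, actions as strings).
    let key = auth.create_key(serde_json::json!({
        "description": "search only",
        "actions": ["search"],
        "indexes": ["movies"],
        "expiresAt": null,
    }))?;

    // The client-facing key is `<8-char id><sha256 hex>`; derive it from the id.
    let id = std::str::from_utf8(&key.id).unwrap();
    let client_key = auth.generate_key(id).unwrap();

    // Granted action on a granted index: both checks pass.
    assert!(auth.authenticate(client_key.as_bytes(), Action::Search, Some("movies"))?);
    // Action not granted to this key: authorization fails.
    assert!(!auth.authenticate(client_key.as_bytes(), Action::IndexesAdd, Some("movies"))?);
    Ok(())
}
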
pub struct AuthFilter {
    pub search_rules: SearchRules,
    pub allow_index_creation: bool,
}

impl Default for AuthFilter {
    fn default() -> Self {
        Self {
            search_rules: SearchRules::default(),
            allow_index_creation: true,
        }
    }
}

/// Transparent wrapper around a list of allowed indexes with the search rules to apply for each.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(untagged)]
pub enum SearchRules {
    Set(HashSet<String>),
    Map(HashMap<String, Option<IndexSearchRules>>),
}

impl Default for SearchRules {
    fn default() -> Self {
        Self::Set(Some("*".to_string()).into_iter().collect())
    }
}

impl SearchRules {
    pub fn is_index_authorized(&self, index: &str) -> bool {
        match self {
            Self::Set(set) => set.contains("*") || set.contains(index),
            Self::Map(map) => map.contains_key("*") || map.contains_key(index),
        }
    }

    pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
        match self {
            Self::Set(set) => {
                if set.contains("*") || set.contains(index) {
                    Some(IndexSearchRules::default())
                } else {
                    None
                }
            }
            Self::Map(map) => map
                .get(index)
                .or_else(|| map.get("*"))
                .map(|isr| isr.clone().unwrap_or_default()),
        }
    }
}

impl IntoIterator for SearchRules {
    type Item = (String, IndexSearchRules);
    type IntoIter = Box<dyn Iterator<Item = Self::Item>>;

    fn into_iter(self) -> Self::IntoIter {
        match self {
            Self::Set(array) => {
                Box::new(array.into_iter().map(|i| (i, IndexSearchRules::default())))
            }
            Self::Map(map) => {
                Box::new(map.into_iter().map(|(i, isr)| (i, isr.unwrap_or_default())))
            }
        }
    }
}

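Because of `#[serde(untagged)]`, both JSON shapes below deserialize into `SearchRules`: an array becomes the `Set` variant, an object the `Map` variant. A short sketch:

use serde_json::json;

fn demo() {
    // Array of index names -> SearchRules::Set.
    let set: SearchRules = serde_json::from_value(json!(["products", "reviews"])).unwrap();
    assert!(set.is_index_authorized("products"));
    assert!(!set.is_index_authorized("movies"));

    // Object mapping index names to optional per-index rules -> SearchRules::Map.
    let map: SearchRules = serde_json::from_value(json!({
        "products": { "filter": "price > 10" },
        "reviews": null
    }))
    .unwrap();
    // A `null` entry falls back to the default (empty) IndexSearchRules.
    assert!(map.get_index_search_rules("reviews").is_some());
}
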
/// Contains the rules to apply on the top of the search query for a specific index.
///
/// filter: search filter to apply in addition to query filters.
#[derive(Debug, Serialize, Deserialize, Default, Clone)]
pub struct IndexSearchRules {
    pub filter: Option<serde_json::Value>,
}

fn generate_key(master_key: &[u8], keyid: &str) -> String {
    let key = [keyid.as_bytes(), master_key].concat();
    let sha = Sha256::digest(&key);
    format!("{}{:x}", keyid, sha)
}

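A quick stand-alone check of this derivation (the master key and id are made up): the client-facing key is the 8-character key id followed by the lowercase hex of `sha256(keyid || master_key)`.

use sha2::{Digest, Sha256};

fn main() {
    let master_key = b"MASTER_KEY"; // made-up master key
    let keyid = "abcd1234";
    let sha = Sha256::digest(&[keyid.as_bytes(), master_key.as_slice()].concat());
    let key = format!("{}{:x}", keyid, sha);
    assert!(key.starts_with(keyid));
    assert_eq!(key.len(), 8 + 64); // 8-char id + 32-byte digest in hex
}
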
fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
    store.put_api_key(Key::default_admin())?;
    store.put_api_key(Key::default_search())?;

    Ok(())
}

meilisearch-auth/src/store.rs (new file, 256 lines)
@@ -0,0 +1,256 @@
use enum_iterator::IntoEnumIterator;
use std::borrow::Cow;
use std::cmp::Reverse;
use std::convert::TryFrom;
use std::convert::TryInto;
use std::fs::create_dir_all;
use std::path::Path;
use std::str;
use std::sync::Arc;

use heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
use heed::{Database, Env, EnvOpenOptions, RwTxn};
use time::OffsetDateTime;

use super::error::Result;
use super::{Action, Key};

const AUTH_STORE_SIZE: usize = 1_073_741_824; // 1 GiB
pub const KEY_ID_LENGTH: usize = 8;
const AUTH_DB_PATH: &str = "auth";
const KEY_DB_NAME: &str = "api-keys";
const KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME: &str = "keyid-action-index-expiration";

pub type KeyId = [u8; KEY_ID_LENGTH];

#[derive(Clone)]
pub struct HeedAuthStore {
    env: Arc<Env>,
    keys: Database<ByteSlice, SerdeJson<Key>>,
    action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<OffsetDateTime>>>,
    should_close_on_drop: bool,
}

impl Drop for HeedAuthStore {
    fn drop(&mut self) {
        if self.should_close_on_drop && Arc::strong_count(&self.env) == 1 {
            self.env.as_ref().clone().prepare_for_closing();
        }
    }
}

pub fn open_auth_store_env(path: &Path) -> heed::Result<heed::Env> {
    let mut options = EnvOpenOptions::new();
    options.map_size(AUTH_STORE_SIZE); // 1 GiB
    options.max_dbs(2);
    options.open(path)
}

impl HeedAuthStore {
    pub fn new(path: impl AsRef<Path>) -> Result<Self> {
        let path = path.as_ref().join(AUTH_DB_PATH);
        create_dir_all(&path)?;
        let env = Arc::new(open_auth_store_env(path.as_ref())?);
        let keys = env.create_database(Some(KEY_DB_NAME))?;
        let action_keyid_index_expiration =
            env.create_database(Some(KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME))?;
        Ok(Self {
            env,
            keys,
            action_keyid_index_expiration,
            should_close_on_drop: true,
        })
    }

    pub fn set_drop_on_close(&mut self, v: bool) {
        self.should_close_on_drop = v;
    }

    pub fn is_empty(&self) -> Result<bool> {
        let rtxn = self.env.read_txn()?;

        Ok(self.keys.len(&rtxn)? == 0)
    }

    pub fn put_api_key(&self, key: Key) -> Result<Key> {
        let mut wtxn = self.env.write_txn()?;
        self.keys.put(&mut wtxn, &key.id, &key)?;

        let id = key.id;
        // delete key from inverted database before refilling it.
        self.delete_key_from_inverted_db(&mut wtxn, &id)?;
        // create inverted database.
        let db = self.action_keyid_index_expiration;

        let actions = if key.actions.contains(&Action::All) {
            // if key.actions contains All, we iterate over all actions.
            Action::into_enum_iter().collect()
        } else {
            key.actions.clone()
        };

        let no_index_restriction = key.indexes.contains(&"*".to_owned());
        for action in actions {
            if no_index_restriction {
                // If there is no index restriction we put None.
                db.put(&mut wtxn, &(&id, &action, None), &key.expires_at)?;
            } else {
                // else we create a key for each index.
                for index in key.indexes.iter() {
                    db.put(
                        &mut wtxn,
                        &(&id, &action, Some(index.as_bytes())),
                        &key.expires_at,
                    )?;
                }
            }
        }

        wtxn.commit()?;

        Ok(key)
    }

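For reference, the loop above fans a key out into one inverted-index entry per (action, index) combination. A stand-alone sketch of that fan-out, with hypothetical names and string actions standing in for the real `Action` enum:

fn fan_out(actions: &[&str], indexes: &[&str]) -> Vec<(String, String)> {
    let mut entries = Vec::new();
    for action in actions {
        for index in indexes {
            entries.push((action.to_string(), index.to_string()));
        }
    }
    entries
}

fn main() {
    let entries = fan_out(&["search", "documents.add"], &["movies", "books"]);
    assert_eq!(entries.len(), 4); // 2 actions × 2 indexes
}
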
    pub fn get_api_key(&self, key: impl AsRef<str>) -> Result<Option<Key>> {
        let rtxn = self.env.read_txn()?;
        match self.get_key_id(key.as_ref().as_bytes()) {
            Some(id) => self.keys.get(&rtxn, &id).map_err(|e| e.into()),
            None => Ok(None),
        }
    }

    pub fn delete_api_key(&self, key: impl AsRef<str>) -> Result<bool> {
        let mut wtxn = self.env.write_txn()?;
        let existing = match self.get_key_id(key.as_ref().as_bytes()) {
            Some(id) => {
                let existing = self.keys.delete(&mut wtxn, &id)?;
                self.delete_key_from_inverted_db(&mut wtxn, &id)?;
                existing
            }
            None => false,
        };
        wtxn.commit()?;

        Ok(existing)
    }

    pub fn list_api_keys(&self) -> Result<Vec<Key>> {
        let mut list = Vec::new();
        let rtxn = self.env.read_txn()?;
        for result in self.keys.remap_key_type::<DecodeIgnore>().iter(&rtxn)? {
            let (_, content) = result?;
            list.push(content);
        }
        list.sort_unstable_by_key(|k| Reverse(k.created_at));
        Ok(list)
    }

    pub fn get_expiration_date(
        &self,
        key: &[u8],
        action: Action,
        index: Option<&[u8]>,
    ) -> Result<Option<Option<OffsetDateTime>>> {
        let rtxn = self.env.read_txn()?;
        match self.get_key_id(key) {
            Some(id) => {
                let tuple = (&id, &action, index);
                Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
            }
            None => Ok(None),
        }
    }

    pub fn prefix_first_expiration_date(
        &self,
        key: &[u8],
        action: Action,
    ) -> Result<Option<Option<OffsetDateTime>>> {
        let rtxn = self.env.read_txn()?;
        match self.get_key_id(key) {
            Some(id) => {
                let tuple = (&id, &action, None);
                Ok(self
                    .action_keyid_index_expiration
                    .prefix_iter(&rtxn, &tuple)?
                    .next()
                    .transpose()?
                    .map(|(_, expiration)| expiration))
            }
            None => Ok(None),
        }
    }

    pub fn get_key_id(&self, key: &[u8]) -> Option<KeyId> {
        try_split_array_at::<_, KEY_ID_LENGTH>(key).map(|(id, _)| *id)
    }

    fn delete_key_from_inverted_db(&self, wtxn: &mut RwTxn, key: &KeyId) -> Result<()> {
        let mut iter = self
            .action_keyid_index_expiration
            .remap_types::<ByteSlice, DecodeIgnore>()
            .prefix_iter_mut(wtxn, key)?;
        while iter.next().transpose()?.is_some() {
            // safety: we don't keep references from inside the LMDB database.
            unsafe { iter.del_current()? };
        }

        Ok(())
    }
}

/// Codec allowing to retrieve the expiration date of an action,
/// optionally on a specific index, for a given key.
pub struct KeyIdActionCodec;

impl<'a> heed::BytesDecode<'a> for KeyIdActionCodec {
    type DItem = (KeyId, Action, Option<&'a [u8]>);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (key_id, action_bytes) = try_split_array_at(bytes)?;
        let (action_bytes, index) = match try_split_array_at(action_bytes)? {
            (action, []) => (action, None),
            (action, index) => (action, Some(index)),
        };
        let action = Action::from_repr(u8::from_be_bytes(*action_bytes))?;

        Some((*key_id, action, index))
    }
}

impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
    type EItem = (&'a KeyId, &'a Action, Option<&'a [u8]>);

    fn bytes_encode((key_id, action, index): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::new();

        bytes.extend_from_slice(*key_id);
        let action_bytes = u8::to_be_bytes(action.repr());
        bytes.extend_from_slice(&action_bytes);
        if let Some(index) = index {
            bytes.extend_from_slice(index);
        }

        Some(Cow::Owned(bytes))
    }
}

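The entries this codec reads and writes have a fixed layout: the 8-byte key id, one big-endian action byte, then the optional index bytes. A small stand-alone sketch (the action byte value `0x01` is a made-up example, not a confirmed `Action` repr):

fn encode(key_id: &[u8; 8], action_repr: u8, index: Option<&[u8]>) -> Vec<u8> {
    // [ 8-byte key id | 1-byte action | index bytes, if any ]
    let mut bytes = Vec::new();
    bytes.extend_from_slice(key_id);
    bytes.push(action_repr);
    if let Some(index) = index {
        bytes.extend_from_slice(index);
    }
    bytes
}

fn main() {
    let encoded = encode(b"abcd1234", 0x01, Some(b"movies"));
    assert_eq!(encoded.len(), 8 + 1 + 6);
    assert_eq!(&encoded[..8], b"abcd1234");
}
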
/// Divides one slice into two at an index, returns `None` if mid is out of bounds.
pub fn try_split_at<T>(slice: &[T], mid: usize) -> Option<(&[T], &[T])> {
    if mid <= slice.len() {
        Some(slice.split_at(mid))
    } else {
        None
    }
}

/// Divides one slice into an array and the tail at an index,
/// returns `None` if `N` is out of bounds.
pub fn try_split_array_at<T, const N: usize>(slice: &[T]) -> Option<(&[T; N], &[T])>
where
    [T; N]: for<'a> TryFrom<&'a [T]>,
{
    let (head, tail) = try_split_at(slice, N)?;
    let head = head.try_into().ok()?;
    Some((head, tail))
}

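A short usage sketch, mirroring how `get_key_id` above splits a full key into its 8-byte id prefix and the remaining hash tail (the key bytes are illustrative):

fn main() {
    let key: &[u8] = b"abcd1234deadbeef";
    let (id, tail) = try_split_array_at::<u8, 8>(key).unwrap();
    assert_eq!(id, b"abcd1234");
    assert_eq!(tail, b"deadbeef");
    // A slice shorter than N yields None instead of panicking.
    assert!(try_split_array_at::<u8, 8>(b"short").is_none());
}
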

meilisearch-error/Cargo.toml
@@ -1,9 +1,15 @@
 [package]
 name = "meilisearch-error"
-version = "0.24.0"
+version = "0.26.1"
 authors = ["marin <postma.marin@protonmail.com>"]
-edition = "2018"
+edition = "2021"

 [dependencies]
-actix-http = "=3.0.0-beta.10"
+actix-web = { version = "4", default-features = false }
+proptest = { version = "1.0.0", optional = true }
+proptest-derive = { version = "0.3.0", optional = true }
 serde = { version = "1.0.130", features = ["derive"] }
 serde_json = "1.0.69"
+
+[features]
+test-traits = ["proptest", "proptest-derive"]

meilisearch-error/src/lib.rs
@@ -1,8 +1,75 @@
 use std::fmt;

-use actix_http::http::StatusCode;
+use actix_web::{self as aweb, http::StatusCode, HttpResponseBuilder};
 use serde::{Deserialize, Serialize};

+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
+pub struct ResponseError {
+    #[serde(skip)]
+    #[cfg_attr(
+        feature = "test-traits",
+        proptest(strategy = "strategy::status_code_strategy()")
+    )]
+    code: StatusCode,
+    message: String,
+    #[serde(rename = "code")]
+    error_code: String,
+    #[serde(rename = "type")]
+    error_type: String,
+    #[serde(rename = "link")]
+    error_link: String,
+}
+
+impl ResponseError {
+    pub fn from_msg(message: String, code: Code) -> Self {
+        Self {
+            code: code.http(),
+            message,
+            error_code: code.err_code().error_name.to_string(),
+            error_type: code.type_(),
+            error_link: code.url(),
+        }
+    }
+}
+
+impl fmt::Display for ResponseError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.message.fmt(f)
+    }
+}
+
+impl std::error::Error for ResponseError {}
+
+impl<T> From<T> for ResponseError
+where
+    T: ErrorCode,
+{
+    fn from(other: T) -> Self {
+        Self {
+            code: other.http_status(),
+            message: other.to_string(),
+            error_code: other.error_name(),
+            error_type: other.error_type(),
+            error_link: other.error_url(),
+        }
+    }
+}
+
+impl aweb::error::ResponseError for ResponseError {
+    fn error_response(&self) -> aweb::HttpResponse {
+        let json = serde_json::to_vec(self).unwrap();
+        HttpResponseBuilder::new(self.status_code())
+            .content_type("application/json")
+            .body(json)
+    }
+
+    fn status_code(&self) -> StatusCode {
+        self.code
+    }
+}
+
 pub trait ErrorCode: std::error::Error {
     fn error_code(&self) -> Code;

@@ -91,6 +158,13 @@ pub enum Code {
     MissingContentType,
     MalformedPayload,
     MissingPayload,
+
+    ApiKeyNotFound,
+    MissingParameter,
+    InvalidApiKeyActions,
+    InvalidApiKeyIndexes,
+    InvalidApiKeyExpiresAt,
+    InvalidApiKeyDescription,
 }

 impl Code {
@@ -181,6 +255,22 @@ impl Code {
                 ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
             }
             MissingPayload => ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST),
+
+            // error related to keys
+            ApiKeyNotFound => ErrCode::invalid("api_key_not_found", StatusCode::NOT_FOUND),
+            MissingParameter => ErrCode::invalid("missing_parameter", StatusCode::BAD_REQUEST),
+            InvalidApiKeyActions => {
+                ErrCode::invalid("invalid_api_key_actions", StatusCode::BAD_REQUEST)
+            }
+            InvalidApiKeyIndexes => {
+                ErrCode::invalid("invalid_api_key_indexes", StatusCode::BAD_REQUEST)
+            }
+            InvalidApiKeyExpiresAt => {
+                ErrCode::invalid("invalid_api_key_expires_at", StatusCode::BAD_REQUEST)
+            }
+            InvalidApiKeyDescription => {
+                ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST)
+            }
         }
     }

@@ -237,3 +327,27 @@ impl ErrCode {
         }
     }
 }

+#[cfg(feature = "test-traits")]
+mod strategy {
+    use proptest::strategy::Strategy;
+
+    use super::*;
+
+    pub(super) fn status_code_strategy() -> impl Strategy<Value = StatusCode> {
+        (100..999u16).prop_map(|i| StatusCode::from_u16(i).unwrap())
+    }
+}
+
+#[macro_export]
+macro_rules! internal_error {
+    ($target:ty : $($other:path), *) => {
+        $(
+            impl From<$other> for $target {
+                fn from(other: $other) -> Self {
+                    Self::Internal(Box::new(other))
+                }
+            }
+        )*
+    }
+}
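
A hypothetical consumer of the `internal_error!` macro above: each listed error type gets a `From` impl that funnels it into the target's `Internal` variant, so `?` works directly. The error enum and file path below are made up for illustration:

#[derive(Debug)]
enum MyError {
    Internal(Box<dyn std::error::Error>),
}

internal_error!(MyError: std::io::Error, std::str::Utf8Error);

fn read() -> Result<String, MyError> {
    let bytes = std::fs::read("config.json")?; // io::Error -> MyError::Internal
    Ok(std::str::from_utf8(&bytes)?.to_string()) // Utf8Error -> MyError::Internal
}
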
meilisearch-http/Cargo.toml
@@ -1,17 +1,17 @@
 [package]
 authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
-description = "MeiliSearch HTTP server"
-edition = "2018"
+description = "Meilisearch HTTP server"
+edition = "2021"
 license = "MIT"
 name = "meilisearch-http"
-version = "0.24.0"
+version = "0.26.1"

 [[bin]]
 name = "meilisearch"
 path = "src/main.rs"

 [build-dependencies]
-actix-web-static-files = { git = "https://github.com/MarinPostma/actix-web-static-files.git", rev = "39d8006", optional = true }
+static-files = { version = "0.2.1", optional = true }
 anyhow = { version = "1.0.43", optional = true }
 cargo_toml = { version = "0.9", optional = true }
 hex = { version = "0.4.3", optional = true }
@@ -22,16 +22,16 @@ vergen = { version = "5.1.15", default-features = false, features = ["git"] }
 zip = { version = "0.5.13", optional = true }

 [dependencies]
-actix-cors = { git = "https://github.com/MarinPostma/actix-extras.git", rev = "963ac94d" }
-actix-web = { version = "4.0.0-beta.9", features = ["rustls"] }
-actix-web-static-files = { git = "https://github.com/MarinPostma/actix-web-static-files.git", rev = "39d8006", optional = true }
+actix-cors = "0.6"
+actix-web = { version = "4", features = ["rustls"] }
+actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
 anyhow = { version = "1.0.43", features = ["backtrace"] }
+arc-swap = "1.3.2"
 async-stream = "0.3.2"
 async-trait = "0.1.51"
-arc-swap = "1.3.2"
-byte-unit = { version = "4.0.12", default-features = false, features = ["std"] }
+bstr = "0.2.17"
+byte-unit = { version = "4.0.12", default-features = false, features = ["std", "serde"] }
 bytes = "1.1.0"
-chrono = { version = "0.4.19", features = ["serde"] }
 crossbeam-channel = "0.5.1"
 either = "1.6.1"
 env_logger = "0.9.0"
@@ -42,40 +42,47 @@ futures-util = "0.3.17"
 heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
 http = "0.2.4"
 indexmap = { version = "1.7.0", features = ["serde-1"] }
+iso8601-duration = "0.1.0"
 itertools = "0.10.1"
+jsonwebtoken = "7"
 log = "0.4.14"
-meilisearch-lib = { path = "../meilisearch-lib" }
+meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-error = { path = "../meilisearch-error" }
-meilisearch-tokenizer = { git = "https://github.com/meilisearch/tokenizer.git", tag = "v0.2.5" }
+meilisearch-lib = { path = "../meilisearch-lib" }
 mime = "0.3.16"
 num_cpus = "1.13.0"
+obkv = "0.2.0"
 once_cell = "1.8.0"
 parking_lot = "0.11.2"
 platform-dirs = "0.3.0"
 rand = "0.8.4"
 rayon = "1.5.1"
 regex = "1.5.4"
-rustls = "0.19.1"
-segment = { version = "0.1.2", optional = true }
+rustls = "0.20.2"
+rustls-pemfile = "0.2"
+segment = { version = "0.2.0", optional = true }
 serde = { version = "1.0.130", features = ["derive"] }
 serde_json = { version = "1.0.67", features = ["preserve_order"] }
 sha2 = "0.9.6"
 siphasher = "0.3.7"
 slice-group-by = "0.2.6"
-structopt = "0.3.23"
+static-files = { version = "0.2.1", optional = true }
+clap = { version = "3.0", features = ["derive", "env"] }
+sysinfo = "0.20.2"
 tar = "0.4.37"
 tempfile = "3.2.0"
 thiserror = "1.0.28"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tokio = { version = "1.11.0", features = ["full"] }
+tokio-stream = "0.1.7"
 uuid = { version = "0.8.2", features = ["serde"] }
 walkdir = "2.3.2"
-obkv = "0.2.0"
-pin-project = "1.0.8"
-sysinfo = "0.20.2"
-tokio-stream = "0.1.7"
+pin-project-lite = "0.2.8"

 [dev-dependencies]
 actix-rt = "2.2.0"
 assert-json-diff = "2.0.1"
 maplit = "1.0.2"
 paste = "1.0.5"
 serde_url_params = "0.2.1"
 urlencoding = "2.1.0"
@@ -83,6 +90,7 @@ urlencoding = "2.1.0"

 [features]
 mini-dashboard = [
     "actix-web-static-files",
+    "static-files",
     "anyhow",
     "cargo_toml",
     "hex",
@@ -98,5 +106,5 @@ default = ["analytics", "mini-dashboard"]
 tikv-jemallocator = "0.4.1"

 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.5/build.zip"
-sha1 = "1d955ea91b7691bd6fc207cb39866b82210783f0"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.9/build.zip"
+sha1 = "b1833c3e5dc6b5d9d519ae4834935ae6c8a47024"

meilisearch-http/build.rs
@@ -16,11 +16,11 @@ mod mini_dashboard {
     use std::io::{Cursor, Read, Write};
     use std::path::PathBuf;

-    use actix_web_static_files::resource_dir;
     use anyhow::Context;
     use cargo_toml::Manifest;
     use reqwest::blocking::get;
     use sha1::{Digest, Sha1};
+    use static_files::resource_dir;

     pub fn setup_mini_dashboard() -> anyhow::Result<()> {
         let cargo_manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());

meilisearch-http/src/analytics/mod.rs
@@ -29,12 +29,12 @@ pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
 #[cfg(all(not(debug_assertions), feature = "analytics"))]
 pub type SearchAggregator = segment_analytics::SearchAggregator;

-/// The MeiliSearch config dir:
-/// `~/.config/MeiliSearch` on *NIX or *BSD.
+/// The Meilisearch config dir:
+/// `~/.config/Meilisearch` on *NIX or *BSD.
 /// `~/Library/ApplicationSupport` on macOS.
 /// `%APPDATA` (= `C:\Users%USERNAME%\AppData\Roaming`) on windows.
 static MEILISEARCH_CONFIG_PATH: Lazy<Option<PathBuf>> =
-    Lazy::new(|| AppDirs::new(Some("MeiliSearch"), false).map(|appdir| appdir.config_dir));
+    Lazy::new(|| AppDirs::new(Some("Meilisearch"), false).map(|appdir| appdir.config_dir));

 fn config_user_id_path(db_path: &Path) -> Option<PathBuf> {
     db_path
@@ -44,13 +44,13 @@ fn config_user_id_path(db_path: &Path) -> Option<PathBuf> {
         path.join("instance-uid")
             .display()
             .to_string()
-            .replace("/", "-")
+            .replace('/', "-")
     })
     .zip(MEILISEARCH_CONFIG_PATH.as_ref())
     .map(|(filename, config_path)| config_path.join(filename.trim_start_matches('-')))
 }

-/// Look for the instance-uid in the `data.ms` or in `~/.config/MeiliSearch/path-to-db-instance-uid`
+/// Look for the instance-uid in the `data.ms` or in `~/.config/Meilisearch/path-to-db-instance-uid`
 fn find_user_id(db_path: &Path) -> Option<String> {
     fs::read_to_string(db_path.join("instance-uid"))
         .ok()

meilisearch-http/src/analytics/segment_analytics.rs
@@ -1,12 +1,13 @@
 use std::collections::{BinaryHeap, HashMap, HashSet};
 use std::fs;
-use std::path::Path;
+use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use std::time::{Duration, Instant};

 use actix_web::http::header::USER_AGENT;
 use actix_web::HttpRequest;
 use http::header::CONTENT_TYPE;
+use meilisearch_auth::SearchRules;
 use meilisearch_lib::index::{SearchQuery, SearchResult};
 use meilisearch_lib::index_controller::Stats;
 use meilisearch_lib::MeiliSearch;
@@ -16,6 +17,7 @@ use segment::message::{Identify, Track, User};
 use segment::{AutoBatcher, Batcher, HttpClient};
 use serde_json::{json, Value};
 use sysinfo::{DiskExt, System, SystemExt};
+use time::OffsetDateTime;
 use tokio::select;
 use tokio::sync::mpsc::{self, Receiver, Sender};
 use uuid::Uuid;
@@ -75,7 +77,30 @@ impl SegmentAnalytics {

         let client = HttpClient::default();
         let user = User::UserId { user_id };
-        let batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
+        let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
+
+        // If Meilisearch is launched for the first time:
+        // 1. Send an event Launched associated to the user `total_launch`.
+        // 2. Batch an event Launched with the real instance-id and send it in one hour.
+        if first_time_run {
+            let _ = batcher
+                .push(Track {
+                    user: User::UserId {
+                        user_id: "total_launch".to_string(),
+                    },
+                    event: "Launched".to_string(),
+                    ..Default::default()
+                })
+                .await;
+            let _ = batcher.flush().await;
+            let _ = batcher
+                .push(Track {
+                    user: user.clone(),
+                    event: "Launched".to_string(),
+                    ..Default::default()
+                })
+                .await;
+        }

         let (sender, inbox) = mpsc::channel(100); // How many analytics can we bufferize
@@ -95,10 +120,6 @@ impl SegmentAnalytics {
             sender,
             user: user.clone(),
         };
-        // batch the launched for the first time track event
-        if first_time_run {
-            this.publish("Launched".to_string(), json!({}), None);
-        }

         (Arc::new(this), user.to_string())
     }
@@ -191,10 +212,30 @@ impl Segment {
                 "server_provider": std::env::var("MEILI_SERVER_PROVIDER").ok(),
             })
         });
-        let infos = json!({
-            "env": opt.env.clone(),
-            "has_snapshot": opt.schedule_snapshot,
-        });
+        // The infos are all CLI options except those containing sensitive information.
+        // We consider information sensitive if it contains a path, an address, or a key.
+        let infos = {
+            // First we see if any sensitive fields were used.
+            let db_path = opt.db_path != PathBuf::from("./data.ms");
+            let import_dump = opt.import_dump.is_some();
+            let dumps_dir = opt.dumps_dir != PathBuf::from("dumps/");
+            let import_snapshot = opt.import_snapshot.is_some();
+            let snapshots_dir = opt.snapshot_dir != PathBuf::from("snapshots/");
+            let http_addr = opt.http_addr != "127.0.0.1:7700";
+
+            let mut infos = serde_json::to_value(opt).unwrap();
+
+            // Then we overwrite all sensitive fields with a boolean representing if
+            // the feature was used or not.
+            infos["db_path"] = json!(db_path);
+            infos["import_dump"] = json!(import_dump);
+            infos["dumps_dir"] = json!(dumps_dir);
+            infos["import_snapshot"] = json!(import_snapshot);
+            infos["snapshot_dir"] = json!(snapshots_dir);
+            infos["http_addr"] = json!(http_addr);
+
+            infos
+        };

         let number_of_documents = stats
             .indexes
@@ -216,7 +257,9 @@ impl Segment {

     async fn run(mut self, meilisearch: MeiliSearch) {
         const INTERVAL: Duration = Duration::from_secs(60 * 60); // one hour
-        let mut interval = tokio::time::interval(INTERVAL);
+        // The first batch must be sent after one hour.
+        let mut interval =
+            tokio::time::interval_at(tokio::time::Instant::now() + INTERVAL, INTERVAL);

         loop {
             select! {
@@ -238,7 +281,7 @@ impl Segment {
     }

     async fn tick(&mut self, meilisearch: MeiliSearch) {
-        if let Ok(stats) = meilisearch.get_all_stats().await {
+        if let Ok(stats) = meilisearch.get_all_stats(&SearchRules::default()).await {
             let _ = self
                 .batcher
                 .push(Identify {
@@ -280,6 +323,8 @@ impl Segment {

 #[derive(Default)]
 pub struct SearchAggregator {
+    timestamp: Option<OffsetDateTime>,
+
     // context
     user_agents: HashSet<String>,
@@ -304,10 +349,8 @@ pub struct SearchAggregator {
     used_syntax: HashMap<String, usize>,

     // q
-    // everytime a request has a q field, this field must be incremented by the number of terms
-    sum_of_terms_count: usize,
-    // everytime a request has a q field, this field must be incremented by one
-    total_number_of_q: usize,
+    // The maximum number of terms in a q request
     max_terms_number: usize,

     // pagination
     max_limit: usize,
@@ -317,6 +360,8 @@ impl SearchAggregator {
     pub fn from_query(query: &SearchQuery, request: &HttpRequest) -> Self {
         let mut ret = Self::default();
+        ret.timestamp = Some(OffsetDateTime::now_utc());

         ret.total_received = 1;
         ret.user_agents = extract_user_agents(request).into_iter().collect();
@@ -354,8 +399,7 @@ impl SearchAggregator {
         }

         if let Some(ref q) = query.q {
-            ret.total_number_of_q = 1;
-            ret.sum_of_terms_count = q.split_whitespace().count();
+            ret.max_terms_number = q.split_whitespace().count();
         }

         ret.max_limit = query.limit;
@@ -365,34 +409,46 @@ impl SearchAggregator {
     }

     pub fn succeed(&mut self, result: &SearchResult) {
-        self.total_succeeded += 1;
+        self.total_succeeded = self.total_succeeded.saturating_add(1);
         self.time_spent.push(result.processing_time_ms as usize);
     }

     /// Aggregate one [SearchAggregator] into another.
     pub fn aggregate(&mut self, mut other: Self) {
+        if self.timestamp.is_none() {
+            self.timestamp = other.timestamp;
+        }
+
         // context
         for user_agent in other.user_agents.into_iter() {
             self.user_agents.insert(user_agent);
         }
         // request
-        self.total_received += other.total_received;
-        self.total_succeeded += other.total_succeeded;
+        self.total_received = self.total_received.saturating_add(other.total_received);
+        self.total_succeeded = self.total_succeeded.saturating_add(other.total_succeeded);
         self.time_spent.append(&mut other.time_spent);
         // sort
         self.sort_with_geo_point |= other.sort_with_geo_point;
-        self.sort_sum_of_criteria_terms += other.sort_sum_of_criteria_terms;
-        self.sort_total_number_of_criteria += other.sort_total_number_of_criteria;
+        self.sort_sum_of_criteria_terms = self
+            .sort_sum_of_criteria_terms
+            .saturating_add(other.sort_sum_of_criteria_terms);
+        self.sort_total_number_of_criteria = self
+            .sort_total_number_of_criteria
+            .saturating_add(other.sort_total_number_of_criteria);
         // filter
         self.filter_with_geo_radius |= other.filter_with_geo_radius;
-        self.filter_sum_of_criteria_terms += other.filter_sum_of_criteria_terms;
-        self.filter_total_number_of_criteria += other.filter_total_number_of_criteria;
+        self.filter_sum_of_criteria_terms = self
+            .filter_sum_of_criteria_terms
+            .saturating_add(other.filter_sum_of_criteria_terms);
+        self.filter_total_number_of_criteria = self
+            .filter_total_number_of_criteria
+            .saturating_add(other.filter_total_number_of_criteria);
         for (key, value) in other.used_syntax.into_iter() {
-            *self.used_syntax.entry(key).or_insert(0) += value;
+            let used_syntax = self.used_syntax.entry(key).or_insert(0);
+            *used_syntax = used_syntax.saturating_add(value);
         }
         // q
-        self.sum_of_terms_count += other.sum_of_terms_count;
-        self.total_number_of_q += other.total_number_of_q;
+        self.max_terms_number = self.max_terms_number.max(other.max_terms_number);
         // pagination
         self.max_limit = self.max_limit.max(other.max_limit);
         self.max_offset = self.max_offset.max(other.max_offset);
@@ -407,12 +463,12 @@ impl SearchAggregator {
         // we get all the values in a sorted manner
         let time_spent = self.time_spent.into_sorted_vec();
         // We are only interested in the slowest value of the 99th fastest results
-        let time_spent = time_spent[percentile_99th as usize];
+        let time_spent = time_spent.get(percentile_99th as usize);

         let properties = json!({
             "user-agent": self.user_agents,
             "requests": {
-                "99th_response_time": format!("{:.2}", time_spent),
+                "99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
                 "total_succeeded": self.total_succeeded,
                 "total_failed": self.total_received.saturating_sub(self.total_succeeded), // just to be sure we never panic
                 "total_received": self.total_received,
@@ -427,7 +483,7 @@ impl SearchAggregator {
                 "most_used_syntax": self.used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
             },
             "q": {
-                "avg_terms_number": format!("{:.2}", self.sum_of_terms_count as f64 / self.total_number_of_q as f64),
+                "max_terms_number": self.max_terms_number,
             },
             "pagination": {
                 "max_limit": self.max_limit,
@@ -436,6 +492,7 @@ impl SearchAggregator {
         });

         Some(Track {
+            timestamp: self.timestamp,
             user: user.clone(),
             event: event_name.to_string(),
             properties,
@@ -447,6 +504,8 @@ impl SearchAggregator {

 #[derive(Default)]
 pub struct DocumentsAggregator {
+    timestamp: Option<OffsetDateTime>,
+
     // set to true when at least one request was received
     updated: bool,
@@ -465,6 +524,7 @@ impl DocumentsAggregator {
         request: &HttpRequest,
     ) -> Self {
         let mut ret = Self::default();
+        ret.timestamp = Some(OffsetDateTime::now_utc());

         ret.updated = true;
         ret.user_agents = extract_user_agents(request).into_iter().collect();
@@ -485,6 +545,10 @@ impl DocumentsAggregator {

     /// Aggregate one [DocumentsAggregator] into another.
     pub fn aggregate(&mut self, other: Self) {
+        if self.timestamp.is_none() {
+            self.timestamp = other.timestamp;
+        }
+
         self.updated |= other.updated;
         // we can't create a union because there is no `into_union` method
         for user_agent in other.user_agents.into_iter() {
@@ -511,6 +575,7 @@ impl DocumentsAggregator {
         });

         Some(Track {
+            timestamp: self.timestamp,
             user: user.clone(),
             event: event_name.to_string(),
             properties,

meilisearch-http/src/error.rs
@@ -1,13 +1,6 @@
-use std::error::Error;
-use std::fmt;
-
 use actix_web as aweb;
-use actix_web::body::Body;
-use actix_web::http::StatusCode;
-use actix_web::HttpResponseBuilder;
 use aweb::error::{JsonPayloadError, QueryPayloadError};
-use meilisearch_error::{Code, ErrorCode};
-use serde::{Deserialize, Serialize};
+use meilisearch_error::{Code, ErrorCode, ResponseError};

 #[derive(Debug, thiserror::Error)]
 pub enum MeilisearchHttpError {
@@ -36,71 +29,18 @@ impl From<MeilisearchHttpError> for aweb::Error {
     }
 }

-#[derive(Debug, Serialize, Deserialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct ResponseError {
-    #[serde(skip)]
-    code: StatusCode,
-    message: String,
-    #[serde(rename = "code")]
-    error_code: String,
-    #[serde(rename = "type")]
-    error_type: String,
-    #[serde(rename = "link")]
-    error_link: String,
-}
-
-impl fmt::Display for ResponseError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.message.fmt(f)
-    }
-}
-
-impl<T> From<T> for ResponseError
-where
-    T: ErrorCode,
-{
-    fn from(other: T) -> Self {
-        Self {
-            code: other.http_status(),
-            message: other.to_string(),
-            error_code: other.error_name(),
-            error_type: other.error_type(),
-            error_link: other.error_url(),
-        }
-    }
-}
-
-impl aweb::error::ResponseError for ResponseError {
-    fn error_response(&self) -> aweb::HttpResponse<Body> {
-        let json = serde_json::to_vec(self).unwrap();
-        HttpResponseBuilder::new(self.status_code())
-            .content_type("application/json")
-            .body(json)
-    }
-
-    fn status_code(&self) -> StatusCode {
-        self.code
-    }
-}
-
-impl fmt::Display for PayloadError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            PayloadError::Json(e) => e.fmt(f),
-            PayloadError::Query(e) => e.fmt(f),
-        }
-    }
-}
-
-#[derive(Debug)]
+#[derive(Debug, thiserror::Error)]
 pub enum PayloadError {
+    #[error("{0}")]
     Json(JsonPayloadError),
+    #[error("{0}")]
     Query(QueryPayloadError),
+    #[error("The json payload provided is malformed. `{0}`.")]
+    MalformedPayload(serde_json::error::Error),
+    #[error("A json payload is missing.")]
+    MissingPayload,
 }

-impl Error for PayloadError {}
-
 impl ErrorCode for PayloadError {
     fn error_code(&self) -> Code {
         match self {
@@ -110,7 +50,8 @@ impl ErrorCode for PayloadError {
                 JsonPayloadError::Payload(aweb::error::PayloadError::Overflow) => {
                     Code::PayloadTooLarge
                 }
-                JsonPayloadError::Deserialize(_) | JsonPayloadError::Payload(_) => Code::BadRequest,
+                JsonPayloadError::Payload(_) => Code::BadRequest,
+                JsonPayloadError::Deserialize(_) => Code::BadRequest,
                 JsonPayloadError::Serialize(_) => Code::Internal,
                 _ => Code::Internal,
             },
@@ -118,13 +59,29 @@ impl ErrorCode for PayloadError {
                 QueryPayloadError::Deserialize(_) => Code::BadRequest,
                 _ => Code::Internal,
             },
+            PayloadError::MissingPayload => Code::MissingPayload,
+            PayloadError::MalformedPayload(_) => Code::MalformedPayload,
         }
     }
 }

 impl From<JsonPayloadError> for PayloadError {
     fn from(other: JsonPayloadError) -> Self {
-        Self::Json(other)
+        match other {
+            JsonPayloadError::Deserialize(e)
+                if e.classify() == serde_json::error::Category::Eof
+                    && e.line() == 1
+                    && e.column() == 0 =>
+            {
+                Self::MissingPayload
+            }
+            JsonPayloadError::Deserialize(e)
+                if e.classify() != serde_json::error::Category::Data =>
+            {
+                Self::MalformedPayload(e)
+            }
+            _ => Self::Json(other),
+        }
    }
 }

meilisearch-http/src/extractors/authentication/error.rs
@@ -2,24 +2,21 @@ use meilisearch_error::{Code, ErrorCode};

 #[derive(Debug, thiserror::Error)]
 pub enum AuthenticationError {
-    #[error("The X-MEILI-API-KEY header is missing.")]
+    #[error("The Authorization header is missing. It must use the bearer authorization method.")]
     MissingAuthorizationHeader,
     #[error("The provided API key is invalid.")]
-    InvalidToken(String),
+    InvalidToken,
     // Triggered on configuration error.
     #[error("An internal error has occurred. `Irretrievable state`.")]
     IrretrievableState,
     #[error("An internal error has occurred. `Unknown authentication policy`.")]
     UnknownPolicy,
 }

 impl ErrorCode for AuthenticationError {
     fn error_code(&self) -> Code {
         match self {
             AuthenticationError::MissingAuthorizationHeader => Code::MissingAuthorizationHeader,
-            AuthenticationError::InvalidToken(_) => Code::InvalidToken,
+            AuthenticationError::InvalidToken => Code::InvalidToken,
             AuthenticationError::IrretrievableState => Code::Internal,
             AuthenticationError::UnknownPolicy => Code::Internal,
         }
     }
 }

meilisearch-http/src/extractors/authentication/mod.rs
@@ -1,84 +1,84 @@
 mod error;

-use std::any::{Any, TypeId};
-use std::collections::HashMap;
 use std::marker::PhantomData;
 use std::ops::Deref;
+use std::pin::Pin;

 use actix_web::FromRequest;
+use futures::future::err;
-use futures::future::{ok, Ready};
+use futures::Future;
+use meilisearch_error::{Code, ResponseError};

-use crate::error::ResponseError;
 use error::AuthenticationError;
+use meilisearch_auth::{AuthController, AuthFilter};

-macro_rules! create_policies {
-    ($($name:ident), *) => {
-        pub mod policies {
-            use std::collections::HashSet;
-            use crate::extractors::authentication::Policy;
-
-            $(
-                #[derive(Debug, Default)]
-                pub struct $name {
-                    inner: HashSet<Vec<u8>>
-                }
-
-                impl $name {
-                    pub fn new() -> Self {
-                        Self { inner: HashSet::new() }
-                    }
-
-                    pub fn add(&mut self, token: Vec<u8>) {
-                        self.inner.insert(token);
-                    }
-                }
-
-                impl Policy for $name {
-                    fn authenticate(&self, token: &[u8]) -> bool {
-                        self.inner.contains(token)
-                    }
-                }
-            )*
-        }
-    };
-}
-
-create_policies!(Public, Private, Admin);
-
-/// Instanciate a `Policies`, filled with the given policies.
-macro_rules! init_policies {
-    ($($name:ident), *) => {
-        {
-            let mut policies = crate::extractors::authentication::Policies::new();
-            $(
-                let policy = $name::new();
-                policies.insert(policy);
-            )*
-            policies
-        }
-    };
-}
-
-/// Adds user to all specified policies.
-macro_rules! create_users {
-    ($policies:ident, $($user:expr => { $($policy:ty), * }), *) => {
-        {
-            $(
-                $(
-                    $policies.get_mut::<$policy>().map(|p| p.add($user.to_owned()));
-                )*
-            )*
-        }
-    };
-}
-
-pub struct GuardedData<T, D> {
+pub struct GuardedData<P, D> {
     data: D,
-    _marker: PhantomData<T>,
+    filters: AuthFilter,
+    _marker: PhantomData<P>,
 }

-impl<T, D> Deref for GuardedData<T, D> {
+impl<P, D> GuardedData<P, D> {
+    pub fn filters(&self) -> &AuthFilter {
+        &self.filters
+    }
+
+    async fn auth_bearer(
+        auth: AuthController,
+        token: String,
+        index: Option<String>,
+        data: Option<D>,
+    ) -> Result<Self, ResponseError>
+    where
+        P: Policy + 'static,
+    {
+        match Self::authenticate(auth, token, index).await? {
+            Some(filters) => match data {
+                Some(data) => Ok(Self {
+                    data,
+                    filters,
+                    _marker: PhantomData,
+                }),
+                None => Err(AuthenticationError::IrretrievableState.into()),
+            },
+            None => Err(AuthenticationError::InvalidToken.into()),
+        }
+    }
+
+    async fn auth_token(auth: AuthController, data: Option<D>) -> Result<Self, ResponseError>
+    where
+        P: Policy + 'static,
+    {
+        match Self::authenticate(auth, String::new(), None).await? {
+            Some(filters) => match data {
+                Some(data) => Ok(Self {
+                    data,
+                    filters,
+                    _marker: PhantomData,
+                }),
+                None => Err(AuthenticationError::IrretrievableState.into()),
+            },
+            None => Err(AuthenticationError::MissingAuthorizationHeader.into()),
+        }
+    }
+
+    async fn authenticate(
+        auth: AuthController,
+        token: String,
+        index: Option<String>,
+    ) -> Result<Option<AuthFilter>, ResponseError>
+    where
+        P: Policy + 'static,
+    {
+        Ok(tokio::task::spawn_blocking(move || {
+            P::authenticate(auth, token.as_ref(), index.as_deref())
+        })
+        .await
+        .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))?)
+    }
+}
+
+impl<P, D> Deref for GuardedData<P, D> {
     type Target = D;

     fn deref(&self) -> &Self::Target {
@@ -86,98 +86,173 @@ impl<T, D> Deref for GuardedData<T, D> {
     }
 }

-pub trait Policy {
-    fn authenticate(&self, token: &[u8]) -> bool;
-}
-
-#[derive(Debug)]
-pub struct Policies {
-    inner: HashMap<TypeId, Box<dyn Any>>,
-}
-
-impl Policies {
-    pub fn new() -> Self {
-        Self {
-            inner: HashMap::new(),
-        }
-    }
-
-    pub fn insert<S: Policy + 'static>(&mut self, policy: S) {
-        self.inner.insert(TypeId::of::<S>(), Box::new(policy));
-    }
-
-    pub fn get<S: Policy + 'static>(&self) -> Option<&S> {
-        self.inner
-            .get(&TypeId::of::<S>())
-            .and_then(|p| p.downcast_ref::<S>())
-    }
-
-    pub fn get_mut<S: Policy + 'static>(&mut self) -> Option<&mut S> {
-        self.inner
-            .get_mut(&TypeId::of::<S>())
-            .and_then(|p| p.downcast_mut::<S>())
-    }
-}
-
-impl Default for Policies {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-
-pub enum AuthConfig {
-    NoAuth,
-    Auth(Policies),
-}
-
-impl Default for AuthConfig {
-    fn default() -> Self {
-        Self::NoAuth
-    }
-}
-
 impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D> {
-    type Config = AuthConfig;
-
     type Error = ResponseError;

-    type Future = Ready<Result<Self, Self::Error>>;
+    type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;

     fn from_request(
         req: &actix_web::HttpRequest,
         _payload: &mut actix_web::dev::Payload,
     ) -> Self::Future {
-        match req.app_data::<Self::Config>() {
-            Some(config) => match config {
-                AuthConfig::NoAuth => match req.app_data::<D>().cloned() {
-                    Some(data) => ok(Self {
-                        data,
-                        _marker: PhantomData,
-                    }),
-                    None => err(AuthenticationError::IrretrievableState.into()),
-                },
-                AuthConfig::Auth(policies) => match policies.get::<P>() {
-                    Some(policy) => match req.headers().get("x-meili-api-key") {
-                        Some(token) => {
-                            if policy.authenticate(token.as_bytes()) {
-                                match req.app_data::<D>().cloned() {
-                                    Some(data) => ok(Self {
-                                        data,
-                                        _marker: PhantomData,
-                                    }),
-                                    None => err(AuthenticationError::IrretrievableState.into()),
-                                }
-                            } else {
-                                let token = token.to_str().unwrap_or("unknown").to_string();
-                                err(AuthenticationError::InvalidToken(token).into())
-                            }
-                        }
-                        None => err(AuthenticationError::MissingAuthorizationHeader.into()),
-                    },
-                    None => err(AuthenticationError::UnknownPolicy.into()),
-                }
-            },
-            None => err(AuthenticationError::IrretrievableState.into()),
+        match req.app_data::<AuthController>().cloned() {
+            Some(auth) => match req
+                .headers()
+                .get("Authorization")
+                .map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' '))
+            {
+                Some(mut type_token) => match type_token.next() {
+                    Some("Bearer") => {
+                        // TODO: find a less hardcoded way?
+                        let index = req.match_info().get("index_uid");
+                        match type_token.next() {
+                            Some(token) => Box::pin(Self::auth_bearer(
+                                auth,
+                                token.to_string(),
+                                index.map(String::from),
+                                req.app_data::<D>().cloned(),
+                            )),
+                            None => Box::pin(err(AuthenticationError::InvalidToken.into())),
+                        }
+                    }
+                    _otherwise => {
+                        Box::pin(err(AuthenticationError::MissingAuthorizationHeader.into()))
+                    }
+                },
+                None => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
+            },
+            None => Box::pin(err(AuthenticationError::IrretrievableState.into())),
         }
     }
 }

+pub trait Policy {
+    fn authenticate(auth: AuthController, token: &str, index: Option<&str>) -> Option<AuthFilter>;
+}
+
+pub mod policies {
+    use jsonwebtoken::{dangerous_insecure_decode, decode, Algorithm, DecodingKey, Validation};
+    use once_cell::sync::Lazy;
+    use serde::{Deserialize, Serialize};
+    use time::OffsetDateTime;
+
+    use crate::extractors::authentication::Policy;
+    use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules};
+    // reexport actions in policies in order to be used in routes configuration.
+    pub use meilisearch_auth::actions;
+
+    pub static TENANT_TOKEN_VALIDATION: Lazy<Validation> = Lazy::new(|| Validation {
+        validate_exp: false,
+        algorithms: vec![Algorithm::HS256, Algorithm::HS384, Algorithm::HS512],
+        ..Default::default()
+    });
+
+    pub struct MasterPolicy;
+
+    impl Policy for MasterPolicy {
+        fn authenticate(
+            auth: AuthController,
+            token: &str,
+            _index: Option<&str>,
+        ) -> Option<AuthFilter> {
+            if let Some(master_key) = auth.get_master_key() {
+                if master_key == token {
+                    return Some(AuthFilter::default());
+                }
+            }
+
+            None
+        }
+    }
+
+    pub struct ActionPolicy<const A: u8>;
+
+    impl<const A: u8> Policy for ActionPolicy<A> {
+        fn authenticate(
+            auth: AuthController,
+            token: &str,
+            index: Option<&str>,
+        ) -> Option<AuthFilter> {
+            // authenticate if token is the master key.
+            if auth.get_master_key().map_or(true, |mk| mk == token) {
+                return Some(AuthFilter::default());
+            }
+
+            // Tenant token
+            if let Some(filters) = ActionPolicy::<A>::authenticate_tenant_token(&auth, token, index)
+            {
+                return Some(filters);
+            } else if let Some(action) = Action::from_repr(A) {
+                // API key
+                if let Ok(true) = auth.authenticate(token.as_bytes(), action, index) {
+                    return auth.get_key_filters(token, None).ok();
+                }
+            }
+
+            None
+        }
+    }
+
+    impl<const A: u8> ActionPolicy<A> {
+        fn authenticate_tenant_token(
+            auth: &AuthController,
+            token: &str,
+            index: Option<&str>,
+        ) -> Option<AuthFilter> {
+            // Only search action can be accessed by a tenant token.
+            if A != actions::SEARCH {
+                return None;
+            }
+
+            // get token fields without validating it.
+            let Claims {
+                search_rules,
+                exp,
+                api_key_prefix,
+            } = dangerous_insecure_decode::<Claims>(token).ok()?.claims;
+
+            // Check index access if an index restriction is provided.
+            if let Some(index) = index {
+                if !search_rules.is_index_authorized(index) {
+                    return None;
+                }
+            }
+
+            // Check if token is expired.
+            if let Some(exp) = exp {
+                if OffsetDateTime::now_utc().unix_timestamp() > exp {
+                    return None;
+                }
+            }
+
+            // check if parent key is authorized to do the action.
+            if auth
+                .is_key_authorized(api_key_prefix.as_bytes(), Action::Search, index)
+                .ok()?
+            {
+                // Check if tenant token is valid.
+                let key = auth.generate_key(&api_key_prefix)?;
+                decode::<Claims>(
+                    token,
+                    &DecodingKey::from_secret(key.as_bytes()),
+                    &TENANT_TOKEN_VALIDATION,
+                )
+                .ok()?;
+
+                return auth
+                    .get_key_filters(api_key_prefix, Some(search_rules))
+                    .ok();
+            }
+
+            None
+        }
+    }
+
+    #[derive(Debug, Serialize, Deserialize)]
+    #[serde(rename_all = "camelCase")]
+    struct Claims {
+        search_rules: SearchRules,
+        exp: Option<i64>,
+        api_key_prefix: String,
+    }
+}
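
For reference, a hedged mirror of the tenant-token payload that `Claims` deserializes (camelCase per the serde attribute). The struct below is a hypothetical copy for illustration, not the module's own private type; the prefix is the 8-character id of the signing API key, and the token itself is a JWT signed with that full key using HS256/384/512:

use serde::{Deserialize, Serialize};
use serde_json::json;

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct TenantClaims {
    search_rules: serde_json::Value,
    exp: Option<i64>,
    api_key_prefix: String,
}

fn main() {
    let payload = json!({
        "searchRules": { "movies": { "filter": "user_id = 1" } },
        "exp": 1_900_000_000,
        "apiKeyPrefix": "abcd1234", // first 8 chars of the signing API key
    });
    let claims: TenantClaims = serde_json::from_value(payload).unwrap();
    assert_eq!(claims.api_key_prefix, "abcd1234");
}
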
meilisearch-http/src/extractors/mod.rs
@@ -1,3 +1,4 @@
 pub mod payload;
 #[macro_use]
 pub mod authentication;
+pub mod sequential_extractor;

meilisearch-http/src/extractors/payload.rs
@@ -28,8 +28,6 @@ impl Default for PayloadConfig {
 }

 impl FromRequest for Payload {
-    type Config = PayloadConfig;
-
     type Error = PayloadError;

     type Future = Ready<Result<Payload, Self::Error>>;
@@ -39,7 +37,7 @@ impl FromRequest for Payload {
         let limit = req
             .app_data::<PayloadConfig>()
             .map(|c| c.limit)
-            .unwrap_or(Self::Config::default().limit);
+            .unwrap_or(PayloadConfig::default().limit);
         ready(Ok(Payload {
             payload: payload.take(),
             limit,
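
A stand-alone sketch of the fallback this hunk introduces: if no `PayloadConfig` was registered as app data, the extractor falls back to the default limit. The struct and default value below are assumptions for illustration, not the real ones from payload.rs:

#[derive(Clone)]
struct PayloadConfig {
    limit: usize,
}

impl Default for PayloadConfig {
    fn default() -> Self {
        Self { limit: 256 * 1024 } // assumed default limit
    }
}

fn effective_limit(registered: Option<&PayloadConfig>) -> usize {
    registered
        .map(|c| c.limit)
        .unwrap_or(PayloadConfig::default().limit)
}

fn main() {
    assert_eq!(effective_limit(None), 256 * 1024);
    assert_eq!(effective_limit(Some(&PayloadConfig { limit: 1024 })), 1024);
}
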
meilisearch-http/src/extractors/sequential_extractor.rs (new file, 148 lines)
@@ -0,0 +1,148 @@
#![allow(non_snake_case)]
use std::{future::Future, pin::Pin, task::Poll};

use actix_web::{dev::Payload, FromRequest, Handler, HttpRequest};
use pin_project_lite::pin_project;

/// `SeqHandler` is an actix `Handler` that enforces that extractor errors are returned in the
/// same order as the extractors are declared in the wrapped handler. This is needed because, by
/// default, actix resolves the extractors concurrently, whereas we always need the authentication
/// extractor to throw first.
#[derive(Clone)]
pub struct SeqHandler<H>(pub H);

pub struct SeqFromRequest<T>(T);

/// This macro implements `FromRequest` for handlers of arbitrary arity, except arity one, which
/// is not needed anyway.
macro_rules! gen_seq {
    ($ty:ident; $($T:ident)+) => {
        pin_project! {
            pub struct $ty<$($T: FromRequest), +> {
                $(
                    #[pin]
                    $T: ExtractFuture<$T::Future, $T, $T::Error>,
                )+
            }
        }

        impl<$($T: FromRequest), +> Future for $ty<$($T),+> {
            type Output = Result<SeqFromRequest<($($T),+)>, actix_web::Error>;

            fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {
                let mut this = self.project();

                let mut count_fut = 0;
                let mut count_finished = 0;

                $(
                    count_fut += 1;
                    match this.$T.as_mut().project() {
                        ExtractProj::Future { fut } => match fut.poll(cx) {
                            Poll::Ready(Ok(output)) => {
                                count_finished += 1;
                                let _ = this
                                    .$T
                                    .as_mut()
                                    .project_replace(ExtractFuture::Done { output });
                            }
                            Poll::Ready(Err(error)) => {
                                count_finished += 1;
                                let _ = this
                                    .$T
                                    .as_mut()
                                    .project_replace(ExtractFuture::Error { error });
                            }
                            Poll::Pending => (),
                        },
                        ExtractProj::Done { .. } => count_finished += 1,
                        ExtractProj::Error { .. } => {
                            // Short-circuit if all previous extractors have finished and we had an error.
                            if count_finished == count_fut {
                                match this.$T.project_replace(ExtractFuture::Empty) {
                                    ExtractReplaceProj::Error { error } => {
                                        return Poll::Ready(Err(error.into()))
                                    }
                                    _ => unreachable!("Invalid future state"),
                                }
                            } else {
                                count_finished += 1;
                            }
                        }
                        ExtractProj::Empty => unreachable!("From request polled after being finished. {}", stringify!($T)),
                    }
                )+

                if count_fut == count_finished {
                    let result = (
                        $(
                            match this.$T.project_replace(ExtractFuture::Empty) {
                                ExtractReplaceProj::Done { output } => output,
                                ExtractReplaceProj::Error { error } => return Poll::Ready(Err(error.into())),
                                _ => unreachable!("Invalid future state"),
                            },
                        )+
                    );

                    Poll::Ready(Ok(SeqFromRequest(result)))
                } else {
                    Poll::Pending
                }
            }
        }

        impl<$($T: FromRequest,)+> FromRequest for SeqFromRequest<($($T,)+)> {
            type Error = actix_web::Error;

            type Future = $ty<$($T),+>;

            fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
                $ty {
                    $(
                        $T: ExtractFuture::Future {
                            fut: $T::from_request(req, payload),
                        },
                    )+
                }
            }
        }

        impl<Han, $($T: FromRequest),+> Handler<SeqFromRequest<($($T),+)>> for SeqHandler<Han>
        where
            Han: Handler<($($T),+)>,
        {
            type Output = Han::Output;
            type Future = Han::Future;

            fn call(&self, args: SeqFromRequest<($($T),+)>) -> Self::Future {
                self.0.call(args.0)
            }
        }
    };
}

// Not working for a single argument, but then, it is not really necessary.
// gen_seq! { SeqFromRequestFut1; A }
gen_seq! { SeqFromRequestFut2; A B }
gen_seq! { SeqFromRequestFut3; A B C }
gen_seq! { SeqFromRequestFut4; A B C D }
gen_seq! { SeqFromRequestFut5; A B C D E }
gen_seq! { SeqFromRequestFut6; A B C D E F }

pin_project! {
    #[project = ExtractProj]
    #[project_replace = ExtractReplaceProj]
    enum ExtractFuture<Fut, Res, Err> {
        Future {
            #[pin]
            fut: Fut,
        },
        Done {
            output: Res,
        },
        Error {
            error: Err,
        },
        Empty,
    }
}
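In use, `SeqHandler` simply wraps the handler at route-registration time, exactly as the route files later in this diff do. A sketch with an illustrative two-extractor handler (the wrapper only exists for handlers with two or more extractors, per the `gen_seq!` invocations above):

use actix_web::{web, HttpResponse};
use serde_json::Value;

// An illustrative two-extractor handler: with SeqHandler, `path` is resolved
// (and allowed to fail) before `body`, left to right.
async fn create_thing(path: web::Path<String>, body: web::Json<Value>) -> HttpResponse {
    HttpResponse::Created().json(serde_json::json!({ "uid": path.into_inner(), "body": body.into_inner() }))
}

fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("/things/{uid}").route(web::post().to(SeqHandler(create_thing))));
}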
@@ -1,17 +1,18 @@
#![allow(rustdoc::private_intra_doc_links)]
#[macro_use]
pub mod error;
pub mod analytics;
mod task;
#[macro_use]
pub mod extractors;
pub mod analytics;
pub mod helpers;
pub mod option;
pub mod routes;

use std::sync::Arc;

use std::sync::{atomic::AtomicBool, Arc};
use std::time::Duration;

use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::AuthConfig;
use actix_web::error::JsonPayloadError;
use analytics::Analytics;
use error::PayloadError;
@@ -20,45 +21,33 @@ pub use option::Opt;

use actix_web::{web, HttpRequest};

use extractors::authentication::policies::*;
use extractors::payload::PayloadConfig;
use meilisearch_auth::AuthController;
use meilisearch_lib::MeiliSearch;
use sha2::Digest;

#[derive(Clone)]
pub struct ApiKeys {
    pub public: Option<String>,
    pub private: Option<String>,
    pub master: Option<String>,
}

impl ApiKeys {
    pub fn generate_missing_api_keys(&mut self) {
        if let Some(master_key) = &self.master {
            if self.private.is_none() {
                let key = format!("{}-private", master_key);
                let sha = sha2::Sha256::digest(key.as_bytes());
                self.private = Some(format!("{:x}", sha));
            }
            if self.public.is_none() {
                let key = format!("{}-public", master_key);
                let sha = sha2::Sha256::digest(key.as_bytes());
                self.public = Some(format!("{:x}", sha));
            }
        }
    }
}
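The derivation above is just a hex-encoded SHA-256 digest of the master key plus a fixed suffix. A minimal sketch of the same scheme, assuming the `sha2` crate used above:

use sha2::{Digest, Sha256};

// derive_key("MASTER_KEY", "private") reproduces the private key that
// generate_missing_api_keys computes; "public" gives the public key.
fn derive_key(master_key: &str, suffix: &str) -> String {
    let material = format!("{}-{}", master_key, suffix);
    format!("{:x}", Sha256::digest(material.as_bytes()))
}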
pub static AUTOBATCHING_ENABLED: AtomicBool = AtomicBool::new(false);

pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<MeiliSearch> {
    let mut meilisearch = MeiliSearch::builder();

    // enable autobatching?
    let _ = AUTOBATCHING_ENABLED.store(
        opt.scheduler_options.enable_auto_batching,
        std::sync::atomic::Ordering::Relaxed,
    );

    meilisearch
        .set_max_index_size(opt.max_index_size.get_bytes() as usize)
        .set_max_update_store_size(opt.max_udb_size.get_bytes() as usize)
        .set_max_task_store_size(opt.max_task_db_size.get_bytes() as usize)
        // snapshot
        .set_ignore_missing_snapshot(opt.ignore_missing_snapshot)
        .set_ignore_snapshot_if_db_exists(opt.ignore_snapshot_if_db_exists)
        .set_dump_dst(opt.dumps_dir.clone())
        .set_snapshot_interval(Duration::from_secs(opt.snapshot_interval_sec))
        .set_snapshot_dir(opt.snapshot_dir.clone());
        .set_snapshot_dir(opt.snapshot_dir.clone())
        // dump
        .set_ignore_missing_dump(opt.ignore_missing_dump)
        .set_ignore_dump_if_db_exists(opt.ignore_dump_if_db_exists)
        .set_dump_dst(opt.dumps_dir.clone());

    if let Some(ref path) = opt.import_snapshot {
        meilisearch.set_import_snapshot(path.clone());
@@ -72,18 +61,24 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<MeiliSearch> {
        meilisearch.set_schedule_snapshot();
    }

    meilisearch.build(opt.db_path.clone(), opt.indexer_options.clone())
    meilisearch.build(
        opt.db_path.clone(),
        opt.indexer_options.clone(),
        opt.scheduler_options.clone(),
    )
}

pub fn configure_data(
    config: &mut web::ServiceConfig,
    data: MeiliSearch,
    auth: AuthController,
    opt: &Opt,
    analytics: Arc<dyn Analytics>,
) {
    let http_payload_size_limit = opt.http_payload_size_limit.get_bytes() as usize;
    config
        .app_data(data)
        .app_data(auth)
        .app_data(web::Data::from(analytics))
        .app_data(
            web::JsonConfig::default()
@@ -109,37 +104,10 @@ pub fn configure_data(
        );
}

pub fn configure_auth(config: &mut web::ServiceConfig, opts: &Opt) {
    let mut keys = ApiKeys {
        master: opts.master_key.clone(),
        private: None,
        public: None,
    };

    keys.generate_missing_api_keys();

    let auth_config = if let Some(ref master_key) = keys.master {
        let private_key = keys.private.as_ref().unwrap();
        let public_key = keys.public.as_ref().unwrap();
        let mut policies = init_policies!(Public, Private, Admin);
        create_users!(
            policies,
            master_key.as_bytes() => { Admin, Private, Public },
            private_key.as_bytes() => { Private, Public },
            public_key.as_bytes() => { Public }
        );
        AuthConfig::Auth(policies)
    } else {
        AuthConfig::NoAuth
    };

    config.app_data(auth_config).app_data(keys);
}
#[cfg(feature = "mini-dashboard")]
pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
    use actix_web::HttpResponse;
    use actix_web_static_files::Resource;
    use static_files::Resource;

    mod generated {
        include!(concat!(env!("OUT_DIR"), "/generated.rs"));
@@ -154,13 +122,13 @@ pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
            } = resource;
            // Redirect index.html to /
            if path == "index.html" {
                config.service(web::resource("/").route(
                    web::get().to(move || HttpResponse::Ok().content_type(mime_type).body(data)),
                ));
                config.service(web::resource("/").route(web::get().to(move || async move {
                    HttpResponse::Ok().content_type(mime_type).body(data)
                })));
            } else {
                config.service(web::resource(path).route(
                    web::get().to(move || HttpResponse::Ok().content_type(mime_type).body(data)),
                ));
                config.service(web::resource(path).route(web::get().to(move || async move {
                    HttpResponse::Ok().content_type(mime_type).body(data)
                })));
            }
        }
    } else {
@@ -175,24 +143,24 @@ pub fn dashboard(config: &mut web::ServiceConfig, _enable_frontend: bool) {

#[macro_export]
macro_rules! create_app {
    ($data:expr, $enable_frontend:expr, $opt:expr, $analytics:expr) => {{
    ($data:expr, $auth:expr, $enable_frontend:expr, $opt:expr, $analytics:expr) => {{
        use actix_cors::Cors;
        use actix_web::middleware::TrailingSlash;
        use actix_web::App;
        use actix_web::{middleware, web};
        use meilisearch_http::error::{MeilisearchHttpError, ResponseError};
        use meilisearch_error::ResponseError;
        use meilisearch_http::error::MeilisearchHttpError;
        use meilisearch_http::routes;
        use meilisearch_http::{configure_auth, configure_data, dashboard};
        use meilisearch_http::{configure_data, dashboard};

        App::new()
            .configure(|s| configure_data(s, $data.clone(), &$opt, $analytics))
            .configure(|s| configure_auth(s, &$opt))
            .configure(|s| configure_data(s, $data.clone(), $auth.clone(), &$opt, $analytics))
            .configure(routes::configure)
            .configure(|s| dashboard(s, $enable_frontend))
            .wrap(
                Cors::default()
                    .send_wildcard()
                    .allowed_headers(vec!["content-type", "x-meili-api-key"])
                    .allow_any_header()
                    .allow_any_origin()
                    .allow_any_method()
                    .max_age(86_400), // 24h
@@ -2,11 +2,12 @@ use std::env;
use std::sync::Arc;

use actix_web::HttpServer;
use clap::Parser;
use meilisearch_auth::AuthController;
use meilisearch_http::analytics;
use meilisearch_http::analytics::Analytics;
use meilisearch_http::{create_app, setup_meilisearch, Opt};
use meilisearch_lib::MeiliSearch;
use structopt::StructOpt;

#[cfg(target_os = "linux")]
#[global_allocator]
@@ -28,7 +29,7 @@ fn setup(opt: &Opt) -> anyhow::Result<()> {

#[actix_web::main]
async fn main() -> anyhow::Result<()> {
    let opt = Opt::from_args();
    let opt = Opt::parse();

    setup(&opt)?;

@@ -46,6 +47,8 @@ async fn main() -> anyhow::Result<()> {

    let meilisearch = setup_meilisearch(&opt)?;

    let auth_controller = AuthController::new(&opt.db_path, &opt.master_key)?;

    #[cfg(all(not(debug_assertions), feature = "analytics"))]
    let (analytics, user) = if !opt.no_analytics {
        analytics::SegmentAnalytics::new(&opt, &meilisearch).await
@@ -57,22 +60,30 @@ async fn main() -> anyhow::Result<()> {

    print_launch_resume(&opt, &user);

    run_http(meilisearch, opt, analytics).await?;
    run_http(meilisearch, auth_controller, opt, analytics).await?;

    Ok(())
}

async fn run_http(
    data: MeiliSearch,
    auth_controller: AuthController,
    opt: Opt,
    analytics: Arc<dyn Analytics>,
) -> anyhow::Result<()> {
    let _enable_dashboard = &opt.env == "development";
    let opt_clone = opt.clone();
    let http_server =
        HttpServer::new(move || create_app!(data, _enable_dashboard, opt_clone, analytics.clone()))
            // Disabling signals allows the server to terminate immediately when a user enters CTRL-C.
            .disable_signals();
    let http_server = HttpServer::new(move || {
        create_app!(
            data,
            auth_controller,
            _enable_dashboard,
            opt_clone,
            analytics.clone()
        )
    })
    // Disabling signals allows the server to terminate immediately when a user enters CTRL-C.
    .disable_signals();

    if let Some(config) = opt.get_ssl_config()? {
        http_server
@@ -90,14 +101,14 @@ pub fn print_launch_resume(opt: &Opt, user: &str) {
    let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");

    let ascii_name = r#"
[ASCII-art launch banner: the old build spells "MeiliSearch", the new build spells "Meilisearch"; the banner's column alignment was lost in extraction]
"#;

    eprintln!("{}", ascii_name);
@@ -114,17 +125,17 @@ pub fn print_launch_resume(opt: &Opt, user: &str) {

    #[cfg(all(not(debug_assertions), feature = "analytics"))]
    {
        if opt.no_analytics {
            eprintln!("Anonymous telemetry:\t\"Disabled\"");
        } else {
        if !opt.no_analytics {
            eprintln!(
                "
Thank you for using MeiliSearch!
Thank you for using Meilisearch!

We collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html

Anonymous telemetry:\t\"Enabled\""
            );
        } else {
            eprintln!("Anonymous telemetry:\t\"Disabled\"");
        }
    }

@@ -135,7 +146,7 @@ Anonymous telemetry:\t\"Enabled\""
    eprintln!();

    if opt.master_key.is_some() {
        eprintln!("A Master Key has been set. Requests to MeiliSearch won't be authorized unless you provide an authentication key.");
        eprintln!("A Master Key has been set. Requests to Meilisearch won't be authorized unless you provide an authentication key.");
    } else {
        eprintln!("No master key found; The server will accept unidentified requests. \
        If you need some protection in development mode, please export a key: export MEILI_MASTER_KEY=xxx");
@@ -144,6 +155,6 @@ Anonymous telemetry:\t\"Enabled\""
    eprintln!();
    eprintln!("Documentation:\t\thttps://docs.meilisearch.com");
    eprintln!("Source code:\t\thttps://github.com/meilisearch/meilisearch");
    eprintln!("Contact:\t\thttps://docs.meilisearch.com/resources/contact.html or bonjour@meilisearch.com");
    eprintln!("Contact:\t\thttps://docs.meilisearch.com/resources/contact.html");
    eprintln!();
}
@@ -4,134 +4,168 @@ use std::path::PathBuf;
use std::sync::Arc;

use byte_unit::Byte;
use meilisearch_lib::options::IndexerOpts;
use rustls::internal::pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
use clap::Parser;
use meilisearch_lib::options::{IndexerOpts, SchedulerConfig};
use rustls::{
    AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient, NoClientAuth,
    server::{
        AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient,
        ServerSessionMemoryCache,
    },
    RootCertStore,
};
use structopt::StructOpt;
use rustls_pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
use serde::Serialize;

const POSSIBLE_ENV: [&str; 2] = ["development", "production"];

#[derive(Debug, Clone, StructOpt)]
#[derive(Debug, Clone, Parser, Serialize)]
pub struct Opt {
    /// The destination where the database must be created.
    #[structopt(long, env = "MEILI_DB_PATH", default_value = "./data.ms")]
    #[clap(long, env = "MEILI_DB_PATH", default_value = "./data.ms")]
    pub db_path: PathBuf,

    /// The address on which the http server will listen.
    #[structopt(long, env = "MEILI_HTTP_ADDR", default_value = "127.0.0.1:7700")]
    #[clap(long, env = "MEILI_HTTP_ADDR", default_value = "127.0.0.1:7700")]
    pub http_addr: String,

    /// The master key allowing you to do everything on the server.
    #[structopt(long, env = "MEILI_MASTER_KEY")]
    #[serde(skip)]
    #[clap(long, env = "MEILI_MASTER_KEY")]
    pub master_key: Option<String>,

    /// This environment variable must be set to `production` if you are running in production.
    /// If the server is running in development mode more logs will be displayed,
    /// and the master key can be avoided, which implies that there is no security on the updates routes.
    /// This is useful to debug when integrating the engine with another service.
    #[structopt(long, env = "MEILI_ENV", default_value = "development", possible_values = &POSSIBLE_ENV)]
    #[clap(long, env = "MEILI_ENV", default_value = "development", possible_values = &POSSIBLE_ENV)]
    pub env: String,

    /// Do not send analytics to Meili.
    #[cfg(all(not(debug_assertions), feature = "analytics"))]
    #[structopt(long, env = "MEILI_NO_ANALYTICS")]
    #[serde(skip)] // we can't send true
    #[clap(long, env = "MEILI_NO_ANALYTICS")]
    pub no_analytics: bool,

    /// The maximum size, in bytes, of the main lmdb database directory
    #[structopt(long, env = "MEILI_MAX_INDEX_SIZE", default_value = "100 GiB")]
    #[clap(long, env = "MEILI_MAX_INDEX_SIZE", default_value = "100 GiB")]
    pub max_index_size: Byte,

    /// The maximum size, in bytes, of the update lmdb database directory
    #[structopt(long, env = "MEILI_MAX_UDB_SIZE", default_value = "100 GiB")]
    pub max_udb_size: Byte,
    #[clap(long, env = "MEILI_MAX_TASK_DB_SIZE", default_value = "100 GiB")]
    pub max_task_db_size: Byte,

    /// The maximum size, in bytes, of accepted JSON payloads
    #[structopt(long, env = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT", default_value = "100 MB")]
    #[clap(long, env = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT", default_value = "100 MB")]
    pub http_payload_size_limit: Byte,

    /// Read server certificates from CERTFILE.
    /// This should contain PEM-format certificates
    /// in the right order (the first certificate should
    /// certify KEYFILE, the last should be a root CA).
    #[structopt(long, env = "MEILI_SSL_CERT_PATH", parse(from_os_str))]
    #[serde(skip)]
    #[clap(long, env = "MEILI_SSL_CERT_PATH", parse(from_os_str))]
    pub ssl_cert_path: Option<PathBuf>,

    /// Read the private key from KEYFILE. This should be an RSA
    /// private key or a PKCS8-encoded private key, in PEM format.
    #[structopt(long, env = "MEILI_SSL_KEY_PATH", parse(from_os_str))]
    #[serde(skip)]
    #[clap(long, env = "MEILI_SSL_KEY_PATH", parse(from_os_str))]
    pub ssl_key_path: Option<PathBuf>,

    /// Enable client authentication, and accept certificates
    /// signed by those roots provided in CERTFILE.
    #[structopt(long, env = "MEILI_SSL_AUTH_PATH", parse(from_os_str))]
    #[clap(long, env = "MEILI_SSL_AUTH_PATH", parse(from_os_str))]
    #[serde(skip)]
    pub ssl_auth_path: Option<PathBuf>,

    /// Read a DER-encoded OCSP response from OCSPFILE and staple it to the certificate.
    /// Optional.
    #[structopt(long, env = "MEILI_SSL_OCSP_PATH", parse(from_os_str))]
    #[serde(skip)]
    #[clap(long, env = "MEILI_SSL_OCSP_PATH", parse(from_os_str))]
    pub ssl_ocsp_path: Option<PathBuf>,

    /// Send a fatal alert if the client does not complete client authentication.
    #[structopt(long, env = "MEILI_SSL_REQUIRE_AUTH")]
    #[serde(skip)]
    #[clap(long, env = "MEILI_SSL_REQUIRE_AUTH")]
    pub ssl_require_auth: bool,

    /// SSL: support session resumption.
    #[structopt(long, env = "MEILI_SSL_RESUMPTION")]
    #[serde(skip)]
    #[clap(long, env = "MEILI_SSL_RESUMPTION")]
    pub ssl_resumption: bool,

    /// SSL: support tickets.
    #[structopt(long, env = "MEILI_SSL_TICKETS")]
    #[serde(skip)]
    #[clap(long, env = "MEILI_SSL_TICKETS")]
    pub ssl_tickets: bool,

    /// Defines the path of the snapshot file to import.
    /// This option will, by default, stop the process if a database already exists or if no snapshot exists at
    /// the given path. If this option is not specified, no snapshot is imported.
    #[structopt(long)]
    #[clap(long)]
    pub import_snapshot: Option<PathBuf>,

    /// The engine will ignore a missing snapshot and not return an error in such a case.
    #[structopt(long, requires = "import-snapshot")]
    #[clap(long, requires = "import-snapshot")]
    pub ignore_missing_snapshot: bool,

    /// The engine will skip snapshot importation and not return an error in such a case.
    #[structopt(long, requires = "import-snapshot")]
    #[clap(long, requires = "import-snapshot")]
    pub ignore_snapshot_if_db_exists: bool,

    /// Defines the directory path where Meilisearch will create a snapshot every snapshot_interval_sec.
    #[structopt(long, env = "MEILI_SNAPSHOT_DIR", default_value = "snapshots/")]
    #[clap(long, env = "MEILI_SNAPSHOT_DIR", default_value = "snapshots/")]
    pub snapshot_dir: PathBuf,

    /// Activate snapshot scheduling.
    #[structopt(long, env = "MEILI_SCHEDULE_SNAPSHOT")]
    #[clap(long, env = "MEILI_SCHEDULE_SNAPSHOT")]
    pub schedule_snapshot: bool,

    /// Defines the time interval, in seconds, between each snapshot creation.
    #[structopt(long, env = "MEILI_SNAPSHOT_INTERVAL_SEC", default_value = "86400")] // 24h
    #[clap(long, env = "MEILI_SNAPSHOT_INTERVAL_SEC", default_value = "86400")] // 24h
    pub snapshot_interval_sec: u64,

    /// Folder where dumps are created when the dump route is called.
    #[structopt(long, env = "MEILI_DUMPS_DIR", default_value = "dumps/")]
    pub dumps_dir: PathBuf,

    /// Import a dump from the specified path; must be a `.dump` file.
    #[structopt(long, conflicts_with = "import-snapshot")]
    #[clap(long, conflicts_with = "import-snapshot")]
    pub import_dump: Option<PathBuf>,

    /// If the dump doesn't exist, load or create the database specified by `db-path` instead.
    #[clap(long, requires = "import-dump")]
    pub ignore_missing_dump: bool,

    /// Ignore the dump if a database already exists, and load that database instead.
    #[clap(long, requires = "import-dump")]
    pub ignore_dump_if_db_exists: bool,

    /// Folder where dumps are created when the dump route is called.
    #[clap(long, env = "MEILI_DUMPS_DIR", default_value = "dumps/")]
    pub dumps_dir: PathBuf,

    /// Set the log level.
    #[structopt(long, env = "MEILI_LOG_LEVEL", default_value = "info")]
    #[clap(long, env = "MEILI_LOG_LEVEL", default_value = "info")]
    pub log_level: String,

    #[structopt(skip)]
    #[serde(skip)]
    #[clap(flatten)]
    pub indexer_options: IndexerOpts,

    #[serde(flatten)]
    #[clap(flatten)]
    pub scheduler_options: SchedulerConfig,
}
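The hunk above is a mechanical structopt-to-clap-3 migration: the derive becomes `Parser`, `#[structopt(...)]` attributes become `#[clap(...)]`, and parsing moves from `Opt::from_args()` to `Opt::parse()`. A minimal, self-contained sketch of the pattern (the field and env names here are illustrative, and `env = ...` assumes clap's `env` feature is enabled):

use clap::Parser;

#[derive(Debug, Parser)]
struct DemoOpt {
    /// Illustrative option mirroring the db_path flag above.
    #[clap(long, env = "DEMO_DB_PATH", default_value = "./data.ms")]
    db_path: std::path::PathBuf,
}

fn main() {
    let opt = DemoOpt::parse(); // replaces structopt's DemoOpt::from_args()
    println!("{:?}", opt.db_path);
}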
impl Opt {
    /// Whether analytics should be enabled or not.
    #[cfg(all(not(debug_assertions), feature = "analytics"))]
    pub fn analytics(&self) -> bool {
        !self.no_analytics
    }

    pub fn get_ssl_config(&self) -> anyhow::Result<Option<rustls::ServerConfig>> {
        if let (Some(cert_path), Some(key_path)) = (&self.ssl_cert_path, &self.ssl_key_path) {
            let client_auth = match &self.ssl_auth_path {
            let config = rustls::ServerConfig::builder().with_safe_defaults();

            let config = match &self.ssl_auth_path {
                Some(auth_path) => {
                    let roots = load_certs(auth_path.to_path_buf())?;
                    let mut client_auth_roots = RootCertStore::empty();
@@ -139,30 +173,32 @@ impl Opt {
                        client_auth_roots.add(&root).unwrap();
                    }
                    if self.ssl_require_auth {
                        AllowAnyAuthenticatedClient::new(client_auth_roots)
                        let verifier = AllowAnyAuthenticatedClient::new(client_auth_roots);
                        config.with_client_cert_verifier(verifier)
                    } else {
                        AllowAnyAnonymousOrAuthenticatedClient::new(client_auth_roots)
                        let verifier =
                            AllowAnyAnonymousOrAuthenticatedClient::new(client_auth_roots);
                        config.with_client_cert_verifier(verifier)
                    }
                }
                None => NoClientAuth::new(),
                None => config.with_no_client_auth(),
            };

            let mut config = rustls::ServerConfig::new(client_auth);
            config.key_log = Arc::new(rustls::KeyLogFile::new());

            let certs = load_certs(cert_path.to_path_buf())?;
            let privkey = load_private_key(key_path.to_path_buf())?;
            let ocsp = load_ocsp(&self.ssl_ocsp_path)?;
            config
                .set_single_cert_with_ocsp_and_sct(certs, privkey, ocsp, vec![])
            let mut config = config
                .with_single_cert_with_ocsp_and_sct(certs, privkey, ocsp, vec![])
                .map_err(|_| anyhow::anyhow!("bad certificates/private key"))?;

            config.key_log = Arc::new(rustls::KeyLogFile::new());

            if self.ssl_resumption {
                config.set_persistence(rustls::ServerSessionMemoryCache::new(256));
                config.session_storage = ServerSessionMemoryCache::new(256);
            }

            if self.ssl_tickets {
                config.ticketer = rustls::Ticketer::new();
                config.ticketer = rustls::Ticketer::new().unwrap();
            }

            Ok(Some(config))
@@ -176,7 +212,9 @@ fn load_certs(filename: PathBuf) -> anyhow::Result<Vec<rustls::Certificate>> {
    let certfile =
        fs::File::open(filename).map_err(|_| anyhow::anyhow!("cannot open certificate file"))?;
    let mut reader = BufReader::new(certfile);
    certs(&mut reader).map_err(|_| anyhow::anyhow!("cannot read certificate file"))
    certs(&mut reader)
        .map(|certs| certs.into_iter().map(rustls::Certificate).collect())
        .map_err(|_| anyhow::anyhow!("cannot read certificate file"))
}

fn load_private_key(filename: PathBuf) -> anyhow::Result<rustls::PrivateKey> {
@@ -201,10 +239,10 @@ fn load_private_key(filename: PathBuf) -> anyhow::Result<rustls::PrivateKey> {

    // prefer to load pkcs8 keys
    if !pkcs8_keys.is_empty() {
        Ok(pkcs8_keys[0].clone())
        Ok(rustls::PrivateKey(pkcs8_keys[0].clone()))
    } else {
        assert!(!rsa_keys.is_empty());
        Ok(rsa_keys[0].clone())
        Ok(rustls::PrivateKey(rsa_keys[0].clone()))
    }
}
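For orientation, the rustls 0.19-to-0.20 migration above replaces the mutable `ServerConfig::new(client_auth)` with a typed builder chain. A minimal sketch of the 0.20 flow, using the simpler `with_single_cert` rather than the OCSP/SCT variant used here (`certs` and `key` would come from helpers like `load_certs` and `load_private_key`):

fn build_tls_config(
    certs: Vec<rustls::Certificate>,
    key: rustls::PrivateKey,
) -> anyhow::Result<rustls::ServerConfig> {
    let config = rustls::ServerConfig::builder()
        .with_safe_defaults() // protocol versions, cipher suites, key exchange groups
        .with_no_client_auth() // or .with_client_cert_verifier(...) as in the code above
        .with_single_cert(certs, key)?;
    Ok(config)
}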
meilisearch-http/src/routes/api_key.rs (new file, 154 lines)
@@ -0,0 +1,154 @@
use std::str;

use actix_web::{web, HttpRequest, HttpResponse};

use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;

use crate::extractors::{
    authentication::{policies::*, GuardedData},
    sequential_extractor::SeqHandler,
};
use meilisearch_error::{Code, ResponseError};

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::resource("")
            .route(web::post().to(SeqHandler(create_api_key)))
            .route(web::get().to(SeqHandler(list_api_keys))),
    )
    .service(
        web::resource("/{api_key}")
            .route(web::get().to(SeqHandler(get_api_key)))
            .route(web::patch().to(SeqHandler(patch_api_key)))
            .route(web::delete().to(SeqHandler(delete_api_key))),
    );
}

pub async fn create_api_key(
    auth_controller: GuardedData<MasterPolicy, AuthController>,
    body: web::Json<Value>,
    _req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
    let v = body.into_inner();
    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
        let key = auth_controller.create_key(v)?;
        Ok(KeyView::from_key(key, &auth_controller))
    })
    .await
    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

    Ok(HttpResponse::Created().json(res))
}

pub async fn list_api_keys(
    auth_controller: GuardedData<MasterPolicy, AuthController>,
    _req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
        let keys = auth_controller.list_keys()?;
        let res: Vec<_> = keys
            .into_iter()
            .map(|k| KeyView::from_key(k, &auth_controller))
            .collect();
        Ok(res)
    })
    .await
    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

    Ok(HttpResponse::Ok().json(KeyListView::from(res)))
}

pub async fn get_api_key(
    auth_controller: GuardedData<MasterPolicy, AuthController>,
    path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
    let api_key = path.into_inner().api_key;
    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
        let key = auth_controller.get_key(&api_key)?;
        Ok(KeyView::from_key(key, &auth_controller))
    })
    .await
    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

    Ok(HttpResponse::Ok().json(res))
}

pub async fn patch_api_key(
    auth_controller: GuardedData<MasterPolicy, AuthController>,
    body: web::Json<Value>,
    path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
    let api_key = path.into_inner().api_key;
    let body = body.into_inner();
    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
        let key = auth_controller.update_key(&api_key, body)?;
        Ok(KeyView::from_key(key, &auth_controller))
    })
    .await
    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

    Ok(HttpResponse::Ok().json(res))
}

pub async fn delete_api_key(
    auth_controller: GuardedData<MasterPolicy, AuthController>,
    path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
    let api_key = path.into_inner().api_key;
    tokio::task::spawn_blocking(move || auth_controller.delete_key(&api_key))
        .await
        .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

    Ok(HttpResponse::NoContent().finish())
}

#[derive(Deserialize)]
pub struct AuthParam {
    api_key: String,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct KeyView {
    description: Option<String>,
    key: String,
    actions: Vec<Action>,
    indexes: Vec<String>,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    expires_at: Option<OffsetDateTime>,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    created_at: OffsetDateTime,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    updated_at: OffsetDateTime,
}

impl KeyView {
    fn from_key(key: Key, auth: &AuthController) -> Self {
        let key_id = str::from_utf8(&key.id).unwrap();
        let generated_key = auth.generate_key(key_id).unwrap_or_default();

        KeyView {
            description: key.description,
            key: generated_key,
            actions: key.actions,
            indexes: key.indexes,
            expires_at: key.expires_at,
            created_at: key.created_at,
            updated_at: key.updated_at,
        }
    }
}

#[derive(Debug, Serialize)]
struct KeyListView {
    results: Vec<KeyView>,
}

impl From<Vec<KeyView>> for KeyListView {
    fn from(results: Vec<KeyView>) -> Self {
        Self { results }
    }
}
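Every handler in this new file uses the same error-flattening idiom: `spawn_blocking` yields `Result<Result<T, AuthControllerError>, JoinError>`, the outer `JoinError` is mapped to an internal `ResponseError`, and `??` unwraps both layers at once. A stripped-down sketch with illustrative types:

async fn demo() -> Result<u32, String> {
    let value = tokio::task::spawn_blocking(move || -> Result<u32, String> { Ok(42) })
        .await // Result<Result<u32, String>, JoinError>
        .map_err(|e| e.to_string())??; // first ? unwraps the JoinError, second ? the inner error
    Ok(value)
}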
@@ -1,20 +1,23 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::MeiliSearch;
use serde::{Deserialize, Serialize};
use serde_json::json;

use crate::analytics::Analytics;
use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::post().to(create_dump)))
        .service(web::resource("/{dump_uid}/status").route(web::get().to(get_dump_status)));
    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))))
        .service(
            web::resource("/{dump_uid}/status").route(web::get().to(SeqHandler(get_dump_status))),
        );
}

pub async fn create_dump(
    meilisearch: GuardedData<Private, MeiliSearch>,
    meilisearch: GuardedData<ActionPolicy<{ actions::DUMPS_CREATE }>, MeiliSearch>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@@ -38,7 +41,7 @@ struct DumpParam {
}

async fn get_dump_status(
    meilisearch: GuardedData<Private, MeiliSearch>,
    meilisearch: GuardedData<ActionPolicy<{ actions::DUMPS_GET }>, MeiliSearch>,
    path: web::Path<DumpParam>,
) -> Result<HttpResponse, ResponseError> {
    let res = meilisearch.dump_info(path.dump_uid.clone()).await?;
@@ -1,25 +1,39 @@
use actix_web::error::PayloadError;
use actix_web::http::header::CONTENT_TYPE;
use actix_web::web::Bytes;
use actix_web::HttpMessage;
use actix_web::{web, HttpRequest, HttpResponse};
use bstr::ByteSlice;
use futures::{Stream, StreamExt};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::index_controller::{DocumentAdditionFormat, Update};
use meilisearch_lib::milli::update::IndexDocumentsMethod;
use meilisearch_lib::MeiliSearch;
use mime::Mime;
use once_cell::sync::Lazy;
use serde::Deserialize;
use serde_json::Value;
use tokio::sync::mpsc;

use crate::analytics::Analytics;
use crate::error::{MeilisearchHttpError, ResponseError};
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::payload::Payload;
use crate::routes::IndexParam;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::SummarizedTaskView;

const DEFAULT_RETRIEVE_DOCUMENTS_OFFSET: usize = 0;
const DEFAULT_RETRIEVE_DOCUMENTS_LIMIT: usize = 20;

static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
    vec![
        "application/json".to_string(),
        "application/x-ndjson".to_string(),
        "text/csv".to_string(),
    ]
});

/// This is required because Payload is neither Sync nor Send.
fn payload_to_stream(mut payload: Payload) -> impl Stream<Item = Result<Bytes, PayloadError>> {
    let (snd, recv) = mpsc::channel(1);
@@ -31,6 +45,24 @@ fn payload_to_stream(mut payload: Payload) -> impl Stream<Item = Result<Bytes, P
    tokio_stream::wrappers::ReceiverStream::new(recv)
}

/// Extracts the mime type from the content type and returns
/// a Meilisearch error if anything bad happens.
fn extract_mime_type(req: &HttpRequest) -> Result<Option<Mime>, MeilisearchHttpError> {
    match req.mime_type() {
        Ok(Some(mime)) => Ok(Some(mime)),
        Ok(None) => Ok(None),
        Err(_) => match req.headers().get(CONTENT_TYPE) {
            Some(content_type) => Err(MeilisearchHttpError::InvalidContentType(
                content_type.as_bytes().as_bstr().to_string(),
                ACCEPTED_CONTENT_TYPE.clone(),
            )),
            None => Err(MeilisearchHttpError::MissingContentType(
                ACCEPTED_CONTENT_TYPE.clone(),
            )),
        },
    }
}
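A quick illustration of the `mime` crate parsing that `extract_mime_type` relies on, and of the `(type, subtype)` pair that `document_addition` below matches on:

use mime::Mime;

fn main() {
    let m: Mime = "application/x-ndjson".parse().unwrap();
    assert_eq!(
        (m.type_().as_str(), m.subtype().as_str()),
        ("application", "x-ndjson")
    );
}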
#[derive(Deserialize)]
pub struct DocumentParam {
    index_uid: String,
@@ -40,22 +72,22 @@ pub struct DocumentParam {
pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::resource("")
            .route(web::get().to(get_all_documents))
            .route(web::post().to(add_documents))
            .route(web::put().to(update_documents))
            .route(web::delete().to(clear_all_documents)),
            .route(web::get().to(SeqHandler(get_all_documents)))
            .route(web::post().to(SeqHandler(add_documents)))
            .route(web::put().to(SeqHandler(update_documents)))
            .route(web::delete().to(SeqHandler(clear_all_documents))),
    )
    // this route needs to be before /documents/{document_id} to match properly
    .service(web::resource("/delete-batch").route(web::post().to(delete_documents)))
    .service(web::resource("/delete-batch").route(web::post().to(SeqHandler(delete_documents))))
    .service(
        web::resource("/{document_id}")
            .route(web::get().to(get_document))
            .route(web::delete().to(delete_document)),
            .route(web::get().to(SeqHandler(get_document)))
            .route(web::delete().to(SeqHandler(delete_document))),
    );
}

pub async fn get_document(
    meilisearch: GuardedData<Public, MeiliSearch>,
    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, MeiliSearch>,
    path: web::Path<DocumentParam>,
) -> Result<HttpResponse, ResponseError> {
    let index = path.index_uid.clone();
@@ -68,7 +100,7 @@ pub async fn get_document(
}

pub async fn delete_document(
    meilisearch: GuardedData<Private, MeiliSearch>,
    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, MeiliSearch>,
    path: web::Path<DocumentParam>,
) -> Result<HttpResponse, ResponseError> {
    let DocumentParam {
@@ -76,11 +108,9 @@ pub async fn delete_document(
        index_uid,
    } = path.into_inner();
    let update = Update::DeleteDocuments(vec![document_id]);
    let update_status = meilisearch
        .register_update(index_uid, update, false)
        .await?;
    debug!("returns: {:?}", update_status);
    Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
    let task: SummarizedTaskView = meilisearch.register_update(index_uid, update).await?.into();
    debug!("returns: {:?}", task);
    Ok(HttpResponse::Accepted().json(task))
}

#[derive(Deserialize, Debug)]
@@ -92,8 +122,8 @@ pub struct BrowseQuery {
}

pub async fn get_all_documents(
    meilisearch: GuardedData<Public, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, MeiliSearch>,
    path: web::Path<String>,
    params: web::Query<BrowseQuery>,
) -> Result<HttpResponse, ResponseError> {
    debug!("called with params: {:?}", params);
@@ -110,7 +140,7 @@ pub async fn get_all_documents(

    let documents = meilisearch
        .documents(
            path.index_uid.clone(),
            path.into_inner(),
            params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET),
            params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT),
            attributes_to_retrieve,
@@ -127,92 +157,89 @@ pub struct UpdateDocumentsQuery {
}

pub async fn add_documents(
    meilisearch: GuardedData<Private, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, MeiliSearch>,
    path: web::Path<String>,
    params: web::Query<UpdateDocumentsQuery>,
    body: Payload,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    debug!("called with params: {:?}", params);
    let content_type = req
        .headers()
        .get("Content-type")
        .map(|s| s.to_str().unwrap_or("unkown"));
    let params = params.into_inner();
    let index_uid = path.into_inner();

    analytics.add_documents(
        &params,
        meilisearch.get_index(path.index_uid.clone()).await.is_err(),
        meilisearch.get_index(index_uid.clone()).await.is_err(),
        &req,
    );

    document_addition(
        content_type,
    let allow_index_creation = meilisearch.filters().allow_index_creation;
    let task = document_addition(
        extract_mime_type(&req)?,
        meilisearch,
        path.index_uid.clone(),
        index_uid,
        params.primary_key,
        body,
        IndexDocumentsMethod::ReplaceDocuments,
        allow_index_creation,
    )
    .await
    .await?;

    Ok(HttpResponse::Accepted().json(task))
}

pub async fn update_documents(
    meilisearch: GuardedData<Private, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, MeiliSearch>,
    path: web::Path<String>,
    params: web::Query<UpdateDocumentsQuery>,
    body: Payload,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    debug!("called with params: {:?}", params);
    let content_type = req
        .headers()
        .get("Content-type")
        .map(|s| s.to_str().unwrap_or("unkown"));
    let index_uid = path.into_inner();

    analytics.update_documents(
        &params,
        meilisearch.get_index(path.index_uid.clone()).await.is_err(),
        meilisearch.get_index(index_uid.clone()).await.is_err(),
        &req,
    );

    document_addition(
        content_type,
    let allow_index_creation = meilisearch.filters().allow_index_creation;
    let task = document_addition(
        extract_mime_type(&req)?,
        meilisearch,
        path.into_inner().index_uid,
        index_uid,
        params.into_inner().primary_key,
        body,
        IndexDocumentsMethod::UpdateDocuments,
        allow_index_creation,
    )
    .await
    .await?;

    Ok(HttpResponse::Accepted().json(task))
}

/// Route used when the payload type is "application/json"
/// Used to add or replace documents
async fn document_addition(
    content_type: Option<&str>,
    meilisearch: GuardedData<Private, MeiliSearch>,
    mime_type: Option<Mime>,
    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, MeiliSearch>,
    index_uid: String,
    primary_key: Option<String>,
    body: Payload,
    method: IndexDocumentsMethod,
) -> Result<HttpResponse, ResponseError> {
    static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
        vec![
            "application/json".to_string(),
            "application/x-ndjson".to_string(),
            "text/csv".to_string(),
        ]
    });
    let format = match content_type {
        Some("application/json") => DocumentAdditionFormat::Json,
        Some("application/x-ndjson") => DocumentAdditionFormat::Ndjson,
        Some("text/csv") => DocumentAdditionFormat::Csv,
        Some(other) => {
    allow_index_creation: bool,
) -> Result<SummarizedTaskView, ResponseError> {
    let format = match mime_type
        .as_ref()
        .map(|m| (m.type_().as_str(), m.subtype().as_str()))
    {
        Some(("application", "json")) => DocumentAdditionFormat::Json,
        Some(("application", "x-ndjson")) => DocumentAdditionFormat::Ndjson,
        Some(("text", "csv")) => DocumentAdditionFormat::Csv,
        Some((type_, subtype)) => {
            return Err(MeilisearchHttpError::InvalidContentType(
                other.to_string(),
                format!("{}/{}", type_, subtype),
                ACCEPTED_CONTENT_TYPE.clone(),
            )
            .into())
@@ -229,17 +256,18 @@ async fn document_addition(
        primary_key,
        method,
        format,
        allow_index_creation,
    };

    let update_status = meilisearch.register_update(index_uid, update, true).await?;
    let task = meilisearch.register_update(index_uid, update).await?.into();

    debug!("returns: {:?}", update_status);
    Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
    debug!("returns: {:?}", task);
    Ok(task)
}
pub async fn delete_documents(
    meilisearch: GuardedData<Private, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, MeiliSearch>,
    path: web::Path<String>,
    body: web::Json<Vec<Value>>,
) -> Result<HttpResponse, ResponseError> {
    debug!("called with params: {:?}", body);
@@ -253,21 +281,25 @@ pub async fn delete_documents(
        .collect();

    let update = Update::DeleteDocuments(ids);
    let update_status = meilisearch
        .register_update(path.into_inner().index_uid, update, false)
        .await?;
    debug!("returns: {:?}", update_status);
    Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
    let task: SummarizedTaskView = meilisearch
        .register_update(path.into_inner(), update)
        .await?
        .into();

    debug!("returns: {:?}", task);
    Ok(HttpResponse::Accepted().json(task))
}

pub async fn clear_all_documents(
    meilisearch: GuardedData<Private, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, MeiliSearch>,
    path: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    let update = Update::ClearDocuments;
    let update_status = meilisearch
        .register_update(path.into_inner().index_uid, update, false)
        .await?;
    debug!("returns: {:?}", update_status);
    Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
    let task: SummarizedTaskView = meilisearch
        .register_update(path.into_inner(), update)
        .await?
        .into();

    debug!("returns: {:?}", task);
    Ok(HttpResponse::Accepted().json(task))
}
@@ -1,47 +1,55 @@
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use log::debug;
use meilisearch_lib::index_controller::IndexSettings;
use meilisearch_error::ResponseError;
use meilisearch_lib::index_controller::Update;
use meilisearch_lib::MeiliSearch;
use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;

use crate::analytics::Analytics;
use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::routes::IndexParam;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::SummarizedTaskView;

pub mod documents;
pub mod search;
pub mod settings;
pub mod updates;
pub mod tasks;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::resource("")
            .route(web::get().to(list_indexes))
            .route(web::post().to(create_index)),
            .route(web::post().to(SeqHandler(create_index))),
    )
    .service(
        web::scope("/{index_uid}")
            .service(
                web::resource("")
                    .route(web::get().to(get_index))
                    .route(web::put().to(update_index))
                    .route(web::delete().to(delete_index)),
                    .route(web::get().to(SeqHandler(get_index)))
                    .route(web::put().to(SeqHandler(update_index)))
                    .route(web::delete().to(SeqHandler(delete_index))),
            )
            .service(web::resource("/stats").route(web::get().to(get_index_stats)))
            .service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats))))
            .service(web::scope("/documents").configure(documents::configure))
            .service(web::scope("/search").configure(search::configure))
            .service(web::scope("/updates").configure(updates::configure))
            .service(web::scope("/tasks").configure(tasks::configure))
            .service(web::scope("/settings").configure(settings::configure)),
    );
}

pub async fn list_indexes(
    data: GuardedData<Private, MeiliSearch>,
    data: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, MeiliSearch>,
) -> Result<HttpResponse, ResponseError> {
    let indexes = data.list_indexes().await?;
    let search_rules = &data.filters().search_rules;
    let indexes: Vec<_> = data
        .list_indexes()
        .await?
        .into_iter()
        .filter(|i| search_rules.is_index_authorized(&i.uid))
        .collect();

    debug!("returns: {:?}", indexes);
    Ok(HttpResponse::Ok().json(indexes))
}
@@ -54,24 +62,30 @@ pub struct IndexCreateRequest {
}

pub async fn create_index(
    meilisearch: GuardedData<Private, MeiliSearch>,
    meilisearch: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, MeiliSearch>,
    body: web::Json<IndexCreateRequest>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let body = body.into_inner();
    let IndexCreateRequest {
        primary_key, uid, ..
    } = body.into_inner();

    analytics.publish(
        "Index Created".to_string(),
        json!({ "primary_key": body.primary_key}),
        json!({ "primary_key": primary_key }),
        Some(&req),
    );
    let meta = meilisearch.create_index(body.uid, body.primary_key).await?;
    Ok(HttpResponse::Created().json(meta))

    let update = Update::CreateIndex { primary_key };
    let task: SummarizedTaskView = meilisearch.register_update(uid, update).await?.into();

    Ok(HttpResponse::Accepted().json(task))
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[allow(dead_code)]
pub struct UpdateIndexRequest {
    uid: Option<String>,
    primary_key: Option<String>,
@@ -82,23 +96,26 @@ pub struct UpdateIndexRequest {

pub struct UpdateIndexResponse {
    name: String,
    uid: String,
    created_at: DateTime<Utc>,
    updated_at: DateTime<Utc>,
    primary_key: Option<String>,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    created_at: OffsetDateTime,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    updated_at: OffsetDateTime,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    primary_key: OffsetDateTime,
}

pub async fn get_index(
    meilisearch: GuardedData<Private, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, MeiliSearch>,
    path: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    let meta = meilisearch.get_index(path.index_uid.clone()).await?;
    let meta = meilisearch.get_index(path.into_inner()).await?;
    debug!("returns: {:?}", meta);
    Ok(HttpResponse::Ok().json(meta))
}

pub async fn update_index(
    meilisearch: GuardedData<Private, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, MeiliSearch>,
    path: web::Path<String>,
    body: web::Json<UpdateIndexRequest>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
@@ -110,30 +127,36 @@ pub async fn update_index(
        json!({ "primary_key": body.primary_key}),
        Some(&req),
    );
    let settings = IndexSettings {
        uid: body.uid,

    let update = Update::UpdateIndex {
        primary_key: body.primary_key,
    };
    let meta = meilisearch
        .update_index(path.into_inner().index_uid, settings)
        .await?;
    debug!("returns: {:?}", meta);
    Ok(HttpResponse::Ok().json(meta))

    let task: SummarizedTaskView = meilisearch
        .register_update(path.into_inner(), update)
        .await?
        .into();

    debug!("returns: {:?}", task);
    Ok(HttpResponse::Accepted().json(task))
}

pub async fn delete_index(
    meilisearch: GuardedData<Private, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, MeiliSearch>,
    path: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    meilisearch.delete_index(path.index_uid.clone()).await?;
    Ok(HttpResponse::NoContent().finish())
    let uid = path.into_inner();
    let update = Update::DeleteIndex;
    let task: SummarizedTaskView = meilisearch.register_update(uid, update).await?.into();

    Ok(HttpResponse::Accepted().json(task))
}

pub async fn get_index_stats(
    meilisearch: GuardedData<Private, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::STATS_GET }>, MeiliSearch>,
    path: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    let response = meilisearch.get_index_stats(path.index_uid.clone()).await?;
    let response = meilisearch.get_index_stats(path.into_inner()).await?;

    debug!("returns: {:?}", response);
    Ok(HttpResponse::Ok().json(response))
@@ -1,20 +1,21 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_auth::IndexSearchRules;
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{default_crop_length, SearchQuery, DEFAULT_SEARCH_LIMIT};
use meilisearch_lib::MeiliSearch;
use serde::Deserialize;
use serde_json::Value;

use crate::analytics::{Analytics, SearchAggregator};
use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::routes::IndexParam;
use crate::extractors::sequential_extractor::SeqHandler;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::resource("")
            .route(web::get().to(search_with_url_query))
            .route(web::post().to(search_with_post)),
            .route(web::get().to(SeqHandler(search_with_url_query)))
            .route(web::post().to(SeqHandler(search_with_post))),
    );
}

@@ -80,6 +81,26 @@ impl From<SearchQueryGet> for SearchQuery {
    }
}

/// Incorporate search rules in search query
fn add_search_rules(query: &mut SearchQuery, rules: IndexSearchRules) {
    query.filter = match (query.filter.take(), rules.filter) {
        (None, rules_filter) => rules_filter,
        (filter, None) => filter,
        (Some(filter), Some(rules_filter)) => {
            let filter = match filter {
                Value::Array(filter) => filter,
                filter => vec![filter],
            };
            let rules_filter = match rules_filter {
                Value::Array(rules_filter) => rules_filter,
                rules_filter => vec![rules_filter],
            };

            Some(Value::Array([filter, rules_filter].concat()))
        }
    };
}
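
A hedged, self-contained sketch of the merge performed above (the filter values are illustrative): both sides are normalized to arrays and concatenated, which the engine treats as a conjunction of filters.

```rust
use serde_json::{json, Value};

fn main() {
    // A caller-supplied filter and a tenant-token rule filter (hypothetical values).
    let filter = json!("genre = horror");
    let rules_filter = json!(["user_id = 12"]);

    // Mirrors the (Some(filter), Some(rules_filter)) arm: normalize both
    // sides to arrays, then concatenate them.
    let filter = match filter {
        Value::Array(f) => f,
        f => vec![f],
    };
    let rules_filter = match rules_filter {
        Value::Array(f) => f,
        f => vec![f],
    };
    let merged = Value::Array([filter, rules_filter].concat());

    assert_eq!(merged, json!(["genre = horror", "user_id = 12"]));
}
```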

// TODO: TAMO: split on :asc, and :desc, instead of doing some weird things
/// Transform the sort query parameter into something that matches the post expected format.
@@ -107,18 +128,28 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
}
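
The body of `fix_sort_query_parameters` is elided by this hunk; a hedged test sketch of the intended behavior, inferred from the doc comment above (the `_geoPoint` whitespace normalization is an assumption):

```rust
#[test]
fn fix_sort_query_parameters_sketch() {
    // Comma-separated GET sort syntax becomes the list form the POST route expects.
    assert_eq!(
        fix_sort_query_parameters("price:asc,name:desc"),
        vec!["price:asc".to_string(), "name:desc".to_string()]
    );
    // Commas inside `_geoPoint(...)` must not split the parameter.
    assert_eq!(
        fix_sort_query_parameters("_geoPoint(12, 13):asc"),
        vec!["_geoPoint(12,13):asc".to_string()]
    );
}
```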

pub async fn search_with_url_query(
    meilisearch: GuardedData<Public, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::SEARCH }>, MeiliSearch>,
    path: web::Path<String>,
    params: web::Query<SearchQueryGet>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    debug!("called with params: {:?}", params);
    let query: SearchQuery = params.into_inner().into();
    let mut query: SearchQuery = params.into_inner().into();

    let index_uid = path.into_inner();
    // Tenant token search_rules.
    if let Some(search_rules) = meilisearch
        .filters()
        .search_rules
        .get_index_search_rules(&index_uid)
    {
        add_search_rules(&mut query, search_rules);
    }

    let mut aggregate = SearchAggregator::from_query(&query, &req);

    let search_result = meilisearch.search(path.into_inner().index_uid, query).await;
    let search_result = meilisearch.search(index_uid, query).await;
    if let Ok(ref search_result) = search_result {
        aggregate.succeed(search_result);
    }
@@ -135,18 +166,28 @@ pub async fn search_with_url_query(
}

pub async fn search_with_post(
    meilisearch: GuardedData<Public, MeiliSearch>,
    path: web::Path<IndexParam>,
    meilisearch: GuardedData<ActionPolicy<{ actions::SEARCH }>, MeiliSearch>,
    path: web::Path<String>,
    params: web::Json<SearchQuery>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let query = params.into_inner();
    let mut query = params.into_inner();
    debug!("search called with params: {:?}", query);

    let index_uid = path.into_inner();
    // Tenant token search_rules.
    if let Some(search_rules) = meilisearch
        .filters()
        .search_rules
        .get_index_search_rules(&index_uid)
    {
        add_search_rules(&mut query, search_rules);
    }

    let mut aggregate = SearchAggregator::from_query(&query, &req);

    let search_result = meilisearch.search(path.into_inner().index_uid, query).await;
    let search_result = meilisearch.search(index_uid, query).await;
    if let Ok(ref search_result) = search_result {
        aggregate.succeed(search_result);
    }

@@ -1,49 +1,62 @@
use log::debug;

use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::index_controller::Update;
use meilisearch_lib::MeiliSearch;
use serde_json::json;

use crate::analytics::Analytics;
use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::task::SummarizedTaskView;

#[macro_export]
macro_rules! make_setting_route {
    ($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
        pub mod $attr {
            use actix_web::{web, HttpRequest, HttpResponse, Resource};
            use log::debug;
            use actix_web::{web, HttpResponse, HttpRequest, Resource};

            use meilisearch_lib::milli::update::Setting;
            use meilisearch_lib::{MeiliSearch, index::Settings, index_controller::Update};
            use meilisearch_lib::{index::Settings, index_controller::Update, MeiliSearch};

            use crate::analytics::Analytics;
            use crate::error::ResponseError;
            use crate::extractors::authentication::{GuardedData, policies::*};
            use crate::extractors::authentication::{policies::*, GuardedData};
            use crate::extractors::sequential_extractor::SeqHandler;
            use crate::task::SummarizedTaskView;
            use meilisearch_error::ResponseError;

            pub async fn delete(
                meilisearch: GuardedData<Private, MeiliSearch>,
                meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
                index_uid: web::Path<String>,
            ) -> Result<HttpResponse, ResponseError> {
                let settings = Settings {
                    $attr: Setting::Reset,
                    ..Default::default()
                };
                let update = Update::Settings(settings);
                let update_status = meilisearch.register_update(index_uid.into_inner(), update, false).await?;
                debug!("returns: {:?}", update_status);
                Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))

                let allow_index_creation = meilisearch.filters().allow_index_creation;
                let update = Update::Settings {
                    settings,
                    is_deletion: true,
                    allow_index_creation,
                };
                let task: SummarizedTaskView = meilisearch
                    .register_update(index_uid.into_inner(), update)
                    .await?
                    .into();

                debug!("returns: {:?}", task);
                Ok(HttpResponse::Accepted().json(task))
            }

            pub async fn update(
                meilisearch: GuardedData<Private, MeiliSearch>,
                meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
                index_uid: actix_web::web::Path<String>,
                body: actix_web::web::Json<Option<$type>>,
                req: HttpRequest,
                $analytics_var: web::Data< dyn Analytics>,
                $analytics_var: web::Data<dyn Analytics>,
            ) -> std::result::Result<HttpResponse, ResponseError> {
                let body = body.into_inner();

@@ -52,33 +65,43 @@ macro_rules! make_setting_route {
                let settings = Settings {
                    $attr: match body {
                        Some(inner_body) => Setting::Set(inner_body),
                        None => Setting::Reset
                        None => Setting::Reset,
                    },
                    ..Default::default()
                };

                let update = Update::Settings(settings);
                let update_status = meilisearch.register_update(index_uid.into_inner(), update, true).await?;
                debug!("returns: {:?}", update_status);
                Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
                let allow_index_creation = meilisearch.filters().allow_index_creation;
                let update = Update::Settings {
                    settings,
                    is_deletion: false,
                    allow_index_creation,
                };
                let task: SummarizedTaskView = meilisearch
                    .register_update(index_uid.into_inner(), update)
                    .await?
                    .into();

                debug!("returns: {:?}", task);
                Ok(HttpResponse::Accepted().json(task))
            }

            pub async fn get(
                meilisearch: GuardedData<Private, MeiliSearch>,
                meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_GET }>, MeiliSearch>,
                index_uid: actix_web::web::Path<String>,
            ) -> std::result::Result<HttpResponse, ResponseError> {
                let settings = meilisearch.settings(index_uid.into_inner()).await?;
                debug!("returns: {:?}", settings);
                let mut json = serde_json::json!(&settings);
                let val = json[$camelcase_attr].take();

                Ok(HttpResponse::Ok().json(val))
            }

            pub fn resources() -> Resource {
                Resource::new($route)
                    .route(web::get().to(get))
                    .route(web::post().to(update))
                    .route(web::delete().to(delete))
                    .route(web::get().to(SeqHandler(get)))
                    .route(web::post().to(SeqHandler(update)))
                    .route(web::delete().to(SeqHandler(delete)))
            }
        }
};
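
For orientation, a hedged sketch of how this macro is invoked further down in the file (the setting, event name, and analytics payload here are illustrative, not copied from the elided hunks):

```rust
make_setting_route!(
    "/stop-words",
    std::collections::BTreeSet<String>,
    stop_words,
    "stopWords",
    analytics,
    |stop_words: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
        use serde_json::json;
        // Publish an analytics event describing the update (payload is a sketch).
        analytics.publish(
            "StopWords Updated".to_string(),
            json!({
                "stop_words": {
                    "total": stop_words.as_ref().map(|sw| sw.len()).unwrap_or(0),
                },
            }),
            Some(req),
        );
    }
);
```

Each invocation generates a module with `get`, `update`, and `delete` handlers, mounted at the given route by `resources()`.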

@@ -151,7 +174,7 @@ make_setting_route!(
    "SearchableAttributes Updated".to_string(),
    json!({
        "searchable_attributes": {
            "total": setting.as_ref().map(|sort| sort.len()).unwrap_or(0),
            "total": setting.as_ref().map(|searchable| searchable.len()).unwrap_or(0),
        },
    }),
    Some(req),
@@ -204,11 +227,12 @@ make_setting_route!(
macro_rules! generate_configure {
    ($($mod:ident),*) => {
        pub fn configure(cfg: &mut web::ServiceConfig) {
            use crate::extractors::sequential_extractor::SeqHandler;
            cfg.service(
                web::resource("")
                    .route(web::post().to(update_all))
                    .route(web::get().to(get_all))
                    .route(web::delete().to(delete_all)))
                    .route(web::post().to(SeqHandler(update_all)))
                    .route(web::get().to(SeqHandler(get_all)))
                    .route(web::delete().to(SeqHandler(delete_all))))
                $(.service($mod::resources()))*;
        }
    };
@@ -226,7 +250,7 @@ generate_configure!(
);

pub async fn update_all(
    meilisearch: GuardedData<Private, MeiliSearch>,
    meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
    index_uid: web::Path<String>,
    body: web::Json<Settings<Unchecked>>,
    req: HttpRequest,
@@ -240,6 +264,9 @@ pub async fn update_all(
            "ranking_rules": {
                "sort_position": settings.ranking_rules.as_ref().set().map(|sort| sort.iter().position(|s| s == "sort")),
            },
            "searchable_attributes": {
                "total": settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()).unwrap_or(0),
            },
            "sortable_attributes": {
                "total": settings.sortable_attributes.as_ref().set().map(|sort| sort.len()).unwrap_or(0),
                "has_geo": settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")).unwrap_or(false),
@@ -252,17 +279,23 @@ pub async fn update_all(
        Some(&req),
    );

    let update = Update::Settings(settings);
    let update_result = meilisearch
        .register_update(index_uid.into_inner(), update, true)
        .await?;
    let json = serde_json::json!({ "updateId": update_result.id() });
    debug!("returns: {:?}", json);
    Ok(HttpResponse::Accepted().json(json))
    let allow_index_creation = meilisearch.filters().allow_index_creation;
    let update = Update::Settings {
        settings,
        is_deletion: false,
        allow_index_creation,
    };
    let task: SummarizedTaskView = meilisearch
        .register_update(index_uid.into_inner(), update)
        .await?
        .into();

    debug!("returns: {:?}", task);
    Ok(HttpResponse::Accepted().json(task))
}

pub async fn get_all(
    data: GuardedData<Private, MeiliSearch>,
    data: GuardedData<ActionPolicy<{ actions::SETTINGS_GET }>, MeiliSearch>,
    index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    let settings = data.settings(index_uid.into_inner()).await?;
@@ -271,16 +304,22 @@ pub async fn get_all(
}

pub async fn delete_all(
    data: GuardedData<Private, MeiliSearch>,
    data: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
    index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    let settings = Settings::cleared();
    let settings = Settings::cleared().into_unchecked();

    let update = Update::Settings(settings.into_unchecked());
    let update_result = data
        .register_update(index_uid.into_inner(), update, false)
        .await?;
    let json = serde_json::json!({ "updateId": update_result.id() });
    debug!("returns: {:?}", json);
    Ok(HttpResponse::Accepted().json(json))
    let allow_index_creation = data.filters().allow_index_creation;
    let update = Update::Settings {
        settings,
        is_deletion: true,
        allow_index_creation,
    };
    let task: SummarizedTaskView = data
        .register_update(index_uid.into_inner(), update)
        .await?
        .into();

    debug!("returns: {:?}", task);
    Ok(HttpResponse::Accepted().json(task))
}

meilisearch-http/src/routes/indexes/tasks.rs (new file, 80 lines)
@@ -0,0 +1,80 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::MeiliSearch;
use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;

use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::{TaskListView, TaskView};

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::get().to(SeqHandler(get_all_tasks_status))))
        .service(web::resource("{task_id}").route(web::get().to(SeqHandler(get_task_status))));
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdateIndexResponse {
    name: String,
    uid: String,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    created_at: OffsetDateTime,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    updated_at: OffsetDateTime,
    primary_key: Option<String>,
}

#[derive(Deserialize)]
pub struct UpdateParam {
    index_uid: String,
    task_id: u64,
}

pub async fn get_task_status(
    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
    index_uid: web::Path<UpdateParam>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.publish(
        "Index Tasks Seen".to_string(),
        json!({ "per_task_uid": true }),
        Some(&req),
    );

    let UpdateParam { index_uid, task_id } = index_uid.into_inner();

    let task: TaskView = meilisearch.get_index_task(index_uid, task_id).await?.into();

    debug!("returns: {:?}", task);
    Ok(HttpResponse::Ok().json(task))
}

pub async fn get_all_tasks_status(
    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
    index_uid: web::Path<String>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.publish(
        "Index Tasks Seen".to_string(),
        json!({ "per_task_uid": false }),
        Some(&req),
    );

    let tasks: TaskListView = meilisearch
        .list_index_task(index_uid.into_inner(), None, None)
        .await?
        .into_iter()
        .map(TaskView::from)
        .collect::<Vec<_>>()
        .into();

    debug!("returns: {:?}", tasks);
    Ok(HttpResponse::Ok().json(tasks))
}
@@ -1,59 +0,0 @@
use actix_web::{web, HttpResponse};
use chrono::{DateTime, Utc};
use log::debug;
use meilisearch_lib::MeiliSearch;
use serde::{Deserialize, Serialize};

use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::routes::{IndexParam, UpdateStatusResponse};

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::get().to(get_all_updates_status)))
        .service(web::resource("{update_id}").route(web::get().to(get_update_status)));
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdateIndexResponse {
    name: String,
    uid: String,
    created_at: DateTime<Utc>,
    updated_at: DateTime<Utc>,
    primary_key: Option<String>,
}

#[derive(Deserialize)]
pub struct UpdateParam {
    index_uid: String,
    update_id: u64,
}

pub async fn get_update_status(
    meilisearch: GuardedData<Private, MeiliSearch>,
    path: web::Path<UpdateParam>,
) -> Result<HttpResponse, ResponseError> {
    let params = path.into_inner();
    let meta = meilisearch
        .update_status(params.index_uid, params.update_id)
        .await?;
    let meta = UpdateStatusResponse::from(meta);
    debug!("returns: {:?}", meta);
    Ok(HttpResponse::Ok().json(meta))
}

pub async fn get_all_updates_status(
    meilisearch: GuardedData<Private, MeiliSearch>,
    path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
    let metas = meilisearch
        .all_update_status(path.into_inner().index_uid)
        .await?;
    let metas = metas
        .into_iter()
        .map(UpdateStatusResponse::from)
        .collect::<Vec<_>>();

    debug!("returns: {:?}", metas);
    Ok(HttpResponse::Ok().json(metas))
}

@@ -1,25 +1,24 @@
use std::time::Duration;

use actix_web::{web, HttpResponse};
use chrono::{DateTime, Utc};
use log::debug;
use meilisearch_lib::index_controller::updates::status::{UpdateResult, UpdateStatus};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::{MeiliSearch, Update};
use meilisearch_lib::MeiliSearch;

use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::ApiKeys;

mod api_key;
mod dump;
pub mod indexes;
mod tasks;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("/health").route(web::get().to(get_health)))
    cfg.service(web::scope("/tasks").configure(tasks::configure))
        .service(web::resource("/health").route(web::get().to(get_health)))
        .service(web::scope("/keys").configure(api_key::configure))
        .service(web::scope("/dumps").configure(dump::configure))
        .service(web::resource("/keys").route(web::get().to(list_keys)))
        .service(web::resource("/stats").route(web::get().to(get_stats)))
        .service(web::resource("/version").route(web::get().to(get_version)))
        .service(web::scope("/indexes").configure(indexes::configure));
@@ -48,38 +47,6 @@ pub enum UpdateType {
    },
}

impl From<&UpdateStatus> for UpdateType {
    fn from(other: &UpdateStatus) -> Self {
        use meilisearch_lib::milli::update::IndexDocumentsMethod::*;
        match other.meta() {
            Update::DocumentAddition { method, .. } => {
                let number = match other {
                    UpdateStatus::Processed(processed) => match processed.success {
                        UpdateResult::DocumentsAddition(ref addition) => {
                            Some(addition.nb_documents)
                        }
                        _ => None,
                    },
                    _ => None,
                };

                match method {
                    ReplaceDocuments => UpdateType::DocumentsAddition { number },
                    UpdateDocuments => UpdateType::DocumentsPartial { number },
                    _ => unreachable!(),
                }
            }
            Update::Settings(settings) => UpdateType::Settings {
                settings: settings.clone(),
            },
            Update::ClearDocuments => UpdateType::ClearAll,
            Update::DeleteDocuments(ids) => UpdateType::DocumentsDeletion {
                number: Some(ids.len()),
            },
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ProcessedUpdateResult {
@@ -87,8 +54,10 @@ pub struct ProcessedUpdateResult {
    #[serde(rename = "type")]
    pub update_type: UpdateType,
    pub duration: f64, // in seconds
    pub enqueued_at: DateTime<Utc>,
    pub processed_at: DateTime<Utc>,
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub processed_at: OffsetDateTime,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -99,8 +68,10 @@ pub struct FailedUpdateResult {
    pub update_type: UpdateType,
    pub error: ResponseError,
    pub duration: f64, // in seconds
    pub enqueued_at: DateTime<Utc>,
    pub processed_at: DateTime<Utc>,
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub processed_at: OffsetDateTime,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -109,9 +80,13 @@ pub struct EnqueuedUpdateResult {
    pub update_id: u64,
    #[serde(rename = "type")]
    pub update_type: UpdateType,
    pub enqueued_at: DateTime<Utc>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub started_processing_at: Option<DateTime<Utc>>,
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(
        skip_serializing_if = "Option::is_none",
        with = "time::serde::rfc3339::option"
    )]
    pub started_processing_at: Option<OffsetDateTime>,
}
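
The `time::serde::rfc3339` helpers used above come from time 0.3 (behind its serde well-known-format feature); a minimal sketch of the resulting wire format, with assumed struct and field names:

```rust
use serde::Serialize;
use time::OffsetDateTime;

#[derive(Serialize)]
struct Stamped {
    #[serde(with = "time::serde::rfc3339")]
    at: OffsetDateTime,
    #[serde(
        skip_serializing_if = "Option::is_none",
        with = "time::serde::rfc3339::option"
    )]
    maybe_at: Option<OffsetDateTime>,
}

fn main() {
    let s = Stamped {
        at: OffsetDateTime::UNIX_EPOCH,
        maybe_at: None,
    };
    // Prints {"at":"1970-01-01T00:00:00Z"}; `maybe_at` is skipped entirely.
    println!("{}", serde_json::to_string(&s).unwrap());
}
```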

#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -135,81 +110,6 @@ pub enum UpdateStatusResponse {
    },
}

impl From<UpdateStatus> for UpdateStatusResponse {
    fn from(other: UpdateStatus) -> Self {
        let update_type = UpdateType::from(&other);

        match other {
            UpdateStatus::Processing(processing) => {
                let content = EnqueuedUpdateResult {
                    update_id: processing.id(),
                    update_type,
                    enqueued_at: processing.from.enqueued_at,
                    started_processing_at: Some(processing.started_processing_at),
                };
                UpdateStatusResponse::Processing { content }
            }
            UpdateStatus::Enqueued(enqueued) => {
                let content = EnqueuedUpdateResult {
                    update_id: enqueued.id(),
                    update_type,
                    enqueued_at: enqueued.enqueued_at,
                    started_processing_at: None,
                };
                UpdateStatusResponse::Enqueued { content }
            }
            UpdateStatus::Processed(processed) => {
                let duration = processed
                    .processed_at
                    .signed_duration_since(processed.from.started_processing_at)
                    .num_milliseconds();

                // necessary since chrono::Duration doesn't expose an f64 seconds method.
                let duration = Duration::from_millis(duration as u64).as_secs_f64();

                let content = ProcessedUpdateResult {
                    update_id: processed.id(),
                    update_type,
                    duration,
                    enqueued_at: processed.from.from.enqueued_at,
                    processed_at: processed.processed_at,
                };
                UpdateStatusResponse::Processed { content }
            }
            UpdateStatus::Aborted(_) => unreachable!(),
            UpdateStatus::Failed(failed) => {
                let duration = failed
                    .failed_at
                    .signed_duration_since(failed.from.started_processing_at)
                    .num_milliseconds();

                // necessary since chrono::Duration doesn't expose an f64 seconds method.
                let duration = Duration::from_millis(duration as u64).as_secs_f64();

                let update_id = failed.id();
                let processed_at = failed.failed_at;
                let enqueued_at = failed.from.from.enqueued_at;
                let error = failed.into();

                let content = FailedUpdateResult {
                    update_id,
                    update_type,
                    error,
                    duration,
                    enqueued_at,
                    processed_at,
                };
                UpdateStatusResponse::Failed { content }
            }
        }
    }
}

#[derive(Deserialize)]
pub struct IndexParam {
    index_uid: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexUpdateResponse {
@@ -229,13 +129,14 @@ impl IndexUpdateResponse {
/// }
/// ```
pub async fn running() -> HttpResponse {
    HttpResponse::Ok().json(serde_json::json!({ "status": "MeiliSearch is running" }))
    HttpResponse::Ok().json(serde_json::json!({ "status": "Meilisearch is running" }))
}

async fn get_stats(
    meilisearch: GuardedData<Private, MeiliSearch>,
    meilisearch: GuardedData<ActionPolicy<{ actions::STATS_GET }>, MeiliSearch>,
) -> Result<HttpResponse, ResponseError> {
    let response = meilisearch.get_all_stats().await?;
    let search_rules = &meilisearch.filters().search_rules;
    let response = meilisearch.get_all_stats(search_rules).await?;

    debug!("returns: {:?}", response);
    Ok(HttpResponse::Ok().json(response))
@@ -249,7 +150,9 @@ struct VersionResponse {
    pkg_version: String,
}

async fn get_version(_meilisearch: GuardedData<Private, MeiliSearch>) -> HttpResponse {
async fn get_version(
    _meilisearch: GuardedData<ActionPolicy<{ actions::VERSION }>, MeiliSearch>,
) -> HttpResponse {
    let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
    let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");

@@ -266,108 +169,6 @@ struct KeysResponse {
    public: Option<String>,
}

pub async fn list_keys(meilisearch: GuardedData<Admin, ApiKeys>) -> HttpResponse {
    let api_keys = (*meilisearch).clone();
    HttpResponse::Ok().json(&KeysResponse {
        private: api_keys.private,
        public: api_keys.public,
    })
}

pub async fn get_health() -> Result<HttpResponse, ResponseError> {
    Ok(HttpResponse::Ok().json(serde_json::json!({ "status": "available" })))
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::extractors::authentication::GuardedData;

    /// A type implemented for a route that uses an authentication policy `Policy`.
    ///
    /// This trait is used for regression testing of route authentication policies.
    trait Is<Policy, Data, T> {}

    macro_rules! impl_is_policy {
        ($($param:ident)*) => {
            impl<Policy, Func, Data, $($param,)* Res> Is<Policy, Data, (($($param,)*), Res)> for Func
            where Func: Fn(GuardedData<Policy, Data>, $($param,)*) -> Res {}

        };
    }

    impl_is_policy! {}
    impl_is_policy! {A}
    impl_is_policy! {A B}
    impl_is_policy! {A B C}
    impl_is_policy! {A B C D}
    impl_is_policy! {A B C D E}

    /// Emits a compile error if a route doesn't have the correct authentication policy.
    ///
    /// This works by trying to cast the route function into an `Is<Policy, _>` type, where `Policy` is
    /// the authentication policy defined for the route.
    macro_rules! test_auth_routes {
        ($($policy:ident => { $($route:expr,)*})*) => {
            #[test]
            fn test_auth() {
                $($(let _: &dyn Is<$policy, _, _> = &$route;)*)*
            }
        };
    }
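
A self-contained sketch of the same trick with stand-in types (all names below are illustrative): the blanket impl only matches functions whose first parameter is guarded by `P`, so the trait-object cast doubles as a compile-time policy assertion.

```rust
use std::marker::PhantomData;

struct Guarded<P, D>(PhantomData<(P, D)>);
struct Public;
struct Private;

trait Is<P, D, T> {}

// Blanket impl for handlers with one extra argument, mirroring impl_is_policy! {A}.
impl<P, F, D, A, R> Is<P, D, (A, R)> for F where F: Fn(Guarded<P, D>, A) -> R {}

fn route(_guard: Guarded<Public, ()>, _query: String) -> u8 {
    0
}

fn main() {
    let _: &dyn Is<Public, _, _> = &route; // compiles: the policy matches
    // let _: &dyn Is<Private, _, _> = &route; // would fail to compile
}
```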

    test_auth_routes! {
        Public => {
            indexes::search::search_with_url_query,
            indexes::search::search_with_post,

            indexes::documents::get_document,
            indexes::documents::get_all_documents,
        }
        Private => {
            get_stats,
            get_version,

            indexes::create_index,
            indexes::list_indexes,
            indexes::get_index_stats,
            indexes::delete_index,
            indexes::update_index,
            indexes::get_index,

            dump::create_dump,

            indexes::settings::filterable_attributes::get,
            indexes::settings::displayed_attributes::get,
            indexes::settings::searchable_attributes::get,
            indexes::settings::stop_words::get,
            indexes::settings::synonyms::get,
            indexes::settings::distinct_attribute::get,
            indexes::settings::filterable_attributes::update,
            indexes::settings::displayed_attributes::update,
            indexes::settings::searchable_attributes::update,
            indexes::settings::stop_words::update,
            indexes::settings::synonyms::update,
            indexes::settings::distinct_attribute::update,
            indexes::settings::filterable_attributes::delete,
            indexes::settings::displayed_attributes::delete,
            indexes::settings::searchable_attributes::delete,
            indexes::settings::stop_words::delete,
            indexes::settings::synonyms::delete,
            indexes::settings::distinct_attribute::delete,
            indexes::settings::delete_all,
            indexes::settings::get_all,
            indexes::settings::update_all,

            indexes::documents::clear_all_documents,
            indexes::documents::delete_documents,
            indexes::documents::update_documents,
            indexes::documents::add_documents,
            indexes::documents::delete_document,

            indexes::updates::get_all_updates_status,
            indexes::updates::get_update_status,
        }
        Admin => { list_keys, }
    }
}

meilisearch-http/src/routes/tasks.rs (new file, 80 lines)
@@ -0,0 +1,80 @@
use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_error::ResponseError;
use meilisearch_lib::tasks::task::TaskId;
use meilisearch_lib::tasks::TaskFilter;
use meilisearch_lib::MeiliSearch;
use serde_json::json;

use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::{TaskListView, TaskView};

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks))))
        .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
}

async fn get_tasks(
    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.publish(
        "Tasks Seen".to_string(),
        json!({ "per_task_uid": false }),
        Some(&req),
    );

    let search_rules = &meilisearch.filters().search_rules;
    let filters = if search_rules.is_index_authorized("*") {
        None
    } else {
        let mut filters = TaskFilter::default();
        for (index, _policy) in search_rules.clone() {
            filters.filter_index(index);
        }
        Some(filters)
    };

    let tasks: TaskListView = meilisearch
        .list_tasks(filters, None, None)
        .await?
        .into_iter()
        .map(TaskView::from)
        .collect::<Vec<_>>()
        .into();

    Ok(HttpResponse::Ok().json(tasks))
}
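
The two handlers in this file build the same visibility filter; a hedged refactor sketch of the shared construction (the helper name and the `SearchRules` import are assumptions, not part of this diff):

```rust
use meilisearch_auth::SearchRules;
use meilisearch_lib::tasks::TaskFilter;

fn task_filter_from_search_rules(search_rules: &SearchRules) -> Option<TaskFilter> {
    if search_rules.is_index_authorized("*") {
        // A wildcard-authorized key sees every task; no filter needed.
        None
    } else {
        let mut filters = TaskFilter::default();
        for (index, _policy) in search_rules.clone() {
            filters.filter_index(index);
        }
        Some(filters)
    }
}
```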

async fn get_task(
    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
    task_id: web::Path<TaskId>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.publish(
        "Tasks Seen".to_string(),
        json!({ "per_task_uid": true }),
        Some(&req),
    );

    let search_rules = &meilisearch.filters().search_rules;
    let filters = if search_rules.is_index_authorized("*") {
        None
    } else {
        let mut filters = TaskFilter::default();
        for (index, _policy) in search_rules.clone() {
            filters.filter_index(index);
        }
        Some(filters)
    };

    let task: TaskView = meilisearch
        .get_task(task_id.into_inner(), filters)
        .await?
        .into();

    Ok(HttpResponse::Ok().json(task))
}
meilisearch-http/src/task.rs (new file, 374 lines)
@@ -0,0 +1,374 @@
use std::fmt::Write;
use std::write;

use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::milli::update::IndexDocumentsMethod;
use meilisearch_lib::tasks::batch::BatchId;
use meilisearch_lib::tasks::task::{
    DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
};
use serde::{Serialize, Serializer};
use time::{Duration, OffsetDateTime};

use crate::AUTOBATCHING_ENABLED;

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
enum TaskType {
    IndexCreation,
    IndexUpdate,
    IndexDeletion,
    DocumentAddition,
    DocumentPartial,
    DocumentDeletion,
    SettingsUpdate,
    ClearAll,
}

impl From<TaskContent> for TaskType {
    fn from(other: TaskContent) -> Self {
        match other {
            TaskContent::DocumentAddition {
                merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
                ..
            } => TaskType::DocumentAddition,
            TaskContent::DocumentAddition {
                merge_strategy: IndexDocumentsMethod::UpdateDocuments,
                ..
            } => TaskType::DocumentPartial,
            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll,
            TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion,
            TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
            TaskContent::IndexDeletion => TaskType::IndexDeletion,
            TaskContent::IndexCreation { .. } => TaskType::IndexCreation,
            TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate,
            _ => unreachable!("unexpected task type"),
        }
    }
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
enum TaskStatus {
    Enqueued,
    Processing,
    Succeeded,
    Failed,
}

#[derive(Debug, Serialize)]
#[serde(untagged)]
#[allow(clippy::large_enum_variant)]
enum TaskDetails {
    #[serde(rename_all = "camelCase")]
    DocumentAddition {
        received_documents: usize,
        indexed_documents: Option<u64>,
    },
    #[serde(rename_all = "camelCase")]
    Settings {
        #[serde(flatten)]
        settings: Settings<Unchecked>,
    },
    #[serde(rename_all = "camelCase")]
    IndexInfo { primary_key: Option<String> },
    #[serde(rename_all = "camelCase")]
    DocumentDeletion {
        received_document_ids: usize,
        deleted_documents: Option<u64>,
    },
    #[serde(rename_all = "camelCase")]
    ClearAll { deleted_documents: Option<u64> },
}

/// Serialize a `time::Duration` as a best-effort ISO 8601 duration while waiting for
/// https://github.com/time-rs/time/issues/378.
/// This code is a port of the old `time` code that was removed in 0.2.
fn serialize_duration<S: Serializer>(
    duration: &Option<Duration>,
    serializer: S,
) -> Result<S::Ok, S::Error> {
    match duration {
        Some(duration) => {
            // technically speaking, a negative duration is not valid ISO 8601
            if duration.is_negative() {
                return serializer.serialize_none();
            }

            const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds();
            let secs = duration.whole_seconds();
            let days = secs / SECS_PER_DAY;
            let secs = secs - days * SECS_PER_DAY;
            let hasdate = days != 0;
            let nanos = duration.subsec_nanoseconds();
            let hastime = (secs != 0 || nanos != 0) || !hasdate;

            // none of the following unwraps can fail: writing to a String is infallible
            let mut res = String::new();
            write!(&mut res, "P").unwrap();

            if hasdate {
                write!(&mut res, "{}D", days).unwrap();
            }

            const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds();
            const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds();

            if hastime {
                if nanos == 0 {
                    write!(&mut res, "T{}S", secs).unwrap();
                } else if nanos % NANOS_PER_MILLI == 0 {
                    write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap();
                } else if nanos % NANOS_PER_MICRO == 0 {
                    write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap();
                } else {
                    write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap();
                }
            }

            serializer.serialize_str(&res)
        }
        None => serializer.serialize_none(),
    }
}
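
A minimal in-module test sketch of the formatter's output (the wrapper type is an assumption, used only to exercise `serialize_with` through serde_json):

```rust
#[cfg(test)]
mod serialize_duration_tests {
    use super::*;

    #[derive(Serialize)]
    struct Wrap(#[serde(serialize_with = "serialize_duration")] Option<Duration>);

    #[test]
    fn formats_best_effort_iso_8601() {
        let to_s = |d| serde_json::to_string(&Wrap(Some(d))).unwrap();
        // Seconds are not broken down into hours/minutes by this port.
        assert_eq!(to_s(Duration::seconds(3661)), r#""PT3661S""#);
        assert_eq!(to_s(Duration::milliseconds(1500)), r#""PT1.500S""#);
        assert_eq!(to_s(Duration::seconds(90_061)), r#""P1DT3661S""#);
        // Negative durations serialize as null rather than invalid ISO 8601.
        assert_eq!(to_s(Duration::seconds(-1)), "null");
    }
}
```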

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskView {
    uid: TaskId,
    index_uid: String,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,
    #[serde(skip_serializing_if = "Option::is_none")]
    details: Option<TaskDetails>,
    #[serde(skip_serializing_if = "Option::is_none")]
    error: Option<ResponseError>,
    #[serde(serialize_with = "serialize_duration")]
    duration: Option<Duration>,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    enqueued_at: OffsetDateTime,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    started_at: Option<OffsetDateTime>,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    finished_at: Option<OffsetDateTime>,
    #[serde(skip_serializing_if = "Option::is_none")]
    batch_uid: Option<Option<BatchId>>,
}

impl From<Task> for TaskView {
    fn from(task: Task) -> Self {
        let Task {
            id,
            index_uid,
            content,
            events,
        } = task;

        let (task_type, mut details) = match content {
            TaskContent::DocumentAddition {
                merge_strategy,
                documents_count,
                ..
            } => {
                let details = TaskDetails::DocumentAddition {
                    received_documents: documents_count,
                    indexed_documents: None,
                };

                let task_type = match merge_strategy {
                    IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial,
                    IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition,
                    _ => unreachable!("Unexpected document merge strategy."),
                };

                (task_type, Some(details))
            }
            TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => (
                TaskType::DocumentDeletion,
                Some(TaskDetails::DocumentDeletion {
                    received_document_ids: ids.len(),
                    deleted_documents: None,
                }),
            ),
            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => (
                TaskType::ClearAll,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
                }),
            ),
            TaskContent::IndexDeletion => (
                TaskType::IndexDeletion,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
                }),
            ),
            TaskContent::SettingsUpdate { settings, .. } => (
                TaskType::SettingsUpdate,
                Some(TaskDetails::Settings { settings }),
            ),
            TaskContent::IndexCreation { primary_key } => (
                TaskType::IndexCreation,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
            TaskContent::IndexUpdate { primary_key } => (
                TaskType::IndexUpdate,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
        };

        // A task always has at least one event: "Created".
        let (status, error, finished_at) = match events.last().unwrap() {
            TaskEvent::Created(_) => (TaskStatus::Enqueued, None, None),
            TaskEvent::Batched { .. } => (TaskStatus::Enqueued, None, None),
            TaskEvent::Processing(_) => (TaskStatus::Processing, None, None),
            TaskEvent::Succeded { timestamp, result } => {
                match (result, &mut details) {
                    (
                        TaskResult::DocumentAddition {
                            indexed_documents: num,
                            ..
                        },
                        Some(TaskDetails::DocumentAddition {
                            ref mut indexed_documents,
                            ..
                        }),
                    ) => {
                        indexed_documents.replace(*num);
                    }
                    (
                        TaskResult::DocumentDeletion {
                            deleted_documents: docs,
                            ..
                        },
                        Some(TaskDetails::DocumentDeletion {
                            ref mut deleted_documents,
                            ..
                        }),
                    ) => {
                        deleted_documents.replace(*docs);
                    }
                    (
                        TaskResult::ClearAll {
                            deleted_documents: docs,
                        },
                        Some(TaskDetails::ClearAll {
                            ref mut deleted_documents,
                        }),
                    ) => {
                        deleted_documents.replace(*docs);
                    }
                    _ => (),
                }
                (TaskStatus::Succeeded, None, Some(*timestamp))
            }
            TaskEvent::Failed { timestamp, error } => {
                match details {
                    Some(TaskDetails::DocumentDeletion {
                        ref mut deleted_documents,
                        ..
                    }) => {
                        deleted_documents.replace(0);
                    }
                    Some(TaskDetails::ClearAll {
                        ref mut deleted_documents,
                        ..
                    }) => {
                        deleted_documents.replace(0);
                    }
                    Some(TaskDetails::DocumentAddition {
                        ref mut indexed_documents,
                        ..
                    }) => {
                        indexed_documents.replace(0);
                    }
                    _ => (),
                }
                (TaskStatus::Failed, Some(error.clone()), Some(*timestamp))
            }
        };

        let enqueued_at = match events.first() {
            Some(TaskEvent::Created(ts)) => *ts,
            _ => unreachable!("A task must always have a creation event."),
        };

        let started_at = events.iter().find_map(|e| match e {
            TaskEvent::Processing(ts) => Some(*ts),
            _ => None,
        });

        let duration = finished_at.zip(started_at).map(|(tf, ts)| (tf - ts));

        let batch_uid = if AUTOBATCHING_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {
            let id = events.iter().find_map(|e| match e {
                TaskEvent::Batched { batch_id, .. } => Some(*batch_id),
                _ => None,
            });
            Some(id)
        } else {
            None
        };

        Self {
            uid: id,
            index_uid: index_uid.into_inner(),
            status,
            task_type,
            details,
            error,
            duration,
            enqueued_at,
            started_at,
            finished_at,
            batch_uid,
        }
    }
}

#[derive(Debug, Serialize)]
pub struct TaskListView {
    results: Vec<TaskView>,
}

impl From<Vec<TaskView>> for TaskListView {
    fn from(results: Vec<TaskView>) -> Self {
        Self { results }
    }
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
    uid: TaskId,
    index_uid: String,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    enqueued_at: OffsetDateTime,
}

impl From<Task> for SummarizedTaskView {
    fn from(mut other: Task) -> Self {
        let created_event = other
            .events
            .drain(..1)
            .next()
            .expect("Task must have an enqueued event.");

        let enqueued_at = match created_event {
            TaskEvent::Created(ts) => ts,
            _ => unreachable!("The first event of a task must always be 'Created'"),
        };

        Self {
            uid: other.id,
            index_uid: other.index_uid.to_string(),
            status: TaskStatus::Enqueued,
            task_type: other.content.into(),
            enqueued_at,
        }
    }
}
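
A hedged in-module test sketch of the summarized view's JSON shape (placed inside this module because the fields are private; the field values are illustrative):

```rust
#[cfg(test)]
mod summarized_task_view_tests {
    use super::*;

    #[test]
    fn serializes_with_camel_case_keys() {
        let view = SummarizedTaskView {
            uid: 0,
            index_uid: "movies".to_string(),
            status: TaskStatus::Enqueued,
            task_type: TaskType::IndexCreation,
            enqueued_at: OffsetDateTime::UNIX_EPOCH,
        };
        let json = serde_json::to_value(&view).unwrap();
        // Keys are camelCased and the timestamp is RFC 3339.
        assert_eq!(json["indexUid"], "movies");
        assert_eq!(json["status"], "enqueued");
        assert_eq!(json["type"], "indexCreation");
        assert_eq!(json["enqueuedAt"], "1970-01-01T00:00:00Z");
    }
}
```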

meilisearch-http/tests/auth/api_keys.rs (new file, 1335 lines; diff suppressed because it is too large)
meilisearch-http/tests/auth/authorization.rs (new file, 620 lines)
@@ -0,0 +1,620 @@
|
||||
use crate::common::Server;
|
||||
use ::time::format_description::well_known::Rfc3339;
|
||||
use maplit::{hashmap, hashset};
|
||||
use once_cell::sync::Lazy;
|
||||
use serde_json::{json, Value};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
|
||||
Lazy::new(|| {
|
||||
hashmap! {
|
||||
("POST", "/indexes/products/search") => hashset!{"search", "*"},
|
||||
("GET", "/indexes/products/search") => hashset!{"search", "*"},
|
||||
("POST", "/indexes/products/documents") => hashset!{"documents.add", "*"},
|
||||
("GET", "/indexes/products/documents") => hashset!{"documents.get", "*"},
|
||||
("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "*"},
|
||||
("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "*"},
|
||||
("GET", "/tasks") => hashset!{"tasks.get", "*"},
|
||||
("GET", "/indexes/products/tasks") => hashset!{"tasks.get", "*"},
|
||||
("GET", "/indexes/products/tasks/0") => hashset!{"tasks.get", "*"},
|
||||
("PUT", "/indexes/products/") => hashset!{"indexes.update", "*"},
|
||||
("GET", "/indexes/products/") => hashset!{"indexes.get", "*"},
|
||||
("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "*"},
|
||||
("POST", "/indexes") => hashset!{"indexes.create", "*"},
|
||||
("GET", "/indexes") => hashset!{"indexes.get", "*"},
|
||||
("GET", "/indexes/products/settings") => hashset!{"settings.get", "*"},
|
||||
("GET", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.get", "*"},
|
||||
("GET", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.get", "*"},
|
||||
("GET", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.get", "*"},
|
||||
("GET", "/indexes/products/settings/ranking-rules") => hashset!{"settings.get", "*"},
|
||||
("GET", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.get", "*"},
|
||||
("GET", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.get", "*"},
|
||||
("GET", "/indexes/products/settings/stop-words") => hashset!{"settings.get", "*"},
|
||||
("GET", "/indexes/products/settings/synonyms") => hashset!{"settings.get", "*"},
|
||||
("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "*"},
|
||||
("POST", "/indexes/products/settings") => hashset!{"settings.update", "*"},
|
||||
("POST", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"},
|
||||
("POST", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"},
|
||||
("POST", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"},
|
||||
("POST", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "*"},
|
||||
("POST", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "*"},
|
||||
("POST", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "*"},
|
||||
("POST", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "*"},
|
||||
("POST", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "*"},
|
||||
("GET", "/indexes/products/stats") => hashset!{"stats.get", "*"},
|
||||
("GET", "/stats") => hashset!{"stats.get", "*"},
|
||||
("POST", "/dumps") => hashset!{"dumps.create", "*"},
|
||||
("GET", "/dumps/0/status") => hashset!{"dumps.get", "*"},
|
||||
("GET", "/version") => hashset!{"version", "*"},
|
||||
}
|
||||
});
|
||||
|
||||
pub static ALL_ACTIONS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
|
||||
AUTHORIZATIONS
|
||||
.values()
|
||||
.cloned()
|
||||
.reduce(|l, r| l.union(&r).cloned().collect())
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
|
||||
json!({"message": "The provided API key is invalid.",
|
||||
"code": "invalid_api_key",
|
||||
"type": "auth",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_api_key"
|
||||
})
|
||||
});
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn error_access_expired_key() {
|
||||
use std::{thread, time};
|
||||
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
let content = json!({
|
||||
"indexes": ["products"],
|
||||
"actions": ALL_ACTIONS.clone(),
|
||||
"expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(),
|
||||
});
|
||||
|
||||
let (response, code) = server.add_api_key(content).await;
|
||||
assert_eq!(code, 201);
|
||||
assert!(response["key"].is_string());
|
||||
|
||||
let key = response["key"].as_str().unwrap();
|
||||
server.use_api_key(&key);
|
||||
|
||||
// wait until the key is expired.
|
||||
thread::sleep(time::Duration::new(1, 0));
|
||||
|
||||
for (method, route) in AUTHORIZATIONS.keys() {
|
||||
let (response, code) = server.dummy_request(method, route).await;
|
||||
|
||||
assert_eq!(response, INVALID_RESPONSE.clone());
|
||||
assert_eq!(code, 403);
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn error_access_unauthorized_index() {
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
let content = json!({
|
||||
"indexes": ["sales"],
|
||||
"actions": ALL_ACTIONS.clone(),
|
||||
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
|
||||
});
|
||||
|
||||
let (response, code) = server.add_api_key(content).await;
|
||||
assert_eq!(code, 201);
|
||||
assert!(response["key"].is_string());
|
||||
|
||||
let key = response["key"].as_str().unwrap();
|
||||
server.use_api_key(&key);
|
||||
|
||||
for (method, route) in AUTHORIZATIONS
|
||||
.keys()
|
||||
// filter `products` index routes
|
||||
.filter(|(_, route)| route.starts_with("/indexes/products"))
|
||||
{
|
||||
let (response, code) = server.dummy_request(method, route).await;
|
||||
|
||||
assert_eq!(response, INVALID_RESPONSE.clone());
|
||||
assert_eq!(code, 403);
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn error_access_unauthorized_action() {
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
let content = json!({
|
||||
"indexes": ["products"],
|
||||
"actions": [],
|
||||
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
|
||||
});
|
||||
|
||||
let (response, code) = server.add_api_key(content).await;
|
||||
assert_eq!(code, 201);
|
||||
assert!(response["key"].is_string());
|
||||
|
||||
let key = response["key"].as_str().unwrap();
|
||||
server.use_api_key(&key);
|
||||
|
||||
for ((method, route), action) in AUTHORIZATIONS.iter() {
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
// Patch API key letting all rights but the needed one.
|
||||
let content = json!({
|
||||
"actions": ALL_ACTIONS.difference(action).collect::<Vec<_>>(),
|
||||
});
|
||||
let (_, code) = server.patch_api_key(&key, content).await;
|
||||
assert_eq!(code, 200);
|
||||
|
||||
server.use_api_key(&key);
|
||||
let (response, code) = server.dummy_request(method, route).await;
|
||||
|
||||
assert_eq!(response, INVALID_RESPONSE.clone());
|
||||
assert_eq!(code, 403);
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn access_authorized_restricted_index() {
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
let content = json!({
|
||||
"indexes": ["products"],
|
||||
"actions": [],
|
||||
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
|
||||
});
|
||||
|
||||
let (response, code) = server.add_api_key(content).await;
|
||||
assert_eq!(code, 201);
|
||||
assert!(response["key"].is_string());
|
||||
|
||||
let key = response["key"].as_str().unwrap();
|
||||
server.use_api_key(&key);
|
||||
|
||||
for ((method, route), actions) in AUTHORIZATIONS.iter() {
|
||||
for action in actions {
|
||||
// Patch API key letting only the needed action.
|
||||
let content = json!({
|
||||
"actions": [action],
|
||||
});
|
||||
|
||||
server.use_api_key("MASTER_KEY");
|
||||
let (_, code) = server.patch_api_key(&key, content).await;
|
||||
assert_eq!(code, 200);
|
||||
|
||||
server.use_api_key(&key);
|
||||
let (response, code) = server.dummy_request(method, route).await;
|
||||
|
||||
assert_ne!(response, INVALID_RESPONSE.clone());
|
||||
assert_ne!(code, 403);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn access_authorized_no_index_restriction() {
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
let content = json!({
|
||||
"indexes": ["*"],
|
||||
"actions": [],
|
||||
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
|
||||
});
|
||||
|
||||
let (response, code) = server.add_api_key(content).await;
|
||||
assert_eq!(code, 201);
|
||||
assert!(response["key"].is_string());
|
||||
|
||||
let key = response["key"].as_str().unwrap();
|
||||
server.use_api_key(&key);
|
||||
|
||||
for ((method, route), actions) in AUTHORIZATIONS.iter() {
|
||||
for action in actions {
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
// Patch API key letting only the needed action.
|
||||
let content = json!({
|
||||
"actions": [action],
|
||||
});
|
||||
let (_, code) = server.patch_api_key(&key, content).await;
|
||||
assert_eq!(code, 200);
|
||||
|
||||
server.use_api_key(&key);
|
||||
let (response, code) = server.dummy_request(method, route).await;
|
||||
|
||||
assert_ne!(response, INVALID_RESPONSE.clone());
|
||||
assert_ne!(code, 403);
|
||||
}
|
||||
}
|
||||
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create a key with access to the `products` index only.
    let content = json!({
        "indexes": ["products"],
        "actions": ["stats.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use the created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.stats().await;
    assert_eq!(code, 200);

    // the key should have access to the `products` index.
    assert!(response["indexes"].get("products").is_some());

    // the key should not have access to the `test` index.
    assert!(response["indexes"].get("test").is_none());
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create a key with access to all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["stats.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use the created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.stats().await;
    assert_eq!(code, 200);

    // the key should have access to the `products` index.
    assert!(response["indexes"].get("products").is_some());

    // the key should have access to the `test` index.
    assert!(response["indexes"].get("test").is_some());
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create a key with access to the `products` index only.
    let content = json!({
        "indexes": ["products"],
        "actions": ["indexes.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use the created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.list_indexes().await;
    assert_eq!(code, 200);

    let response = response.as_array().unwrap();
    // the key should have access to the `products` index.
    assert!(response.iter().any(|index| index["uid"] == "products"));

    // the key should not have access to the `test` index.
    assert!(!response.iter().any(|index| index["uid"] == "test"));
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create a key with access to all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["indexes.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use the created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.list_indexes().await;
    assert_eq!(code, 200);

    let response = response.as_array().unwrap();
    // the key should have access to the `products` index.
    assert!(response.iter().any(|index| index["uid"] == "products"));

    // the key should have access to the `test` index.
    assert!(response.iter().any(|index| index["uid"] == "test"));
}

#[actix_rt::test]
async fn list_authorized_tasks_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create a key with access to the `products` index only.
    let content = json!({
        "indexes": ["products"],
        "actions": ["tasks.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use the created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.service.get("/tasks").await;
    assert_eq!(code, 200);

    let response = response["results"].as_array().unwrap();
    // the key should have access to the `products` index.
    assert!(response.iter().any(|task| task["indexUid"] == "products"));

    // the key should not have access to the `test` index.
    assert!(!response.iter().any(|task| task["indexUid"] == "test"));
}

#[actix_rt::test]
async fn list_authorized_tasks_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create a key with access to all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["tasks.get"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use the created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.service.get("/tasks").await;
    assert_eq!(code, 200);

    let response = response["results"].as_array().unwrap();
    // the key should have access to the `products` index.
    assert!(response.iter().any(|task| task["indexUid"] == "products"));

    // the key should have access to the `test` index.
    assert!(response.iter().any(|task| task["indexUid"] == "test"));
}

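// Without any index-creation action, routes that would lazily create their
// target index must enqueue a task that fails with `index_not_found`.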
#[actix_rt::test]
async fn error_creating_index_without_action() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create a key with access to all indexes.
    let content = json!({
        "indexes": ["*"],
        // Grant every action except the ones that allow creating an index.
        "actions": ALL_ACTIONS.iter().cloned().filter(|a| !AUTHORIZATIONS.get(&("POST","/indexes")).unwrap().contains(a)).collect::<Vec<_>>(),
        "expiresAt": "2050-11-13T00:00:00Z"
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use the created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let expected_error = json!({
        "message": "Index `test` not found.",
        "code": "index_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#index_not_found"
    });

    // try to create an index via the add-documents route
    let index = server.index("test");
    let documents = json!([
        {
            "id": 1,
            "content": "foo",
        }
    ]);

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202, "{:?}", response);
    let task_id = response["uid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;
    assert_eq!(response["status"], "failed");
    assert_eq!(response["error"], expected_error.clone());

    // try to create an index via the update-settings route
    let settings = json!({ "distinctAttribute": "test" });

    let (response, code) = index.update_settings(settings).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;

    assert_eq!(response["status"], "failed");
    assert_eq!(response["error"], expected_error.clone());

    // try to create an index via a specialized settings route
    let (response, code) = index.update_distinct_attribute(json!("test")).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;

    assert_eq!(response["status"], "failed");
    assert_eq!(response["error"], expected_error.clone());
}

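// With a key allowed to perform every action, the same document and settings
// routes should lazily create the missing index and the tasks should succeed.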
#[actix_rt::test]
async fn lazy_create_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create a key with access to all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": "2050-11-13T00:00:00Z"
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use the created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    // try to create an index via the add-documents route
    let index = server.index("test");
    let documents = json!([
        {
            "id": 1,
            "content": "foo",
        }
    ]);

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202, "{:?}", response);
    let task_id = response["uid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");

    // try to create an index via the update-settings route
    let index = server.index("test1");
    let settings = json!({ "distinctAttribute": "test" });

    let (response, code) = index.update_settings(settings).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");

    // try to create an index via a specialized settings route
    let index = server.index("test2");
    let (response, code) = index.update_distinct_attribute(json!("test")).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");
}

meilisearch-http/tests/auth/mod.rs  (new file, 55 lines)
@@ -0,0 +1,55 @@
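// Entry point of the auth test suite: API-key CRUD, per-route authorization,
// payload validation, and tenant tokens, plus `/keys` helpers on the test `Server`.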
mod api_keys;
mod authorization;
mod payload;
mod tenant_token;

use crate::common::Server;
use actix_web::http::StatusCode;
use serde_json::{json, Value};

impl Server {
    pub fn use_api_key(&mut self, api_key: impl AsRef<str>) {
        self.service.api_key = Some(api_key.as_ref().to_string());
    }

    pub async fn add_api_key(&self, content: Value) -> (Value, StatusCode) {
        let url = "/keys";
        self.service.post(url, content).await
    }

    pub async fn get_api_key(&self, key: impl AsRef<str>) -> (Value, StatusCode) {
        let url = format!("/keys/{}", key.as_ref());
        self.service.get(url).await
    }

    pub async fn patch_api_key(&self, key: impl AsRef<str>, content: Value) -> (Value, StatusCode) {
        let url = format!("/keys/{}", key.as_ref());
        self.service.patch(url, content).await
    }

    pub async fn list_api_keys(&self) -> (Value, StatusCode) {
        let url = "/keys";
        self.service.get(url).await
    }

    pub async fn delete_api_key(&self, key: impl AsRef<str>) -> (Value, StatusCode) {
        let url = format!("/keys/{}", key.as_ref());
        self.service.delete(url).await
    }

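    // Sends a request with an empty JSON body to an arbitrary route; the
    // authorization tests use it to probe endpoints without a meaningful payload.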
    pub async fn dummy_request(
        &self,
        method: impl AsRef<str>,
        url: impl AsRef<str>,
    ) -> (Value, StatusCode) {
        match method.as_ref() {
            "POST" => self.service.post(url, json!({})).await,
            "PUT" => self.service.put(url, json!({})).await,
            "PATCH" => self.service.patch(url, json!({})).await,
            "GET" => self.service.get(url).await,
            "DELETE" => self.service.delete(url).await,
            _ => unreachable!(),
        }
    }
}

meilisearch-http/tests/auth/payload.rs  (new file, 340 lines)
@@ -0,0 +1,340 @@
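// Content-Type and payload validation for the `/keys` resource; every test
// exercises both the POST /keys and PATCH /keys/{key} routes.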
use crate::common::Server;
use actix_web::test;
use meilisearch_http::{analytics, create_app};
use serde_json::{json, Value};

#[actix_rt::test]
async fn error_api_key_bad_content_types() {
    let content = json!({
        "indexes": ["products"],
        "actions": [
            "documents.add"
        ],
        "expiresAt": "2050-11-13T00:00:00Z"
    });

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let app = test::init_service(create_app!(
        &server.service.meilisearch,
        &server.service.auth,
        true,
        &server.service.options,
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;

    // post
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content.to_string())
        .insert_header(("content-type", "text/plain"))
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"The Content-Type `text/plain` is invalid. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "invalid_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#invalid_content_type"
    );

    // patch
    let req = test::TestRequest::patch()
        .uri("/keys/d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4")
        .set_payload(content.to_string())
        .insert_header(("content-type", "text/plain"))
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"The Content-Type `text/plain` is invalid. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "invalid_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#invalid_content_type"
    );
}

#[actix_rt::test]
async fn error_api_key_empty_content_types() {
    let content = json!({
        "indexes": ["products"],
        "actions": [
            "documents.add"
        ],
        "expiresAt": "2050-11-13T00:00:00Z"
    });

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let app = test::init_service(create_app!(
        &server.service.meilisearch,
        &server.service.auth,
        true,
        &server.service.options,
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;

    // post
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content.to_string())
        .insert_header(("content-type", ""))
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"The Content-Type `` is invalid. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "invalid_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#invalid_content_type"
    );

    // patch
    let req = test::TestRequest::patch()
        .uri("/keys/d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4")
        .set_payload(content.to_string())
        .insert_header(("content-type", ""))
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"The Content-Type `` is invalid. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "invalid_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#invalid_content_type"
    );
}

#[actix_rt::test]
async fn error_api_key_missing_content_types() {
    let content = json!({
        "indexes": ["products"],
        "actions": [
            "documents.add"
        ],
        "expiresAt": "2050-11-13T00:00:00Z"
    });

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let app = test::init_service(create_app!(
        &server.service.meilisearch,
        &server.service.auth,
        true,
        &server.service.options,
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;

    // post
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content.to_string())
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"A Content-Type header is missing. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "missing_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#missing_content_type"
    );

    // patch
    let req = test::TestRequest::patch()
        .uri("/keys/d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4")
        .set_payload(content.to_string())
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 415);
    assert_eq!(
        response["message"],
        json!(
            r#"A Content-Type header is missing. Accepted values for the Content-Type header are: `application/json`"#
        )
    );
    assert_eq!(response["code"], "missing_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(
        response["link"],
        "https://docs.meilisearch.com/errors#missing_content_type"
    );
}

#[actix_rt::test]
async fn error_api_key_empty_payload() {
    let content = "";

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let app = test::init_service(create_app!(
        &server.service.meilisearch,
        &server.service.auth,
        true,
        &server.service.options,
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;

    // post
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content)
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 400);
    assert_eq!(response["code"], json!("missing_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
    assert_eq!(
        response["link"],
        json!("https://docs.meilisearch.com/errors#missing_payload")
    );
    assert_eq!(response["message"], json!(r#"A json payload is missing."#));

    // patch
    let req = test::TestRequest::patch()
        .uri("/keys/d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4")
        .set_payload(content)
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 400);
    assert_eq!(response["code"], json!("missing_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
    assert_eq!(
        response["link"],
        json!("https://docs.meilisearch.com/errors#missing_payload")
    );
    assert_eq!(response["message"], json!(r#"A json payload is missing."#));
}

#[actix_rt::test]
async fn error_api_key_malformed_payload() {
    let content = r#"{"malormed": "payload""#;

    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let app = test::init_service(create_app!(
        &server.service.meilisearch,
        &server.service.auth,
        true,
        &server.service.options,
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;

    // post
    let req = test::TestRequest::post()
        .uri("/keys")
        .set_payload(content)
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 400);
    assert_eq!(response["code"], json!("malformed_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
    assert_eq!(
        response["link"],
        json!("https://docs.meilisearch.com/errors#malformed_payload")
    );
    assert_eq!(
        response["message"],
        json!(
            r#"The json payload provided is malformed. `EOF while parsing an object at line 1 column 22`."#
        )
    );

    // patch
    let req = test::TestRequest::patch()
        .uri("/keys/d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4")
        .set_payload(content)
        .insert_header(("Authorization", "Bearer MASTER_KEY"))
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let status_code = res.status();
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 400);
    assert_eq!(response["code"], json!("malformed_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
    assert_eq!(
        response["link"],
        json!("https://docs.meilisearch.com/errors#malformed_payload")
    );
    assert_eq!(
        response["message"],
        json!(
            r#"The json payload provided is malformed. `EOF while parsing an object at line 1 column 22`."#
        )
    );
}

meilisearch-http/tests/auth/tenant_token.rs  (new file, 575 lines)
@@ -0,0 +1,575 @@
use crate::common::Server;
use ::time::format_description::well_known::Rfc3339;
use maplit::hashmap;
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use std::collections::HashMap;
use time::{Duration, OffsetDateTime};

use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};

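// Builds a signed tenant token (a JWT) from a parent API key: the first 8
// characters of the parent key are embedded as `apiKeyPrefix` and the claims
// are signed with the parent key itself.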
fn generate_tenant_token(parent_key: impl AsRef<str>, mut body: HashMap<&str, Value>) -> String {
    use jsonwebtoken::{encode, EncodingKey, Header};

    let key_id = &parent_key.as_ref()[..8];
    body.insert("apiKeyPrefix", json!(key_id));
    encode(
        &Header::default(),
        &body,
        &EncodingKey::from_secret(parent_key.as_ref().as_bytes()),
    )
    .unwrap()
}

static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
            "title": "Shazam!",
            "id": "287947",
            "color": ["green", "blue"]
        },
        {
            "title": "Captain Marvel",
            "id": "299537",
            "color": ["yellow", "blue"]
        },
        {
            "title": "Escape Room",
            "id": "522681",
            "color": ["yellow", "red"]
        },
        {
            "title": "How to Train Your Dragon: The Hidden World",
            "id": "166428",
            "color": ["green", "red"]
        },
        {
            "title": "Glass",
            "id": "450465",
            "color": ["blue", "red"]
        }
    ])
});

static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
    json!({
        "message": "The provided API key is invalid.",
        "code": "invalid_api_key",
        "type": "auth",
        "link": "https://docs.meilisearch.com/errors#invalid_api_key"
    })
});

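// Parent keys granting the `search` action on the `sales` index, directly or
// through wildcards; tenant tokens derived from them must be accepted.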
static ACCEPTED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
    vec![
        json!({
            "indexes": ["*"],
            "actions": ["*"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["*"],
            "actions": ["search"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["sales"],
            "actions": ["*"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["sales"],
            "actions": ["search"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
    ]
});

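// Parent keys lacking either the `search` action or access to the `sales`
// index; tenant tokens derived from them must be refused.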
static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
    vec![
        // no search action
        json!({
            "indexes": ["*"],
            "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["sales"],
            "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        // bad index
        json!({
            "indexes": ["products"],
            "actions": ["*"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["products"],
            "actions": ["search"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
    ]
});

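// For every accepted parent key and every given tenant token, runs a search
// on `sales` with the given filter and checks both the status code and the
// number of hits.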
macro_rules! compute_autorized_search {
    ($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
        let mut server = Server::new_auth().await;
        server.use_api_key("MASTER_KEY");
        let index = server.index("sales");
        let documents = DOCUMENTS.clone();
        index.add_documents(documents, None).await;
        index.wait_task(0).await;
        index
            .update_settings(json!({"filterableAttributes": ["color"]}))
            .await;
        index.wait_task(1).await;
        drop(index);

        for key_content in ACCEPTED_KEYS.iter() {
            server.use_api_key("MASTER_KEY");
            let (response, code) = server.add_api_key(key_content.clone()).await;
            assert_eq!(code, 201);
            let key = response["key"].as_str().unwrap();

            for tenant_token in $tenant_tokens.iter() {
                let web_token = generate_tenant_token(&key, tenant_token.clone());
                server.use_api_key(&web_token);
                let index = server.index("sales");
                index
                    .search(json!({ "filter": $filter }), |response, code| {
                        assert_eq!(
                            code, 200,
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response, tenant_token, key_content
                        );
                        assert_eq!(
                            response["hits"].as_array().unwrap().len(),
                            $expected_count,
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response,
                            tenant_token,
                            key_content
                        );
                    })
                    .await;
            }
        }
    };
}

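// Counterpart of the macro above: every (parent key, tenant token) pair must
// be rejected with a 403 and the standard invalid-API-key payload.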
macro_rules! compute_forbidden_search {
    ($tenant_tokens:expr, $parent_keys:expr) => {
        let mut server = Server::new_auth().await;
        server.use_api_key("MASTER_KEY");
        let index = server.index("sales");
        let documents = DOCUMENTS.clone();
        index.add_documents(documents, None).await;
        index.wait_task(0).await;
        drop(index);

        for key_content in $parent_keys.iter() {
            server.use_api_key("MASTER_KEY");
            let (response, code) = server.add_api_key(key_content.clone()).await;
            assert_eq!(code, 201, "{:?}", response);
            let key = response["key"].as_str().unwrap();

            for tenant_token in $tenant_tokens.iter() {
                let web_token = generate_tenant_token(&key, tenant_token.clone());
                server.use_api_key(&web_token);
                let index = server.index("sales");
                index
                    .search(json!({}), |response, code| {
                        assert_eq!(
                            response,
                            INVALID_RESPONSE.clone(),
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response,
                            tenant_token,
                            key_content
                        );
                        assert_eq!(
                            code, 403,
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response, tenant_token, key_content
                        );
                    })
                    .await;
            }
        }
    };
}

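// Tokens scoped to `*` or `sales`, with a future or absent expiry, must all
// be able to search the five seeded documents.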
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn search_authorized_simple_token() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"*": Value::Null}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"sales": Value::Null}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => Value::Null
        },
    ];

    compute_autorized_search!(tenant_tokens, {}, 5);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn search_authorized_filter_token() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {"filter": "color = blue"}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {"filter": "color = blue"}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": {"filter": ["color = blue"]}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        // filter on sales should override filters on *
        hashmap! {
            "searchRules" => json!({
                "*": {"filter": "color = green"},
                "sales": {"filter": "color = blue"}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {},
                "sales": {"filter": "color = blue"}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {"filter": "color = green"},
                "sales": {"filter": ["color = blue"]}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {},
                "sales": {"filter": ["color = blue"]}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_autorized_search!(tenant_tokens, {}, 3);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn filter_search_authorized_filter_token() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {"filter": "color = blue"}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {"filter": "color = blue"}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": {"filter": ["color = blue"]}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        // filter on sales should override filters on *
        hashmap! {
            "searchRules" => json!({
                "*": {"filter": "color = green"},
                "sales": {"filter": "color = blue"}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {},
                "sales": {"filter": "color = blue"}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {"filter": "color = green"},
                "sales": {"filter": ["color = blue"]}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {},
                "sales": {"filter": ["color = blue"]}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_autorized_search!(tenant_tokens, "color = yellow", 1);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_search_token_forbidden_parent_key() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_forbidden_search!(tenant_tokens, REFUSED_KEYS);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_search_forbidden_token() {
    let tenant_tokens = vec![
        // bad index
        hashmap! {
            "searchRules" => json!({"products": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["products"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"products": {}}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"products": Value::Null}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!(["products"]),
            "exp" => Value::Null
        },
        // expired token
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_forbidden_search!(tenant_tokens, ACCEPTED_KEYS);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_forbidden_routes() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
    let web_token = generate_tenant_token(&key, tenant_token);
    server.use_api_key(&web_token);

    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        if !actions.contains("search") {
            let (response, code) = server.dummy_request(method, route).await;
            assert_eq!(response, INVALID_RESPONSE.clone());
            assert_eq!(code, 403);
        }
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_expired_parent_key() {
    use std::{thread, time};
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
    let web_token = generate_tenant_token(&key, tenant_token);
    server.use_api_key(&web_token);

    // test a search request while the parent key is not expired
    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_ne!(response, INVALID_RESPONSE.clone());
    assert_ne!(code, 403);

    // wait until the key is expired.
    thread::sleep(time::Duration::new(1, 0));

    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_eq!(response, INVALID_RESPONSE.clone());
    assert_eq!(code, 403);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_modified_token() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["products"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
    let web_token = generate_tenant_token(&key, tenant_token);
    server.use_api_key(&web_token);

    // test a search request while the web_token is valid
    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_ne!(response, INVALID_RESPONSE.clone());
    assert_ne!(code, 403);

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };

    let alt = generate_tenant_token(&key, tenant_token);
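    // Splice the payload of the broader token between the original token's
    // header and signature: the signature no longer matches the payload, so
    // the server must reject the tampered token.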
    let altered_token = [
        web_token.split('.').next().unwrap(),
        alt.split('.').nth(1).unwrap(),
        web_token.split('.').nth(2).unwrap(),
    ]
    .join(".");

    server.use_api_key(&altered_token);
    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_eq!(response, INVALID_RESPONSE.clone());
    assert_eq!(code, 403);
}

meilisearch-http/tests/common/index.rs
@@ -35,22 +35,19 @@ pub struct Index<'a> {
 #[allow(dead_code)]
 impl Index<'_> {
     pub async fn get(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}", encode(self.uid.as_ref()).to_string());
+        let url = format!("/indexes/{}", encode(self.uid.as_ref()));
         self.service.get(url).await
     }

     pub async fn load_test_set(&self) -> u64 {
-        let url = format!(
-            "/indexes/{}/documents",
-            encode(self.uid.as_ref()).to_string()
-        );
+        let url = format!("/indexes/{}/documents", encode(self.uid.as_ref()));
         let (response, code) = self
             .service
             .post_str(url, include_str!("../assets/test_set.json"))
             .await;
         assert_eq!(code, 202);
-        let update_id = response["updateId"].as_i64().unwrap();
-        self.wait_update_id(update_id as u64).await;
+        let update_id = response["uid"].as_i64().unwrap();
+        self.wait_task(update_id as u64).await;
         update_id as u64
     }

@@ -66,13 +63,13 @@ impl Index<'_> {
         let body = json!({
             "primaryKey": primary_key,
         });
-        let url = format!("/indexes/{}", encode(self.uid.as_ref()).to_string());
+        let url = format!("/indexes/{}", encode(self.uid.as_ref()));

         self.service.put(url, body).await
     }

     pub async fn delete(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}", encode(self.uid.as_ref()).to_string());
+        let url = format!("/indexes/{}", encode(self.uid.as_ref()));
         self.service.delete(url).await
     }

@@ -84,13 +81,10 @@ impl Index<'_> {
         let url = match primary_key {
             Some(key) => format!(
                 "/indexes/{}/documents?primaryKey={}",
-                encode(self.uid.as_ref()).to_string(),
+                encode(self.uid.as_ref()),
                 key
             ),
-            None => format!(
-                "/indexes/{}/documents",
-                encode(self.uid.as_ref()).to_string()
-            ),
+            None => format!("/indexes/{}/documents", encode(self.uid.as_ref())),
         };
         self.service.post(url, documents).await
     }
@@ -103,29 +97,22 @@ impl Index<'_> {
         let url = match primary_key {
             Some(key) => format!(
                 "/indexes/{}/documents?primaryKey={}",
-                encode(self.uid.as_ref()).to_string(),
+                encode(self.uid.as_ref()),
                 key
             ),
-            None => format!(
-                "/indexes/{}/documents",
-                encode(self.uid.as_ref()).to_string()
-            ),
+            None => format!("/indexes/{}/documents", encode(self.uid.as_ref())),
         };
         self.service.put(url, documents).await
     }

-    pub async fn wait_update_id(&self, update_id: u64) -> Value {
+    pub async fn wait_task(&self, update_id: u64) -> Value {
         // try 10 times to get status, or panic to not wait forever
-        let url = format!(
-            "/indexes/{}/updates/{}",
-            encode(self.uid.as_ref()).to_string(),
-            update_id
-        );
+        let url = format!("/tasks/{}", update_id);
         for _ in 0..10 {
             let (response, status_code) = self.service.get(&url).await;
             assert_eq!(status_code, 200, "response: {}", response);

-            if response["status"] == "processed" || response["status"] == "failed" {
+            if response["status"] == "succeeded" || response["status"] == "failed" {
                 return response;
             }

@@ -134,17 +121,13 @@ impl Index<'_> {
         panic!("Timeout waiting for update id");
     }

-    pub async fn get_update(&self, update_id: u64) -> (Value, StatusCode) {
-        let url = format!(
-            "/indexes/{}/updates/{}",
-            encode(self.uid.as_ref()).to_string(),
-            update_id
-        );
+    pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/tasks/{}", self.uid, update_id);
         self.service.get(url).await
     }

-    pub async fn list_updates(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/updates", encode(self.uid.as_ref()).to_string());
+    pub async fn list_tasks(&self) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/tasks", self.uid);
         self.service.get(url).await
     }

@@ -153,19 +136,12 @@ impl Index<'_> {
         id: u64,
         _options: Option<GetDocumentOptions>,
     ) -> (Value, StatusCode) {
-        let url = format!(
-            "/indexes/{}/documents/{}",
-            encode(self.uid.as_ref()).to_string(),
-            id
-        );
+        let url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
         self.service.get(url).await
     }

     pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) {
-        let mut url = format!(
-            "/indexes/{}/documents?",
-            encode(self.uid.as_ref()).to_string()
-        );
+        let mut url = format!("/indexes/{}/documents?", encode(self.uid.as_ref()));
         if let Some(limit) = options.limit {
             url.push_str(&format!("limit={}&", limit));
         }
@@ -185,26 +161,19 @@ impl Index<'_> {
     }

     pub async fn delete_document(&self, id: u64) -> (Value, StatusCode) {
-        let url = format!(
-            "/indexes/{}/documents/{}",
-            encode(self.uid.as_ref()).to_string(),
-            id
-        );
+        let url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
         self.service.delete(url).await
     }

     pub async fn clear_all_documents(&self) -> (Value, StatusCode) {
-        let url = format!(
-            "/indexes/{}/documents",
-            encode(self.uid.as_ref()).to_string()
-        );
+        let url = format!("/indexes/{}/documents", encode(self.uid.as_ref()));
         self.service.delete(url).await
     }

     pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
         let url = format!(
             "/indexes/{}/documents/delete-batch",
-            encode(self.uid.as_ref()).to_string()
+            encode(self.uid.as_ref())
         );
         self.service
             .post(url, serde_json::to_value(&ids).unwrap())
@@ -212,31 +181,22 @@ impl Index<'_> {
     }

     pub async fn settings(&self) -> (Value, StatusCode) {
-        let url = format!(
-            "/indexes/{}/settings",
-            encode(self.uid.as_ref()).to_string()
-        );
+        let url = format!("/indexes/{}/settings", encode(self.uid.as_ref()));
         self.service.get(url).await
     }

     pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
-        let url = format!(
-            "/indexes/{}/settings",
-            encode(self.uid.as_ref()).to_string()
-        );
+        let url = format!("/indexes/{}/settings", encode(self.uid.as_ref()));
         self.service.post(url, settings).await
     }

     pub async fn delete_settings(&self) -> (Value, StatusCode) {
-        let url = format!(
-            "/indexes/{}/settings",
-            encode(self.uid.as_ref()).to_string()
-        );
+        let url = format!("/indexes/{}/settings", encode(self.uid.as_ref()));
         self.service.delete(url).await
     }

     pub async fn stats(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/stats", encode(self.uid.as_ref()).to_string());
+        let url = format!("/indexes/{}/stats", encode(self.uid.as_ref()));
         self.service.get(url).await
     }

@@ -261,17 +221,13 @@ impl Index<'_> {
     }

     pub async fn search_post(&self, query: Value) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/search", encode(self.uid.as_ref()).to_string());
+        let url = format!("/indexes/{}/search", encode(self.uid.as_ref()));
         self.service.post(url, query).await
     }

     pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
         let params = serde_url_params::to_string(&query).unwrap();
-        let url = format!(
-            "/indexes/{}/search?{}",
-            encode(self.uid.as_ref()).to_string(),
-            params
-        );
+        let url = format!("/indexes/{}/search?{}", encode(self.uid.as_ref()), params);
         self.service.get(url).await
     }

meilisearch-http/tests/common/server.rs
@@ -1,7 +1,9 @@
 #![allow(dead_code)]
 use std::path::Path;

+use actix_web::http::StatusCode;
 use byte_unit::{Byte, ByteUnit};
+use meilisearch_auth::AuthController;
 use meilisearch_http::setup_meilisearch;
 use meilisearch_lib::options::{IndexerOpts, MaxMemory};
 use once_cell::sync::Lazy;
@@ -19,7 +21,7 @@ pub struct Server {
     _dir: Option<TempDir>,
 }

-static TEST_TEMP_DIR: Lazy<TempDir> = Lazy::new(|| TempDir::new().unwrap());
+pub static TEST_TEMP_DIR: Lazy<TempDir> = Lazy::new(|| TempDir::new().unwrap());

 impl Server {
     pub async fn new() -> Self {
@@ -34,9 +36,39 @@ impl Server {
         let options = default_settings(dir.path());

         let meilisearch = setup_meilisearch(&options).unwrap();
+        let auth = AuthController::new(&options.db_path, &options.master_key).unwrap();
         let service = Service {
             meilisearch,
+            auth,
             options,
+            api_key: None,
         };

         Server {
             service,
             _dir: Some(dir),
         }
     }

+    pub async fn new_auth() -> Self {
+        let dir = TempDir::new().unwrap();
+
+        if cfg!(windows) {
+            std::env::set_var("TMP", TEST_TEMP_DIR.path());
+        } else {
+            std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
+        }
+
+        let mut options = default_settings(dir.path());
+        options.master_key = Some("MASTER_KEY".to_string());
+
+        let meilisearch = setup_meilisearch(&options).unwrap();
+        let auth = AuthController::new(&options.db_path, &options.master_key).unwrap();
+        let service = Service {
+            meilisearch,
+            auth,
+            options,
+            api_key: None,
+        };
+
+        Server {
@@ -47,9 +79,12 @@ impl Server {
|
||||
|
||||
pub async fn new_with_options(options: Opt) -> Self {
|
||||
let meilisearch = setup_meilisearch(&options).unwrap();
|
||||
let auth = AuthController::new(&options.db_path, &options.master_key).unwrap();
|
||||
let service = Service {
|
||||
meilisearch,
|
||||
auth,
|
||||
options,
|
||||
api_key: None,
|
||||
};
|
||||
|
||||
Server {
|
||||
@@ -77,6 +112,14 @@ impl Server {
|
||||
pub async fn stats(&self) -> (Value, StatusCode) {
|
||||
self.service.get("/stats").await
|
||||
}
|
||||
|
||||
pub async fn tasks(&self) -> (Value, StatusCode) {
|
||||
self.service.get("/tasks").await
|
||||
}
|
||||
|
||||
pub async fn get_dump_status(&self, uid: &str) -> (Value, StatusCode) {
|
||||
self.service.get(format!("/dumps/{}/status", uid)).await
|
||||
}
|
||||
}
|
||||
|
||||
pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
|
||||
@@ -89,7 +132,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
|
||||
#[cfg(all(not(debug_assertions), feature = "analytics"))]
|
||||
no_analytics: true,
|
||||
max_index_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
|
||||
max_udb_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
|
||||
max_task_db_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
|
||||
http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(),
|
||||
ssl_cert_path: None,
|
||||
ssl_key_path: None,
|
||||
@@ -105,11 +148,14 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
|
||||
schedule_snapshot: false,
|
||||
snapshot_interval_sec: 0,
|
||||
import_dump: None,
|
||||
ignore_missing_dump: false,
|
||||
ignore_dump_if_db_exists: false,
|
||||
indexer_options: IndexerOpts {
|
||||
// memory has to be unlimited because several meilisearch are running in test context.
|
||||
max_memory: MaxMemory::unlimited(),
|
||||
..Default::default()
|
||||
},
|
||||
log_level: "off".into(),
|
||||
scheduler_options: meilisearch_lib::options::SchedulerConfig::default(),
|
||||
}
|
||||
}
|
||||
|
||||
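The hunks above extend the test harness for the task-queue era: the server now owns an `AuthController`, `TEST_TEMP_DIR` is exported, a `/tasks` helper is added, and `max_udb_size` becomes `max_task_db_size`. A hedged sketch of how a test could exercise the new route (the `results` key is an assumption about the task-list payload, not something this diff shows):

    #[actix_rt::test]
    async fn tasks_route_smoke_test() {
        // Server::new leaves `api_key` unset, so the call is unauthenticated.
        let server = Server::new().await;
        let (response, code) = server.tasks().await;
        assert_eq!(code, 200);
        // Assumed shape: a fresh instance returns an empty task list.
        assert!(response["results"].as_array().map_or(true, |r| r.is_empty()));
    }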
@@ -1,4 +1,5 @@
use actix_web::{http::StatusCode, test};
use meilisearch_auth::AuthController;
use meilisearch_lib::MeiliSearch;
use serde_json::Value;

@@ -6,23 +7,27 @@ use meilisearch_http::{analytics, create_app, Opt};

pub struct Service {
pub meilisearch: MeiliSearch,
pub auth: AuthController,
pub options: Opt,
pub api_key: Option<String>,
}

impl Service {
pub async fn post(&self, url: impl AsRef<str>, body: Value) -> (Value, StatusCode) {
let app = test::init_service(create_app!(
&self.meilisearch,
&self.auth,
true,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;

let req = test::TestRequest::post()
.uri(url.as_ref())
.set_json(&body)
.to_request();
let mut req = test::TestRequest::post().uri(url.as_ref()).set_json(&body);
if let Some(api_key) = &self.api_key {
req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
}
let req = req.to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();

@@ -39,17 +44,21 @@ impl Service {
) -> (Value, StatusCode) {
let app = test::init_service(create_app!(
&self.meilisearch,
&self.auth,
true,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;

let req = test::TestRequest::post()
let mut req = test::TestRequest::post()
.uri(url.as_ref())
.set_payload(body.as_ref().to_string())
.insert_header(("content-type", "application/json"))
.to_request();
.insert_header(("content-type", "application/json"));
if let Some(api_key) = &self.api_key {
req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
}
let req = req.to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();

@@ -61,13 +70,18 @@ impl Service {
pub async fn get(&self, url: impl AsRef<str>) -> (Value, StatusCode) {
let app = test::init_service(create_app!(
&self.meilisearch,
&self.auth,
true,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;

let req = test::TestRequest::get().uri(url.as_ref()).to_request();
let mut req = test::TestRequest::get().uri(url.as_ref());
if let Some(api_key) = &self.api_key {
req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
}
let req = req.to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();

@@ -79,16 +93,41 @@ impl Service {
pub async fn put(&self, url: impl AsRef<str>, body: Value) -> (Value, StatusCode) {
let app = test::init_service(create_app!(
&self.meilisearch,
&self.auth,
true,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;

let req = test::TestRequest::put()
.uri(url.as_ref())
.set_json(&body)
.to_request();
let mut req = test::TestRequest::put().uri(url.as_ref()).set_json(&body);
if let Some(api_key) = &self.api_key {
req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
}
let req = req.to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();

let body = test::read_body(res).await;
let response = serde_json::from_slice(&body).unwrap_or_default();
(response, status_code)
}

pub async fn patch(&self, url: impl AsRef<str>, body: Value) -> (Value, StatusCode) {
let app = test::init_service(create_app!(
&self.meilisearch,
&self.auth,
true,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;

let mut req = test::TestRequest::patch().uri(url.as_ref()).set_json(&body);
if let Some(api_key) = &self.api_key {
req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
}
let req = req.to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();

@@ -100,13 +139,18 @@ impl Service {
pub async fn delete(&self, url: impl AsRef<str>) -> (Value, StatusCode) {
let app = test::init_service(create_app!(
&self.meilisearch,
&self.auth,
true,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;

let req = test::TestRequest::delete().uri(url.as_ref()).to_request();
let mut req = test::TestRequest::delete().uri(url.as_ref());
if let Some(api_key) = &self.api_key {
req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));
}
let req = req.to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();
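Every verb helper above repeats the same pattern: build the `TestRequest` mutably and attach `Authorization: Bearer <key>` only when `api_key` is set. A sketch of that pattern factored into a standalone function (`with_auth` is hypothetical, not part of the diff):

    use actix_web::test::TestRequest;

    fn with_auth(req: TestRequest, api_key: Option<&str>) -> TestRequest {
        match api_key {
            // Same header construction as the diff: "Bearer " + key.
            Some(key) => req.insert_header(("Authorization", ["Bearer ", key].concat())),
            None => req,
        }
    }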
@@ -39,6 +39,7 @@ async fn error_json_bad_content_type() {
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
@@ -110,3 +111,40 @@ async fn error_json_bad_content_type() {
}
}
}

#[actix_rt::test]
async fn extract_actual_content_type() {
let route = "/indexes/doggo/documents";
let documents = "[{}]";
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;

// Good content-type, we probably have an error since we didn't send anything in the json
// so we only ensure we didn't get a bad media type error.
let req = test::TestRequest::post()
.uri(route)
.set_payload(documents)
.insert_header(("content-type", "application/json; charset=utf-8"))
.to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();
assert_ne!(status_code, 415,
"calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);

let req = test::TestRequest::put()
.uri(route)
.set_payload(documents)
.insert_header(("content-type", "application/json; charset=latin-1"))
.to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();
assert_ne!(status_code, 415,
"calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);
}
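The new `extract_actual_content_type` test checks that MIME parameters such as `charset` do not trip the bad-media-type guard. A sketch of the behaviour under test using the `mime` crate (an assumption for illustration; the diff does not show meilisearch-http's internals):

    fn is_json(content_type: &str) -> bool {
        // Compare only the type/subtype essence, ignoring parameters.
        content_type
            .parse::<mime::Mime>()
            .map(|m| m.essence_str() == "application/json")
            .unwrap_or(false)
    }

    fn main() {
        assert!(is_json("application/json; charset=utf-8"));
        assert!(is_json("application/json; charset=latin-1"));
        assert!(!is_json("text/plain"));
    }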
@@ -1,8 +1,8 @@
use crate::common::{GetAllDocumentsOptions, Server};
use actix_web::test;
use chrono::DateTime;
use meilisearch_http::{analytics, create_app};
use serde_json::{json, Value};
use time::{format_description::well_known::Rfc3339, OffsetDateTime};

/// This is the basic usage of our API and every other tests uses the content-type application/json
#[actix_rt::test]
@@ -18,6 +18,7 @@ async fn add_documents_test_json_content_types() {
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
@@ -34,7 +35,7 @@ async fn add_documents_test_json_content_types() {
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 202);
assert_eq!(response, json!({ "updateId": 0 }));
assert_eq!(response["uid"], 0);

// put
let req = test::TestRequest::put()
@@ -47,7 +48,7 @@ async fn add_documents_test_json_content_types() {
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 202);
assert_eq!(response, json!({ "updateId": 1 }));
assert_eq!(response["uid"], 1);
}

/// any other content-type is must be refused
@@ -63,6 +64,7 @@ async fn error_add_documents_test_bad_content_types() {
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
@@ -130,6 +132,7 @@ async fn error_add_documents_test_no_content_type() {
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
@@ -189,6 +192,7 @@ async fn error_add_malformed_csv_documents() {
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
@@ -250,6 +254,7 @@ async fn error_add_malformed_json_documents() {
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
@@ -311,6 +316,7 @@ async fn error_add_malformed_ndjson_documents() {
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
@@ -372,6 +378,7 @@ async fn error_add_missing_payload_csv_documents() {
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
@@ -423,6 +430,7 @@ async fn error_add_missing_payload_json_documents() {
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
@@ -474,6 +482,7 @@ async fn error_add_missing_payload_ndjson_documents() {
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
@@ -538,7 +547,7 @@ async fn add_documents_no_index_creation() {

let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["updateId"], 0);
assert_eq!(response["uid"], 0);
/*
* currently we don’t check these field to stay ISO with meilisearch
* assert_eq!(response["status"], "pending");
@@ -548,19 +557,20 @@ async fn add_documents_no_index_creation() {
* assert!(response.get("enqueuedAt").is_some());
*/

index.wait_update_id(0).await;
index.wait_task(0).await;

let (response, code) = index.get_update(0).await;
let (response, code) = index.get_task(0).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "processed");
assert_eq!(response["updateId"], 0);
assert_eq!(response["type"]["name"], "DocumentsAddition");
assert_eq!(response["type"]["number"], 1);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["uid"], 0);
assert_eq!(response["type"], "documentAddition");
assert_eq!(response["details"]["receivedDocuments"], 1);
assert_eq!(response["details"]["indexedDocuments"], 1);

let processed_at =
DateTime::parse_from_rfc3339(response["processedAt"].as_str().unwrap()).unwrap();
OffsetDateTime::parse(response["finishedAt"].as_str().unwrap(), &Rfc3339).unwrap();
let enqueued_at =
DateTime::parse_from_rfc3339(response["enqueuedAt"].as_str().unwrap()).unwrap();
OffsetDateTime::parse(response["enqueuedAt"].as_str().unwrap(), &Rfc3339).unwrap();
assert!(processed_at > enqueued_at);
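These hunks swap `chrono` for the `time` crate: `DateTime::parse_from_rfc3339` becomes `OffsetDateTime::parse` with the well-known RFC 3339 format, and the field read changes from the update API's `processedAt` to the task API's `finishedAt`. A minimal, self-contained sketch of the equivalent parsing (assumes `time` built with its `parsing` feature):

    use time::{format_description::well_known::Rfc3339, OffsetDateTime};

    fn main() {
        let enqueued = OffsetDateTime::parse("2022-02-01T10:00:00Z", &Rfc3339).unwrap();
        let finished = OffsetDateTime::parse("2022-02-01T10:00:05Z", &Rfc3339).unwrap();
        // OffsetDateTime is Ord, so the old chrono comparison carries over unchanged.
        assert!(finished > enqueued);
    }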
// index was created, and primary key was infered.
@@ -573,7 +583,7 @@ async fn add_documents_no_index_creation() {
async fn error_document_add_create_index_bad_uid() {
let server = Server::new().await;
let index = server.index("883 fj!");
let (response, code) = index.add_documents(json!([]), None).await;
let (response, code) = index.add_documents(json!([{"id": 1}]), None).await;

let expected_response = json!({
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
@@ -582,15 +592,15 @@ async fn error_document_add_create_index_bad_uid() {
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
});

assert_eq!(response, expected_response);
assert_eq!(code, 400);
assert_eq!(response, expected_response);
}

#[actix_rt::test]
async fn error_document_update_create_index_bad_uid() {
let server = Server::new().await;
let index = server.index("883 fj!");
let (response, code) = index.update_documents(json!([]), None).await;
let (response, code) = index.update_documents(json!([{"id": 1}]), None).await;

let expected_response = json!({
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
@@ -599,8 +609,8 @@ async fn error_document_update_create_index_bad_uid() {
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
});

assert_eq!(response, expected_response);
assert_eq!(code, 400);
assert_eq!(response, expected_response);
}

#[actix_rt::test]
@@ -617,14 +627,15 @@ async fn document_addition_with_primary_key() {
let (response, code) = index.add_documents(documents, Some("primary")).await;
assert_eq!(code, 202, "response: {}", response);

index.wait_update_id(0).await;
index.wait_task(0).await;

let (response, code) = index.get_update(0).await;
let (response, code) = index.get_task(0).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "processed");
assert_eq!(response["updateId"], 0);
assert_eq!(response["type"]["name"], "DocumentsAddition");
assert_eq!(response["type"]["number"], 1);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["uid"], 0);
assert_eq!(response["type"], "documentAddition");
assert_eq!(response["details"]["receivedDocuments"], 1);
assert_eq!(response["details"]["indexedDocuments"], 1);

let (response, code) = index.get().await;
assert_eq!(code, 200);
@@ -645,14 +656,15 @@ async fn document_update_with_primary_key() {
let (_response, code) = index.update_documents(documents, Some("primary")).await;
assert_eq!(code, 202);

index.wait_update_id(0).await;
index.wait_task(0).await;

let (response, code) = index.get_update(0).await;
let (response, code) = index.get_task(0).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "processed");
assert_eq!(response["updateId"], 0);
assert_eq!(response["type"]["name"], "DocumentsPartial");
assert_eq!(response["type"]["number"], 1);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["uid"], 0);
assert_eq!(response["type"], "documentPartial");
assert_eq!(response["details"]["indexedDocuments"], 1);
assert_eq!(response["details"]["receivedDocuments"], 1);

let (response, code) = index.get().await;
assert_eq!(code, 200);
@@ -674,7 +686,7 @@ async fn replace_document() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);

index.wait_update_id(0).await;
index.wait_task(0).await;

let documents = json!([
{
@@ -686,11 +698,11 @@ async fn replace_document() {
let (_response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);

index.wait_update_id(1).await;
index.wait_task(1).await;

let (response, code) = index.get_update(1).await;
let (response, code) = index.get_task(1).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "processed");
assert_eq!(response["status"], "succeeded");

let (response, code) = index.get_document(1, None).await;
assert_eq!(code, 200);
@@ -698,20 +710,11 @@ async fn replace_document() {
}

#[actix_rt::test]
async fn error_add_no_documents() {
async fn add_no_documents() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.add_documents(json!([]), None).await;

let expected_response = json!({
"message": "The `json` payload must contain at least one document.",
"code": "malformed_payload",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#malformed_payload"
});

assert_eq!(response, expected_response);
assert_eq!(code, 400);
let (_response, code) = index.add_documents(json!([]), None).await;
assert_eq!(code, 202);
}
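The rewritten tests above capture the v0.25+ contract: malformed input no longer fails the HTTP call with 400; the write is accepted with 202 and any validation error surfaces on the task (`add_no_documents` even drops the old "empty payload" error entirely). A sketch of the resulting assertion pattern, reusing the harness types from this diff:

    // Hypothetical helper: poll a task and assert that it failed server-side.
    async fn expect_failed_task(index: &Index<'_>, task_uid: u64) {
        let response = index.wait_task(task_uid).await;
        assert_eq!(response["status"], "failed");
        assert!(response["error"].is_object());
    }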
#[actix_rt::test]
@@ -729,7 +732,7 @@ async fn update_document() {
let (_response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);

index.wait_update_id(0).await;
index.wait_task(0).await;

let documents = json!([
{
@@ -741,11 +744,11 @@ async fn update_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);

index.wait_update_id(1).await;
index.wait_task(1).await;

let (response, code) = index.get_update(1).await;
let (response, code) = index.get_task(1).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "processed");
assert_eq!(response["status"], "succeeded");

let (response, code) = index.get_document(1, None).await;
assert_eq!(code, 200);
@@ -760,11 +763,12 @@ async fn add_larger_dataset() {
let server = Server::new().await;
let index = server.index("test");
let update_id = index.load_test_set().await;
let (response, code) = index.get_update(update_id).await;
let (response, code) = index.get_task(update_id).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "processed");
assert_eq!(response["type"]["name"], "DocumentsAddition");
assert_eq!(response["type"]["number"], 77);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "documentAddition");
assert_eq!(response["details"]["indexedDocuments"], 77);
assert_eq!(response["details"]["receivedDocuments"], 77);
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(1000),
@@ -781,11 +785,11 @@ async fn update_larger_dataset() {
let index = server.index("test");
let documents = serde_json::from_str(include_str!("../assets/test_set.json")).unwrap();
index.update_documents(documents, None).await;
index.wait_update_id(0).await;
let (response, code) = index.get_update(0).await;
index.wait_task(0).await;
let (response, code) = index.get_task(0).await;
assert_eq!(code, 200);
assert_eq!(response["type"]["name"], "DocumentsPartial");
assert_eq!(response["type"]["number"], 77);
assert_eq!(response["type"], "documentPartial");
assert_eq!(response["details"]["indexedDocuments"], 77);
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(1000),
@@ -808,19 +812,17 @@ async fn error_add_documents_bad_document_id() {
}
]);
index.add_documents(documents, None).await;
index.wait_update_id(0).await;
let (response, code) = index.get_update(0).await;
index.wait_task(1).await;
let (response, code) = index.get_task(1).await;
assert_eq!(code, 200);
assert_eq!(response["status"], json!("failed"));

let expected_error = json!({
"message": "Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).",
"code": "invalid_document_id",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_document_id"
});

assert_eq!(response["error"], expected_error);
assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
assert_eq!(response["error"]["code"], json!("invalid_document_id"));
assert_eq!(response["error"]["type"], json!("invalid_request"));
assert_eq!(
response["error"]["link"],
json!("https://docs.meilisearch.com/errors#invalid_document_id")
);
}

#[actix_rt::test]
@@ -835,19 +837,15 @@ async fn error_update_documents_bad_document_id() {
}
]);
index.update_documents(documents, None).await;
index.wait_update_id(0).await;
let (response, code) = index.get_update(0).await;
assert_eq!(code, 200);
let response = index.wait_task(1).await;
assert_eq!(response["status"], json!("failed"));

let expected_error = json!({
"message": "Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).",
"code": "invalid_document_id",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_document_id"
});

assert_eq!(response["error"], expected_error);
assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
assert_eq!(response["error"]["code"], json!("invalid_document_id"));
assert_eq!(response["error"]["type"], json!("invalid_request"));
assert_eq!(
response["error"]["link"],
json!("https://docs.meilisearch.com/errors#invalid_document_id")
);
}

#[actix_rt::test]
@@ -862,19 +860,20 @@ async fn error_add_documents_missing_document_id() {
}
]);
index.add_documents(documents, None).await;
index.wait_update_id(0).await;
let (response, code) = index.get_update(0).await;
index.wait_task(1).await;
let (response, code) = index.get_task(1).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "failed");

let expected_error = json!({
"message": r#"Document doesn't have a `docid` attribute: `{"id":"11","content":"foobar"}`."#,
"code": "missing_document_id",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing_document_id"
});

assert_eq!(response["error"], expected_error);
assert_eq!(
response["error"]["message"],
json!(r#"Document doesn't have a `docid` attribute: `{"id":"11","content":"foobar"}`."#)
);
assert_eq!(response["error"]["code"], json!("missing_document_id"));
assert_eq!(response["error"]["type"], json!("invalid_request"));
assert_eq!(
response["error"]["link"],
json!("https://docs.meilisearch.com/errors#missing_document_id")
);
}
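Note the shift from `wait_update_id(0)` to `wait_task(1)` in the bad-document-id tests above: update ids were numbered per index, while task uids appear to be numbered globally per instance and also cover the preceding task in each test, so the failing document write lands at uid 1 here. An illustrative sketch of that bookkeeping (not meilisearch's actual scheduler code):

    // Illustrative: one global counter instead of one counter per index.
    struct TaskQueue { next_uid: u64 }

    impl TaskQueue {
        fn enqueue(&mut self) -> u64 {
            let uid = self.next_uid;
            self.next_uid += 1;
            uid
        }
    }

    fn main() {
        let mut q = TaskQueue { next_uid: 0 };
        let first_task = q.enqueue();  // uid 0
        let second_task = q.enqueue(); // uid 1 — what the test now waits on
        assert_eq!((first_task, second_task), (0, 1));
    }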
#[actix_rt::test]
@@ -889,19 +888,18 @@ async fn error_update_documents_missing_document_id() {
}
]);
index.update_documents(documents, None).await;
index.wait_update_id(0).await;
let (response, code) = index.get_update(0).await;
assert_eq!(code, 200);
let response = index.wait_task(1).await;
assert_eq!(response["status"], "failed");

let expected_error = json!({
"message": r#"Document doesn't have a `docid` attribute: `{"id":"11","content":"foobar"}`."#,
"code": "missing_document_id",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing_document_id"
});

assert_eq!(response["error"], expected_error);
assert_eq!(
response["error"]["message"],
r#"Document doesn't have a `docid` attribute: `{"id":"11","content":"foobar"}`."#
);
assert_eq!(response["error"]["code"], "missing_document_id");
assert_eq!(response["error"]["type"], "invalid_request");
assert_eq!(
response["error"]["link"],
"https://docs.meilisearch.com/errors#missing_document_id"
);
}

#[actix_rt::test]
@@ -924,8 +922,8 @@ async fn error_document_field_limit_reached() {
let (_response, code) = index.update_documents(documents, Some("id")).await;
assert_eq!(code, 202);

index.wait_update_id(0).await;
let (response, code) = index.get_update(0).await;
index.wait_task(0).await;
let (response, code) = index.get_task(0).await;
assert_eq!(code, 200);
// Documents without a primary key are not accepted.
assert_eq!(response["status"], "failed");
@@ -957,8 +955,8 @@ async fn error_add_documents_invalid_geo_field() {
]);

index.add_documents(documents, None).await;
index.wait_update_id(1).await;
let (response, code) = index.get_update(1).await;
index.wait_task(2).await;
let (response, code) = index.get_task(2).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "failed");

@@ -1011,8 +1009,8 @@ async fn error_primary_key_inference() {
]);

index.add_documents(documents, None).await;
index.wait_update_id(0).await;
let (response, code) = index.get_update(0).await;
index.wait_task(0).await;
let (response, code) = index.get_task(0).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "failed");

@@ -1025,3 +1023,26 @@ async fn error_primary_key_inference() {

assert_eq!(response["error"], expected_error);
}

#[actix_rt::test]
async fn add_documents_with_primary_key_twice() {
let server = Server::new().await;
let index = server.index("test");

let documents = json!([
{
"title": "11",
"desc": "foobar"
}
]);

index.add_documents(documents.clone(), Some("title")).await;
index.wait_task(0).await;
let (response, _code) = index.get_task(0).await;
assert_eq!(response["status"], "succeeded");

index.add_documents(documents, Some("title")).await;
index.wait_task(1).await;
let (response, _code) = index.get_task(1).await;
assert_eq!(response["status"], "succeeded");
}

@@ -5,8 +5,13 @@ use crate::common::{GetAllDocumentsOptions, Server};
#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
let server = Server::new().await;
let (_response, code) = server.index("test").delete_document(0).await;
assert_eq!(code, 404);
let index = server.index("test");
let (_response, code) = index.delete_document(0).await;
assert_eq!(code, 202);

let response = index.wait_task(0).await;

assert_eq!(response["status"], "failed");
}

#[actix_rt::test]
@@ -16,8 +21,8 @@ async fn delete_one_unexisting_document() {
index.create(None).await;
let (response, code) = index.delete_document(0).await;
assert_eq!(code, 202, "{}", response);
let update = index.wait_update_id(0).await;
assert_eq!(update["status"], "processed");
let update = index.wait_task(0).await;
assert_eq!(update["status"], "succeeded");
}

#[actix_rt::test]
@@ -27,10 +32,10 @@ async fn delete_one_document() {
index
.add_documents(json!([{ "id": 0, "content": "foobar" }]), None)
.await;
index.wait_update_id(0).await;
index.wait_task(0).await;
let (_response, code) = server.index("test").delete_document(0).await;
assert_eq!(code, 202);
index.wait_update_id(1).await;
index.wait_task(1).await;

let (_response, code) = index.get_document(0, None).await;
assert_eq!(code, 404);
@@ -39,8 +44,13 @@ async fn delete_one_document() {
#[actix_rt::test]
async fn clear_all_documents_unexisting_index() {
let server = Server::new().await;
let (_response, code) = server.index("test").clear_all_documents().await;
assert_eq!(code, 404);
let index = server.index("test");
let (_response, code) = index.clear_all_documents().await;
assert_eq!(code, 202);

let response = index.wait_task(0).await;

assert_eq!(response["status"], "failed");
}

#[actix_rt::test]
@@ -53,11 +63,11 @@ async fn clear_all_documents() {
None,
)
.await;
index.wait_update_id(0).await;
index.wait_task(0).await;
let (_response, code) = index.clear_all_documents().await;
assert_eq!(code, 202);

let _update = index.wait_update_id(1).await;
let _update = index.wait_task(1).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
@@ -74,7 +84,7 @@ async fn clear_all_documents_empty_index() {
let (_response, code) = index.clear_all_documents().await;
assert_eq!(code, 202);

let _update = index.wait_update_id(0).await;
let _update = index.wait_task(0).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
@@ -85,15 +95,20 @@ async fn clear_all_documents_empty_index() {
#[actix_rt::test]
async fn error_delete_batch_unexisting_index() {
let server = Server::new().await;
let (response, code) = server.index("test").delete_batch(vec![]).await;
let index = server.index("test");
let (_, code) = index.delete_batch(vec![]).await;
let expected_response = json!({
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(code, 404);
assert_eq!(response, expected_response);
assert_eq!(code, 202);

let response = index.wait_task(0).await;

assert_eq!(response["status"], "failed");
assert_eq!(response["error"], expected_response);
}

#[actix_rt::test]
@@ -101,11 +116,11 @@ async fn delete_batch() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_update_id(0).await;
index.wait_task(0).await;
let (_response, code) = index.delete_batch(vec![1, 0]).await;
assert_eq!(code, 202);

let _update = index.wait_update_id(1).await;
let _update = index.wait_task(1).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
@@ -119,11 +134,11 @@ async fn delete_no_document_batch() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_update_id(0).await;
index.wait_task(0).await;
let (_response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{}", _response);

let _update = index.wait_update_id(1).await;
let _update = index.wait_task(1).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;

@@ -17,6 +17,7 @@ async fn error_get_unexisting_document() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (response, code) = index.get_document(1, None).await;

let expected_response = json!({
@@ -43,7 +44,7 @@ async fn get_document() {
]);
let (_, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_update_id(0).await;
index.wait_task(0).await;
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(
@@ -75,11 +76,13 @@ async fn error_get_unexisting_index_all_documents() {
}

#[actix_rt::test]
async fn get_no_documents() {
async fn get_no_document() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.create(None).await;
assert_eq!(code, 201);
assert_eq!(code, 202);

index.wait_task(0).await;

let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
meilisearch-http/tests/dumps.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
#![allow(dead_code)]
mod common;

use crate::common::Server;
use serde_json::json;

#[actix_rt::test]
async fn get_unexisting_dump_status() {
let server = Server::new().await;

let (response, code) = server.get_dump_status("foobar").await;
assert_eq!(code, 404);

let expected_response = json!({
"message": "Dump `foobar` not found.",
"code": "dump_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#dump_not_found"
});

assert_eq!(response, expected_response);
}
@@ -7,14 +7,15 @@ async fn create_index_no_primary_key() {
let index = server.index("test");
let (response, code) = index.create(None).await;

assert_eq!(code, 201);
assert_eq!(response["uid"], "test");
assert_eq!(response["name"], "test");
assert!(response.get("createdAt").is_some());
assert!(response.get("updatedAt").is_some());
assert_eq!(response["createdAt"], response["updatedAt"]);
assert_eq!(response["primaryKey"], Value::Null);
assert_eq!(response.as_object().unwrap().len(), 5);
assert_eq!(code, 202);

assert_eq!(response["status"], "enqueued");

let response = index.wait_task(0).await;

assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
assert_eq!(response["details"]["primaryKey"], Value::Null);
}

#[actix_rt::test]
@@ -23,14 +24,15 @@ async fn create_index_with_primary_key() {
let index = server.index("test");
let (response, code) = index.create(Some("primary")).await;

assert_eq!(code, 201);
assert_eq!(response["uid"], "test");
assert_eq!(response["name"], "test");
assert!(response.get("createdAt").is_some());
assert!(response.get("updatedAt").is_some());
//assert_eq!(response["createdAt"], response["updatedAt"]);
assert_eq!(response["primaryKey"], "primary");
assert_eq!(response.as_object().unwrap().len(), 5);
assert_eq!(code, 202);

assert_eq!(response["status"], "enqueued");

let response = index.wait_task(0).await;

assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
assert_eq!(response["details"]["primaryKey"], "primary");
}

#[actix_rt::test]
@@ -42,7 +44,7 @@ async fn create_index_with_invalid_primary_key() {
let (_response, code) = index.add_documents(document, Some("title")).await;
assert_eq!(code, 202);

index.wait_update_id(0).await;
index.wait_task(0).await;

let (response, code) = index.get().await;
assert_eq!(code, 200);
@@ -61,6 +63,10 @@ async fn test_create_multiple_indexes() {
index2.create(None).await;
index3.create(None).await;

index1.wait_task(0).await;
index1.wait_task(1).await;
index1.wait_task(2).await;

assert_eq!(index1.get().await.1, 200);
assert_eq!(index2.get().await.1, 200);
assert_eq!(index3.get().await.1, 200);
@@ -73,9 +79,11 @@ async fn error_create_existing_index() {
let index = server.index("test");
let (_, code) = index.create(Some("primary")).await;

assert_eq!(code, 201);
assert_eq!(code, 202);

let (response, code) = index.create(Some("primary")).await;
index.create(Some("primary")).await;

let response = index.wait_task(1).await;

let expected_response = json!({
"message": "Index `test` already exists.",
@@ -84,8 +92,7 @@ async fn error_create_existing_index() {
"link":"https://docs.meilisearch.com/errors#index_already_exists"
});

assert_eq!(response, expected_response);
assert_eq!(code, 409);
assert_eq!(response["error"], expected_response);
}

#[actix_rt::test]
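The create-index tests above illustrate the broader contract change: `POST /indexes` now answers 202 with an enqueued `indexCreation` task rather than 201 with the index body, so assertions on `uid`, timestamps, and conflicts move to the awaited task. A condensed sketch of the new flow, using the harness from this diff (assertion values as in the hunks above):

    #[actix_rt::test]
    async fn create_index_flow_sketch() {
        let server = Server::new().await;
        let index = server.index("sketch");
        // The HTTP call only acknowledges that the work was enqueued.
        let (response, code) = index.create(None).await;
        assert_eq!(code, 202);
        assert_eq!(response["status"], "enqueued");
        // The real outcome is observed on the task.
        let task = index.wait_task(0).await;
        assert_eq!(task["status"], "succeeded");
        assert_eq!(task["type"], "indexCreation");
    }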
#[actix_rt::test]

@@ -8,11 +8,17 @@ async fn create_and_delete_index() {
let index = server.index("test");
let (_response, code) = index.create(None).await;

assert_eq!(code, 201);
assert_eq!(code, 202);

index.wait_task(0).await;

assert_eq!(index.get().await.1, 200);

let (_response, code) = index.delete().await;

assert_eq!(code, 204);
assert_eq!(code, 202);

index.wait_task(1).await;

assert_eq!(index.get().await.1, 404);
}
@@ -21,7 +27,9 @@ async fn create_and_delete_index() {
async fn error_delete_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.delete().await;
let (_, code) = index.delete().await;

assert_eq!(code, 202);

let expected_response = json!({
"message": "Index `test` not found.",
@@ -30,19 +38,29 @@ async fn error_delete_unexisting_index() {
"link": "https://docs.meilisearch.com/errors#index_not_found"
});

assert_eq!(response, expected_response);
assert_eq!(code, 404);
let response = index.wait_task(0).await;
assert_eq!(response["status"], "failed");
assert_eq!(response["error"], expected_response);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn loop_delete_add_documents() {
let server = Server::new().await;
let index = server.index("test");
let documents = json!([{"id": 1, "field1": "hello"}]);
let mut tasks = Vec::new();
for _ in 0..50 {
let (response, code) = index.add_documents(documents.clone(), None).await;
tasks.push(response["uid"].as_u64().unwrap());
assert_eq!(code, 202, "{}", response);
let (response, code) = index.delete().await;
assert_eq!(code, 204, "{}", response);
tasks.push(response["uid"].as_u64().unwrap());
assert_eq!(code, 202, "{}", response);
}

for task in tasks {
let response = index.wait_task(task).await;
assert_eq!(response["status"], "succeeded", "{}", response);
}
}
@@ -8,7 +8,9 @@ async fn create_and_get_index() {
let index = server.index("test");
let (_, code) = index.create(None).await;

assert_eq!(code, 201);
assert_eq!(code, 202);

index.wait_task(0).await;

let (response, code) = index.get().await;

@@ -55,6 +57,8 @@ async fn list_multiple_indexes() {
server.index("test").create(None).await;
server.index("test1").create(Some("key")).await;

server.index("test").wait_task(1).await;

let (response, code) = server.list_indexes().await;
assert_eq!(code, 200);
assert!(response.is_array());
@@ -67,3 +71,22 @@ async fn list_multiple_indexes() {
.iter()
.any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
}

#[actix_rt::test]
async fn get_invalid_index_uid() {
let server = Server::new().await;
let index = server.index("this is not a valid index name");
let (response, code) = index.get().await;

assert_eq!(code, 404);
assert_eq!(
response,
json!(
{
"message": "Index `this is not a valid index name` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
})
);
}
@@ -8,7 +8,9 @@ async fn stats() {
let index = server.index("test");
let (_, code) = index.create(Some("id")).await;

assert_eq!(code, 201);
assert_eq!(code, 202);

index.wait_task(0).await;

let (response, code) = index.stats().await;

@@ -33,9 +35,9 @@ async fn stats() {

let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["updateId"], 0);
assert_eq!(response["uid"], 1);

index.wait_update_id(0).await;
index.wait_task(1).await;

let (response, code) = index.stats().await;
@@ -1,6 +1,6 @@
use crate::common::Server;
use chrono::DateTime;
use serde_json::json;
use time::{format_description::well_known::Rfc3339, OffsetDateTime};

#[actix_rt::test]
async fn update_primary_key() {
@@ -8,18 +8,27 @@ async fn update_primary_key() {
let index = server.index("test");
let (_, code) = index.create(None).await;

assert_eq!(code, 201);
assert_eq!(code, 202);

let (response, code) = index.update(Some("primary")).await;
index.update(Some("primary")).await;

let response = index.wait_task(1).await;

assert_eq!(response["status"], "succeeded");

let (response, code) = index.get().await;

assert_eq!(code, 200);

assert_eq!(response["uid"], "test");
assert_eq!(response["name"], "test");
assert!(response.get("createdAt").is_some());
assert!(response.get("updatedAt").is_some());

let created_at = DateTime::parse_from_rfc3339(response["createdAt"].as_str().unwrap()).unwrap();
let updated_at = DateTime::parse_from_rfc3339(response["updatedAt"].as_str().unwrap()).unwrap();
let created_at =
OffsetDateTime::parse(response["createdAt"].as_str().unwrap(), &Rfc3339).unwrap();
let updated_at =
OffsetDateTime::parse(response["updatedAt"].as_str().unwrap(), &Rfc3339).unwrap();
assert!(created_at < updated_at);

assert_eq!(response["primaryKey"], "primary");
@@ -30,14 +39,19 @@ async fn update_primary_key() {
async fn update_nothing() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.create(None).await;
let (_, code) = index.create(None).await;

assert_eq!(code, 201);
assert_eq!(code, 202);

let (update, code) = index.update(None).await;
index.wait_task(0).await;

assert_eq!(code, 200);
assert_eq!(response, update);
let (_, code) = index.update(None).await;

assert_eq!(code, 202);

let response = index.wait_task(1).await;

assert_eq!(response["status"], "succeeded");
}

#[actix_rt::test]
@@ -46,7 +60,7 @@ async fn error_update_existing_primary_key() {
let index = server.index("test");
let (_response, code) = index.create(Some("id")).await;

assert_eq!(code, 201);
assert_eq!(code, 202);

let documents = json!([
{
@@ -55,9 +69,12 @@ async fn error_update_existing_primary_key() {
}
]);
index.add_documents(documents, None).await;
index.wait_update_id(0).await;

let (response, code) = index.update(Some("primary")).await;
let (_, code) = index.update(Some("primary")).await;

assert_eq!(code, 202);

let response = index.wait_task(2).await;

let expected_response = json!({
"message": "Index already has a primary key: `id`.",
@@ -66,14 +83,17 @@ async fn error_update_existing_primary_key() {
"link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists"
});

assert_eq!(response, expected_response);
assert_eq!(code, 400);
assert_eq!(response["error"], expected_response);
}

#[actix_rt::test]
async fn error_update_unexisting_index() {
let server = Server::new().await;
let (response, code) = server.index("test").update(None).await;
let (_, code) = server.index("test").update(None).await;

assert_eq!(code, 202);

let response = server.index("test").wait_task(0).await;

let expected_response = json!({
"message": "Index `test` not found.",
@@ -82,6 +102,5 @@ async fn error_update_unexisting_index() {
"link": "https://docs.meilisearch.com/errors#index_not_found"
});

assert_eq!(response, expected_response);
assert_eq!(code, 404);
assert_eq!(response["error"], expected_response);
}
@@ -1,3 +1,4 @@
mod auth;
mod common;
mod dashboard;
mod documents;
@@ -6,7 +7,7 @@ mod search;
mod settings;
mod snapshot;
mod stats;
mod updates;
mod tasks;

// Tests are isolated by features in different modules to allow better readability, test
// targetability, and improved incremental compilation times.
@@ -47,10 +47,10 @@ async fn filter_invalid_syntax_object() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "Invalid syntax for the filter parameter: ` --> 1:7\n |\n1 | title & Glass\n | ^---\n |\n = expected word`.",
|
||||
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO` or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
@@ -74,10 +74,10 @@ async fn filter_invalid_syntax_array() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "Invalid syntax for the filter parameter: ` --> 1:7\n |\n1 | title & Glass\n | ^---\n |\n = expected word`.",
|
||||
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO` or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
@@ -101,10 +101,10 @@ async fn filter_invalid_syntax_string() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "Invalid syntax for the filter parameter: ` --> 1:15\n |\n1 | title = Glass XOR title = Glass\n | ^---\n |\n = expected EOI, and, or or`.",
|
||||
"message": "Found unexpected characters at the end of the filter: `XOR title = Glass`. You probably forgot an `OR` or an `AND` rule.\n15:32 title = Glass XOR title = Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
@@ -131,10 +131,10 @@ async fn filter_invalid_attribute_array() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.",
|
||||
"message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
@@ -158,10 +158,10 @@ async fn filter_invalid_attribute_string() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.",
|
||||
"message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
@@ -185,10 +185,10 @@ async fn filter_reserved_geo_attribute_array() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.",
|
||||
"message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
@@ -212,10 +212,10 @@ async fn filter_reserved_geo_attribute_string() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.",
|
||||
"message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
@@ -239,10 +239,10 @@ async fn filter_reserved_attribute_array() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.",
|
||||
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.\n1:13 _geoDistance = Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
@@ -269,10 +269,10 @@ async fn filter_reserved_attribute_string() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.",
|
||||
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.\n1:13 _geoDistance = Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
@@ -299,7 +299,7 @@ async fn sort_geo_reserved_attribute() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "`_geo` is a reserved keyword and thus can't be used as a sort expression. Use the _geoPoint(latitude, longitude) built-in rule to sort on _geo field coordinates.",
|
||||
@@ -331,7 +331,7 @@ async fn sort_reserved_attribute() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a sort expression.",
|
||||
@@ -363,7 +363,7 @@ async fn sort_unsortable_attribute() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "Attribute `title` is not sortable. Available sortable attributes are: `id`.",
|
||||
@@ -395,7 +395,7 @@ async fn sort_invalid_syntax() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "Invalid syntax for the sort parameter: expected expression ending by `:asc` or `:desc`, found `title`.",
|
||||
@@ -429,7 +429,7 @@ async fn sort_unset_ranking_rule() {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_update_id(1).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "The sort ranking rule must be specified in the ranking rules settings to use the sort parameter at search time.",
|
||||
|
||||
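The hunks above all make the same substitution: the old update-status polling helper `wait_update_id` becomes the task-based `wait_task`. As a rough sketch of what such a helper can look like, assuming an `Index::get_task` accessor like the one used later in these tests (the real helper lives in the tests' common module and is not shown in this diff):

```rust
// Sketch only: a task-wait helper in the style these tests rely on.
// `Index::get_task` and its tuple return are assumed from usage elsewhere here.
async fn wait_task(index: &Index, uid: u64) -> serde_json::Value {
    loop {
        let (response, _code) = index.get_task(uid).await;
        // A task is settled once it has either succeeded or failed.
        match response["status"].as_str() {
            Some("succeeded") | Some("failed") => return response,
            _ => tokio::time::sleep(std::time::Duration::from_millis(50)).await,
        }
    }
}
```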
@@ -37,7 +37,7 @@ async fn simple_placeholder_search() {

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;

     index
         .search(json!({}), |response, code| {
@@ -54,7 +54,7 @@ async fn simple_search() {

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;

     index
         .search(json!({"q": "glass"}), |response, code| {
@@ -71,7 +71,7 @@ async fn search_multiple_params() {

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;

     index
         .search(
@@ -101,7 +101,7 @@ async fn search_with_filter_string_notation() {

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     index
         .search(
@@ -127,7 +127,7 @@ async fn search_with_filter_array_notation() {

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     let (response, code) = index
         .search_post(json!({
@@ -157,7 +157,7 @@ async fn search_with_sort_on_numbers() {

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     index
         .search(
@@ -183,7 +183,7 @@ async fn search_with_sort_on_strings() {

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     index
         .search(
@@ -209,7 +209,7 @@ async fn search_with_multiple_sort() {

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     let (response, code) = index
         .search_post(json!({
@@ -231,7 +231,7 @@ async fn search_facet_distribution() {

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     index
         .search(
@@ -259,7 +259,7 @@ async fn displayed_attributes() {

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     let (response, code) = index
         .search_post(json!({ "attributesToRetrieve": ["title", "id"] }))

@@ -9,7 +9,7 @@ async fn set_and_reset_distinct_attribute() {
     let (_response, _code) = index
         .update_settings(json!({ "distinctAttribute": "test"}))
         .await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;

     let (response, _) = index.settings().await;

@@ -19,7 +19,7 @@ async fn set_and_reset_distinct_attribute() {
         .update_settings(json!({ "distinctAttribute": null }))
         .await;

-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     let (response, _) = index.settings().await;

@@ -32,7 +32,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
     let index = server.index("test");

     let (_response, _code) = index.update_distinct_attribute(json!("test")).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;

     let (response, _) = index.get_distinct_attribute().await;

@@ -40,7 +40,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {

     index.update_distinct_attribute(json!(null)).await;

-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     let (response, _) = index.get_distinct_attribute().await;

@@ -39,6 +39,7 @@ async fn get_settings() {
     let server = Server::new().await;
     let index = server.index("test");
     index.create(None).await;
+    index.wait_task(0).await;
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     let settings = response.as_object().unwrap();
@@ -77,7 +78,7 @@ async fn test_partial_update() {
     let (_response, _code) = index
         .update_settings(json!({"displayedAttributes": ["foo"]}))
         .await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     assert_eq!(response["displayedAttributes"], json!(["foo"]));
@@ -86,7 +87,7 @@ async fn test_partial_update() {
     let (_response, _) = index
         .update_settings(json!({"searchableAttributes": ["bar"]}))
         .await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
@@ -98,17 +99,12 @@ async fn test_partial_update() {
 async fn error_delete_settings_unexisting_index() {
     let server = Server::new().await;
     let index = server.index("test");
-    let (response, code) = index.delete_settings().await;
+    let (_response, code) = index.delete_settings().await;
+    assert_eq!(code, 202);

-    let expected_response = json!({
-        "message": "Index `test` not found.",
-        "code": "index_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index_not_found"
-    });
+    let response = index.wait_task(0).await;

-    assert_eq!(response, expected_response);
-    assert_eq!(code, 404);
+    assert_eq!(response["status"], "failed");
 }

 #[actix_rt::test]
@@ -126,13 +122,13 @@ async fn reset_all_settings() {

     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    assert_eq!(response["updateId"], 0);
-    index.wait_update_id(0).await;
+    assert_eq!(response["uid"], 0);
+    index.wait_task(0).await;

     index
         .update_settings(json!({"displayedAttributes": ["name", "age"], "searchableAttributes": ["name"], "stopWords": ["the"], "filterableAttributes": ["age"], "synonyms": {"puppy": ["dog", "doggo", "potat"] }}))
         .await;
-    index.wait_update_id(1).await;
+    index.wait_task(1).await;
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     assert_eq!(response["displayedAttributes"], json!(["name", "age"]));
@@ -145,7 +141,7 @@ async fn reset_all_settings() {
     assert_eq!(response["filterableAttributes"], json!(["age"]));

     index.delete_settings().await;
-    index.wait_update_id(2).await;
+    index.wait_task(2).await;

     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
@@ -166,10 +162,13 @@ async fn update_setting_unexisting_index() {
     let index = server.index("test");
     let (_response, code) = index.update_settings(json!({})).await;
     assert_eq!(code, 202);
+    let response = index.wait_task(0).await;
+    assert_eq!(response["status"], "succeeded");
     let (_response, code) = index.get().await;
     assert_eq!(code, 200);
-    let (_response, code) = index.delete_settings().await;
-    assert_eq!(code, 202);
+    index.delete_settings().await;
+    let response = index.wait_task(1).await;
+    assert_eq!(response["status"], "succeeded");
 }

 #[actix_rt::test]
@@ -177,16 +176,15 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
     let server = Server::new().await;
     let index = server.index("test##! ");
     let (response, code) = index.update_settings(json!({})).await;
-    assert_eq!(code, 400);

-    let expected_response = json!({
+    let expected = json!({
         "message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
         "code": "invalid_index_uid",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
-    });
+        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"});

-    assert_eq!(response, expected_response);
+    assert_eq!(code, 400);
+    assert_eq!(response, expected);
 }

 macro_rules! test_setting_routes {
@@ -218,6 +216,7 @@ macro_rules! test_setting_routes {
                 .collect::<String>());
             let (response, code) = server.service.post(url, serde_json::Value::Null).await;
             assert_eq!(code, 202, "{}", response);
+            server.index("").wait_task(0).await;
             let (response, code) = server.index("test").get().await;
             assert_eq!(code, 200, "{}", response);
         }
@@ -230,8 +229,10 @@ macro_rules! test_setting_routes {
                 .chars()
                 .map(|c| if c == '_' { '-' } else { c })
                 .collect::<String>());
-            let (response, code) = server.service.delete(url).await;
-            assert_eq!(code, 404, "{}", response);
+            let (_, code) = server.service.delete(url).await;
+            assert_eq!(code, 202);
+            let response = server.index("").wait_task(0).await;
+            assert_eq!(response["status"], "failed");
         }

         #[actix_rt::test]
@@ -239,7 +240,8 @@ macro_rules! test_setting_routes {
             let server = Server::new().await;
             let index = server.index("test");
             let (response, code) = index.create(None).await;
-            assert_eq!(code, 201, "{}", response);
+            assert_eq!(code, 202, "{}", response);
+            index.wait_task(0).await;
             let url = format!("/indexes/test/settings/{}",
                 stringify!($setting)
                 .chars()
@@ -274,8 +276,8 @@ async fn error_set_invalid_ranking_rules() {
     let (_response, _code) = index
         .update_settings(json!({ "rankingRules": [ "manyTheFish"]}))
         .await;
-    index.wait_update_id(0).await;
-    let (response, code) = index.get_update(0).await;
+    index.wait_task(1).await;
+    let (response, code) = index.get_task(1).await;

     assert_eq!(code, 200);
     assert_eq!(response["status"], "failed");
@@ -296,7 +298,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
     let index = server.index("test");

     let (_response, _code) = index.update_distinct_attribute(json!("test")).await;
-    index.wait_update_id(0).await;
+    index.wait_task(0).await;

     let (response, _) = index.get_distinct_attribute().await;

@@ -304,7 +306,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {

     index.update_distinct_attribute(json!(null)).await;

-    index.wait_update_id(1).await;
+    index.wait_task(1).await;

     let (response, _) = index.get_distinct_attribute().await;

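Note the shift in the error model here: deleting the settings of a missing index now returns 202 Accepted, and the failure is only observable on the task itself. A hedged illustration of the failed-task payload this implies; only `status == "failed"` is actually asserted above, so the `error` nesting and the other field values are assumptions:

```rust
// Illustrative only: what a failed task for the deleted-settings case might
// carry. The old synchronous error object is assumed to move under `error`.
let failed_task = json!({
    "uid": 0,
    "indexUid": "test",
    "status": "failed",
    "type": "settingsUpdate",
    "error": {
        "message": "Index `test` not found.",
        "code": "index_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#index_not_found"
    }
});
```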
@@ -7,6 +7,28 @@ use tokio::time::sleep;

 use meilisearch_http::Opt;

+macro_rules! verify_snapshot {
+    (
+        $orig:expr,
+        $snapshot: expr,
+        |$server:ident| =>
+        $($e:expr,)+) => {
+        use std::sync::Arc;
+        let snapshot = Arc::new($snapshot);
+        let orig = Arc::new($orig);
+        $(
+            {
+                let test = |$server: Arc<Server>| async move {
+                    $e.await
+                };
+                let (snapshot, _) = test(snapshot.clone()).await;
+                let (orig, _) = test(orig.clone()).await;
+                assert_eq!(snapshot, orig);
+            }
+        )*
+    };
+}
+
 #[actix_rt::test]
 async fn perform_snapshot() {
     let temp = tempfile::tempdir().unwrap();
@@ -20,33 +42,42 @@ async fn perform_snapshot() {
     };

     let server = Server::new_with_options(options).await;

     let index = server.index("test");
     index
         .update_settings(serde_json::json! ({
             "searchableAttributes": [],
         }))
         .await;

     index.load_test_set().await;

-    let (response, _) = index
-        .get_all_documents(GetAllDocumentsOptions::default())
-        .await;
+    server.index("test1").create(Some("prim")).await;
+
+    index.wait_task(2).await;

     sleep(Duration::from_secs(2)).await;

     let temp = tempfile::tempdir().unwrap();

-    let snapshot_path = snapshot_dir
-        .path()
-        .to_owned()
-        .join("db.snapshot".to_string());
+    let snapshot_path = snapshot_dir.path().to_owned().join("db.snapshot");

     let options = Opt {
         import_snapshot: Some(snapshot_path),
         ..default_settings(temp.path())
     };

-    let server = Server::new_with_options(options).await;
-    let index = server.index("test");
+    let snapshot_server = Server::new_with_options(options).await;

-    let (response_from_snapshot, _) = index
-        .get_all_documents(GetAllDocumentsOptions::default())
-        .await;
-
-    assert_eq!(response, response_from_snapshot);
+    verify_snapshot!(server, snapshot_server, |server| =>
+        server.list_indexes(),
+        // for some reason the db sizes differ. this may be due to the compaction options we have
+        // set when performing the snapshot
+        //server.stats(),
+        server.tasks(),
+        server.index("test").get_all_documents(GetAllDocumentsOptions::default()),
+        server.index("test").settings(),
+        server.index("test1").get_all_documents(GetAllDocumentsOptions::default()),
+        server.index("test1").settings(),
+    );
 }

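Expanding `verify_snapshot!` by hand for a single expression makes its mechanics concrete: both servers are wrapped in `Arc` so one closure can be run against each, and the two responses must match. This is a sketch of the generated shape, not compiler output:

```rust
// Hand-expansion of
// `verify_snapshot!(server, snapshot_server, |server| => server.list_indexes(),)`
// for one expression.
use std::sync::Arc;
let snapshot = Arc::new(snapshot_server);
let orig = Arc::new(server);
{
    let test = |server: Arc<Server>| async move { server.list_indexes().await };
    let (snapshot, _) = test(snapshot.clone()).await;
    let (orig, _) = test(orig.clone()).await;
    assert_eq!(snapshot, orig); // the restored server must answer identically
}
```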
@@ -1,4 +1,5 @@
 use serde_json::json;
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};

 use crate::common::Server;

@@ -28,7 +29,8 @@ async fn stats() {
     let index = server.index("test");
     let (_, code) = index.create(Some("id")).await;

-    assert_eq!(code, 201);
+    assert_eq!(code, 202);
+    index.wait_task(0).await;

     let (response, code) = server.stats().await;

@@ -52,16 +54,19 @@ async fn stats() {

     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202, "{}", response);
-    assert_eq!(response["updateId"], 0);
+    assert_eq!(response["uid"], 1);

-    let response = index.wait_update_id(0).await;
-    println!("response: {}", response);
+    index.wait_task(1).await;

+    let timestamp = OffsetDateTime::now_utc();
     let (response, code) = server.stats().await;

     assert_eq!(code, 200);
     assert!(response["databaseSize"].as_u64().unwrap() > 0);
-    assert!(response.get("lastUpdate").is_some());
+    let last_update =
+        OffsetDateTime::parse(response["lastUpdate"].as_str().unwrap(), &Rfc3339).unwrap();
+    assert!(last_update - timestamp < time::Duration::SECOND);

     assert_eq!(response["indexes"]["test"]["numberOfDocuments"], 2);
     assert!(response["indexes"]["test"]["isIndexing"] == false);
     assert_eq!(response["indexes"]["test"]["fieldDistribution"]["id"], 2);

meilisearch-http/tests/tasks/mod.rs (new file, 135 lines)
@@ -0,0 +1,135 @@
+use crate::common::Server;
+use serde_json::json;
+use time::format_description::well_known::Rfc3339;
+use time::OffsetDateTime;
+
+#[actix_rt::test]
+async fn error_get_task_unexisting_index() {
+    let server = Server::new().await;
+    let (response, code) = server.service.get("/indexes/test/tasks").await;
+
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
+    assert_eq!(response, expected_response);
+    assert_eq!(code, 404);
+}
+
+#[actix_rt::test]
+async fn error_get_unexisting_task_status() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    let (response, code) = index.get_task(1).await;
+
+    let expected_response = json!({
+        "message": "Task `1` not found.",
+        "code": "task_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#task_not_found"
+    });
+
+    assert_eq!(response, expected_response);
+    assert_eq!(code, 404);
+}
+
+#[actix_rt::test]
+async fn get_task_status() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index
+        .add_documents(
+            serde_json::json!([{
+                "id": 1,
+                "content": "foobar",
+            }]),
+            None,
+        )
+        .await;
+    index.wait_task(0).await;
+    let (_response, code) = index.get_task(1).await;
+    assert_eq!(code, 200);
+    // TODO check response format, as per #48
+}
+
+#[actix_rt::test]
+async fn error_list_tasks_unexisting_index() {
+    let server = Server::new().await;
+    let (response, code) = server.index("test").list_tasks().await;
+
+    let expected_response = json!({
+        "message": "Index `test` not found.",
+        "code": "index_not_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
+    assert_eq!(response, expected_response);
+    assert_eq!(code, 404);
+}
+
+#[actix_rt::test]
+async fn list_tasks() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    index
+        .add_documents(
+            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
+            None,
+        )
+        .await;
+    let (response, code) = index.list_tasks().await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+}
+
+macro_rules! assert_valid_summarized_task {
+    ($response:expr, $task_type:literal, $index:literal) => {{
+        assert_eq!($response.as_object().unwrap().len(), 5);
+        assert!($response["uid"].as_u64().is_some());
+        assert_eq!($response["indexUid"], $index);
+        assert_eq!($response["status"], "enqueued");
+        assert_eq!($response["type"], $task_type);
+        let date = $response["enqueuedAt"].as_str().expect("missing date");
+
+        OffsetDateTime::parse(date, &Rfc3339).unwrap();
+    }};
+}
+
+#[actix_web::test]
+async fn test_summarized_task_view() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let (response, _) = index.create(None).await;
+    assert_valid_summarized_task!(response, "indexCreation", "test");
+
+    let (response, _) = index.update(None).await;
+    assert_valid_summarized_task!(response, "indexUpdate", "test");
+
+    let (response, _) = index.update_settings(json!({})).await;
+    assert_valid_summarized_task!(response, "settingsUpdate", "test");
+
+    let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
+    assert_valid_summarized_task!(response, "documentPartial", "test");
+
+    let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
+    assert_valid_summarized_task!(response, "documentAddition", "test");
+
+    let (response, _) = index.delete_document(1).await;
+    assert_valid_summarized_task!(response, "documentDeletion", "test");
+
+    let (response, _) = index.clear_all_documents().await;
+    assert_valid_summarized_task!(response, "clearAll", "test");
+
+    let (response, _) = index.delete().await;
+    assert_valid_summarized_task!(response, "indexDeletion", "test");
+}

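From the five fields the macro asserts, a freshly enqueued task summary looks roughly like this (the values are illustrative; only the field set, the `enqueued` status, and the RFC 3339 date format are checked above):

```rust
// Illustrative only: the five fields asserted by `assert_valid_summarized_task!`.
let expected_shape = json!({
    "uid": 0,                            // any u64
    "indexUid": "test",
    "status": "enqueued",
    "type": "indexCreation",
    "enqueuedAt": "2022-02-01T12:00:00Z" // must parse as RFC 3339
});
```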
@@ -1,97 +0,0 @@
-use crate::common::Server;
-use serde_json::json;
-
-#[actix_rt::test]
-async fn error_get_update_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.index("test").get_update(0).await;
-
-    let expected_response = json!({
-        "message": "Index `test` not found.",
-        "code": "index_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index_not_found"
-    });
-
-    assert_eq!(response, expected_response);
-    assert_eq!(code, 404);
-}
-
-#[actix_rt::test]
-async fn error_get_unexisting_update_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    index.create(None).await;
-    let (response, code) = index.get_update(0).await;
-
-    let expected_response = json!({
-        "message": "Task `0` not found.",
-        "code": "task_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#task_not_found"
-    });
-
-    assert_eq!(response, expected_response);
-    assert_eq!(code, 404);
-}
-
-#[actix_rt::test]
-async fn get_update_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    index.create(None).await;
-    index
-        .add_documents(
-            serde_json::json!([{
-                "id": 1,
-                "content": "foobar",
-            }]),
-            None,
-        )
-        .await;
-    let (_response, code) = index.get_update(0).await;
-    assert_eq!(code, 200);
-    // TODO check response format, as per #48
-}
-
-#[actix_rt::test]
-async fn error_list_updates_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.index("test").list_updates().await;
-
-    let expected_response = json!({
-        "message": "Index `test` not found.",
-        "code": "index_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index_not_found"
-    });
-
-    assert_eq!(response, expected_response);
-    assert_eq!(code, 404);
-}
-
-#[actix_rt::test]
-async fn list_no_updates() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    index.create(None).await;
-    let (response, code) = index.list_updates().await;
-    assert_eq!(code, 200);
-    assert!(response.as_array().unwrap().is_empty());
-}
-
-#[actix_rt::test]
-async fn list_updates() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    index.create(None).await;
-    index
-        .add_documents(
-            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
-            None,
-        )
-        .await;
-    let (response, code) = index.list_updates().await;
-    assert_eq!(code, 200);
-    assert_eq!(response.as_array().unwrap().len(), 1);
-}

@@ -1,20 +1,17 @@
 [package]
 name = "meilisearch-lib"
-version = "0.24.0"
-edition = "2018"
+version = "0.26.1"
+edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-actix-web = { version = "4.0.0-beta.9", features = ["rustls"] }
-actix-web-static-files = { git = "https://github.com/MarinPostma/actix-web-static-files.git", rev = "39d8006", optional = true }
+actix-web = { version = "4", default-features = false }
 anyhow = { version = "1.0.43", features = ["backtrace"] }
 async-stream = "0.3.2"
 async-trait = "0.1.51"
 arc-swap = "1.3.2"
 byte-unit = { version = "4.0.12", default-features = false, features = ["std"] }
 bytes = "1.1.0"
-chrono = { version = "0.4.19", features = ["serde"] }
 csv = "1.1.6"
 crossbeam-channel = "0.5.1"
 either = "1.6.1"
@@ -29,8 +26,8 @@ itertools = "0.10.1"
 lazy_static = "1.4.0"
 log = "0.4.14"
 meilisearch-error = { path = "../meilisearch-error" }
-meilisearch-tokenizer = { git = "https://github.com/meilisearch/tokenizer.git", tag = "v0.2.5" }
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.20.2" }
+meilisearch-auth = { path = "../meilisearch-auth" }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.22.2" }
 mime = "0.3.16"
 num_cpus = "1.13.0"
 once_cell = "1.8.0"
@@ -43,10 +40,11 @@ serde = { version = "1.0.130", features = ["derive"] }
 serde_json = { version = "1.0.67", features = ["preserve_order"] }
 siphasher = "0.3.7"
 slice-group-by = "0.2.6"
-structopt = "0.3.23"
+clap = { version = "3.0", features = ["derive", "env"] }
 tar = "0.4.37"
 tempfile = "3.2.0"
 thiserror = "1.0.28"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tokio = { version = "1.11.0", features = ["full"] }
 uuid = { version = "0.8.2", features = ["serde"] }
 walkdir = "2.3.2"
@@ -56,8 +54,14 @@ whoami = { version = "1.1.3", optional = true }
 reqwest = { version = "0.11.4", features = ["json", "rustls-tls"], default-features = false, optional = true }
 sysinfo = "0.20.2"
 derivative = "2.2.0"
 fs_extra = "1.2.0"
+atomic_refcell = "0.1.8"

 [dev-dependencies]
 actix-rt = "2.2.0"
 mockall = "0.10.2"
 paste = "1.0.5"
+nelson = { git = "https://github.com/MarinPostma/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
+meilisearch-error = { path = "../meilisearch-error", features = ["test-traits"] }
+proptest = "1.0.0"
+proptest-derive = "0.3.0"

meilisearch-lib/proptest-regressions/index_resolver/mod.txt (new file, 19 lines)
@@ -0,0 +1,19 @@
+# Seeds for failure cases proptest has generated in the past. It is
+# automatically read and these particular cases re-run before any
+# novel cases are generated.
+#
+# It is recommended to check this file in to source control so that
+# everyone who runs the test benefits from these saved cases.
+cc 6f3ae3cba934ba3e328e2306218c32f27a46ce2d54a1258b05fef65663208662 # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: DocumentAddition { content_uuid: 37bc137d-2038-47f0-819f-b133233daadc, merge_strategy: ReplaceDocuments, primary_key: None, documents_count: 0 }, events: [] }
+cc b726f7d9f44a9216aad302ddba0f04e7108817e741d656a4759aea8562de4d63 # shrinks to task = Task { id: 0, index_uid: IndexUid("_"), content: IndexDeletion, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
+cc 427ec2dde3260b1ab334207bdc22adef28a5b8532b9902c84b55fd2c017ea7e1 # shrinks to task = Task { id: 0, index_uid: IndexUid("A"), content: IndexDeletion, events: [] }, index_exists = true, index_op_fails = false, any_int = 0
+cc c24f3d42f0f36fbdbf4e9d4327e75529b163ac580d63a5934ca05e9b5bd23a65 # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: IndexDeletion, events: [] }, index_exists = true, index_op_fails = true, any_int = 0
+cc 8084e2410801b997533b0bcbad75cd212873cfc2677f26847f68c568ead1604c # shrinks to task = Task { id: 0, index_uid: IndexUid("A"), content: SettingsUpdate { settings: Settings { displayed_attributes: NotSet, searchable_attributes: NotSet, filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, synonyms: NotSet, distinct_attribute: NotSet, _kind: PhantomData }, is_deletion: false }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
+cc 330085e0200a9a2ddfdd764a03d768aa95c431bcaafbd530c8c949425beed18b # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: CreateIndex { primary_key: None }, events: [] }, index_exists = false, index_op_fails = true, any_int = 0
+cc c70e901576ef2fb9622e814bdecd11e4747cd70d71a9a6ce771b5b7256a187c0 # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: SettingsUpdate { settings: Settings { displayed_attributes: NotSet, searchable_attributes: NotSet, filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, synonyms: NotSet, distinct_attribute: NotSet, _kind: PhantomData }, is_deletion: true }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
+cc 3fe2c38cbc2cca34ecde321472141d386056f0cd332cbf700773657715a382b5 # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: UpdateIndex { primary_key: None }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
+cc c31cf86692968483f1ab08a6a9d4667ccb9635c306998551bf1eb1f135ef0d4b # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: UpdateIndex { primary_key: Some("") }, events: [] }, index_exists = true, index_op_fails = false, any_int = 0
+cc 3a01c78db082434b8a4f8914abf0d1059d39f4426d16df20d72e1bd7ebb94a6a # shrinks to task = Task { id: 0, index_uid: IndexUid("0"), content: UpdateIndex { primary_key: None }, events: [] }, index_exists = true, index_op_fails = true, any_int = 0
+cc c450806df3921d1e6fe9b6af93d999e8196d0175b69b64f1810802582421e94a # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: CreateIndex { primary_key: Some("") }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
+cc fb6b98947cbdbdee05ed3c0bf2923aad2c311edc276253642eb43a0c0ec4888a # shrinks to task = Task { id: 0, index_uid: IndexUid("A"), content: CreateIndex { primary_key: Some("") }, events: [] }, index_exists = false, index_op_fails = true, any_int = 0
+cc 1aa59d8e22484e9915efbb5818e1e1ab684aa61b166dc82130d6221663ba00bf # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: DocumentDeletion(Clear), events: [] }, index_exists = true, index_op_fails = false, any_int = 0

@@ -0,0 +1,7 @@
+# Seeds for failure cases proptest has generated in the past. It is
+# automatically read and these particular cases re-run before any
+# novel cases are generated.
+#
+# It is recommended to check this file in to source control so that
+# everyone who runs the test benefits from these saved cases.
+cc 8cbd6c45ce8c5611ec3f2f94fd485f6a8eeccc470fa426e59bdfd4d9e7fce0e1 # shrinks to bytes = []

@@ -1,7 +1,7 @@
 use std::fmt;
 use std::io::{self, BufRead, BufReader, BufWriter, Cursor, Read, Seek, Write};

-use meilisearch_error::{Code, ErrorCode};
+use meilisearch_error::{internal_error, Code, ErrorCode};
 use milli::documents::DocumentBatchBuilder;

 type Result<T> = std::result::Result<T, DocumentFormatError>;
@@ -32,8 +32,6 @@ pub enum DocumentFormatError {
         Box<dyn std::error::Error + Send + Sync + 'static>,
         PayloadType,
     ),
-    #[error("The `{0}` payload must contain at least one document.")]
-    EmptyPayload(PayloadType),
 }

 impl From<(PayloadType, milli::documents::Error)> for DocumentFormatError {
@@ -50,7 +48,6 @@ impl ErrorCode for DocumentFormatError {
         match self {
             DocumentFormatError::Internal(_) => Code::Internal,
             DocumentFormatError::MalformedPayload(_, _) => Code::MalformedPayload,
-            DocumentFormatError::EmptyPayload(_) => Code::MalformedPayload,
         }
     }
 }
@@ -58,22 +55,18 @@ impl ErrorCode for DocumentFormatError {
 internal_error!(DocumentFormatError: io::Error);

 /// Reads csv from input and writes an obkv batch to writer.
-pub fn read_csv(input: impl Read, writer: impl Write + Seek) -> Result<()> {
+pub fn read_csv(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
     let writer = BufWriter::new(writer);
     let builder =
         DocumentBatchBuilder::from_csv(input, writer).map_err(|e| (PayloadType::Csv, e))?;

-    if builder.len() == 0 {
-        return Err(DocumentFormatError::EmptyPayload(PayloadType::Csv));
-    }
+    let count = builder.finish().map_err(|e| (PayloadType::Csv, e))?;

-    builder.finish().map_err(|e| (PayloadType::Csv, e))?;
-
-    Ok(())
+    Ok(count)
 }

 /// Reads jsonl from input and writes an obkv batch to writer.
-pub fn read_ndjson(input: impl Read, writer: impl Write + Seek) -> Result<()> {
+pub fn read_ndjson(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
     let mut reader = BufReader::new(input);
     let writer = BufWriter::new(writer);

@@ -81,34 +74,31 @@ pub fn read_ndjson(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
     let mut buf = String::new();

     while reader.read_line(&mut buf)? > 0 {
         // skip empty lines
         if buf == "\n" {
             buf.clear();
             continue;
         }
         builder
             .extend_from_json(Cursor::new(&buf.as_bytes()))
             .map_err(|e| (PayloadType::Ndjson, e))?;
         buf.clear();
     }

-    if builder.len() == 0 {
-        return Err(DocumentFormatError::EmptyPayload(PayloadType::Ndjson));
-    }
+    let count = builder.finish().map_err(|e| (PayloadType::Ndjson, e))?;

-    builder.finish().map_err(|e| (PayloadType::Ndjson, e))?;
-
-    Ok(())
+    Ok(count)
 }

 /// Reads json from input and writes an obkv batch to writer.
-pub fn read_json(input: impl Read, writer: impl Write + Seek) -> Result<()> {
+pub fn read_json(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
     let writer = BufWriter::new(writer);
     let mut builder = DocumentBatchBuilder::new(writer).map_err(|e| (PayloadType::Json, e))?;
     builder
         .extend_from_json(input)
         .map_err(|e| (PayloadType::Json, e))?;

-    if builder.len() == 0 {
-        return Err(DocumentFormatError::EmptyPayload(PayloadType::Json));
-    }
+    let count = builder.finish().map_err(|e| (PayloadType::Json, e))?;

-    builder.finish().map_err(|e| (PayloadType::Json, e))?;
-
-    Ok(())
+    Ok(count)
 }

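With the `EmptyPayload` variant gone from this module, the readers now report how many documents they wrote and leave the zero-document case to each caller. A sketch of the calling pattern (the function and variable names are placeholders, not identifiers from this repository):

```rust
use std::io::{Read, Seek, Write};

// Sketch: consuming the new `Result<usize>` return value.
fn index_payload(payload: impl Read, writer: impl Write + Seek) -> anyhow::Result<()> {
    let count = read_ndjson(payload, writer)?;
    if count == 0 {
        // An empty payload is no longer an error in document_formats;
        // the caller decides what zero documents means (the dump loader
        // below simply skips the document addition, via `Ok(0) => true`).
        return Ok(());
    }
    // ...hand the written batch to the indexer here...
    Ok(())
}
```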
@@ -4,18 +4,6 @@ use std::fmt;
 use meilisearch_error::{Code, ErrorCode};
 use milli::UserError;

-macro_rules! internal_error {
-    ($target:ty : $($other:path), *) => {
-        $(
-            impl From<$other> for $target {
-                fn from(other: $other) -> Self {
-                    Self::Internal(Box::new(other))
-                }
-            }
-        )*
-    }
-}
-
 #[derive(Debug)]
 pub struct MilliError<'a>(pub &'a milli::Error);

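The macro being dropped here now comes from `meilisearch-error`, as the import changes above show, and it is small: each listed error type gets a `From` impl that boxes the source error into the `Internal` variant. For instance, `internal_error!(DocumentFormatError: io::Error)` expands to roughly:

```rust
impl From<io::Error> for DocumentFormatError {
    fn from(other: io::Error) -> Self {
        // every "internal" source error is boxed into the Internal variant
        Self::Internal(Box::new(other))
    }
}
```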
@@ -6,10 +6,10 @@ use anyhow::Context;
 use heed::{EnvOpenOptions, RoTxn};
 use indexmap::IndexMap;
 use milli::documents::DocumentBatchReader;
+use milli::update::{IndexDocumentsConfig, IndexerConfig};
 use serde::{Deserialize, Serialize};

 use crate::document_formats::read_ndjson;
-use crate::index::update_handler::UpdateHandler;
 use crate::index::updates::apply_settings_to_builder;

 use super::error::Result;
@@ -28,9 +28,7 @@ impl Index {
     pub fn dump(&self, path: impl AsRef<Path>) -> Result<()> {
         // acquire a write txn to make sure any ongoing write is finished before we start.
         let txn = self.env.write_txn()?;
-        let path = path
-            .as_ref()
-            .join(format!("indexes/{}", self.uuid.to_string()));
+        let path = path.as_ref().join(format!("indexes/{}", self.uuid));

         create_dir_all(&path)?;

@@ -87,7 +85,7 @@ impl Index {
         src: impl AsRef<Path>,
         dst: impl AsRef<Path>,
         size: usize,
-        update_handler: &UpdateHandler,
+        indexer_config: &IndexerConfig,
     ) -> anyhow::Result<()> {
         let dir_name = src
             .as_ref()
@@ -112,8 +110,7 @@ impl Index {
         let mut txn = index.write_txn()?;

         // Apply settings first
-        let builder = update_handler.update_builder(0);
-        let mut builder = builder.settings(&mut txn, &index);
+        let mut builder = milli::update::Settings::new(&mut txn, &index, indexer_config);

         if let Some(primary_key) = primary_key {
             builder.set_primary_key(primary_key);
@@ -121,30 +118,40 @@ impl Index {

         apply_settings_to_builder(&settings, &mut builder);

-        builder.execute(|_, _| ())?;
+        builder.execute(|_| ())?;

         let document_file_path = src.as_ref().join(DATA_FILE_NAME);
         let reader = BufReader::new(File::open(&document_file_path)?);

         let mut tmp_doc_file = tempfile::tempfile()?;

-        read_ndjson(reader, &mut tmp_doc_file)?;
+        let empty = match read_ndjson(reader, &mut tmp_doc_file) {
+            // if there was no document in the file it's because the index was empty
+            Ok(0) => true,
+            Ok(_) => false,
+            Err(e) => return Err(e.into()),
+        };

-        tmp_doc_file.seek(SeekFrom::Start(0))?;
-
-        let documents_reader = DocumentBatchReader::from_reader(tmp_doc_file)?;
-
-        // If the document file is empty, we don't perform the document addition, to prevent
-        // a primary key error from being thrown.
-        if !documents_reader.is_empty() {
-            let builder = update_handler
-                .update_builder(0)
-                .index_documents(&mut txn, &index);
-            builder.execute(documents_reader, |_, _| ())?;
+        if !empty {
+            tmp_doc_file.seek(SeekFrom::Start(0))?;
+
+            let documents_reader = DocumentBatchReader::from_reader(tmp_doc_file)?;
+
+            // If the document file is empty, we don't perform the document addition, to prevent
+            // a primary key error from being thrown.
+            let config = IndexDocumentsConfig::default();
+            let mut builder = milli::update::IndexDocuments::new(
+                &mut txn,
+                &index,
+                indexer_config,
+                config,
+                |_| (),
+            );
+            builder.add_documents(documents_reader)?;
+            builder.execute()?;
         }

         txn.commit()?;

         index.prepare_for_closing().wait();

         Ok(())

@@ -1,9 +1,9 @@
 use std::error::Error;

-use meilisearch_error::{Code, ErrorCode};
+use meilisearch_error::{internal_error, Code, ErrorCode};
 use serde_json::Value;

-use crate::error::MilliError;
+use crate::{error::MilliError, update_file_store};

 pub type Result<T> = std::result::Result<T, IndexError>;

@@ -23,7 +23,9 @@ internal_error!(
     IndexError: std::io::Error,
     heed::Error,
     fst::Error,
-    serde_json::Error
+    serde_json::Error,
+    update_file_store::UpdateFileStoreError,
+    milli::documents::Error
 );

 impl ErrorCode for IndexError {

@@ -5,20 +5,18 @@ use std::ops::Deref;
 use std::path::Path;
 use std::sync::Arc;

-use chrono::{DateTime, Utc};
 use heed::{EnvOpenOptions, RoTxn};
-use milli::update::Setting;
+use milli::update::{IndexerConfig, Setting};
 use milli::{obkv_to_json, FieldDistribution, FieldId};
 use serde::{Deserialize, Serialize};
 use serde_json::{Map, Value};
+use time::OffsetDateTime;
 use uuid::Uuid;

 use crate::index_controller::update_file_store::UpdateFileStore;
 use crate::EnvSizer;

 use super::error::IndexError;
 use super::error::Result;
-use super::update_handler::UpdateHandler;
 use super::{Checked, Settings};

 pub type Document = Map<String, Value>;
@@ -26,8 +24,10 @@ pub type Document = Map<String, Value>;
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct IndexMeta {
-    created_at: DateTime<Utc>,
-    pub updated_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub created_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub updated_at: OffsetDateTime,
     pub primary_key: Option<String>,
 }

@@ -69,9 +69,7 @@ pub struct Index {
     #[derivative(Debug = "ignore")]
     pub inner: Arc<milli::Index>,
     #[derivative(Debug = "ignore")]
-    pub update_file_store: Arc<UpdateFileStore>,
-    #[derivative(Debug = "ignore")]
-    pub update_handler: Arc<UpdateHandler>,
+    pub indexer_config: Arc<IndexerConfig>,
 }

 impl Deref for Index {
@@ -86,24 +84,24 @@ impl Index {
     pub fn open(
         path: impl AsRef<Path>,
         size: usize,
-        update_file_store: Arc<UpdateFileStore>,
         uuid: Uuid,
-        update_handler: Arc<UpdateHandler>,
+        update_handler: Arc<IndexerConfig>,
     ) -> Result<Self> {
         log::debug!("opening index in {}", path.as_ref().display());
         create_dir_all(&path)?;
         let mut options = EnvOpenOptions::new();
         options.map_size(size);
         let inner = Arc::new(milli::Index::new(options, &path)?);
         Ok(Index {
             inner,
-            update_file_store,
             uuid,
-            update_handler,
+            indexer_config: update_handler,
         })
     }

-    pub fn inner(&self) -> &milli::Index {
-        &self.inner
+    /// Asynchronously close the underlying index
+    pub fn close(self) {
+        self.inner.as_ref().clone().prepare_for_closing();
     }

     pub fn stats(&self) -> Result<IndexStats> {
@@ -154,7 +152,7 @@ impl Index {
             Ok(stop_words.stream().into_strs()?.into_iter().collect())
         })
         .transpose()?
-        .unwrap_or_else(BTreeSet::new);
+        .unwrap_or_default();
     let distinct_field = self.distinct_field(txn)?.map(String::from);

     // in milli each word in the synonyms map was split on its separator. Since we lost
@@ -284,3 +282,17 @@ impl Index {
         Ok(())
     }
 }
+
+/// When running tests, when a server instance is dropped, the environment is not actually closed,
+/// leaving a lot of open file descriptors.
+impl Drop for Index {
+    fn drop(&mut self) {
+        // When dropping the last instance of an index, we want to close the index.
+        // Note that the close is actually performed only when all the instances are effectively
+        // dropped.
+
+        if Arc::strong_count(&self.inner) == 1 {
+            self.inner.as_ref().clone().prepare_for_closing();
+        }
+    }
+}

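The new `Drop` impl closes the LMDB environment only when the last clone of the index goes away. The guard is the standard `Arc::strong_count` idiom; here is a self-contained illustration of the pattern, unrelated to the real `milli::Index` type:

```rust
use std::sync::Arc;

struct Handle {
    inner: Arc<String>, // stands in for Arc<milli::Index>
}

impl Drop for Handle {
    fn drop(&mut self) {
        // Run the cleanup only when this is the last handle holding the inner
        // Arc, mirroring the `Arc::strong_count(&self.inner) == 1` guard above.
        if Arc::strong_count(&self.inner) == 1 {
            println!("closing {}", self.inner);
        }
    }
}

fn main() {
    let a = Handle { inner: Arc::new("index".into()) };
    let b = Handle { inner: a.inner.clone() };
    drop(a); // strong_count is still 2 here: no close
    drop(b); // strong_count is 1: the close runs
}
```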
@@ -4,8 +4,7 @@ pub use updates::{apply_settings_to_builder, Checked, Facets, Settings, Unchecke
|
||||
mod dump;
|
||||
pub mod error;
|
||||
mod search;
|
||||
pub mod update_handler;
|
||||
mod updates;
|
||||
pub mod updates;
|
||||
|
||||
#[allow(clippy::module_inception)]
|
||||
mod index;
|
||||
@@ -22,234 +21,75 @@ pub use test::MockIndex as Index;
|
||||
/// code for unit testing, in places where an index would normally be used.
|
||||
#[cfg(test)]
|
||||
pub mod test {
|
||||
use std::any::Any;
|
||||
use std::collections::HashMap;
|
||||
use std::panic::{RefUnwindSafe, UnwindSafe};
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::sync::Arc;
|
||||
|
||||
use milli::update::IndexerConfig;
|
||||
use milli::update::{DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsMethod};
|
||||
use nelson::Mocker;
|
||||
use serde_json::{Map, Value};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::index_controller::update_file_store::UpdateFileStore;
|
||||
use crate::index_controller::updates::status::{Failed, Processed, Processing};
|
||||
|
||||
use super::error::Result;
|
||||
use super::index::Index;
|
||||
use super::update_handler::UpdateHandler;
|
||||
use super::{Checked, IndexMeta, IndexStats, SearchQuery, SearchResult, Settings};
|
||||
use crate::update_file_store::UpdateFileStore;
|
||||
|
||||
pub struct Stub<A, R> {
|
||||
name: String,
|
||||
times: Mutex<Option<usize>>,
|
||||
stub: Box<dyn Fn(A) -> R + Sync + Send>,
|
||||
invalidated: AtomicBool,
|
||||
}
|
||||
|
||||
impl<A, R> Drop for Stub<A, R> {
|
||||
fn drop(&mut self) {
|
||||
if !self.invalidated.load(Ordering::Relaxed) {
|
||||
let lock = self.times.lock().unwrap();
|
||||
if let Some(n) = *lock {
|
||||
assert_eq!(n, 0, "{} not called enough times", self.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<A, R> Stub<A, R> {
|
||||
fn invalidate(&self) {
|
||||
self.invalidated.store(true, Ordering::Relaxed);
|
||||
}
|
||||
}
|
||||
|
||||
impl<A: UnwindSafe, R> Stub<A, R> {
|
||||
fn call(&self, args: A) -> R {
|
||||
let mut lock = self.times.lock().unwrap();
|
||||
match *lock {
|
||||
Some(0) => panic!("{} called to many times", self.name),
|
||||
Some(ref mut times) => {
|
||||
*times -= 1;
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
|
||||
// Since we add assertions in the drop implementation for Stub, a panic can occur in a
|
||||
// panic, causing a hard abort of the program. To handle that, we catch the panic, and
|
||||
// set the stub as invalidated so the assertions aren't run during the drop.
|
||||
impl<'a, A, R> RefUnwindSafe for StubHolder<'a, A, R> {}
|
||||
struct StubHolder<'a, A, R>(&'a (dyn Fn(A) -> R + Sync + Send));
|
||||
|
||||
let stub = StubHolder(self.stub.as_ref());
|
||||
|
||||
match std::panic::catch_unwind(|| (stub.0)(args)) {
|
||||
Ok(r) => r,
|
||||
Err(panic) => {
|
||||
self.invalidate();
|
||||
std::panic::resume_unwind(panic);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct StubStore {
|
||||
inner: Arc<Mutex<HashMap<String, Box<dyn Any + Sync + Send>>>>,
|
||||
}
|
||||
|
||||
impl StubStore {
|
||||
pub fn insert<A: 'static, R: 'static>(&self, name: String, stub: Stub<A, R>) {
|
||||
let mut lock = self.inner.lock().unwrap();
|
||||
lock.insert(name, Box::new(stub));
|
||||
}
|
||||
|
||||
pub fn get<A, B>(&self, name: &str) -> Option<&Stub<A, B>> {
|
||||
let mut lock = self.inner.lock().unwrap();
|
||||
match lock.get_mut(name) {
|
||||
Some(s) => {
|
||||
let s = s.as_mut() as *mut dyn Any as *mut Stub<A, B>;
|
||||
Some(unsafe { &mut *s })
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct StubBuilder<'a, A, R> {
|
||||
name: String,
|
||||
store: &'a StubStore,
|
||||
times: Option<usize>,
|
||||
_f: std::marker::PhantomData<fn(A) -> R>,
|
||||
}
|
||||
|
||||
impl<'a, A: 'static, R: 'static> StubBuilder<'a, A, R> {
|
||||
/// Asserts the stub has been called exactly `times` times.
|
||||
#[must_use]
|
||||
pub fn times(mut self, times: usize) -> Self {
|
||||
self.times = Some(times);
|
||||
self
|
||||
}
|
||||
|
||||
/// Asserts the stub has been called exactly once.
|
||||
#[must_use]
|
||||
pub fn once(mut self) -> Self {
|
||||
self.times = Some(1);
|
||||
self
|
||||
}
|
||||
|
||||
/// The function that will be called when the stub is called. This needs to be called to
|
||||
/// actually build the stub and register it to the stub store.
|
||||
pub fn then(self, f: impl Fn(A) -> R + Sync + Send + 'static) {
|
||||
let times = Mutex::new(self.times);
|
||||
let stub = Stub {
|
||||
stub: Box::new(f),
|
||||
times,
|
||||
name: self.name.clone(),
|
||||
invalidated: AtomicBool::new(false),
|
||||
};
|
||||
|
||||
self.store.insert(self.name, stub);
|
||||
}
|
||||
}
|
||||
|
||||
/// Mocker allows to stub metod call on any struct. you can register stubs by calling
|
||||
/// `Mocker::when` and retrieve it in the proxy implementation when with `Mocker::get`.
|
||||
#[derive(Debug, Default)]
|
||||
pub struct Mocker {
|
||||
store: StubStore,
|
||||
}
|
||||
|
||||
impl Mocker {
|
||||
pub fn when<A, R>(&self, name: &str) -> StubBuilder<A, R> {
|
||||
StubBuilder {
|
||||
name: name.to_string(),
|
||||
store: &self.store,
|
||||
times: None,
|
||||
_f: std::marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get<A, R>(&self, name: &str) -> &Stub<A, R> {
|
||||
match self.store.get(name) {
|
||||
Some(stub) => stub,
|
||||
None => {
|
||||
// panic here causes the stubs to get dropped, and panic in turn. To prevent
|
||||
// that, we forget them, and let them be cleaned by the os later. This is not
|
||||
// optimal, but is still better than nested panicks.
|
||||
let mut stubs = self.store.inner.lock().unwrap();
|
||||
let stubs = std::mem::take(&mut *stubs);
|
||||
std::mem::forget(stubs);
|
||||
panic!("unexpected call to {}", name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Clone)]
|
||||
pub enum MockIndex {
|
||||
Vrai(Index),
|
||||
Faux(Arc<Mocker>),
|
||||
Real(Index),
|
||||
Mock(Arc<Mocker>),
|
||||
}
|
||||
|
||||
impl MockIndex {
|
||||
pub fn faux(faux: Mocker) -> Self {
|
||||
Self::Faux(Arc::new(faux))
|
||||
pub fn mock(mocker: Mocker) -> Self {
|
||||
Self::Mock(Arc::new(mocker))
|
||||
}
|
||||
|
||||
pub fn open(
|
||||
path: impl AsRef<Path>,
|
||||
size: usize,
|
||||
update_file_store: Arc<UpdateFileStore>,
|
||||
uuid: Uuid,
|
||||
update_handler: Arc<UpdateHandler>,
|
||||
update_handler: Arc<IndexerConfig>,
|
||||
) -> Result<Self> {
|
||||
let index = Index::open(path, size, update_file_store, uuid, update_handler)?;
|
||||
Ok(Self::Vrai(index))
|
||||
let index = Index::open(path, size, uuid, update_handler)?;
|
||||
Ok(Self::Real(index))
|
||||
}
|
||||
|
||||
pub fn load_dump(
|
||||
src: impl AsRef<Path>,
|
||||
dst: impl AsRef<Path>,
|
||||
size: usize,
|
||||
update_handler: &UpdateHandler,
|
||||
update_handler: &IndexerConfig,
|
||||
) -> anyhow::Result<()> {
|
||||
Index::load_dump(src, dst, size, update_handler)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn handle_update(&self, update: Processing) -> std::result::Result<Processed, Failed> {
|
||||
match self {
|
||||
MockIndex::Vrai(index) => index.handle_update(update),
|
||||
MockIndex::Faux(faux) => faux.get("handle_update").call(update),
|
||||
}
|
||||
Index::load_dump(src, dst, size, update_handler)
|
||||
}
|
||||
|
||||
pub fn uuid(&self) -> Uuid {
|
||||
match self {
|
||||
MockIndex::Vrai(index) => index.uuid(),
|
||||
                MockIndex::Faux(faux) => faux.get("uuid").call(()),
                MockIndex::Real(index) => index.uuid(),
                MockIndex::Mock(m) => unsafe { m.get("uuid").call(()) },
            }
        }

        pub fn stats(&self) -> Result<IndexStats> {
            match self {
                MockIndex::Vrai(index) => index.stats(),
                MockIndex::Faux(_) => todo!(),
                MockIndex::Real(index) => index.stats(),
                MockIndex::Mock(m) => unsafe { m.get("stats").call(()) },
            }
        }

        pub fn meta(&self) -> Result<IndexMeta> {
            match self {
                MockIndex::Vrai(index) => index.meta(),
                MockIndex::Faux(_) => todo!(),
                MockIndex::Real(index) => index.meta(),
                MockIndex::Mock(_) => todo!(),
            }
        }
        pub fn settings(&self) -> Result<Settings<Checked>> {
            match self {
                MockIndex::Vrai(index) => index.settings(),
                MockIndex::Faux(_) => todo!(),
                MockIndex::Real(index) => index.settings(),
                MockIndex::Mock(_) => todo!(),
            }
        }

@@ -260,10 +100,10 @@ pub mod test {
            attributes_to_retrieve: Option<Vec<S>>,
        ) -> Result<Vec<Map<String, Value>>> {
            match self {
                MockIndex::Vrai(index) => {
                MockIndex::Real(index) => {
                    index.retrieve_documents(offset, limit, attributes_to_retrieve)
                }
                MockIndex::Faux(_) => todo!(),
                MockIndex::Mock(_) => todo!(),
            }
        }

@@ -273,49 +113,90 @@ pub mod test {
            attributes_to_retrieve: Option<Vec<S>>,
        ) -> Result<Map<String, Value>> {
            match self {
                MockIndex::Vrai(index) => index.retrieve_document(doc_id, attributes_to_retrieve),
                MockIndex::Faux(_) => todo!(),
                MockIndex::Real(index) => index.retrieve_document(doc_id, attributes_to_retrieve),
                MockIndex::Mock(_) => todo!(),
            }
        }

        pub fn size(&self) -> u64 {
            match self {
                MockIndex::Vrai(index) => index.size(),
                MockIndex::Faux(_) => todo!(),
                MockIndex::Real(index) => index.size(),
                MockIndex::Mock(_) => todo!(),
            }
        }

        pub fn snapshot(&self, path: impl AsRef<Path>) -> Result<()> {
            match self {
                MockIndex::Vrai(index) => index.snapshot(path),
                MockIndex::Faux(faux) => faux.get("snapshot").call(path.as_ref()),
                MockIndex::Real(index) => index.snapshot(path),
                MockIndex::Mock(m) => unsafe { m.get("snapshot").call(path.as_ref()) },
            }
        }

        pub fn inner(&self) -> &milli::Index {
        pub fn close(self) {
            match self {
                MockIndex::Vrai(index) => index.inner(),
                MockIndex::Faux(_) => todo!(),
                MockIndex::Real(index) => index.close(),
                MockIndex::Mock(m) => unsafe { m.get("close").call(()) },
            }
        }

        pub fn update_primary_key(&self, primary_key: Option<String>) -> Result<IndexMeta> {
            match self {
                MockIndex::Vrai(index) => index.update_primary_key(primary_key),
                MockIndex::Faux(_) => todo!(),
            }
        }
        pub fn perform_search(&self, query: SearchQuery) -> Result<SearchResult> {
            match self {
                MockIndex::Vrai(index) => index.perform_search(query),
                MockIndex::Faux(faux) => faux.get("perform_search").call(query),
                MockIndex::Real(index) => index.perform_search(query),
                MockIndex::Mock(m) => unsafe { m.get("perform_search").call(query) },
            }
        }

        pub fn dump(&self, path: impl AsRef<Path>) -> Result<()> {
            match self {
                MockIndex::Vrai(index) => index.dump(path),
                MockIndex::Faux(faux) => faux.get("dump").call(path.as_ref()),
                MockIndex::Real(index) => index.dump(path),
                MockIndex::Mock(m) => unsafe { m.get("dump").call(path.as_ref()) },
            }
        }

        pub fn update_documents(
            &self,
            method: IndexDocumentsMethod,
            primary_key: Option<String>,
            file_store: UpdateFileStore,
            contents: impl Iterator<Item = Uuid>,
        ) -> Result<DocumentAdditionResult> {
            match self {
                MockIndex::Real(index) => {
                    index.update_documents(method, primary_key, file_store, contents)
                }
                MockIndex::Mock(mocker) => unsafe {
                    mocker
                        .get("update_documents")
                        .call((method, primary_key, file_store, contents))
                },
            }
        }

        pub fn update_settings(&self, settings: &Settings<Checked>) -> Result<()> {
            match self {
                MockIndex::Real(index) => index.update_settings(settings),
                MockIndex::Mock(m) => unsafe { m.get("update_settings").call(settings) },
            }
        }

        pub fn update_primary_key(&self, primary_key: String) -> Result<IndexMeta> {
            match self {
                MockIndex::Real(index) => index.update_primary_key(primary_key),
                MockIndex::Mock(m) => unsafe { m.get("update_primary_key").call(primary_key) },
            }
        }

        pub fn delete_documents(&self, ids: &[String]) -> Result<DocumentDeletionResult> {
            match self {
                MockIndex::Real(index) => index.delete_documents(ids),
                MockIndex::Mock(m) => unsafe { m.get("delete_documents").call(ids) },
            }
        }

        pub fn clear_documents(&self) -> Result<()> {
            match self {
                MockIndex::Real(index) => index.clear_documents(),
                MockIndex::Mock(m) => unsafe { m.get("clear_documents").call(()) },
            }
        }
    }
@@ -327,7 +208,7 @@ pub mod test {
            .times(2)
            .then(|_: &Path| -> Result<()> { Ok(()) });

        let index = MockIndex::faux(faux);
        let index = MockIndex::mock(faux);

        let path = PathBuf::from("hello");
        index.snapshot(&path).unwrap();
@@ -339,7 +220,7 @@ pub mod test {
    fn test_faux_unexisting_method_stub() {
        let faux = Mocker::default();

        let index = MockIndex::faux(faux);
        let index = MockIndex::mock(faux);

        let path = PathBuf::from("hello");
        index.snapshot(&path).unwrap();
@@ -356,7 +237,7 @@ pub mod test {
            panic!();
        });

        let index = MockIndex::faux(faux);
        let index = MockIndex::mock(faux);

        let path = PathBuf::from("hello");
        index.snapshot(&path).unwrap();
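
A note on the pattern these tests exercise: `Mocker` stores type-erased stubs registered with `when(...).then(...)`, and `get(name).call(args)` downcasts them back, so an unstubbed name or a wrong argument type panics at call time (which is exactly what `test_faux_unexisting_method_stub` relies on). A minimal, self-contained sketch of the idea — this `TinyMocker` is illustrative, not the real `Mocker`:

use std::any::Any;
use std::collections::HashMap;
use std::sync::Mutex;

#[derive(Default)]
pub struct TinyMocker {
    // Stubs are stored type-erased; types are only checked again at call time.
    stubs: Mutex<HashMap<String, Box<dyn Any + Send>>>,
}

impl TinyMocker {
    pub fn when<A: 'static, R: 'static>(&self, name: &str, f: fn(A) -> R) {
        self.stubs
            .lock()
            .unwrap()
            .insert(name.to_string(), Box::new(f));
    }

    pub fn call<A: 'static, R: 'static>(&self, name: &str, arg: A) -> R {
        let stubs = self.stubs.lock().unwrap();
        let stub = stubs
            .get(name)
            .unwrap_or_else(|| panic!("unexpected call to `{}`", name));
        let f = stub
            .downcast_ref::<fn(A) -> R>()
            .unwrap_or_else(|| panic!("`{}` called with the wrong types", name));
        f(arg)
    }
}

fn main() {
    let mocker = TinyMocker::default();
    mocker.when("uuid", |()| 42u32);
    let id: u32 = mocker.call("uuid", ());
    assert_eq!(id, 42);
    // mocker.call::<(), u32>("missing", ()) would panic, like the tests above.
}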
@@ -3,10 +3,9 @@ use std::str::FromStr;
use std::time::Instant;

use either::Either;
use heed::RoTxn;
use indexmap::IndexMap;
use meilisearch_tokenizer::{Analyzer, AnalyzerConfig, Token};
use milli::{AscDesc, FieldId, FieldsIdsMap, FilterCondition, MatchingWords, SortError};
use milli::tokenizer::{Analyzer, AnalyzerConfig, Token};
use milli::{AscDesc, FieldId, FieldsIdsMap, Filter, MatchingWords, SortError};
use regex::Regex;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
@@ -102,7 +101,7 @@ impl Index {
        search.offset(query.offset.unwrap_or_default());

        if let Some(ref filter) = query.filter {
            if let Some(facets) = parse_filter(filter, self, &rtxn)? {
            if let Some(facets) = parse_filter(filter)? {
                search.filter(facets);
            }
        }
@@ -296,7 +295,7 @@ fn compute_value_matches<'a, A: AsRef<[u8]>>(
            let mut start = 0;
            for (word, token) in analyzed.reconstruct() {
                if token.is_word() {
                    if let Some(length) = matcher.matches(token.text()) {
                    if let Some(length) = matcher.matches(&token) {
                        infos.push(MatchInfo { start, length });
                    }
                }
@@ -310,6 +309,9 @@ fn compute_value_matches<'a, A: AsRef<[u8]>>(
        Value::Object(vals) => vals
            .values()
            .for_each(|val| compute_value_matches(infos, val, matcher, analyzer)),
        Value::Number(number) => {
            compute_value_matches(infos, &Value::String(number.to_string()), matcher, analyzer)
        }
        _ => (),
    }
}
@@ -484,18 +486,18 @@ fn format_fields<A: AsRef<[u8]>>(

/// trait to allow unit testing of `format_fields`
trait Matcher {
    fn matches(&self, w: &str) -> Option<usize>;
    fn matches(&self, w: &Token) -> Option<usize>;
}

#[cfg(test)]
impl Matcher for BTreeMap<&str, Option<usize>> {
    fn matches(&self, w: &str) -> Option<usize> {
        self.get(w).cloned().flatten()
    fn matches(&self, w: &Token) -> Option<usize> {
        self.get(w.text()).cloned().flatten()
    }
}

impl Matcher for MatchingWords {
    fn matches(&self, w: &str) -> Option<usize> {
    fn matches(&self, w: &Token) -> Option<usize> {
        self.matching_bytes(w)
    }
}
@@ -577,7 +579,7 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
        let mut tokens = analyzed.reconstruct().peekable();

        while let Some((word, token)) =
            tokens.next_if(|(_, token)| matcher.matches(token.text()).is_none())
            tokens.next_if(|(_, token)| matcher.matches(token).is_none())
        {
            buffer.push((word, token));
        }
@@ -621,7 +623,7 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
        // Check if we need to do highlighting or computed matches before calling
        // Matcher::match since the call is expensive.
        if format_options.highlight && token.is_word() {
            if let Some(length) = matcher.matches(token.text()) {
            if let Some(length) = matcher.matches(&token) {
                match word.get(..length).zip(word.get(length..)) {
                    Some((head, tail)) => {
                        out.push_str(&self.marks.0);
@@ -647,31 +649,27 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
    }
}

fn parse_filter(facets: &Value, index: &Index, txn: &RoTxn) -> Result<Option<FilterCondition>> {
fn parse_filter(facets: &Value) -> Result<Option<Filter>> {
    match facets {
        Value::String(expr) => {
            let condition = FilterCondition::from_str(txn, index, expr)?;
            Ok(Some(condition))
            let condition = Filter::from_str(expr)?;
            Ok(condition)
        }
        Value::Array(arr) => parse_filter_array(txn, index, arr),
        Value::Array(arr) => parse_filter_array(arr),
        v => Err(FacetError::InvalidExpression(&["Array"], v.clone()).into()),
    }
}

fn parse_filter_array(
    txn: &RoTxn,
    index: &Index,
    arr: &[Value],
) -> Result<Option<FilterCondition>> {
fn parse_filter_array(arr: &[Value]) -> Result<Option<Filter>> {
    let mut ands = Vec::new();
    for value in arr {
        match value {
            Value::String(s) => ands.push(Either::Right(s.clone())),
            Value::String(s) => ands.push(Either::Right(s.as_str())),
            Value::Array(arr) => {
                let mut ors = Vec::new();
                for value in arr {
                    match value {
                        Value::String(s) => ors.push(s.clone()),
                        Value::String(s) => ors.push(s.as_str()),
                        v => {
                            return Err(FacetError::InvalidExpression(&["String"], v.clone()).into())
                        }
@@ -687,7 +685,7 @@ fn parse_filter_array(
        }
    }

    Ok(FilterCondition::from_array(txn, index, ands)?)
    Ok(Filter::from_array(ands)?)
}
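
For reference, the two JSON shapes `parse_filter` accepts after this change (a sketch; it assumes the usual meilisearch filter semantics where the outer array is AND-ed and a nested array is an OR group, matching the `Either::Right` single expressions and the grouped `ors` built above):

use serde_json::json;

fn main() {
    // A plain string goes straight to Filter::from_str.
    let simple = json!("genre = horror");

    // The outer array is AND-ed; the nested array is an OR group.
    let combined = json!([
        "genre = horror",
        ["author = King", "author = Barker"],
    ]);

    println!("{simple} / {combined}");
}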
#[cfg(test)]
@@ -879,7 +877,7 @@ mod test {
        assert_eq!(value["publication_year"], "<em>1937</em>");
    }

    /// https://github.com/meilisearch/MeiliSearch/issues/1368
    /// https://github.com/meilisearch/meilisearch/issues/1368
    #[test]
    fn formatted_with_highlight_emoji() {
        let stop_words = fst::Set::default();
@@ -1424,13 +1422,15 @@ mod test {
            "color": "Green",
            "name": "Lucas Hess",
            "gender": "male",
            "price": 3.5,
            "address": "412 Losee Terrace, Blairstown, Georgia, 2825",
            "about": "Mollit ad in exercitation quis Laboris . Anim est ut consequat fugiat duis magna aliquip velit nisi. Commodo eiusmod est consequat proident consectetur aliqua enim fugiat. Aliqua adipisicing laboris elit proident enim veniam laboris mollit. Incididunt fugiat minim ad nostrud deserunt tempor in. Id irure officia labore qui est labore nulla nisi. Magna sit quis tempor esse consectetur amet labore duis aliqua consequat.\r\n"
        }"#).unwrap();
        let mut matcher = BTreeMap::new();
        matcher.insert("green", Some(3));
        matcher.insert("green", Some(5));
        matcher.insert("mollit", Some(6));
        matcher.insert("laboris", Some(7));
        matcher.insert("3", Some(1));

        let stop_words = fst::Set::default();
        let mut config = AnalyzerConfig::default();
@@ -1440,7 +1440,7 @@ mod test {
        let matches = compute_matches(&matcher, &value, &analyzer);
        assert_eq!(
            format!("{:?}", matches),
            r##"{"about": [MatchInfo { start: 0, length: 6 }, MatchInfo { start: 31, length: 7 }, MatchInfo { start: 191, length: 7 }, MatchInfo { start: 225, length: 7 }, MatchInfo { start: 233, length: 6 }], "color": [MatchInfo { start: 0, length: 3 }]}"##
            r##"{"about": [MatchInfo { start: 0, length: 6 }, MatchInfo { start: 31, length: 7 }, MatchInfo { start: 191, length: 7 }, MatchInfo { start: 225, length: 7 }, MatchInfo { start: 233, length: 6 }], "color": [MatchInfo { start: 0, length: 5 }], "price": [MatchInfo { start: 0, length: 1 }]}"##
        );
    }


@@ -1,49 +0,0 @@
use milli::update::UpdateBuilder;
use milli::CompressionType;
use rayon::ThreadPool;

use crate::options::IndexerOpts;

pub struct UpdateHandler {
    max_nb_chunks: Option<usize>,
    chunk_compression_level: Option<u32>,
    thread_pool: ThreadPool,
    log_frequency: usize,
    max_memory: Option<usize>,
    chunk_compression_type: CompressionType,
}

impl UpdateHandler {
    pub fn new(opt: &IndexerOpts) -> anyhow::Result<Self> {
        let thread_pool = rayon::ThreadPoolBuilder::new()
            .num_threads(opt.indexing_jobs.unwrap_or(num_cpus::get() / 2))
            .build()?;

        Ok(Self {
            max_nb_chunks: opt.max_nb_chunks,
            chunk_compression_level: opt.chunk_compression_level,
            thread_pool,
            log_frequency: opt.log_every_n,
            max_memory: opt.max_memory.map(|m| m.get_bytes() as usize),
            chunk_compression_type: opt.chunk_compression_type,
        })
    }

    pub fn update_builder(&self, update_id: u64) -> UpdateBuilder {
        // We prepare the update by using the update builder.
        let mut update_builder = UpdateBuilder::new(update_id);
        if let Some(max_nb_chunks) = self.max_nb_chunks {
            update_builder.max_nb_chunks(max_nb_chunks);
        }
        if let Some(chunk_compression_level) = self.chunk_compression_level {
            update_builder.chunk_compression_level(chunk_compression_level);
        }
        update_builder.thread_pool(&self.thread_pool);
        update_builder.log_every_n(self.log_frequency);
        if let Some(max_memory) = self.max_memory {
            update_builder.max_memory(max_memory);
        }
        update_builder.chunk_compression_type(self.chunk_compression_type);
        update_builder
    }
}
@@ -4,15 +4,16 @@ use std::num::NonZeroUsize;

use log::{debug, info, trace};
use milli::documents::DocumentBatchReader;
use milli::update::{IndexDocumentsMethod, Setting, UpdateBuilder};
use milli::update::{
    DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsConfig, IndexDocumentsMethod,
    Setting,
};
use serde::{Deserialize, Serialize, Serializer};
use uuid::Uuid;

use crate::index_controller::updates::status::{Failed, Processed, Processing, UpdateResult};
use crate::Update;

use super::error::Result;
use super::index::{Index, IndexMeta};
use crate::update_file_store::UpdateFileStore;

fn serialize_with_wildcard<S>(
    field: &Setting<Vec<String>>,
@@ -30,25 +31,27 @@ where
        .serialize(s)
}

#[derive(Clone, Default, Debug, Serialize)]
#[derive(Clone, Default, Debug, Serialize, PartialEq)]
pub struct Checked;

#[derive(Clone, Default, Debug, Serialize, Deserialize)]
#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq)]
pub struct Unchecked;
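
`Checked` and `Unchecked` are zero-sized typestate markers: as the doc comment below explains, `check` is the only way to obtain a `Settings<Checked>` from a `Settings<Unchecked>`. A minimal, self-contained sketch of the pattern (the field is hypothetical and `check` is reduced to the type transition; the real one also validates values, e.g. treating a `["*"]` wildcard as a reset):

use std::marker::PhantomData;

struct Checked;
struct Unchecked;

struct Settings<T> {
    displayed_attributes: Option<Vec<String>>, // hypothetical single field
    _kind: PhantomData<T>,
}

impl Settings<Unchecked> {
    // Only this method produces the Checked typestate.
    fn check(self) -> Settings<Checked> {
        Settings {
            displayed_attributes: self.displayed_attributes,
            _kind: PhantomData,
        }
    }
}

// Functions that need validated settings demand it in the signature:
fn apply(settings: &Settings<Checked>) {
    let _ = &settings.displayed_attributes;
}

fn main() {
    let raw = Settings::<Unchecked> {
        displayed_attributes: Some(vec!["title".into()]),
        _kind: PhantomData,
    };
    apply(&raw.check()); // a Settings<Unchecked> cannot be passed directly
}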
/// Holds all the settings for an index. `T` can either be `Checked` if they represent settings
/// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the latter case,
/// a call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[serde(bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>"))]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
pub struct Settings<T> {
    #[serde(
        default,
        serialize_with = "serialize_with_wildcard",
        skip_serializing_if = "Setting::is_not_set"
    )]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub displayed_attributes: Setting<Vec<String>>,

    #[serde(
@@ -56,19 +59,26 @@ pub struct Settings<T> {
        serialize_with = "serialize_with_wildcard",
        skip_serializing_if = "Setting::is_not_set"
    )]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub searchable_attributes: Setting<Vec<String>>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub filterable_attributes: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub sortable_attributes: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub ranking_rules: Setting<Vec<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub stop_words: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub distinct_attribute: Setting<String>,

    #[serde(skip)]
@@ -164,126 +174,110 @@ pub struct Facets {
}

impl Index {
    pub fn handle_update(&self, update: Processing) -> std::result::Result<Processed, Failed> {
        let update_id = update.id();
        let update_builder = self.update_handler.update_builder(update_id);
        let result = (|| {
            let mut txn = self.write_txn()?;
            let result = match update.meta() {
                Update::DocumentAddition {
                    primary_key,
                    content_uuid,
                    method,
                } => self.update_documents(
                    &mut txn,
                    *method,
                    *content_uuid,
                    update_builder,
                    primary_key.as_deref(),
                ),
                Update::Settings(settings) => {
                    let settings = settings.clone().check();
                    self.update_settings(&mut txn, &settings, update_builder)
                }
                Update::ClearDocuments => {
                    let builder = update_builder.clear_documents(&mut txn, self);
                    let _count = builder.execute()?;
                    Ok(UpdateResult::Other)
                }
                Update::DeleteDocuments(ids) => {
                    let mut builder = update_builder.delete_documents(&mut txn, self)?;

                    // We ignore nonexistent document ids
                    ids.iter().for_each(|id| {
                        builder.delete_external_id(id);
                    });

                    let deleted = builder.execute()?;
                    Ok(UpdateResult::DocumentDeletion { deleted })
                }
            };
            if result.is_ok() {
                txn.commit()?;
            }
            result
        })();

        if let Update::DocumentAddition { content_uuid, .. } = update.from.meta() {
            let _ = self.update_file_store.delete(*content_uuid);
        }

        match result {
            Ok(result) => Ok(update.process(result)),
            Err(e) => Err(update.fail(e)),
        }
    }
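
`handle_update` above uses an immediately-invoked closure as a poor man's try block: `?` can short-circuit inside it while the cleanup (deleting the update file) still runs before the result is matched. A self-contained sketch of that shape:

fn main() {
    // `?` short-circuits inside the closure instead of the enclosing fn.
    let result: Result<u32, String> = (|| {
        let x: u32 = "42".parse().map_err(|_| "bad number".to_string())?;
        Ok(x + 1)
    })();

    // Cleanup happens regardless of success or failure.
    println!("cleaning up temporary files...");

    match result {
        Ok(v) => println!("processed: {v}"),
        Err(e) => println!("failed: {e}"),
    }
}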

    pub fn update_primary_key(&self, primary_key: Option<String>) -> Result<IndexMeta> {
        match primary_key {
            Some(primary_key) => {
                let mut txn = self.write_txn()?;
                let mut builder = UpdateBuilder::new(0).settings(&mut txn, self);
                builder.set_primary_key(primary_key);
                builder.execute(|_, _| ())?;
                let meta = IndexMeta::new_txn(self, &txn)?;
                txn.commit()?;
                Ok(meta)
            }
            None => {
                let meta = IndexMeta::new(self)?;
                Ok(meta)
            }
        }
    }

    fn update_documents<'a, 'b>(
    fn update_primary_key_txn<'a, 'b>(
        &'a self,
        txn: &mut heed::RwTxn<'a, 'b>,
        method: IndexDocumentsMethod,
        content_uuid: Uuid,
        update_builder: UpdateBuilder,
        primary_key: Option<&str>,
    ) -> Result<UpdateResult> {
        trace!("performing document addition");
        primary_key: String,
    ) -> Result<IndexMeta> {
        let mut builder = milli::update::Settings::new(txn, self, self.indexer_config.as_ref());
        builder.set_primary_key(primary_key);
        builder.execute(|_| ())?;
        let meta = IndexMeta::new_txn(self, txn)?;

        // Set the primary key if not set already, ignore if already set.
        if let (None, Some(primary_key)) = (self.primary_key(txn)?, primary_key) {
            let mut builder = UpdateBuilder::new(0).settings(txn, self);
            builder.set_primary_key(primary_key.to_string());
            builder.execute(|_, _| ())?;
        Ok(meta)
    }

    pub fn update_primary_key(&self, primary_key: String) -> Result<IndexMeta> {
        let mut txn = self.write_txn()?;
        let res = self.update_primary_key_txn(&mut txn, primary_key)?;
        txn.commit()?;

        Ok(res)
    }

    /// Deletes `ids` from the index, and returns how many documents were deleted.
    pub fn delete_documents(&self, ids: &[String]) -> Result<DocumentDeletionResult> {
        let mut txn = self.write_txn()?;
        let mut builder = milli::update::DeleteDocuments::new(&mut txn, self)?;

        // We ignore nonexistent document ids
        ids.iter().for_each(|id| {
            builder.delete_external_id(id);
        });

        let deleted = builder.execute()?;

        txn.commit()?;

        Ok(deleted)
    }

    pub fn clear_documents(&self) -> Result<()> {
        let mut txn = self.write_txn()?;
        milli::update::ClearDocuments::new(&mut txn, self).execute()?;
        txn.commit()?;

        Ok(())
    }

    pub fn update_documents(
        &self,
        method: IndexDocumentsMethod,
        primary_key: Option<String>,
        file_store: UpdateFileStore,
        contents: impl IntoIterator<Item = Uuid>,
    ) -> Result<DocumentAdditionResult> {
        trace!("performing document addition");
        let mut txn = self.write_txn()?;

        if let Some(primary_key) = primary_key {
            if self.primary_key(&txn)?.is_none() {
                self.update_primary_key_txn(&mut txn, primary_key)?;
            }
        }

        let indexing_callback =
            |indexing_step, update_id| debug!("update {}: {:?}", update_id, indexing_step);
        let config = IndexDocumentsConfig {
            update_method: method,
            ..Default::default()
        };

        let content_file = self.update_file_store.get_update(content_uuid).unwrap();
        let reader = DocumentBatchReader::from_reader(content_file).unwrap();
        let indexing_callback = |indexing_step| debug!("update: {:?}", indexing_step);
        let mut builder = milli::update::IndexDocuments::new(
            &mut txn,
            self,
            self.indexer_config.as_ref(),
            config,
            indexing_callback,
        );

        let mut builder = update_builder.index_documents(txn, self);
        builder.index_documents_method(method);
        let addition = builder.execute(reader, indexing_callback)?;
        for content_uuid in contents.into_iter() {
            let content_file = file_store.get_update(content_uuid)?;
            let reader = DocumentBatchReader::from_reader(content_file)?;
            builder.add_documents(reader)?;
        }

        let addition = builder.execute()?;

        txn.commit()?;

        info!("document addition done: {:?}", addition);

        Ok(UpdateResult::DocumentsAddition(addition))
        Ok(addition)
    }
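
The reworked `update_documents` feeds every queued payload into a single `IndexDocuments` builder and calls `execute` once, so the whole batch shares one write transaction. A toy, self-contained sketch of that accumulate-then-execute shape (this `IndexingBuilder` is illustrative, not milli's API):

struct IndexingBuilder {
    batches: Vec<Vec<&'static str>>,
}

impl IndexingBuilder {
    fn add_documents(&mut self, batch: Vec<&'static str>) {
        self.batches.push(batch);
    }

    // Plays the role of IndexDocuments::execute: everything added so far
    // is indexed as one unit of work.
    fn execute(self) -> usize {
        self.batches.into_iter().map(|b| b.len()).sum()
    }
}

fn main() {
    let mut builder = IndexingBuilder { batches: Vec::new() };
    for batch in [vec!["doc1", "doc2"], vec!["doc3"]] {
        builder.add_documents(batch);
    }
    println!("indexed {} documents in one transaction", builder.execute());
}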

    fn update_settings<'a, 'b>(
        &'a self,
        txn: &mut heed::RwTxn<'a, 'b>,
        settings: &Settings<Checked>,
        update_builder: UpdateBuilder,
    ) -> Result<UpdateResult> {
    pub fn update_settings(&self, settings: &Settings<Checked>) -> Result<()> {
        // We must use the write transaction of the update here.
        let mut builder = update_builder.settings(txn, self);
        let mut txn = self.write_txn()?;
        let mut builder =
            milli::update::Settings::new(&mut txn, self, self.indexer_config.as_ref());

        apply_settings_to_builder(settings, &mut builder);

        builder.execute(|indexing_step, update_id| {
            debug!("update {}: {:?}", update_id, indexing_step)
        })?;
        builder.execute(|indexing_step| debug!("update: {:?}", indexing_step))?;

        Ok(UpdateResult::Other)
        txn.commit()?;

        Ok(())
    }
}

@@ -343,9 +337,19 @@ pub fn apply_settings_to_builder(
}

#[cfg(test)]
mod test {
pub(crate) mod test {
    use proptest::prelude::*;

    use super::*;

    pub(super) fn setting_strategy<T: Arbitrary + Clone>() -> impl Strategy<Value = Setting<T>> {
        prop_oneof![
            Just(Setting::NotSet),
            Just(Setting::Reset),
            any::<T>().prop_map(Setting::Set)
        ]
    }

    #[test]
    fn test_setting_check() {
        // test no changes

@@ -3,24 +3,23 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;

use async_stream::stream;
use chrono::Utc;
use futures::{lock::Mutex, stream::StreamExt};
use log::{error, trace};
use time::macros::format_description;
use time::OffsetDateTime;
use tokio::sync::{mpsc, oneshot, RwLock};

use super::error::{DumpActorError, Result};
use super::{DumpInfo, DumpMsg, DumpStatus, DumpTask};
use crate::index_controller::index_resolver::index_store::IndexStore;
use crate::index_controller::index_resolver::uuid_store::UuidStore;
use crate::index_controller::index_resolver::IndexResolver;
use crate::index_controller::updates::UpdateSender;
use super::{DumpInfo, DumpJob, DumpMsg, DumpStatus};
use crate::tasks::Scheduler;
use crate::update_file_store::UpdateFileStore;

pub const CONCURRENT_DUMP_MSG: usize = 10;

pub struct DumpActor<U, I> {
pub struct DumpActor {
    inbox: Option<mpsc::Receiver<DumpMsg>>,
    index_resolver: Arc<IndexResolver<U, I>>,
    update: UpdateSender,
    update_file_store: UpdateFileStore,
    scheduler: Arc<RwLock<Scheduler>>,
    dump_path: PathBuf,
    analytics_path: PathBuf,
    lock: Arc<Mutex<()>>,
@@ -31,18 +30,18 @@ pub struct DumpActor<U, I> {

/// Generate uid from creation date
fn generate_uid() -> String {
    Utc::now().format("%Y%m%d-%H%M%S%3f").to_string()
    OffsetDateTime::now_utc()
        .format(format_description!(
            "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
        ))
        .unwrap()
}

impl<U, I> DumpActor<U, I>
where
    U: UuidStore + Sync + Send + 'static,
    I: IndexStore + Sync + Send + 'static,
{
impl DumpActor {
    pub fn new(
        inbox: mpsc::Receiver<DumpMsg>,
        index_resolver: Arc<IndexResolver<U, I>>,
        update: UpdateSender,
        update_file_store: UpdateFileStore,
        scheduler: Arc<RwLock<Scheduler>>,
        dump_path: impl AsRef<Path>,
        analytics_path: impl AsRef<Path>,
        index_db_size: usize,
@@ -52,8 +51,8 @@ where
        let lock = Arc::new(Mutex::new(()));
        Self {
            inbox: Some(inbox),
            index_resolver,
            update,
            scheduler,
            update_file_store,
            dump_path: dump_path.as_ref().into(),
            analytics_path: analytics_path.as_ref().into(),
            dump_infos,
@@ -120,17 +119,17 @@ where

        ret.send(Ok(info)).expect("Dump actor is dead");

        let task = DumpTask {
        let task = DumpJob {
            dump_path: self.dump_path.clone(),
            db_path: self.analytics_path.clone(),
            index_resolver: self.index_resolver.clone(),
            update_sender: self.update.clone(),
            update_file_store: self.update_file_store.clone(),
            scheduler: self.scheduler.clone(),
            uid: uid.clone(),
            update_db_size: self.update_db_size,
            index_db_size: self.index_db_size,
        };

        let task_result = tokio::task::spawn(task.run()).await;
        let task_result = tokio::task::spawn_local(task.run()).await;

        let mut dump_infos = self.dump_infos.write().await;
        let dump_infos = dump_infos
@@ -160,3 +159,33 @@ where
    }
}
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_generate_uid() {
        let current = OffsetDateTime::now_utc();

        let uid = generate_uid();
        let (date, time) = uid.split_once('-').unwrap();

        let date = time::Date::parse(
            date,
            &format_description!("[year repr:full][month repr:numerical][day padding:zero]"),
        )
        .unwrap();
        let time = time::Time::parse(
            time,
            &format_description!(
                "[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
            ),
        )
        .unwrap();
        let datetime = time::PrimitiveDateTime::new(date, time);
        let datetime = datetime.assume_utc();

        assert!(current - datetime < time::Duration::SECOND);
    }
}

@@ -0,0 +1,16 @@
pub mod v2;
pub mod v3;

/// Parses the v1 version of the Asc ranking rules `asc(price)` and returns the field name.
pub fn asc_ranking_rule(text: &str) -> Option<&str> {
    text.split_once("asc(")
        .and_then(|(_, tail)| tail.rsplit_once(')'))
        .map(|(field, _)| field)
}

/// Parses the v1 version of the Desc ranking rules `desc(price)` and returns the field name.
pub fn desc_ranking_rule(text: &str) -> Option<&str> {
    text.split_once("desc(")
        .and_then(|(_, tail)| tail.rsplit_once(')'))
        .map(|(field, _)| field)
}
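
A quick check of the two parsers above (assuming this module's functions are in scope):

fn main() {
    assert_eq!(asc_ranking_rule("asc(price)"), Some("price"));
    assert_eq!(desc_ranking_rule("desc(release_date)"), Some("release_date"));
    // Anything not of the `asc(field)` / `desc(field)` shape yields None.
    assert_eq!(asc_ranking_rule("words"), None);
}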
meilisearch-lib/src/index_controller/dump_actor/compat/v2.rs (new file, 152 lines)
@@ -0,0 +1,152 @@
use anyhow::bail;
use meilisearch_error::Code;
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;

use crate::index::{Settings, Unchecked};

#[derive(Serialize, Deserialize)]
pub struct UpdateEntry {
    pub uuid: Uuid,
    pub update: UpdateStatus,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum UpdateFormat {
    Json,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct DocumentAdditionResult {
    pub nb_documents: usize,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum UpdateResult {
    DocumentsAddition(DocumentAdditionResult),
    DocumentDeletion { deleted: u64 },
    Other,
}

#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum UpdateMeta {
    DocumentsAddition {
        method: IndexDocumentsMethod,
        format: UpdateFormat,
        primary_key: Option<String>,
    },
    ClearDocuments,
    DeleteDocuments {
        ids: Vec<String>,
    },
    Settings(Settings<Unchecked>),
}

#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Enqueued {
    pub update_id: u64,
    pub meta: UpdateMeta,
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    pub content: Option<Uuid>,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Processed {
    pub success: UpdateResult,
    #[serde(with = "time::serde::rfc3339")]
    pub processed_at: OffsetDateTime,
    #[serde(flatten)]
    pub from: Processing,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Processing {
    #[serde(flatten)]
    pub from: Enqueued,
    #[serde(with = "time::serde::rfc3339")]
    pub started_processing_at: OffsetDateTime,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Aborted {
    #[serde(flatten)]
    pub from: Enqueued,
    #[serde(with = "time::serde::rfc3339")]
    pub aborted_at: OffsetDateTime,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Failed {
    #[serde(flatten)]
    pub from: Processing,
    pub error: ResponseError,
    #[serde(with = "time::serde::rfc3339")]
    pub failed_at: OffsetDateTime,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "status", rename_all = "camelCase")]
pub enum UpdateStatus {
    Processing(Processing),
    Enqueued(Enqueued),
    Processed(Processed),
    Aborted(Aborted),
    Failed(Failed),
}

type StatusCode = ();

#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ResponseError {
    #[serde(skip)]
    pub code: StatusCode,
    pub message: String,
    pub error_code: String,
    pub error_type: String,
    pub error_link: String,
}

pub fn error_code_from_str(s: &str) -> anyhow::Result<Code> {
    let code = match s {
        "index_creation_failed" => Code::CreateIndex,
        "index_already_exists" => Code::IndexAlreadyExists,
        "index_not_found" => Code::IndexNotFound,
        "invalid_index_uid" => Code::InvalidIndexUid,
        "invalid_state" => Code::InvalidState,
        "missing_primary_key" => Code::MissingPrimaryKey,
        "primary_key_already_present" => Code::PrimaryKeyAlreadyPresent,
        "invalid_request" => Code::InvalidRankingRule,
        "max_fields_limit_exceeded" => Code::MaxFieldsLimitExceeded,
        "missing_document_id" => Code::MissingDocumentId,
        "invalid_facet" => Code::Filter,
        "invalid_filter" => Code::Filter,
        "invalid_sort" => Code::Sort,
        "bad_parameter" => Code::BadParameter,
        "bad_request" => Code::BadRequest,
        "document_not_found" => Code::DocumentNotFound,
        "internal" => Code::Internal,
        "invalid_geo_field" => Code::InvalidGeoField,
        "invalid_token" => Code::InvalidToken,
        "missing_authorization_header" => Code::MissingAuthorizationHeader,
        "payload_too_large" => Code::PayloadTooLarge,
        "unretrievable_document" => Code::RetrieveDocument,
        "search_error" => Code::SearchDocuments,
        "unsupported_media_type" => Code::UnsupportedMediaType,
        "dump_already_in_progress" => Code::DumpAlreadyInProgress,
        "dump_process_failed" => Code::DumpProcessFailed,
        _ => bail!("unknown error code."),
    };

    Ok(code)
}
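
A trimmed, self-contained version of the mapping above, keeping only two variants, to show the `bail!` behaviour on unknown codes:

use anyhow::bail;

#[derive(Debug, PartialEq)]
enum Code {
    IndexNotFound,
    Internal,
}

fn error_code_from_str(s: &str) -> anyhow::Result<Code> {
    Ok(match s {
        "index_not_found" => Code::IndexNotFound,
        "internal" => Code::Internal,
        // Unknown legacy codes abort the dump import.
        _ => bail!("unknown error code."),
    })
}

fn main() {
    assert_eq!(
        error_code_from_str("index_not_found").unwrap(),
        Code::IndexNotFound
    );
    assert!(error_code_from_str("nope").is_err());
}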
meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs (new file, 204 lines)
@@ -0,0 +1,204 @@
use meilisearch_error::{Code, ResponseError};
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;

use crate::index::{Settings, Unchecked};
use crate::index_resolver::IndexUid;
use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult};

use super::v2;

#[derive(Serialize, Deserialize)]
pub struct DumpEntry {
    pub uuid: Uuid,
    pub uid: String,
}

#[derive(Serialize, Deserialize)]
pub struct UpdateEntry {
    pub uuid: Uuid,
    pub update: UpdateStatus,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "status", rename_all = "camelCase")]
pub enum UpdateStatus {
    Processing(Processing),
    Enqueued(Enqueued),
    Processed(Processed),
    Failed(Failed),
}

impl From<v2::UpdateResult> for TaskResult {
    fn from(other: v2::UpdateResult) -> Self {
        match other {
            v2::UpdateResult::DocumentsAddition(result) => TaskResult::DocumentAddition {
                indexed_documents: result.nb_documents as u64,
            },
            v2::UpdateResult::DocumentDeletion { deleted } => TaskResult::DocumentDeletion {
                deleted_documents: deleted,
            },
            v2::UpdateResult::Other => TaskResult::Other,
        }
    }
}

#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Update {
    DeleteDocuments(Vec<String>),
    DocumentAddition {
        primary_key: Option<String>,
        method: IndexDocumentsMethod,
        content_uuid: Uuid,
    },
    Settings(Settings<Unchecked>),
    ClearDocuments,
}

impl From<Update> for TaskContent {
    fn from(other: Update) -> Self {
        match other {
            Update::DeleteDocuments(ids) => {
                TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids))
            }
            Update::DocumentAddition {
                primary_key,
                method,
                ..
            } => TaskContent::DocumentAddition {
                content_uuid: Uuid::default(),
                merge_strategy: method,
                primary_key,
                // document count is unknown for legacy updates
                documents_count: 0,
                allow_index_creation: true,
            },
            Update::Settings(settings) => TaskContent::SettingsUpdate {
                settings,
                // There is no way to know now, so we assume it isn't
                is_deletion: false,
                allow_index_creation: true,
            },
            Update::ClearDocuments => TaskContent::DocumentDeletion(DocumentDeletion::Clear),
        }
    }
}

#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum UpdateMeta {
    DocumentsAddition {
        method: IndexDocumentsMethod,
        primary_key: Option<String>,
    },
    ClearDocuments,
    DeleteDocuments {
        ids: Vec<String>,
    },
    Settings(Settings<Unchecked>),
}

#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Enqueued {
    pub update_id: u64,
    pub meta: Update,
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
}

impl Enqueued {
    fn update_task(self, task: &mut Task) {
        // we do not erase the `TaskId` that was given to us.
        task.content = self.meta.into();
        task.events.push(TaskEvent::Created(self.enqueued_at));
    }
}

#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Processed {
    pub success: v2::UpdateResult,
    #[serde(with = "time::serde::rfc3339")]
    pub processed_at: OffsetDateTime,
    #[serde(flatten)]
    pub from: Processing,
}

impl Processed {
    fn update_task(self, task: &mut Task) {
        self.from.update_task(task);

        let event = TaskEvent::Succeded {
            result: TaskResult::from(self.success),
            timestamp: self.processed_at,
        };
        task.events.push(event);
    }
}

#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Processing {
    #[serde(flatten)]
    pub from: Enqueued,
    #[serde(with = "time::serde::rfc3339")]
    pub started_processing_at: OffsetDateTime,
}

impl Processing {
    fn update_task(self, task: &mut Task) {
        self.from.update_task(task);

        let event = TaskEvent::Processing(self.started_processing_at);
        task.events.push(event);
    }
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Failed {
    #[serde(flatten)]
    pub from: Processing,
    pub msg: String,
    pub code: Code,
    #[serde(with = "time::serde::rfc3339")]
    pub failed_at: OffsetDateTime,
}

impl Failed {
    fn update_task(self, task: &mut Task) {
        self.from.update_task(task);

        let event = TaskEvent::Failed {
            error: ResponseError::from_msg(self.msg, self.code),
            timestamp: self.failed_at,
        };
        task.events.push(event);
    }
}

impl From<(UpdateStatus, String, TaskId)> for Task {
    fn from((update, uid, task_id): (UpdateStatus, String, TaskId)) -> Self {
        // Dummy task
        let mut task = Task {
            id: task_id,
            index_uid: IndexUid::new(uid).unwrap(),
            content: TaskContent::IndexDeletion,
            events: Vec::new(),
        };

        match update {
            UpdateStatus::Processing(u) => u.update_task(&mut task),
            UpdateStatus::Enqueued(u) => u.update_task(&mut task),
            UpdateStatus::Processed(u) => u.update_task(&mut task),
            UpdateStatus::Failed(u) => u.update_task(&mut task),
        }

        task
    }
}
@@ -1,7 +1,7 @@
use meilisearch_error::{Code, ErrorCode};
use meilisearch_auth::error::AuthControllerError;
use meilisearch_error::{internal_error, Code, ErrorCode};

use crate::index_controller::index_resolver::error::IndexResolverError;
use crate::index_controller::updates::error::UpdateLoopError;
use crate::{index_resolver::error::IndexResolverError, tasks::error::TaskError};

pub type Result<T> = std::result::Result<T, DumpActorError>;

@@ -15,28 +15,18 @@ pub enum DumpActorError {
    Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
    #[error("{0}")]
    IndexResolver(#[from] IndexResolverError),
    #[error("{0}")]
    UpdateLoop(#[from] UpdateLoopError),
}

macro_rules! internal_error {
    ($($other:path), *) => {
        $(
            impl From<$other> for DumpActorError {
                fn from(other: $other) -> Self {
                    Self::Internal(Box::new(other))
                }
            }
        )*
    }
}

internal_error!(
    heed::Error,
    DumpActorError: heed::Error,
    std::io::Error,
    tokio::task::JoinError,
    tokio::sync::oneshot::error::RecvError,
    serde_json::error::Error,
    tempfile::PersistError
    tempfile::PersistError,
    fs_extra::error::Error,
    AuthControllerError,
    TaskError
);
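
The `internal_error!` macro now comes from `meilisearch_error`, but the removed local definition above shows what it expands to: one `From` impl per listed type, boxing the source error into the `Internal` variant. A self-contained sketch of that expansion for a single type:

#[derive(Debug)]
enum DumpActorError {
    Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
}

// What `internal_error!(DumpActorError: std::io::Error, ...)` generates
// for each listed type:
impl From<std::io::Error> for DumpActorError {
    fn from(other: std::io::Error) -> Self {
        Self::Internal(Box::new(other))
    }
}

fn main() {
    let e: DumpActorError = std::io::Error::from(std::io::ErrorKind::Other).into();
    assert!(matches!(e, DumpActorError::Internal(_)));
}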

impl ErrorCode for DumpActorError {
@@ -46,7 +36,6 @@ impl ErrorCode for DumpActorError {
            DumpActorError::DumpDoesNotExist(_) => Code::DumpNotFound,
            DumpActorError::Internal(_) => Code::Internal,
            DumpActorError::IndexResolver(e) => e.error_code(),
            DumpActorError::UpdateLoop(e) => e.error_code(),
        }
    }
}

@@ -1,16 +1,11 @@
use std::path::Path;
use std::sync::Arc;

use tokio::sync::{mpsc, oneshot};

use crate::index_controller::index_resolver::HardStateIndexResolver;

use super::error::Result;
use super::{DumpActor, DumpActorHandle, DumpInfo, DumpMsg};
use super::{DumpActorHandle, DumpInfo, DumpMsg};

#[derive(Clone)]
pub struct DumpActorHandleImpl {
    sender: mpsc::Sender<DumpMsg>,
    pub sender: mpsc::Sender<DumpMsg>,
}

#[async_trait::async_trait]
@@ -29,29 +24,3 @@ impl DumpActorHandle for DumpActorHandleImpl {
        receiver.await.expect("IndexActor has been killed")
    }
}

impl DumpActorHandleImpl {
    pub fn new(
        path: impl AsRef<Path>,
        analytics_path: impl AsRef<Path>,
        index_resolver: Arc<HardStateIndexResolver>,
        update: crate::index_controller::updates::UpdateSender,
        index_db_size: usize,
        update_db_size: usize,
    ) -> anyhow::Result<Self> {
        let (sender, receiver) = mpsc::channel(10);
        let actor = DumpActor::new(
            receiver,
            index_resolver,
            update,
            path,
            analytics_path,
            index_db_size,
            update_db_size,
        );

        tokio::task::spawn(actor.run());

        Ok(Self { sender })
    }
}

@@ -1,19 +1,3 @@
pub mod v1;
pub mod v2;
pub mod v3;

mod compat {
    /// Parses the v1 version of the Asc ranking rules `asc(price)` and returns the field name.
    pub fn asc_ranking_rule(text: &str) -> Option<&str> {
        text.split_once("asc(")
            .and_then(|(_, tail)| tail.rsplit_once(")"))
            .map(|(field, _)| field)
    }

    /// Parses the v1 version of the Desc ranking rules `desc(price)` and returns the field name.
    pub fn desc_ranking_rule(text: &str) -> Option<&str> {
        text.split_once("desc(")
            .and_then(|(_, tail)| tail.rsplit_once(")"))
            .map(|(field, _)| field)
    }
}
pub mod v4;

@@ -1,23 +1,8 @@
use std::collections::{BTreeMap, BTreeSet};
use std::fs::{create_dir_all, File};
use std::io::{BufReader, Seek, SeekFrom};
use std::marker::PhantomData;
use std::path::Path;

use heed::EnvOpenOptions;
use log::{error, warn};
use milli::documents::DocumentBatchReader;
use milli::update::Setting;
use serde::{Deserialize, Deserializer, Serialize};
use uuid::Uuid;
use serde::{Deserialize, Serialize};

use crate::document_formats::read_ndjson;
use crate::index::apply_settings_to_builder;
use crate::index::update_handler::UpdateHandler;
use crate::index_controller::dump_actor::loaders::compat::{asc_ranking_rule, desc_ranking_rule};
use crate::index_controller::index_resolver::uuid_store::HeedUuidStore;
use crate::index_controller::{self, IndexMetadata};
use crate::{index::Unchecked, options::IndexerOpts};
use crate::index_controller::IndexMetadata;

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
@@ -27,6 +12,7 @@ pub struct MetadataV1 {
}

impl MetadataV1 {
    #[allow(dead_code, unreachable_code, unused_variables)]
    pub fn load_dump(
        self,
        src: impl AsRef<Path>,
@@ -34,200 +20,5 @@ impl MetadataV1 {
        size: usize,
        indexer_options: &IndexerOpts,
    ) -> anyhow::Result<()> {
        let uuid_store = HeedUuidStore::new(&dst)?;
        for index in self.indexes {
            let uuid = Uuid::new_v4();
            uuid_store.insert(index.uid.clone(), uuid)?;
            let src = src.as_ref().join(index.uid);
            load_index(
                &src,
                &dst,
                uuid,
                index.meta.primary_key.as_deref(),
                size,
                indexer_options,
            )?;
        }

        Ok(())
    }
}

pub fn deserialize_some<'de, T, D>(deserializer: D) -> std::result::Result<Option<T>, D::Error>
where
    T: Deserialize<'de>,
    D: Deserializer<'de>,
{
    Deserialize::deserialize(deserializer).map(Some)
}
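
`deserialize_some` wraps every present value, including an explicit `null`, in `Some`; combined with `#[serde(default)]` this gives the legacy settings a tri-state `Option<Option<T>>`: field absent, field `null`, or field set. A self-contained sketch (requires serde and serde_json):

use serde::{Deserialize, Deserializer};

fn deserialize_some<'de, T, D>(d: D) -> Result<Option<T>, D::Error>
where
    T: Deserialize<'de>,
    D: Deserializer<'de>,
{
    Deserialize::deserialize(d).map(Some)
}

#[derive(Deserialize, Debug)]
struct LegacySettings {
    #[serde(default, deserialize_with = "deserialize_some")]
    ranking_rules: Option<Option<Vec<String>>>,
}

fn main() {
    let absent: LegacySettings = serde_json::from_str("{}").unwrap();
    let null: LegacySettings = serde_json::from_str(r#"{"rankingRules":null}"#.replace("rankingRules", "ranking_rules").as_str()).unwrap();

    assert!(absent.ranking_rules.is_none()); // field missing -> None
    assert!(matches!(null.ranking_rules, Some(None))); // explicit null -> Some(None)
}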

// These are the settings used in legacy meilisearch (<v0.21.0).
#[derive(Default, Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct Settings {
    #[serde(default, deserialize_with = "deserialize_some")]
    pub ranking_rules: Option<Option<Vec<String>>>,
    #[serde(default, deserialize_with = "deserialize_some")]
    pub distinct_attribute: Option<Option<String>>,
    #[serde(default, deserialize_with = "deserialize_some")]
    pub searchable_attributes: Option<Option<Vec<String>>>,
    #[serde(default, deserialize_with = "deserialize_some")]
    pub displayed_attributes: Option<Option<BTreeSet<String>>>,
    #[serde(default, deserialize_with = "deserialize_some")]
    pub stop_words: Option<Option<BTreeSet<String>>>,
    #[serde(default, deserialize_with = "deserialize_some")]
    pub synonyms: Option<Option<BTreeMap<String, Vec<String>>>>,
    #[serde(default, deserialize_with = "deserialize_some")]
    pub attributes_for_faceting: Option<Option<Vec<String>>>,
}

fn load_index(
    src: impl AsRef<Path>,
    dst: impl AsRef<Path>,
    uuid: Uuid,
    primary_key: Option<&str>,
    size: usize,
    indexer_options: &IndexerOpts,
) -> anyhow::Result<()> {
    let index_path = dst.as_ref().join(&format!("indexes/{}", uuid));

    create_dir_all(&index_path)?;
    let mut options = EnvOpenOptions::new();
    options.map_size(size);
    let index = milli::Index::new(options, index_path)?;

    let update_handler = UpdateHandler::new(indexer_options)?;

    let mut txn = index.write_txn()?;
    // extract `settings.json` file and import content
    let settings = import_settings(&src)?;
    let settings: index_controller::Settings<Unchecked> = settings.into();

    let handler = UpdateHandler::new(indexer_options)?;

    let mut builder = handler.update_builder(0).settings(&mut txn, &index);

    if let Some(primary_key) = primary_key {
        builder.set_primary_key(primary_key.to_string());
    }

    apply_settings_to_builder(&settings.check(), &mut builder);

    builder.execute(|_, _| ())?;

    let reader = BufReader::new(File::open(&src.as_ref().join("documents.jsonl"))?);

    let mut tmp_doc_file = tempfile::tempfile()?;

    read_ndjson(reader, &mut tmp_doc_file)?;

    tmp_doc_file.seek(SeekFrom::Start(0))?;

    let documents_reader = DocumentBatchReader::from_reader(tmp_doc_file)?;

    // If the document file is empty, we don't perform the document addition, to prevent
    // a primary key error from being thrown.
    if !documents_reader.is_empty() {
        let builder = update_handler
            .update_builder(0)
            .index_documents(&mut txn, &index);
        builder.execute(documents_reader, |_, _| ())?;
    }

    txn.commit()?;

    // Finally, we extract the original milli::Index and close it
    index.prepare_for_closing().wait();

    // Updates are ignored in dumps V1.

    Ok(())
}

/// we need to **always** be able to convert the old settings to the settings currently being used
impl From<Settings> for index_controller::Settings<Unchecked> {
    fn from(settings: Settings) -> Self {
        Self {
            distinct_attribute: match settings.distinct_attribute {
                Some(Some(attr)) => Setting::Set(attr),
                Some(None) => Setting::Reset,
                None => Setting::NotSet
            },
            // we need to convert the old `Vec<String>` into a `BTreeSet<String>`
            displayed_attributes: match settings.displayed_attributes {
                Some(Some(attrs)) => Setting::Set(attrs.into_iter().collect()),
                Some(None) => Setting::Reset,
                None => Setting::NotSet
            },
            searchable_attributes: match settings.searchable_attributes {
                Some(Some(attrs)) => Setting::Set(attrs),
                Some(None) => Setting::Reset,
                None => Setting::NotSet
            },
            filterable_attributes: match settings.attributes_for_faceting {
                Some(Some(attrs)) => Setting::Set(attrs.into_iter().collect()),
                Some(None) => Setting::Reset,
                None => Setting::NotSet
            },
            sortable_attributes: Setting::NotSet,
            ranking_rules: match settings.ranking_rules {
                Some(Some(ranking_rules)) => Setting::Set(ranking_rules.into_iter().filter_map(|criterion| {
                    match criterion.as_str() {
                        "words" | "typo" | "proximity" | "attribute" | "exactness" => Some(criterion),
                        s if s.starts_with("asc") => asc_ranking_rule(s).map(|f| format!("{}:asc", f)),
                        s if s.starts_with("desc") => desc_ranking_rule(s).map(|f| format!("{}:desc", f)),
                        "wordsPosition" => {
                            warn!("The criteria `attribute` and `wordsPosition` have been merged \
                                into a single criterion `attribute` so `wordsPositon` will be \
                                ignored");
                            None
                        }
                        s => {
                            error!("Unknown criterion found in the dump: `{}`, it will be ignored", s);
                            None
                        }
                    }
                }).collect()),
                Some(None) => Setting::Reset,
                None => Setting::NotSet
            },
            // we need to convert the old `Vec<String>` into a `BTreeSet<String>`
            stop_words: match settings.stop_words {
                Some(Some(stop_words)) => Setting::Set(stop_words.into_iter().collect()),
                Some(None) => Setting::Reset,
                None => Setting::NotSet
            },
            // we need to convert the old `Vec<String>` into a `BTreeMap<String>`
            synonyms: match settings.synonyms {
                Some(Some(synonyms)) => Setting::Set(synonyms.into_iter().collect()),
                Some(None) => Setting::Reset,
                None => Setting::NotSet
            },
            _kind: PhantomData,
        }
    }
}
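
The ranking-rule arm above rewrites legacy `asc(field)`/`desc(field)` rules into the newer `field:asc` syntax. A self-contained check of that conversion (the parser is copied from the compat module introduced earlier in this diff):

fn asc_ranking_rule(text: &str) -> Option<&str> {
    text.split_once("asc(")
        .and_then(|(_, tail)| tail.rsplit_once(')'))
        .map(|(field, _)| field)
}

fn main() {
    // Legacy v1 rule -> new syntax, matching the `format!("{}:asc", f)` arm.
    let converted = asc_ranking_rule("asc(price)").map(|f| format!("{}:asc", f));
    assert_eq!(converted.as_deref(), Some("price:asc"));
}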

/// Extract Settings from `settings.json` file present at provided `dir_path`
fn import_settings(dir_path: impl AsRef<Path>) -> anyhow::Result<Settings> {
    let path = dir_path.as_ref().join("settings.json");
    let file = File::open(path)?;
    let reader = std::io::BufReader::new(file);
    let metadata = serde_json::from_reader(reader)?;

    Ok(metadata)
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn settings_format_regression() {
        let settings = Settings::default();
        assert_eq!(
            r##"{"rankingRules":null,"distinctAttribute":null,"searchableAttributes":null,"displayedAttributes":null,"stopWords":null,"synonyms":null,"attributesForFaceting":null}"##,
            serde_json::to_string(&settings).unwrap()
        );
    }
        anyhow::bail!("The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.")
    }