Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-12-06 20:55:40 +00:00

Compare commits (1 commit): v0.29.2...v0.26.1-cl

| Author | SHA1 | Date |
|---|---|---|
|  | 8058970523 |  |
7  .github/ISSUE_TEMPLATE/config.yml  vendored

@@ -1,10 +1,7 @@
contact_links:
  - name: Language support request & feedback
    url: https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal?discussions_q=label%3Aproduct%3Acore%3Atokenizer+category%3A%22Feedback+%26+Feature+Proposal%22
    about: The requests and feedback regarding Language support are not managed in this repository. Please upvote the related discussion in our dedicated product repository or open a new one if it doesn't exist.
  - name: Feature request & feedback
  - name: Feature request
    url: https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal
    about: The feature requests and feedback regarding the already existing features are not managed in this repository. Please open a discussion in our dedicated product repository
    about: The feature requests are not managed in this repository, please open a discussion in our dedicated product repository
  - name: Documentation issue
    url: https://github.com/meilisearch/documentation/issues/new
    about: For documentation issues, open an issue or a PR in the documentation repository
13  .github/dependabot.yml  vendored

@@ -1,13 +0,0 @@
# Set update schedule for GitHub Actions only

version: 2
updates:

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "monthly"
    labels:
      - 'skip changelog'
      - 'dependencies'
    rebase-strategy: disabled
@@ -1,13 +1,13 @@
#!/bin/sh

# Was used in our CIs to publish the latest docker image. Not used anymore, will be used again when v1 and v2 will be out and we will want to maintain multiple stable versions.
# Returns "true" or "false" (as a string) to be used in the `if` in GHA

# Checks if the current tag should be the latest (in terms of semver and not of release date).
# Ex: previous tag -> v2.1.1
#     new tag -> v1.20.3
#     The new tag (v1.20.3) should NOT be the latest
#     So it returns "false", the `latest` tag should not be updated for the release v1.20.3 and still need to correspond to v2.1.1
# Ex: previous tag -> v0.10.1
#     new tag -> v0.8.12
#     The new tag should not be the latest
#     So it returns "false", the CI should not run for the release v0.8.12

# Used in GHA in publish-docker-latest.yml
# Returns "true" or "false" (as a string) to be used in the `if` in GHA

# GLOBAL
GREP_SEMVER_REGEXP='v\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)$' # i.e. v[number].[number].[number]
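To make the comment's examples concrete, here is a standalone sketch of how a tag can be tested against that regexp (illustrative only, not part of the repository):

```bash
# Sketch: testing tags against GREP_SEMVER_REGEXP as defined above.
GREP_SEMVER_REGEXP='v\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)$'
echo 'v1.20.3' | grep -q "$GREP_SEMVER_REGEXP" && echo 'plain vX.Y.Z tag'
echo 'v1.20.3-rc1' | grep -q "$GREP_SEMVER_REGEXP" || echo 'not a plain vX.Y.Z tag'
```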
28  .github/scripts/check-release.sh  vendored

@@ -1,28 +0,0 @@
#!/bin/bash

# check_tag $current_tag $file_tag $file_name
function check_tag {
    if [[ "$1" != "$2" ]]; then
        echo "Error: the current tag does not match the version in $3: found $2 - expected $1"
        ret=1
    fi
}

ret=0
current_tag=${GITHUB_REF#'refs/tags/v'}

toml_files='*/Cargo.toml'
for toml_file in $toml_files;
do
    file_tag="$(grep '^version = ' $toml_file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
    check_tag $current_tag $file_tag $toml_file
done

lock_file='Cargo.lock'
lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')
check_tag $current_tag $lock_tag $lock_file

if [[ "$ret" -eq 0 ]] ; then
    echo 'OK'
fi
exit $ret
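Since the script derives the tag from `GITHUB_REF`, it can be dry-run outside of CI by faking that variable (a sketch, assuming you are at the repository root):

```bash
# Sketch: exercising check-release.sh locally; CI normally provides GITHUB_REF.
GITHUB_REF='refs/tags/v0.29.2' bash .github/scripts/check-release.sh
```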
20  .github/workflows/README.md  vendored  (new file)

@@ -0,0 +1,20 @@
# GitHub Actions Workflow for Meilisearch

> **Note:**
> - We do not use [cache](https://github.com/actions/cache) yet but we could use it to speed up CI

## Workflow

- On each pull request, we trigger `cargo test`.
- On each tag, we build:
  - the tagged Docker image and publish it to Docker Hub
  - the binaries for MacOS, Ubuntu, and Windows
  - the Debian package
- On each stable release (`v*.*.*` tag):
  - we build the `latest` Docker image and publish it to Docker Hub
  - we publish the binary to Homebrew and Gemfury

## Problems

- We do not test on Windows because we are unable to make it work, there is a disk space problem.
4  .github/workflows/coverage.yml  vendored

@@ -8,7 +8,7 @@ jobs:
  nightly-coverage:
    runs-on: ubuntu-18.04
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
@@ -25,7 +25,7 @@ jobs:
          RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=unwind -Zpanic_abort_tests"
      - uses: actions-rs/grcov@v0.1
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        uses: codecov/codecov-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          file: ${{ steps.coverage.outputs.report }}
23  .github/workflows/create-issue-dependencies.yml  vendored

@@ -1,23 +0,0 @@
name: Create issue to upgrade dependencies
on:
  schedule:
    - cron: '0 0 1 */3 *'
  workflow_dispatch:

jobs:
  create-issue:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Create an issue
        uses: actions-ecosystem/action-create-issue@v1
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          title: Upgrade dependencies
          body: |
            We need to update the dependencies of the Meilisearch repository, and, if possible, the dependencies of all the core-team repositories that Meilisearch depends on (milli, charabia, heed...).

            ⚠️ This issue should only be done at the beginning of the sprint!
          labels: |
            dependencies
            maintenance
2  .github/workflows/flaky.yml  vendored

@@ -8,7 +8,7 @@ jobs:
    runs-on: ubuntu-18.04

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - name: Install cargo-flaky
        run: cargo install cargo-flaky
      - name: Run cargo flaky 100 times
33  .github/workflows/publish-binaries.yml  vendored

@@ -5,33 +5,9 @@ on:
name: Publish binaries to release

jobs:
  check-version:
    name: Check the version validity
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      # Check if the tag has the v<number>.<number>.<number> format.
      # If yes, it means we are publishing an official release.
      # If no, we are releasing a RC, so no need to check the version.
      - name: Check tag format
        if: github.event_name != 'schedule'
        id: check-tag-format
        run: |
          escaped_tag=$(printf "%q" ${{ github.ref_name }})

          if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
            echo ::set-output name=stable::true
          else
            echo ::set-output name=stable::false
          fi
      - name: Check release validity
        if: steps.check-tag-format.outputs.stable == 'true'
        run: bash .github/scripts/check-release.sh

  publish:
    name: Publish binary for ${{ matrix.os }}
    name: Publish for ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    needs: check-version
    strategy:
      fail-fast: false
      matrix:
@@ -51,7 +27,7 @@ jobs:
      - uses: hecrj/setup-rust-action@master
        with:
          rust-version: stable
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - name: Build
        run: cargo build --release --locked
      - name: Upload binaries to release
@@ -63,9 +39,8 @@ jobs:
          tag: ${{ github.ref }}

  publish-aarch64:
    name: Publish binary for aarch64
    name: Publish to GitHub
    runs-on: ${{ matrix.os }}
    needs: check-version
    continue-on-error: false
    strategy:
      fail-fast: false
@@ -80,7 +55,7 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        uses: actions/checkout@v2

      - name: Installing Rust toolchain
        uses: actions-rs/toolchain@v1
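The `check-tag-format` step hinges on a single bash regex; the same test can be sketched on its own to see which refs count as stable:

```bash
# Sketch: the stable-tag test from check-tag-format, runnable standalone.
for tag in v0.29.2 v0.29.2-rc.0 latest; do
  if [[ $tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
    echo "$tag -> stable=true"
  else
    echo "$tag -> stable=false"
  fi
done
```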
12  .github/workflows/publish-deb-brew-pkg.yml  vendored

@@ -5,25 +5,16 @@ on:
  types: [released]

jobs:
  check-version:
    name: Check the version validity
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Check release validity
        run: bash .github/scripts/check-release.sh

  debian:
    name: Publish debian package
    runs-on: ubuntu-18.04
    needs: check-version
    steps:
      - uses: hecrj/setup-rust-action@master
        with:
          rust-version: stable
      - name: Install cargo-deb
        run: cargo install cargo-deb
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - name: Build deb package
        run: cargo deb -p meilisearch-http -o target/debian/meilisearch.deb
      - name: Upload debian pkg to release
@@ -39,7 +30,6 @@ jobs:
  homebrew:
    name: Bump Homebrew formula
    runs-on: ubuntu-18.04
    needs: check-version
    steps:
      - name: Create PR to Homebrew
        uses: mislav/bump-homebrew-formula-action@v1
71  .github/workflows/publish-docker-images.yml  vendored

@@ -1,71 +0,0 @@
---
on:
  schedule:
    - cron: '0 4 * * *' # Every day at 4:00am
  push:
    tags:
      - '*'

name: Publish tagged images to Docker Hub

jobs:
  docker:
    runs-on: docker
    steps:
      - uses: actions/checkout@v2

      # Check if the tag has the v<number>.<number>.<number> format. If yes, it means we are publishing an official release.
      # In this situation, we need to set `output.stable` to create/update the following tags (additionally to the `vX.Y.Z` Docker tag):
      # - a `vX.Y` (without patch version) Docker tag
      # - a `latest` Docker tag
      - name: Check tag format
        if: github.event_name != 'schedule'
        id: check-tag-format
        run: |
          escaped_tag=$(printf "%q" ${{ github.ref_name }})

          if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
            echo ::set-output name=stable::true
          else
            echo ::set-output name=stable::false
          fi

      # Check only the validity of the tag for official releases (not for pre-releases or other tags)
      - name: Check release validity
        if: github.event_name != 'schedule' && steps.check-tag-format.outputs.stable == 'true'
        run: bash .github/scripts/check-release.sh

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to Docker Hub
        if: github.event_name != 'schedule'
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: getmeili/meilisearch
          # The latest and `vX.Y` tags are only pushed for the official Meilisearch releases
          # See https://github.com/docker/metadata-action#latest-tag
          flavor: latest=false
          tags: |
            type=ref,event=tag
            type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
            type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' }}

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v3
        with:
          # We do not push tags for the cron jobs, this is only for test purposes
          push: ${{ github.event_name != 'schedule' }}
          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.meta.outputs.tags }}
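Under those metadata rules, a stable tag fans out into three Docker tags while any other ref keeps only its own; a sketch of the expected fan-out (not output from a real run):

```bash
# Sketch: Docker tags the metadata-action configuration above would emit.
# push of v0.29.2 (check-tag-format says stable=true):
#   getmeili/meilisearch:v0.29.2   <- type=ref,event=tag
#   getmeili/meilisearch:v0.29     <- type=semver,pattern=v{{major}}.{{minor}}
#   getmeili/meilisearch:latest    <- type=raw,value=latest
# push of a pre-release tag (stable=false): only the type=ref tag is created.
```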
30  .github/workflows/publish-docker-latest.yml  vendored  (new file)

@@ -0,0 +1,30 @@
---
on:
  release:
    types: [released]

name: Publish latest image to Docker Hub

jobs:
  docker-latest:
    runs-on: docker
    steps:
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          push: true
          platforms: linux/amd64,linux/arm64
          tags: getmeili/meilisearch:latest
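The net effect for users is a moving `latest` tag; consuming it is the usual one-liner (a sketch using the image name from the workflow above):

```bash
# Sketch: pulling and running the image this workflow publishes.
docker pull getmeili/meilisearch:latest
docker run -p 7700:7700 getmeili/meilisearch:latest
```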
39  .github/workflows/publish-docker-tag.yml  vendored  (new file)

@@ -0,0 +1,39 @@
---
on:
  push:
    tags:
      - '*'

name: Publish tagged image to Docker Hub

jobs:
  docker-tag:
    runs-on: docker
    steps:
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: getmeili/meilisearch
          flavor: latest=false
          tags: type=ref,event=tag

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          push: true
          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.meta.outputs.tags }}
34  .github/workflows/rust.yml  vendored

@@ -12,7 +12,6 @@ on:
env:
  CARGO_TERM_COLOR: always
  RUST_BACKTRACE: 1
  RUSTFLAGS: "-D warnings"

jobs:
  tests:
@@ -23,9 +22,9 @@ jobs:
      matrix:
        os: [ubuntu-18.04, macos-latest, windows-latest]
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - name: Cache dependencies
        uses: Swatinem/rust-cache@v2.0.0
        uses: Swatinem/rust-cache@v1.3.0
      - name: Run cargo check without any default features
        uses: actions-rs/cargo@v1
        with:
@@ -37,30 +36,11 @@ jobs:
          command: test
          args: --locked --release

  # We run tests in debug also, to make sure that the debug_assertions are hit
  test-debug:
    name: Run tests in debug
    runs-on: ubuntu-18.04
    steps:
      - uses: actions/checkout@v3
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - name: Cache dependencies
        uses: Swatinem/rust-cache@v2.0.0
      - name: Run tests in debug
        uses: actions-rs/cargo@v1
        with:
          command: test
          args: --locked

  clippy:
    name: Run Clippy
    runs-on: ubuntu-18.04
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
@@ -68,7 +48,7 @@ jobs:
          override: true
          components: clippy
      - name: Cache dependencies
        uses: Swatinem/rust-cache@v2.0.0
        uses: Swatinem/rust-cache@v1.3.0
      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        with:
@@ -79,14 +59,14 @@ jobs:
    name: Run Rustfmt
    runs-on: ubuntu-18.04
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          toolchain: nightly
          override: true
          components: rustfmt
      - name: Cache dependencies
        uses: Swatinem/rust-cache@v2.0.0
        uses: Swatinem/rust-cache@v1.3.0
      - name: Run cargo fmt
        run: cargo fmt --all -- --check
CONTRIBUTING.md

@@ -4,21 +4,15 @@ First, thank you for contributing to Meilisearch! The goal of this document is t

Remember that there are many ways to contribute other than writing code: writing [tutorials or blog posts](https://github.com/meilisearch/awesome-meilisearch), improving [the documentation](https://github.com/meilisearch/documentation), submitting [bug reports](https://github.com/meilisearch/meilisearch/issues/new?assignees=&labels=&template=bug_report.md&title=) and [feature requests](https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal)...

The code in this repository is only concerned with managing multiple indexes, handling the update store, and exposing an HTTP API. Search and indexation are the domain of our core engine, [`milli`](https://github.com/meilisearch/milli), while tokenization is handled by [our `charabia` library](https://github.com/meilisearch/charabia/).

If Meilisearch does not offer optimized support for your language, please consider contributing to `charabia` by following the [CONTRIBUTING.md file](https://github.com/meilisearch/charabia/blob/main/CONTRIBUTING.md) and integrating your intended normalizer/segmenter.

## Table of Contents

- [Assumptions](#assumptions)
- [How to Contribute](#how-to-contribute)
- [Development Workflow](#development-workflow)
- [Git Guidelines](#git-guidelines)
- [Release Process (for internal team only)](#release-process-for-internal-team-only)

## Assumptions

1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
1. **You're familiar with [Github](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
2. **You've read the Meilisearch [documentation](https://docs.meilisearch.com).**
3. **You know about the [Meilisearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
   Please use this for help.**

@@ -28,7 +22,7 @@ If Meilisearch does not offer optimized support for your language, please consid

1. Ensure your change has an issue! Find an
   [existing issue](https://github.com/meilisearch/meilisearch/issues/) or [open a new issue](https://github.com/meilisearch/meilisearch/issues/new).
   * This is where you can get a feel if the change will be accepted or not.
2. Once approved, [fork the Meilisearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own GitHub account.
2. Once approved, [fork the Meilisearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own Github account.
3. [Create a new Git branch](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-and-deleting-branches-within-your-repository)
4. Review the [Development Workflow](#development-workflow) section that describes the steps to maintain the repository.
5. Make your changes on your branch.

@@ -50,8 +44,6 @@ We recommend using the `--release` flag to test the full performance of Meilisea

cargo test
```

This command will be triggered to each PR as a requirement for merging it.

If you get a "Too many open files" error you might want to increase the open file limit using this command:

```bash
@@ -76,7 +68,7 @@ As minimal requirements, your commit message should:

We don't follow any other convention, but if you want to use one, we recommend [the Chris Beams one](https://chris.beams.io/posts/git-commit/).

### GitHub Pull Requests
### Github Pull Requests

Some notes on GitHub PRs:

@@ -86,29 +78,6 @@ Some notes on GitHub PRs:
  The draft PRs are recommended when you want to show that you are working on something and make your work visible.
- The branch related to the PR must be **up-to-date with `main`** before merging. Fortunately, this project uses [Bors](https://github.com/bors-ng/bors-ng) to automatically enforce this requirement without the PR author having to rebase manually.

## Release Process (for internal team only)

Meilisearch tools follow the [Semantic Versioning Convention](https://semver.org/).

### Automation to rebase and Merge the PRs

This project integrates a bot that helps us manage pull requests merging.<br>
_[Read more about this](https://github.com/meilisearch/integration-guides/blob/main/resources/bors.md)._

### How to Publish a new Release

The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/core-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release.

### Release assets

For each release, the following assets are created:
- Binaries for different platforms (Linux, MacOS, Windows and ARM architectures) are attached to the GitHub release
- Binaries are pushed to HomeBrew and APT (not published for RC)
- Docker tags are created/updated:
  - `vX.Y.Z`
  - `vX.Y` (not published for RC)
  - `latest` (not published for RC)

<hr>

Thank you again for reading this through, we can not wait to begin to work with you if you made your way through this contributing guide ❤️
2156  Cargo.lock  (generated)
File diff suppressed because it is too large.
13  Cargo.toml

@@ -1,18 +1,7 @@
[workspace]
resolver = "2"
members = [
    "meilisearch-http",
    "meilisearch-types",
    "meilisearch-error",
    "meilisearch-lib",
    "meilisearch-auth",
    "permissive-json-pointer",
]

[profile.release]
codegen-units = 1

[profile.dev.package.flate2]
opt-level = 3

[profile.dev.package.milli]
opt-level = 3
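With the workspace laid out as above, individual members can be built or tested in isolation with cargo's `-p` flag (a sketch):

```bash
# Sketch: targeting single crates listed in [workspace].members.
cargo build --release -p meilisearch-http
cargo test -p meilisearch-auth
```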
51  Dockerfile

@@ -1,25 +1,45 @@
# Compile
FROM rust:alpine3.16 AS compiler
FROM alpine:3.14 AS compiler

RUN apk add -q --update-cache --no-cache build-base openssl-dev
RUN apk update --quiet \
    && apk add -q --no-cache curl build-base

RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y

WORKDIR /meilisearch

COPY Cargo.lock .
COPY Cargo.toml .

COPY meilisearch-auth/Cargo.toml meilisearch-auth/
COPY meilisearch-error/Cargo.toml meilisearch-error/
COPY meilisearch-http/Cargo.toml meilisearch-http/
COPY meilisearch-lib/Cargo.toml meilisearch-lib/

ENV RUSTFLAGS="-C target-feature=-crt-static"

# Create dummy main.rs files for each workspace member to be able to compile all the dependencies
RUN find . -type d -name "meilisearch-*" | xargs -I{} sh -c 'mkdir {}/src; echo "fn main() { }" > {}/src/main.rs;'
# Use `cargo build` instead of `cargo vendor` because we need to not only download but compile dependencies too
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
        export JEMALLOC_SYS_WITH_LG_PAGE=16; \
    fi && \
    $HOME/.cargo/bin/cargo build --release
# Cleanup dummy main.rs files
RUN find . -path "*/src/main.rs" -delete

ARG COMMIT_SHA
ARG COMMIT_DATE
ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE}
ENV RUSTFLAGS="-C target-feature=-crt-static"

COPY . .
RUN set -eux; \
    apkArch="$(apk --print-arch)"; \
    if [ "$apkArch" = "aarch64" ]; then \
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
        export JEMALLOC_SYS_WITH_LG_PAGE=16; \
    fi && \
    cargo build --release
    $HOME/.cargo/bin/cargo build --release

# Run
FROM alpine:3.16
FROM alpine:3.14

ENV MEILI_HTTP_ADDR 0.0.0.0:7700
ENV MEILI_SERVER_PROVIDER docker
@@ -27,20 +47,9 @@ ENV MEILI_SERVER_PROVIDER docker
RUN apk update --quiet \
    && apk add -q --no-cache libgcc tini curl

# add meilisearch to the `/bin` so you can run it from anywhere and it's easy
# to find.
COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch
# To stay compatible with the older version of the container (pre v0.27.0) we're
# going to symlink the meilisearch binary in the path to `/meilisearch`
RUN ln -s /bin/meilisearch /meilisearch

# This directory should hold all the data related to meilisearch so we're going
# to move our PWD in there.
# We don't want to put the meilisearch binary
WORKDIR /meili_data

COPY --from=compiler /meilisearch/target/release/meilisearch .

EXPOSE 7700/tcp

ENTRYPOINT ["tini", "--"]
CMD /bin/meilisearch
CMD ./meilisearch
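Both revisions of the Dockerfile declare `COMMIT_SHA` and `COMMIT_DATE` build args; a local build could therefore look like this (a sketch, not the project's official invocation):

```bash
# Sketch: building the image locally, filling the build args the Dockerfile declares.
docker build \
  --build-arg COMMIT_SHA="$(git rev-parse HEAD)" \
  --build-arg COMMIT_DATE="$(git show -s --format=%cI HEAD)" \
  -t meilisearch:local .
```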
2  LICENSE

@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019-2022 Meili SAS
Copyright (c) 2019-2022 Meilisearch

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
212  README.md

@@ -1,103 +1,205 @@
<p align="center">
  <img src="assets/meilisearch-logo-light.svg?sanitize=true#gh-light-mode-only">
  <img src="assets/meilisearch-logo-dark.svg?sanitize=true#gh-dark-mode-only">
  <img src="assets/logo.svg" alt="Meilisearch" width="200" height="200" />
</p>

<h1 align="center">Meilisearch</h1>

<h4 align="center">
  <a href="https://www.meilisearch.com">Website</a> |
  <a href="https://roadmap.meilisearch.com/tabs/1-under-consideration">Roadmap</a> |
  <a href="https://blog.meilisearch.com">Blog</a> |
  <a href="https://fr.linkedin.com/company/meilisearch">LinkedIn</a> |
  <a href="https://twitter.com/meilisearch">Twitter</a> |
  <a href="https://docs.meilisearch.com">Documentation</a> |
  <a href="https://docs.meilisearch.com/faq/">FAQ</a> |
  <a href="https://slack.meilisearch.com">Slack</a>
  <a href="https://docs.meilisearch.com/faq/">FAQ</a>
</h4>

<p align="center">
  <a href="https://github.com/meilisearch/meilisearch/actions"><img src="https://github.com/meilisearch/meilisearch/workflows/Cargo%20test/badge.svg" alt="Build Status"></a>
  <a href="https://deps.rs/repo/github/meilisearch/meilisearch"><img src="https://deps.rs/repo/github/meilisearch/meilisearch/status.svg" alt="Dependency status"></a>
  <a href="https://github.com/meilisearch/meilisearch/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
  <a href="https://slack.meilisearch.com"><img src="https://img.shields.io/badge/slack-meilisearch-blue.svg?logo=slack" alt="Slack"></a>
  <a href="https://github.com/meilisearch/meilisearch/discussions" alt="Discussions"><img src="https://img.shields.io/badge/github-discussions-red" /></a>
  <a href="https://app.bors.tech/repositories/26457"><img src="https://bors.tech/images/badge_small.svg" alt="Bors enabled"></a>
</p>

<p align="center">⚡ A lightning-fast search engine that fits effortlessly into your apps, websites, and workflow 🔍</p>
<p align="center">⚡ Lightning Fast, Ultra Relevant, and Typo-Tolerant Search Engine 🔍</p>

Meilisearch helps you shape a delightful search experience in a snap, offering features that work out-of-the-box to speed up your workflow.
**Meilisearch** is a powerful, fast, open-source, easy to use and deploy search engine. Both searching and indexing are highly customizable. Features such as typo-tolerance, filters, and synonyms are provided out-of-the-box.
For more information about features go to [our documentation](https://docs.meilisearch.com/).

<p align="center" name="demo">
  <a href="https://where2watch.meilisearch.com/#gh-light-mode-only" target="_blank">
    <img src="assets/demo-light.gif#gh-light-mode-only" alt="A bright colored application for finding movies screening near the user">
  </a>
  <a href="https://where2watch.meilisearch.com/#gh-dark-mode-only" target="_blank">
    <img src="assets/demo-dark.gif#gh-dark-mode-only" alt="A dark colored application for finding movies screening near the user">
  </a>
<p align="center">
  <img src="assets/trumen-fast.gif" alt="Web interface gif" />
</p>

🔥 [**Try it!**](https://where2watch.meilisearch.com/) 🔥
## ✨ Features
* Search-as-you-type experience (answers < 50 milliseconds)
* Full-text search
* Typo tolerant (understands typos and misspelling)
* Faceted search and filters
* Supports hanzi (Chinese characters)
* Supports synonyms
* Easy to install, deploy, and maintain
* Whole documents are returned
* Highly customizable
* RESTful API

- **Search-as-you-type:** find search results in less than 50 milliseconds
- **[Typo tolerance](https://docs.meilisearch.com/learn/getting_started/customizing_relevancy.html#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
- **[Filtering and faceted search](https://docs.meilisearch.com/learn/advanced/filtering_and_faceted_search.html):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
- **[Sorting](https://docs.meilisearch.com/learn/advanced/sorting.html):** sort results based on price, date, or pretty much anything else your users need
- **[Synonym support](https://docs.meilisearch.com/learn/getting_started/customizing_relevancy.html#synonyms):** configure synonyms to include more relevant content in your search results
- **[Geosearch](https://docs.meilisearch.com/learn/advanced/geosearch.html):** filter and sort documents based on geographic data
- **[Extensive language support](https://docs.meilisearch.com/learn/what_is_meilisearch/language.html):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
- **[Security management](https://docs.meilisearch.com/learn/security/master_api_keys.html):** control which users can access what data with API keys that allow fine-grained permissions handling
- **[Multi-Tenancy](https://docs.meilisearch.com/learn/security/tenant_tokens.html):** personalize search results for any number of application tenants
- **Highly Customizable:** customize Meilisearch to your specific needs or use our out-of-the-box and hassle-free presets
- **[RESTful API](https://docs.meilisearch.com/reference/api/overview.html):** integrate Meilisearch in your technical stack with our plugins and SDKs
- **Easy to install, deploy, and maintain**

## Getting started

## 📖 Documentation
### Deploy the Server

You can consult Meilisearch's documentation at [https://docs.meilisearch.com](https://docs.meilisearch.com/).
#### Homebrew (Mac OS)

## 🚀 Getting started
```bash
brew update && brew install meilisearch
meilisearch
```

For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://docs.meilisearch.com/learn/getting_started/quick_start.html) guide.
#### Docker

You may also want to check out [Meilisearch 101](https://docs.meilisearch.com/learn/getting_started/filtering_and_sorting.html) for an introduction to some of Meilisearch's most popular features.
```bash
docker run -p 7700:7700 -v "$(pwd)/data.ms:/data.ms" getmeili/meilisearch
```

## ☁️ Meilisearch cloud
#### Announcing a cloud-hosted Meilisearch

Join the closed beta for Meilisearch cloud by filling out [this form](https://meilisearch.typeform.com/to/VI2cI2rv).
Join the closed beta by filling out this [form](https://meilisearch.typeform.com/to/FtnzvZfh).

## 🧰 SDKs & integration tools
#### Try Meilisearch in our Sandbox

Install one of our SDKs in your project for seamless integration between Meilisearch and your favorite language or framework!
Create a Meilisearch instance in [Meilisearch Sandbox](https://sandbox.meilisearch.com/). This instance is free, and will be active for 48 hours.

Take a look at the complete [Meilisearch integration list](https://docs.meilisearch.com/learn/what_is_meilisearch/sdks.html).
#### Run on Digital Ocean


[](https://marketplace.digitalocean.com/apps/meilisearch?action=deploy&refcode=7c67bd97e101)

## ⚙️ Advanced usage
#### Deploy on Platform.sh

Experienced users will want to keep our [API Reference](https://docs.meilisearch.com/reference/api) close at hand.
<a href="https://console.platform.sh/projects/create-project?template=https://raw.githubusercontent.com/platformsh/template-builder/master/templates/meilisearch/.platform.template.yaml&utm_content=meilisearch&utm_source=github&utm_medium=button&utm_campaign=deploy_on_platform">
  <img src="https://platform.sh/images/deploy/lg-blue.svg" alt="Deploy on Platform.sh" width="180px" />
</a>

We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://docs.meilisearch.com/learn/advanced/filtering_and_faceted_search.html), [sorting](https://docs.meilisearch.com/learn/advanced/sorting.html), [geosearch](https://docs.meilisearch.com/learn/advanced/geosearch.html), [API keys](https://docs.meilisearch.com/learn/security/master_api_keys.html), and [tenant tokens](https://docs.meilisearch.com/learn/security/tenant_tokens.html).
#### APT (Debian & Ubuntu)

Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://docs.meilisearch.com/learn/core_concepts/documents.html) and [indexes](https://docs.meilisearch.com/learn/core_concepts/indexes.html).
```bash
echo "deb [trusted=yes] https://apt.fury.io/meilisearch/ /" > /etc/apt/sources.list.d/fury.list
apt update && apt install meilisearch-http
meilisearch
```
## 📊 Telemetry
#### Download the binary (Linux & Mac OS)

Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html#how-to-disable-data-collection) whenever you want.
```bash
curl -L https://install.meilisearch.com | sh
./meilisearch
```

To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data.
#### Compile and run it from sources

If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html) of our documentation.
If you have the latest stable Rust toolchain installed on your local system, clone the repository and change it to your working directory.

## 📫 Get in touch!
```bash
git clone https://github.com/meilisearch/meilisearch.git
cd meilisearch
cargo run --release
```

Meilisearch is a search engine created by [Meili](https://www.welcometothejungle.com/en/companies/meilisearch), a software development company based in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/)
### Create an Index and Upload Some Documents

🗞 [Subscribe to our newsletter](https://meilisearch.us2.list-manage.com/subscribe?u=27870f7b71c908a8b359599fb&id=79582d828e) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.
Let's create an index! If you need a sample dataset, use [this movie database](https://www.notion.so/meilisearch/A-movies-dataset-to-test-Meili-1cbf7c9cfa4247249c40edfa22d7ca87#b5ae399b81834705ba5420ac70358a65). You can also find it in the `datasets/` directory.

💌 Want to make a suggestion or give feedback? Here are some of the channels where you can reach us:
```bash
curl -L 'https://bit.ly/2PAcw9l' -o movies.json
```

- For feature requests, please visit our [product repository](https://github.com/meilisearch/product/discussions)
- Found a bug? Open an [issue](https://github.com/meilisearch/meilisearch/issues)!
- Want to be part of our Slack community? [Join us!](https://slack.meilisearch.com/)
- For everything else, please check [this page listing some of the other places where you can find us](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html)
Now, you're ready to index some data.

Thank you for your support!
```bash
curl -i -X POST 'http://127.0.0.1:7700/indexes/movies/documents' \
  --header 'content-type: application/json' \
  --data-binary @movies.json
```
### Search for Documents

#### In command line

The search engine is now aware of your documents and can serve those via an HTTP server.

The [`jq` command-line tool](https://stedolan.github.io/jq/) can greatly help you read the server responses.

```bash
curl 'http://127.0.0.1:7700/indexes/movies/search?q=botman+robin&limit=2' | jq
```

```json
{
  "hits": [
    {
      "id": "415",
      "title": "Batman & Robin",
      "poster": "https://image.tmdb.org/t/p/w1280/79AYCcxw3kSKbhGpx1LiqaCAbwo.jpg",
      "overview": "Along with crime-fighting partner Robin and new recruit Batgirl, Batman battles the dual threat of frosty genius Mr. Freeze and homicidal horticulturalist Poison Ivy. Freeze plans to put Gotham City on ice, while Ivy tries to drive a wedge between the dynamic duo.",
      "release_date": 866768400
    },
    {
      "id": "411736",
      "title": "Batman: Return of the Caped Crusaders",
      "poster": "https://image.tmdb.org/t/p/w1280/GW3IyMW5Xgl0cgCN8wu96IlNpD.jpg",
      "overview": "Adam West and Burt Ward returns to their iconic roles of Batman and Robin. Featuring the voices of Adam West, Burt Ward, and Julie Newmar, the film sees the superheroes going up against classic villains like The Joker, The Riddler, The Penguin and Catwoman, both in Gotham City… and in space.",
      "release_date": 1475888400
    }
  ],
  "nbHits": 8,
  "exhaustiveNbHits": false,
  "query": "botman robin",
  "limit": 2,
  "offset": 0,
  "processingTimeMs": 2
}
```

#### Use the Web Interface

We also deliver an **out-of-the-box [web interface](https://github.com/meilisearch/mini-dashboard)** in which you can test Meilisearch interactively.

You can access the web interface in your web browser at the root of the server. The default URL is [http://127.0.0.1:7700](http://127.0.0.1:7700). All you need to do is open your web browser and enter Meilisearch’s address to visit it. This will lead you to a web page with a search bar that will allow you to search in the selected index.

[See the gif above](#demo)

## Documentation

Now that your Meilisearch server is up and running, you can learn more about how to tune your search engine in [the documentation](https://docs.meilisearch.com).

## Contributing

Hey! We're glad you're thinking about contributing to Meilisearch! Feel free to pick an [issue labeled as `good first issue`](https://github.com/meilisearch/meilisearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22), and to ask any question you need. Some points might not be clear and we are available to help you!

Also, we recommend following the [CONTRIBUTING](./CONTRIBUTING.md) to create your PR.

## Core engine and tokenizer

The code in this repository is only concerned with managing multiple indexes, handling the update store, and exposing an HTTP API.

Search and indexation are the domain of our core engine, [`milli`](https://github.com/meilisearch/milli), while tokenization is handled by [our `tokenizer` library](https://github.com/meilisearch/tokenizer/).
## Telemetry

Meilisearch collects anonymous data regarding general usage.
This helps us better understand developers' usage of Meilisearch features.

To find out more on what information we're retrieving, please see our documentation on [Telemetry](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html).

This program is optional, you can disable these analytics by using the `MEILI_NO_ANALYTICS` env variable.
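As a minimal sketch of the opt-out mentioned above (assuming a boolean-style value is accepted; check the telemetry documentation for the exact semantics):

```bash
# Sketch: launching Meilisearch with analytics disabled via MEILI_NO_ANALYTICS.
MEILI_NO_ANALYTICS=true ./meilisearch
```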
## Feature request

The feature requests are not managed in this repository. Please visit our [dedicated repository](https://github.com/meilisearch/product) to see our work about the Meilisearch product.

If you have a feature request or any feedback about an existing feature, please open [a discussion](https://github.com/meilisearch/product/discussions).
Also, feel free to participate in the current discussions, we are looking forward to reading your comments.

## 💌 Contact

Please visit [this page](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html#contact-us).

Meilisearch is developed by [Meili](https://www.meilisearch.com), a young company. To know more about us, you can [read our blog](https://blog.meilisearch.com). Any suggestion or feedback is highly appreciated. Thank you for your support!
SECURITY.md

@@ -4,7 +4,7 @@ Meilisearch takes the security of our software products and services seriously.

If you believe you have found a security vulnerability in any Meilisearch-owned repository, please report it to us as described below.

## Supported versions
## Suported versions

As long as we are pre-v1.0, only the latest version of Meilisearch will be supported with security updates.
Binary file not shown. (Before: 2.8 MiB)
Binary file not shown. (Before: 1.7 MiB)
Binary file not shown. (Before: 799 KiB)
@@ -1,30 +0,0 @@
[Deleted SVG asset: the Meilisearch wordmark, white text (fill="white") with pink-to-red gradient logo marks (#FF5CAA → #FF4E62), viewBox 495×74; path data omitted. Before: 5.8 KiB]
@@ -1,30 +0,0 @@
|
||||
<svg width="495" height="74" viewBox="0 0 495 74" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M181.84 42.5347C181.84 37.6136 184.199 34.7149 188.716 34.7149C192.963 34.7149 194.378 37.7484 194.378 41.6584V62.6237H203.951V40.5798C203.951 32.3554 199.637 26.4906 191.143 26.4906C186.087 26.4906 182.514 28.041 179.413 31.4791C177.39 28.3781 173.952 26.4906 169.166 26.4906C164.11 26.4906 160.605 28.5804 158.987 31.6139V27.2995H150.156V62.6237H159.728V42.3325C159.728 37.6136 162.155 34.7149 166.604 34.7149C170.851 34.7149 172.267 37.7484 172.267 41.6584V62.6237H181.84V42.5347Z" fill="#21004B"/>
|
||||
<path d="M243.242 47.7255C243.242 47.7255 243.377 46.4447 243.377 44.8942C243.377 34.4452 236.299 26.4906 225.85 26.4906C215.401 26.4906 208.12 34.4452 208.12 44.8942C208.12 55.7476 215.468 63.4326 225.917 63.4326C234.074 63.4326 240.546 58.5115 242.636 51.3658H232.996C231.85 53.9274 229.086 55.2083 226.187 55.2083C221.401 55.2083 218.3 52.5792 217.626 47.7255H243.242ZM225.783 34.1756C230.232 34.1756 233.131 36.8721 233.805 40.8494H217.76C218.569 36.8047 221.401 34.1756 225.783 34.1756Z" fill="#21004B"/>
|
||||
<path d="M244.789 35.5238H249.036V62.6237H258.608V27.2995H244.789V35.5238ZM253.822 22.7155C257.193 22.7155 259.619 20.356 259.619 16.9854C259.619 13.6148 257.193 11.1879 253.822 11.1879C250.451 11.1879 248.024 13.6148 248.024 16.9854C248.024 20.356 250.451 22.7155 253.822 22.7155Z" fill="#21004B"/>
|
||||
<path d="M278.43 54.3993C278.16 54.3993 277.756 54.4667 277.149 54.4667C274.992 54.4667 274.722 53.4556 274.722 51.9725V12.0643H265.15V52.6466C265.15 59.6575 267.846 62.7585 275.464 62.7585C276.745 62.7585 277.958 62.6237 278.43 62.5562V54.3993Z" fill="#21004B"/>
|
||||
<path d="M279.519 35.5238H283.766V62.6237H293.339V27.2995H279.519V35.5238ZM288.553 22.7155C291.923 22.7155 294.35 20.356 294.35 16.9854C294.35 13.6148 291.923 11.1879 288.553 11.1879C285.182 11.1879 282.755 13.6148 282.755 16.9854C282.755 20.356 285.182 22.7155 288.553 22.7155Z" fill="#21004B"/>
|
||||
<path d="M312.557 62.9939C321.86 62.9939 326.242 58.0728 326.242 52.882C326.242 38.4557 305.007 46.4778 305.007 36.9726C305.007 33.8717 307.636 31.2426 312.962 31.2426C318.422 31.2426 320.984 34.2087 321.388 37.9164H326.175C325.77 33.265 322.602 27.063 313.097 27.063C304.94 27.063 300.356 31.9167 300.356 37.1749C300.356 51.2641 321.591 43.1746 321.591 53.0168C321.591 56.4548 318.355 58.8143 312.557 58.8143C306.625 58.8143 303.659 55.8481 303.322 51.4663H298.468C298.872 57.466 302.648 62.9939 312.557 62.9939Z" fill="#21004B"/>
|
||||
<path d="M364.256 46.4104C364.256 46.4104 364.324 45.3318 364.324 44.5903C364.324 34.8829 358.054 27.063 347.808 27.063C337.494 27.063 330.955 35.4896 330.955 44.9947C330.955 54.6347 337.022 62.9939 347.875 62.9939C356.032 62.9939 361.695 58.0053 363.717 51.4663H358.728C357.245 55.6459 353.201 58.6795 347.942 58.6795C340.729 58.6795 336.213 53.3539 335.741 46.4104H364.256ZM347.808 31.3774C354.549 31.3774 358.931 35.894 359.537 42.5005H335.876C336.685 36.1637 341.134 31.3774 347.808 31.3774Z" fill="#21004B"/>
|
||||
<path d="M394.037 45.8711V49.1069C394.037 54.9718 389.79 59.0165 381.633 59.0165C376.578 59.0165 373.814 56.9267 373.814 52.4101C373.814 50.1181 374.892 48.3654 376.578 47.4216C378.33 46.4778 380.69 45.8711 394.037 45.8711ZM381.094 62.9939C387.026 62.9939 391.813 61.1063 394.24 57.1964V62.1849H398.824V39.7366C398.824 32.1189 394.442 27.063 384.532 27.063C375.027 27.063 370.847 31.8493 369.971 37.9838H374.623C375.566 33.1301 379.274 31.1752 384.33 31.1752C390.802 31.1752 394.037 33.8717 394.037 39.6691V41.8938C383.184 41.8938 378.667 42.096 375.297 43.4442C371.387 44.9947 369.095 48.4328 369.095 52.5449C369.095 58.5446 372.937 62.9939 381.094 62.9939Z" fill="#21004B"/>
|
||||
<path d="M424.991 27.6023C424.991 27.6023 424.182 27.5349 423.845 27.5349C417.508 27.5349 414.138 30.8381 412.857 33.1975V27.872H408.273V62.1849H413.059V42.7027C413.059 35.557 417.441 32.0515 423.306 32.0515C424.182 32.0515 424.991 32.1189 424.991 32.1189V27.6023Z" fill="#21004B"/>
|
||||
<path d="M425.809 45.0621C425.809 54.4325 432.28 62.9939 442.729 62.9939C452.032 62.9939 457.425 56.7919 458.773 49.9832H453.92C452.504 55.3088 448.594 58.6795 442.729 58.6795C435.516 58.6795 430.662 52.9494 430.662 45.0621C430.662 37.1075 435.516 31.3774 442.729 31.3774C448.594 31.3774 452.504 34.748 453.92 40.0736H458.773C457.425 33.265 452.032 27.063 442.729 27.063C432.28 27.063 425.809 35.6244 425.809 45.0621Z" fill="#21004B"/>
|
||||
<path d="M470.041 11.6255H465.255V62.1849H470.041V41.8938C470.041 34.8829 474.558 31.2426 480.355 31.2426C486.49 31.2426 489.389 35.0177 489.389 41.2196V62.1849H494.175V40.2759C494.175 32.6582 489.658 27.063 481.164 27.063C474.76 27.063 471.255 30.5685 470.041 32.6582V11.6255Z" fill="#21004B"/>
|
||||
<path d="M0.824951 73.993L24.0688 14.5224C27.3443 6.14179 35.4223 0.625977 44.4202 0.625977H58.4336L35.1898 60.0966C31.9143 68.4772 23.8363 73.993 14.8383 73.993H0.824951Z" fill="url(#paint0_linear_0_15)"/>
|
||||
<path d="M34.9246 73.9932L58.1684 14.5226C61.4439 6.14197 69.5219 0.626152 78.5199 0.626152H92.5332L69.2894 60.0968C66.0139 68.4774 57.9359 73.9932 48.9379 73.9932H34.9246Z" fill="url(#paint1_linear_0_15)"/>
|
||||
<path d="M69.0262 73.9932L92.27 14.5226C95.5455 6.14197 103.623 0.626152 112.621 0.626152H126.635L103.391 60.0968C100.115 68.4774 92.0375 73.9932 83.0395 73.9932H69.0262Z" fill="url(#paint2_linear_0_15)"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear_0_15" x1="126.635" y1="-4.97799" x2="0.824952" y2="66.0978" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#FF5CAA"/>
|
||||
<stop offset="1" stop-color="#FF4E62"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint1_linear_0_15" x1="126.635" y1="-4.97799" x2="0.824952" y2="66.0978" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#FF5CAA"/>
|
||||
<stop offset="1" stop-color="#FF4E62"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint2_linear_0_15" x1="126.635" y1="-4.97799" x2="0.824952" y2="66.0978" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#FF5CAA"/>
|
||||
<stop offset="1" stop-color="#FF4E62"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
@@ -3,8 +3,7 @@ status = [
    'Tests on macos-latest',
    'Tests on windows-latest',
    'Run Clippy',
    'Run Rustfmt',
    'Run tests in debug',
    'Run Rustfmt'
]
pr_status = ['Milestone Check']
# 3 hours timeout
@@ -67,8 +67,8 @@ semverLT() {
    return 1
}

# Get a token from https://github.com/settings/tokens to increase rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo'
# Create GITHUB_PAT environment variable once you acquired the token to start using it
# Get a token from https://github.com/settings/tokens to increasae rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo'
# Create GITHUB_PAT enviroment variable once you aquired the token to start using it
# Returns the tag of the latest stable release (in terms of semver and not of release date)
get_latest() {
    temp_file='temp_file' # temp_file needed because the grep would start before the download is over
@@ -89,7 +89,7 @@ get_latest() {
    latest=''
    current_tag=''
    for release_info in $releases; do
        if [ $i -eq 0 ]; then # Checking tag_name
        if [ $i -eq 0 ]; then # Cheking tag_name
            if echo "$release_info" | grep -q "$GREP_SEMVER_REGEXP"; then # If it's not an alpha or beta release
                current_tag=$release_info
            else
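To make the selection rule in the comments above concrete, here is a minimal Rust sketch of the same "latest stable tag" logic (function names are hypothetical; the actual script implements this with grep and sort over the GitHub releases API):

// Tags look like "vX.Y.Z"; alpha/beta tags fail to parse and are skipped.
fn parse_semver(tag: &str) -> Option<(u64, u64, u64)> {
    let mut parts = tag.strip_prefix('v')?.split('.');
    let major = parts.next()?.parse().ok()?;
    let minor = parts.next()?.parse().ok()?;
    let patch = parts.next()?.parse().ok()?;
    parts.next().is_none().then(|| (major, minor, patch))
}

// The latest release is the semver maximum, not the most recently published tag.
fn latest_stable(tags: &[&str]) -> Option<String> {
    tags.iter()
        .filter_map(|tag| parse_semver(tag).map(|version| (version, *tag)))
        .max_by_key(|(version, _)| *version)
        .map(|(_, tag)| tag.to_string())
}

// latest_stable(&["v2.1.1", "v1.20.3"]) == Some("v2.1.1")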
File diff suppressed because it is too large
@@ -1,17 +1,15 @@
[package]
name = "meilisearch-auth"
version = "0.29.2"
version = "0.26.1"
edition = "2021"

[dependencies]
enum-iterator = "0.7.0"
hmac = "0.12.1"
meilisearch-types = { path = "../meilisearch-types" }
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.33.5" }
rand = "0.8.4"
serde = { version = "1.0.136", features = ["derive"] }
serde_json = { version = "1.0.85", features = ["preserve_order"] }
sha2 = "0.10.2"
thiserror = "1.0.30"
heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
sha2 = "0.9.6"
meilisearch-error = { path = "../meilisearch-error" }
serde_json = { version = "1.0.67", features = ["preserve_order"] }
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
uuid = { version = "1.1.2", features = ["serde", "v4"] }
rand = "0.8.4"
serde = { version = "1.0.130", features = ["derive"] }
thiserror = "1.0.28"
@@ -1,134 +1,104 @@
use enum_iterator::IntoEnumIterator;
use serde::{Deserialize, Serialize};
use std::hash::Hash;

#[derive(IntoEnumIterator, Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash)]
#[derive(IntoEnumIterator, Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[repr(u8)]
pub enum Action {
    #[serde(rename = "*")]
    All = 0,
    #[serde(rename = "search")]
    Search,
    #[serde(rename = "documents.*")]
    DocumentsAll,
    Search = actions::SEARCH,
    #[serde(rename = "documents.add")]
    DocumentsAdd,
    DocumentsAdd = actions::DOCUMENTS_ADD,
    #[serde(rename = "documents.get")]
    DocumentsGet,
    DocumentsGet = actions::DOCUMENTS_GET,
    #[serde(rename = "documents.delete")]
    DocumentsDelete,
    #[serde(rename = "indexes.*")]
    IndexesAll,
    DocumentsDelete = actions::DOCUMENTS_DELETE,
    #[serde(rename = "indexes.create")]
    IndexesAdd,
    IndexesAdd = actions::INDEXES_CREATE,
    #[serde(rename = "indexes.get")]
    IndexesGet,
    IndexesGet = actions::INDEXES_GET,
    #[serde(rename = "indexes.update")]
    IndexesUpdate,
    IndexesUpdate = actions::INDEXES_UPDATE,
    #[serde(rename = "indexes.delete")]
    IndexesDelete,
    #[serde(rename = "tasks.*")]
    TasksAll,
    IndexesDelete = actions::INDEXES_DELETE,
    #[serde(rename = "tasks.get")]
    TasksGet,
    #[serde(rename = "settings.*")]
    SettingsAll,
    TasksGet = actions::TASKS_GET,
    #[serde(rename = "settings.get")]
    SettingsGet,
    SettingsGet = actions::SETTINGS_GET,
    #[serde(rename = "settings.update")]
    SettingsUpdate,
    #[serde(rename = "stats.*")]
    StatsAll,
    SettingsUpdate = actions::SETTINGS_UPDATE,
    #[serde(rename = "stats.get")]
    StatsGet,
    #[serde(rename = "metrics.*")]
    MetricsAll,
    #[serde(rename = "metrics.get")]
    MetricsGet,
    #[serde(rename = "dumps.*")]
    DumpsAll,
    StatsGet = actions::STATS_GET,
    #[serde(rename = "dumps.create")]
    DumpsCreate,
    DumpsCreate = actions::DUMPS_CREATE,
    #[serde(rename = "dumps.get")]
    DumpsGet = actions::DUMPS_GET,
    #[serde(rename = "version")]
    Version,
    #[serde(rename = "keys.create")]
    KeysAdd,
    #[serde(rename = "keys.get")]
    KeysGet,
    #[serde(rename = "keys.update")]
    KeysUpdate,
    #[serde(rename = "keys.delete")]
    KeysDelete,
    Version = actions::VERSION,
}

impl Action {
    pub const fn from_repr(repr: u8) -> Option<Self> {
    pub fn from_repr(repr: u8) -> Option<Self> {
        use actions::*;
        match repr {
            ALL => Some(Self::All),
            0 => Some(Self::All),
            SEARCH => Some(Self::Search),
            DOCUMENTS_ALL => Some(Self::DocumentsAll),
            DOCUMENTS_ADD => Some(Self::DocumentsAdd),
            DOCUMENTS_GET => Some(Self::DocumentsGet),
            DOCUMENTS_DELETE => Some(Self::DocumentsDelete),
            INDEXES_ALL => Some(Self::IndexesAll),
            INDEXES_CREATE => Some(Self::IndexesAdd),
            INDEXES_GET => Some(Self::IndexesGet),
            INDEXES_UPDATE => Some(Self::IndexesUpdate),
            INDEXES_DELETE => Some(Self::IndexesDelete),
            TASKS_ALL => Some(Self::TasksAll),
            TASKS_GET => Some(Self::TasksGet),
            SETTINGS_ALL => Some(Self::SettingsAll),
            SETTINGS_GET => Some(Self::SettingsGet),
            SETTINGS_UPDATE => Some(Self::SettingsUpdate),
            STATS_ALL => Some(Self::StatsAll),
            STATS_GET => Some(Self::StatsGet),
            METRICS_ALL => Some(Self::MetricsAll),
            METRICS_GET => Some(Self::MetricsGet),
            DUMPS_ALL => Some(Self::DumpsAll),
            DUMPS_CREATE => Some(Self::DumpsCreate),
            DUMPS_GET => Some(Self::DumpsGet),
            VERSION => Some(Self::Version),
            KEYS_CREATE => Some(Self::KeysAdd),
            KEYS_GET => Some(Self::KeysGet),
            KEYS_UPDATE => Some(Self::KeysUpdate),
            KEYS_DELETE => Some(Self::KeysDelete),
            _otherwise => None,
        }
    }

    pub const fn repr(&self) -> u8 {
        *self as u8
    pub fn repr(&self) -> u8 {
        use actions::*;
        match self {
            Self::All => 0,
            Self::Search => SEARCH,
            Self::DocumentsAdd => DOCUMENTS_ADD,
            Self::DocumentsGet => DOCUMENTS_GET,
            Self::DocumentsDelete => DOCUMENTS_DELETE,
            Self::IndexesAdd => INDEXES_CREATE,
            Self::IndexesGet => INDEXES_GET,
            Self::IndexesUpdate => INDEXES_UPDATE,
            Self::IndexesDelete => INDEXES_DELETE,
            Self::TasksGet => TASKS_GET,
            Self::SettingsGet => SETTINGS_GET,
            Self::SettingsUpdate => SETTINGS_UPDATE,
            Self::StatsGet => STATS_GET,
            Self::DumpsCreate => DUMPS_CREATE,
            Self::DumpsGet => DUMPS_GET,
            Self::Version => VERSION,
        }
    }
}

pub mod actions {
    use super::Action::*;

    pub(crate) const ALL: u8 = All.repr();
    pub const SEARCH: u8 = Search.repr();
    pub const DOCUMENTS_ALL: u8 = DocumentsAll.repr();
    pub const DOCUMENTS_ADD: u8 = DocumentsAdd.repr();
    pub const DOCUMENTS_GET: u8 = DocumentsGet.repr();
    pub const DOCUMENTS_DELETE: u8 = DocumentsDelete.repr();
    pub const INDEXES_ALL: u8 = IndexesAll.repr();
    pub const INDEXES_CREATE: u8 = IndexesAdd.repr();
    pub const INDEXES_GET: u8 = IndexesGet.repr();
    pub const INDEXES_UPDATE: u8 = IndexesUpdate.repr();
    pub const INDEXES_DELETE: u8 = IndexesDelete.repr();
    pub const TASKS_ALL: u8 = TasksAll.repr();
    pub const TASKS_GET: u8 = TasksGet.repr();
    pub const SETTINGS_ALL: u8 = SettingsAll.repr();
    pub const SETTINGS_GET: u8 = SettingsGet.repr();
    pub const SETTINGS_UPDATE: u8 = SettingsUpdate.repr();
    pub const STATS_ALL: u8 = StatsAll.repr();
    pub const STATS_GET: u8 = StatsGet.repr();
    pub const METRICS_ALL: u8 = MetricsAll.repr();
    pub const METRICS_GET: u8 = MetricsGet.repr();
    pub const DUMPS_ALL: u8 = DumpsAll.repr();
    pub const DUMPS_CREATE: u8 = DumpsCreate.repr();
    pub const VERSION: u8 = Version.repr();
    pub const KEYS_CREATE: u8 = KeysAdd.repr();
    pub const KEYS_GET: u8 = KeysGet.repr();
    pub const KEYS_UPDATE: u8 = KeysUpdate.repr();
    pub const KEYS_DELETE: u8 = KeysDelete.repr();
    pub const SEARCH: u8 = 1;
    pub const DOCUMENTS_ADD: u8 = 2;
    pub const DOCUMENTS_GET: u8 = 3;
    pub const DOCUMENTS_DELETE: u8 = 4;
    pub const INDEXES_CREATE: u8 = 5;
    pub const INDEXES_GET: u8 = 6;
    pub const INDEXES_UPDATE: u8 = 7;
    pub const INDEXES_DELETE: u8 = 8;
    pub const TASKS_GET: u8 = 9;
    pub const SETTINGS_GET: u8 = 10;
    pub const SETTINGS_UPDATE: u8 = 11;
    pub const STATS_GET: u8 = 12;
    pub const DUMPS_CREATE: u8 = 13;
    pub const DUMPS_GET: u8 = 14;
    pub const VERSION: u8 = 15;
}
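For illustration, a sketch of how the repr defined above is meant to round-trip between the enum and its stored byte (a hypothetical test, assuming the crate is available as meilisearch_auth):

use meilisearch_auth::{actions, Action};

#[test]
fn action_repr_round_trips() {
    let byte = Action::DocumentsAdd.repr();
    assert_eq!(byte, actions::DOCUMENTS_ADD);
    assert_eq!(Action::from_repr(byte), Some(Action::DocumentsAdd));
    // Unknown discriminants decode to None rather than panicking.
    assert_eq!(Action::from_repr(200), None);
}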
@@ -1,6 +1,5 @@
use serde_json::Deserializer;

use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Write;
use std::path::Path;
@@ -37,9 +36,10 @@ impl AuthController {
            return Ok(());
        }

        let reader = BufReader::new(File::open(&keys_file_path)?);
        for key in Deserializer::from_reader(reader).into_iter() {
            store.put_api_key(key?)?;
        let mut reader = BufReader::new(File::open(&keys_file_path)?).lines();
        while let Some(key) = reader.next().transpose()? {
            let key = serde_json::from_str(&key)?;
            store.put_api_key(key)?;
        }

        Ok(())
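The v0.29 side streams whitespace-separated JSON values straight off the reader instead of assuming one key per line. A standalone sketch of that pattern (load_keys is a hypothetical helper, deserializing into serde_json::Value for brevity):

use serde_json::Deserializer;
use std::fs::File;
use std::io::BufReader;

fn load_keys(path: &str) -> Result<Vec<serde_json::Value>, Box<dyn std::error::Error>> {
    let reader = BufReader::new(File::open(path)?);
    let mut keys = Vec::new();
    // into_iter() yields one Result per complete JSON value in the stream.
    for key in Deserializer::from_reader(reader).into_iter::<serde_json::Value>() {
        keys.push(key?);
    }
    Ok(keys)
}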
@@ -1,7 +1,7 @@
use std::error::Error;

use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::internal_error;
use meilisearch_error::ErrorCode;
use meilisearch_error::{internal_error, Code};
use serde_json::Value;

pub type Result<T> = std::result::Result<T, AuthControllerError>;
@@ -18,24 +18,14 @@ pub enum AuthControllerError {
    InvalidApiKeyExpiresAt(Value),
    #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
    InvalidApiKeyDescription(Value),
    #[error(
        "`name` field value `{0}` is invalid. It should be a string or specified as a null value."
    )]
    InvalidApiKeyName(Value),
    #[error("`uid` field value `{0}` is invalid. It should be a valid UUID v4 string or omitted.")]
    InvalidApiKeyUid(Value),
    #[error("API key `{0}` not found.")]
    ApiKeyNotFound(String),
    #[error("`uid` field value `{0}` is already an existing API key.")]
    ApiKeyAlreadyExists(String),
    #[error("The `{0}` field cannot be modified for the given resource.")]
    ImmutableField(String),
    #[error("Internal error: {0}")]
    Internal(Box<dyn Error + Send + Sync + 'static>),
}

internal_error!(
    AuthControllerError: milli::heed::Error,
    AuthControllerError: heed::Error,
    std::io::Error,
    serde_json::Error,
    std::str::Utf8Error
@@ -49,11 +39,7 @@ impl ErrorCode for AuthControllerError {
            Self::InvalidApiKeyIndexes(_) => Code::InvalidApiKeyIndexes,
            Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt,
            Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription,
            Self::InvalidApiKeyName(_) => Code::InvalidApiKeyName,
            Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound,
            Self::InvalidApiKeyUid(_) => Code::InvalidApiKeyUid,
            Self::ApiKeyAlreadyExists(_) => Code::ApiKeyAlreadyExists,
            Self::ImmutableField(_) => Code::ImmutableField,
            Self::Internal(_) => Code::Internal,
        }
    }
@@ -1,25 +1,20 @@
use crate::action::Action;
use crate::error::{AuthControllerError, Result};
use crate::store::KeyId;

use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::star_or::StarOr;
use crate::store::{KeyId, KEY_ID_LENGTH};
use rand::Rng;
use serde::{Deserialize, Serialize};
use serde_json::{from_value, Value};
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};
use uuid::Uuid;

#[derive(Debug, Deserialize, Serialize)]
pub struct Key {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    pub uid: KeyId,
    pub id: KeyId,
    pub actions: Vec<Action>,
    pub indexes: Vec<StarOr<IndexUid>>,
    pub indexes: Vec<String>,
    #[serde(with = "time::serde::rfc3339::option")]
    pub expires_at: Option<OffsetDateTime>,
    #[serde(with = "time::serde::rfc3339")]
@@ -30,27 +25,16 @@ pub struct Key {

impl Key {
    pub fn create_from_value(value: Value) -> Result<Self> {
        let name = match value.get("name") {
            None | Some(Value::Null) => None,
            Some(des) => from_value(des.clone())
                .map(Some)
                .map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone()))?,
        };

        let description = match value.get("description") {
            None | Some(Value::Null) => None,
            Some(des) => from_value(des.clone())
                .map(Some)
                .map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?,
            Some(Value::Null) => None,
            Some(des) => Some(
                from_value(des.clone())
                    .map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?,
            ),
            None => None,
        };

        let uid = value.get("uid").map_or_else(
            || Ok(Uuid::new_v4()),
            |uid| {
                from_value(uid.clone())
                    .map_err(|_| AuthControllerError::InvalidApiKeyUid(uid.clone()))
            },
        )?;
        let id = generate_id();

        let actions = value
            .get("actions")
@@ -77,9 +61,8 @@ impl Key {
        let updated_at = created_at;

        Ok(Self {
            name,
            description,
            uid,
            id,
            actions,
            indexes,
            expires_at,
@@ -95,34 +78,20 @@ impl Key {
            self.description = des?;
        }

        if let Some(des) = value.get("name") {
            let des = from_value(des.clone())
                .map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone()));
            self.name = des?;
        if let Some(act) = value.get("actions") {
            let act = from_value(act.clone())
                .map_err(|_| AuthControllerError::InvalidApiKeyActions(act.clone()));
            self.actions = act?;
        }

        if value.get("uid").is_some() {
            return Err(AuthControllerError::ImmutableField("uid".to_string()));
        if let Some(ind) = value.get("indexes") {
            let ind = from_value(ind.clone())
                .map_err(|_| AuthControllerError::InvalidApiKeyIndexes(ind.clone()));
            self.indexes = ind?;
        }

        if value.get("actions").is_some() {
            return Err(AuthControllerError::ImmutableField("actions".to_string()));
        }

        if value.get("indexes").is_some() {
            return Err(AuthControllerError::ImmutableField("indexes".to_string()));
        }

        if value.get("expiresAt").is_some() {
            return Err(AuthControllerError::ImmutableField("expiresAt".to_string()));
        }

        if value.get("createdAt").is_some() {
            return Err(AuthControllerError::ImmutableField("createdAt".to_string()));
        }

        if value.get("updatedAt").is_some() {
            return Err(AuthControllerError::ImmutableField("updatedAt".to_string()));
        if let Some(exp) = value.get("expiresAt") {
            self.expires_at = parse_expiration_date(exp)?;
        }

        self.updated_at = OffsetDateTime::now_utc();
@@ -132,13 +101,11 @@ impl Key {

    pub(crate) fn default_admin() -> Self {
        let now = OffsetDateTime::now_utc();
        let uid = Uuid::new_v4();
        Self {
            name: Some("Default Admin API Key".to_string()),
            description: Some("Use it for anything that is not a search operation. Caution! Do not expose it on a public frontend".to_string()),
            uid,
            description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()),
            id: generate_id(),
            actions: vec![Action::All],
            indexes: vec![StarOr::Star],
            indexes: vec!["*".to_string()],
            expires_at: None,
            created_at: now,
            updated_at: now,
@@ -147,13 +114,13 @@ impl Key {

    pub(crate) fn default_search() -> Self {
        let now = OffsetDateTime::now_utc();
        let uid = Uuid::new_v4();
        Self {
            name: Some("Default Search API Key".to_string()),
            description: Some("Use it to search from the frontend".to_string()),
            uid,
            description: Some(
                "Default Search API Key (Use it to search from the frontend)".to_string(),
            ),
            id: generate_id(),
            actions: vec![Action::Search],
            indexes: vec![StarOr::Star],
            indexes: vec!["*".to_string()],
            expires_at: None,
            created_at: now,
            updated_at: now,
@@ -161,6 +128,19 @@ impl Key {
    }
}
/// Generate a printable key id of KEY_ID_LENGTH (8) characters using thread_rng.
fn generate_id() -> [u8; KEY_ID_LENGTH] {
    const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";

    let mut rng = rand::thread_rng();
    let mut bytes = [0; KEY_ID_LENGTH];
    for byte in bytes.iter_mut() {
        *byte = CHARSET[rng.gen_range(0..CHARSET.len())];
    }

    bytes
}

fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
    match value {
        Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
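parse_expiration_date is cut off by the diff here; its RFC 3339 branch behaves like this minimal sketch using the time crate (error mapping omitted):

use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

fn parse_rfc3339(value: &str) -> Option<OffsetDateTime> {
    // "2024-01-01T00:00:00Z" parses; a bare date or garbage returns None.
    OffsetDateTime::parse(value, &Rfc3339).ok()
}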
@@ -5,20 +5,18 @@ mod key
mod store;

use std::collections::{HashMap, HashSet};
use std::ops::Deref;
use std::path::Path;
use std::str::from_utf8;
use std::sync::Arc;

use serde::{Deserialize, Serialize};
use serde_json::Value;
use sha2::{Digest, Sha256};
use time::OffsetDateTime;
use uuid::Uuid;

pub use action::{actions, Action};
use error::{AuthControllerError, Result};
pub use key::Key;
use meilisearch_types::star_or::StarOr;
use store::generate_key_as_hexa;
pub use store::open_auth_store_env;
use store::HeedAuthStore;

@@ -44,74 +42,62 @@ impl AuthController {

    pub fn create_key(&self, value: Value) -> Result<Key> {
        let key = Key::create_from_value(value)?;
        match self.store.get_api_key(key.uid)? {
            Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(
                key.uid.to_string(),
            )),
            None => self.store.put_api_key(key),
        }
        self.store.put_api_key(key)
    }

    pub fn update_key(&self, uid: Uuid, value: Value) -> Result<Key> {
        let mut key = self.get_key(uid)?;
    pub fn update_key(&self, key: impl AsRef<str>, value: Value) -> Result<Key> {
        let mut key = self.get_key(key)?;
        key.update_from_value(value)?;
        self.store.put_api_key(key)
    }

    pub fn get_key(&self, uid: Uuid) -> Result<Key> {
    pub fn get_key(&self, key: impl AsRef<str>) -> Result<Key> {
        self.store
            .get_api_key(uid)?
            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))
    }

    pub fn get_optional_uid_from_encoded_key(&self, encoded_key: &[u8]) -> Result<Option<Uuid>> {
        match &self.master_key {
            Some(master_key) => self
                .store
                .get_uid_from_encoded_key(encoded_key, master_key.as_bytes()),
            None => Ok(None),
        }
    }

    pub fn get_uid_from_encoded_key(&self, encoded_key: &str) -> Result<Uuid> {
        self.get_optional_uid_from_encoded_key(encoded_key.as_bytes())?
            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(encoded_key.to_string()))
            .get_api_key(&key)?
            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))
    }

    pub fn get_key_filters(
        &self,
        uid: Uuid,
        key: impl AsRef<str>,
        search_rules: Option<SearchRules>,
    ) -> Result<AuthFilter> {
        let mut filters = AuthFilter::default();
        let key = self
            .store
            .get_api_key(uid)?
            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))?;
        if self
            .master_key
            .as_ref()
            .map_or(false, |master_key| master_key != key.as_ref())
        {
            let key = self
                .store
                .get_api_key(&key)?
                .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))?;

        if !key.indexes.iter().any(|i| i == &StarOr::Star) {
            filters.search_rules = match search_rules {
                // Intersect search_rules with parent key authorized indexes.
                Some(search_rules) => SearchRules::Map(
                    key.indexes
                        .into_iter()
                        .filter_map(|index| {
                            search_rules.get_index_search_rules(index.deref()).map(
                                |index_search_rules| {
                                    (String::from(index), Some(index_search_rules))
                                },
                            )
                        })
                        .collect(),
                ),
                None => SearchRules::Set(key.indexes.into_iter().map(String::from).collect()),
            };
        } else if let Some(search_rules) = search_rules {
            filters.search_rules = search_rules;
            if !key.indexes.iter().any(|i| i.as_str() == "*") {
                filters.search_rules = match search_rules {
                    // Intersect search_rules with parent key authorized indexes.
                    Some(search_rules) => SearchRules::Map(
                        key.indexes
                            .into_iter()
                            .filter_map(|index| {
                                search_rules
                                    .get_index_search_rules(&index)
                                    .map(|index_search_rules| (index, Some(index_search_rules)))
                            })
                            .collect(),
                    ),
                    None => SearchRules::Set(key.indexes.into_iter().collect()),
                };
            } else if let Some(search_rules) = search_rules {
                filters.search_rules = search_rules;
            }

            filters.allow_index_creation = key
                .actions
                .iter()
                .any(|&action| action == Action::IndexesAdd || action == Action::All);
        }

        filters.allow_index_creation = self.is_key_authorized(uid, Action::IndexesAdd, None)?;

        Ok(filters)
    }

@@ -119,11 +105,13 @@ impl AuthController {
        self.store.list_api_keys()
    }

    pub fn delete_key(&self, uid: Uuid) -> Result<()> {
        if self.store.delete_api_key(uid)? {
    pub fn delete_key(&self, key: impl AsRef<str>) -> Result<()> {
        if self.store.delete_api_key(&key)? {
            Ok(())
        } else {
            Err(AuthControllerError::ApiKeyNotFound(uid.to_string()))
            Err(AuthControllerError::ApiKeyNotFound(
                key.as_ref().to_string(),
            ))
        }
    }

@@ -133,32 +121,32 @@ impl AuthController {

    /// Generate a valid key from a key id using the current master key.
    /// Returns None if no master key has been set.
    pub fn generate_key(&self, uid: Uuid) -> Option<String> {
    pub fn generate_key(&self, id: &str) -> Option<String> {
        self.master_key
            .as_ref()
            .map(|master_key| generate_key_as_hexa(uid, master_key.as_bytes()))
            .map(|master_key| generate_key(master_key.as_bytes(), id))
    }

    /// Check if the provided key is authorized to make a specific action
    /// without checking if the key is valid.
    pub fn is_key_authorized(
        &self,
        uid: Uuid,
        key: &[u8],
        action: Action,
        index: Option<&str>,
    ) -> Result<bool> {
        match self
            .store
            // check if the key has access to all indexes.
            .get_expiration_date(uid, action, None)?
            .get_expiration_date(key, action, None)?
            .or(match index {
                // else check if the key has access to the requested index.
                Some(index) => {
                    self.store
                        .get_expiration_date(uid, action, Some(index.as_bytes()))?
                        .get_expiration_date(key, action, Some(index.as_bytes()))?
                }
                // or to any index if no index has been requested.
                None => self.store.prefix_first_expiration_date(uid, action)?,
                None => self.store.prefix_first_expiration_date(key, action)?,
            }) {
            // check expiration date.
            Some(Some(exp)) => Ok(OffsetDateTime::now_utc() < exp),
@@ -168,6 +156,29 @@ impl AuthController {
            None => Ok(false),
        }
    }

    /// Check if the provided key is valid
    /// without checking if the key is authorized to make a specific action.
    pub fn is_key_valid(&self, key: &[u8]) -> Result<bool> {
        if let Some(id) = self.store.get_key_id(key) {
            let id = from_utf8(&id)?;
            if let Some(generated) = self.generate_key(id) {
                return Ok(generated.as_bytes() == key);
            }
        }

        Ok(false)
    }

    /// Check if the provided key is valid
    /// and is authorized to make a specific action.
    pub fn authenticate(&self, key: &[u8], action: Action, index: Option<&str>) -> Result<bool> {
        if self.is_key_authorized(key, action, index)? {
            self.is_key_valid(key)
        } else {
            Ok(false)
        }
    }
}

pub struct AuthFilter {
@@ -247,6 +258,12 @@ pub struct IndexSearchRules {
    pub filter: Option<serde_json::Value>,
}

fn generate_key(master_key: &[u8], keyid: &str) -> String {
    let key = [keyid.as_bytes(), master_key].concat();
    let sha = Sha256::digest(&key);
    format!("{}{:x}", keyid, sha)
}
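So on the v0.26 side the visible API key is simply the 8-character key id followed by hex(SHA-256(keyid || master_key)). A self-contained sketch of the same derivation (derive_api_key is a hypothetical helper name):

use sha2::{Digest, Sha256};

fn derive_api_key(master_key: &[u8], keyid: &str) -> String {
    let input = [keyid.as_bytes(), master_key].concat();
    let sha = Sha256::digest(&input);
    // The digest's LowerHex impl prints the 32 bytes as 64 hex characters.
    format!("{}{:x}", keyid, sha)
}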

fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
    store.put_api_key(Key::default_admin())?;
    store.put_api_key(Key::default_search())?;
@@ -1,33 +1,27 @@
use enum_iterator::IntoEnumIterator;
use std::borrow::Cow;
use std::cmp::Reverse;
use std::collections::HashSet;
use std::convert::TryFrom;
use std::convert::TryInto;
use std::fs::create_dir_all;
use std::ops::Deref;
use std::path::Path;
use std::str;
use std::sync::Arc;

use enum_iterator::IntoEnumIterator;
use hmac::{Hmac, Mac};
use meilisearch_types::star_or::StarOr;
use milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
use milli::heed::{Database, Env, EnvOpenOptions, RwTxn};
use sha2::Sha256;
use heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
use heed::{Database, Env, EnvOpenOptions, RwTxn};
use time::OffsetDateTime;
use uuid::fmt::Hyphenated;
use uuid::Uuid;

use super::error::Result;
use super::{Action, Key};

const AUTH_STORE_SIZE: usize = 1_073_741_824; //1GiB
pub const KEY_ID_LENGTH: usize = 8;
const AUTH_DB_PATH: &str = "auth";
const KEY_DB_NAME: &str = "api-keys";
const KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME: &str = "keyid-action-index-expiration";

pub type KeyId = Uuid;
pub type KeyId = [u8; KEY_ID_LENGTH];

#[derive(Clone)]
pub struct HeedAuthStore {
@@ -45,7 +39,7 @@ impl Drop for HeedAuthStore {
    }
}

pub fn open_auth_store_env(path: &Path) -> milli::heed::Result<milli::heed::Env> {
pub fn open_auth_store_env(path: &Path) -> heed::Result<heed::Env> {
    let mut options = EnvOpenOptions::new();
    options.map_size(AUTH_STORE_SIZE); // 1GB
    options.max_dbs(2);
@@ -79,73 +73,33 @@ impl HeedAuthStore {
    }

    pub fn put_api_key(&self, key: Key) -> Result<Key> {
        let uid = key.uid;
        let mut wtxn = self.env.write_txn()?;
        self.keys.put(&mut wtxn, &key.id, &key)?;

        self.keys.put(&mut wtxn, uid.as_bytes(), &key)?;

        let id = key.id;
        // delete key from inverted database before refilling it.
        self.delete_key_from_inverted_db(&mut wtxn, &uid)?;
        self.delete_key_from_inverted_db(&mut wtxn, &id)?;
        // create inverted database.
        let db = self.action_keyid_index_expiration;

        let mut actions = HashSet::new();
        for action in &key.actions {
            match action {
                Action::All => actions.extend(Action::into_enum_iter()),
                Action::DocumentsAll => {
                    actions.extend(
                        [
                            Action::DocumentsGet,
                            Action::DocumentsDelete,
                            Action::DocumentsAdd,
                        ]
                        .iter(),
                    );
                }
                Action::IndexesAll => {
                    actions.extend(
                        [
                            Action::IndexesAdd,
                            Action::IndexesDelete,
                            Action::IndexesGet,
                            Action::IndexesUpdate,
                        ]
                        .iter(),
                    );
                }
                Action::SettingsAll => {
                    actions.extend([Action::SettingsGet, Action::SettingsUpdate].iter());
                }
                Action::DumpsAll => {
                    actions.insert(Action::DumpsCreate);
                }
                Action::TasksAll => {
                    actions.insert(Action::TasksGet);
                }
                Action::StatsAll => {
                    actions.insert(Action::StatsGet);
                }
                Action::MetricsAll => {
                    actions.insert(Action::MetricsGet);
                }
                other => {
                    actions.insert(*other);
                }
            }
        }
        let actions = if key.actions.contains(&Action::All) {
            // if key.actions contains All, we iterate over all actions.
            Action::into_enum_iter().collect()
        } else {
            key.actions.clone()
        };

        let no_index_restriction = key.indexes.contains(&StarOr::Star);
        let no_index_restriction = key.indexes.contains(&"*".to_owned());
        for action in actions {
            if no_index_restriction {
                // If there is no index restriction we put None.
                db.put(&mut wtxn, &(&uid, &action, None), &key.expires_at)?;
                db.put(&mut wtxn, &(&id, &action, None), &key.expires_at)?;
            } else {
                // else we create a key for each index.
                for index in key.indexes.iter() {
                    db.put(
                        &mut wtxn,
                        &(&uid, &action, Some(index.deref().as_bytes())),
                        &(&id, &action, Some(index.as_bytes())),
                        &key.expires_at,
                    )?;
                }
@@ -157,42 +111,24 @@ impl HeedAuthStore {
        Ok(key)
    }

    pub fn get_api_key(&self, uid: Uuid) -> Result<Option<Key>> {
    pub fn get_api_key(&self, key: impl AsRef<str>) -> Result<Option<Key>> {
        let rtxn = self.env.read_txn()?;
        self.keys.get(&rtxn, uid.as_bytes()).map_err(|e| e.into())
        match self.get_key_id(key.as_ref().as_bytes()) {
            Some(id) => self.keys.get(&rtxn, &id).map_err(|e| e.into()),
            None => Ok(None),
        }
    }

    pub fn get_uid_from_encoded_key(
        &self,
        encoded_key: &[u8],
        master_key: &[u8],
    ) -> Result<Option<Uuid>> {
        let rtxn = self.env.read_txn()?;
        let uid = self
            .keys
            .remap_data_type::<DecodeIgnore>()
            .iter(&rtxn)?
            .filter_map(|res| match res {
                Ok((uid, _)) => {
                    let (uid, _) = try_split_array_at(uid)?;
                    let uid = Uuid::from_bytes(*uid);
                    if generate_key_as_hexa(uid, master_key).as_bytes() == encoded_key {
                        Some(uid)
                    } else {
                        None
                    }
                }
                Err(_) => None,
            })
            .next();

        Ok(uid)
    }

    pub fn delete_api_key(&self, uid: Uuid) -> Result<bool> {
    pub fn delete_api_key(&self, key: impl AsRef<str>) -> Result<bool> {
        let mut wtxn = self.env.write_txn()?;
        let existing = self.keys.delete(&mut wtxn, uid.as_bytes())?;
        self.delete_key_from_inverted_db(&mut wtxn, &uid)?;
        let existing = match self.get_key_id(key.as_ref().as_bytes()) {
            Some(id) => {
                let existing = self.keys.delete(&mut wtxn, &id)?;
                self.delete_key_from_inverted_db(&mut wtxn, &id)?;
                existing
            }
            None => false,
        };
        wtxn.commit()?;

        Ok(existing)
@@ -211,37 +147,49 @@ impl HeedAuthStore {

    pub fn get_expiration_date(
        &self,
        uid: Uuid,
        key: &[u8],
        action: Action,
        index: Option<&[u8]>,
    ) -> Result<Option<Option<OffsetDateTime>>> {
        let rtxn = self.env.read_txn()?;
        let tuple = (&uid, &action, index);
        Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
        match self.get_key_id(key) {
            Some(id) => {
                let tuple = (&id, &action, index);
                Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
            }
            None => Ok(None),
        }
    }

    pub fn prefix_first_expiration_date(
        &self,
        uid: Uuid,
        key: &[u8],
        action: Action,
    ) -> Result<Option<Option<OffsetDateTime>>> {
        let rtxn = self.env.read_txn()?;
        let tuple = (&uid, &action, None);
        let exp = self
            .action_keyid_index_expiration
            .prefix_iter(&rtxn, &tuple)?
            .next()
            .transpose()?
            .map(|(_, expiration)| expiration);
        match self.get_key_id(key) {
            Some(id) => {
                let tuple = (&id, &action, None);
                Ok(self
                    .action_keyid_index_expiration
                    .prefix_iter(&rtxn, &tuple)?
                    .next()
                    .transpose()?
                    .map(|(_, expiration)| expiration))
            }
            None => Ok(None),
        }
    }

        Ok(exp)
    pub fn get_key_id(&self, key: &[u8]) -> Option<KeyId> {
        try_split_array_at::<_, KEY_ID_LENGTH>(key).map(|(id, _)| *id)
    }

    fn delete_key_from_inverted_db(&self, wtxn: &mut RwTxn, key: &KeyId) -> Result<()> {
        let mut iter = self
            .action_keyid_index_expiration
            .remap_types::<ByteSlice, DecodeIgnore>()
            .prefix_iter_mut(wtxn, key.as_bytes())?;
            .prefix_iter_mut(wtxn, key)?;
        while iter.next().transpose()?.is_some() {
            // safety: we don't keep references from inside the LMDB database.
            unsafe { iter.del_current()? };
@@ -252,32 +200,31 @@ impl HeedAuthStore {
    }

/// Codec allowing to retrieve the expiration date of an action,
/// optionally on a specific index, for a given key.
/// optionnally on a spcific index, for a given key.
pub struct KeyIdActionCodec;

impl<'a> milli::heed::BytesDecode<'a> for KeyIdActionCodec {
impl<'a> heed::BytesDecode<'a> for KeyIdActionCodec {
    type DItem = (KeyId, Action, Option<&'a [u8]>);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (key_id_bytes, action_bytes) = try_split_array_at(bytes)?;
        let (key_id, action_bytes) = try_split_array_at(bytes)?;
        let (action_bytes, index) = match try_split_array_at(action_bytes)? {
            (action, []) => (action, None),
            (action, index) => (action, Some(index)),
        };
        let key_id = Uuid::from_bytes(*key_id_bytes);
        let action = Action::from_repr(u8::from_be_bytes(*action_bytes))?;

        Some((key_id, action, index))
        Some((*key_id, action, index))
    }
}

impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec {
impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
    type EItem = (&'a KeyId, &'a Action, Option<&'a [u8]>);

    fn bytes_encode((key_id, action, index): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::new();

        bytes.extend_from_slice(key_id.as_bytes());
        bytes.extend_from_slice(*key_id);
        let action_bytes = u8::to_be_bytes(action.repr());
        bytes.extend_from_slice(&action_bytes);
        if let Some(index) = index {
@@ -288,19 +235,6 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec {
    }
}
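The codec above boils down to a fixed byte layout: the key id, one big-endian action byte, then the optional index bytes. As a plain function (illustration only, using the v0.26 layout with its 8-byte ids):

fn encode_key_id_action(key_id: &[u8; 8], action_repr: u8, index: Option<&[u8]>) -> Vec<u8> {
    let mut bytes = Vec::new();
    bytes.extend_from_slice(key_id);
    bytes.extend_from_slice(&action_repr.to_be_bytes());
    if let Some(index) = index {
        bytes.extend_from_slice(index);
    }
    bytes
}

Because the key id and action always come first, prefix iteration over (id, action) finds every index entry for that pair, which is exactly what prefix_first_expiration_date relies on.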

pub fn generate_key_as_hexa(uid: Uuid, master_key: &[u8]) -> String {
    // format uid as hyphenated allowing user to generate their own keys.
    let mut uid_buffer = [0; Hyphenated::LENGTH];
    let uid = uid.hyphenated().encode_lower(&mut uid_buffer);

    // new_from_slice never fails: HMAC accepts keys of any length.
    let mut mac = Hmac::<Sha256>::new_from_slice(master_key).unwrap();
    mac.update(uid.as_bytes());

    let result = mac.finalize();
    format!("{:x}", result.into_bytes())
}
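On the v0.29 side the key material is an HMAC rather than a bare hash, so a client that knows the master key and a key's uid can recompute the API key offline. A minimal sketch of the same derivation (key_from_uid is a hypothetical helper; the uid shown is made up):

use hmac::{Hmac, Mac};
use sha2::Sha256;

fn key_from_uid(uid_hyphenated: &str, master_key: &[u8]) -> String {
    // HMAC accepts keys of any length, so new_from_slice cannot fail here.
    let mut mac = Hmac::<Sha256>::new_from_slice(master_key).unwrap();
    mac.update(uid_hyphenated.as_bytes());
    format!("{:x}", mac.finalize().into_bytes())
}

// key_from_uid("ac70cd97-fd9f-4f4c-dcdf-9d3d0b0d07d9", b"MASTER_KEY")
// yields the same 64-hex-character key the server stores.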
/// Divides one slice into two at an index, returns `None` if mid is out of bounds.
pub fn try_split_at<T>(slice: &[T], mid: usize) -> Option<(&[T], &[T])> {
    if mid <= slice.len() {
@@ -1,15 +1,15 @@
[package]
name = "meilisearch-types"
version = "0.29.2"
name = "meilisearch-error"
version = "0.26.1"
authors = ["marin <postma.marin@protonmail.com>"]
edition = "2021"

[dependencies]
actix-web = { version = "4.0.1", default-features = false }
actix-web = { version = "4", default-features = false }
proptest = { version = "1.0.0", optional = true }
proptest-derive = { version = "0.3.0", optional = true }
serde = { version = "1.0.136", features = ["derive"] }
serde_json = "1.0.79"
serde = { version = "1.0.130", features = ["derive"] }
serde_json = "1.0.69"

[features]
test-traits = ["proptest", "proptest-derive"]
@@ -73,12 +73,12 @@ impl aweb::error::ResponseError for ResponseError {
pub trait ErrorCode: std::error::Error {
    fn error_code(&self) -> Code;

    /// returns the HTTP status code associated with the error
    /// returns the HTTP status code ascociated with the error
    fn http_status(&self) -> StatusCode {
        self.error_code().http()
    }

    /// returns the doc url associated with the error
    /// returns the doc url ascociated with the error
    fn error_url(&self) -> String {
        self.error_code().url()
    }
@@ -120,7 +120,6 @@ pub enum Code {
    IndexAlreadyExists,
    IndexNotFound,
    InvalidIndexUid,
    InvalidMinWordLengthForTypo,

    // invalid state error
    InvalidState,
@@ -166,14 +165,10 @@ pub enum Code {
    InvalidApiKeyIndexes,
    InvalidApiKeyExpiresAt,
    InvalidApiKeyDescription,
    InvalidApiKeyName,
    InvalidApiKeyUid,
    ImmutableField,
    ApiKeyAlreadyExists,
}

impl Code {
    /// associate a `Code` variant to the actual ErrCode
    /// ascociate a `Code` variant to the actual ErrCode
    fn err_code(&self) -> ErrCode {
        use Code::*;

@@ -276,17 +271,10 @@ impl Code {
            InvalidApiKeyDescription => {
                ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST)
            }
            InvalidApiKeyName => ErrCode::invalid("invalid_api_key_name", StatusCode::BAD_REQUEST),
            InvalidApiKeyUid => ErrCode::invalid("invalid_api_key_uid", StatusCode::BAD_REQUEST),
            ApiKeyAlreadyExists => ErrCode::invalid("api_key_already_exists", StatusCode::CONFLICT),
            ImmutableField => ErrCode::invalid("immutable_field", StatusCode::BAD_REQUEST),
            InvalidMinWordLengthForTypo => {
                ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST)
            }
        }
    }

    /// return the HTTP status code associated with the `Code`
    /// return the HTTP status code ascociated with the `Code`
    fn http(&self) -> StatusCode {
        self.err_code().status_code
    }
@@ -301,7 +289,7 @@ impl Code {
        self.err_code().error_type.to_string()
    }

    /// return the doc url associated with the error
    /// return the doc url ascociated with the error
    fn url(&self) -> String {
        format!("https://docs.meilisearch.com/errors#{}", self.name())
    }
@@ -4,95 +4,90 @@ description = "Meilisearch HTTP server"
edition = "2021"
license = "MIT"
name = "meilisearch-http"
version = "0.29.2"
version = "0.26.1"

[[bin]]
name = "meilisearch"
path = "src/main.rs"

[build-dependencies]
anyhow = { version = "1.0.62", optional = true }
cargo_toml = { version = "0.11.4", optional = true }
static-files = { version = "0.2.1", optional = true }
anyhow = { version = "1.0.43", optional = true }
cargo_toml = { version = "0.9", optional = true }
hex = { version = "0.4.3", optional = true }
reqwest = { version = "0.11.9", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
sha-1 = { version = "0.10.0", optional = true }
static-files = { version = "0.2.3", optional = true }
tempfile = { version = "3.3.0", optional = true }
vergen = { version = "7.0.0", default-features = false, features = ["git"] }
reqwest = { version = "0.11.4", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
sha-1 = { version = "0.9.8", optional = true }
tempfile = { version = "3.2.0", optional = true }
vergen = { version = "5.1.15", default-features = false, features = ["git"] }
zip = { version = "0.5.13", optional = true }

[dependencies]
actix-cors = "0.6.1"
actix-web = { version = "4.0.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }
actix-cors = "0.6"
actix-web = { version = "4", features = ["rustls"] }
actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
anyhow = { version = "1.0.62", features = ["backtrace"] }
async-stream = "0.3.3"
async-trait = "0.1.52"
anyhow = { version = "1.0.43", features = ["backtrace"] }
arc-swap = "1.3.2"
async-stream = "0.3.2"
async-trait = "0.1.51"
bstr = "0.2.17"
byte-unit = { version = "4.0.14", default-features = false, features = ["std", "serde"] }
byte-unit = { version = "4.0.12", default-features = false, features = ["std", "serde"] }
bytes = "1.1.0"
clap = { version = "3.1.6", features = ["derive", "env"] }
crossbeam-channel = "0.5.2"
crossbeam-channel = "0.5.1"
either = "1.6.1"
env_logger = "0.9.0"
flate2 = "1.0.22"
flate2 = "1.0.21"
fst = "0.4.7"
futures = "0.3.21"
futures-util = "0.3.21"
http = "0.2.6"
indexmap = { version = "1.8.0", features = ["serde-1"] }
itertools = "0.10.3"
jsonwebtoken = "8.0.1"
futures = "0.3.17"
futures-util = "0.3.17"
heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
http = "0.2.4"
indexmap = { version = "1.7.0", features = ["serde-1"] }
iso8601-duration = "0.1.0"
itertools = "0.10.1"
jsonwebtoken = "7"
log = "0.4.14"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
meilisearch-error = { path = "../meilisearch-error" }
meilisearch-lib = { path = "../meilisearch-lib" }
mimalloc = { version = "0.1.29", default-features = false }
mime = "0.3.16"
num_cpus = "1.13.1"
num_cpus = "1.13.0"
obkv = "0.2.0"
once_cell = "1.10.0"
parking_lot = "0.12.0"
pin-project-lite = "0.2.8"
once_cell = "1.8.0"
parking_lot = "0.11.2"
platform-dirs = "0.3.0"
rand = "0.8.5"
rand = "0.8.4"
rayon = "1.5.1"
regex = "1.5.5"
reqwest = { version = "0.11.4", features = ["rustls-tls", "json"], default-features = false }
rustls = "0.20.4"
rustls-pemfile = "0.3.0"
regex = "1.5.4"
rustls = "0.20.2"
rustls-pemfile = "0.2"
segment = { version = "0.2.0", optional = true }
serde = { version = "1.0.136", features = ["derive"] }
serde-cs = "0.2.3"
serde_json = { version = "1.0.85", features = ["preserve_order"] }
sha2 = "0.10.2"
siphasher = "0.3.10"
slice-group-by = "0.3.0"
static-files = { version = "0.2.3", optional = true }
sysinfo = "0.23.5"
tar = "0.4.38"
tempfile = "3.3.0"
thiserror = "1.0.30"
serde = { version = "1.0.130", features = ["derive"] }
serde_json = { version = "1.0.67", features = ["preserve_order"] }
sha2 = "0.9.6"
siphasher = "0.3.7"
slice-group-by = "0.2.6"
static-files = { version = "0.2.1", optional = true }
clap = { version = "3.0", features = ["derive", "env"] }
sysinfo = "0.20.2"
tar = "0.4.37"
tempfile = "3.2.0"
thiserror = "1.0.28"
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
tokio = { version = "1.17.0", features = ["full"] }
tokio-stream = "0.1.8"
uuid = { version = "1.1.2", features = ["serde", "v4"] }
tokio = { version = "1.11.0", features = ["full"] }
tokio-stream = "0.1.7"
uuid = { version = "0.8.2", features = ["serde"] }
walkdir = "2.3.2"
prometheus = { version = "0.13.0", features = ["process"], optional = true }
lazy_static = "1.4.0"
pin-project-lite = "0.2.8"

[dev-dependencies]
actix-rt = "2.7.0"
actix-rt = "2.2.0"
assert-json-diff = "2.0.1"
manifest-dir-macros = "0.1.14"
maplit = "1.0.2"
paste = "1.0.5"
serde_url_params = "0.2.1"
urlencoding = "2.1.0"
yaup = "0.2.0"

[features]
default = ["analytics", "mini-dashboard"]
metrics = ["prometheus"]
analytics = ["segment"]
mini-dashboard = [
    "actix-web-static-files",
    "static-files",
@@ -104,7 +99,12 @@ mini-dashboard = [
    "tempfile",
    "zip",
]
analytics = ["segment"]
default = ["analytics", "mini-dashboard"]

[target.'cfg(target_os = "linux")'.dependencies]
tikv-jemallocator = "0.4.1"

[package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.2/build.zip"
sha1 = "c69feffc6b590e38a46981a85c47f48905d4082a"
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.9/build.zip"
sha1 = "b1833c3e5dc6b5d9d519ae4834935ae6c8a47024"
@@ -61,7 +61,7 @@ pub trait Analytics: Sync + Send {
    /// The method used to publish most analytics that do not need to be batched every hour
    fn publish(&self, event_name: String, send: Value, request: Option<&HttpRequest>);

    /// This method should be called to aggregate a get search
    /// This method should be called to aggergate a get search
    fn get_search(&self, aggregate: SearchAggregator);

    /// This method should be called to aggregate a post search

@@ -8,10 +8,7 @@ use actix_web::http::header::USER_AGENT;
use actix_web::HttpRequest;
use http::header::CONTENT_TYPE;
use meilisearch_auth::SearchRules;
use meilisearch_lib::index::{
    SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
    DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
};
use meilisearch_lib::index::{SearchQuery, SearchResult};
use meilisearch_lib::index_controller::Stats;
use meilisearch_lib::MeiliSearch;
use once_cell::sync::Lazy;
@@ -31,8 +28,6 @@ use crate::Opt;

use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};

const ANALYTICS_HEADER: &str = "X-Meilisearch-Client";

/// Write the instance-uid in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-instance-uid`. Ignore the errors.
fn write_user_id(db_path: &Path, user_id: &str) {
    let _ = fs::write(db_path.join("instance-uid"), user_id.as_bytes());
@@ -50,8 +45,7 @@ const SEGMENT_API_KEY: &str = "P3FWhhEsJiEDCuEHpmcN9DHcK4hVfBvb";
pub fn extract_user_agents(request: &HttpRequest) -> Vec<String> {
    request
        .headers()
        .get(ANALYTICS_HEADER)
        .or_else(|| request.headers().get(USER_AGENT))
        .get(USER_AGENT)
        .map(|header| header.to_str().ok())
        .flatten()
        .unwrap_or("unknown")
@@ -81,19 +75,7 @@ impl SegmentAnalytics {
        let user_id = user_id.unwrap_or_else(|| Uuid::new_v4().to_string());
        write_user_id(&opt.db_path, &user_id);

        let client = reqwest::Client::builder()
            .connect_timeout(Duration::from_secs(10))
            .build();

        // if reqwest throws an error we won't be able to send analytics
        if client.is_err() {
            return super::MockAnalytics::new(opt);
        }

        let client = HttpClient::new(
            client.unwrap(),
            "https://telemetry.meilisearch.com".to_string(),
        );
        let client = HttpClient::default();
        let user = User::UserId { user_id };
        let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());

@@ -145,7 +127,11 @@ impl SegmentAnalytics {

impl super::Analytics for SegmentAnalytics {
    fn publish(&self, event_name: String, mut send: Value, request: Option<&HttpRequest>) {
        let user_agent = request.map(|req| extract_user_agents(req));
        let user_agent = request
            .map(|req| req.headers().get(USER_AGENT))
            .flatten()
            .map(|header| header.to_str().unwrap_or("unknown"))
            .map(|s| s.split(';').map(str::trim).collect::<Vec<&str>>());

        send["user-agent"] = json!(user_agent);
        let event = Track {
@@ -366,19 +352,9 @@ pub struct SearchAggregator {
    // The maximum number of terms in a q request
    max_terms_number: usize,

    // every time a search is done, we increment the counter linked to the used settings
    matching_strategy: HashMap<String, usize>,

    // pagination
    max_limit: usize,
    max_offset: usize,

    // formatting
    highlight_pre_tag: bool,
    highlight_post_tag: bool,
    crop_marker: bool,
    show_matches_position: bool,
    crop_length: bool,
}

impl SearchAggregator {
@@ -426,18 +402,9 @@ impl SearchAggregator {
            ret.max_terms_number = q.split_whitespace().count();
        }

        ret.matching_strategy
            .insert(format!("{:?}", query.matching_strategy), 1);

        ret.max_limit = query.limit;
        ret.max_offset = query.offset.unwrap_or_default();

        ret.highlight_pre_tag = query.highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG();
        ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG();
        ret.crop_marker = query.crop_marker != DEFAULT_CROP_MARKER();
        ret.crop_length = query.crop_length != DEFAULT_CROP_LENGTH();
        ret.show_matches_position = query.show_matches_position;

        ret
    }

@@ -482,20 +449,9 @@ impl SearchAggregator {
        }
        // q
        self.max_terms_number = self.max_terms_number.max(other.max_terms_number);

        for (key, value) in other.matching_strategy.into_iter() {
            let matching_strategy = self.matching_strategy.entry(key).or_insert(0);
            *matching_strategy = matching_strategy.saturating_add(value);
        }
        // pagination
        self.max_limit = self.max_limit.max(other.max_limit);
        self.max_offset = self.max_offset.max(other.max_offset);

        self.highlight_pre_tag |= other.highlight_pre_tag;
        self.highlight_post_tag |= other.highlight_post_tag;
        self.crop_marker |= other.crop_marker;
        self.show_matches_position |= other.show_matches_position;
        self.crop_length |= other.crop_length;
    }

    pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> {
@@ -506,7 +462,7 @@ impl SearchAggregator {
        let percentile_99th = 0.99 * (self.total_succeeded as f64 - 1.) + 1.;
        // we get all the values in a sorted manner
        let time_spent = self.time_spent.into_sorted_vec();
        // We are only interested by the slowest value of the 99th fastest results
        // We are only intersted by the slowest value of the 99th fastest results
        let time_spent = time_spent.get(percentile_99th as usize);

        let properties = json!({
@@ -528,19 +484,11 @@ impl SearchAggregator {
            },
            "q": {
                "max_terms_number": self.max_terms_number,
                "most_used_matching_strategy": self.matching_strategy.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
            },
            "pagination": {
                "max_limit": self.max_limit,
                "max_offset": self.max_offset,
            },
            "formatting": {
                "highlight_pre_tag": self.highlight_pre_tag,
                "highlight_post_tag": self.highlight_post_tag,
                "crop_marker": self.crop_marker,
                "show_matches_position": self.show_matches_position,
                "crop_length": self.crop_length,
            },
        });

        Some(Track {
@@ -586,8 +534,8 @@ impl DocumentsAggregator {
        let content_type = request
            .headers()
            .get(CONTENT_TYPE)
            .and_then(|s| s.to_str().ok())
            .unwrap_or("unknown")
            .map(|s| s.to_str().unwrap_or("unkown"))
            .unwrap()
            .to_string();
        ret.content_types.insert(content_type);
        ret.index_creation = index_creation;
@@ -603,13 +551,13 @@ impl DocumentsAggregator {

        self.updated |= other.updated;
        // we can't create a union because there is no `into_union` method
        for user_agent in other.user_agents {
        for user_agent in other.user_agents.into_iter() {
            self.user_agents.insert(user_agent);
        }
        for primary_key in other.primary_keys {
        for primary_key in other.primary_keys.into_iter() {
            self.primary_keys.insert(primary_key);
        }
        for content_type in other.content_types {
        for content_type in other.content_types.into_iter() {
            self.content_types.insert(content_type);
        }
        self.index_creation |= other.index_creation;
|
||||
use actix_web as aweb;
|
||||
use aweb::error::{JsonPayloadError, QueryPayloadError};
|
||||
use meilisearch_types::error::{Code, ErrorCode, ResponseError};
|
||||
use meilisearch_error::{Code, ErrorCode, ResponseError};
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum MeilisearchHttpError {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use meilisearch_types::error::{Code, ErrorCode};
|
||||
use meilisearch_error::{Code, ErrorCode};
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum AuthenticationError {
|
||||
|
||||
@@ -5,11 +5,12 @@ use std::ops::Deref;
|
||||
use std::pin::Pin;
|
||||
|
||||
use actix_web::FromRequest;
|
||||
pub use error::AuthenticationError;
|
||||
use futures::future::err;
|
||||
use futures::Future;
|
||||
use meilisearch_error::{Code, ResponseError};
|
||||
|
||||
use error::AuthenticationError;
|
||||
use meilisearch_auth::{AuthController, AuthFilter};
|
||||
use meilisearch_types::error::{Code, ResponseError};
|
||||
|
||||
pub struct GuardedData<P, D> {
|
||||
data: D,
|
||||
@@ -69,9 +70,11 @@ impl<P, D> GuardedData<P, D> {
|
||||
where
|
||||
P: Policy + 'static,
|
||||
{
|
||||
tokio::task::spawn_blocking(move || P::authenticate(auth, token.as_ref(), index.as_deref()))
|
||||
.await
|
||||
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))
|
||||
Ok(tokio::task::spawn_blocking(move || {
|
||||
P::authenticate(auth, token.as_ref(), index.as_deref())
|
||||
})
|
||||
.await
|
||||
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))?)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -128,39 +131,38 @@ pub trait Policy {
|
||||
}
|
||||
|
||||
pub mod policies {
|
||||
use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
|
||||
use jsonwebtoken::{dangerous_insecure_decode, decode, Algorithm, DecodingKey, Validation};
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use time::OffsetDateTime;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::extractors::authentication::Policy;
|
||||
use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules};
|
||||
// reexport actions in policies in order to be used in routes configuration.
|
||||
pub use meilisearch_auth::actions;
|
||||
|
||||
fn tenant_token_validation() -> Validation {
|
||||
let mut validation = Validation::default();
|
||||
validation.validate_exp = false;
|
||||
validation.required_spec_claims.remove("exp");
|
||||
validation.algorithms = vec![Algorithm::HS256, Algorithm::HS384, Algorithm::HS512];
|
||||
validation
|
||||
}
|
||||
pub static TENANT_TOKEN_VALIDATION: Lazy<Validation> = Lazy::new(|| Validation {
|
||||
validate_exp: false,
|
||||
algorithms: vec![Algorithm::HS256, Algorithm::HS384, Algorithm::HS512],
|
||||
..Default::default()
|
||||
});
|
||||
|
||||
/// Extracts the key id used to sign the payload, without performing any validation.
|
||||
fn extract_key_id(token: &str) -> Option<Uuid> {
|
||||
let mut validation = tenant_token_validation();
|
||||
validation.insecure_disable_signature_validation();
|
||||
let dummy_key = DecodingKey::from_secret(b"secret");
|
||||
let token_data = decode::<Claims>(token, &dummy_key, &validation).ok()?;
|
||||
pub struct MasterPolicy;
|
||||
|
||||
// get token fields without validating it.
|
||||
let Claims { api_key_uid, .. } = token_data.claims;
|
||||
Some(api_key_uid)
|
||||
}
|
||||
impl Policy for MasterPolicy {
|
||||
fn authenticate(
|
||||
auth: AuthController,
|
||||
token: &str,
|
||||
_index: Option<&str>,
|
||||
) -> Option<AuthFilter> {
|
||||
if let Some(master_key) = auth.get_master_key() {
|
||||
if master_key == token {
|
||||
return Some(AuthFilter::default());
|
||||
}
|
||||
}
|
||||
|
||||
fn is_keys_action(action: u8) -> bool {
|
||||
use actions::*;
|
||||
matches!(action, KEYS_GET | KEYS_CREATE | KEYS_UPDATE | KEYS_DELETE)
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ActionPolicy<const A: u8>;
|
||||
@@ -172,12 +174,7 @@ pub mod policies {
|
||||
index: Option<&str>,
|
||||
) -> Option<AuthFilter> {
|
||||
// authenticate if token is the master key.
|
||||
// master key can only have access to keys routes.
|
||||
// if master key is None only keys routes are inaccessible.
|
||||
if auth
|
||||
.get_master_key()
|
||||
.map_or_else(|| !is_keys_action(A), |mk| mk == token)
|
||||
{
|
||||
if auth.get_master_key().map_or(true, |mk| mk == token) {
|
||||
return Some(AuthFilter::default());
|
||||
}
|
||||
|
||||
@@ -187,10 +184,8 @@ pub mod policies {
|
||||
return Some(filters);
|
||||
} else if let Some(action) = Action::from_repr(A) {
|
||||
// API key
|
||||
if let Ok(Some(uid)) = auth.get_optional_uid_from_encoded_key(token.as_bytes()) {
|
||||
if let Ok(true) = auth.is_key_authorized(uid, action, index) {
|
||||
return auth.get_key_filters(uid, None).ok();
|
||||
}
|
||||
if let Ok(true) = auth.authenticate(token.as_bytes(), action, index) {
|
||||
return auth.get_key_filters(token, None).ok();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -209,34 +204,43 @@ pub mod policies {
|
||||
return None;
|
||||
}
|
||||
|
||||
let uid = extract_key_id(token)?;
|
||||
// get token fields without validating it.
|
||||
let Claims {
|
||||
search_rules,
|
||||
exp,
|
||||
api_key_prefix,
|
||||
} = dangerous_insecure_decode::<Claims>(token).ok()?.claims;
|
||||
|
||||
// Check index access if an index restriction is provided.
|
||||
if let Some(index) = index {
|
||||
if !search_rules.is_index_authorized(index) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
// Check if token is expired.
|
||||
if let Some(exp) = exp {
|
||||
if OffsetDateTime::now_utc().unix_timestamp() > exp {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
// check if parent key is authorized to do the action.
|
||||
if auth.is_key_authorized(uid, Action::Search, index).ok()? {
|
||||
if auth
|
||||
.is_key_authorized(api_key_prefix.as_bytes(), Action::Search, index)
|
||||
.ok()?
|
||||
{
|
||||
// Check if tenant token is valid.
|
||||
let key = auth.generate_key(uid)?;
|
||||
let data = decode::<Claims>(
|
||||
let key = auth.generate_key(&api_key_prefix)?;
|
||||
decode::<Claims>(
|
||||
token,
|
||||
&DecodingKey::from_secret(key.as_bytes()),
|
||||
&tenant_token_validation(),
|
||||
&TENANT_TOKEN_VALIDATION,
|
||||
)
|
||||
.ok()?;
|
||||
|
||||
// Check index access if an index restriction is provided.
|
||||
if let Some(index) = index {
|
||||
if !data.claims.search_rules.is_index_authorized(index) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
// Check if token is expired.
|
||||
if let Some(exp) = data.claims.exp {
|
||||
if OffsetDateTime::now_utc().unix_timestamp() > exp {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
return auth
|
||||
.get_key_filters(uid, Some(data.claims.search_rules))
|
||||
.get_key_filters(api_key_prefix, Some(search_rules))
|
||||
.ok();
|
||||
}
|
||||
|
||||
@@ -249,6 +253,6 @@ pub mod policies {
|
||||
struct Claims {
|
||||
search_rules: SearchRules,
|
||||
exp: Option<i64>,
|
||||
api_key_uid: Uuid,
|
||||
api_key_prefix: String,
|
||||
}
|
||||
}
|
||||
|
||||
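(A hedged aside, not part of the diff: the tenant-token flow above decodes the JWT twice, once unverified to learn which API key signed it, then again with that key's secret. A sketch of the two passes against the jsonwebtoken 8.x API, with an illustrative `Claims` type:)

    use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
    use serde::Deserialize;

    #[derive(Deserialize)]
    struct Claims {
        exp: Option<i64>,
    }

    fn verify(token: &str, signing_key: &[u8]) -> Option<Claims> {
        // Pass 1: peek at the claims without checking the signature, the way
        // `extract_key_id` does above; any secret works here.
        let mut peek = Validation::new(Algorithm::HS256);
        peek.insecure_disable_signature_validation();
        peek.required_spec_claims.remove("exp");
        peek.validate_exp = false;
        let _unverified = decode::<Claims>(token, &DecodingKey::from_secret(b"any"), &peek).ok()?;

        // Pass 2: decode again with the real key so the signature is checked;
        // expiry is left to a manual check, as in the diff.
        let mut val = Validation::new(Algorithm::HS256);
        val.required_spec_claims.remove("exp");
        val.validate_exp = false;
        let checked = decode::<Claims>(token, &DecodingKey::from_secret(signing_key), &val).ok()?;
        Some(checked.claims)
    }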
16 meilisearch-http/src/helpers/env.rs Normal file
@@ -0,0 +1,16 @@
use walkdir::WalkDir;

pub trait EnvSizer {
fn size(&self) -> u64;
}

impl EnvSizer for heed::Env {
fn size(&self) -> u64 {
WalkDir::new(self.path())
.into_iter()
.filter_map(|entry| entry.ok())
.filter_map(|entry| entry.metadata().ok())
.filter(|metadata| metadata.is_file())
.fold(0, |acc, m| acc + m.len())
}
}
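(A hedged aside, not part of the diff: `EnvSizer` reports on-disk usage by walking the environment's directory and summing regular files, so it measures actual disk footprint rather than the memory-mapped capacity. Illustrative usage, assuming the trait is in scope:)

    // use meilisearch_http::helpers::EnvSizer;
    fn log_db_size(env: &heed::Env) {
        println!("LMDB environment occupies {} bytes on disk", env.size());
    }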
3 meilisearch-http/src/helpers/mod.rs Normal file
@@ -0,0 +1,3 @@
mod env;

pub use env::EnvSizer;
@@ -2,17 +2,13 @@
#[macro_use]
pub mod error;
pub mod analytics;
pub mod task;
mod task;
#[macro_use]
pub mod extractors;
pub mod helpers;
pub mod option;
pub mod routes;

#[cfg(feature = "metrics")]
pub mod metrics;
#[cfg(feature = "metrics")]
pub mod route_metrics;

use std::sync::{atomic::AtomicBool, Arc};
use std::time::Duration;

@@ -34,9 +30,9 @@ pub static AUTOBATCHING_ENABLED: AtomicBool = AtomicBool::new(false);
pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<MeiliSearch> {
let mut meilisearch = MeiliSearch::builder();

// disable autobatching?
AUTOBATCHING_ENABLED.store(
!opt.scheduler_options.disable_auto_batching,
// enable autobatching?
let _ = AUTOBATCHING_ENABLED.store(
opt.scheduler_options.enable_auto_batching,
std::sync::atomic::Ordering::Relaxed,
);

@@ -145,40 +141,22 @@ pub fn dashboard(config: &mut web::ServiceConfig, _enable_frontend: bool) {
config.service(web::resource("/").route(web::get().to(routes::running)));
}

#[cfg(feature = "metrics")]
pub fn configure_metrics_route(config: &mut web::ServiceConfig, enable_metrics_route: bool) {
if enable_metrics_route {
config.service(
web::resource("/metrics").route(web::get().to(crate::route_metrics::get_metrics)),
);
}
}

#[macro_export]
macro_rules! create_app {
($data:expr, $auth:expr, $enable_frontend:expr, $opt:expr, $analytics:expr) => {{
use actix_cors::Cors;
use actix_web::dev::Service;
use actix_web::middleware::Condition;
use actix_web::middleware::TrailingSlash;
use actix_web::App;
use actix_web::{middleware, web};
use meilisearch_error::ResponseError;
use meilisearch_http::error::MeilisearchHttpError;
use meilisearch_http::routes;
use meilisearch_http::{configure_data, dashboard};
#[cfg(feature = "metrics")]
use meilisearch_http::{configure_metrics_route, metrics, route_metrics};
use meilisearch_types::error::ResponseError;

let app = App::new()
App::new()
.configure(|s| configure_data(s, $data.clone(), $auth.clone(), &$opt, $analytics))
.configure(routes::configure)
.configure(|s| dashboard(s, $enable_frontend));

#[cfg(feature = "metrics")]
let app = app.configure(|s| configure_metrics_route(s, $opt.enable_metrics_route));

let app = app
.configure(|s| dashboard(s, $enable_frontend))
.wrap(
Cors::default()
.send_wildcard()
@@ -191,14 +169,6 @@ macro_rules! create_app {
.wrap(middleware::Compress::default())
.wrap(middleware::NormalizePath::new(
middleware::TrailingSlash::Trim,
));

#[cfg(feature = "metrics")]
let app = app.wrap(Condition::new(
$opt.enable_metrics_route,
route_metrics::RouteMetrics,
));

app
))
}};
}

@@ -1,7 +1,6 @@
use std::env;
use std::sync::Arc;

use actix_web::http::KeepAlive;
use actix_web::HttpServer;
use clap::Parser;
use meilisearch_auth::AuthController;
@@ -10,8 +9,9 @@ use meilisearch_http::analytics::Analytics;
use meilisearch_http::{create_app, setup_meilisearch, Opt};
use meilisearch_lib::MeiliSearch;

#[cfg(target_os = "linux")]
#[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

/// does all the setup before meilisearch is launched
fn setup(opt: &Opt) -> anyhow::Result<()> {
@@ -83,8 +83,7 @@ async fn run_http(
)
})
// Disable signals allows the server to terminate immediately when a user enter CTRL-C
.disable_signals()
.keep_alive(KeepAlive::Os);
.disable_signals();

if let Some(config) = opt.get_ssl_config()? {
http_server
@@ -100,11 +99,7 @@ async fn run_http(
pub fn print_launch_resume(opt: &Opt, user: &str) {
let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
let protocol = if opt.ssl_cert_path.is_some() && opt.ssl_key_path.is_some() {
"https"
} else {
"http"
};

let ascii_name = r#"
888b d888 d8b 888 d8b 888
8888b d8888 Y8P 888 Y8P 888
@@ -119,7 +114,7 @@ pub fn print_launch_resume(opt: &Opt, user: &str) {
eprintln!("{}", ascii_name);

eprintln!("Database path:\t\t{:?}", opt.db_path);
eprintln!("Server listening on:\t\"{}://{}\"", protocol, opt.http_addr);
eprintln!("Server listening on:\t\"http://{}\"", opt.http_addr);
eprintln!("Environment:\t\t{:?}", opt.env);
eprintln!("Commit SHA:\t\t{:?}", commit_sha.to_string());
eprintln!("Commit date:\t\t{:?}", commit_date.to_string());

@@ -1,42 +0,0 @@
use lazy_static::lazy_static;
use prometheus::{
opts, register_histogram_vec, register_int_counter_vec, register_int_gauge,
register_int_gauge_vec,
};
use prometheus::{HistogramVec, IntCounterVec, IntGauge, IntGaugeVec};

const HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: &[f64; 14] = &[
0.0005, 0.0008, 0.00085, 0.0009, 0.00095, 0.001, 0.00105, 0.0011, 0.00115, 0.0012, 0.0015,
0.002, 0.003, 1.0,
];

lazy_static! {
pub static ref HTTP_REQUESTS_TOTAL: IntCounterVec = register_int_counter_vec!(
opts!("http_requests_total", "HTTP requests total"),
&["method", "path"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge = register_int_gauge!(opts!(
"meilisearch_db_size_bytes",
"Meilisearch Db Size In Bytes"
))
.expect("Can't create a metric");
pub static ref MEILISEARCH_INDEX_COUNT: IntGauge =
register_int_gauge!(opts!("meilisearch_index_count", "Meilisearch Index Count"))
.expect("Can't create a metric");
pub static ref MEILISEARCH_INDEX_DOCS_COUNT: IntGaugeVec = register_int_gauge_vec!(
opts!(
"meilisearch_index_docs_count",
"Meilisearch Index Docs Count"
),
&["index"]
)
.expect("Can't create a metric");
pub static ref HTTP_RESPONSE_TIME_SECONDS: HistogramVec = register_histogram_vec!(
"http_response_time_seconds",
"HTTP response times",
&["method", "path"],
HTTP_RESPONSE_TIME_CUSTOM_BUCKETS.to_vec()
)
.expect("Can't create a metric");
}
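(A hedged aside, not part of the diff: the statics above follow the usual prometheus-crate pattern of label-vector metrics. A sketch of how one request would be recorded against them, mirroring the middleware later in this diff:)

    fn record_request(method: &str, path: &str) {
        // Count the request as soon as it arrives.
        crate::metrics::HTTP_REQUESTS_TOTAL
            .with_label_values(&[method, path])
            .inc();
        // Time it; dropping or observing the timer records the duration.
        let timer = crate::metrics::HTTP_RESPONSE_TIME_SECONDS
            .with_label_values(&[method, path])
            .start_timer();
        // ... serve the request ...
        timer.observe_duration();
    }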
@@ -19,7 +19,6 @@ use serde::Serialize;
const POSSIBLE_ENV: [&str; 2] = ["development", "production"];

#[derive(Debug, Clone, Parser, Serialize)]
#[clap(version)]
pub struct Opt {
/// The destination where the database must be created.
#[clap(long, env = "MEILI_DB_PATH", default_value = "./data.ms")]
@@ -146,12 +145,7 @@ pub struct Opt {
#[clap(long, env = "MEILI_LOG_LEVEL", default_value = "info")]
pub log_level: String,

/// Enables Prometheus metrics and /metrics route.
#[cfg(feature = "metrics")]
#[clap(long, env = "MEILI_ENABLE_METRICS_ROUTE")]
pub enable_metrics_route: bool,

#[serde(flatten)]
#[serde(skip)]
#[clap(flatten)]
pub indexer_options: IndexerOpts,

@@ -264,13 +258,3 @@ fn load_ocsp(filename: &Option<PathBuf>) -> anyhow::Result<Vec<u8>> {

Ok(ret)
}

#[cfg(test)]
mod test {
use super::*;

#[test]
fn test_valid_opt() {
assert!(Opt::try_parse_from(Some("")).is_ok());
}
}

@@ -1,112 +0,0 @@
use std::future::{ready, Ready};

use actix_web::http::header;
use actix_web::HttpResponse;
use actix_web::{
dev::{self, Service, ServiceRequest, ServiceResponse, Transform},
Error,
};
use futures_util::future::LocalBoxFuture;
use meilisearch_auth::actions;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use prometheus::HistogramTimer;
use prometheus::{Encoder, TextEncoder};

use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;

pub async fn get_metrics(
meilisearch: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, MeiliSearch>,
) -> Result<HttpResponse, ResponseError> {
let search_rules = &meilisearch.filters().search_rules;
let response = meilisearch.get_all_stats(search_rules).await?;

crate::metrics::MEILISEARCH_DB_SIZE_BYTES.set(response.database_size as i64);
crate::metrics::MEILISEARCH_INDEX_COUNT.set(response.indexes.len() as i64);

for (index, value) in response.indexes.iter() {
crate::metrics::MEILISEARCH_INDEX_DOCS_COUNT
.with_label_values(&[index])
.set(value.number_of_documents as i64);
}

let encoder = TextEncoder::new();
let mut buffer = vec![];
encoder
.encode(&prometheus::gather(), &mut buffer)
.expect("Failed to encode metrics");

let response = String::from_utf8(buffer).expect("Failed to convert bytes to string");

Ok(HttpResponse::Ok()
.insert_header(header::ContentType(mime::TEXT_PLAIN))
.body(response))
}

pub struct RouteMetrics;

// Middleware factory is `Transform` trait from actix-service crate
// `S` - type of the next service
// `B` - type of response's body
impl<S, B> Transform<S, ServiceRequest> for RouteMetrics
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
S::Future: 'static,
B: 'static,
{
type Response = ServiceResponse<B>;
type Error = Error;
type InitError = ();
type Transform = RouteMetricsMiddleware<S>;
type Future = Ready<Result<Self::Transform, Self::InitError>>;

fn new_transform(&self, service: S) -> Self::Future {
ready(Ok(RouteMetricsMiddleware { service }))
}
}

pub struct RouteMetricsMiddleware<S> {
service: S,
}

impl<S, B> Service<ServiceRequest> for RouteMetricsMiddleware<S>
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
S::Future: 'static,
B: 'static,
{
type Response = ServiceResponse<B>;
type Error = Error;
type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;

dev::forward_ready!(service);

fn call(&self, req: ServiceRequest) -> Self::Future {
let mut histogram_timer: Option<HistogramTimer> = None;
let request_path = req.path();
let is_registered_resource = req.resource_map().has_resource(request_path);
if is_registered_resource {
let request_method = req.method().to_string();
histogram_timer = Some(
crate::metrics::HTTP_RESPONSE_TIME_SECONDS
.with_label_values(&[&request_method, request_path])
.start_timer(),
);
crate::metrics::HTTP_REQUESTS_TOTAL
.with_label_values(&[&request_method, request_path])
.inc();
}

let fut = self.service.call(req);

Box::pin(async move {
let res = fut.await?;

if let Some(histogram_timer) = histogram_timer {
histogram_timer.observe_duration();
};
Ok(res)
})
}
}
@@ -1,19 +1,17 @@
use std::str;

use actix_web::{web, HttpRequest, HttpResponse};

use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;
use uuid::Uuid;

use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key};
use meilisearch_types::error::{Code, ResponseError};

use crate::extractors::{
authentication::{policies::*, GuardedData},
sequential_extractor::SeqHandler,
};
use crate::routes::Pagination;
use meilisearch_error::{Code, ResponseError};

pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
@@ -22,7 +20,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.route(web::get().to(SeqHandler(list_api_keys))),
)
.service(
web::resource("/{key}")
web::resource("/{api_key}")
.route(web::get().to(SeqHandler(get_api_key)))
.route(web::patch().to(SeqHandler(patch_api_key)))
.route(web::delete().to(SeqHandler(delete_api_key))),
@@ -30,7 +28,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
}

pub async fn create_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, AuthController>,
auth_controller: GuardedData<MasterPolicy, AuthController>,
body: web::Json<Value>,
_req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
@@ -46,35 +44,30 @@ pub async fn create_api_key(
}

pub async fn list_api_keys(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
paginate: web::Query<Pagination>,
auth_controller: GuardedData<MasterPolicy, AuthController>,
_req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let keys = auth_controller.list_keys()?;
let page_view = paginate.auto_paginate_sized(
keys.into_iter()
.map(|k| KeyView::from_key(k, &auth_controller)),
);

Ok(page_view)
let res: Vec<_> = keys
.into_iter()
.map(|k| KeyView::from_key(k, &auth_controller))
.collect();
Ok(res)
})
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

Ok(HttpResponse::Ok().json(page_view))
Ok(HttpResponse::Ok().json(KeyListView::from(res)))
}

pub async fn get_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
auth_controller: GuardedData<MasterPolicy, AuthController>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let key = path.into_inner().key;

let api_key = path.into_inner().api_key;
let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
let key = auth_controller.get_key(uid)?;

let key = auth_controller.get_key(&api_key)?;
Ok(KeyView::from_key(key, &auth_controller))
})
.await
@@ -84,17 +77,14 @@ pub async fn get_api_key(
}

pub async fn patch_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, AuthController>,
auth_controller: GuardedData<MasterPolicy, AuthController>,
body: web::Json<Value>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let key = path.into_inner().key;
let api_key = path.into_inner().api_key;
let body = body.into_inner();
let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
let key = auth_controller.update_key(uid, body)?;

let key = auth_controller.update_key(&api_key, body)?;
Ok(KeyView::from_key(key, &auth_controller))
})
.await
@@ -104,33 +94,27 @@ pub async fn patch_api_key(
}

pub async fn delete_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_DELETE }>, AuthController>,
auth_controller: GuardedData<MasterPolicy, AuthController>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let key = path.into_inner().key;
tokio::task::spawn_blocking(move || {
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
auth_controller.delete_key(uid)
})
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;
let api_key = path.into_inner().api_key;
tokio::task::spawn_blocking(move || auth_controller.delete_key(&api_key))
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

Ok(HttpResponse::NoContent().finish())
}

#[derive(Deserialize)]
pub struct AuthParam {
key: String,
api_key: String,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct KeyView {
name: Option<String>,
description: Option<String>,
key: String,
uid: Uuid,
actions: Vec<Action>,
indexes: Vec<String>,
#[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
@@ -143,18 +127,28 @@ struct KeyView {

impl KeyView {
fn from_key(key: Key, auth: &AuthController) -> Self {
let generated_key = auth.generate_key(key.uid).unwrap_or_default();
let key_id = str::from_utf8(&key.id).unwrap();
let generated_key = auth.generate_key(key_id).unwrap_or_default();

KeyView {
name: key.name,
description: key.description,
key: generated_key,
uid: key.uid,
actions: key.actions,
indexes: key.indexes.into_iter().map(String::from).collect(),
indexes: key.indexes,
expires_at: key.expires_at,
created_at: key.created_at,
updated_at: key.updated_at,
}
}
}

#[derive(Debug, Serialize)]
struct KeyListView {
results: Vec<KeyView>,
}

impl From<Vec<KeyView>> for KeyListView {
fn from(results: Vec<KeyView>) -> Self {
Self { results }
}
}

@@ -1,16 +1,19 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use serde::{Deserialize, Serialize};
use serde_json::json;

use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::SummarizedTaskView;

pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))));
cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))))
.service(
web::resource("/{dump_uid}/status").route(web::get().to(SeqHandler(get_dump_status))),
);
}

pub async fn create_dump(
@@ -20,8 +23,29 @@ pub async fn create_dump(
) -> Result<HttpResponse, ResponseError> {
analytics.publish("Dump Created".to_string(), json!({}), Some(&req));

let res: SummarizedTaskView = meilisearch.register_dump_task().await?.into();
let res = meilisearch.create_dump().await?;

debug!("returns: {:?}", res);
Ok(HttpResponse::Accepted().json(res))
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct DumpStatusResponse {
status: String,
}

#[derive(Deserialize)]
struct DumpParam {
dump_uid: String,
}

async fn get_dump_status(
meilisearch: GuardedData<ActionPolicy<{ actions::DUMPS_GET }>, MeiliSearch>,
path: web::Path<DumpParam>,
) -> Result<HttpResponse, ResponseError> {
let res = meilisearch.dump_info(path.dump_uid.clone()).await?;

debug!("returns: {:?}", res);
Ok(HttpResponse::Ok().json(res))
}

@@ -6,15 +6,13 @@ use actix_web::{web, HttpRequest, HttpResponse};
use bstr::ByteSlice;
use futures::{Stream, StreamExt};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::index_controller::{DocumentAdditionFormat, Update};
use meilisearch_lib::milli::update::IndexDocumentsMethod;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use meilisearch_types::star_or::StarOr;
use mime::Mime;
use once_cell::sync::Lazy;
use serde::Deserialize;
use serde_cs::vec::CS;
use serde_json::Value;
use tokio::sync::mpsc;

@@ -23,9 +21,11 @@ use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::payload::Payload;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::{fold_star_or, PaginationView};
use crate::task::SummarizedTaskView;

const DEFAULT_RETRIEVE_DOCUMENTS_OFFSET: usize = 0;
const DEFAULT_RETRIEVE_DOCUMENTS_LIMIT: usize = 20;

static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
vec![
"application/json".to_string(),
@@ -46,7 +46,7 @@ fn payload_to_stream(mut payload: Payload) -> impl Stream<Item = Result<Bytes, P
}

/// Extracts the mime type from the content type and return
/// a meilisearch error if anything bad happen.
/// a meilisearch error if anyhthing bad happen.
fn extract_mime_type(req: &HttpRequest) -> Result<Option<Mime>, MeilisearchHttpError> {
match req.mime_type() {
Ok(Some(mime)) => Ok(Some(mime)),
@@ -86,24 +86,14 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct GetDocument {
fields: Option<CS<StarOr<String>>>,
}

pub async fn get_document(
meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, MeiliSearch>,
path: web::Path<DocumentParam>,
params: web::Query<GetDocument>,
) -> Result<HttpResponse, ResponseError> {
let index = path.index_uid.clone();
let id = path.document_id.clone();
let GetDocument { fields } = params.into_inner();
let attributes_to_retrieve = fields.and_then(fold_star_or);

let document = meilisearch
.document(index, id, attributes_to_retrieve)
.document(index, id, None as Option<Vec<String>>)
.await?;
debug!("returns: {:?}", document);
Ok(HttpResponse::Ok().json(document))
@@ -126,11 +116,9 @@ pub async fn delete_document(
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct BrowseQuery {
#[serde(default)]
offset: usize,
#[serde(default = "crate::routes::PAGINATION_DEFAULT_LIMIT")]
limit: usize,
fields: Option<CS<StarOr<String>>>,
offset: Option<usize>,
limit: Option<usize>,
attributes_to_retrieve: Option<String>,
}

pub async fn get_all_documents(
@@ -139,21 +127,27 @@ pub async fn get_all_documents(
params: web::Query<BrowseQuery>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", params);
let BrowseQuery {
limit,
offset,
fields,
} = params.into_inner();
let attributes_to_retrieve = fields.and_then(fold_star_or);
let attributes_to_retrieve = params.attributes_to_retrieve.as_ref().and_then(|attrs| {
let mut names = Vec::new();
for name in attrs.split(',').map(String::from) {
if name == "*" {
return None;
}
names.push(name);
}
Some(names)
});

let (total, documents) = meilisearch
.documents(path.into_inner(), offset, limit, attributes_to_retrieve)
let documents = meilisearch
.documents(
path.into_inner(),
params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET),
params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT),
attributes_to_retrieve,
)
.await?;

let ret = PaginationView::new(offset, limit, total as usize, documents);

debug!("returns: {:?}", ret);
Ok(HttpResponse::Ok().json(ret))
debug!("returns: {:?}", documents);
Ok(HttpResponse::Ok().json(documents))
}

#[derive(Deserialize, Debug)]

@@ -1,22 +1,21 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::index_controller::Update;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;

use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, AuthenticationError, GuardedData};
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::SummarizedTaskView;

use super::Pagination;

pub mod documents;
pub mod search;
pub mod settings;
pub mod tasks;

pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
@@ -29,32 +28,30 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(
web::resource("")
.route(web::get().to(SeqHandler(get_index)))
.route(web::patch().to(SeqHandler(update_index)))
.route(web::put().to(SeqHandler(update_index)))
.route(web::delete().to(SeqHandler(delete_index))),
)
.service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats))))
.service(web::scope("/documents").configure(documents::configure))
.service(web::scope("/search").configure(search::configure))
.service(web::scope("/tasks").configure(tasks::configure))
.service(web::scope("/settings").configure(settings::configure)),
);
}

pub async fn list_indexes(
data: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, MeiliSearch>,
paginate: web::Query<Pagination>,
) -> Result<HttpResponse, ResponseError> {
let search_rules = &data.filters().search_rules;
let indexes: Vec<_> = data.list_indexes().await?;
let nb_indexes = indexes.len();
let iter = indexes
let indexes: Vec<_> = data
.list_indexes()
.await?
.into_iter()
.filter(|i| search_rules.is_index_authorized(&i.uid));
let ret = paginate
.into_inner()
.auto_paginate_unsized(nb_indexes, iter);
.filter(|i| search_rules.is_index_authorized(&i.uid))
.collect();

debug!("returns: {:?}", ret);
Ok(HttpResponse::Ok().json(ret))
debug!("returns: {:?}", indexes);
Ok(HttpResponse::Ok().json(indexes))
}

#[derive(Debug, Deserialize)]
@@ -74,21 +71,16 @@ pub async fn create_index(
primary_key, uid, ..
} = body.into_inner();

let allow_index_creation = meilisearch.filters().search_rules.is_index_authorized(&uid);
if allow_index_creation {
analytics.publish(
"Index Created".to_string(),
json!({ "primary_key": primary_key }),
Some(&req),
);
analytics.publish(
"Index Created".to_string(),
json!({ "primary_key": primary_key }),
Some(&req),
);

let update = Update::CreateIndex { primary_key };
let task: SummarizedTaskView = meilisearch.register_update(uid, update).await?.into();
let update = Update::CreateIndex { primary_key };
let task: SummarizedTaskView = meilisearch.register_update(uid, update).await?.into();

Ok(HttpResponse::Accepted().json(task))
} else {
Err(AuthenticationError::InvalidToken.into())
}
Ok(HttpResponse::Accepted().json(task))
}

#[derive(Debug, Deserialize)]
@@ -163,14 +155,7 @@ pub async fn delete_index(
pub async fn get_index_stats(
meilisearch: GuardedData<ActionPolicy<{ actions::STATS_GET }>, MeiliSearch>,
path: web::Path<String>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Stats Seen".to_string(),
json!({ "per_index_uid": true }),
Some(&req),
);
let response = meilisearch.get_index_stats(path.into_inner()).await?;

debug!("returns: {:?}", response);

@@ -1,14 +1,10 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_auth::IndexSearchRules;
use meilisearch_lib::index::{
MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
};
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{default_crop_length, SearchQuery, DEFAULT_SEARCH_LIMIT};
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use serde::Deserialize;
use serde_cs::vec::CS;
use serde_json::Value;

use crate::analytics::{Analytics, SearchAggregator};
@@ -29,28 +25,36 @@ pub struct SearchQueryGet {
q: Option<String>,
offset: Option<usize>,
limit: Option<usize>,
attributes_to_retrieve: Option<CS<String>>,
attributes_to_crop: Option<CS<String>>,
#[serde(default = "DEFAULT_CROP_LENGTH")]
attributes_to_retrieve: Option<String>,
attributes_to_crop: Option<String>,
#[serde(default = "default_crop_length")]
crop_length: usize,
attributes_to_highlight: Option<CS<String>>,
attributes_to_highlight: Option<String>,
filter: Option<String>,
sort: Option<String>,
#[serde(default = "Default::default")]
show_matches_position: bool,
facets: Option<CS<String>>,
#[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")]
highlight_pre_tag: String,
#[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")]
highlight_post_tag: String,
#[serde(default = "DEFAULT_CROP_MARKER")]
crop_marker: String,
#[serde(default)]
matching_strategy: MatchingStrategy,
matches: bool,
facets_distribution: Option<String>,
}

impl From<SearchQueryGet> for SearchQuery {
fn from(other: SearchQueryGet) -> Self {
let attributes_to_retrieve = other
.attributes_to_retrieve
.map(|attrs| attrs.split(',').map(String::from).collect());

let attributes_to_crop = other
.attributes_to_crop
.map(|attrs| attrs.split(',').map(String::from).collect());

let attributes_to_highlight = other
.attributes_to_highlight
.map(|attrs| attrs.split(',').map(String::from).collect());

let facets_distribution = other
.facets_distribution
.map(|attrs| attrs.split(',').map(String::from).collect());

let filter = match other.filter {
Some(f) => match serde_json::from_str(&f) {
Ok(v) => Some(v),
@@ -59,26 +63,20 @@ impl From<SearchQueryGet> for SearchQuery {
None => None,
};

let sort = other.sort.map(|attr| fix_sort_query_parameters(&attr));

Self {
q: other.q,
offset: other.offset,
limit: other.limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT),
attributes_to_retrieve: other
.attributes_to_retrieve
.map(|o| o.into_iter().collect()),
attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()),
limit: other.limit.unwrap_or(DEFAULT_SEARCH_LIMIT),
attributes_to_retrieve,
attributes_to_crop,
crop_length: other.crop_length,
attributes_to_highlight: other
.attributes_to_highlight
.map(|o| o.into_iter().collect()),
attributes_to_highlight,
filter,
sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
show_matches_position: other.show_matches_position,
facets: other.facets.map(|o| o.into_iter().collect()),
highlight_pre_tag: other.highlight_pre_tag,
highlight_post_tag: other.highlight_post_tag,
crop_marker: other.crop_marker,
matching_strategy: other.matching_strategy,
sort,
matches: other.matches,
facets_distribution,
}
}
}
@@ -114,9 +112,10 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
sort_parameters.push(current_sort.to_string());
merge = true;
} else if merge && !sort_parameters.is_empty() {
let s = sort_parameters.last_mut().unwrap();
s.push(',');
s.push_str(current_sort);
sort_parameters
.last_mut()
.unwrap()
.push_str(&format!(",{}", current_sort));
if current_sort.ends_with("):desc") || current_sort.ends_with("):asc") {
merge = false;
}
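(A hedged aside, not part of the diff: `fix_sort_query_parameters` exists because `_geoPoint(lat, lng):asc` itself contains a comma, so a naive comma split cuts it apart and the merge loop above glues the pieces back together. An illustration of the intended behavior:)

    // Input:            "_geoPoint(48.85, 2.34):asc,price:desc"
    // naive split(','): ["_geoPoint(48.85", " 2.34):asc", "price:desc"]
    // after re-merging: ["_geoPoint(48.85, 2.34):asc", "price:desc"]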
@@ -158,6 +157,10 @@ pub async fn search_with_url_query(
|
||||
|
||||
let search_result = search_result?;
|
||||
|
||||
// Tests that the nb_hits is always set to false
|
||||
#[cfg(test)]
|
||||
assert!(!search_result.exhaustive_nb_hits);
|
||||
|
||||
debug!("returns: {:?}", search_result);
|
||||
Ok(HttpResponse::Ok().json(search_result))
|
||||
}
|
||||
@@ -192,6 +195,10 @@ pub async fn search_with_post(
|
||||
|
||||
let search_result = search_result?;
|
||||
|
||||
// Tests that the nb_hits is always set to false
|
||||
#[cfg(test)]
|
||||
assert!(!search_result.exhaustive_nb_hits);
|
||||
|
||||
debug!("returns: {:?}", search_result);
|
||||
Ok(HttpResponse::Ok().json(search_result))
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
use log::debug;
|
||||
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use meilisearch_error::ResponseError;
|
||||
use meilisearch_lib::index::{Settings, Unchecked};
|
||||
use meilisearch_lib::index_controller::Update;
|
||||
use meilisearch_lib::MeiliSearch;
|
||||
use meilisearch_types::error::ResponseError;
|
||||
use serde_json::json;
|
||||
|
||||
use crate::analytics::Analytics;
|
||||
@@ -13,7 +13,7 @@ use crate::task::SummarizedTaskView;
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! make_setting_route {
|
||||
($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
|
||||
($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
|
||||
pub mod $attr {
|
||||
use actix_web::{web, HttpRequest, HttpResponse, Resource};
|
||||
use log::debug;
|
||||
@@ -21,11 +21,11 @@ macro_rules! make_setting_route {
|
||||
use meilisearch_lib::milli::update::Setting;
|
||||
use meilisearch_lib::{index::Settings, index_controller::Update, MeiliSearch};
|
||||
|
||||
use meilisearch_types::error::ResponseError;
|
||||
use $crate::analytics::Analytics;
|
||||
use $crate::extractors::authentication::{policies::*, GuardedData};
|
||||
use $crate::extractors::sequential_extractor::SeqHandler;
|
||||
use $crate::task::SummarizedTaskView;
|
||||
use crate::analytics::Analytics;
|
||||
use crate::extractors::authentication::{policies::*, GuardedData};
|
||||
use crate::extractors::sequential_extractor::SeqHandler;
|
||||
use crate::task::SummarizedTaskView;
|
||||
use meilisearch_error::ResponseError;
|
||||
|
||||
pub async fn delete(
|
||||
meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
|
||||
@@ -100,27 +100,18 @@ macro_rules! make_setting_route {
|
||||
pub fn resources() -> Resource {
|
||||
Resource::new($route)
|
||||
.route(web::get().to(SeqHandler(get)))
|
||||
.route(web::$update_verb().to(SeqHandler(update)))
|
||||
.route(web::post().to(SeqHandler(update)))
|
||||
.route(web::delete().to(SeqHandler(delete)))
|
||||
}
|
||||
}
|
||||
};
|
||||
($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal) => {
|
||||
make_setting_route!(
|
||||
$route,
|
||||
$update_verb,
|
||||
$type,
|
||||
$attr,
|
||||
$camelcase_attr,
|
||||
_analytics,
|
||||
|_, _| {}
|
||||
);
|
||||
($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal) => {
|
||||
make_setting_route!($route, $type, $attr, $camelcase_attr, _analytics, |_, _| {});
|
||||
};
|
||||
}
|
||||
|
||||
make_setting_route!(
|
||||
"/filterable-attributes",
|
||||
put,
|
||||
std::collections::BTreeSet<String>,
|
||||
filterable_attributes,
|
||||
"filterableAttributes",
|
||||
@@ -143,7 +134,6 @@ make_setting_route!(
|
||||
|
||||
make_setting_route!(
|
||||
"/sortable-attributes",
|
||||
put,
|
||||
std::collections::BTreeSet<String>,
|
||||
sortable_attributes,
|
||||
"sortableAttributes",
|
||||
@@ -155,8 +145,8 @@ make_setting_route!(
|
||||
"SortableAttributes Updated".to_string(),
|
||||
json!({
|
||||
"sortable_attributes": {
|
||||
"total": setting.as_ref().map(|sort| sort.len()),
|
||||
"has_geo": setting.as_ref().map(|sort| sort.contains("_geo")),
|
||||
"total": setting.as_ref().map(|sort| sort.len()).unwrap_or(0),
|
||||
"has_geo": setting.as_ref().map(|sort| sort.contains("_geo")).unwrap_or(false),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
@@ -166,57 +156,13 @@ make_setting_route!(
|
||||
|
||||
make_setting_route!(
|
||||
"/displayed-attributes",
|
||||
put,
|
||||
Vec<String>,
|
||||
displayed_attributes,
|
||||
"displayedAttributes"
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
"/typo-tolerance",
|
||||
patch,
|
||||
meilisearch_lib::index::updates::TypoSettings,
|
||||
typo_tolerance,
|
||||
"typoTolerance",
|
||||
analytics,
|
||||
|setting: &Option<meilisearch_lib::index::updates::TypoSettings>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"TypoTolerance Updated".to_string(),
|
||||
json!({
|
||||
"typo_tolerance": {
|
||||
"enabled": setting.as_ref().map(|s| !matches!(s.enabled, Setting::Set(false))),
|
||||
"disable_on_attributes": setting
|
||||
.as_ref()
|
||||
.and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
|
||||
"disable_on_words": setting
|
||||
.as_ref()
|
||||
.and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
|
||||
"min_word_size_for_one_typo": setting
|
||||
.as_ref()
|
||||
.and_then(|s| s.min_word_size_for_typos
|
||||
.as_ref()
|
||||
.set()
|
||||
.map(|s| s.one_typo.set()))
|
||||
.flatten(),
|
||||
"min_word_size_for_two_typos": setting
|
||||
.as_ref()
|
||||
.and_then(|s| s.min_word_size_for_typos
|
||||
.as_ref()
|
||||
.set()
|
||||
.map(|s| s.two_typos.set()))
|
||||
.flatten(),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
"/searchable-attributes",
|
||||
put,
|
||||
Vec<String>,
|
||||
searchable_attributes,
|
||||
"searchableAttributes",
|
||||
@@ -228,7 +174,7 @@ make_setting_route!(
|
||||
"SearchableAttributes Updated".to_string(),
|
||||
json!({
|
||||
"searchable_attributes": {
|
||||
"total": setting.as_ref().map(|searchable| searchable.len()),
|
||||
"total": setting.as_ref().map(|searchable| searchable.len()).unwrap_or(0),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
@@ -238,7 +184,6 @@ make_setting_route!(
|
||||
|
||||
make_setting_route!(
|
||||
"/stop-words",
|
||||
put,
|
||||
std::collections::BTreeSet<String>,
|
||||
stop_words,
|
||||
"stopWords"
|
||||
@@ -246,7 +191,6 @@ make_setting_route!(
|
||||
|
||||
make_setting_route!(
|
||||
"/synonyms",
|
||||
put,
|
||||
std::collections::BTreeMap<String, Vec<String>>,
|
||||
synonyms,
|
||||
"synonyms"
|
||||
@@ -254,7 +198,6 @@ make_setting_route!(
|
||||
|
||||
make_setting_route!(
|
||||
"/distinct-attribute",
|
||||
put,
|
||||
String,
|
||||
distinct_attribute,
|
||||
"distinctAttribute"
|
||||
@@ -262,7 +205,6 @@ make_setting_route!(
|
||||
|
||||
make_setting_route!(
|
||||
"/ranking-rules",
|
||||
put,
|
||||
Vec<String>,
|
||||
ranking_rules,
|
||||
"rankingRules",
|
||||
@@ -282,57 +224,13 @@ make_setting_route!(
|
||||
}
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
"/faceting",
|
||||
patch,
|
||||
meilisearch_lib::index::updates::FacetingSettings,
|
||||
faceting,
|
||||
"faceting",
|
||||
analytics,
|
||||
|setting: &Option<meilisearch_lib::index::updates::FacetingSettings>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"Faceting Updated".to_string(),
|
||||
json!({
|
||||
"faceting": {
|
||||
"max_values_per_facet": setting.as_ref().and_then(|s| s.max_values_per_facet.set()),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
"/pagination",
|
||||
patch,
|
||||
meilisearch_lib::index::updates::PaginationSettings,
|
||||
pagination,
|
||||
"pagination",
|
||||
analytics,
|
||||
|setting: &Option<meilisearch_lib::index::updates::PaginationSettings>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"Pagination Updated".to_string(),
|
||||
json!({
|
||||
"pagination": {
|
||||
"max_total_hits": setting.as_ref().and_then(|s| s.max_total_hits.set()),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
macro_rules! generate_configure {
|
||||
($($mod:ident),*) => {
|
||||
pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||
use crate::extractors::sequential_extractor::SeqHandler;
|
||||
cfg.service(
|
||||
web::resource("")
|
||||
.route(web::patch().to(SeqHandler(update_all)))
|
||||
.route(web::post().to(SeqHandler(update_all)))
|
||||
.route(web::get().to(SeqHandler(get_all)))
|
||||
.route(web::delete().to(SeqHandler(delete_all))))
|
||||
$(.service($mod::resources()))*;
|
||||
@@ -348,10 +246,7 @@ generate_configure!(
|
||||
distinct_attribute,
|
||||
stop_words,
|
||||
synonyms,
|
||||
ranking_rules,
|
||||
typo_tolerance,
|
||||
pagination,
|
||||
faceting
|
||||
ranking_rules
|
||||
);
|
||||
|
||||
pub async fn update_all(
|
||||
@@ -370,58 +265,15 @@ pub async fn update_all(
|
||||
"sort_position": settings.ranking_rules.as_ref().set().map(|sort| sort.iter().position(|s| s == "sort")),
|
||||
},
|
||||
"searchable_attributes": {
|
||||
"total": settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()),
|
||||
"total": settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()).unwrap_or(0),
|
||||
},
|
||||
"sortable_attributes": {
|
||||
"total": settings.sortable_attributes.as_ref().set().map(|sort| sort.len()),
|
||||
"has_geo": settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")),
|
||||
"total": settings.sortable_attributes.as_ref().set().map(|sort| sort.len()).unwrap_or(0),
|
||||
"has_geo": settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")).unwrap_or(false),
|
||||
},
|
||||
"filterable_attributes": {
|
||||
"total": settings.filterable_attributes.as_ref().set().map(|filter| filter.len()),
|
||||
"has_geo": settings.filterable_attributes.as_ref().set().map(|filter| filter.iter().any(|s| s == "_geo")),
|
||||
},
|
||||
"typo_tolerance": {
|
||||
"enabled": settings.typo_tolerance
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.enabled.as_ref().set())
|
||||
.copied(),
|
||||
"disable_on_attributes": settings.typo_tolerance
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
|
||||
"disable_on_words": settings.typo_tolerance
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
|
||||
"min_word_size_for_one_typo": settings.typo_tolerance
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.min_word_size_for_typos
|
||||
.as_ref()
|
||||
.set()
|
||||
.map(|s| s.one_typo.set()))
|
||||
.flatten(),
|
||||
"min_word_size_for_two_typos": settings.typo_tolerance
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.min_word_size_for_typos
|
||||
.as_ref()
|
||||
.set()
|
||||
.map(|s| s.two_typos.set()))
|
||||
.flatten(),
|
||||
},
|
||||
"faceting": {
|
||||
"max_values_per_facet": settings.faceting
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.max_values_per_facet.as_ref().set()),
|
||||
},
|
||||
"pagination": {
|
||||
"max_total_hits": settings.pagination
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.max_total_hits.as_ref().set()),
|
||||
"total": settings.filterable_attributes.as_ref().set().map(|filter| filter.len()).unwrap_or(0),
|
||||
"has_geo": settings.filterable_attributes.as_ref().set().map(|filter| filter.iter().any(|s| s == "_geo")).unwrap_or(false),
|
||||
},
|
||||
}),
|
||||
Some(&req),
|
||||
|
||||
80
meilisearch-http/src/routes/indexes/tasks.rs
Normal file
80
meilisearch-http/src/routes/indexes/tasks.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use log::debug;
|
||||
use meilisearch_error::ResponseError;
|
||||
use meilisearch_lib::MeiliSearch;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
use crate::analytics::Analytics;
|
||||
use crate::extractors::authentication::{policies::*, GuardedData};
|
||||
use crate::extractors::sequential_extractor::SeqHandler;
|
||||
use crate::task::{TaskListView, TaskView};
|
||||
|
||||
pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(web::resource("").route(web::get().to(SeqHandler(get_all_tasks_status))))
|
||||
.service(web::resource("{task_id}").route(web::get().to(SeqHandler(get_task_status))));
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UpdateIndexResponse {
|
||||
name: String,
|
||||
uid: String,
|
||||
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
|
||||
created_at: OffsetDateTime,
|
||||
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
|
||||
updated_at: OffsetDateTime,
|
||||
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
|
||||
primary_key: OffsetDateTime,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct UpdateParam {
|
||||
index_uid: String,
|
||||
task_id: u64,
|
||||
}
|
||||
|
||||
pub async fn get_task_status(
|
||||
meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
|
||||
index_uid: web::Path<UpdateParam>,
|
||||
req: HttpRequest,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
analytics.publish(
|
||||
"Index Tasks Seen".to_string(),
|
||||
json!({ "per_task_uid": true }),
|
||||
Some(&req),
|
||||
);
|
||||
|
||||
let UpdateParam { index_uid, task_id } = index_uid.into_inner();
|
||||
|
||||
let task: TaskView = meilisearch.get_index_task(index_uid, task_id).await?.into();
|
||||
|
||||
debug!("returns: {:?}", task);
|
||||
Ok(HttpResponse::Ok().json(task))
|
||||
}
|
||||
|
||||
pub async fn get_all_tasks_status(
|
||||
meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
|
||||
index_uid: web::Path<String>,
|
||||
req: HttpRequest,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
analytics.publish(
|
||||
"Index Tasks Seen".to_string(),
|
||||
json!({ "per_task_uid": false }),
|
||||
Some(&req),
|
||||
);
|
||||
|
||||
let tasks: TaskListView = meilisearch
|
||||
.list_index_task(index_uid.into_inner(), None, None)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(TaskView::from)
|
||||
.collect::<Vec<_>>()
|
||||
.into();
|
||||
|
||||
debug!("returns: {:?}", tasks);
|
||||
Ok(HttpResponse::Ok().json(tasks))
|
||||
}
|
||||
0
meilisearch-http/src/routes/indexes/updates.rs
Normal file
0
meilisearch-http/src/routes/indexes/updates.rs
Normal file
@@ -1,16 +1,12 @@
use actix_web::{web, HttpRequest, HttpResponse};
use actix_web::{web, HttpResponse};
use log::debug;
use serde::{Deserialize, Serialize};

use serde_json::json;
use time::OffsetDateTime;

use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use meilisearch_types::star_or::StarOr;

use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};

mod api_key;

@@ -28,101 +24,6 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
        .service(web::scope("/indexes").configure(indexes::configure));
}

/// Extracts the raw values from the `StarOr` types and
/// return None if a `StarOr::Star` is encountered.
pub fn fold_star_or<T, O>(content: impl IntoIterator<Item = StarOr<T>>) -> Option<O>
where
    O: FromIterator<T>,
{
    content
        .into_iter()
        .map(|value| match value {
            StarOr::Star => None,
            StarOr::Other(val) => Some(val),
        })
        .collect()
}
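
Note: fold_star_or relies on Option's FromIterator behavior, where collecting items of Option<T> into Option<O> short-circuits to None at the first None. A minimal, self-contained sketch of the same idea, using a simplified stand-in for StarOr rather than the real meilisearch_types type:

// Simplified stand-in for meilisearch_types::star_or::StarOr, for illustration.
enum StarOr<T> {
    Star,      // the query contained a literal `*`
    Other(T),  // a concrete value
}

fn fold_star_or<T, O: FromIterator<T>>(values: Vec<StarOr<T>>) -> Option<O> {
    // Collecting Option<T> items into Option<O> stops and returns None
    // as soon as a StarOr::Star is encountered.
    values
        .into_iter()
        .map(|v| match v {
            StarOr::Star => None,
            StarOr::Other(val) => Some(val),
        })
        .collect()
}

fn main() {
    let concrete = vec![StarOr::Other("movies"), StarOr::Other("books")];
    let folded: Option<Vec<&str>> = fold_star_or(concrete);
    assert_eq!(folded, Some(vec!["movies", "books"]));

    let starred = vec![StarOr::Other("movies"), StarOr::Star];
    let folded: Option<Vec<&str>> = fold_star_or(starred);
    assert_eq!(folded, None); // `*` means "no filter at all"
}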

const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;

#[derive(Debug, Clone, Copy, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct Pagination {
    #[serde(default)]
    pub offset: usize,
    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
    pub limit: usize,
}

#[derive(Debug, Clone, Serialize)]
pub struct PaginationView<T> {
    pub results: Vec<T>,
    pub offset: usize,
    pub limit: usize,
    pub total: usize,
}

impl Pagination {
    /// Given the full data to paginate, returns the selected section.
    pub fn auto_paginate_sized<T>(
        self,
        content: impl IntoIterator<Item = T> + ExactSizeIterator,
    ) -> PaginationView<T>
    where
        T: Serialize,
    {
        let total = content.len();
        let content: Vec<_> = content
            .into_iter()
            .skip(self.offset)
            .take(self.limit)
            .collect();
        self.format_with(total, content)
    }

    /// Given an iterator and the total number of elements, returns the selected section.
    pub fn auto_paginate_unsized<T>(
        self,
        total: usize,
        content: impl IntoIterator<Item = T>,
    ) -> PaginationView<T>
    where
        T: Serialize,
    {
        let content: Vec<_> = content
            .into_iter()
            .skip(self.offset)
            .take(self.limit)
            .collect();
        self.format_with(total, content)
    }

    /// Given the data already paginated + the total number of elements, it stores
    /// everything in a [PaginationResult].
    pub fn format_with<T>(self, total: usize, results: Vec<T>) -> PaginationView<T>
    where
        T: Serialize,
    {
        PaginationView {
            results,
            offset: self.offset,
            limit: self.limit,
            total,
        }
    }
}

impl<T> PaginationView<T> {
    pub fn new(offset: usize, limit: usize, total: usize, results: Vec<T>) -> Self {
        Self {
            offset,
            limit,
            results,
            total,
        }
    }
}
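
Note: serde's default attribute takes the path of a function, which is why PAGINATION_DEFAULT_LIMIT is declared as a fn() -> usize constant rather than a plain number; the pagination itself is just skip(offset).take(limit) over the full result set. A self-contained sketch of both mechanics (names are illustrative, not this crate's API):

use serde::Deserialize;

const DEFAULT_LIMIT: fn() -> usize = || 20;

#[derive(Debug, Deserialize)]
struct Pagination {
    #[serde(default)]
    offset: usize,
    #[serde(default = "DEFAULT_LIMIT")]
    limit: usize,
}

fn main() {
    // Missing fields fall back to their defaults: offset = 0, limit = 20.
    let p: Pagination = serde_json::from_str(r#"{ "offset": 40 }"#).unwrap();
    assert_eq!((p.offset, p.limit), (40, 20));

    // Paginating is just skip + take over the full collection.
    let page: Vec<_> = (0..100).skip(p.offset).take(p.limit).collect();
    assert_eq!(page.first(), Some(&40));
    assert_eq!(page.len(), 20);
}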

#[derive(Debug, Clone, Serialize, Deserialize)]
#[allow(clippy::large_enum_variant)]
#[serde(tag = "name")]
@@ -233,14 +134,7 @@ pub async fn running() -> HttpResponse {

async fn get_stats(
    meilisearch: GuardedData<ActionPolicy<{ actions::STATS_GET }>, MeiliSearch>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.publish(
        "Stats Seen".to_string(),
        json!({ "per_index_uid": false }),
        Some(&req),
    );
    let search_rules = &meilisearch.filters().search_rules;
    let response = meilisearch.get_all_stats(search_rules).await?;

@@ -1,172 +1,49 @@
use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_lib::tasks::task::{TaskContent, TaskEvent, TaskId};
use meilisearch_error::ResponseError;
use meilisearch_lib::tasks::task::TaskId;
use meilisearch_lib::tasks::TaskFilter;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::star_or::StarOr;
use serde::Deserialize;
use serde_cs::vec::CS;
use serde_json::json;

use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::{TaskListView, TaskStatus, TaskType, TaskView};

use super::fold_star_or;

const DEFAULT_LIMIT: fn() -> usize = || 20;
use crate::task::{TaskListView, TaskView};

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks))))
        .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
}

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct TasksFilterQuery {
    #[serde(rename = "type")]
    type_: Option<CS<StarOr<TaskType>>>,
    status: Option<CS<StarOr<TaskStatus>>>,
    index_uid: Option<CS<StarOr<IndexUid>>>,
    #[serde(default = "DEFAULT_LIMIT")]
    limit: usize,
    from: Option<TaskId>,
}

#[rustfmt::skip]
fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool {
    matches!((type_, content),
          (TaskType::IndexCreation, TaskContent::IndexCreation { .. })
        | (TaskType::IndexUpdate, TaskContent::IndexUpdate { .. })
        | (TaskType::IndexDeletion, TaskContent::IndexDeletion { .. })
        | (TaskType::DocumentAdditionOrUpdate, TaskContent::DocumentAddition { .. })
        | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion{ .. })
        | (TaskType::SettingsUpdate, TaskContent::SettingsUpdate { .. })
    )
}

#[rustfmt::skip]
fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> bool {
    events.last().map_or(false, |event| {
        matches!((status, event),
              (TaskStatus::Enqueued, TaskEvent::Created(_))
            | (TaskStatus::Processing, TaskEvent::Processing(_) | TaskEvent::Batched { .. })
            | (TaskStatus::Succeeded, TaskEvent::Succeeded { .. })
            | (TaskStatus::Failed, TaskEvent::Failed { .. }),
        )
    })
}
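
Note: a task's status is never stored directly here; it is derived from the last event in the task's history, which is why task_status_matches_events only inspects events.last(). A sketch of that pattern with simplified stand-in types (not the real meilisearch_lib definitions):

#[derive(Debug, PartialEq)]
enum TaskStatus { Enqueued, Processing, Succeeded, Failed }

enum TaskEvent { Created, Batched, Processing, Succeeded, Failed }

// Only the most recent event decides the status; an empty history matches nothing.
fn status_matches(status: &TaskStatus, events: &[TaskEvent]) -> bool {
    events.last().map_or(false, |event| {
        matches!(
            (status, event),
              (TaskStatus::Enqueued, TaskEvent::Created)
            | (TaskStatus::Processing, TaskEvent::Processing | TaskEvent::Batched)
            | (TaskStatus::Succeeded, TaskEvent::Succeeded)
            | (TaskStatus::Failed, TaskEvent::Failed)
        )
    })
}

fn main() {
    let history = vec![TaskEvent::Created, TaskEvent::Processing, TaskEvent::Succeeded];
    assert!(status_matches(&TaskStatus::Succeeded, &history));
    assert!(!status_matches(&TaskStatus::Processing, &history));
}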

async fn get_tasks(
    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
    params: web::Query<TasksFilterQuery>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let TasksFilterQuery {
        type_,
        status,
        index_uid,
        limit,
        from,
    } = params.into_inner();

    let search_rules = &meilisearch.filters().search_rules;

    // We first transform a potential indexUid=* into a "not specified indexUid filter"
    // for every one of the filters: type, status, and indexUid.
    let type_: Option<Vec<_>> = type_.and_then(fold_star_or);
    let status: Option<Vec<_>> = status.and_then(fold_star_or);
    let index_uid: Option<Vec<_>> = index_uid.and_then(fold_star_or);

    analytics.publish(
        "Tasks Seen".to_string(),
        json!({
            "filtered_by_index_uid": index_uid.as_ref().map_or(false, |v| !v.is_empty()),
            "filtered_by_type": type_.as_ref().map_or(false, |v| !v.is_empty()),
            "filtered_by_status": status.as_ref().map_or(false, |v| !v.is_empty()),
        }),
        json!({ "per_task_uid": false }),
        Some(&req),
    );

    // Then we filter on potential indexes and make sure that the search filter
    // restrictions are also applied.
    let indexes_filters = match index_uid {
        Some(indexes) => {
            let mut filters = TaskFilter::default();
            for name in indexes {
                if search_rules.is_index_authorized(&name) {
                    filters.filter_index(name.to_string());
                }
            }
            Some(filters)
        }
        None => {
            if search_rules.is_index_authorized("*") {
                None
            } else {
                let mut filters = TaskFilter::default();
                for (index, _policy) in search_rules.clone() {
                    filters.filter_index(index);
                }
                Some(filters)
            }
        }
    };

    // Then we complete the task filter with other potential status and types filters.
    let filters = if type_.is_some() || status.is_some() {
        let mut filters = indexes_filters.unwrap_or_default();
        filters.filter_fn(Box::new(move |task| {
            let matches_type = match &type_ {
                Some(types) => types
                    .iter()
                    .any(|t| task_type_matches_content(t, &task.content)),
                None => true,
            };

            let matches_status = match &status {
                Some(statuses) => statuses
                    .iter()
                    .any(|t| task_status_matches_events(t, &task.events)),
                None => true,
            };

            matches_type && matches_status
        }));
        Some(filters)
    let search_rules = &meilisearch.filters().search_rules;
    let filters = if search_rules.is_index_authorized("*") {
        None
    } else {
        indexes_filters
        let mut filters = TaskFilter::default();
        for (index, _policy) in search_rules.clone() {
            filters.filter_index(index);
        }
        Some(filters)
    };

    // We +1 just to know if there is more after this "page" or not.
    let limit = limit.saturating_add(1);

    let mut tasks_results: Vec<_> = meilisearch
        .list_tasks(filters, Some(limit), from)
    let tasks: TaskListView = meilisearch
        .list_tasks(filters, None, None)
        .await?
        .into_iter()
        .map(TaskView::from)
        .collect();

    // If we were able to fetch the number +1 tasks we asked
    // it means that there is more to come.
    let next = if tasks_results.len() == limit {
        tasks_results.pop().map(|t| t.uid)
    } else {
        None
    };

    let from = tasks_results.first().map(|t| t.uid);

    let tasks = TaskListView {
        results: tasks_results,
        limit: limit.saturating_sub(1),
        from,
        next,
    };
        .collect::<Vec<_>>()
        .into();

    Ok(HttpResponse::Ok().json(tasks))
}
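
Note: the saturating_add(1) above is the usual fetch-one-extra trick for cursor pagination: request limit + 1 items, and if all of them come back, pop the extra one and expose its uid as the next cursor, avoiding a separate count query. The idea in isolation, with hypothetical types rather than the meilisearch_lib API:

struct Page {
    results: Vec<u64>, // task uids actually returned
    limit: usize,
    from: Option<u64>, // first uid on this page
    next: Option<u64>, // cursor for the following page, if any
}

fn paginate(store: &[u64], limit: usize) -> Page {
    // Ask for one more element than requested...
    let probe = limit.saturating_add(1);
    let mut results: Vec<u64> = store.iter().copied().take(probe).collect();

    // ...and if we received it, there is more to come: pop it and
    // expose its uid as the `next` cursor.
    let next = if results.len() == probe {
        results.pop()
    } else {
        None
    };

    let from = results.first().copied();
    Page { results, limit, from, next }
}

fn main() {
    let store: Vec<u64> = (0..45).collect();
    let page = paginate(&store, 20);
    assert_eq!(page.results.len(), 20);
    assert_eq!(page.next, Some(20)); // a 21st task existed, so there is a next page
}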

@@ -1,134 +1,62 @@
use std::error::Error;
use std::fmt::{self, Write};
use std::str::FromStr;
use std::fmt::Write;
use std::write;

use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::milli::update::IndexDocumentsMethod;
use meilisearch_lib::tasks::batch::BatchId;
use meilisearch_lib::tasks::task::{
    DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
};
use meilisearch_types::error::ResponseError;
use serde::{Deserialize, Serialize, Serializer};
use serde::{Serialize, Serializer};
use time::{Duration, OffsetDateTime};

#[derive(Debug, Serialize, Deserialize)]
use crate::AUTOBATCHING_ENABLED;

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum TaskType {
enum TaskType {
    IndexCreation,
    IndexUpdate,
    IndexDeletion,
    DocumentAdditionOrUpdate,
    DocumentAddition,
    DocumentPartial,
    DocumentDeletion,
    SettingsUpdate,
    DumpCreation,
    ClearAll,
}

impl From<TaskContent> for TaskType {
    fn from(other: TaskContent) -> Self {
        match other {
            TaskContent::DocumentAddition {
                merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
                ..
            } => TaskType::DocumentAddition,
            TaskContent::DocumentAddition {
                merge_strategy: IndexDocumentsMethod::UpdateDocuments,
                ..
            } => TaskType::DocumentPartial,
            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll,
            TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion,
            TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
            TaskContent::IndexDeletion => TaskType::IndexDeletion,
            TaskContent::IndexCreation { .. } => TaskType::IndexCreation,
            TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate,
            TaskContent::IndexDeletion { .. } => TaskType::IndexDeletion,
            TaskContent::DocumentAddition { .. } => TaskType::DocumentAdditionOrUpdate,
            TaskContent::DocumentDeletion { .. } => TaskType::DocumentDeletion,
            TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
            TaskContent::Dump { .. } => TaskType::DumpCreation,
            _ => unreachable!("unexpected task type"),
        }
    }
}

#[derive(Debug)]
pub struct TaskTypeError {
    invalid_type: String,
}

impl fmt::Display for TaskTypeError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "invalid task type `{}`, expecting one of: \
             indexCreation, indexUpdate, indexDeletion, documentAdditionOrUpdate, \
             documentDeletion, settingsUpdate, dumpCreation",
            self.invalid_type
        )
    }
}

impl Error for TaskTypeError {}

impl FromStr for TaskType {
    type Err = TaskTypeError;

    fn from_str(type_: &str) -> Result<Self, TaskTypeError> {
        if type_.eq_ignore_ascii_case("indexCreation") {
            Ok(TaskType::IndexCreation)
        } else if type_.eq_ignore_ascii_case("indexUpdate") {
            Ok(TaskType::IndexUpdate)
        } else if type_.eq_ignore_ascii_case("indexDeletion") {
            Ok(TaskType::IndexDeletion)
        } else if type_.eq_ignore_ascii_case("documentAdditionOrUpdate") {
            Ok(TaskType::DocumentAdditionOrUpdate)
        } else if type_.eq_ignore_ascii_case("documentDeletion") {
            Ok(TaskType::DocumentDeletion)
        } else if type_.eq_ignore_ascii_case("settingsUpdate") {
            Ok(TaskType::SettingsUpdate)
        } else if type_.eq_ignore_ascii_case("dumpCreation") {
            Ok(TaskType::DumpCreation)
        } else {
            Err(TaskTypeError {
                invalid_type: type_.to_string(),
            })
        }
    }
}
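
Note: implementing FromStr is what allows query-string values such as ?type=dumpCreation to be parsed with str::parse, case-insensitively here. A quick usage sketch, assuming the TaskType definitions above are in scope:

fn main() {
    let t: TaskType = "dumpCreation".parse().unwrap();
    println!("parsed: {:?}", t); // DumpCreation

    // Matching is ASCII case-insensitive, so this also succeeds.
    assert!("DUMPCREATION".parse::<TaskType>().is_ok());

    // Anything else surfaces the descriptive TaskTypeError.
    let err = "foobar".parse::<TaskType>().unwrap_err();
    println!("{}", err); // invalid task type `foobar`, expecting one of: ...
}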

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum TaskStatus {
enum TaskStatus {
    Enqueued,
    Processing,
    Succeeded,
    Failed,
}

#[derive(Debug)]
pub struct TaskStatusError {
    invalid_status: String,
}

impl fmt::Display for TaskStatusError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "invalid task status `{}`, expecting one of: \
             enqueued, processing, succeeded, or failed",
            self.invalid_status,
        )
    }
}

impl Error for TaskStatusError {}

impl FromStr for TaskStatus {
    type Err = TaskStatusError;

    fn from_str(status: &str) -> Result<Self, TaskStatusError> {
        if status.eq_ignore_ascii_case("enqueued") {
            Ok(TaskStatus::Enqueued)
        } else if status.eq_ignore_ascii_case("processing") {
            Ok(TaskStatus::Processing)
        } else if status.eq_ignore_ascii_case("succeeded") {
            Ok(TaskStatus::Succeeded)
        } else if status.eq_ignore_ascii_case("failed") {
            Ok(TaskStatus::Failed)
        } else {
            Err(TaskStatusError {
                invalid_status: status.to_string(),
            })
        }
    }
}

#[derive(Debug, Serialize)]
#[serde(untagged)]
#[allow(clippy::large_enum_variant)]
@@ -152,8 +80,6 @@ enum TaskDetails {
    },
    #[serde(rename_all = "camelCase")]
    ClearAll { deleted_documents: Option<u64> },
    #[serde(rename_all = "camelCase")]
    Dump { dump_uid: String },
}

/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for
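
Note: the doc comment above is cut off by the diff; it introduces a hand-rolled, best-effort ISO 8601 rendering of time::Duration. The real body is not shown here, so the following is only a sketch of that kind of serializer, assuming non-negative durations as task timings are:

use serde::Serializer;
use time::Duration;

// Best-effort ISO 8601: renders e.g. 90 seconds as "PT1M30S".
// Assumption: the duration is non-negative; fractional seconds are dropped.
fn serialize_duration<S: Serializer>(d: &Duration, s: S) -> Result<S::Ok, S::Error> {
    let secs = d.whole_seconds();
    let out = format!("PT{}M{}S", secs / 60, secs % 60);
    s.serialize_str(&out)
}
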
@@ -210,8 +136,8 @@ fn serialize_duration<S: Serializer>(
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskView {
    pub uid: TaskId,
    index_uid: Option<String>,
    uid: TaskId,
    index_uid: String,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,
@@ -227,48 +153,52 @@ pub struct TaskView {
    started_at: Option<OffsetDateTime>,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    finished_at: Option<OffsetDateTime>,
    #[serde(skip_serializing_if = "Option::is_none")]
    batch_uid: Option<Option<BatchId>>,
}

impl From<Task> for TaskView {
    fn from(task: Task) -> Self {
        let index_uid = task.index_uid().map(String::from);
        let Task {
            id,
            index_uid,
            content,
            events,
        } = task;

        let (task_type, mut details) = match content {
            TaskContent::DocumentAddition {
                documents_count, ..
                merge_strategy,
                documents_count,
                ..
            } => {
                let details = TaskDetails::DocumentAddition {
                    received_documents: documents_count,
                    indexed_documents: None,
                };

                (TaskType::DocumentAdditionOrUpdate, Some(details))
                let task_type = match merge_strategy {
                    IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial,
                    IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition,
                    _ => unreachable!("Unexpected document merge strategy."),
                };

                (task_type, Some(details))
            }
            TaskContent::DocumentDeletion {
                deletion: DocumentDeletion::Ids(ids),
                ..
            } => (
            TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => (
                TaskType::DocumentDeletion,
                Some(TaskDetails::DocumentDeletion {
                    received_document_ids: ids.len(),
                    deleted_documents: None,
                }),
            ),
            TaskContent::DocumentDeletion {
                deletion: DocumentDeletion::Clear,
                ..
            } => (
                TaskType::DocumentDeletion,
            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => (
                TaskType::ClearAll,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
                }),
            ),
            TaskContent::IndexDeletion { .. } => (
            TaskContent::IndexDeletion => (
                TaskType::IndexDeletion,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
@@ -278,18 +208,14 @@ impl From<Task> for TaskView {
                TaskType::SettingsUpdate,
                Some(TaskDetails::Settings { settings }),
            ),
            TaskContent::IndexCreation { primary_key, .. } => (
            TaskContent::IndexCreation { primary_key } => (
                TaskType::IndexCreation,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
            TaskContent::IndexUpdate { primary_key, .. } => (
            TaskContent::IndexUpdate { primary_key } => (
                TaskType::IndexUpdate,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
            TaskContent::Dump { uid } => (
                TaskType::DumpCreation,
                Some(TaskDetails::Dump { dump_uid: uid }),
            ),
        };

        // An event always has at least one event: "Created"
@@ -297,7 +223,7 @@ impl From<Task> for TaskView {
            TaskEvent::Created(_) => (TaskStatus::Enqueued, None, None),
            TaskEvent::Batched { .. } => (TaskStatus::Enqueued, None, None),
            TaskEvent::Processing(_) => (TaskStatus::Processing, None, None),
            TaskEvent::Succeeded { timestamp, result } => {
            TaskEvent::Succeded { timestamp, result } => {
                match (result, &mut details) {
                    (
                        TaskResult::DocumentAddition {
@@ -375,9 +301,19 @@ impl From<Task> for TaskView {

        let duration = finished_at.zip(started_at).map(|(tf, ts)| (tf - ts));

        let batch_uid = if AUTOBATCHING_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {
            let id = events.iter().find_map(|e| match e {
                TaskEvent::Batched { batch_id, .. } => Some(*batch_id),
                _ => None,
            });
            Some(id)
        } else {
            None
        };

        Self {
            uid: id,
            index_uid,
            index_uid: index_uid.into_inner(),
            status,
            task_type,
            details,
@@ -386,23 +322,27 @@ impl From<Task> for TaskView {
            enqueued_at,
            started_at,
            finished_at,
            batch_uid,
        }
    }
}
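
Note: the batch_uid extraction above uses find_map, which combines find and map in a single pass and stops at the first Some. The pattern in isolation, with a simplified event type:

enum Event {
    Created,
    Batched { batch_id: u32 },
    Processing,
}

fn main() {
    let events = vec![Event::Created, Event::Batched { batch_id: 7 }, Event::Processing];

    // Scans the history once and returns the first Batched event's id, if any.
    let batch_id = events.iter().find_map(|e| match e {
        Event::Batched { batch_id } => Some(*batch_id),
        _ => None,
    });

    assert_eq!(batch_id, Some(7));
}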

#[derive(Debug, Serialize)]
pub struct TaskListView {
    pub results: Vec<TaskView>,
    pub limit: usize,
    pub from: Option<TaskId>,
    pub next: Option<TaskId>,
    results: Vec<TaskView>,
}

impl From<Vec<TaskView>> for TaskListView {
    fn from(results: Vec<TaskView>) -> Self {
        Self { results }
    }
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
    task_uid: TaskId,
    index_uid: Option<String>,
    uid: TaskId,
    index_uid: String,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,
@@ -424,8 +364,8 @@ impl From<Task> for SummarizedTaskView {
        };

        Self {
            task_uid: other.id,
            index_uid: other.index_uid().map(String::from),
            uid: other.id,
            index_uid: other.index_uid.to_string(),
            status: TaskStatus::Enqueued,
            task_type: other.content.into(),
            enqueued_at,
Binary files not shown (13 files).
File diff suppressed because it is too large.
@@ -8,60 +8,46 @@ use time::{Duration, OffsetDateTime};

pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
    Lazy::new(|| {
        let mut authorizations = hashmap! {
        hashmap! {
            ("POST", "/indexes/products/search") => hashset!{"search", "*"},
            ("GET", "/indexes/products/search") => hashset!{"search", "*"},
            ("POST", "/indexes/products/documents") => hashset!{"documents.add", "documents.*", "*"},
            ("GET", "/indexes/products/documents") => hashset!{"documents.get", "documents.*", "*"},
            ("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "documents.*", "*"},
            ("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "documents.*", "*"},
            ("GET", "/tasks") => hashset!{"tasks.get", "tasks.*", "*"},
            ("GET", "/tasks?indexUid=products") => hashset!{"tasks.get", "tasks.*", "*"},
            ("GET", "/tasks/0") => hashset!{"tasks.get", "tasks.*", "*"},
            ("PATCH", "/indexes/products/") => hashset!{"indexes.update", "indexes.*", "*"},
            ("GET", "/indexes/products/") => hashset!{"indexes.get", "indexes.*", "*"},
            ("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "indexes.*", "*"},
            ("POST", "/indexes") => hashset!{"indexes.create", "indexes.*", "*"},
            ("GET", "/indexes") => hashset!{"indexes.get", "indexes.*", "*"},
            ("GET", "/indexes/products/settings") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/ranking-rules") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/stop-words") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/synonyms") => hashset!{"settings.get", "settings.*", "*"},
            ("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "settings.*", "*"},
            ("PATCH", "/indexes/products/settings") => hashset!{"settings.update", "settings.*", "*"},
            ("PATCH", "/indexes/products/settings/typo-tolerance") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "settings.*", "*"},
            ("GET", "/indexes/products/stats") => hashset!{"stats.get", "stats.*", "*"},
            ("GET", "/stats") => hashset!{"stats.get", "stats.*", "*"},
            ("POST", "/dumps") => hashset!{"dumps.create", "dumps.*", "*"},
            ("POST", "/indexes/products/documents") => hashset!{"documents.add", "*"},
            ("GET", "/indexes/products/documents") => hashset!{"documents.get", "*"},
            ("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "*"},
            ("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "*"},
            ("GET", "/tasks") => hashset!{"tasks.get", "*"},
            ("GET", "/indexes/products/tasks") => hashset!{"tasks.get", "*"},
            ("GET", "/indexes/products/tasks/0") => hashset!{"tasks.get", "*"},
            ("PUT", "/indexes/products/") => hashset!{"indexes.update", "*"},
            ("GET", "/indexes/products/") => hashset!{"indexes.get", "*"},
            ("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "*"},
            ("POST", "/indexes") => hashset!{"indexes.create", "*"},
            ("GET", "/indexes") => hashset!{"indexes.get", "*"},
            ("GET", "/indexes/products/settings") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/ranking-rules") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/stop-words") => hashset!{"settings.get", "*"},
            ("GET", "/indexes/products/settings/synonyms") => hashset!{"settings.get", "*"},
            ("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "*"},
            ("POST", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "*"},
            ("GET", "/indexes/products/stats") => hashset!{"stats.get", "*"},
            ("GET", "/stats") => hashset!{"stats.get", "*"},
            ("POST", "/dumps") => hashset!{"dumps.create", "*"},
            ("GET", "/dumps/0/status") => hashset!{"dumps.get", "*"},
            ("GET", "/version") => hashset!{"version", "*"},
            ("PATCH", "/keys/mykey/") => hashset!{"keys.update", "*"},
            ("GET", "/keys/mykey/") => hashset!{"keys.get", "*"},
            ("DELETE", "/keys/mykey/") => hashset!{"keys.delete", "*"},
            ("POST", "/keys") => hashset!{"keys.create", "*"},
            ("GET", "/keys") => hashset!{"keys.get", "*"},
        };

        if cfg!(feature = "metrics") {
            authorizations.insert(
                ("GET", "/metrics"),
                hashset! {"metrics.get", "metrics.*", "*"},
            );
        }

        authorizations
    });
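
Note: the fixture above maps each (method, route) pair to every API-key action that must grant access, including the scoped wildcard (for example documents.*) and the global *. A minimal sketch of checking a key against such a matrix (hypothetical helper, not part of the test suite):

use std::collections::{HashMap, HashSet};

fn is_authorized(
    matrix: &HashMap<(&str, &str), HashSet<&str>>,
    key_actions: &HashSet<&str>,
    method: &str,
    route: &str,
) -> bool {
    // A request passes if the key holds any action accepted for this route.
    matrix
        .get(&(method, route))
        .map_or(false, |accepted| !accepted.is_disjoint(key_actions))
}

fn main() {
    let mut matrix = HashMap::new();
    matrix.insert(
        ("GET", "/tasks"),
        HashSet::from(["tasks.get", "tasks.*", "*"]),
    );

    let key: HashSet<&str> = HashSet::from(["tasks.*"]);
    assert!(is_authorized(&matrix, &key, "GET", "/tasks"));
    assert!(!is_authorized(&matrix, &key, "POST", "/keys"));
}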

pub static ALL_ACTIONS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
@@ -95,11 +81,11 @@ async fn error_access_expired_key() {
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(201, code, "{:?}", &response);
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(key);
    server.use_api_key(&key);

    // wait until the key is expired.
    thread::sleep(time::Duration::new(1, 0));
@@ -107,14 +93,8 @@ async fn error_access_expired_key() {
    for (method, route) in AUTHORIZATIONS.keys() {
        let (response, code) = server.dummy_request(method, route).await;

        assert_eq!(
            response,
            INVALID_RESPONSE.clone(),
            "on route: {:?} - {:?}",
            method,
            route
        );
        assert_eq!(403, code, "{:?}", &response);
        assert_eq!(response, INVALID_RESPONSE.clone());
        assert_eq!(code, 403);
    }
}

@@ -131,11 +111,11 @@ async fn error_access_unauthorized_index() {
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(201, code, "{:?}", &response);
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(key);
    server.use_api_key(&key);

    for (method, route) in AUTHORIZATIONS
        .keys()
@@ -144,14 +124,8 @@ async fn error_access_unauthorized_index() {
    {
        let (response, code) = server.dummy_request(method, route).await;

        assert_eq!(
            response,
            INVALID_RESPONSE.clone(),
            "on route: {:?} - {:?}",
            method,
            route
        );
        assert_eq!(403, code, "{:?}", &response);
        assert_eq!(response, INVALID_RESPONSE.clone());
        assert_eq!(code, 403);
    }
}

@@ -159,54 +133,36 @@ async fn error_access_unauthorized_index() {
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_unauthorized_action() {
    let mut server = Server::new_auth().await;

    for ((method, route), action) in AUTHORIZATIONS.iter() {
        // create a new API key letting only the needed action.
        server.use_api_key("MASTER_KEY");

        let content = json!({
            "indexes": ["products"],
            "actions": ALL_ACTIONS.difference(action).collect::<Vec<_>>(),
            "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
        });

        let (response, code) = server.add_api_key(content).await;
        assert_eq!(201, code, "{:?}", &response);
        assert!(response["key"].is_string());

        let key = response["key"].as_str().unwrap();
        server.use_api_key(key);
        let (response, code) = server.dummy_request(method, route).await;

        assert_eq!(
            response,
            INVALID_RESPONSE.clone(),
            "on route: {:?} - {:?}",
            method,
            route
        );
        assert_eq!(403, code, "{:?}", &response);
    }
}
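
Note: granting ALL_ACTIONS.difference(action), that is every action except the ones under test, is how this test proves no other permission accidentally opens the route. The set logic in isolation:

use std::collections::HashSet;

fn main() {
    let all: HashSet<&str> = HashSet::from(["search", "documents.add", "tasks.get"]);
    let needed: HashSet<&str> = HashSet::from(["tasks.get"]);

    // Everything *except* the actions under test: a key built from this
    // set must be refused on the corresponding route.
    let complement: HashSet<&str> = all.difference(&needed).copied().collect();

    assert_eq!(complement.len(), 2);
    assert!(!complement.contains("tasks.get"));
}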

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_master_key() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // master key must have access to all routes.
    for ((method, route), _) in AUTHORIZATIONS.iter() {
    let content = json!({
        "indexes": ["products"],
        "actions": [],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    for ((method, route), action) in AUTHORIZATIONS.iter() {
        server.use_api_key("MASTER_KEY");

        // Patch API key letting all rights but the needed one.
        let content = json!({
            "actions": ALL_ACTIONS.difference(action).collect::<Vec<_>>(),
        });
        let (_, code) = server.patch_api_key(&key, content).await;
        assert_eq!(code, 200);

        server.use_api_key(&key);
        let (response, code) = server.dummy_request(method, route).await;

        assert_ne!(
            response,
            INVALID_RESPONSE.clone(),
            "on route: {:?} - {:?}",
            method,
            route
        );
        assert_ne!(code, 403);
        assert_eq!(response, INVALID_RESPONSE.clone());
        assert_eq!(code, 403);
    }
}

@@ -214,34 +170,36 @@ async fn access_authorized_master_key() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["products"],
        "actions": [],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        for action in actions {
            // create a new API key letting only the needed action.
            server.use_api_key("MASTER_KEY");

            // Patch API key letting only the needed action.
            let content = json!({
                "indexes": ["products"],
                "actions": [action],
                "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
            });

            let (response, code) = server.add_api_key(content).await;
            assert_eq!(201, code, "{:?}", &response);
            assert!(response["key"].is_string());

            let key = response["key"].as_str().unwrap();
            server.use_api_key(key);
            server.use_api_key("MASTER_KEY");
            let (_, code) = server.patch_api_key(&key, content).await;
            assert_eq!(code, 200);

            server.use_api_key(&key);
            let (response, code) = server.dummy_request(method, route).await;

            assert_ne!(
                response,
                INVALID_RESPONSE.clone(),
                "on route: {:?} - {:?} with action: {:?}",
                method,
                route,
                action
            );
            assert_ne!(response, INVALID_RESPONSE.clone());
            assert_ne!(code, 403);
        }
    }
@@ -251,35 +209,36 @@ async fn access_authorized_restricted_index() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": [],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        for action in actions {
            // create a new API key letting only the needed action.
            server.use_api_key("MASTER_KEY");

            // Patch API key letting only the needed action.
            let content = json!({
                "indexes": ["*"],
                "actions": [action],
                "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
            });
            let (_, code) = server.patch_api_key(&key, content).await;
            assert_eq!(code, 200);

            let (response, code) = server.add_api_key(content).await;
            assert_eq!(201, code, "{:?}", &response);
            assert!(response["key"].is_string());

            let key = response["key"].as_str().unwrap();
            server.use_api_key(key);

            server.use_api_key(&key);
            let (response, code) = server.dummy_request(method, route).await;

            assert_ne!(
                response,
                INVALID_RESPONSE.clone(),
                "on route: {:?} - {:?} with action: {:?}",
                method,
                route,
                action
            );
            assert_ne!(response, INVALID_RESPONSE.clone());
            assert_ne!(code, 403);
        }
    }
@@ -289,16 +248,16 @@ async fn access_authorized_no_index_restriction() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_admin_key("MASTER_KEY").await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on `products` index only.
@@ -308,15 +267,15 @@ async fn access_authorized_stats_restricted_index() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(201, code, "{:?}", &response);
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(key);
    server.use_api_key(&key);

    let (response, code) = server.stats().await;
    assert_eq!(200, code, "{:?}", &response);
    assert_eq!(code, 200);

    // key should have access on `products` index.
    assert!(response["indexes"].get("products").is_some());
@@ -329,16 +288,16 @@
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_admin_key("MASTER_KEY").await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on all indexes.
@@ -348,15 +307,15 @@ async fn access_authorized_stats_no_index_restriction() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(201, code, "{:?}", &response);
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(key);
    server.use_api_key(&key);

    let (response, code) = server.stats().await;
    assert_eq!(200, code, "{:?}", &response);
    assert_eq!(code, 200);

    // key should have access on `products` index.
    assert!(response["indexes"].get("products").is_some());
@@ -369,16 +328,16 @@
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_admin_key("MASTER_KEY").await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on `products` index only.
@@ -388,17 +347,17 @@ async fn list_authorized_indexes_restricted_index() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(201, code, "{:?}", &response);
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(key);
    server.use_api_key(&key);

    let (response, code) = server.list_indexes(None, None).await;
    assert_eq!(200, code, "{:?}", &response);
    let (response, code) = server.list_indexes().await;
    assert_eq!(code, 200);

    let response = response["results"].as_array().unwrap();
    let response = response.as_array().unwrap();
    // key should have access on `products` index.
    assert!(response.iter().any(|index| index["uid"] == "products"));

@@ -410,16 +369,16 @@ async fn list_authorized_indexes_restricted_index() {
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_admin_key("MASTER_KEY").await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on all indexes.
@@ -429,17 +388,17 @@ async fn list_authorized_indexes_no_index_restriction() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(201, code, "{:?}", &response);
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(key);
    server.use_api_key(&key);

    let (response, code) = server.list_indexes(None, None).await;
    assert_eq!(200, code, "{:?}", &response);
    let (response, code) = server.list_indexes().await;
    assert_eq!(code, 200);

    let response = response["results"].as_array().unwrap();
    let response = response.as_array().unwrap();
    // key should have access on `products` index.
    assert!(response.iter().any(|index| index["uid"] == "products"));

@@ -450,16 +409,16 @@ async fn list_authorized_indexes_no_index_restriction() {
#[actix_rt::test]
async fn list_authorized_tasks_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_admin_key("MASTER_KEY").await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on `products` index only.
@@ -469,15 +428,15 @@ async fn list_authorized_tasks_restricted_index() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(201, code, "{:?}", &response);
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(key);
    server.use_api_key(&key);

    let (response, code) = server.service.get("/tasks").await;
    assert_eq!(200, code, "{:?}", &response);
    assert_eq!(code, 200);
    println!("{}", response);
    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
@@ -490,16 +449,16 @@ async fn list_authorized_tasks_restricted_index() {
#[actix_rt::test]
async fn list_authorized_tasks_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_admin_key("MASTER_KEY").await;
    server.use_api_key("MASTER_KEY");

    // create index `test`
    let index = server.index("test");
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    // create index `products`
    let index = server.index("products");
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;

    // create key with access on all indexes.
@@ -509,15 +468,15 @@ async fn list_authorized_tasks_no_index_restriction() {
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(201, code, "{:?}", &response);
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(key);
    server.use_api_key(&key);

    let (response, code) = server.service.get("/tasks").await;
    assert_eq!(200, code, "{:?}", &response);
    assert_eq!(code, 200);

    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
@@ -540,12 +499,12 @@ async fn error_creating_index_without_action() {
        "expiresAt": "2050-11-13T00:00:00Z"
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(201, code, "{:?}", &response);
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(key);
    server.use_api_key(&key);

    let expected_error = json!({
        "message": "Index `test` not found.",
@@ -564,8 +523,8 @@ async fn error_creating_index_without_action() {
    ]);

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(202, code, "{:?}", &response);
    let task_id = response["taskUid"].as_u64().unwrap();
    assert_eq!(code, 202, "{:?}", response);
    let task_id = response["uid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;
    assert_eq!(response["status"], "failed");
@@ -575,8 +534,8 @@ async fn error_creating_index_without_action() {
    let settings = json!({ "distinctAttribute": "test"});

    let (response, code) = index.update_settings(settings).await;
    assert_eq!(202, code, "{:?}", &response);
    let task_id = response["taskUid"].as_u64().unwrap();
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;

@@ -585,8 +544,8 @@ async fn error_creating_index_without_action() {

    // try to create a index via add specialized settings route
    let (response, code) = index.update_distinct_attribute(json!("test")).await;
    assert_eq!(202, code, "{:?}", &response);
    let task_id = response["taskUid"].as_u64().unwrap();
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;

@@ -597,102 +556,22 @@ async fn error_creating_index_without_action() {
#[actix_rt::test]
async fn lazy_create_index() {
    let mut server = Server::new_auth().await;

    // create key with access on all indexes.
    let contents = vec![
        json!({
            "indexes": ["*"],
            "actions": ["*"],
            "expiresAt": "2050-11-13T00:00:00Z"
        }),
        json!({
            "indexes": ["*"],
            "actions": ["indexes.*", "documents.*", "settings.*", "tasks.*"],
            "expiresAt": "2050-11-13T00:00:00Z"
        }),
        json!({
            "indexes": ["*"],
            "actions": ["indexes.create", "documents.add", "settings.update", "tasks.get"],
            "expiresAt": "2050-11-13T00:00:00Z"
        }),
    ];

    for content in contents {
        server.use_api_key("MASTER_KEY");
        let (response, code) = server.add_api_key(content).await;
        assert_eq!(201, code, "{:?}", &response);
        assert!(response["key"].is_string());

        // use created key.
        let key = response["key"].as_str().unwrap();
        server.use_api_key(key);

        // try to create a index via add documents route
        let index = server.index("test");
        let documents = json!([
            {
                "id": 1,
                "content": "foo",
            }
        ]);

        let (response, code) = index.add_documents(documents, None).await;
        assert_eq!(202, code, "{:?}", &response);
        let task_id = response["taskUid"].as_u64().unwrap();

        index.wait_task(task_id).await;

        let (response, code) = index.get_task(task_id).await;
        assert_eq!(200, code, "{:?}", &response);
        assert_eq!(response["status"], "succeeded");

        // try to create a index via add settings route
        let index = server.index("test1");
        let settings = json!({ "distinctAttribute": "test"});

        let (response, code) = index.update_settings(settings).await;
        assert_eq!(202, code, "{:?}", &response);
        let task_id = response["taskUid"].as_u64().unwrap();

        index.wait_task(task_id).await;

        let (response, code) = index.get_task(task_id).await;
        assert_eq!(200, code, "{:?}", &response);
        assert_eq!(response["status"], "succeeded");

        // try to create a index via add specialized settings route
        let index = server.index("test2");
        let (response, code) = index.update_distinct_attribute(json!("test")).await;
        assert_eq!(202, code, "{:?}", &response);
        let task_id = response["taskUid"].as_u64().unwrap();

        index.wait_task(task_id).await;

        let (response, code) = index.get_task(task_id).await;
        assert_eq!(200, code, "{:?}", &response);
        assert_eq!(response["status"], "succeeded");
    }
}

#[actix_rt::test]
async fn error_creating_index_without_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // create key with access on all indexes.
    let content = json!({
        "indexes": ["unexpected"],
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": "2050-11-13T00:00:00Z"
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(201, code, "{:?}", &response);
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(key);
    server.use_api_key(&key);

    // try to create a index via add documents route
    let index = server.index("test");
@@ -704,21 +583,38 @@ async fn error_creating_index_without_index() {
    ]);

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(403, code, "{:?}", &response);
    assert_eq!(code, 202, "{:?}", response);
    let task_id = response["uid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");

    // try to create a index via add settings route
    let index = server.index("test1");
    let settings = json!({ "distinctAttribute": "test"});

    let (response, code) = index.update_settings(settings).await;
    assert_eq!(403, code, "{:?}", &response);
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");

    // try to create a index via add specialized settings route
    let index = server.index("test2");
    let (response, code) = index.update_distinct_attribute(json!("test")).await;
    assert_eq!(403, code, "{:?}", &response);
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();

    // try to create a index via create index route
    let index = server.index("test3");
    let (response, code) = index.create(None).await;
    assert_eq!(403, code, "{:?}", &response);
    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");
}
@@ -13,15 +13,6 @@ impl Server {
self.service.api_key = Some(api_key.as_ref().to_string());
}

/// Fetch and use the default admin key for the next http requests.
pub async fn use_admin_key(&mut self, master_key: impl AsRef<str>) {
self.use_api_key(master_key);
let (response, code) = self.list_api_keys().await;
assert_eq!(200, code, "{:?}", response);
let admin_key = &response["results"][1]["key"];
self.use_api_key(admin_key.as_str().unwrap());
}

pub async fn add_api_key(&self, content: Value) -> (Value, StatusCode) {
let url = "/keys";
self.service.post(url, content).await

@@ -19,7 +19,7 @@ async fn error_api_key_bad_content_types() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -91,7 +91,7 @@ async fn error_api_key_empty_content_types() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -163,7 +163,7 @@ async fn error_api_key_missing_content_types() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -227,7 +227,7 @@ async fn error_api_key_empty_payload() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -283,7 +283,7 @@ async fn error_api_key_malformed_payload() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;

@@ -8,15 +8,11 @@ use time::{Duration, OffsetDateTime};

use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};

fn generate_tenant_token(
parent_uid: impl AsRef<str>,
parent_key: impl AsRef<str>,
mut body: HashMap<&str, Value>,
) -> String {
fn generate_tenant_token(parent_key: impl AsRef<str>, mut body: HashMap<&str, Value>) -> String {
use jsonwebtoken::{encode, EncodingKey, Header};

let parent_uid = parent_uid.as_ref();
body.insert("apiKeyUid", json!(parent_uid));
let key_id = &parent_key.as_ref()[..8];
body.insert("apiKeyPrefix", json!(key_id));
encode(
&Header::default(),
&body,
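
For context, both variants of the helper above just sign the claim set as a standard JWT, using the parent API key as the signing secret. A minimal standalone sketch, assuming the `jsonwebtoken` crate (the function name and claims here are illustrative, not part of the diff):

use jsonwebtoken::{encode, EncodingKey, Header};
use serde_json::json;

// Illustrative sketch: sign a set of search rules with the parent key,
// the way the test helper above does.
fn sketch_tenant_token(parent_key: &str) -> String {
    let claims = json!({
        "searchRules": ["*"],
        "apiKeyPrefix": &parent_key[..8],
    });
    encode(
        &Header::default(),
        &claims,
        &EncodingKey::from_secret(parent_key.as_bytes()),
    )
    .unwrap()
}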
@@ -115,10 +111,10 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
]
});

macro_rules! compute_authorized_search {
macro_rules! compute_autorized_search {
($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
server.use_api_key("MASTER_KEY");
let index = server.index("sales");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@@ -134,10 +130,9 @@ macro_rules! compute_authorized_search {
let (response, code) = server.add_api_key(key_content.clone()).await;
assert_eq!(code, 201);
let key = response["key"].as_str().unwrap();
let uid = response["uid"].as_str().unwrap();

for tenant_token in $tenant_tokens.iter() {
let web_token = generate_tenant_token(&uid, &key, tenant_token.clone());
let web_token = generate_tenant_token(&key, tenant_token.clone());
server.use_api_key(&web_token);
let index = server.index("sales");
index
@@ -165,7 +160,7 @@ macro_rules! compute_authorized_search {
macro_rules! compute_forbidden_search {
($tenant_tokens:expr, $parent_keys:expr) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
server.use_api_key("MASTER_KEY");
let index = server.index("sales");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@@ -177,10 +172,9 @@ macro_rules! compute_forbidden_search {
let (response, code) = server.add_api_key(key_content.clone()).await;
assert_eq!(code, 201, "{:?}", response);
let key = response["key"].as_str().unwrap();
let uid = response["uid"].as_str().unwrap();

for tenant_token in $tenant_tokens.iter() {
let web_token = generate_tenant_token(&uid, &key, tenant_token.clone());
let web_token = generate_tenant_token(&key, tenant_token.clone());
server.use_api_key(&web_token);
let index = server.index("sales");
index
@@ -251,7 +245,7 @@ async fn search_authorized_simple_token() {
},
];

compute_authorized_search!(tenant_tokens, {}, 5);
compute_autorized_search!(tenant_tokens, {}, 5);
}

#[actix_rt::test]
@@ -305,7 +299,7 @@ async fn search_authorized_filter_token() {
},
];

compute_authorized_search!(tenant_tokens, {}, 3);
compute_autorized_search!(tenant_tokens, {}, 3);
}

#[actix_rt::test]
@@ -359,7 +353,7 @@ async fn filter_search_authorized_filter_token() {
},
];

compute_authorized_search!(tenant_tokens, "color = yellow", 1);
compute_autorized_search!(tenant_tokens, "color = yellow", 1);
}

#[actix_rt::test]
@@ -467,13 +461,12 @@ async fn error_access_forbidden_routes() {
assert!(response["key"].is_string());

let key = response["key"].as_str().unwrap();
let uid = response["uid"].as_str().unwrap();

let tenant_token = hashmap! {
"searchRules" => json!(["*"]),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
};
let web_token = generate_tenant_token(uid, key, tenant_token);
let web_token = generate_tenant_token(&key, tenant_token);
server.use_api_key(&web_token);

for ((method, route), actions) in AUTHORIZATIONS.iter() {
@@ -503,13 +496,12 @@ async fn error_access_expired_parent_key() {
assert!(response["key"].is_string());

let key = response["key"].as_str().unwrap();
let uid = response["uid"].as_str().unwrap();

let tenant_token = hashmap! {
"searchRules" => json!(["*"]),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
};
let web_token = generate_tenant_token(uid, key, tenant_token);
let web_token = generate_tenant_token(&key, tenant_token);
server.use_api_key(&web_token);

// test search request while parent_key is not expired
@@ -546,13 +538,12 @@ async fn error_access_modified_token() {
assert!(response["key"].is_string());

let key = response["key"].as_str().unwrap();
let uid = response["uid"].as_str().unwrap();

let tenant_token = hashmap! {
"searchRules" => json!(["products"]),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
};
let web_token = generate_tenant_token(uid, key, tenant_token);
let web_token = generate_tenant_token(&key, tenant_token);
server.use_api_key(&web_token);

// test search request while web_token is valid
@@ -567,7 +558,7 @@ async fn error_access_modified_token() {
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
};

let alt = generate_tenant_token(uid, key, tenant_token);
let alt = generate_tenant_token(&key, tenant_token);
let altered_token = [
web_token.split('.').next().unwrap(),
alt.split('.').nth(1).unwrap(),
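
The splicing above relies on a JWT being three dot-separated segments. A short sketch of the same idea, with illustrative names:

// header.payload.signature: swapping in a foreign payload while keeping the
// original signature must be rejected during verification.
fn splice_tokens(web_token: &str, alt: &str) -> String {
    [
        web_token.split('.').next().unwrap(), // original header
        alt.split('.').nth(1).unwrap(),       // payload from another token
        web_token.split('.').nth(2).unwrap(), // now-mismatched signature
    ]
    .join(".")
}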
@@ -1,16 +1,32 @@
use std::{
fmt::Write,
panic::{catch_unwind, resume_unwind, UnwindSafe},
time::Duration,
};

use actix_web::http::StatusCode;
use paste::paste;
use serde_json::{json, Value};
use tokio::time::sleep;
use urlencoding::encode;

use super::service::Service;

macro_rules! make_settings_test_routes {
($($name:ident),+) => {
$(paste! {
pub async fn [<update_$name>](&self, value: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
self.service.post(url, value).await
}

pub async fn [<get_$name>](&self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
self.service.get(url).await
}
})*
};
}

pub struct Index<'a> {
pub uid: String,
pub service: &'a Service,
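
The macro above leans on the `paste` crate to build the method names; invoking it as `make_settings_test_routes!(distinct_attribute)` expands to roughly this (a sketch, not the literal expansion):

pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
    // `distinct_attribute` becomes the `distinct-attribute` route segment.
    let url = format!("/indexes/{}/settings/distinct-attribute", encode(self.uid.as_ref()));
    self.service.post(url, value).await
}

pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) {
    let url = format!("/indexes/{}/settings/distinct-attribute", encode(self.uid.as_ref()));
    self.service.get(url).await
}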
@@ -30,7 +46,7 @@ impl Index<'_> {
.post_str(url, include_str!("../assets/test_set.json"))
.await;
assert_eq!(code, 202);
let update_id = response["taskUid"].as_i64().unwrap();
let update_id = response["uid"].as_i64().unwrap();
self.wait_task(update_id as u64).await;
update_id as u64
}
@@ -49,7 +65,7 @@ impl Index<'_> {
});
let url = format!("/indexes/{}", encode(self.uid.as_ref()));

self.service.patch(url, body).await
self.service.put(url, body).await
}

pub async fn delete(&self) -> (Value, StatusCode) {
@@ -90,67 +106,55 @@ impl Index<'_> {
}

pub async fn wait_task(&self, update_id: u64) -> Value {
// try several times to get status, or panic to not wait forever
// try 10 times to get status, or panic to not wait forever
let url = format!("/tasks/{}", update_id);
for _ in 0..100 {
for _ in 0..10 {
let (response, status_code) = self.service.get(&url).await;
assert_eq!(200, status_code, "response: {}", response);
assert_eq!(status_code, 200, "response: {}", response);

if response["status"] == "succeeded" || response["status"] == "failed" {
return response;
}

// wait 0.5 second.
sleep(Duration::from_millis(500)).await;
sleep(Duration::from_secs(1)).await;
}
panic!("Timeout waiting for update id");
}

pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) {
let url = format!("/tasks/{}", update_id);
let url = format!("/indexes/{}/tasks/{}", self.uid, update_id);
self.service.get(url).await
}

pub async fn list_tasks(&self) -> (Value, StatusCode) {
let url = format!("/tasks?indexUid={}", self.uid);
self.service.get(url).await
}

pub async fn filtered_tasks(&self, type_: &[&str], status: &[&str]) -> (Value, StatusCode) {
let mut url = format!("/tasks?indexUid={}", self.uid);
if !type_.is_empty() {
let _ = write!(url, "&type={}", type_.join(","));
}
if !status.is_empty() {
let _ = write!(url, "&status={}", status.join(","));
}
let url = format!("/indexes/{}/tasks", self.uid);
self.service.get(url).await
}

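Both versions of `wait_task` above follow the same poll-and-sleep pattern; only the attempt count and interval differ. A condensed sketch of that pattern, with illustrative names:

use std::time::Duration;
use tokio::time::sleep;

// Poll the task endpoint until a terminal status, with a hard cap so a
// stuck task fails the test instead of hanging it forever.
async fn poll_until_done(service: &Service, task_id: u64) -> Value {
    let url = format!("/tasks/{}", task_id);
    for _ in 0..100 {
        let (response, _code) = service.get(&url).await;
        if response["status"] == "succeeded" || response["status"] == "failed" {
            return response;
        }
        sleep(Duration::from_millis(500)).await;
    }
    panic!("task {} never reached a terminal status", task_id)
}
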
pub async fn get_document(
&self,
id: u64,
options: Option<GetDocumentOptions>,
_options: Option<GetDocumentOptions>,
) -> (Value, StatusCode) {
let mut url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
if let Some(fields) = options.and_then(|o| o.fields) {
let _ = write!(url, "?fields={}", fields.join(","));
}
let url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
self.service.get(url).await
}

pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) {
let mut url = format!("/indexes/{}/documents?", encode(self.uid.as_ref()));
if let Some(limit) = options.limit {
let _ = write!(url, "limit={}&", limit);
url.push_str(&format!("limit={}&", limit));
}

if let Some(offset) = options.offset {
let _ = write!(url, "offset={}&", offset);
url.push_str(&format!("offset={}&", offset));
}

if let Some(attributes_to_retrieve) = options.attributes_to_retrieve {
let _ = write!(url, "fields={}&", attributes_to_retrieve.join(","));
url.push_str(&format!(
"attributesToRetrieve={}&",
attributes_to_retrieve.join(",")
));
}

self.service.get(url).await
@@ -183,7 +187,7 @@ impl Index<'_> {

pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings", encode(self.uid.as_ref()));
self.service.patch(url, settings).await
self.service.post(url, settings).await
}

pub async fn delete_settings(&self) -> (Value, StatusCode) {
@@ -222,33 +226,15 @@ impl Index<'_> {
}

pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
let params = yaup::to_string(&query).unwrap();
let params = serde_url_params::to_string(&query).unwrap();
let url = format!("/indexes/{}/search?{}", encode(self.uid.as_ref()), params);
self.service.get(url).await
}

pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/settings/{}",
encode(self.uid.as_ref()),
"distinct-attribute"
);
self.service.put(url, value).await
}

pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/settings/{}",
encode(self.uid.as_ref()),
"distinct-attribute"
);
self.service.get(url).await
}
make_settings_test_routes!(distinct_attribute);
}

pub struct GetDocumentOptions {
pub fields: Option<Vec<&'static str>>,
}
pub struct GetDocumentOptions;

#[derive(Debug, Default)]
pub struct GetAllDocumentsOptions {
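
A note on the two URL-building styles in `get_all_documents` above: `write!` into a `String` is infallible, so the `let _ =` just discards the always-`Ok` result, while `push_str(&format!(...))` allocates an extra temporary string. A minimal sketch with illustrative names:

use std::fmt::Write;

fn build_documents_url(uid: &str, limit: Option<usize>, offset: Option<usize>) -> String {
    let mut url = format!("/indexes/{}/documents?", uid);
    if let Some(limit) = limit {
        // Writing into a String cannot fail, hence the ignored Result.
        let _ = write!(url, "limit={}&", limit);
    }
    if let Some(offset) = offset {
        let _ = write!(url, "offset={}&", offset);
    }
    url
}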
@@ -3,7 +3,7 @@ pub mod server;
pub mod service;

pub use index::{GetAllDocumentsOptions, GetDocumentOptions};
pub use server::{default_settings, Server};
pub use server::Server;

/// Performs a search test on both post and get routes
#[macro_export]

@@ -1,6 +1,4 @@
#![allow(dead_code)]

use clap::Parser;
use std::path::Path;

use actix_web::http::StatusCode;
@@ -52,13 +50,16 @@ impl Server {
}
}

pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
pub async fn new_auth() -> Self {
let dir = TempDir::new().unwrap();

if cfg!(windows) {
std::env::set_var("TMP", TEST_TEMP_DIR.path());
} else {
std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
}

let mut options = default_settings(dir.path());
options.master_key = Some("MASTER_KEY".to_string());

let meilisearch = setup_meilisearch(&options).unwrap();
@@ -76,15 +77,9 @@ impl Server {
}
}

pub async fn new_auth() -> Self {
let dir = TempDir::new().unwrap();
let options = default_settings(dir.path());
Self::new_auth_with_options(options, dir).await
}

pub async fn new_with_options(options: Opt) -> Result<Self, anyhow::Error> {
let meilisearch = setup_meilisearch(&options)?;
let auth = AuthController::new(&options.db_path, &options.master_key)?;
pub async fn new_with_options(options: Opt) -> Self {
let meilisearch = setup_meilisearch(&options).unwrap();
let auth = AuthController::new(&options.db_path, &options.master_key).unwrap();
let service = Service {
meilisearch,
auth,
@@ -92,10 +87,10 @@ impl Server {
api_key: None,
};

Ok(Server {
Server {
service,
_dir: None,
})
}
}

/// Returns a view to an index. There is no guarantee that the index exists.
@@ -106,27 +101,8 @@ impl Server {
}
}

pub async fn list_indexes(
&self,
offset: Option<usize>,
limit: Option<usize>,
) -> (Value, StatusCode) {
let (offset, limit) = (
offset.map(|offset| format!("offset={offset}")),
limit.map(|limit| format!("limit={limit}")),
);
let query_parameter = offset
.as_ref()
.zip(limit.as_ref())
.map(|(offset, limit)| format!("{offset}&{limit}"))
.or_else(|| offset.xor(limit));
if let Some(query_parameter) = query_parameter {
self.service
.get(format!("/indexes?{query_parameter}"))
.await
} else {
self.service.get("/indexes").await
}
pub async fn list_indexes(&self) -> (Value, StatusCode) {
self.service.get("/indexes").await
}

pub async fn version(&self) -> (Value, StatusCode) {
@@ -150,20 +126,36 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
Opt {
db_path: dir.as_ref().join("db"),
dumps_dir: dir.as_ref().join("dump"),
http_addr: "127.0.0.1:7700".to_owned(),
master_key: None,
env: "development".to_owned(),
#[cfg(all(not(debug_assertions), feature = "analytics"))]
no_analytics: true,
max_index_size: Byte::from_unit(100.0, ByteUnit::MiB).unwrap(),
max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(),
max_index_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
max_task_db_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(),
ssl_cert_path: None,
ssl_key_path: None,
ssl_auth_path: None,
ssl_ocsp_path: None,
ssl_require_auth: false,
ssl_resumption: false,
ssl_tickets: false,
import_snapshot: None,
ignore_missing_snapshot: false,
ignore_snapshot_if_db_exists: false,
snapshot_dir: ".".into(),
schedule_snapshot: false,
snapshot_interval_sec: 0,
import_dump: None,
ignore_missing_dump: false,
ignore_dump_if_db_exists: false,
indexer_options: IndexerOpts {
// memory has to be unlimited because several meilisearch are running in test context.
max_indexing_memory: MaxMemory::unlimited(),
..Parser::parse_from(None as Option<&str>)
max_memory: MaxMemory::unlimited(),
..Default::default()
},
#[cfg(feature = "metrics")]
enable_metrics_route: true,
..Parser::parse_from(None as Option<&str>)
log_level: "off".into(),
scheduler_options: meilisearch_lib::options::SchedulerConfig::default(),
}
}

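The pagination logic removed from `list_indexes` above combines two optional query parameters: `zip` covers the case where both are set, `xor` the case where exactly one is. A standalone sketch of that combination:

fn pagination_query(offset: Option<usize>, limit: Option<usize>) -> Option<String> {
    let offset = offset.map(|o| format!("offset={o}"));
    let limit = limit.map(|l| format!("limit={l}"));
    offset
        .as_ref()
        .zip(limit.as_ref())
        .map(|(o, l)| format!("{o}&{l}")) // both present
        .or_else(|| offset.xor(limit))    // exactly one present
}
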
@@ -18,7 +18,7 @@ impl Service {
&self.meilisearch,
&self.auth,
true,
self.options,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;
@@ -46,7 +46,7 @@ impl Service {
&self.meilisearch,
&self.auth,
true,
self.options,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;
@@ -72,7 +72,7 @@ impl Service {
&self.meilisearch,
&self.auth,
true,
self.options,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;
@@ -95,7 +95,7 @@ impl Service {
&self.meilisearch,
&self.auth,
true,
self.options,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;
@@ -118,7 +118,7 @@ impl Service {
&self.meilisearch,
&self.auth,
true,
self.options,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;
@@ -141,7 +141,7 @@ impl Service {
&self.meilisearch,
&self.auth,
true,
self.options,
&self.options,
analytics::MockAnalytics::new(&self.options).0
))
.await;

@@ -7,45 +7,23 @@ use actix_web::test;
use meilisearch_http::{analytics, create_app};
use serde_json::{json, Value};

enum HttpVerb {
Put,
Patch,
Post,
Get,
Delete,
}

impl HttpVerb {
fn test_request(&self) -> test::TestRequest {
match self {
HttpVerb::Put => test::TestRequest::put(),
HttpVerb::Patch => test::TestRequest::patch(),
HttpVerb::Post => test::TestRequest::post(),
HttpVerb::Get => test::TestRequest::get(),
HttpVerb::Delete => test::TestRequest::delete(),
}
}
}

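The helper above lets one table drive requests with different verbs; usage looks roughly like this (the route is illustrative):

let req = HttpVerb::Put
    .test_request()
    .uri("/indexes/doggo/settings/stop-words")
    .insert_header(("content-type", "application/json"))
    .to_request();
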
#[actix_rt::test]
async fn error_json_bad_content_type() {
use HttpVerb::{Patch, Post, Put};

let routes = [
// all the routes except the dumps that can be created without any body or content-type
// all the POST routes except the dumps that can be created without any body or content-type
// and the search that is not a strict json
(Post, "/indexes"),
(Post, "/indexes/doggo/documents/delete-batch"),
(Post, "/indexes/doggo/search"),
(Patch, "/indexes/doggo/settings"),
(Put, "/indexes/doggo/settings/displayed-attributes"),
(Put, "/indexes/doggo/settings/distinct-attribute"),
(Put, "/indexes/doggo/settings/filterable-attributes"),
(Put, "/indexes/doggo/settings/ranking-rules"),
(Put, "/indexes/doggo/settings/searchable-attributes"),
(Put, "/indexes/doggo/settings/sortable-attributes"),
(Put, "/indexes/doggo/settings/stop-words"),
(Put, "/indexes/doggo/settings/synonyms"),
"/indexes",
"/indexes/doggo/documents/delete-batch",
"/indexes/doggo/search",
"/indexes/doggo/settings",
"/indexes/doggo/settings/displayed-attributes",
"/indexes/doggo/settings/distinct-attribute",
"/indexes/doggo/settings/filterable-attributes",
"/indexes/doggo/settings/ranking-rules",
"/indexes/doggo/settings/searchable-attributes",
"/indexes/doggo/settings/sortable-attributes",
"/indexes/doggo/settings/stop-words",
"/indexes/doggo/settings/synonyms",
];
let bad_content_types = [
"application/csv",
@@ -63,15 +41,14 @@ async fn error_json_bad_content_type() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
for (verb, route) in routes {
for route in routes {
// Good content-type, we probably have an error since we didn't send anything in the json
// so we only ensure we didn't get a bad media type error.
let req = verb
.test_request()
let req = test::TestRequest::post()
.uri(route)
.set_payload(document)
.insert_header(("content-type", "application/json"))
@@ -82,8 +59,7 @@ async fn error_json_bad_content_type() {
"calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);

// No content-type.
let req = verb
.test_request()
let req = test::TestRequest::post()
.uri(route)
.set_payload(document)
.to_request();
@@ -106,8 +82,7 @@ async fn error_json_bad_content_type() {

for bad_content_type in bad_content_types {
// Always bad content-type
let req = verb
.test_request()
let req = test::TestRequest::post()
.uri(route)
.set_payload(document.to_string())
.insert_header(("content-type", bad_content_type))
@@ -146,7 +121,7 @@ async fn extract_actual_content_type() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;

@@ -1,6 +1,5 @@
use crate::common::{GetAllDocumentsOptions, Server};
use actix_web::test;

use meilisearch_http::{analytics, create_app};
use serde_json::{json, Value};
use time::{format_description::well_known::Rfc3339, OffsetDateTime};
@@ -21,7 +20,7 @@ async fn add_documents_test_json_content_types() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -36,7 +35,7 @@ async fn add_documents_test_json_content_types() {
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 202);
assert_eq!(response["taskUid"], 0);
assert_eq!(response["uid"], 0);

// put
let req = test::TestRequest::put()
@@ -49,52 +48,7 @@ async fn add_documents_test_json_content_types() {
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 202);
assert_eq!(response["taskUid"], 1);
}

/// Here we try to send a single document instead of an array with a single document inside.
#[actix_rt::test]
async fn add_single_document_test_json_content_types() {
let document = json!({
"id": 1,
"content": "Bouvier Bernois",
});

// this is what is expected and should work
let server = Server::new().await;
let app = test::init_service(create_app!(
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
// post
let req = test::TestRequest::post()
.uri("/indexes/dog/documents")
.set_payload(document.to_string())
.insert_header(("content-type", "application/json"))
.to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 202);
assert_eq!(response["taskUid"], 0);

// put
let req = test::TestRequest::put()
.uri("/indexes/dog/documents")
.set_payload(document.to_string())
.insert_header(("content-type", "application/json"))
.to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 202);
assert_eq!(response["taskUid"], 1);
assert_eq!(response["uid"], 1);
}

/// any other content-type must be refused
@@ -112,7 +66,7 @@ async fn error_add_documents_test_bad_content_types() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -180,7 +134,7 @@ async fn error_add_documents_test_no_content_type() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -240,7 +194,7 @@ async fn error_add_malformed_csv_documents() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -258,7 +212,7 @@ async fn error_add_malformed_csv_documents() {
assert_eq!(
response["message"],
json!(
r#"The `csv` payload provided is malformed: `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`."#
r#"The `csv` payload provided is malformed. `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`."#
)
);
assert_eq!(response["code"], json!("malformed_payload"));
@@ -282,7 +236,7 @@ async fn error_add_malformed_csv_documents() {
assert_eq!(
response["message"],
json!(
r#"The `csv` payload provided is malformed: `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`."#
r#"The `csv` payload provided is malformed. `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`."#
)
);
assert_eq!(response["code"], json!("malformed_payload"));
@@ -302,7 +256,7 @@ async fn error_add_malformed_json_documents() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -353,56 +307,6 @@ async fn error_add_malformed_json_documents() {
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);

// truncate

// length = 100
let long = "0123456789".repeat(10);

let document = format!("\"{}\"", long);
let req = test::TestRequest::put()
.uri("/indexes/dog/documents")
.set_payload(document)
.insert_header(("content-type", "application/json"))
.to_request();
let res = test::call_service(&app, req).await;
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 400);
assert_eq!(
response["message"],
json!(
r#"The `json` payload provided is malformed. `Couldn't serialize document value: data did not match any variant of untagged enum Either`."#
)
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);

// add one more char to the long string to test if the truncating works.
let document = format!("\"{}m\"", long);
let req = test::TestRequest::put()
.uri("/indexes/dog/documents")
.set_payload(document)
.insert_header(("content-type", "application/json"))
.to_request();
let res = test::call_service(&app, req).await;
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 400);
assert_eq!(
response["message"],
json!("The `json` payload provided is malformed. `Couldn't serialize document value: data did not match any variant of untagged enum Either`.")
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
}

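The truncation check above hinges on payload length: the two payloads differ by a single character around the reporting threshold. In outline:

let long = "0123456789".repeat(10);       // exactly 100 characters
let fits = format!("\"{}\"", long);       // reported whole in the error message
let truncated = format!("\"{}m\"", long); // one char longer, should be truncated
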
#[actix_rt::test]
@@ -414,7 +318,7 @@ async fn error_add_malformed_ndjson_documents() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -432,7 +336,7 @@ async fn error_add_malformed_ndjson_documents() {
assert_eq!(
response["message"],
json!(
r#"The `ndjson` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 2 column 2`."#
r#"The `ndjson` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 1 column 2`."#
)
);
assert_eq!(response["code"], json!("malformed_payload"));
@@ -455,7 +359,9 @@ async fn error_add_malformed_ndjson_documents() {
assert_eq!(status_code, 400);
assert_eq!(
response["message"],
json!("The `ndjson` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 2 column 2`.")
json!(
r#"The `ndjson` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 1 column 2`."#
)
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
@@ -474,7 +380,7 @@ async fn error_add_missing_payload_csv_documents() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -526,7 +432,7 @@ async fn error_add_missing_payload_json_documents() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -578,7 +484,7 @@ async fn error_add_missing_payload_ndjson_documents() {
&server.service.meilisearch,
&server.service.auth,
true,
server.service.options,
&server.service.options,
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
@@ -641,7 +547,7 @@ async fn add_documents_no_index_creation() {

let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["taskUid"], 0);
assert_eq!(response["uid"], 0);
/*
* currently we don’t check these fields to stay ISO with meilisearch
* assert_eq!(response["status"], "pending");
@@ -657,7 +563,7 @@ async fn add_documents_no_index_creation() {
assert_eq!(code, 200);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["uid"], 0);
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["type"], "documentAddition");
assert_eq!(response["details"]["receivedDocuments"], 1);
assert_eq!(response["details"]["indexedDocuments"], 1);

@@ -667,7 +573,7 @@ async fn add_documents_no_index_creation() {
OffsetDateTime::parse(response["enqueuedAt"].as_str().unwrap(), &Rfc3339).unwrap();
assert!(processed_at > enqueued_at);

// index was created, and primary key was inferred.
// index was created, and primary key was infered.
let (response, code) = index.get().await;
assert_eq!(code, 200);
assert_eq!(response["primaryKey"], "id");
@@ -680,7 +586,7 @@ async fn error_document_add_create_index_bad_uid() {
let (response, code) = index.add_documents(json!([{"id": 1}]), None).await;

let expected_response = json!({
"message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
@@ -697,7 +603,7 @@ async fn error_document_update_create_index_bad_uid() {
let (response, code) = index.update_documents(json!([{"id": 1}]), None).await;

let expected_response = json!({
"message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
@@ -727,7 +633,7 @@ async fn document_addition_with_primary_key() {
assert_eq!(code, 200);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["uid"], 0);
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["type"], "documentAddition");
assert_eq!(response["details"]["receivedDocuments"], 1);
assert_eq!(response["details"]["indexedDocuments"], 1);

@@ -756,7 +662,7 @@ async fn document_update_with_primary_key() {
assert_eq!(code, 200);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["uid"], 0);
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["type"], "documentPartial");
assert_eq!(response["details"]["indexedDocuments"], 1);
assert_eq!(response["details"]["receivedDocuments"], 1);

@@ -860,7 +766,7 @@ async fn add_larger_dataset() {
let (response, code) = index.get_task(update_id).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["type"], "documentAddition");
assert_eq!(response["details"]["indexedDocuments"], 77);
assert_eq!(response["details"]["receivedDocuments"], 77);
let (response, code) = index
@@ -869,8 +775,8 @@ async fn add_larger_dataset() {
..Default::default()
})
.await;
assert_eq!(code, 200, "failed with `{}`", response);
assert_eq!(response["results"].as_array().unwrap().len(), 77);
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 77);
}

#[actix_rt::test]
@@ -882,7 +788,7 @@ async fn update_larger_dataset() {
index.wait_task(0).await;
let (response, code) = index.get_task(0).await;
assert_eq!(code, 200);
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["type"], "documentPartial");
assert_eq!(response["details"]["indexedDocuments"], 77);
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
@@ -891,7 +797,7 @@ async fn update_larger_dataset() {
})
.await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 77);
assert_eq!(response.as_array().unwrap().len(), 77);
}

#[actix_rt::test]
@@ -910,12 +816,7 @@ async fn error_add_documents_bad_document_id() {
let (response, code) = index.get_task(1).await;
assert_eq!(code, 200);
assert_eq!(response["status"], json!("failed"));
assert_eq!(
response["error"]["message"],
json!(
r#"Document identifier `"foo & bar"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."#
)
);
assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
assert_eq!(response["error"]["code"], json!("invalid_document_id"));
assert_eq!(response["error"]["type"], json!("invalid_request"));
assert_eq!(
@@ -938,12 +839,7 @@ async fn error_update_documents_bad_document_id() {
index.update_documents(documents, None).await;
let response = index.wait_task(1).await;
assert_eq!(response["status"], json!("failed"));
assert_eq!(
response["error"]["message"],
json!(
r#"Document identifier `"foo & bar"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."#
)
);
assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
assert_eq!(response["error"]["code"], json!("invalid_document_id"));
assert_eq!(response["error"]["type"], json!("invalid_request"));
assert_eq!(
@@ -1043,7 +939,7 @@ async fn error_document_field_limit_reached() {
}

#[actix_rt::test]
async fn add_documents_invalid_geo_field() {
async fn error_add_documents_invalid_geo_field() {
let server = Server::new().await;
let index = server.index("test");
index.create(Some("id")).await;
@@ -1063,6 +959,15 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.get_task(2).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "failed");

let expected_error = json!({
"message": r#"The document with the id: `11` contains an invalid _geo field: `foobar`."#,
"code": "invalid_geo_field",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_geo_field"
});

assert_eq!(response["error"], expected_error);
}

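For contrast with the invalid `foobar` value above, a well-formed `_geo` field is an object carrying `lat` and `lng` coordinates; an illustrative document:

let valid_document = json!({
    "id": 11,
    "_geo": { "lat": 45.76, "lng": 4.83 } // coordinates are illustrative
});
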
#[actix_rt::test]
@@ -1141,62 +1046,3 @@ async fn add_documents_with_primary_key_twice() {
let (response, _code) = index.get_task(1).await;
assert_eq!(response["status"], "succeeded");
}

#[actix_rt::test]
async fn batch_several_documents_addition() {
let server = Server::new().await;
let index = server.index("test");

let mut documents: Vec<_> = (0..150usize)
.into_iter()
.map(|id| {
json!(
{
"id": id,
"title": "foo",
"desc": "bar"
}
)
})
.collect();

documents[100] = json!({"title": "error", "desc": "error"});

// enqueue batch of documents
let mut waiter = Vec::new();
for chunk in documents.chunks(30) {
waiter.push(index.add_documents(json!(chunk), Some("id")));
}

// wait for the first batch of documents to finish
futures::future::join_all(waiter).await;
index.wait_task(4).await;

// run a second completely failing batch
documents[40] = json!({"title": "error", "desc": "error"});
documents[70] = json!({"title": "error", "desc": "error"});
documents[130] = json!({"title": "error", "desc": "error"});
let mut waiter = Vec::new();
for chunk in documents.chunks(30) {
waiter.push(index.add_documents(json!(chunk), Some("id")));
}
// wait for the second batch of documents to finish
futures::future::join_all(waiter).await;
index.wait_task(9).await;

let (response, _code) = index.filtered_tasks(&[], &["failed"]).await;

// Check if only the 6th task failed
println!("{}", &response);
assert_eq!(response["results"].as_array().unwrap().len(), 5);

// Check if there are exactly 120 documents (150 - 30) in the index
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(200),
..Default::default()
})
.await;
assert_eq!(code, 200, "failed with `{}`", response);
assert_eq!(response["results"].as_array().unwrap().len(), 120);
}

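The batching above pushes every chunk's future into `waiter` and awaits them together, so the uploads are enqueued concurrently; the same shape, condensed:

use futures::future::join_all;

// Each 30-document chunk becomes its own addition task; join_all awaits
// all of the enqueue requests at once.
let pending: Vec<_> = documents
    .chunks(30)
    .map(|chunk| index.add_documents(json!(chunk), Some("id")))
    .collect();
join_all(pending).await;
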
@@ -72,7 +72,7 @@ async fn clear_all_documents() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
assert!(response.as_array().unwrap().is_empty());
}

#[actix_rt::test]
@@ -89,7 +89,7 @@ async fn clear_all_documents_empty_index() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
assert!(response.as_array().unwrap().is_empty());
}

#[actix_rt::test]
@@ -125,8 +125,8 @@ async fn delete_batch() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
assert_eq!(response["results"][0]["id"], json!(3));
assert_eq!(response.as_array().unwrap().len(), 1);
assert_eq!(response.as_array().unwrap()[0]["id"], 3);
}

#[actix_rt::test]
@@ -143,5 +143,5 @@ async fn delete_no_document_batch() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
assert_eq!(response.as_array().unwrap().len(), 3);
}

@@ -1,4 +1,5 @@
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
use crate::common::GetAllDocumentsOptions;
use crate::common::Server;

use serde_json::json;

@@ -38,51 +39,19 @@ async fn get_document() {
let documents = serde_json::json!([
{
"id": 0,
"nested": { "content": "foobar" },
"content": "foobar",
}
]);
let (_, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(1).await;
index.wait_task(0).await;
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(
response,
serde_json::json!({
serde_json::json!( {
"id": 0,
"nested": { "content": "foobar" },
})
);

let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["id"]),
}),
)
.await;
assert_eq!(code, 200);
assert_eq!(
response,
serde_json::json!({
"id": 0,
})
);

let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["nested.content"]),
}),
)
.await;
assert_eq!(code, 200);
assert_eq!(
response,
serde_json::json!({
"nested": { "content": "foobar" },
"content": "foobar",
})
);
}
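
The `fields` option exercised above simply becomes a query parameter on the document route, e.g. `GET /indexes/test/documents/0?fields=id,nested.content`; combining several fields in one call looks like:

let (response, code) = index
    .get_document(
        0,
        Some(GetDocumentOptions {
            fields: Some(vec!["id", "nested.content"]), // illustrative selection
        }),
    )
    .await;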
@@ -119,7 +88,7 @@ async fn get_no_document() {
|
||||
.get_all_documents(GetAllDocumentsOptions::default())
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert!(response["results"].as_array().unwrap().is_empty());
|
||||
assert!(response.as_array().unwrap().is_empty());
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -132,7 +101,7 @@ async fn get_all_documents_no_options() {
|
||||
.get_all_documents(GetAllDocumentsOptions::default())
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
let arr = response["results"].as_array().unwrap();
|
||||
let arr = response.as_array().unwrap();
|
||||
assert_eq!(arr.len(), 20);
|
||||
let first = serde_json::json!({
|
||||
"id":0,
|
||||
@@ -168,11 +137,8 @@ async fn test_get_all_documents_limit() {
|
||||
})
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["results"].as_array().unwrap().len(), 5);
|
||||
assert_eq!(response["results"][0]["id"], json!(0));
|
||||
assert_eq!(response["offset"], json!(0));
|
||||
assert_eq!(response["limit"], json!(5));
|
||||
assert_eq!(response["total"], json!(77));
|
||||
assert_eq!(response.as_array().unwrap().len(), 5);
|
||||
assert_eq!(response.as_array().unwrap()[0]["id"], 0);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -188,11 +154,8 @@ async fn test_get_all_documents_offset() {
|
||||
})
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
||||
assert_eq!(response["results"][0]["id"], json!(5));
|
||||
assert_eq!(response["offset"], json!(5));
|
||||
assert_eq!(response["limit"], json!(20));
|
||||
assert_eq!(response["total"], json!(77));
|
||||
assert_eq!(response.as_array().unwrap().len(), 20);
|
||||
assert_eq!(response.as_array().unwrap()[0]["id"], 13);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -208,14 +171,20 @@ async fn test_get_all_documents_attributes_to_retrieve() {
|
||||
})
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
||||
for results in response["results"].as_array().unwrap() {
|
||||
assert_eq!(results.as_object().unwrap().keys().count(), 1);
|
||||
assert!(results["name"] != json!(null));
|
||||
}
|
||||
assert_eq!(response["offset"], json!(0));
|
||||
assert_eq!(response["limit"], json!(20));
|
||||
assert_eq!(response["total"], json!(77));
|
||||
assert_eq!(response.as_array().unwrap().len(), 20);
|
||||
assert_eq!(
|
||||
response.as_array().unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.keys()
|
||||
.count(),
|
||||
1
|
||||
);
|
||||
assert!(response.as_array().unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.get("name")
|
||||
.is_some());
|
||||
|
||||
let (response, code) = index
|
||||
.get_all_documents(GetAllDocumentsOptions {
|
||||
@@ -224,13 +193,15 @@ async fn test_get_all_documents_attributes_to_retrieve() {
|
||||
})
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
||||
for results in response["results"].as_array().unwrap() {
|
||||
assert_eq!(results.as_object().unwrap().keys().count(), 0);
|
||||
}
|
||||
assert_eq!(response["offset"], json!(0));
|
||||
assert_eq!(response["limit"], json!(20));
|
||||
assert_eq!(response["total"], json!(77));
|
||||
assert_eq!(response.as_array().unwrap().len(), 20);
|
||||
assert_eq!(
|
||||
response.as_array().unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.keys()
|
||||
.count(),
|
||||
0
|
||||
);
|
||||
|
||||
let (response, code) = index
|
||||
.get_all_documents(GetAllDocumentsOptions {
|
||||
@@ -239,13 +210,15 @@ async fn test_get_all_documents_attributes_to_retrieve() {
|
||||
})
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
||||
for results in response["results"].as_array().unwrap() {
|
||||
assert_eq!(results.as_object().unwrap().keys().count(), 0);
|
||||
}
|
||||
assert_eq!(response["offset"], json!(0));
|
||||
assert_eq!(response["limit"], json!(20));
|
||||
assert_eq!(response["total"], json!(77));
|
||||
assert_eq!(response.as_array().unwrap().len(), 20);
|
||||
assert_eq!(
|
||||
response.as_array().unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.keys()
|
||||
.count(),
|
||||
0
|
||||
);
|
||||
|
||||
let (response, code) = index
|
||||
.get_all_documents(GetAllDocumentsOptions {
|
||||
@@ -254,12 +227,15 @@ async fn test_get_all_documents_attributes_to_retrieve() {
|
||||
})
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
||||
for results in response["results"].as_array().unwrap() {
|
||||
assert_eq!(results.as_object().unwrap().keys().count(), 2);
|
||||
assert!(results["name"] != json!(null));
|
||||
assert!(results["tags"] != json!(null));
|
||||
}
|
||||
assert_eq!(response.as_array().unwrap().len(), 20);
|
||||
assert_eq!(
|
||||
response.as_array().unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.keys()
|
||||
.count(),
|
||||
2
|
||||
);
|
||||
|
||||
let (response, code) = index
|
||||
.get_all_documents(GetAllDocumentsOptions {
|
||||
@@ -268,10 +244,15 @@ async fn test_get_all_documents_attributes_to_retrieve() {
|
||||
})
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
||||
for results in response["results"].as_array().unwrap() {
|
||||
assert_eq!(results.as_object().unwrap().keys().count(), 16);
|
||||
}
|
||||
assert_eq!(response.as_array().unwrap().len(), 20);
|
||||
assert_eq!(
|
||||
response.as_array().unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.keys()
|
||||
.count(),
|
||||
16
|
||||
);
|
||||
|
||||
let (response, code) = index
|
||||
.get_all_documents(GetAllDocumentsOptions {
|
||||
@@ -280,99 +261,19 @@ async fn test_get_all_documents_attributes_to_retrieve() {
|
||||
})
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
||||
for results in response["results"].as_array().unwrap() {
|
||||
assert_eq!(results.as_object().unwrap().keys().count(), 16);
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_document_s_nested_attributes_to_retrieve() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
index.create(None).await;
|
||||
let documents = json!([
|
||||
{
|
||||
"id": 0,
|
||||
"content.truc": "foobar",
|
||||
},
|
||||
{
|
||||
"id": 1,
|
||||
"content": {
|
||||
"truc": "foobar",
|
||||
"machin": "bidule",
|
||||
},
|
||||
},
|
||||
]);
|
||||
let (_, code) = index.add_documents(documents, None).await;
|
||||
assert_eq!(code, 202);
|
||||
index.wait_task(1).await;
|
||||
|
||||
let (response, code) = index
|
||||
.get_document(
|
||||
0,
|
||||
Some(GetDocumentOptions {
|
||||
fields: Some(vec!["content"]),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response, json!({}));
|
||||
let (response, code) = index
|
||||
.get_document(
|
||||
1,
|
||||
Some(GetDocumentOptions {
|
||||
fields: Some(vec!["content"]),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response.as_array().unwrap().len(), 20);
|
||||
assert_eq!(
|
||||
response,
|
||||
json!({
|
||||
"content": {
|
||||
"truc": "foobar",
|
||||
"machin": "bidule",
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
let (response, code) = index
|
||||
.get_document(
|
||||
0,
|
||||
Some(GetDocumentOptions {
|
||||
fields: Some(vec!["content.truc"]),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
response,
|
||||
json!({
|
||||
"content.truc": "foobar",
|
||||
})
|
||||
);
|
||||
let (response, code) = index
|
||||
.get_document(
|
||||
1,
|
||||
Some(GetDocumentOptions {
|
||||
fields: Some(vec!["content.truc"]),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
response,
|
||||
json!({
|
||||
"content": {
|
||||
"truc": "foobar",
|
||||
},
|
||||
})
|
||||
response.as_array().unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.keys()
|
||||
.count(),
|
||||
16
|
||||
);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
async fn get_documents_displayed_attributes_is_ignored() {
async fn get_documents_displayed_attributes() {
    let server = Server::new().await;
    let index = server.index("test");
    index
@@ -384,19 +285,23 @@ async fn get_documents_displayed_attributes_is_ignored() {
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert_eq!(response["results"].as_array().unwrap().len(), 20);
    assert_eq!(response.as_array().unwrap().len(), 20);
    assert_eq!(
        response["results"][0].as_object().unwrap().keys().count(),
        16
        response.as_array().unwrap()[0]
            .as_object()
            .unwrap()
            .keys()
            .count(),
        1
    );
    assert!(response["results"][0]["gender"] != json!(null));

    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["total"], json!(77));
    assert!(response.as_array().unwrap()[0]
        .as_object()
        .unwrap()
        .get("gender")
        .is_some());

    let (response, code) = index.get_document(0, None).await;
    assert_eq!(code, 200);
    assert_eq!(response.as_object().unwrap().keys().count(), 16);
    assert_eq!(response.as_object().unwrap().keys().count(), 1);
    assert!(response.as_object().unwrap().get("gender").is_some());
}

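The rewrite above captures the response-shape change for document listing: the v0.26 line returned a bare JSON array, while the newer releases wrap the hits in a keyed envelope carrying `results`, `offset`, `limit`, and `total`. A standalone sketch of handling both shapes; the `extract_results` helper is illustrative only:

use serde_json::{json, Value};

// Illustrative helper: accept either the bare-array shape (old) or the
// `{ "results": [...], "offset": .., "limit": .., "total": .. }` envelope (new).
fn extract_results(response: &Value) -> Option<&Vec<Value>> {
    response
        .as_array()
        .or_else(|| response.get("results").and_then(Value::as_array))
}

fn main() {
    let old_shape = json!([{ "id": 0 }, { "id": 1 }]);
    let new_shape = json!({
        "results": [{ "id": 0 }, { "id": 1 }],
        "offset": 0, "limit": 20, "total": 77
    });

    assert_eq!(extract_results(&old_shape).unwrap().len(), 2);
    assert_eq!(extract_results(&new_shape).unwrap().len(), 2);
    // Pagination metadata only exists on the enveloped shape.
    assert_eq!(new_shape["total"], json!(77));
}
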
22
meilisearch-http/tests/dumps.rs
Normal file
@@ -0,0 +1,22 @@
#![allow(dead_code)]
mod common;

use crate::common::Server;
use serde_json::json;

#[actix_rt::test]
async fn get_unexisting_dump_status() {
    let server = Server::new().await;

    let (response, code) = server.get_dump_status("foobar").await;
    assert_eq!(code, 404);

    let expected_response = json!({
        "message": "Dump `foobar` not found.",
        "code": "dump_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#dump_not_found"
    });

    assert_eq!(response, expected_response);
}

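Meilisearch error bodies follow one fixed shape -- `message`, `code`, `type`, `link` -- so assertions like the one above can be factored. A minimal sketch of such a helper; the `assert_error` function is hypothetical, not part of the test suite:

use serde_json::{json, Value};

// Hypothetical assertion helper for the four-field Meilisearch error shape.
fn assert_error(response: &Value, message: &str, code: &str, kind: &str, link: &str) {
    assert_eq!(response["message"], json!(message));
    assert_eq!(response["code"], json!(code));
    assert_eq!(response["type"], json!(kind));
    assert_eq!(response["link"], json!(link));
}

fn main() {
    let response = json!({
        "message": "Dump `foobar` not found.",
        "code": "dump_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#dump_not_found"
    });
    assert_error(
        &response,
        "Dump `foobar` not found.",
        "dump_not_found",
        "invalid_request",
        "https://docs.meilisearch.com/errors#dump_not_found",
    );
}
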
@@ -1,73 +0,0 @@
use std::path::PathBuf;

use manifest_dir_macros::exist_relative_path;

pub enum GetDump {
    MoviesRawV1,
    MoviesWithSettingsV1,
    RubyGemsWithSettingsV1,

    MoviesRawV2,
    MoviesWithSettingsV2,
    RubyGemsWithSettingsV2,

    MoviesRawV3,
    MoviesWithSettingsV3,
    RubyGemsWithSettingsV3,

    MoviesRawV4,
    MoviesWithSettingsV4,
    RubyGemsWithSettingsV4,

    TestV5,
}

impl GetDump {
    pub fn path(&self) -> PathBuf {
        match self {
            GetDump::MoviesRawV1 => {
                exist_relative_path!("tests/assets/v1_v0.20.0_movies.dump").into()
            }
            GetDump::MoviesWithSettingsV1 => {
                exist_relative_path!("tests/assets/v1_v0.20.0_movies_with_settings.dump").into()
            }
            GetDump::RubyGemsWithSettingsV1 => {
                exist_relative_path!("tests/assets/v1_v0.20.0_rubygems_with_settings.dump").into()
            }

            GetDump::MoviesRawV2 => {
                exist_relative_path!("tests/assets/v2_v0.21.1_movies.dump").into()
            }
            GetDump::MoviesWithSettingsV2 => {
                exist_relative_path!("tests/assets/v2_v0.21.1_movies_with_settings.dump").into()
            }

            GetDump::RubyGemsWithSettingsV2 => {
                exist_relative_path!("tests/assets/v2_v0.21.1_rubygems_with_settings.dump").into()
            }

            GetDump::MoviesRawV3 => {
                exist_relative_path!("tests/assets/v3_v0.24.0_movies.dump").into()
            }
            GetDump::MoviesWithSettingsV3 => {
                exist_relative_path!("tests/assets/v3_v0.24.0_movies_with_settings.dump").into()
            }
            GetDump::RubyGemsWithSettingsV3 => {
                exist_relative_path!("tests/assets/v3_v0.24.0_rubygems_with_settings.dump").into()
            }

            GetDump::MoviesRawV4 => {
                exist_relative_path!("tests/assets/v4_v0.25.2_movies.dump").into()
            }
            GetDump::MoviesWithSettingsV4 => {
                exist_relative_path!("tests/assets/v4_v0.25.2_movies_with_settings.dump").into()
            }
            GetDump::RubyGemsWithSettingsV4 => {
                exist_relative_path!("tests/assets/v4_v0.25.2_rubygems_with_settings.dump").into()
            }
            GetDump::TestV5 => {
                exist_relative_path!("tests/assets/v5_v0.28.0_test_dump.dump").into()
            }
        }
    }
}

@@ -1,677 +0,0 @@
mod data;

use crate::common::{default_settings, GetAllDocumentsOptions, Server};
use meilisearch_http::Opt;
use serde_json::json;

use self::data::GetDump;

// all the following tests are ignored on Windows. See #2364
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v1() {
    let temp = tempfile::tempdir().unwrap();

    for path in [
        GetDump::MoviesRawV1.path(),
        GetDump::MoviesWithSettingsV1.path(),
        GetDump::RubyGemsWithSettingsV1.path(),
    ] {
        let options = Opt {
            import_dump: Some(path),
            ..default_settings(temp.path())
        };
        let error = Server::new_with_options(options)
            .await
            .map(|_| ())
            .unwrap_err();

        assert_eq!(error.to_string(), "The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.");
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v2_movie_raw() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesRawV2.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({"id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({"id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({"id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v2_movie_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesWithSettingsV2.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v2_rubygems_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::RubyGemsWithSettingsV2.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("rubygems");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 }})
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks["results"][0],
        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(188040, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
    );

    let (document, code) = index.get_document(191940, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
    );

    let (document, code) = index.get_document(159227, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v3_movie_raw() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesRawV3.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({"id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({"id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({"id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v3_movie_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesWithSettingsV3.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v3_rubygems_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::RubyGemsWithSettingsV3.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("rubygems");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks["results"][0],
        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(188040, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
    );

    let (document, code) = index.get_document(191940, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
    );

    let (document, code) = index.get_document(159227, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v4_movie_raw() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesRawV4.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({ "displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v4_movie_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesWithSettingsV4.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v4_rubygems_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::RubyGemsWithSettingsV4.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("rubygems");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks["results"][0],
        json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(188040, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
    );

    let (document, code) = index.get_document(191940, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
    );

    let (document, code) = index.get_document(159227, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v5() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::TestV5.path()),
        ..default_settings(temp.path())
    };
    let mut server = Server::new_auth_with_options(options, temp).await;
    server.use_api_key("MASTER_KEY");

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200, "{indexes}");

    assert_eq!(indexes["results"].as_array().unwrap().len(), 2);
    assert_eq!(indexes["results"][0]["uid"], json!("test"));
    assert_eq!(indexes["results"][1]["uid"], json!("test2"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let expected_stats = json!({
        "numberOfDocuments": 10,
        "isIndexing": false,
        "fieldDistribution": {
            "cast": 10,
            "director": 10,
            "genres": 10,
            "id": 10,
            "overview": 10,
            "popularity": 10,
            "poster_path": 10,
            "producer": 10,
            "production_companies": 10,
            "release_date": 10,
            "tagline": 10,
            "title": 10,
            "vote_average": 10,
            "vote_count": 10
        }
    });

    let index1 = server.index("test");
    let index2 = server.index("test2");

    let (stats, code) = index1.stats().await;
    assert_eq!(code, 200);
    assert_eq!(stats, expected_stats);

    let (docs, code) = index2
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert_eq!(docs["results"].as_array().unwrap().len(), 10);
    let (docs, code) = index1
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert_eq!(docs["results"].as_array().unwrap().len(), 10);

    let (stats, code) = index2.stats().await;
    assert_eq!(code, 200);
    assert_eq!(stats, expected_stats);

    let (keys, code) = server.list_api_keys().await;
    assert_eq!(code, 200);
    let key = &keys["results"][0];

    assert_eq!(key["name"], "my key");
}

@@ -102,7 +102,7 @@ async fn error_create_with_invalid_index_uid() {
    let (response, code) = index.create(None).await;

    let expected_response = json!({
        "message": "invalid index uid `test test#!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
        "message": "`test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
        "code": "invalid_index_uid",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"

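Both message variants above describe the same rule: an index uid may only contain alphanumeric characters, hyphens, and underscores. A standalone sketch of that check; the `is_valid_index_uid` helper is illustrative only, the real validation lives inside Meilisearch and also enforces limits the message does not mention:

// Illustrative check mirroring the rule in the error message above:
// alphanumeric characters, hyphens (-) and underscores (_) only.
fn is_valid_index_uid(uid: &str) -> bool {
    !uid.is_empty()
        && uid
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
}

fn main() {
    assert!(is_valid_index_uid("movies_2022"));
    assert!(is_valid_index_uid("test-index"));
    assert!(!is_valid_index_uid("test test#!")); // space and punctuation rejected
}
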
@@ -52,10 +52,10 @@ async fn loop_delete_add_documents() {
    let mut tasks = Vec::new();
    for _ in 0..50 {
        let (response, code) = index.add_documents(documents.clone(), None).await;
        tasks.push(response["taskUid"].as_u64().unwrap());
        tasks.push(response["uid"].as_u64().unwrap());
        assert_eq!(code, 202, "{}", response);
        let (response, code) = index.delete().await;
        tasks.push(response["taskUid"].as_u64().unwrap());
        tasks.push(response["uid"].as_u64().unwrap());
        assert_eq!(code, 202, "{}", response);
    }

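The only change in this hunk is the key under which the enqueued task's identifier is returned: newer releases use `taskUid`, the v0.26 line used `uid`. A sketch of reading either key during a migration; the `task_id` fallback helper is hypothetical:

use serde_json::{json, Value};

// Hypothetical fallback: prefer the newer `taskUid` key, accept the older `uid`.
fn task_id(response: &Value) -> Option<u64> {
    response["taskUid"]
        .as_u64()
        .or_else(|| response["uid"].as_u64())
}

fn main() {
    let new_response = json!({ "taskUid": 0, "status": "enqueued" });
    let old_response = json!({ "uid": 0, "status": "enqueued" });
    assert_eq!(task_id(&new_response), Some(0));
    assert_eq!(task_id(&old_response), Some(0));
}
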
@@ -16,11 +16,12 @@ async fn create_and_get_index() {

    assert_eq!(code, 200);
    assert_eq!(response["uid"], "test");
    assert_eq!(response["name"], "test");
    assert!(response.get("createdAt").is_some());
    assert!(response.get("updatedAt").is_some());
    assert_eq!(response["createdAt"], response["updatedAt"]);
    assert_eq!(response["primaryKey"], Value::Null);
    assert_eq!(response.as_object().unwrap().len(), 4);
    assert_eq!(response.as_object().unwrap().len(), 5);
}

#[actix_rt::test]
@@ -44,10 +45,10 @@ async fn error_get_unexisting_index() {
#[actix_rt::test]
async fn no_index_return_empty_list() {
    let server = Server::new().await;
    let (response, code) = server.list_indexes(None, None).await;
    let (response, code) = server.list_indexes().await;
    assert_eq!(code, 200);
    assert!(response["results"].is_array());
    assert!(response["results"].as_array().unwrap().is_empty());
    assert!(response.is_array());
    assert!(response.as_array().unwrap().is_empty());
}

#[actix_rt::test]
@@ -58,10 +59,10 @@ async fn list_multiple_indexes() {

    server.index("test").wait_task(1).await;

    let (response, code) = server.list_indexes(None, None).await;
    let (response, code) = server.list_indexes().await;
    assert_eq!(code, 200);
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert!(response.is_array());
    let arr = response.as_array().unwrap();
    assert_eq!(arr.len(), 2);
    assert!(arr
        .iter()
@@ -71,118 +72,6 @@ async fn list_multiple_indexes() {
        .any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
}

#[actix_rt::test]
async fn get_and_paginate_indexes() {
    let server = Server::new().await;
    const NB_INDEXES: usize = 50;
    for i in 0..NB_INDEXES {
        server.index(&format!("test_{i:02}")).create(None).await;
        server
            .index(&format!("test_{i:02}"))
            .wait_task(i as u64)
            .await;
    }

    // basic
    let (response, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 20);
    // ensuring we get all the indexes in the alphabetical order
    assert!((0..20)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with an offset
    let (response, code) = server.list_indexes(Some(15), None).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["offset"], json!(15));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 20);
    assert!((15..35)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with an offset and not enough elements
    let (response, code) = server.list_indexes(Some(45), None).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["offset"], json!(45));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 5);
    assert!((45..50)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with a limit lower than the default
    let (response, code) = server.list_indexes(None, Some(5)).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(5));
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 5);
    assert!((0..5)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with a limit higher than the default
    let (response, code) = server.list_indexes(None, Some(40)).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(40));
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 40);
    assert!((0..40)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with a limit higher than the default
    let (response, code) = server.list_indexes(None, Some(80)).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(80));
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 50);
    assert!((0..50)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with a limit and an offset
    let (response, code) = server.list_indexes(Some(20), Some(10)).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(10));
    assert_eq!(response["offset"], json!(20));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 10);
    assert!((20..30)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));
}

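Every assertion block in the test above follows the same arithmetic: the returned page holds min(limit, total - offset) entries, never going negative, with the limit defaulting to 20. A standalone sketch of that invariant using the same numbers; the helper name is illustrative:

// Illustrative: expected page size for a listing of `total` items.
fn expected_page_len(total: usize, offset: usize, limit: usize) -> usize {
    total.saturating_sub(offset).min(limit)
}

fn main() {
    const TOTAL: usize = 50;
    assert_eq!(expected_page_len(TOTAL, 0, 20), 20); // basic, default limit
    assert_eq!(expected_page_len(TOTAL, 45, 20), 5); // offset with too few left
    assert_eq!(expected_page_len(TOTAL, 0, 80), 50); // limit above the total
    assert_eq!(expected_page_len(TOTAL, 20, 10), 10); // limit and offset
}
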
#[actix_rt::test]
async fn get_invalid_index_uid() {
    let server = Server::new().await;

@@ -35,7 +35,7 @@ async fn stats() {

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202);
    assert_eq!(response["taskUid"], 1);
    assert_eq!(response["uid"], 1);

    index.wait_task(1).await;

@@ -21,6 +21,7 @@ async fn update_primary_key() {
    assert_eq!(code, 200);

    assert_eq!(response["uid"], "test");
    assert_eq!(response["name"], "test");
    assert!(response.get("createdAt").is_some());
    assert!(response.get("updatedAt").is_some());

@@ -31,7 +32,7 @@ async fn update_primary_key() {
    assert!(created_at < updated_at);

    assert_eq!(response["primaryKey"], "primary");
    assert_eq!(response.as_object().unwrap().len(), 4);
    assert_eq!(response.as_object().unwrap().len(), 5);
}

#[actix_rt::test]

@@ -2,7 +2,6 @@ mod auth;
mod common;
mod dashboard;
mod documents;
mod dumps;
mod index;
mod search;
mod settings;

@@ -36,30 +36,6 @@ async fn search_unexisting_parameter() {
        .await;
}

#[actix_rt::test]
async fn search_invalid_highlight_and_crop_tags() {
    let server = Server::new().await;
    let index = server.index("test");

    let fields = &["cropMarker", "highlightPreTag", "highlightPostTag"];

    for field in fields {
        // object
        let (response, code) = index
            .search_post(json!({field.to_string(): {"marker": "<crop>"}}))
            .await;
        assert_eq!(code, 400, "field {} passing object: {}", &field, response);
        assert_eq!(response["code"], "bad_request");

        // array
        let (response, code) = index
            .search_post(json!({field.to_string(): ["marker", "<crop>"]}))
            .await;
        assert_eq!(code, 400, "field {} passing array: {}", &field, response);
        assert_eq!(response["code"], "bad_request");
    }
}

#[actix_rt::test]
async fn filter_invalid_syntax_object() {
    let server = Server::new().await;
@@ -74,7 +50,7 @@ async fn filter_invalid_syntax_object() {
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
|
||||
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO` or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
@@ -101,13 +77,13 @@ async fn filter_invalid_syntax_array() {
|
||||
index.wait_task(1).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
|
||||
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO` or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
|
||||
"code": "invalid_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
});
|
||||
index
|
||||
.search(json!({"filter": ["title & Glass"]}), |response, code| {
|
||||
.search(json!({"filter": [["title & Glass"]]}), |response, code| {
|
||||
assert_eq!(response, expected_response);
|
||||
assert_eq!(code, 400);
|
||||
})
|
||||
@@ -164,7 +140,7 @@ async fn filter_invalid_attribute_array() {
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
});
|
||||
index
|
||||
.search(json!({"filter": ["many = Glass"]}), |response, code| {
|
||||
.search(json!({"filter": [["many = Glass"]]}), |response, code| {
|
||||
assert_eq!(response, expected_response);
|
||||
assert_eq!(code, 400);
|
||||
})
|
||||
@@ -218,7 +194,7 @@ async fn filter_reserved_geo_attribute_array() {
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_filter"
|
||||
});
|
||||
index
|
||||
.search(json!({"filter": ["_geo = Glass"]}), |response, code| {
|
||||
.search(json!({"filter": [["_geo = Glass"]]}), |response, code| {
|
||||
assert_eq!(response, expected_response);
|
||||
assert_eq!(code, 400);
|
||||
})
|
||||
@@ -273,7 +249,7 @@ async fn filter_reserved_attribute_array() {
|
||||
});
|
||||
index
|
||||
.search(
|
||||
json!({"filter": ["_geoDistance = Glass"]}),
|
||||
json!({"filter": [["_geoDistance = Glass"]]}),
|
||||
|response, code| {
|
||||
assert_eq!(response, expected_response);
|
||||
assert_eq!(code, 400);
|
||||
|
||||
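The four hunks above all make the same change to the array form of `filter`: the v0.29.2 tests send a flat array of expressions, while the v0.26.1 tests wrap each expression in an inner array (in Meilisearch's array filter syntax, outer entries are ANDed and inner entries ORed). A minimal sketch of the two request payloads using `serde_json`; the helper name is hypothetical and not part of this diff:

use serde_json::{json, Value};

// Hypothetical helper contrasting the two `filter` payload shapes seen above.
fn filter_payloads() -> (Value, Value) {
    // Flat array, as sent by the v0.29.2 tests.
    let flat = json!({ "filter": ["title & Glass"] });
    // Nested array, as sent by the v0.26.1 tests.
    let nested = json!({ "filter": [["title & Glass"]] });
    (flat, nested)
}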
@@ -1,471 +0,0 @@
-use super::*;
-use crate::common::Server;
-use serde_json::json;
-
-#[actix_rt::test]
-async fn formatted_contain_wildcard() {
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    index
-        .update_settings(json!({ "displayedAttributes": ["id", "cattos"] }))
-        .await;
-
-    let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(1).await;
-
-    index.search(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true }),
-        |response, code|
-        {
-            assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "_formatted": {
-                        "id": "852",
-                        "cattos": "<em>pesti</em>",
-                    },
-                    "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]},
-                })
-            );
-        }
-    )
-    .await;
-
-    index
-        .search(
-            json!({ "q": "pesti", "attributesToRetrieve": ["*"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "cattos": "pesti",
-                    })
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"], "showMatchesPosition": true }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "cattos": "pesti",
-                        "_formatted": {
-                            "id": "852",
-                            "cattos": "pesti",
-                        },
-                        "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]},
-                    })
-                );
-            }
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToCrop": ["*"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "cattos": "pesti",
-                        "_formatted": {
-                            "id": "852",
-                            "cattos": "pesti",
-                        }
-                    })
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "q": "pesti", "attributesToCrop": ["*"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "cattos": "pesti",
-                        "_formatted": {
-                            "id": "852",
-                            "cattos": "pesti",
-                        }
-                    })
-                );
-            },
-        )
-        .await;
-}
-
-#[actix_rt::test]
-async fn format_nested() {
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(0).await;
-
-    index
-        .search(
-            json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "doggos": [
-                            {
-                                "name": "bobby",
-                                "age": 2,
-                            },
-                            {
-                                "name": "buddy",
-                                "age": 4,
-                            },
-                        ],
-                    })
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "doggos": [
-                            {
-                                "name": "bobby",
-                            },
-                            {
-                                "name": "buddy",
-                            },
-                        ],
-                    })
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "q": "bobby", "attributesToRetrieve": ["doggos.name"], "showMatchesPosition": true }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "doggos": [
-                            {
-                                "name": "bobby",
-                            },
-                            {
-                                "name": "buddy",
-                            },
-                        ],
-                        "_matchesPosition": {"doggos.name": [{"start": 0, "length": 5}]},
-                    })
-                );
-            }
-        )
-        .await;
-
-    index
-        .search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.name"] }),
-        |response, code| {
-            assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "_formatted": {
-                        "doggos": [
-                            {
-                                "name": "bobby",
-                            },
-                            {
-                                "name": "buddy",
-                            },
-                        ],
-                    },
-                })
-            );
-        })
-        .await;
-
-    index
-        .search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToCrop": ["doggos.name"] }),
-        |response, code| {
-            assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "_formatted": {
-                        "doggos": [
-                            {
-                                "name": "bobby",
-                            },
-                            {
-                                "name": "buddy",
-                            },
-                        ],
-                    },
-                })
-            );
-        })
-        .await;
-
-    index
-        .search(json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"], "attributesToHighlight": ["doggos.age"] }),
-        |response, code| {
-            assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "doggos": [
-                        {
-                            "name": "bobby",
-                        },
-                        {
-                            "name": "buddy",
-                        },
-                    ],
-                    "_formatted": {
-                        "doggos": [
-                            {
-                                "name": "bobby",
-                                "age": "2",
-                            },
-                            {
-                                "name": "buddy",
-                                "age": "4",
-                            },
-                        ],
-                    },
-                })
-            );
-        })
-        .await;
-
-    index
-        .search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.age"], "attributesToCrop": ["doggos.name"] }),
-        |response, code| {
-            assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "_formatted": {
-                        "doggos": [
-                            {
-                                "name": "bobby",
-                                "age": "2",
-                            },
-                            {
-                                "name": "buddy",
-                                "age": "4",
-                            },
-                        ],
-                    },
-                })
-            );
-        }
-        )
-        .await;
-}
-
-#[actix_rt::test]
-async fn displayedattr_2_smol() {
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    // not enough displayed for the other settings
-    index
-        .update_settings(json!({ "displayedAttributes": ["id"] }))
-        .await;
-
-    let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(1).await;
-
-    index
-        .search(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }),
-        |response, code| {
-            assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "id": 852,
-                })
-            );
-        })
-        .await;
-
-    index
-        .search(
-            json!({ "attributesToRetrieve": ["id"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                    })
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "attributesToHighlight": ["id"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "_formatted": {
-                            "id": "852",
-                        }
-                    })
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(json!({ "attributesToCrop": ["id"] }), |response, code| {
-            assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "id": 852,
-                    "_formatted": {
-                        "id": "852",
-                    }
-                })
-            );
-        })
-        .await;
-
-    index
-        .search(
-            json!({ "attributesToHighlight": ["id"], "attributesToCrop": ["id"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "_formatted": {
-                            "id": "852",
-                        }
-                    })
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "attributesToHighlight": ["cattos"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                    })
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "attributesToCrop": ["cattos"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                    })
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "attributesToRetrieve": ["cattos"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"][0], json!({}));
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["cattos"], "attributesToCrop": ["cattos"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"][0], json!({}));
-
-            }
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["id"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "_formatted": {
-                            "id": "852",
-                        }
-                    })
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({ "attributesToRetrieve": ["cattos"], "attributesToCrop": ["id"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "_formatted": {
-                            "id": "852",
-                        }
-                    })
-                );
-            },
-        )
-        .await;
-}
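The deleted formatted.rs suite above pinned down the `_formatted` and `_matchesPosition` fields of a search hit. For orientation, the hit shape it asserted, rebuilt here from the values in the removed test (the wrapper function is illustrative only, not part of the diff):

use serde_json::{json, Value};

// Hit shape from the removed `formatted_contain_wildcard` assertions:
// `_formatted` holds stringified, highlighted copies of the displayed fields,
// and `_matchesPosition` records where each match starts and how long it is.
fn expected_formatted_hit() -> Value {
    json!({
        "_formatted": {
            "id": "852",
            "cattos": "<em>pesti</em>",
        },
        "_matchesPosition": { "cattos": [{ "start": 0, "length": 5 }] },
    })
}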
@@ -1,97 +1,39 @@
-// This modules contains all the test concerning search. Each particular feature of the search
+// This modules contains all the test concerning search. Each particular feture of the search
 // should be tested in its own module to isolate tests and keep the tests readable.

 mod errors;
-mod formatted;

 use crate::common::Server;
 use once_cell::sync::Lazy;
 use serde_json::{json, Value};

-pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     json!([
         {
             "title": "Shazam!",
-            "id": "287947",
+            "id": "287947"
         },
         {
             "title": "Captain Marvel",
-            "id": "299537",
+            "id": "299537"
         },
         {
             "title": "Escape Room",
-            "id": "522681",
+            "id": "522681"
         },
-        {
-            "title": "How to Train Your Dragon: The Hidden World",
-            "id": "166428",
+        { "title": "How to Train Your Dragon: The Hidden World", "id": "166428"
         },
         {
             "title": "Glass",
-            "id": "450465",
+            "id": "450465"
         }
     ])
 });

-pub(self) static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
-    json!([
-        {
-            "id": 852,
-            "father": "jean",
-            "mother": "michelle",
-            "doggos": [
-                {
-                    "name": "bobby",
-                    "age": 2,
-                },
-                {
-                    "name": "buddy",
-                    "age": 4,
-                },
-            ],
-            "cattos": "pesti",
-        },
-        {
-            "id": 654,
-            "father": "pierre",
-            "mother": "sabine",
-            "doggos": [
-                {
-                    "name": "gros bill",
-                    "age": 8,
-                },
-            ],
-            "cattos": ["simba", "pestiféré"],
-        },
-        {
-            "id": 750,
-            "father": "romain",
-            "mother": "michelle",
-            "cattos": ["enigma"],
-        },
-        {
-            "id": 951,
-            "father": "jean-baptiste",
-            "mother": "sophie",
-            "doggos": [
-                {
-                    "name": "turbo",
-                    "age": 5,
-                },
-                {
-                    "name": "fast",
-                    "age": 6,
-                },
-            ],
-            "cattos": ["moumoute", "gomez"],
-        },
-    ])
-});
-
 #[actix_rt::test]
 async fn simple_placeholder_search() {
     let server = Server::new().await;
-    let index = server.index("basic");
+    let index = server.index("test");

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -103,18 +45,6 @@ async fn simple_placeholder_search() {
             assert_eq!(response["hits"].as_array().unwrap().len(), 5);
         })
         .await;
-
-    let index = server.index("nested");
-    let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(1).await;
-
-    index
-        .search(json!({}), |response, code| {
-            assert_eq!(code, 200, "{}", response);
-            assert_eq!(response["hits"].as_array().unwrap().len(), 4);
-        })
-        .await;
 }

 #[actix_rt::test]
@@ -132,18 +62,6 @@ async fn simple_search() {
             assert_eq!(response["hits"].as_array().unwrap().len(), 1);
         })
         .await;
-
-    let index = server.index("nested");
-    let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(1).await;
-
-    index
-        .search(json!({"q": "pesti"}), |response, code| {
-            assert_eq!(code, 200, "{}", response);
-            assert_eq!(response["hits"].as_array().unwrap().len(), 2);
-        })
-        .await;
 }

 #[actix_rt::test]
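Throughout these tests, `index.search(query, assert_fn)` takes a JSON query plus a closure receiving the response body and HTTP status; the helper itself lives in the `common` module, which this diff does not show. A rough stand-in, assuming only that calling shape:

use serde_json::Value;

// Hypothetical stand-in for the `common` test helper: await a prepared
// request and hand (response, status) to the caller's assertion closure.
async fn run_search<F>(request: impl std::future::Future<Output = (Value, u16)>, check: F)
where
    F: FnOnce(Value, u16),
{
    let (response, code) = request.await;
    check(response, code);
}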
@@ -170,27 +88,6 @@ async fn search_multiple_params() {
             },
         )
         .await;
-
-    let index = server.index("nested");
-    let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(1).await;
-
-    index
-        .search(
-            json!({
-                "q": "pesti",
-                "attributesToCrop": ["catto:2"],
-                "attributesToHighlight": ["catto"],
-                "limit": 2,
-                "offset": 0,
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 2);
-            },
-        )
-        .await;
 }

 #[actix_rt::test]
@@ -217,43 +114,6 @@ async fn search_with_filter_string_notation() {
             },
         )
         .await;
-
-    let index = server.index("nested");
-
-    index
-        .update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]}))
-        .await;
-
-    let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(3).await;
-
-    index
-        .search(
-            json!({
-                "filter": "cattos = pesti"
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 1);
-                assert_eq!(response["hits"][0]["id"], json!(852));
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({
-                "filter": "doggos.age > 5"
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 2);
-                assert_eq!(response["hits"][0]["id"], json!(654));
-                assert_eq!(response["hits"][1]["id"], json!(951));
-            },
-        )
-        .await;
 }

 #[actix_rt::test]
@@ -310,28 +170,6 @@ async fn search_with_sort_on_numbers() {
             },
         )
         .await;
-
-    let index = server.index("nested");
-
-    index
-        .update_settings(json!({"sortableAttributes": ["doggos.age"]}))
-        .await;
-
-    let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(3).await;
-
-    index
-        .search(
-            json!({
-                "sort": ["doggos.age:asc"]
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 4);
-            },
-        )
-        .await;
 }

 #[actix_rt::test]
@@ -358,28 +196,6 @@ async fn search_with_sort_on_strings() {
             },
         )
         .await;
-
-    let index = server.index("nested");
-
-    index
-        .update_settings(json!({"sortableAttributes": ["doggos.name"]}))
-        .await;
-
-    let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(3).await;
-
-    index
-        .search(
-            json!({
-                "sort": ["doggos.name:asc"]
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 4);
-            },
-        )
-        .await;
 }

 #[actix_rt::test]
@@ -420,94 +236,16 @@ async fn search_facet_distribution() {
     index
         .search(
             json!({
-                "facets": ["title"]
+                "facetsDistribution": ["title"]
             }),
             |response, code| {
                 assert_eq!(code, 200, "{}", response);
-                let dist = response["facetDistribution"].as_object().unwrap();
+                let dist = response["facetsDistribution"].as_object().unwrap();
                 assert_eq!(dist.len(), 1);
                 assert!(dist.get("title").is_some());
             },
         )
        .await;
-
-    let index = server.index("nested");
-
-    index
-        .update_settings(json!({"filterableAttributes": ["father", "doggos.name"]}))
-        .await;
-
-    let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(3).await;
-
-    // TODO: TAMO: fix the test
-    index
-        .search(
-            json!({
-                // "facets": ["father", "doggos.name"]
-                "facets": ["father"]
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                let dist = response["facetDistribution"].as_object().unwrap();
-                assert_eq!(dist.len(), 1);
-                assert_eq!(
-                    dist["father"],
-                    json!({ "jean": 1, "pierre": 1, "romain": 1, "jean-baptiste": 1})
-                );
-                /*
-                assert_eq!(
-                    dist["doggos.name"],
-                    json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
-                );
-                */
-            },
-        )
-        .await;
-
-    index
-        .update_settings(json!({"filterableAttributes": ["doggos"]}))
-        .await;
-    index.wait_task(4).await;
-
-    index
-        .search(
-            json!({
-                "facets": ["doggos.name"]
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                let dist = response["facetDistribution"].as_object().unwrap();
-                assert_eq!(dist.len(), 1);
-                assert_eq!(
-                    dist["doggos.name"],
-                    json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
-                );
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({
-                "facets": ["doggos"]
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                let dist = response["facetDistribution"].as_object().unwrap();
-                assert_eq!(dist.len(), 3);
-                assert_eq!(
-                    dist["doggos.name"],
-                    json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
-                );
-                assert_eq!(
-                    dist["doggos.age"],
-                    json!({ "2": 1, "4": 1, "5": 1, "6": 1, "8": 1})
-                );
-            },
-        )
-        .await;
 }

 #[actix_rt::test]
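The first hunk of `search_facet_distribution` records a naming difference worth keeping in mind for the rest of the diff: on the v0.29.2 side the request key is `facets` and the response key is `facetDistribution`, while on the v0.26.1 side both are `facetsDistribution`. As a sketch (the helper name is hypothetical):

use serde_json::{json, Value};

// The two request payloads, copied from the hunk above.
fn facet_requests() -> (Value, Value) {
    let v0_29 = json!({ "facets": ["title"] });             // reads response["facetDistribution"]
    let v0_26 = json!({ "facetsDistribution": ["title"] }); // reads response["facetsDistribution"]
    (v0_29, v0_26)
}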
@@ -527,190 +265,5 @@ async fn displayed_attributes() {
         .search_post(json!({ "attributesToRetrieve": ["title", "id"] }))
         .await;
     assert_eq!(code, 200, "{}", response);
-    assert!(response["hits"][0].get("title").is_some());
-}
-
-#[actix_rt::test]
-async fn placeholder_search_is_hard_limited() {
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    let documents: Vec<_> = (0..1200)
-        .map(|i| json!({ "id": i, "text": "I am unique!" }))
-        .collect();
-    index.add_documents(documents.into(), None).await;
-    index.wait_task(0).await;
-
-    index
-        .search(
-            json!({
-                "limit": 1500,
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 1000);
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({
-                "offset": 800,
-                "limit": 400,
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
-            },
-        )
-        .await;
-
-    index
-        .update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
-        .await;
-    index.wait_task(1).await;
-
-    index
-        .search(
-            json!({
-                "limit": 1500,
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 1200);
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({
-                "offset": 1000,
-                "limit": 400,
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
-            },
-        )
-        .await;
-}
-
-#[actix_rt::test]
-async fn search_is_hard_limited() {
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    let documents: Vec<_> = (0..1200)
-        .map(|i| json!({ "id": i, "text": "I am unique!" }))
-        .collect();
-    index.add_documents(documents.into(), None).await;
-    index.wait_task(0).await;
-
-    index
-        .search(
-            json!({
-                "q": "unique",
-                "limit": 1500,
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 1000);
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({
-                "q": "unique",
-                "offset": 800,
-                "limit": 400,
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
-            },
-        )
-        .await;
-
-    index
-        .update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
-        .await;
-    index.wait_task(1).await;
-
-    index
-        .search(
-            json!({
-                "q": "unique",
-                "limit": 1500,
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 1200);
-            },
-        )
-        .await;
-
-    index
-        .search(
-            json!({
-                "q": "unique",
-                "offset": 1000,
-                "limit": 400,
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
-            },
-        )
-        .await;
-}
-
-#[actix_rt::test]
-async fn faceting_max_values_per_facet() {
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    index
-        .update_settings(json!({ "filterableAttributes": ["number"] }))
-        .await;
-
-    let documents: Vec<_> = (0..10_000)
-        .map(|id| json!({ "id": id, "number": id * 10 }))
-        .collect();
-    index.add_documents(json!(documents), None).await;
-    index.wait_task(1).await;
-
-    index
-        .search(
-            json!({
-                "facets": ["number"]
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                let numbers = response["facetDistribution"]["number"].as_object().unwrap();
-                assert_eq!(numbers.len(), 100);
-            },
-        )
-        .await;
-
-    index
-        .update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } }))
-        .await;
-    index.wait_task(2).await;
-
-    index
-        .search(
-            json!({
-                "facets": ["number"]
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                let numbers = &response["facetDistribution"]["number"].as_object().unwrap();
-                assert_eq!(numbers.len(), 10_000);
-            },
-        )
-        .await;
+    assert!(response["hits"].get("title").is_none());
 }
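The last hunk removes tests for two result-size caps and the settings that raise them: with defaults, at most 1000 hits come back regardless of `limit`/`offset`, and a facet distribution lists at most 100 values per facet. The settings payloads below are copied from the removed tests; the wrapper function is illustrative only:

use serde_json::{json, Value};

// Settings from the removed tests, raising both caps to 10_000.
fn limit_settings() -> (Value, Value) {
    let pagination = json!({ "pagination": { "maxTotalHits": 10_000 } });
    let faceting = json!({ "faceting": { "maxValuesPerFacet": 10_000 } });
    (pagination, faceting)
}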
Some files were not shown because too many files have changed in this diff.