Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-03 03:05:34 +00:00)

Compare commits: v1.3.5...panic-repo (222 commits)
Commit SHA1s:
b06e04fb9b, 905dc241ae, 146908f062, 84f701679d, 355d3b7e45, 69354a6144, 2b5d9042d1, 5b57fbab08,
48865470d7, c810df4d9f, 5e3df76699, 02765fb267, 841165d529, ea4a266f08, 49f069ed97, be16b99d40,
ec0c09d17c, a9230f6e6c, 62ea81bef6, f28f09ae2f, eae9eab181, cf8dad1ca0, dd619913da, 9b55ff16e9,
e761db582f, d8c649b3cd, 5e0485d8dd, 27eec21415, 62cc97ba70, fed59cc1d5, 2b3adef796, 956cfc5487,
12fc878640, 0a2e8b92a9, c7a3f80de6, 029d4de043, 549f1bcccf, 689ec7c7ad, 3655d4bdca, 055ca3935b,
1b8871a585, bf8fac6676, f2a9e1ebbb, c45c6cf54c, 513e61e9a3, 90a626bf80, 0d4acf2daa, 58db8d85ec,
62dfd09dc6, 656dadabea, c5f7893fbb, 8cf2ccf168, 0913373a5e, 1a7f1282af, bc747aac3a, be92376ab3,
cf7e355735, 5f09d89ad1, 6ecb26a3f8, 76c6f554d6, f343ef5f2f, 96982a768a, fca78fbc46, 67a678cfb6,
d1331d8abf, 19ba129165, d4da06ff47, 3e0471edae, 432df03c4c, 11958016dd, 63c250a04d, 06d8cd5b72,
c0f2724c2d, d772073dfa, 8fe8ddea79, 8a95bf28e5, c0fd3dffb8, c42fd5375f, b418c3a756, 1cde455758,
ca19bae72f, 705878ff59, 92c280d1c8, 181e7a1e53, 2e5abb4d2c, 44aaf5d9e3, ff0ababf65, c5336af1c5,
1567758a56, 37953afe1a, 43989fe2e4, de3f992ae4, c668a29ed5, 98f0618065, b10eeb0e41, 4a8515e9fc,
86b314626d, bb79bdb3f8, d429e7da99, 584b772248, 1806c04a9a, 3485e8f1c4, fe697a6685, eb4135f8ae,
ec4844c3a6, 77c3787b78, 4f902490b9, 1faee92748, 5831466525, 3cdb3e4eaf, 26f34ec7a2, 07d36180ad,
4c641b79a2, 76c05d1b20, ef31ab52a4, 34fac115d5, 791c5cd874, 5bea1092fb, 056b2c387d, a09686fcbd,
b4c44603db, 393be40179, 2c1d60f79b, 487d493f49, 08af69a33b, 9258e5b5bf, ddd34a488a, 526c2b3602,
e8c9367686, 9636c5f558, b310830b5d, 462b4654c4, abfa7ded25, f2837aaec2, 11df155598, 651657c03e,
b9ad59c969, 66aa682e23, dc3d9c90d9, 287cf25d39, 66aa6d5871, 8ac5b765bc, cea93e9a37, 085aad0a94,
e9b62aacb3, 456960d2c7, 3dda176723, af0f6f0bf0, ccf3ba3f32, 65528a3e06, 6db80b0836, 914b125c5f,
e59d7f238c, fd81945597, 794e491152, cab27c2ab4, 624fa9052f, 359ede4862, 60c11dbdbd, dacee40ebc,
6089083a8e, cc2c19d4c3, a5c56fac8a, e4e49e63d0, 00bd7bd19a, 8084cf29f3, 5a7c1bde84, 6b2d671be7,
43c13faeda, 29adfc2f68, 064ee95b1c, 604d533b31, 8dc5acf998, fc2590fc9d, 4a21fecf67, ae8e69c030,
dd57873f8e, 3dda93d50f, 117146ec4e, 884b4d47b1, 023cb0c2de, f391039a6f, fcdd20b533, b45c36cd71,
151c31c18f, a8ad0902d3, e917dbdebb, ba919b6123, 9d5e3457e5, 04694071fe, b0c1a9504a, d57026cd96,
41c9e8856a, d4ff59fcf5, 9c485f8563, d8d12d5979, 0597a97c84, 2dfbb6813a, 8f589a5cce, 0b8bbd8750,
eef95de30e, 13a13a4862, e691c92ed5, 928ab2f9b1, 7c18a9375f, 05a311f9be, 9b1b9b409e, 7f555f23e8,
a0bfc9f63a, 3155264381, 42400c381e, 08c7dab528, 8590687515, 8f5d127b1e, 2b4160ebb9, 8ba1c8f88f,
8e7edf8ea7, 9daccdf7f0, 437ee55c57, b1717865ea, 176f716292, 40ad19ba9e
.github/ISSUE_TEMPLATE/sprint_issue.md (vendored, 8 changes)

@@ -7,19 +7,17 @@ assignees: ''

 ---

-Related product team resources: [roadmap card]() (_internal only_) and [PRD]() (_internal only_)
+Related product team resources: [PRD]() (_internal only_)
 Related product discussion:
 Related spec: WIP

 ## Motivation

-<!---Copy/paste the information in the roadmap resources or briefly detail the product motivation. Ask product team if any hesitation.-->
+<!---Copy/paste the information in PRD or briefly detail the product motivation. Ask product team if any hesitation.-->

 ## Usage

-<!---Write a quick description of the usage if the usage has already been defined-->
+<!---Link to the public part of the PRD, or to the related product discussion for experimental features-->

-Refer to the final spec to know the details and the final decisions about the usage.

 ## TODO
.github/workflows/benchmarks-manual.yml (vendored, 2 changes)

@@ -74,4 +74,4 @@ jobs:
           echo "${{ steps.file.outputs.basename }}.json has just been pushed."
           echo 'How to compare this benchmark with another one?'
           echo ' - Check the available files with: ./benchmarks/scripts/list.sh'
-          echo " - Run the following command: ./benchmaks/scipts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"
+          echo " - Run the following command: ./benchmaks/scripts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"
.github/workflows/benchmarks-pr.yml (vendored, new file, 98 lines)

@@ -0,0 +1,98 @@
+name: Benchmarks (PR)
+on: issue_comment
+permissions:
+  issues: write
+
+env:
+  GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
+
+jobs:
+  run-benchmarks-on-comment:
+    if: startsWith(github.event.comment.body, '/benchmark')
+    name: Run and upload benchmarks
+    runs-on: benchmarks
+    timeout-minutes: 4320 # 72h
+    steps:
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+
+      - name: Check for Command
+        id: command
+        uses: xt0rted/slash-command-action@v2
+        with:
+          command: benchmark
+          reaction-type: "eyes"
+          repo-token: ${{ env.GH_TOKEN }}
+
+      - uses: xt0rted/pull-request-comment-branch@v2
+        id: comment-branch
+        with:
+          repo_token: ${{ env.GH_TOKEN }}
+
+      - uses: actions/checkout@v3
+        if: success()
+        with:
+          fetch-depth: 0 # fetch full history to be able to get main commit sha
+          ref: ${{ steps.comment-branch.outputs.head_ref }}
+
+      # Set variables
+      - name: Set current branch name
+        shell: bash
+        run: echo "name=$(git rev-parse --abbrev-ref HEAD)" >> $GITHUB_OUTPUT
+        id: current_branch
+      - name: Set normalized current branch name # Replace `/` by `_` in branch name to avoid issues when pushing to S3
+        shell: bash
+        run: echo "name=$(git rev-parse --abbrev-ref HEAD | tr '/' '_')" >> $GITHUB_OUTPUT
+        id: normalized_current_branch
+      - name: Set shorter commit SHA
+        shell: bash
+        run: echo "short=$(echo $GITHUB_SHA | cut -c1-8)" >> $GITHUB_OUTPUT
+        id: commit_sha
+      - name: Set file basename with format "dataset_branch_commitSHA"
+        shell: bash
+        run: echo "basename=$(echo ${{ steps.command.outputs.command-arguments }}_${{ steps.normalized_current_branch.outputs.name }}_${{ steps.commit_sha.outputs.short }})" >> $GITHUB_OUTPUT
+        id: file
+
+      # Run benchmarks
+      - name: Run benchmarks - Dataset ${{ steps.command.outputs.command-arguments }} - Branch ${{ steps.current_branch.outputs.name }} - Commit ${{ steps.commit_sha.outputs.short }}
+        run: |
+          cd benchmarks
+          cargo bench --bench ${{ steps.command.outputs.command-arguments }} -- --save-baseline ${{ steps.file.outputs.basename }}
+
+      # Generate critcmp files
+      - name: Install critcmp
+        uses: taiki-e/install-action@v2
+        with:
+          tool: critcmp
+      - name: Export cripcmp file
+        run: |
+          critcmp --export ${{ steps.file.outputs.basename }} > ${{ steps.file.outputs.basename }}.json
+
+      # Upload benchmarks
+      - name: Upload ${{ steps.file.outputs.basename }}.json to DO Spaces # DigitalOcean Spaces = S3
+        uses: BetaHuhn/do-spaces-action@v2
+        with:
+          access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }}
+          secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }}
+          space_name: ${{ secrets.DO_SPACES_SPACE_NAME }}
+          space_region: ${{ secrets.DO_SPACES_SPACE_REGION }}
+          source: ${{ steps.file.outputs.basename }}.json
+          out_dir: critcmp_results
+
+      # Compute the diff of the benchmarks and send a message on the GitHub PR
+      - name: Compute and send a message in the PR
+        env:
+          GITHUB_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
+        run: |
+          set -x
+          export base_ref=$(git merge-base origin/main ${{ steps.comment-branch.outputs.head_ref }} | head -c8)
+          export base_filename=$(echo ${{ steps.command.outputs.command-arguments }}_main_${base_ref}.json)
+          export bench_name=$(echo ${{ steps.command.outputs.command-arguments }})
+          echo "Here are your $bench_name benchmarks diff 👊" >> body.txt
+          echo '```' >> body.txt
+          ./benchmarks/scripts/compare.sh $base_filename ${{ steps.file.outputs.basename }}.json >> body.txt
+          echo '```' >> body.txt
+          gh pr comment ${{ steps.current_branch.outputs.name }} --body-file body.txt
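The `cargo bench ... -- --save-baseline` step in this new workflow relies on Criterion's baseline mechanism, whose saved measurements critcmp can export and compare. For context, here is a minimal sketch of the kind of benchmark target such a workflow invokes; the function under test and the data are hypothetical stand-ins, not the repository's actual `benchmarks` crate:

```rust
// Minimal Criterion bench sketch; `search` is a hypothetical stand-in
// for the engine call that the real benches in `benchmarks/` measure.
use criterion::{black_box, criterion_group, criterion_main, Criterion};

fn search(query: &str) -> usize {
    // Placeholder for the work being measured.
    query.len()
}

fn bench_search(c: &mut Criterion) {
    c.bench_function("search: basic query", |b| {
        // `black_box` keeps the optimizer from deleting the measured call.
        b.iter(|| search(black_box("the quick brown fox")))
    });
}

criterion_group!(benches, bench_search);
criterion_main!(benches);
```

Running such a bench with `cargo bench -- --save-baseline <name>` stores the measurements on disk, which is what lets `critcmp --export <name>` turn them into the JSON file the workflow uploads and later diffs against the `main` baseline.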
.github/workflows/dependency-issue.yml (vendored, 4 changes)

@@ -2,8 +2,8 @@ name: Create issue to upgrade dependencies

 on:
   schedule:
-    # Run the first of the month, every 3 month
-    - cron: '0 0 1 */3 *'
+    # Run the first of the month, every 6 month
+    - cron: '0 0 1 */6 *'
   workflow_dispatch:

 jobs:
.github/workflows/publish-apt-brew-pkg.yml (vendored, 5 changes)

@@ -35,7 +35,7 @@ jobs:
       - name: Build deb package
        run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
        with:
          repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
          file: target/debian/meilisearch.deb
@@ -50,8 +50,9 @@ jobs:
     needs: check-version
     steps:
       - name: Create PR to Homebrew
-        uses: mislav/bump-homebrew-formula-action@v2
+        uses: mislav/bump-homebrew-formula-action@v3
        with:
          formula-name: meilisearch
+         formula-path: Formula/m/meilisearch.rb
        env:
          COMMITTER_TOKEN: ${{ secrets.HOMEBREW_COMMITTER_TOKEN }}
.github/workflows/publish-binaries.yml (vendored, 8 changes)

@@ -54,7 +54,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/meilisearch
@@ -87,7 +87,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/${{ matrix.artifact_name }}
@@ -121,7 +121,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
@@ -183,7 +183,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
.github/workflows/publish-docker-images.yml (vendored, 10 changes)

@@ -57,20 +57,20 @@ jobs:
           echo "date=$commit_date" >> $GITHUB_OUTPUT

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3

       - name: Login to Docker Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Docker meta
         id: meta
-        uses: docker/metadata-action@v4
+        uses: docker/metadata-action@v5
         with:
           images: getmeili/meilisearch
           # Prevent `latest` to be updated for each new tag pushed.
@@ -83,7 +83,7 @@ jobs:
             type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' && steps.check-tag-format.outputs.latest == 'true' }}

       - name: Build and push
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           push: true
           platforms: linux/amd64,linux/arm64
.github/workflows/sdks-tests.yml (vendored, 280 changes)

@@ -14,6 +14,7 @@ on:
 env:
   MEILI_MASTER_KEY: 'masterKey'
   MEILI_NO_ANALYTICS: 'true'
+  DISABLE_COVERAGE: 'true'

 jobs:
   define-docker-image:
@@ -30,6 +31,117 @@ jobs:
           if [[ $event == 'workflow_dispatch' ]]; then
             echo "docker-image=${{ github.event.inputs.docker_image }}" >> $GITHUB_OUTPUT
           fi
+      - name: Docker image is ${{ steps.define-image.outputs.docker-image }}
+        run: echo "Docker image is ${{ steps.define-image.outputs.docker-image }}"
+
+  ##########
+  ## SDKs ##
+  ##########
+
+  meilisearch-dotnet-tests:
+    needs: define-docker-image
+    name: .NET SDK tests
+    runs-on: ubuntu-latest
+    env:
+      MEILISEARCH_VERSION: ${{ needs.define-docker-image.outputs.docker-image }}
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-dotnet
+      - name: Setup .NET Core
+        uses: actions/setup-dotnet@v3
+        with:
+          dotnet-version: "6.0.x"
+      - name: Install dependencies
+        run: dotnet restore
+      - name: Build
+        run: dotnet build --configuration Release --no-restore
+      - name: Meilisearch (latest version) setup with Docker
+        run: docker compose up -d
+      - name: Run tests
+        run: dotnet test --no-restore --verbosity normal
+
+  meilisearch-dart-tests:
+    needs: define-docker-image
+    name: Dart SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-dart
+      - uses: dart-lang/setup-dart@v1
+        with:
+          sdk: 3.1.1
+      - name: Install dependencies
+        run: dart pub get
+      - name: Run integration tests
+        run: dart test --concurrency=4
+
+  meilisearch-go-tests:
+    needs: define-docker-image
+    name: Go SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - name: Set up Go
+        uses: actions/setup-go@v4
+        with:
+          go-version: stable
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-go
+      - name: Get dependencies
+        run: |
+          go get -v -t -d ./...
+          if [ -f Gopkg.toml ]; then
+              curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
+              dep ensure
+          fi
+      - name: Run integration tests
+        run: go test -v ./...
+
+  meilisearch-java-tests:
+    needs: define-docker-image
+    name: Java SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-java
+      - name: Set up Java
+        uses: actions/setup-java@v3
+        with:
+          java-version: 8
+          distribution: 'zulu'
+          cache: gradle
+      - name: Grant execute permission for gradlew
+        run: chmod +x gradlew
+      - name: Build and run unit and integration tests
+        run: ./gradlew build integrationTest
+
   meilisearch-js-tests:
     needs: define-docker-image
@@ -48,7 +160,7 @@ jobs:
         with:
           repository: meilisearch/meilisearch-js
       - name: Setup node
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           cache: 'yarn'
       - name: Install dependencies
@@ -66,33 +178,6 @@ jobs:
       - name: Run Browser env
         run: yarn test:env:browser

-  instant-meilisearch-tests:
-    needs: define-docker-image
-    name: instant-meilisearch tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/instant-meilisearch
-      - name: Setup node
-        uses: actions/setup-node@v3
-        with:
-          cache: yarn
-      - name: Install dependencies
-        run: yarn install
-      - name: Run tests
-        run: yarn test
-      - name: Build all the playgrounds and the packages
-        run: yarn build
-
   meilisearch-php-tests:
     needs: define-docker-image
     name: PHP SDK tests
@@ -111,8 +196,6 @@ jobs:
           repository: meilisearch/meilisearch-php
       - name: Install PHP
         uses: shivammathur/setup-php@v2
-        with:
-          coverage: none
       - name: Validate composer.json and composer.lock
         run: composer validate
       - name: Install dependencies
@@ -149,36 +232,6 @@ jobs:
       - name: Test with pytest
         run: pipenv run pytest

-  meilisearch-go-tests:
-    needs: define-docker-image
-    name: Go SDK tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - name: Set up Go
-        uses: actions/setup-go@v4
-        with:
-          go-version: stable
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-go
-      - name: Get dependencies
-        run: |
-          go get -v -t -d ./...
-          if [ -f Gopkg.toml ]; then
-              curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
-              dep ensure
-          fi
-      - name: Run integration tests
-        run: go test -v ./...
-
   meilisearch-ruby-tests:
     needs: define-docker-image
     name: Ruby SDK tests
@@ -224,3 +277,110 @@ jobs:
         run: cargo build --verbose
       - name: Run tests
         run: cargo test --verbose
+
+  meilisearch-swift-tests:
+    needs: define-docker-image
+    name: Swift SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-swift
+      - name: Run tests
+        run: swift test
+
+  ########################
+  ## FRONT-END PLUGINS ##
+  ########################
+
+  meilisearch-js-plugins-tests:
+    needs: define-docker-image
+    name: meilisearch-js-plugins tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-js-plugins
+      - name: Setup node
+        uses: actions/setup-node@v4
+        with:
+          cache: yarn
+      - name: Install dependencies
+        run: yarn install
+      - name: Run tests
+        run: yarn test
+      - name: Build all the playgrounds and the packages
+        run: yarn build
+
+  ########################
+  ## BACK-END PLUGINS ###
+  ########################
+
+  meilisearch-rails-tests:
+    needs: define-docker-image
+    name: meilisearch-rails tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-rails
+      - name: Set up Ruby 3
+        uses: ruby/setup-ruby@v1
+        with:
+          ruby-version: 3
+          bundler-cache: true
+      - name: Run tests
+        run: bundle exec rspec
+
+  meilisearch-symfony-tests:
+    needs: define-docker-image
+    name: meilisearch-symfony tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-symfony
+      - name: Install PHP
+        uses: shivammathur/setup-php@v2
+        with:
+          tools: composer:v2, flex
+      - name: Validate composer.json and composer.lock
+        run: composer validate
+      - name: Install dependencies
+        run: composer install --prefer-dist --no-progress --quiet
+      - name: Remove doctrine/annotations
+        run: composer remove --dev doctrine/annotations
+      - name: Run test suite
+        run: composer test:unit
.github/workflows/test-suite.yml (vendored, 29 changes)

@@ -30,20 +30,20 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - name: Run test with Rust stable
+      - name: Setup test with Rust stable
        if: github.event_name != 'schedule'
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
-      - name: Run test with Rust nightly
-        if: github.event_name == 'schedule'
+      - name: Setup test with Rust nightly
+        if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
        uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.4.0
+        uses: Swatinem/rust-cache@v2.7.1
       - name: Run cargo check without any default features
        uses: actions-rs/cargo@v1
        with:
@@ -65,7 +65,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.4.0
+        uses: Swatinem/rust-cache@v2.7.1
       - name: Run cargo check without any default features
        uses: actions-rs/cargo@v1
        with:
@@ -78,12 +78,12 @@ jobs:
           args: --locked --release --all

   test-all-features:
-    name: Tests all features on cron schedule only
+    name: Tests all features
     runs-on: ubuntu-latest
     container:
       # Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
       image: ubuntu:18.04
-    if: github.event_name == 'schedule'
+    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
       - uses: actions/checkout@v3
       - name: Install needed dependencies
@@ -110,7 +110,7 @@ jobs:
     runs-on: ubuntu-latest
     container:
       image: ubuntu:18.04
-    if: github.event_name == 'schedule'
+    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
       - uses: actions/checkout@v3
       - name: Install needed dependencies
@@ -123,7 +123,10 @@ jobs:
           override: true
       - name: Run cargo tree without default features and check lindera is not present
         run: |
-          cargo tree -f '{p} {f}' -e normal --no-default-features | grep lindera -vqz
+          if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -vqz lindera; then
+            echo "lindera has been found in the sources and it shouldn't"
+            exit 1
+          fi
       - name: Run cargo tree with default features and check lindera is pressent
         run: |
           cargo tree -f '{p} {f}' -e normal | grep lindera -qz
@@ -146,7 +149,7 @@ jobs:
           toolchain: stable
           override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.4.0
+        uses: Swatinem/rust-cache@v2.7.1
       - name: Run tests in debug
         uses: actions-rs/cargo@v1
         with:
@@ -161,11 +164,11 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.69.0
+          toolchain: 1.71.1
           override: true
           components: clippy
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.4.0
+        uses: Swatinem/rust-cache@v2.7.1
       - name: Run cargo clippy
         uses: actions-rs/cargo@v1
         with:
@@ -184,7 +187,7 @@ jobs:
           override: true
           components: rustfmt
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.4.0
+        uses: Swatinem/rust-cache@v2.7.1
       - name: Run cargo fmt
         # Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
         # Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
Cargo.lock (generated, 741 changes): file diff suppressed because it is too large.
@@ -18,7 +18,7 @@ members = [
 ]

 [workspace.package]
-version = "1.3.3"
+version = "1.4.1"
 authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"
@@ -28,6 +28,7 @@ license = "MIT"

 [profile.release]
 codegen-units = 1
+debug = true

 [profile.dev.package.flate2]
 opt-level = 3
PROFILING.md (new file, 19 lines)

@@ -0,0 +1,19 @@
+# Profiling Meilisearch
+
+Search engine technologies are complex pieces of software that require thorough profiling tools. We chose to use [Puffin](https://github.com/EmbarkStudios/puffin), which the Rust gaming industry uses extensively. You can export and import the profiling reports using the top bar's _File_ menu options [in Puffin Viewer](https://github.com/embarkstudios/puffin#ui).
+
+![An example profiling with Puffin viewer](assets/profiling-example.png)
+
+## Profiling the Indexing Process
+
+When you enable [the `exportPuffinReports` experimental feature](https://www.meilisearch.com/docs/learn/experimental/overview) of Meilisearch, Puffin reports with the `.puffin` extension will be automatically exported to disk. When this option is enabled, the engine will automatically create a "frame" whenever it executes the `IndexScheduler::tick` method.
+
+[Puffin Viewer](https://github.com/EmbarkStudios/puffin/tree/main/puffin_viewer) is used to analyze the reports. Those reports show areas where Meilisearch spent time during indexing.
+
+Another piece of advice on the Puffin viewer UI interface is to consider the _Merge children with same ID_ option. It can hide the exact actual timings at which events were sent. Please turn it off when you see strange gaps on the Flamegraph. It can help.
+
+## Profiling the Search Process
+
+We still need to take the time to profile the search side of the engine with Puffin. It would require time to profile the filtering phase, query parsing, creation, and execution. We could even profile the Actix HTTP server.
+
+The only issue we see is the framing system. Puffin requires a global frame-based profiling phase, which collides with Meilisearch's ability to accept and answer multiple requests on different threads simultaneously.
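For readers unfamiliar with the frame-based model the new PROFILING.md describes, here is a minimal, self-contained sketch using the `puffin` crate's public API (scope macros plus one explicit frame per loop iteration). It is an illustration only, not Meilisearch's actual integration, which lives around `IndexScheduler::tick`:

```rust
// Minimal puffin sketch: scopes record timings, and one "frame" per tick
// groups them, mirroring the per-`IndexScheduler::tick` frames described above.
fn process_batch(documents: &[String]) {
    puffin::profile_function!(); // records a scope named after this function
    for doc in documents {
        // A named scope with the document as attached data.
        puffin::profile_scope!("index_document", doc.as_str());
        // ... indexing work would happen here ...
    }
}

fn main() {
    puffin::set_scopes_on(true); // scope recording is off by default

    let docs = vec!["a".to_string(), "b".to_string()];
    for _tick in 0..3 {
        process_batch(&docs);
        // Close the current frame; viewers like Puffin Viewer group scopes by frame.
        puffin::GlobalProfiler::lock().new_frame();
    }
}
```

This per-frame grouping is exactly why search profiling is harder than indexing, as the file notes: concurrent requests on different threads do not share one natural global frame the way sequential `tick` calls do.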
README.md (68 changes)

@@ -1,16 +1,20 @@
 <p align="center">
-  <img src="assets/meilisearch-logo-light.svg?sanitize=true#gh-light-mode-only">
-  <img src="assets/meilisearch-logo-dark.svg?sanitize=true#gh-dark-mode-only">
+  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=logo#gh-light-mode-only" target="_blank">
+    <img src="assets/meilisearch-logo-light.svg?sanitize=true#gh-light-mode-only">
+  </a>
+  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=logo#gh-dark-mode-only" target="_blank">
+    <img src="assets/meilisearch-logo-dark.svg?sanitize=true#gh-dark-mode-only">
+  </a>
 </p>

 <h4 align="center">
-  <a href="https://www.meilisearch.com">Website</a> |
+  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Website</a> |
   <a href="https://roadmap.meilisearch.com/tabs/1-under-consideration">Roadmap</a> |
-  <a href="https://www.meilisearch.com/pricing?utm_campaign=oss&utm_source=engine&utm_medium=meilisearch">Meilisearch Cloud</a> |
+  <a href="https://www.meilisearch.com/pricing?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Meilisearch Cloud</a> |
-  <a href="https://blog.meilisearch.com">Blog</a> |
+  <a href="https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Blog</a> |
-  <a href="https://www.meilisearch.com/docs">Documentation</a> |
+  <a href="https://www.meilisearch.com/docs?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Documentation</a> |
-  <a href="https://www.meilisearch.com/docs/faq">FAQ</a> |
+  <a href="https://www.meilisearch.com/docs/faq?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">FAQ</a> |
-  <a href="https://discord.meilisearch.com">Discord</a>
+  <a href="https://discord.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Discord</a>
 </h4>

 <p align="center">
@@ -24,40 +28,40 @@
 Meilisearch helps you shape a delightful search experience in a snap, offering features that work out-of-the-box to speed up your workflow.

 <p align="center" name="demo">
-  <a href="https://where2watch.meilisearch.com/#gh-light-mode-only" target="_blank">
+  <a href="https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-gif#gh-light-mode-only" target="_blank">
     <img src="assets/demo-light.gif#gh-light-mode-only" alt="A bright colored application for finding movies screening near the user">
   </a>
-  <a href="https://where2watch.meilisearch.com/#gh-dark-mode-only" target="_blank">
+  <a href="https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-gif#gh-dark-mode-only" target="_blank">
     <img src="assets/demo-dark.gif#gh-dark-mode-only" alt="A dark colored application for finding movies screening near the user">
   </a>
 </p>

-🔥 [**Try it!**](https://where2watch.meilisearch.com/) 🔥
+🔥 [**Try it!**](https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-link) 🔥

 ## ✨ Features

 - **Search-as-you-type:** find search results in less than 50 milliseconds
-- **[Typo tolerance](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
+- **[Typo tolerance](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
-- **[Filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering) and [faceted search](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
+- **[Filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) and [faceted search](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
-- **[Sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting):** sort results based on price, date, or pretty much anything else your users need
+- **[Sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** sort results based on price, date, or pretty much anything else your users need
-- **[Synonym support](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy#synonyms):** configure synonyms to include more relevant content in your search results
+- **[Synonym support](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features#synonyms):** configure synonyms to include more relevant content in your search results
-- **[Geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch):** filter and sort documents based on geographic data
+- **[Geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** filter and sort documents based on geographic data
-- **[Extensive language support](https://www.meilisearch.com/docs/learn/what_is_meilisearch/language):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
+- **[Extensive language support](https://www.meilisearch.com/docs/learn/what_is_meilisearch/language?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
-- **[Security management](https://www.meilisearch.com/docs/learn/security/master_api_keys):** control which users can access what data with API keys that allow fine-grained permissions handling
+- **[Security management](https://www.meilisearch.com/docs/learn/security/master_api_keys?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** control which users can access what data with API keys that allow fine-grained permissions handling
-- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/tenant_tokens):** personalize search results for any number of application tenants
+- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** personalize search results for any number of application tenants
 - **Highly Customizable:** customize Meilisearch to your specific needs or use our out-of-the-box and hassle-free presets
-- **[RESTful API](https://www.meilisearch.com/docs/reference/api/overview):** integrate Meilisearch in your technical stack with our plugins and SDKs
+- **[RESTful API](https://www.meilisearch.com/docs/reference/api/overview?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** integrate Meilisearch in your technical stack with our plugins and SDKs
 - **Easy to install, deploy, and maintain**

 ## 📖 Documentation

-You can consult Meilisearch's documentation at [https://www.meilisearch.com/docs](https://www.meilisearch.com/docs/).
+You can consult Meilisearch's documentation at [https://www.meilisearch.com/docs](https://www.meilisearch.com/docs/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=docs).

 ## 🚀 Getting started

-For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://www.meilisearch.com/docs/learn/getting_started/quick_start) guide.
+For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://www.meilisearch.com/docs/learn/getting_started/quick_start?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=get-started) guide.

-You may also want to check out [Meilisearch 101](https://www.meilisearch.com/docs/learn/getting_started/filtering_and_sorting) for an introduction to some of Meilisearch's most popular features.
+You may also want to check out [Meilisearch 101](https://www.meilisearch.com/docs/learn/getting_started/filtering_and_sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=get-started) for an introduction to some of Meilisearch's most popular features.

 ## ⚡ Supercharge your Meilisearch experience
@@ -67,29 +71,29 @@ Say goodbye to server deployment and manual updates with [Meilisearch Cloud](htt

 Install one of our SDKs in your project for seamless integration between Meilisearch and your favorite language or framework!

-Take a look at the complete [Meilisearch integration list](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks).
+Take a look at the complete [Meilisearch integration list](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=sdks-link).

-[](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks)
+[](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=sdks-logos)

 ## ⚙️ Advanced usage

-Experienced users will want to keep our [API Reference](https://www.meilisearch.com/docs/reference/api/overview) close at hand.
+Experienced users will want to keep our [API Reference](https://www.meilisearch.com/docs/reference/api/overview?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced) close at hand.

-We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering), [sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting), [geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch), [API keys](https://www.meilisearch.com/docs/learn/security/master_api_keys), and [tenant tokens](https://www.meilisearch.com/docs/learn/security/tenant_tokens).
+We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), [sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), [geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), [API keys](https://www.meilisearch.com/docs/learn/security/master_api_keys?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), and [tenant tokens](https://www.meilisearch.com/docs/learn/security/tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced).

-Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://www.meilisearch.com/docs/learn/core_concepts/documents) and [indexes](https://www.meilisearch.com/docs/learn/core_concepts/indexes).
+Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://www.meilisearch.com/docs/learn/core_concepts/documents?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced) and [indexes](https://www.meilisearch.com/docs/learn/core_concepts/indexes?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced).

 ## 📊 Telemetry

-Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry#how-to-disable-data-collection) whenever you want.
+Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) whenever you want.

 To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data.

-If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry) of our documentation.
+If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) of our documentation.

 ## 📫 Get in touch!

-Meilisearch is a search engine created by [Meili](https://www.welcometothejungle.com/en/companies/meilisearch), a software development company based in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/)
+Meilisearch is a search engine created by [Meili](https://www.welcometothejungle.com/en/companies/meilisearch), a software development company based in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=contact)

 🗞 [Subscribe to our newsletter](https://meilisearch.us2.list-manage.com/subscribe?u=27870f7b71c908a8b359599fb&id=79582d828e) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.
assets/profiling-example.png (new binary file, 1.2 MiB): binary file not shown.
@@ -14,7 +14,7 @@ license.workspace = true
 anyhow = "1.0.70"
 csv = "1.2.1"
 milli = { path = "../milli" }
-mimalloc = { version = "0.1.36", default-features = false }
+mimalloc = { version = "0.1.37", default-features = false }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }

 [dev-dependencies]
@@ -262,6 +262,9 @@ pub(crate) mod test {
         sortable_attributes: Setting::Set(btreeset! { S("age") }),
         ranking_rules: Setting::NotSet,
         stop_words: Setting::NotSet,
+        non_separator_tokens: Setting::NotSet,
+        separator_tokens: Setting::NotSet,
+        dictionary: Setting::NotSet,
         synonyms: Setting::NotSet,
         distinct_attribute: Setting::NotSet,
         typo_tolerance: Setting::NotSet,
@@ -340,6 +340,9 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
 }
 },
 stop_words: settings.stop_words.into(),
+non_separator_tokens: v6::Setting::NotSet,
+separator_tokens: v6::Setting::NotSet,
+dictionary: v6::Setting::NotSet,
 synonyms: settings.synonyms.into(),
 distinct_attribute: settings.distinct_attribute.into(),
 typo_tolerance: match settings.typo_tolerance {
@@ -526,12 +526,12 @@ pub(crate) mod test {
 assert!(indexes.is_empty());
 
 // products
-insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(products.metadata(), @r###"
 {
 "uid": "products",
 "primaryKey": "sku",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2022-10-09T20:27:22.688964637Z",
+"updatedAt": "2022-10-09T20:27:23.951017769Z"
 }
 "###);
 
@@ -541,12 +541,12 @@ pub(crate) mod test {
 meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
 
 // movies
-insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(movies.metadata(), @r###"
 {
 "uid": "movies",
 "primaryKey": "id",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2022-10-09T20:27:22.197788495Z",
+"updatedAt": "2022-10-09T20:28:01.93111053Z"
 }
 "###);
 
@@ -571,12 +571,12 @@ pub(crate) mod test {
 meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
 
 // spells
-insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(spells.metadata(), @r###"
 {
 "uid": "dnd_spells",
 "primaryKey": "index",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2022-10-09T20:27:24.242683494Z",
+"updatedAt": "2022-10-09T20:27:24.312809641Z"
 }
 "###);
 
@@ -617,12 +617,12 @@ pub(crate) mod test {
 assert!(indexes.is_empty());
 
 // products
-insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(products.metadata(), @r###"
 {
 "uid": "products",
 "primaryKey": "sku",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2023-01-30T16:25:56.595257Z",
+"updatedAt": "2023-01-30T16:25:58.70348Z"
 }
 "###);
 
@@ -632,12 +632,12 @@ pub(crate) mod test {
 meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
 
 // movies
-insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(movies.metadata(), @r###"
 {
 "uid": "movies",
 "primaryKey": "id",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2023-01-30T16:25:56.192178Z",
+"updatedAt": "2023-01-30T16:25:56.455714Z"
 }
 "###);
 
@@ -647,12 +647,12 @@ pub(crate) mod test {
 meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720");
 
 // spells
-insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(spells.metadata(), @r###"
 {
 "uid": "dnd_spells",
 "primaryKey": "index",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2023-01-30T16:25:58.876405Z",
+"updatedAt": "2023-01-30T16:25:59.079906Z"
 }
 "###);
 
@@ -46,6 +46,7 @@ pub type Checked = settings::Checked;
 pub type Unchecked = settings::Unchecked;
 
 pub type Task = updates::UpdateEntry;
+pub type Kind = updates::UpdateMeta;
 
 // everything related to the errors
 pub type ResponseError = errors::ResponseError;
@@ -107,8 +108,11 @@ impl V2Reader {
 pub fn indexes(&self) -> Result<impl Iterator<Item = Result<V2IndexReader>> + '_> {
 Ok(self.index_uuid.iter().map(|index| -> Result<_> {
 V2IndexReader::new(
-index.uid.clone(),
 &self.dump.path().join("indexes").join(format!("index-{}", index.uuid)),
+index,
+BufReader::new(
+File::open(self.dump.path().join("updates").join("data.jsonl")).unwrap(),
+),
 )
 }))
 }
@@ -143,16 +147,41 @@ pub struct V2IndexReader {
 }
 
 impl V2IndexReader {
-pub fn new(name: String, path: &Path) -> Result<Self> {
+pub fn new(path: &Path, index_uuid: &IndexUuid, tasks: BufReader<File>) -> Result<Self> {
 let meta = File::open(path.join("meta.json"))?;
 let meta: DumpMeta = serde_json::from_reader(meta)?;
 
+let mut created_at = None;
+let mut updated_at = None;
+
+for line in tasks.lines() {
+let task: Task = serde_json::from_str(&line?)?;
+if !(task.uuid == index_uuid.uuid && task.is_finished()) {
+continue;
+}
+
+let new_created_at = match task.update.meta() {
+Kind::DocumentsAddition { .. } | Kind::Settings(_) => task.update.finished_at(),
+_ => None,
+};
+let new_updated_at = task.update.finished_at();
+
+if created_at.is_none() || created_at > new_created_at {
+created_at = new_created_at;
+}
+
+if updated_at.is_none() || updated_at < new_updated_at {
+updated_at = new_updated_at;
+}
+}
+
+let current_time = OffsetDateTime::now_utc();
+
 let metadata = IndexMetadata {
-uid: name,
+uid: index_uuid.uid.clone(),
 primary_key: meta.primary_key,
-// FIXME: Iterate over the whole task queue to find the creation and last update date.
-created_at: OffsetDateTime::now_utc(),
-updated_at: OffsetDateTime::now_utc(),
+created_at: created_at.unwrap_or(current_time),
+updated_at: updated_at.unwrap_or(current_time),
 };
 
 let ret = V2IndexReader {
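The rewritten constructor resolves the old FIXME (both dates were previously just `OffsetDateTime::now_utc()`) by folding over the index's finished tasks: `createdAt` becomes the earliest finish date among creation-like tasks (`DocumentsAddition`/`Settings`), and `updatedAt` the latest finish date of any task. A standalone sketch of the same fold, with illustrative stand-in types (plain integers in place of `OffsetDateTime`); unlike the hunk, the sketch only compares when both sides are `Some`, sidestepping the fact that `Option`'s derived ordering puts `None` below every `Some`:

```rust
// Hypothetical, simplified stand-ins for the dump reader's task types.
struct TaskStub {
    matches_index: bool,      // task.uuid == index_uuid.uuid
    is_finished: bool,        // task.is_finished()
    creation_like: bool,      // DocumentsAddition | Settings
    finished_at: Option<i64>, // task.update.finished_at()
}

/// Fold the task history into (created_at, updated_at).
fn recover_dates(tasks: &[TaskStub]) -> (Option<i64>, Option<i64>) {
    let (mut created_at, mut updated_at): (Option<i64>, Option<i64>) = (None, None);
    for task in tasks.iter().filter(|t| t.matches_index && t.is_finished) {
        // Only "creation-like" tasks count toward created_at.
        if task.creation_like {
            if let Some(t) = task.finished_at {
                if created_at.map_or(true, |c| c > t) {
                    created_at = Some(t); // keep the earliest
                }
            }
        }
        // Any finished task can move updated_at forward.
        if let Some(t) = task.finished_at {
            if updated_at.map_or(true, |u| u < t) {
                updated_at = Some(t); // keep the latest
            }
        }
    }
    (created_at, updated_at)
}
```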
@@ -248,12 +277,12 @@ pub(crate) mod test {
 assert!(indexes.is_empty());
 
 // products
-insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(products.metadata(), @r###"
 {
 "uid": "products",
 "primaryKey": "sku",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2022-10-09T20:27:22.688964637Z",
+"updatedAt": "2022-10-09T20:27:23.951017769Z"
 }
 "###);
 
@@ -263,12 +292,12 @@ pub(crate) mod test {
 meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
 
 // movies
-insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(movies.metadata(), @r###"
 {
 "uid": "movies",
 "primaryKey": "id",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2022-10-09T20:27:22.197788495Z",
+"updatedAt": "2022-10-09T20:28:01.93111053Z"
 }
 "###);
 
@@ -293,12 +322,12 @@ pub(crate) mod test {
 meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
 
 // spells
-insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(spells.metadata(), @r###"
 {
 "uid": "dnd_spells",
 "primaryKey": "index",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2022-10-09T20:27:24.242683494Z",
+"updatedAt": "2022-10-09T20:27:24.312809641Z"
 }
 "###);
 
@@ -340,12 +369,12 @@ pub(crate) mod test {
 assert!(indexes.is_empty());
 
 // products
-insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(products.metadata(), @r###"
 {
 "uid": "products",
 "primaryKey": "sku",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2023-01-30T16:25:56.595257Z",
+"updatedAt": "2023-01-30T16:25:58.70348Z"
 }
 "###);
 
@@ -355,12 +384,12 @@ pub(crate) mod test {
 meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
 
 // movies
-insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(movies.metadata(), @r###"
 {
 "uid": "movies",
 "primaryKey": "id",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2023-01-30T16:25:56.192178Z",
+"updatedAt": "2023-01-30T16:25:56.455714Z"
 }
 "###);
 
@@ -370,12 +399,12 @@ pub(crate) mod test {
 meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720");
 
 // spells
-insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+insta::assert_json_snapshot!(spells.metadata(), @r###"
 {
 "uid": "dnd_spells",
 "primaryKey": "index",
-"createdAt": "[now]",
-"updatedAt": "[now]"
+"createdAt": "2023-01-30T16:25:58.876405Z",
+"updatedAt": "2023-01-30T16:25:59.079906Z"
 }
 "###);
 
@@ -227,4 +227,14 @@ impl UpdateStatus {
 _ => None,
 }
 }
+
+pub fn finished_at(&self) -> Option<OffsetDateTime> {
+match self {
+UpdateStatus::Processing(_) => None,
+UpdateStatus::Enqueued(_) => None,
+UpdateStatus::Processed(u) => Some(u.processed_at),
+UpdateStatus::Aborted(_) => None,
+UpdateStatus::Failed(u) => Some(u.failed_at),
+}
+}
 }
@@ -13,7 +13,7 @@ license.workspace = true
 [dependencies]
 arbitrary = { version = "1.3.0", features = ["derive"] }
 clap = { version = "4.3.0", features = ["derive"] }
-fastrand = "1.9.0"
+fastrand = "2.0.0"
 milli = { path = "../milli" }
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }
@@ -12,6 +12,7 @@ license.workspace = true
 
 [dependencies]
 anyhow = "1.0.70"
+backtrace = "0.3.69"
 bincode = "1.3.3"
 csv = "1.2.1"
 derive_builder = "0.12.0"
@@ -22,6 +23,7 @@ log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 page_size = "0.5.0"
+puffin = "0.16.0"
 roaring = { version = "0.10.1", features = ["serde"] }
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }
@@ -19,6 +19,7 @@ one indexing operation.
 
 use std::collections::{BTreeSet, HashSet};
 use std::ffi::OsStr;
+use std::fmt;
 use std::fs::{self, File};
 use std::io::BufWriter;
 
@@ -67,10 +68,6 @@ pub(crate) enum Batch {
 op: IndexOperation,
 must_create_index: bool,
 },
-IndexDocumentDeletionByFilter {
-index_uid: String,
-task: Task,
-},
 IndexCreation {
 index_uid: String,
 primary_key: Option<String>,
@@ -114,6 +111,10 @@ pub(crate) enum IndexOperation {
 documents: Vec<Vec<String>>,
 tasks: Vec<Task>,
 },
+IndexDocumentDeletionByFilter {
+index_uid: String,
+task: Task,
+},
 DocumentClear {
 index_uid: String,
 tasks: Vec<Task>,
@@ -155,7 +156,6 @@ impl Batch {
 | Batch::TaskDeletion(task)
 | Batch::Dump(task)
 | Batch::IndexCreation { task, .. }
-| Batch::IndexDocumentDeletionByFilter { task, .. }
 | Batch::IndexUpdate { task, .. } => vec![task.uid],
 Batch::SnapshotCreation(tasks) | Batch::IndexDeletion { tasks, .. } => {
 tasks.iter().map(|task| task.uid).collect()
@@ -167,6 +167,7 @@ impl Batch {
 | IndexOperation::DocumentClear { tasks, .. } => {
 tasks.iter().map(|task| task.uid).collect()
 }
+IndexOperation::IndexDocumentDeletionByFilter { task, .. } => vec![task.uid],
 IndexOperation::SettingsAndDocumentOperation {
 document_import_tasks: tasks,
 settings_tasks: other,
@@ -194,8 +195,30 @@ impl Batch {
 IndexOperation { op, .. } => Some(op.index_uid()),
 IndexCreation { index_uid, .. }
 | IndexUpdate { index_uid, .. }
-| IndexDeletion { index_uid, .. }
-| IndexDocumentDeletionByFilter { index_uid, .. } => Some(index_uid),
+| IndexDeletion { index_uid, .. } => Some(index_uid),
 }
 }
 }
+
+impl fmt::Display for Batch {
+/// A text used when we debug the profiling reports.
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+let index_uid = self.index_uid();
+let tasks = self.ids();
+match self {
+Batch::TaskCancelation { .. } => f.write_str("TaskCancelation")?,
+Batch::TaskDeletion(_) => f.write_str("TaskDeletion")?,
+Batch::SnapshotCreation(_) => f.write_str("SnapshotCreation")?,
+Batch::Dump(_) => f.write_str("Dump")?,
+Batch::IndexOperation { op, .. } => write!(f, "{op}")?,
+Batch::IndexCreation { .. } => f.write_str("IndexCreation")?,
+Batch::IndexUpdate { .. } => f.write_str("IndexUpdate")?,
+Batch::IndexDeletion { .. } => f.write_str("IndexDeletion")?,
+Batch::IndexSwap { .. } => f.write_str("IndexSwap")?,
+};
+match index_uid {
+Some(name) => f.write_fmt(format_args!(" on {name:?} from tasks: {tasks:?}")),
+None => f.write_fmt(format_args!(" from tasks: {tasks:?}")),
+}
+}
+}
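This `Display` implementation exists so the scheduler can label profiling scopes and logs with a one-line batch summary (it is passed to `puffin::profile_function!(batch.to_string())` further down). A minimal sketch of the same pattern on a stripped-down enum, to show the expected output shape (the enum and its fields here are illustrative, not the scheduler's real ones):

```rust
use std::fmt;

// Illustrative stand-in for the scheduler's Batch enum.
enum MiniBatch {
    Dump { tasks: Vec<u32> },
    IndexUpdate { index_uid: String, tasks: Vec<u32> },
}

impl fmt::Display for MiniBatch {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MiniBatch::Dump { tasks } => write!(f, "Dump from tasks: {tasks:?}"),
            MiniBatch::IndexUpdate { index_uid, tasks } => {
                write!(f, "IndexUpdate on {index_uid:?} from tasks: {tasks:?}")
            }
        }
    }
}

fn main() {
    let batch = MiniBatch::IndexUpdate { index_uid: "movies".into(), tasks: vec![3, 4] };
    // Prints: IndexUpdate on "movies" from tasks: [3, 4]
    println!("{batch}");
}
```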
@@ -205,6 +228,7 @@ impl IndexOperation {
 match self {
 IndexOperation::DocumentOperation { index_uid, .. }
 | IndexOperation::DocumentDeletion { index_uid, .. }
+| IndexOperation::IndexDocumentDeletionByFilter { index_uid, .. }
 | IndexOperation::DocumentClear { index_uid, .. }
 | IndexOperation::Settings { index_uid, .. }
 | IndexOperation::DocumentClearAndSetting { index_uid, .. }
@@ -213,6 +237,30 @@ impl IndexOperation {
 }
 }
+
+impl fmt::Display for IndexOperation {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+match self {
+IndexOperation::DocumentOperation { .. } => {
+f.write_str("IndexOperation::DocumentOperation")
+}
+IndexOperation::DocumentDeletion { .. } => {
+f.write_str("IndexOperation::DocumentDeletion")
+}
+IndexOperation::IndexDocumentDeletionByFilter { .. } => {
+f.write_str("IndexOperation::IndexDocumentDeletionByFilter")
+}
+IndexOperation::DocumentClear { .. } => f.write_str("IndexOperation::DocumentClear"),
+IndexOperation::Settings { .. } => f.write_str("IndexOperation::Settings"),
+IndexOperation::DocumentClearAndSetting { .. } => {
+f.write_str("IndexOperation::DocumentClearAndSetting")
+}
+IndexOperation::SettingsAndDocumentOperation { .. } => {
+f.write_str("IndexOperation::SettingsAndDocumentOperation")
+}
+}
+}
+}
 
 impl IndexScheduler {
 /// Convert an [`BatchKind`](crate::autobatcher::BatchKind) into a [`Batch`].
 ///
@@ -239,9 +287,12 @@ impl IndexScheduler {
 let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
 match &task.kind {
 KindWithContent::DocumentDeletionByFilter { index_uid, .. } => {
-Ok(Some(Batch::IndexDocumentDeletionByFilter {
-index_uid: index_uid.clone(),
-task,
+Ok(Some(Batch::IndexOperation {
+op: IndexOperation::IndexDocumentDeletionByFilter {
+index_uid: index_uid.clone(),
+task,
+},
+must_create_index: false,
 }))
 }
 _ => unreachable!(),
@@ -471,6 +522,8 @@ impl IndexScheduler {
 #[cfg(test)]
 self.maybe_fail(crate::tests::FailureLocation::InsideCreateBatch)?;
 
+puffin::profile_function!();
+
 let enqueued = &self.get_status(rtxn, Status::Enqueued)?;
 let to_cancel = self.get_kind(rtxn, Kind::TaskCancelation)? & enqueued;
 
@@ -575,6 +628,9 @@ impl IndexScheduler {
 self.maybe_fail(crate::tests::FailureLocation::PanicInsideProcessBatch)?;
 self.breakpoint(crate::Breakpoint::InsideProcessBatch);
 }
+
+puffin::profile_function!(batch.to_string());
+
 match batch {
 Batch::TaskCancelation { mut task, previous_started_at, previous_processing_tasks } => {
 // 1. Retrieve the tasks that matched the query at enqueue-time.
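Both hunks instrument the scheduler with puffin scopes: `profile_function!()` opens a scope named after the enclosing function, and the `batch.to_string()` variant attaches the batch's `Display` output as the scope's data string. A minimal sketch of the macro's optional-data form (the function and label are illustrative):

```rust
fn process_batch(batch_label: String) {
    // Opens a puffin scope named after this function; the argument is
    // attached as the scope's data string, visible in the puffin viewer.
    puffin::profile_function!(batch_label);
    // ... process the batch ...
}
```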
@@ -769,6 +825,10 @@ impl IndexScheduler {
 // 2. dump the tasks
 let mut dump_tasks = dump.create_tasks_queue()?;
 for ret in self.all_tasks.iter(&rtxn)? {
+if self.must_stop_processing.get() {
+return Err(Error::AbortedTask);
+}
+
 let (_, mut t) = ret?;
 let status = t.status;
 let content_file = t.content_uuid();
@@ -789,6 +849,9 @@ impl IndexScheduler {
 
 // 2.1. Dump the `content_file` associated with the task if there is one and the task is not finished yet.
 if let Some(content_file) = content_file {
+if self.must_stop_processing.get() {
+return Err(Error::AbortedTask);
+}
 if status == Status::Enqueued {
 let content_file = self.file_store.get_update(content_file)?;
 
@@ -828,6 +891,9 @@ impl IndexScheduler {
 
 // 3.1. Dump the documents
 for ret in index.all_documents(&rtxn)? {
+if self.must_stop_processing.get() {
+return Err(Error::AbortedTask);
+}
 let (_id, doc) = ret?;
 let document = milli::obkv_to_json(&all_fields, &fields_ids_map, doc)?;
 index_dumper.push_document(&document)?;
@@ -840,13 +906,16 @@ impl IndexScheduler {
 })?;
 
 // 4. Dump experimental feature settings
-let features = self.features()?.runtime_features();
+let features = self.features().runtime_features();
 dump.create_experimental_features(features)?;
 
 let dump_uid = started_at.format(format_description!(
 "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
 )).unwrap();
 
+if self.must_stop_processing.get() {
+return Err(Error::AbortedTask);
+}
 let path = self.dumps_path.join(format!("{}.dump", dump_uid));
 let file = File::create(path)?;
 dump.persist_to(BufWriter::new(file))?;
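Taken together, the checks added across these dump hunks make a long-running dump cancellable: the worker polls a shared flag at every loop step and bails out with `Error::AbortedTask` before the dump file is ever persisted. A minimal sketch of the same cooperative-cancellation pattern with a plain `AtomicBool` (names are illustrative; the scheduler's `MustStopProcessing` wraps a similar flag):

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

#[derive(Debug)]
struct AbortedTask;

fn dump_all(items: &[u32], must_stop: &AtomicBool) -> Result<usize, AbortedTask> {
    let mut written = 0;
    for item in items {
        // Poll the flag on every iteration so a cancelation can interrupt
        // the dump instead of waiting for it to finish.
        if must_stop.load(Ordering::Relaxed) {
            return Err(AbortedTask);
        }
        // ... serialize `item` here ...
        let _ = item;
        written += 1;
    }
    Ok(written)
}

fn main() {
    let must_stop = Arc::new(AtomicBool::new(false));
    let items: Vec<u32> = (0..1000).collect();
    // Another thread would flip the flag when a TaskCancelation arrives.
    must_stop.store(true, Ordering::Relaxed);
    assert!(dump_all(&items, &must_stop).is_err());
}
```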
@@ -891,51 +960,6 @@ impl IndexScheduler {
 
 Ok(tasks)
 }
-Batch::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
-let (index_uid, filter) =
-if let KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr } =
-&task.kind
-{
-(index_uid, filter_expr)
-} else {
-unreachable!()
-};
-let index = {
-let rtxn = self.env.read_txn()?;
-self.index_mapper.index(&rtxn, index_uid)?
-};
-let deleted_documents = delete_document_by_filter(filter, index);
-let original_filter = if let Some(Details::DocumentDeletionByFilter {
-original_filter,
-deleted_documents: _,
-}) = task.details
-{
-original_filter
-} else {
-// In the case of a `documentDeleteByFilter` the details MUST be set
-unreachable!();
-};
-
-match deleted_documents {
-Ok(deleted_documents) => {
-task.status = Status::Succeeded;
-task.details = Some(Details::DocumentDeletionByFilter {
-original_filter,
-deleted_documents: Some(deleted_documents),
-});
-}
-Err(e) => {
-task.status = Status::Failed;
-task.details = Some(Details::DocumentDeletionByFilter {
-original_filter,
-deleted_documents: Some(0),
-});
-task.error = Some(e.into());
-}
-}
-
-Ok(vec![task])
-}
 Batch::IndexCreation { index_uid, primary_key, task } => {
 let wtxn = self.env.write_txn()?;
 if self.index_mapper.exists(&wtxn, &index_uid)? {
@@ -1111,6 +1135,8 @@ impl IndexScheduler {
 index: &'i Index,
 operation: IndexOperation,
 ) -> Result<Vec<Task>> {
+puffin::profile_function!();
+
 match operation {
 IndexOperation::DocumentClear { mut tasks, .. } => {
 let count = milli::update::ClearDocuments::new(index_wtxn, index).execute()?;
@@ -1292,6 +1318,47 @@ impl IndexScheduler {
 
 Ok(tasks)
 }
+IndexOperation::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
+let filter =
+if let KindWithContent::DocumentDeletionByFilter { filter_expr, .. } =
+&task.kind
+{
+filter_expr
+} else {
+unreachable!()
+};
+let deleted_documents = delete_document_by_filter(index_wtxn, filter, index);
+let original_filter = if let Some(Details::DocumentDeletionByFilter {
+original_filter,
+deleted_documents: _,
+}) = task.details
+{
+original_filter
+} else {
+// In the case of a `documentDeleteByFilter` the details MUST be set
+unreachable!();
+};
+
+match deleted_documents {
+Ok(deleted_documents) => {
+task.status = Status::Succeeded;
+task.details = Some(Details::DocumentDeletionByFilter {
+original_filter,
+deleted_documents: Some(deleted_documents),
+});
+}
+Err(e) => {
+task.status = Status::Failed;
+task.details = Some(Details::DocumentDeletionByFilter {
+original_filter,
+deleted_documents: Some(0),
+});
+task.error = Some(e.into());
+}
+}
+
+Ok(vec![task])
+}
 IndexOperation::Settings { index_uid: _, settings, mut tasks } => {
 let indexer_config = self.index_mapper.indexer_config();
 let mut builder = milli::update::Settings::new(index_wtxn, index, indexer_config);
@@ -1491,23 +1558,22 @@
 }
 }
 
-fn delete_document_by_filter(filter: &serde_json::Value, index: Index) -> Result<u64> {
+fn delete_document_by_filter<'a>(
+wtxn: &mut RwTxn<'a, '_>,
+filter: &serde_json::Value,
+index: &'a Index,
+) -> Result<u64> {
 let filter = Filter::from_json(filter)?;
 Ok(if let Some(filter) = filter {
-let mut wtxn = index.write_txn()?;
-let candidates = filter.evaluate(&wtxn, &index).map_err(|err| match err {
+let candidates = filter.evaluate(wtxn, index).map_err(|err| match err {
 milli::Error::UserError(milli::UserError::InvalidFilter(_)) => {
 Error::from(err).with_custom_error_code(Code::InvalidDocumentFilter)
 }
 e => e.into(),
 })?;
-let mut delete_operation = DeleteDocuments::new(&mut wtxn, &index)?;
+let mut delete_operation = DeleteDocuments::new(wtxn, index)?;
 delete_operation.delete_documents(&candidates);
-let deleted_documents =
-delete_operation.execute().map(|result| result.deleted_documents)?;
-wtxn.commit()?;
-deleted_documents
+delete_operation.execute().map(|result| result.deleted_documents)?
 } else {
 0
 })
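The refactor moves transaction ownership out of the helper: instead of opening and committing its own `RwTxn`, `delete_document_by_filter` now borrows the caller's write transaction, so the deletion commits or aborts together with the rest of the batch. A minimal sketch of the same inversion, with a toy transaction type standing in for heed's `RwTxn` (all names here are illustrative):

```rust
// Toy stand-in for heed's RwTxn: if it is dropped without `commit`,
// the work is lost.
struct Txn;
impl Txn {
    fn commit(self) {}
}

// Before: the helper owns the transaction, so its work becomes durable
// even if the caller's batch later fails — the two can disagree.
fn delete_owning(txn: Txn) -> u64 {
    // ... delete documents ...
    txn.commit();
    7
}

// After: the helper borrows the caller's transaction; nothing is durable
// until the caller commits, so batch and deletion succeed or fail together.
fn delete_borrowing(_txn: &mut Txn) -> u64 {
    // ... delete documents using the shared transaction ...
    7
}

fn main() {
    let mut batch_txn = Txn;
    let deleted = delete_borrowing(&mut batch_txn);
    // ... apply the rest of the batch with `batch_txn` ...
    batch_txn.commit(); // single commit point for the whole batch
    println!("deleted {deleted} documents");
    let _ = delete_owning(Txn); // shown only for contrast
}
```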
@@ -108,6 +108,8 @@ pub enum Error {
 TaskDeletionWithEmptyQuery,
 #[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
 TaskCancelationWithEmptyQuery,
+#[error("Aborted task")]
+AbortedTask,
 
 #[error(transparent)]
 Dump(#[from] dump::Error),
@@ -115,8 +117,13 @@
 Heed(#[from] heed::Error),
 #[error(transparent)]
 Milli(#[from] milli::Error),
-#[error("An unexpected crash occurred when processing the task.")]
-ProcessBatchPanicked,
+#[error("An unexpected crash occurred when processing the task. {}", {
+match .0 {
+Some(report) => format!("Get /reports/{}", report),
+None => "No report was saved.".into(),
+}
+})]
+ProcessBatchPanicked(Option<uuid::Uuid>),
 #[error(transparent)]
 FileStore(#[from] file_store::Error),
 #[error(transparent)]
@@ -175,10 +182,11 @@ impl Error {
 | Error::TaskNotFound(_)
 | Error::TaskDeletionWithEmptyQuery
 | Error::TaskCancelationWithEmptyQuery
+| Error::AbortedTask
 | Error::Dump(_)
 | Error::Heed(_)
 | Error::Milli(_)
-| Error::ProcessBatchPanicked
+| Error::ProcessBatchPanicked(_)
 | Error::FileStore(_)
 | Error::IoError(_)
 | Error::Persist(_)
@@ -221,7 +229,7 @@ impl ErrorCode for Error {
 Error::NoSpaceLeftInTaskQueue => Code::NoSpaceLeftOnDevice,
 Error::Dump(e) => e.error_code(),
 Error::Milli(e) => e.error_code(),
-Error::ProcessBatchPanicked => Code::Internal,
+Error::ProcessBatchPanicked(_) => Code::Internal,
 Error::Heed(e) => e.error_code(),
 Error::HeedTransaction(e) => e.error_code(),
 Error::FileStore(e) => e.error_code(),
@@ -236,6 +244,9 @@ impl ErrorCode for Error {
 Error::TaskDatabaseUpdate(_) => Code::Internal,
 Error::CreateBatch(_) => Code::Internal,
 
+// This one should never be seen by the end user
+Error::AbortedTask => Code::Internal,
+
 #[cfg(test)]
 Error::PlannedFailure => Code::Internal,
 }
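The new `ProcessBatchPanicked(Option<uuid::Uuid>)` variant carries the id of the saved panic report, and the `#[error]` attribute embeds a block expression so the message can point the caller at `GET /reports/<uuid>` when a report exists. A minimal sketch of the same thiserror pattern, mirroring the hunk above (written against thiserror 1.x; `u128` stands in for `uuid::Uuid`, and the variant is illustrative):

```rust
use thiserror::Error;

#[derive(Error, Debug)]
enum SchedulerError {
    // thiserror allows an arbitrary expression as a format argument;
    // `.0` refers to the variant's first tuple field.
    #[error("An unexpected crash occurred when processing the task. {}", {
        match .0 {
            Some(report) => format!("Get /reports/{}", report),
            None => "No report was saved.".into(),
        }
    })]
    ProcessBatchPanicked(Option<u128>), // u128 standing in for uuid::Uuid
}

fn main() {
    println!("{}", SchedulerError::ProcessBatchPanicked(Some(42))); // ... Get /reports/42
    println!("{}", SchedulerError::ProcessBatchPanicked(None));     // ... No report was saved.
}
```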
@@ -1,6 +1,8 @@
+use std::sync::{Arc, RwLock};
+
 use meilisearch_types::features::{InstanceTogglableFeatures, RuntimeTogglableFeatures};
 use meilisearch_types::heed::types::{SerdeJson, Str};
-use meilisearch_types::heed::{Database, Env, RoTxn, RwTxn};
+use meilisearch_types::heed::{Database, Env, RwTxn};
 
 use crate::error::FeatureNotEnabledError;
 use crate::Result;
@@ -9,20 +11,19 @@ const EXPERIMENTAL_FEATURES: &str = "experimental-features";
 
 #[derive(Clone)]
 pub(crate) struct FeatureData {
-runtime: Database<Str, SerdeJson<RuntimeTogglableFeatures>>,
-instance: InstanceTogglableFeatures,
+persisted: Database<Str, SerdeJson<RuntimeTogglableFeatures>>,
+runtime: Arc<RwLock<RuntimeTogglableFeatures>>,
 }
 
 #[derive(Debug, Clone, Copy)]
 pub struct RoFeatures {
 runtime: RuntimeTogglableFeatures,
-instance: InstanceTogglableFeatures,
 }
 
 impl RoFeatures {
-fn new(txn: RoTxn<'_>, data: &FeatureData) -> Result<Self> {
-let runtime = data.runtime_features(txn)?;
-Ok(Self { runtime, instance: data.instance })
+fn new(data: &FeatureData) -> Self {
+let runtime = data.runtime_features();
+Self { runtime }
 }
 
 pub fn runtime_features(&self) -> RuntimeTogglableFeatures {
@@ -43,13 +44,13 @@ impl RoFeatures {
 }
 
 pub fn check_metrics(&self) -> Result<()> {
-if self.instance.metrics {
+if self.runtime.metrics {
 Ok(())
 } else {
 Err(FeatureNotEnabledError {
 disabled_action: "Getting metrics",
 feature: "metrics",
-issue_link: "https://github.com/meilisearch/meilisearch/discussions/3518",
+issue_link: "https://github.com/meilisearch/product/discussions/625",
 }
 .into())
 }
@@ -67,15 +68,36 @@ impl RoFeatures {
 .into())
 }
 }
+
+pub fn check_puffin(&self) -> Result<()> {
+if self.runtime.export_puffin_reports {
+Ok(())
+} else {
+Err(FeatureNotEnabledError {
+disabled_action: "Outputting Puffin reports to disk",
+feature: "export puffin reports",
+issue_link: "https://github.com/meilisearch/product/discussions/693",
+}
+.into())
+}
+}
 }
 
 impl FeatureData {
 pub fn new(env: &Env, instance_features: InstanceTogglableFeatures) -> Result<Self> {
 let mut wtxn = env.write_txn()?;
-let runtime_features = env.create_database(&mut wtxn, Some(EXPERIMENTAL_FEATURES))?;
+let runtime_features_db = env.create_database(&mut wtxn, Some(EXPERIMENTAL_FEATURES))?;
 wtxn.commit()?;
 
-Ok(Self { runtime: runtime_features, instance: instance_features })
+let txn = env.read_txn()?;
+let persisted_features: RuntimeTogglableFeatures =
+runtime_features_db.get(&txn, EXPERIMENTAL_FEATURES)?.unwrap_or_default();
+let runtime = Arc::new(RwLock::new(RuntimeTogglableFeatures {
+metrics: instance_features.metrics || persisted_features.metrics,
+..persisted_features
+}));
+
+Ok(Self { persisted: runtime_features_db, runtime })
 }
 
 pub fn put_runtime_features(
@@ -83,16 +105,25 @@ impl FeatureData {
 mut wtxn: RwTxn,
 features: RuntimeTogglableFeatures,
 ) -> Result<()> {
-self.runtime.put(&mut wtxn, EXPERIMENTAL_FEATURES, &features)?;
+self.persisted.put(&mut wtxn, EXPERIMENTAL_FEATURES, &features)?;
 wtxn.commit()?;
+
+// safe to unwrap, the lock will only fail if:
+// 1. requested by the same thread concurrently -> it is called and released in methods that don't call each other
+// 2. there's a panic while the thread is held -> it is only used for an assignment here.
+let mut toggled_features = self.runtime.write().unwrap();
+*toggled_features = features;
 Ok(())
 }
 
-fn runtime_features(&self, txn: RoTxn) -> Result<RuntimeTogglableFeatures> {
-Ok(self.runtime.get(&txn, EXPERIMENTAL_FEATURES)?.unwrap_or_default())
+fn runtime_features(&self) -> RuntimeTogglableFeatures {
+// sound to unwrap, the lock will only fail if:
+// 1. requested by the same thread concurrently -> it is called and released in methods that don't call each other
+// 2. there's a panic while the thread is held -> it is only used for copying the data here
+*self.runtime.read().unwrap()
 }
 
-pub fn features(&self, txn: RoTxn) -> Result<RoFeatures> {
-RoFeatures::new(txn, self)
+pub fn features(&self) -> RoFeatures {
+RoFeatures::new(self)
 }
 }
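The refactor turns `FeatureData` into a write-through cache: toggles are still persisted in the heed database, but reads now hit an in-memory `Arc<RwLock<_>>` copy, so `features()` no longer needs a read transaction and can no longer fail. A minimal sketch of the pattern with a plain `HashMap` standing in for the LMDB database (names illustrative):

```rust
use std::collections::HashMap;
use std::sync::{Arc, RwLock};

#[derive(Debug, Clone, Copy, Default)]
struct Features {
    metrics: bool,
    export_puffin_reports: bool,
}

#[derive(Clone)]
struct FeatureData {
    persisted: Arc<RwLock<HashMap<&'static str, Features>>>, // stand-in for the heed Database
    runtime: Arc<RwLock<Features>>,                          // in-memory copy served to readers
}

impl FeatureData {
    fn put(&self, features: Features) {
        // Write-through: persist first, then refresh the cached copy.
        self.persisted.write().unwrap().insert("experimental-features", features);
        *self.runtime.write().unwrap() = features;
    }

    // Reads never touch storage and cannot fail.
    fn features(&self) -> Features {
        *self.runtime.read().unwrap()
    }
}

fn main() {
    let data = FeatureData {
        persisted: Arc::new(RwLock::new(HashMap::new())),
        runtime: Arc::new(RwLock::new(Features::default())),
    };
    data.put(Features { metrics: true, export_puffin_reports: false });
    assert!(data.features().metrics);
}
```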
@@ -30,6 +30,7 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
 index_mapper,
 features: _,
 max_number_of_tasks: _,
+puffin_frame: _,
 wake_up: _,
 dumps_path: _,
 snapshots_path: _,
@@ -38,6 +39,7 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
 test_breakpoint_sdr: _,
 planned_failures: _,
 run_loop_iteration: _,
+panic_reader: _,
 } = scheduler;
 
 let rtxn = env.read_txn().unwrap();
@@ -26,6 +26,7 @@ mod index_mapper;
 #[cfg(test)]
 mod insta_snapshot;
 mod lru;
+mod panic_hook;
 mod utils;
 mod uuid_codec;
 
@@ -33,6 +34,7 @@ pub type Result<T> = std::result::Result<T, Error>;
 pub type TaskId = u32;
 
 use std::collections::{BTreeMap, HashMap};
+use std::fs::File;
 use std::ops::{Bound, RangeBounds};
 use std::path::{Path, PathBuf};
 use std::sync::atomic::AtomicBool;
@@ -52,6 +54,9 @@ use meilisearch_types::milli::documents::DocumentsBatchBuilder;
 use meilisearch_types::milli::update::IndexerConfig;
 use meilisearch_types::milli::{self, CboRoaringBitmapCodec, Index, RoaringBitmapCodec, BEU32};
 use meilisearch_types::tasks::{Kind, KindWithContent, Status, Task};
+use panic_hook::ReportReader;
+pub use panic_hook::{Panic, Report, ReportRegistry};
+use puffin::FrameView;
 use roaring::RoaringBitmap;
 use synchronoise::SignalEvent;
 use time::format_description::well_known::Rfc3339;
@@ -314,6 +319,9 @@ pub struct IndexScheduler {
 /// the finished tasks automatically.
 pub(crate) max_number_of_tasks: usize,
 
+/// A frame to output the indexation profiling files to disk.
+pub(crate) puffin_frame: Arc<puffin::GlobalFrameView>,
+
 /// The path used to create the dumps.
 pub(crate) dumps_path: PathBuf,
 
@@ -326,6 +334,8 @@ pub struct IndexScheduler {
 /// The path to the version file of Meilisearch.
 pub(crate) version_file_path: PathBuf,
 
+pub(crate) panic_reader: ReportReader,
+
 // ================= test
 // The next entry is dedicated to the tests.
 /// Provide a way to set a breakpoint in multiple part of the scheduler.
@@ -364,6 +374,7 @@ impl IndexScheduler {
 wake_up: self.wake_up.clone(),
 autobatching_enabled: self.autobatching_enabled,
 max_number_of_tasks: self.max_number_of_tasks,
+puffin_frame: self.puffin_frame.clone(),
 snapshots_path: self.snapshots_path.clone(),
 dumps_path: self.dumps_path.clone(),
 auth_path: self.auth_path.clone(),
@@ -375,6 +386,7 @@ impl IndexScheduler {
 #[cfg(test)]
 run_loop_iteration: self.run_loop_iteration.clone(),
 features: self.features.clone(),
+panic_reader: self.panic_reader.clone(),
 }
 }
 }
@@ -432,6 +444,12 @@ impl IndexScheduler {
 let finished_at = env.create_database(&mut wtxn, Some(db_name::FINISHED_AT))?;
 wtxn.commit()?;
 
+const MAX_REPORT_COUNT: usize = 20;
+
+let panic_reader = panic_hook::ReportReader::install_panic_hook(
+std::num::NonZeroUsize::new(MAX_REPORT_COUNT).unwrap(),
+);
+
 // allow unreachable_code to get rids of the warning in the case of a test build.
 let this = Self {
 must_stop_processing: MustStopProcessing::default(),
@@ -457,6 +475,7 @@ impl IndexScheduler {
 env,
 // we want to start the loop right away in case meilisearch was ctrl+Ced while processing things
 wake_up: Arc::new(SignalEvent::auto(true)),
+puffin_frame: Arc::new(puffin::GlobalFrameView::default()),
 autobatching_enabled: options.autobatching_enabled,
 max_number_of_tasks: options.max_number_of_tasks,
 dumps_path: options.dumps_path,
@@ -471,6 +490,7 @@ impl IndexScheduler {
 #[cfg(test)]
 run_loop_iteration: Arc::new(RwLock::new(0)),
 features,
+panic_reader,
 };
 
 this.run();
@@ -572,17 +592,46 @@ impl IndexScheduler {
 run.wake_up.wait();
 
 loop {
+let puffin_enabled = run.features().check_puffin().is_ok();
+puffin::set_scopes_on(puffin_enabled);
+puffin::GlobalProfiler::lock().new_frame();
+
 match run.tick() {
 Ok(TickOutcome::TickAgain(_)) => (),
 Ok(TickOutcome::WaitForSignal) => run.wake_up.wait(),
 Err(e) => {
-log::error!("{}", e);
+log::error!("{e}");
 // Wait one second when an irrecoverable error occurs.
 if !e.is_recoverable() {
 std::thread::sleep(Duration::from_secs(1));
 }
 }
 }
+
+// Let's write the previous frame to disk but only if
+// the user wanted to profile with puffin.
+if puffin_enabled {
+let mut frame_view = run.puffin_frame.lock();
+if !frame_view.is_empty() {
+let now = OffsetDateTime::now_utc();
+let mut file = match File::create(format!("{}.puffin", now)) {
+Ok(file) => file,
+Err(e) => {
+log::error!("{e}");
+continue;
+}
+};
+if let Err(e) = frame_view.save_to_writer(&mut file) {
+log::error!("{e}");
+}
+if let Err(e) = file.sync_all() {
+log::error!("{e}");
+}
+// We erase this frame view as it is no more useful. We want to
+// measure the new frames now that we exported the previous ones.
+*frame_view = FrameView::default();
+}
+}
 }
 })
 .unwrap();
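The run loop now drives puffin manually: each scheduler tick becomes one profiler frame, scopes are toggled per tick from the `exportPuffinReports` runtime feature, and the accumulated `FrameView` is flushed to a `<timestamp>.puffin` file after the tick. A condensed, standalone sketch of that frame lifecycle (the `profiling_enabled` gate and file naming are illustrative; note frames are only closed by the next `new_frame()` call, so each export contains the previous tick's frames):

```rust
use std::fs::File;
use puffin::{FrameView, GlobalFrameView, GlobalProfiler};

fn profiling_enabled() -> bool {
    // Stand-in for `run.features().check_puffin().is_ok()`.
    true
}

fn main() {
    // Subscribes to the global profiler and accumulates finished frames.
    let frame_view = GlobalFrameView::default();

    for tick in 0..2 {
        let enabled = profiling_enabled();
        puffin::set_scopes_on(enabled);
        // Close the previous frame and start a new one.
        GlobalProfiler::lock().new_frame();

        {
            puffin::profile_scope!("tick"); // work measured inside the frame
        }

        if enabled {
            let mut view = frame_view.lock();
            if !view.is_empty() {
                let mut file = File::create(format!("tick-{tick}.puffin")).unwrap();
                view.save_to_writer(&mut file).unwrap();
                // Reset so the next export only contains new frames.
                *view = FrameView::default();
            }
        }
    }
}
```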
@@ -1094,7 +1143,10 @@ impl IndexScheduler {
 .name(String::from("batch-operation"))
 .spawn(move || cloned_index_scheduler.process_batch(batch))
 .unwrap();
-handle.join().unwrap_or(Err(Error::ProcessBatchPanicked))
+
+self.panic_reader
+.join_thread(handle)
+.unwrap_or_else(|maybe_report| Err(Error::ProcessBatchPanicked(maybe_report)))
 };
 
 #[cfg(test)]
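Previously a panicking batch thread surfaced only as a generic `ProcessBatchPanicked`; `join_thread` (from the new `panic_hook` module) now consults the registry the panic hook filled and, when it can match the crashed thread to a saved report, returns the report id for the error message. The underlying std mechanism is simply `JoinHandle::join` returning `Err` with the panic payload; a minimal sketch of that mechanism alone, with the report lookup reduced to extracting the payload string:

```rust
use std::thread;

fn main() {
    let handle = thread::Builder::new()
        .name("batch-operation".into())
        .spawn(|| panic!("boom"))
        .unwrap();

    // `join` returns Err(payload) if the thread panicked; a custom panic
    // hook can additionally record a backtrace/report keyed by thread id.
    match handle.join() {
        Ok(()) => println!("batch succeeded"),
        Err(payload) => {
            let msg = payload
                .downcast_ref::<&str>()
                .map(|s| s.to_string())
                .or_else(|| payload.downcast_ref::<String>().cloned())
                .unwrap_or_else(|| "<non-string panic payload>".into());
            println!("batch thread panicked: {msg}");
        }
    }
}
```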
@@ -1131,7 +1183,8 @@ impl IndexScheduler {
 // If we have an abortion error we must stop the tick here and re-schedule tasks.
 Err(Error::Milli(milli::Error::InternalError(
 milli::InternalError::AbortedIndexation,
-))) => {
+)))
+| Err(Error::AbortedTask) => {
 #[cfg(test)]
 self.breakpoint(Breakpoint::AbortedIndexation);
 wtxn.abort().map_err(Error::HeedTransaction)?;
@@ -1257,9 +1310,8 @@ impl IndexScheduler {
 Ok(IndexStats { is_indexing, inner_stats: index_stats })
 }
 
-pub fn features(&self) -> Result<RoFeatures> {
-let rtxn = self.read_txn()?;
-self.features.features(rtxn)
+pub fn features(&self) -> RoFeatures {
+self.features.features()
 }
 
 pub fn put_runtime_features(&self, features: RuntimeTogglableFeatures) -> Result<()> {
@@ -1275,6 +1327,10 @@ impl IndexScheduler {
 }
 }
 
+pub fn reports(&self) -> Arc<RwLock<ReportRegistry>> {
+self.panic_reader.registry()
+}
+
 /// Blocks the thread until the test handle asks to progress to/through this breakpoint.
 ///
 /// Two messages are sent through the channel for each breakpoint.
@@ -4288,4 +4344,26 @@ mod tests {
 }
 "###);
 }
+
+#[test]
+fn cancel_processing_dump() {
+let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+let dump_creation = KindWithContent::DumpCreation { keys: Vec::new(), instance_uid: None };
+let dump_cancellation = KindWithContent::TaskCancelation {
+query: "cancel dump".to_owned(),
+tasks: RoaringBitmap::from_iter([0]),
+};
+let _ = index_scheduler.register(dump_creation).unwrap();
+snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_dump_register");
+handle.advance_till([Start, BatchCreated, InsideProcessBatch]);
+
+let _ = index_scheduler.register(dump_cancellation).unwrap();
+snapshot!(snapshot_index_scheduler(&index_scheduler), name: "cancel_registered");
+
+snapshot!(format!("{:?}", handle.advance()), @"AbortedIndexation");
+
+handle.advance_one_successful_batch();
+snapshot!(snapshot_index_scheduler(&index_scheduler), name: "cancel_processed");
+}
 }
index-scheduler/src/panic_hook.rs  (new file, 211 lines)
@@ -0,0 +1,211 @@
+//! Panic hook designed to fetch a panic from a subthread and recover it on join.
+
+use std::collections::VecDeque;
+use std::num::NonZeroUsize;
+use std::panic::PanicInfo;
+use std::sync::{Arc, RwLock};
+use std::thread::{JoinHandle, ThreadId};
+
+use backtrace::Backtrace;
+
+// Represents a panic in a shallowy structured fashion
+pub struct Panic {
+pub payload: Option<String>,
+pub location: Option<String>,
+pub thread_name: Option<String>,
+pub thread_id: ThreadId,
+pub backtrace: Backtrace,
+}
+
+/// A panic enriched with a unique id
+#[derive(serde::Serialize)]
+pub struct Report {
+pub id: uuid::Uuid,
+#[serde(serialize_with = "serialize_panic")]
+pub panic: Panic,
+}
+
+fn serialize_panic<S>(panic: &Panic, s: S) -> std::result::Result<S::Ok, S::Error>
+where
+S: serde::Serializer,
+{
+use serde::Serialize;
+
+panic.to_json().serialize(s)
+}
+
+impl Report {
+pub fn new(panic: Panic) -> Self {
+Self { id: uuid::Uuid::new_v4(), panic }
+}
+}
+
+impl Panic {
+pub fn to_json(&self) -> serde_json::Value {
+json::panic_to_json(self)
+}
+}
+
+mod json {
+use backtrace::{Backtrace, BacktraceFrame, BacktraceSymbol};
+use serde_json::{json, Value};
+
+use super::Panic;
+
+fn symbol_to_json(symbol: &BacktraceSymbol) -> Value {
+let address = symbol.addr().map(|addr| format!("{:p}", addr));
+let column = symbol.colno();
+let line = symbol.lineno();
+let function = symbol.name().map(|name| name.to_string());
+let filename = symbol.filename();
+json!({
+"function": function,
+"filename": filename,
+"line": line,
+"column": column,
+"address": address,
+})
+}
+
+fn frame_to_json(frame: &BacktraceFrame) -> Value {
+let symbols: Vec<_> = frame.symbols().iter().map(symbol_to_json).collect();
+match symbols.as_slice() {
+[] => {
+let address = format!("{:p}", frame.ip());
+json!({"address": address})
+}
+[symbol] => json!(symbol),
+symbols => json!(symbols),
+}
+}
+
+fn backtrace_to_json(backtrace: &Backtrace) -> Value {
+let frames: Vec<_> = backtrace.frames().iter().map(frame_to_json).collect();
+json!(frames)
+}
+
+pub fn panic_to_json(panic: &Panic) -> Value {
+let thread_id = format!("{:?}", panic.thread_id);
+serde_json::json!({
+"payload": panic.payload,
+"location": panic.location,
+"thread": {
+"id": thread_id,
+"name": panic.thread_name,
+},
+"backtrace": backtrace_to_json(&panic.backtrace),
+})
+}
+}
+
+struct ReportWriter(Arc<RwLock<ReportRegistry>>);
+
+/// A FIFO queue of reports.
+pub struct ReportRegistry {
+reports: std::collections::VecDeque<Report>,
+}
+
+impl ReportRegistry {
+pub fn new(capacity: NonZeroUsize) -> Self {
+Self { reports: VecDeque::with_capacity(capacity.get()) }
+}
+
+pub fn push(&mut self, report: Report) -> Option<Report> {
+let popped = if self.reports.len() == self.reports.capacity() {
+self.reports.pop_back()
+} else {
+None
+};
+self.reports.push_front(report);
+popped
+}
+
+pub fn iter(&self) -> impl Iterator<Item = &Report> {
+self.reports.iter()
+}
+
+pub fn find(&self, report_id: uuid::Uuid) -> Option<&Report> {
+self.iter().find(|report| report.id == report_id)
+}
+}
+
+impl ReportWriter {
+#[track_caller]
+fn write_panic(&self, panic_info: &PanicInfo<'_>) {
let payload = panic_info
|
||||||
|
.payload()
|
||||||
|
.downcast_ref::<&str>()
|
||||||
|
.map(ToString::to_string)
|
||||||
|
.or_else(|| panic_info.payload().downcast_ref::<String>().cloned());
|
||||||
|
let location = panic_info.location().map(|loc| {
|
||||||
|
format!(
|
||||||
|
"{file}:{line}:{column}",
|
||||||
|
file = loc.file(),
|
||||||
|
line = loc.line(),
|
||||||
|
column = loc.column()
|
||||||
|
)
|
||||||
|
});
|
||||||
|
|
||||||
|
let thread_name = std::thread::current().name().map(ToString::to_string);
|
||||||
|
let thread_id = std::thread::current().id();
|
||||||
|
let backtrace = backtrace::Backtrace::new();
|
||||||
|
|
||||||
|
let panic = Panic { payload, location, thread_name, thread_id, backtrace };
|
||||||
|
|
||||||
|
let report = Report::new(panic);
|
||||||
|
|
||||||
|
log::error!(
|
||||||
|
"An unexpected panic occurred on thread {name} at {location}: {payload}. See report '{report}' for details.",
|
||||||
|
payload = report.panic.payload.as_deref().unwrap_or("Box<dyn Any>"),
|
||||||
|
name = report.panic.thread_name.as_deref().unwrap_or("<unnamed>"),
|
||||||
|
location = report.panic.location.as_deref().unwrap_or("<unknown>"),
|
||||||
|
report = report.id,
|
||||||
|
);
|
||||||
|
|
||||||
|
if let Ok(mut registry) = self.0.write() {
|
||||||
|
if let Some(old_report) = registry.push(report) {
|
||||||
|
log::trace!("Forgetting report {} to make space for new report.", old_report.id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reads the reports written in case of a panic.
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct ReportReader(Arc<RwLock<ReportRegistry>>);
|
||||||
|
|
||||||
|
impl ReportReader {
|
||||||
|
/// Installs a new global panic hook, overriding any existing hook.
|
||||||
|
///
|
||||||
|
/// The hook writes any incoming panic in reports.
|
||||||
|
/// The reports can then be read by the returned [`ReportReader`].
|
||||||
|
pub fn install_panic_hook(capacity: NonZeroUsize) -> Self {
|
||||||
|
let registry = Arc::new(RwLock::new(ReportRegistry::new(capacity)));
|
||||||
|
let reader = ReportReader(registry.clone());
|
||||||
|
let writer = ReportWriter(registry.clone());
|
||||||
|
|
||||||
|
std::panic::set_hook(Box::new(move |panic_info| writer.write_panic(panic_info)));
|
||||||
|
reader
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Join the thread corresponding to the passed handle, recovering either its value
|
||||||
|
/// or, in case the thread panicked, the id of the report corresponding to the panic.
|
||||||
|
///
|
||||||
|
/// The id can be used to read the report from the [`self.registry()`].
|
||||||
|
pub fn join_thread<T>(&self, thread: JoinHandle<T>) -> Result<T, Option<uuid::Uuid>> {
|
||||||
|
let thread_id = thread.thread().id();
|
||||||
|
thread.join().map_err(|_e| {
|
||||||
|
self.0
|
||||||
|
.read()
|
||||||
|
.unwrap()
|
||||||
|
.iter()
|
||||||
|
.find(|report| report.panic.thread_id == thread_id)
|
||||||
|
.map(|report| report.id)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a registry that can be used to read the reports written during a panic.
|
||||||
|
pub fn registry(&self) -> Arc<RwLock<ReportRegistry>> {
|
||||||
|
self.0.clone()
|
||||||
|
}
|
||||||
|
}
|
||||||
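To make the intended flow of `panic_hook.rs` concrete, here is a hedged end-to-end sketch built only from the items defined above (the capacity of 20 and the panic message are made up):

    use std::num::NonZeroUsize;

    // Install the hook once at startup; keep the reader around.
    let reader = ReportReader::install_panic_hook(NonZeroUsize::new(20).unwrap());

    let handle = std::thread::spawn(|| panic!("boom"));
    match reader.join_thread(handle) {
        Ok(_value) => { /* the thread finished normally */ }
        Err(Some(report_id)) => {
            // The hook captured the panic; fetch the structured report by id.
            let registry = reader.registry();
            let registry = registry.read().unwrap();
            if let Some(report) = registry.find(report_id) {
                eprintln!("{}", report.panic.to_json());
            }
        }
        Err(None) => { /* panicked, but the report was already evicted from the FIFO */ }
    }

Note the eviction semantics of `ReportRegistry::push`: once the queue is full, the oldest report is popped to make room, which is why `join_thread` can return `Err(None)`.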
@@ -0,0 +1,35 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: enqueued, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
+----------------------------------------------------------------------
+### Status:
+enqueued [0,]
+----------------------------------------------------------------------
+### Kind:
+"dumpCreation" [0,]
+----------------------------------------------------------------------
+### Index Tasks:
+----------------------------------------------------------------------
+### Index Mapper:
+
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+----------------------------------------------------------------------
+### Started At:
+----------------------------------------------------------------------
+### Finished At:
+----------------------------------------------------------------------
+### File Store:
+
+----------------------------------------------------------------------
+
@@ -0,0 +1,45 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: canceled, canceled_by: 1, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
+1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(0), original_filter: "cancel dump" }, kind: TaskCancelation { query: "cancel dump", tasks: RoaringBitmap<[0]> }}
+----------------------------------------------------------------------
+### Status:
+enqueued []
+succeeded [1,]
+canceled [0,]
+----------------------------------------------------------------------
+### Kind:
+"taskCancelation" [1,]
+"dumpCreation" [0,]
+----------------------------------------------------------------------
+### Index Tasks:
+----------------------------------------------------------------------
+### Index Mapper:
+
+----------------------------------------------------------------------
+### Canceled By:
+1 [0,]
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### Started At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### Finished At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### File Store:
+
+----------------------------------------------------------------------
+
@@ -0,0 +1,38 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[0,]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: enqueued, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
+1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "cancel dump" }, kind: TaskCancelation { query: "cancel dump", tasks: RoaringBitmap<[0]> }}
+----------------------------------------------------------------------
+### Status:
+enqueued [0,1,]
+----------------------------------------------------------------------
+### Kind:
+"taskCancelation" [1,]
+"dumpCreation" [0,]
+----------------------------------------------------------------------
+### Index Tasks:
+----------------------------------------------------------------------
+### Index Mapper:
+
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### Started At:
+----------------------------------------------------------------------
+### Finished At:
+----------------------------------------------------------------------
+### File Store:
+
+----------------------------------------------------------------------
+
@@ -129,6 +129,9 @@ impl HeedAuthStore {
             Action::DumpsAll => {
                 actions.insert(Action::DumpsCreate);
             }
+            Action::SnapshotsAll => {
+                actions.insert(Action::SnapshotsCreate);
+            }
             Action::TasksAll => {
                 actions.extend([Action::TasksGet, Action::TasksDelete, Action::TasksCancel]);
             }
@@ -15,13 +15,13 @@ actix-web = { version = "4.3.1", default-features = false }
 anyhow = "1.0.70"
 convert_case = "0.6.0"
 csv = "1.2.1"
-deserr = "0.5.0"
+deserr = { version = "0.6.0", features = ["actix-web"]}
 either = { version = "1.8.1", features = ["serde"] }
 enum-iterator = "1.4.0"
 file-store = { path = "../file-store" }
 flate2 = "1.0.25"
 fst = "0.4.7"
-memmap2 = "0.5.10"
+memmap2 = "0.7.1"
 milli = { path = "../milli" }
 roaring = { version = "0.10.1", features = ["serde"] }
 serde = { version = "1.0.160", features = ["derive"] }
@@ -1,4 +1,3 @@
-use std::borrow::Borrow;
 use std::fmt::{self, Debug, Display};
 use std::fs::File;
 use std::io::{self, Seek, Write};
@@ -42,7 +41,7 @@ impl Display for DocumentFormatError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             Self::Io(e) => write!(f, "{e}"),
-            Self::MalformedPayload(me, b) => match me.borrow() {
+            Self::MalformedPayload(me, b) => match me {
                 Error::Json(se) => {
                     let mut message = match se.classify() {
                         Category::Data => {
@@ -88,7 +88,6 @@ pub trait ErrorCode {
     }
 }

-#[allow(clippy::enum_variant_names)]
 enum ErrorType {
     Internal,
     InvalidRequest,
@@ -259,6 +258,9 @@ InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsSortableAttributes , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsStopWords , InvalidRequest , BAD_REQUEST ;
+InvalidSettingsNonSeparatorTokens , InvalidRequest , BAD_REQUEST ;
+InvalidSettingsSeparatorTokens , InvalidRequest , BAD_REQUEST ;
+InvalidSettingsDictionary , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsSynonyms , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsTypoTolerance , InvalidRequest , BAD_REQUEST ;
 InvalidState , Internal , INTERNAL_SERVER_ERROR ;
@@ -295,6 +297,7 @@ MissingSwapIndexes , InvalidRequest , BAD_REQUEST ;
 MissingTaskFilters , InvalidRequest , BAD_REQUEST ;
 NoSpaceLeftOnDevice , System , UNPROCESSABLE_ENTITY;
 PayloadTooLarge , InvalidRequest , PAYLOAD_TOO_LARGE ;
+ReportNotFound , InvalidRequest , NOT_FOUND ;
 TaskNotFound , InvalidRequest , NOT_FOUND ;
 TooManyOpenFiles , System , UNPROCESSABLE_ENTITY ;
 UnretrievableDocument , Internal , BAD_REQUEST ;
@@ -5,6 +5,8 @@ use serde::{Deserialize, Serialize};
 pub struct RuntimeTogglableFeatures {
     pub score_details: bool,
     pub vector_store: bool,
+    pub metrics: bool,
+    pub export_puffin_reports: bool,
 }

 #[derive(Default, Debug, Clone, Copy)]
@@ -257,6 +257,12 @@ pub enum Action {
     #[serde(rename = "dumps.create")]
     #[deserr(rename = "dumps.create")]
     DumpsCreate,
+    #[serde(rename = "snapshots.*")]
+    #[deserr(rename = "snapshots.*")]
+    SnapshotsAll,
+    #[serde(rename = "snapshots.create")]
+    #[deserr(rename = "snapshots.create")]
+    SnapshotsCreate,
     #[serde(rename = "version")]
     #[deserr(rename = "version")]
     Version,
@@ -309,6 +315,7 @@ impl Action {
             METRICS_GET => Some(Self::MetricsGet),
             DUMPS_ALL => Some(Self::DumpsAll),
             DUMPS_CREATE => Some(Self::DumpsCreate),
+            SNAPSHOTS_CREATE => Some(Self::SnapshotsCreate),
             VERSION => Some(Self::Version),
             KEYS_CREATE => Some(Self::KeysAdd),
             KEYS_GET => Some(Self::KeysGet),
@@ -353,6 +360,7 @@ pub mod actions {
     pub const METRICS_GET: u8 = MetricsGet.repr();
     pub const DUMPS_ALL: u8 = DumpsAll.repr();
     pub const DUMPS_CREATE: u8 = DumpsCreate.repr();
+    pub const SNAPSHOTS_CREATE: u8 = SnapshotsCreate.repr();
     pub const VERSION: u8 = Version.repr();
     pub const KEYS_CREATE: u8 = KeysAdd.repr();
     pub const KEYS_GET: u8 = KeysGet.repr();
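For orientation, the new `snapshots.create` action slots into the existing key system; a key restricted to snapshot creation might be requested with a body like this sketch (the key API's field names are the documented ones; the description and expiry values are illustrative):

    let body = serde_json::json!({
        "description": "snapshot-only key",   // illustrative
        "actions": ["snapshots.create"],      // the new action from the diff above
        "indexes": ["*"],
        "expiresAt": null,
    });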
@@ -171,6 +171,15 @@ pub struct Settings<T> {
     #[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
     pub stop_words: Setting<BTreeSet<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsNonSeparatorTokens>)]
+    pub non_separator_tokens: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsSeparatorTokens>)]
+    pub separator_tokens: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsDictionary>)]
+    pub dictionary: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
     pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
@@ -201,6 +210,9 @@ impl Settings<Checked> {
             ranking_rules: Setting::Reset,
             stop_words: Setting::Reset,
             synonyms: Setting::Reset,
+            non_separator_tokens: Setting::Reset,
+            separator_tokens: Setting::Reset,
+            dictionary: Setting::Reset,
             distinct_attribute: Setting::Reset,
             typo_tolerance: Setting::Reset,
             faceting: Setting::Reset,
@@ -217,6 +229,9 @@ impl Settings<Checked> {
             sortable_attributes,
             ranking_rules,
             stop_words,
+            non_separator_tokens,
+            separator_tokens,
+            dictionary,
             synonyms,
             distinct_attribute,
             typo_tolerance,
@@ -232,6 +247,9 @@ impl Settings<Checked> {
             sortable_attributes,
             ranking_rules,
             stop_words,
+            non_separator_tokens,
+            separator_tokens,
+            dictionary,
             synonyms,
             distinct_attribute,
             typo_tolerance,
@@ -274,6 +292,9 @@ impl Settings<Unchecked> {
             ranking_rules: self.ranking_rules,
             stop_words: self.stop_words,
             synonyms: self.synonyms,
+            non_separator_tokens: self.non_separator_tokens,
+            separator_tokens: self.separator_tokens,
+            dictionary: self.dictionary,
             distinct_attribute: self.distinct_attribute,
             typo_tolerance: self.typo_tolerance,
             faceting: self.faceting,
@@ -335,6 +356,28 @@ pub fn apply_settings_to_builder(
         Setting::NotSet => (),
     }

+    match settings.non_separator_tokens {
+        Setting::Set(ref non_separator_tokens) => {
+            builder.set_non_separator_tokens(non_separator_tokens.clone())
+        }
+        Setting::Reset => builder.reset_non_separator_tokens(),
+        Setting::NotSet => (),
+    }
+
+    match settings.separator_tokens {
+        Setting::Set(ref separator_tokens) => {
+            builder.set_separator_tokens(separator_tokens.clone())
+        }
+        Setting::Reset => builder.reset_separator_tokens(),
+        Setting::NotSet => (),
+    }
+
+    match settings.dictionary {
+        Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
+        Setting::Reset => builder.reset_dictionary(),
+        Setting::NotSet => (),
+    }
+
     match settings.synonyms {
         Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
         Setting::Reset => builder.reset_synonyms(),
@@ -459,15 +502,14 @@ pub fn settings(
         })
         .transpose()?
         .unwrap_or_default();

+    let non_separator_tokens = index.non_separator_tokens(rtxn)?.unwrap_or_default();
+    let separator_tokens = index.separator_tokens(rtxn)?.unwrap_or_default();
+    let dictionary = index.dictionary(rtxn)?.unwrap_or_default();
+
     let distinct_field = index.distinct_field(rtxn)?.map(String::from);

-    // in milli each word in the synonyms map were split on their separator. Since we lost
-    // this information we are going to put space between words.
-    let synonyms = index
-        .synonyms(rtxn)?
-        .iter()
-        .map(|(key, values)| (key.join(" "), values.iter().map(|value| value.join(" ")).collect()))
-        .collect();
+    let synonyms = index.user_defined_synonyms(rtxn)?;

     let min_typo_word_len = MinWordSizeTyposSetting {
         one_typo: Setting::Set(index.min_word_len_one_typo(rtxn)?),
@@ -520,6 +562,9 @@ pub fn settings(
         sortable_attributes: Setting::Set(sortable_attributes),
         ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
         stop_words: Setting::Set(stop_words),
+        non_separator_tokens: Setting::Set(non_separator_tokens),
+        separator_tokens: Setting::Set(separator_tokens),
+        dictionary: Setting::Set(dictionary),
         distinct_attribute: match distinct_field {
             Some(field) => Setting::Set(field),
             None => Setting::Reset,
@@ -642,6 +687,9 @@ pub(crate) mod test {
         sortable_attributes: Setting::NotSet,
         ranking_rules: Setting::NotSet,
         stop_words: Setting::NotSet,
+        non_separator_tokens: Setting::NotSet,
+        separator_tokens: Setting::NotSet,
+        dictionary: Setting::NotSet,
         synonyms: Setting::NotSet,
         distinct_attribute: Setting::NotSet,
         typo_tolerance: Setting::NotSet,
@@ -663,6 +711,9 @@ pub(crate) mod test {
         sortable_attributes: Setting::NotSet,
         ranking_rules: Setting::NotSet,
         stop_words: Setting::NotSet,
+        non_separator_tokens: Setting::NotSet,
+        separator_tokens: Setting::NotSet,
+        dictionary: Setting::NotSet,
         synonyms: Setting::NotSet,
         distinct_attribute: Setting::NotSet,
         typo_tolerance: Setting::NotSet,
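The three new fields reuse the existing `Setting` tri-state, which is what the `Reset`/`NotSet` arms above encode. A small sketch of what each state means for the new settings (values invented for illustration):

    use std::collections::BTreeSet;

    // Set: tokenize "J. R. R." as a single word.
    let dictionary: Setting<BTreeSet<String>> =
        Setting::Set(BTreeSet::from(["J. R. R.".to_string()]));
    // NotSet: leave the stored separator tokens untouched.
    let separator_tokens: Setting<BTreeSet<String>> = Setting::NotSet;
    // Reset: clear the non-separator tokens back to the default.
    let non_separator_tokens: Setting<BTreeSet<String>> = Setting::Reset;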
@@ -39,7 +39,7 @@ byte-unit = { version = "4.0.19", default-features = false, features = [
 bytes = "1.4.0"
 clap = { version = "4.2.1", features = ["derive", "env"] }
 crossbeam-channel = "0.5.8"
-deserr = "0.5.0"
+deserr = { version = "0.6.0", features = ["actix-web"]}
 dump = { path = "../dump" }
 either = "1.8.1"
 env_logger = "0.10.0"
@@ -50,15 +50,15 @@ futures = "0.3.28"
 futures-util = "0.3.28"
 http = "0.2.9"
 index-scheduler = { path = "../index-scheduler" }
-indexmap = { version = "1.9.3", features = ["serde-1"] }
+indexmap = { version = "2.0.0", features = ["serde"] }
 is-terminal = "0.4.8"
-itertools = "0.10.5"
+itertools = "0.11.0"
 jsonwebtoken = "8.3.0"
 lazy_static = "1.4.0"
 log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-mimalloc = { version = "0.1.36", default-features = false }
+mimalloc = { version = "0.1.37", default-features = false }
 mime = "0.3.17"
 num_cpus = "1.15.0"
 obkv = "0.2.0"
@@ -69,6 +69,7 @@ permissive-json-pointer = { path = "../permissive-json-pointer" }
 pin-project-lite = "0.2.9"
 platform-dirs = "0.3.0"
 prometheus = { version = "0.13.3", features = ["process"] }
+puffin = { version = "0.16.0", features = ["serialization"] }
 rand = "0.8.5"
 rayon = "1.7.0"
 regex = "1.7.3"
@@ -85,7 +86,7 @@ sha2 = "0.10.6"
 siphasher = "0.3.10"
 slice-group-by = "0.3.0"
 static-files = { version = "0.2.3", optional = true }
-sysinfo = "0.28.4"
+sysinfo = "0.29.7"
 tar = "0.4.38"
 tempfile = "3.5.0"
 thiserror = "1.0.40"
@@ -133,7 +134,17 @@ zip = { version = "0.6.4", optional = true }
 [features]
 default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"]
 analytics = ["segment"]
-mini-dashboard = ["actix-web-static-files", "static-files", "anyhow", "cargo_toml", "hex", "reqwest", "sha-1", "tempfile", "zip"]
+mini-dashboard = [
+    "actix-web-static-files",
+    "static-files",
+    "anyhow",
+    "cargo_toml",
+    "hex",
+    "reqwest",
+    "sha-1",
+    "tempfile",
+    "zip",
+]
 chinese = ["meilisearch-types/chinese"]
 hebrew = ["meilisearch-types/hebrew"]
 japanese = ["meilisearch-types/japanese"]
@@ -20,7 +20,7 @@ pub struct SearchAggregator;
 #[allow(dead_code)]
 impl SearchAggregator {
     pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
-        Self::default()
+        Self
     }

     pub fn succeed(&mut self, _: &dyn Any) {}
@@ -32,7 +32,7 @@ pub struct MultiSearchAggregator;
 #[allow(dead_code)]
 impl MultiSearchAggregator {
     pub fn from_queries(_: &dyn Any, _: &dyn Any) -> Self {
-        Self::default()
+        Self
     }

     pub fn succeed(&mut self) {}
@@ -44,7 +44,7 @@ pub struct FacetSearchAggregator;
 #[allow(dead_code)]
 impl FacetSearchAggregator {
     pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
-        Self::default()
+        Self
     }

     pub fn succeed(&mut self, _: &dyn Any) {}
@@ -1,6 +1,5 @@
 mod mock_analytics;
-// if we are in release mode and the feature analytics was enabled
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 mod segment_analytics;

 use std::fs;
@@ -17,26 +16,25 @@ use serde_json::Value;
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
 use crate::routes::tasks::TasksFilterQuery;

-// if we are in debug mode OR the analytics feature is disabled
+// if the analytics feature is disabled
 // the `SegmentAnalytics` point to the mock instead of the real analytics
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type SegmentAnalytics = mock_analytics::MockAnalytics;
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type SearchAggregator = mock_analytics::SearchAggregator;
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type MultiSearchAggregator = mock_analytics::MultiSearchAggregator;
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type FacetSearchAggregator = mock_analytics::FacetSearchAggregator;

-// if we are in release mode and the feature analytics was enabled
-// we use the real analytics
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+// if the feature analytics is enabled we use the real analytics
+#[cfg(feature = "analytics")]
 pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 pub type SearchAggregator = segment_analytics::SearchAggregator;
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator;
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 pub type FacetSearchAggregator = segment_analytics::FacetSearchAggregator;

 /// The Meilisearch config dir:
(File diff suppressed because it is too large)
@@ -51,6 +51,8 @@ pub enum MeilisearchHttpError {
     DocumentFormat(#[from] DocumentFormatError),
     #[error(transparent)]
     Join(#[from] JoinError),
+    #[error("Report `{0}` not found. Either its id is incorrect, or it was deleted. To save on memory, only a limited amount of reports are kept.")]
+    ReportNotFound(uuid::Uuid),
 }

 impl ErrorCode for MeilisearchHttpError {
@@ -74,6 +76,7 @@ impl ErrorCode for MeilisearchHttpError {
             MeilisearchHttpError::FileStore(_) => Code::Internal,
             MeilisearchHttpError::DocumentFormat(e) => e.error_code(),
             MeilisearchHttpError::Join(_) => Code::Internal,
+            MeilisearchHttpError::ReportNotFound(_) => Code::ReportNotFound,
         }
     }
 }
@@ -114,10 +114,7 @@ pub fn create_app(
         .configure(routes::configure)
         .configure(|s| dashboard(s, enable_dashboard));

-    let app = app.wrap(actix_web::middleware::Condition::new(
-        opt.experimental_enable_metrics,
-        middleware::RouteMetrics,
-    ));
+    let app = app.wrap(middleware::RouteMetrics);
     app.wrap(
         Cors::default()
             .send_wildcard()
@@ -3,8 +3,10 @@
 use std::future::{ready, Ready};

 use actix_web::dev::{self, Service, ServiceRequest, ServiceResponse, Transform};
+use actix_web::web::Data;
 use actix_web::Error;
 use futures_util::future::LocalBoxFuture;
+use index_scheduler::IndexScheduler;
 use prometheus::HistogramTimer;

 pub struct RouteMetrics;
@@ -47,19 +49,27 @@ where

     fn call(&self, req: ServiceRequest) -> Self::Future {
         let mut histogram_timer: Option<HistogramTimer> = None;
-        let request_path = req.path();
-        let is_registered_resource = req.resource_map().has_resource(request_path);
-        if is_registered_resource {
-            let request_method = req.method().to_string();
-            histogram_timer = Some(
-                crate::metrics::MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS
-                    .with_label_values(&[&request_method, request_path])
-                    .start_timer(),
-            );
-            crate::metrics::MEILISEARCH_HTTP_REQUESTS_TOTAL
-                .with_label_values(&[&request_method, request_path])
-                .inc();
-        }
+
+        // calling unwrap here is safe because index scheduler is added to app data while creating actix app.
+        // also, the tests will fail if this is not present.
+        let index_scheduler = req.app_data::<Data<IndexScheduler>>().unwrap();
+        let features = index_scheduler.features();
+
+        if features.check_metrics().is_ok() {
+            let request_path = req.path();
+            let is_registered_resource = req.resource_map().has_resource(request_path);
+            if is_registered_resource {
+                let request_method = req.method().to_string();
+                histogram_timer = Some(
+                    crate::metrics::MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS
+                        .with_label_values(&[&request_method, request_path])
+                        .start_timer(),
+                );
+                crate::metrics::MEILISEARCH_HTTP_REQUESTS_TOTAL
+                    .with_label_values(&[&request_method, request_path])
+                    .inc();
+            }
+        };

         let fut = self.service.call(req);
@@ -28,7 +28,7 @@ const MEILI_DB_PATH: &str = "MEILI_DB_PATH";
 const MEILI_HTTP_ADDR: &str = "MEILI_HTTP_ADDR";
 const MEILI_MASTER_KEY: &str = "MEILI_MASTER_KEY";
 const MEILI_ENV: &str = "MEILI_ENV";
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 const MEILI_NO_ANALYTICS: &str = "MEILI_NO_ANALYTICS";
 const MEILI_HTTP_PAYLOAD_SIZE_LIMIT: &str = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT";
 const MEILI_SSL_CERT_PATH: &str = "MEILI_SSL_CERT_PATH";
@@ -159,7 +159,7 @@ pub struct Opt {
     /// Meilisearch automatically collects data from all instances that do not opt out using this flag.
     /// All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted
     /// at any time.
-    #[cfg(all(not(debug_assertions), feature = "analytics"))]
+    #[cfg(feature = "analytics")]
     #[serde(default)] // we can't send true
     #[clap(long, env = MEILI_NO_ANALYTICS)]
     pub no_analytics: bool,
@@ -390,7 +390,7 @@ impl Opt {
             ignore_missing_dump: _,
             ignore_dump_if_db_exists: _,
             config_file_path: _,
-            #[cfg(all(not(debug_assertions), feature = "analytics"))]
+            #[cfg(feature = "analytics")]
             no_analytics,
             experimental_enable_metrics: enable_metrics_route,
             experimental_reduce_indexing_memory_usage: reduce_indexing_memory_usage,
@@ -401,7 +401,7 @@ impl Opt {
             export_to_env_if_not_present(MEILI_MASTER_KEY, master_key);
         }
         export_to_env_if_not_present(MEILI_ENV, env);
-        #[cfg(all(not(debug_assertions), feature = "analytics"))]
+        #[cfg(feature = "analytics")]
         {
             export_to_env_if_not_present(MEILI_NO_ANALYTICS, no_analytics.to_string());
         }
@@ -29,12 +29,12 @@ async fn get_features(
     >,
     req: HttpRequest,
     analytics: Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    let features = index_scheduler.features()?;
+) -> HttpResponse {
+    let features = index_scheduler.features();

     analytics.publish("Experimental features Seen".to_string(), json!(null), Some(&req));
     debug!("returns: {:?}", features.runtime_features());
-    Ok(HttpResponse::Ok().json(features.runtime_features()))
+    HttpResponse::Ok().json(features.runtime_features())
 }

 #[derive(Debug, Deserr)]
@@ -44,6 +44,10 @@ pub struct RuntimeTogglableFeatures {
     pub score_details: Option<bool>,
     #[deserr(default)]
     pub vector_store: Option<bool>,
+    #[deserr(default)]
+    pub metrics: Option<bool>,
+    #[deserr(default)]
+    pub export_puffin_reports: Option<bool>,
 }

 async fn patch_features(
@@ -55,26 +59,36 @@ async fn patch_features(
     req: HttpRequest,
     analytics: Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let features = index_scheduler.features()?;
+    let features = index_scheduler.features();

     let old_features = features.runtime_features();

     let new_features = meilisearch_types::features::RuntimeTogglableFeatures {
         score_details: new_features.0.score_details.unwrap_or(old_features.score_details),
         vector_store: new_features.0.vector_store.unwrap_or(old_features.vector_store),
+        metrics: new_features.0.metrics.unwrap_or(old_features.metrics),
+        export_puffin_reports: new_features
+            .0
+            .export_puffin_reports
+            .unwrap_or(old_features.export_puffin_reports),
     };

     // explicitly destructure for analytics rather than using the `Serialize` implementation, because
     // it renames to camelCase, which we don't want for analytics.
     // **Do not** ignore fields with `..` or `_` here, because we want to add them in the future.
-    let meilisearch_types::features::RuntimeTogglableFeatures { score_details, vector_store } =
-        new_features;
+    let meilisearch_types::features::RuntimeTogglableFeatures {
+        score_details,
+        vector_store,
+        metrics,
+        export_puffin_reports,
+    } = new_features;

     analytics.publish(
         "Experimental features Updated".to_string(),
         json!({
             "score_details": score_details,
             "vector_store": vector_store,
+            "metrics": metrics,
+            "export_puffin_reports": export_puffin_reports,
         }),
         Some(&req),
     );
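Given the two new optional fields, a PATCH to the experimental features endpoint can now toggle each independently; a sketch of such a payload, assuming the route keeps the API's usual camelCase naming (that rename is not shown in this diff):

    let body = serde_json::json!({
        "metrics": true,
        "exportPuffinReports": false,   // field name assumed from the snake_case struct field
    });

Fields left out of the payload fall back to their previous values through the `unwrap_or(old_features...)` pattern above.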
@@ -68,7 +68,7 @@ pub async fn search(
     }

     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features()?;
+    let features = index_scheduler.features();
     let search_result = tokio::task::spawn_blocking(move || {
         perform_facet_search(&index, search_query, facet_query, facet_name, features)
     })
@@ -157,7 +157,7 @@ pub async fn search_with_url_query(
     let mut aggregate = SearchAggregator::from_query(&query, &req);

     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features()?;
+    let features = index_scheduler.features();
     let search_result =
         tokio::task::spawn_blocking(move || perform_search(&index, query, features)).await?;
     if let Ok(ref search_result) = search_result {
@@ -192,7 +192,7 @@ pub async fn search_with_post(

     let index = index_scheduler.index(&index_uid)?;

-    let features = index_scheduler.features()?;
+    let features = index_scheduler.features();
     let search_result =
         tokio::task::spawn_blocking(move || perform_search(&index, query, features)).await?;
     if let Ok(ref search_result) = search_result {
@@ -310,6 +310,81 @@ make_setting_route!(
     }
 );

+make_setting_route!(
+    "/non-separator-tokens",
+    put,
+    std::collections::BTreeSet<String>,
+    meilisearch_types::deserr::DeserrJsonError<
+        meilisearch_types::error::deserr_codes::InvalidSettingsNonSeparatorTokens,
+    >,
+    non_separator_tokens,
+    "nonSeparatorTokens",
+    analytics,
+    |non_separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "nonSeparatorTokens Updated".to_string(),
+            json!({
+                "non_separator_tokens": {
+                    "total": non_separator_tokens.as_ref().map(|non_separator_tokens| non_separator_tokens.len()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
+make_setting_route!(
+    "/separator-tokens",
+    put,
+    std::collections::BTreeSet<String>,
+    meilisearch_types::deserr::DeserrJsonError<
+        meilisearch_types::error::deserr_codes::InvalidSettingsSeparatorTokens,
+    >,
+    separator_tokens,
+    "separatorTokens",
+    analytics,
+    |separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "separatorTokens Updated".to_string(),
+            json!({
+                "separator_tokens": {
+                    "total": separator_tokens.as_ref().map(|separator_tokens| separator_tokens.len()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
+make_setting_route!(
+    "/dictionary",
+    put,
+    std::collections::BTreeSet<String>,
+    meilisearch_types::deserr::DeserrJsonError<
+        meilisearch_types::error::deserr_codes::InvalidSettingsDictionary,
+    >,
+    dictionary,
+    "dictionary",
+    analytics,
+    |dictionary: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "dictionary Updated".to_string(),
+            json!({
+                "dictionary": {
+                    "total": dictionary.as_ref().map(|dictionary| dictionary.len()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
 make_setting_route!(
     "/synonyms",
     put,
@@ -466,6 +541,9 @@ generate_configure!(
     searchable_attributes,
     distinct_attribute,
     stop_words,
+    separator_tokens,
+    non_separator_tokens,
+    dictionary,
     synonyms,
     ranking_rules,
     typo_tolerance,
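Taken together with the camelCase names declared in the `make_setting_route!` invocations above ("nonSeparatorTokens", "separatorTokens", "dictionary"), a combined settings update could look like this sketch (the token values are invented):

    let body = serde_json::json!({
        "nonSeparatorTokens": ["@"],      // "@" no longer splits words
        "separatorTokens": ["&sep"],      // "&sep" now splits words
        "dictionary": ["J. R. R."],       // learned as a single token
    });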
@@ -19,7 +19,7 @@ pub async fn get_metrics(
     index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
     auth_controller: Data<AuthController>,
 ) -> Result<HttpResponse, ResponseError> {
-    index_scheduler.features()?.check_metrics()?;
+    index_scheduler.features().check_metrics()?;
     let auth_filters = index_scheduler.filters();
     if !auth_filters.all_indexes_authorized() {
         let mut error = ResponseError::from(AuthenticationError::InvalidToken);
@@ -24,6 +24,8 @@ pub mod features;
 pub mod indexes;
 mod metrics;
 mod multi_search;
+mod reports;
+mod snapshot;
 mod swap_indexes;
 pub mod tasks;

@@ -32,13 +34,15 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::resource("/health").route(web::get().to(get_health)))
         .service(web::scope("/keys").configure(api_key::configure))
         .service(web::scope("/dumps").configure(dump::configure))
+        .service(web::scope("/snapshots").configure(snapshot::configure))
         .service(web::resource("/stats").route(web::get().to(get_stats)))
         .service(web::resource("/version").route(web::get().to(get_version)))
         .service(web::scope("/indexes").configure(indexes::configure))
         .service(web::scope("/multi-search").configure(multi_search::configure))
         .service(web::scope("/swap-indexes").configure(swap_indexes::configure))
         .service(web::scope("/metrics").configure(metrics::configure))
-        .service(web::scope("/experimental-features").configure(features::configure));
+        .service(web::scope("/experimental-features").configure(features::configure))
+        .service(web::scope("/reports").configure(reports::configure));
 }

 #[derive(Debug, Serialize)]
@@ -41,7 +41,7 @@ pub async fn multi_search_with_post(
     let queries = params.into_inner().queries;

     let mut multi_aggregate = MultiSearchAggregator::from_queries(&queries, &req);
-    let features = index_scheduler.features()?;
+    let features = index_scheduler.features();

     // Explicitly expect a `(ResponseError, usize)` for the error type rather than `ResponseError` only,
     // so that `?` doesn't work if it doesn't use `with_index`, ensuring that it is not forgotten in case of code
meilisearch/src/routes/reports.rs (new file, 39 lines)
@@ -0,0 +1,39 @@
+use actix_web::web::{self, Data};
+use actix_web::HttpResponse;
+use index_scheduler::{IndexScheduler, Report};
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::keys::actions;
+
+use crate::extractors::authentication::policies::ActionPolicy;
+use crate::extractors::authentication::GuardedData;
+use crate::extractors::sequential_extractor::SeqHandler;
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(web::resource("").route(web::get().to(list_reports))).service(
+        web::scope("/{report_uid}")
+            .service(web::resource("").route(web::get().to(SeqHandler(get_report)))),
+    );
+}
+
+pub async fn list_reports(
+    index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_ALL }>, Data<IndexScheduler>>,
+) -> Result<HttpResponse, ResponseError> {
+    let reports = &index_scheduler.reports();
+    let reports = &reports.read().unwrap();
+    let reports: Vec<&Report> = reports.iter().collect();
+
+    Ok(HttpResponse::Ok().json(reports))
+}
+
+pub async fn get_report(
+    index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_ALL }>, Data<IndexScheduler>>,
+    report_id: web::Path<uuid::Uuid>,
+) -> Result<HttpResponse, ResponseError> {
+    let reports = &index_scheduler.reports();
+    let reports = &reports.read().unwrap();
+    let report = reports
+        .find(*report_id)
+        .ok_or(crate::error::MeilisearchHttpError::ReportNotFound(*report_id))?;
+
+    Ok(HttpResponse::Ok().json(report))
+}
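A minimal sketch of how these two endpoints could be exercised from the crate's own test suite, assuming the test `Server` helper used throughout the tests further down; the route paths come from `configure` above, while the test name and the `uid` field access are illustrative only:

    // Hedged sketch, not part of the diff: list the reports, then fetch one by uid.
    #[actix_rt::test]
    async fn list_then_get_report() {
        let server = Server::new().await;

        // GET /reports serializes the Vec<&Report> built in list_reports above.
        let (reports, code) = server.service.get("/reports").await;
        assert_eq!(code, 200);

        // GET /reports/{report_uid} either finds the report or surfaces
        // MeilisearchHttpError::ReportNotFound through the `?` in get_report.
        if let Some(uid) = reports[0]["uid"].as_str() {
            let (_report, code) = server.service.get(format!("/reports/{uid}")).await;
            assert_eq!(code, 200);
        }
    }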
meilisearch/src/routes/snapshot.rs (new file, 32 lines)
@@ -0,0 +1,32 @@
+use actix_web::web::Data;
+use actix_web::{web, HttpRequest, HttpResponse};
+use index_scheduler::IndexScheduler;
+use log::debug;
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::tasks::KindWithContent;
+use serde_json::json;
+
+use crate::analytics::Analytics;
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::GuardedData;
+use crate::extractors::sequential_extractor::SeqHandler;
+use crate::routes::SummarizedTaskView;
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_snapshot))));
+}
+
+pub async fn create_snapshot(
+    index_scheduler: GuardedData<ActionPolicy<{ actions::SNAPSHOTS_CREATE }>, Data<IndexScheduler>>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
+) -> Result<HttpResponse, ResponseError> {
+    analytics.publish("Snapshot Created".to_string(), json!({}), Some(&req));
+
+    let task = KindWithContent::SnapshotCreation;
+    let task: SummarizedTaskView =
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
+
+    debug!("returns: {:?}", task);
+    Ok(HttpResponse::Accepted().json(task))
+}
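For orientation, a sketch of the expected behaviour from a caller's point of view, assuming the `create_snapshot` test helper added to `tests/common/server.rs` further down in this changeset; the exact task `type`/`status` strings are assumptions based on how `SummarizedTaskView` serializes other task kinds:

    // Hedged sketch: POST /snapshots enqueues a SnapshotCreation task and
    // replies immediately with 202 Accepted plus a summarized task view.
    #[actix_rt::test]
    async fn snapshot_creation_is_asynchronous() {
        let server = Server::new().await;

        let (task, code) = server.create_snapshot().await;
        assert_eq!(code, 202);
        assert_eq!(task["status"], "enqueued");
        assert_eq!(task["type"], "snapshotCreation");
    }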
@@ -60,8 +60,7 @@ pub async fn swap_indexes(
     }

     let task = KindWithContent::IndexSwap { swaps };
-    let task = index_scheduler.register(task)?;
-    let task: SummarizedTaskView = task.into();
+    let task: SummarizedTaskView =
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
     Ok(HttpResponse::Accepted().json(task))
 }
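The change above trades a direct `index_scheduler.register(task)?` call for `tokio::task::spawn_blocking`, so the synchronous scheduler write no longer ties up an actix worker. A self-contained sketch of that pattern, with the closure body as a stand-in for the real registration:

    use tokio::task;

    // spawn_blocking moves the closure to tokio's dedicated blocking thread
    // pool and suspends the async caller until it finishes; the double `??`
    // in the diff unwraps the JoinError first, then the scheduler's own error.
    async fn register_without_blocking() -> Result<u64, task::JoinError> {
        let task_uid = task::spawn_blocking(|| {
            // stand-in for the synchronous index_scheduler.register(task) call
            42_u64
        })
        .await?;
        Ok(task_uid)
    }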
@@ -491,6 +491,20 @@ pub fn perform_search(
         tokenizer_builder.allow_list(&script_lang_map);
     }

+    let separators = index.allowed_separators(&rtxn)?;
+    let separators: Option<Vec<_>> =
+        separators.as_ref().map(|x| x.iter().map(String::as_str).collect());
+    if let Some(ref separators) = separators {
+        tokenizer_builder.separators(separators);
+    }
+
+    let dictionary = index.dictionary(&rtxn)?;
+    let dictionary: Option<Vec<_>> =
+        dictionary.as_ref().map(|x| x.iter().map(String::as_str).collect());
+    if let Some(ref dictionary) = dictionary {
+        tokenizer_builder.words_dict(dictionary);
+    }
+
     let mut formatter_builder = MatcherBuilder::new(matching_words, tokenizer_builder.build());
     formatter_builder.crop_marker(query.crop_marker);
     formatter_builder.highlight_prefix(query.highlight_pre_tag);
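The hunk above threads two index settings (custom separators and a user dictionary) into the search-time tokenizer. The intermediate `Option<Vec<_>>` exists because the settings come back as owned `Option<Vec<String>>` while the builder borrows string slices; here is a dependency-free sketch of that borrowing pattern, where the mock builder stands in for charabia's `TokenizerBuilder` and only mirrors the `separators`/`words_dict` calls seen in the diff:

    // Hedged, self-contained sketch of the Option<Vec<String>> -> &[&str] dance.
    struct MockTokenizerBuilder<'a> {
        separators: Option<&'a [&'a str]>,
        dictionary: Option<&'a [&'a str]>,
    }

    impl<'a> MockTokenizerBuilder<'a> {
        fn new() -> Self {
            Self { separators: None, dictionary: None }
        }
        fn separators(&mut self, separators: &'a [&'a str]) {
            self.separators = Some(separators);
        }
        fn words_dict(&mut self, dictionary: &'a [&'a str]) {
            self.dictionary = Some(dictionary);
        }
    }

    fn main() {
        // What index.allowed_separators(&rtxn)? would return: owned strings.
        let allowed: Option<Vec<String>> = Some(vec!["|".into(), "&".into()]);

        let mut builder = MockTokenizerBuilder::new();

        // Collect borrowed &str views first so they live as long as the builder needs.
        let separators: Option<Vec<&str>> =
            allowed.as_ref().map(|x| x.iter().map(String::as_str).collect());
        if let Some(ref separators) = separators {
            builder.separators(separators);
        }
    }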
@@ -1,8 +1,7 @@
 use std::{thread, time};

-use serde_json::{json, Value};
-
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;

 #[actix_rt::test]
 async fn add_valid_api_key() {

@@ -162,7 +161,7 @@ async fn add_valid_api_key_null_description() {
     server.use_api_key("MASTER_KEY");

     let content = json!({
-        "description": Value::Null,
+        "description": json!(null),
         "indexes": ["products"],
         "actions": ["documents.add"],
         "expiresAt": "2050-11-13T00:00:00"

@@ -365,7 +364,7 @@ async fn error_add_api_key_invalid_index_uids() {
     server.use_api_key("MASTER_KEY");

     let content = json!({
-        "description": Value::Null,
+        "description": json!(null),
         "indexes": ["invalid index # / \\name with spaces"],
         "actions": [
             "documents.add"

@@ -422,7 +421,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
+      "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
       "code": "invalid_api_key_actions",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"

@@ -507,7 +506,7 @@ async fn error_add_api_key_invalid_parameters_uid() {
 async fn error_add_api_key_parameters_uid_already_exist() {
     let mut server = Server::new_auth().await;
     server.use_api_key("MASTER_KEY");
-    let content = json!({
+    let content: Value = json!({
         "uid": "4bc0887a-0e41-4f3b-935d-0c451dcee9c8",
         "indexes": ["products"],
         "actions": ["search"],

@@ -1146,7 +1145,7 @@ async fn patch_api_key_description() {
     meili_snap::snapshot!(code, @"200 OK");

     // Remove the description
-    let content = json!({ "description": serde_json::Value::Null });
+    let content = json!({ "description": null });

     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###"
@@ -2,11 +2,13 @@ use std::collections::{HashMap, HashSet};

 use ::time::format_description::well_known::Rfc3339;
 use maplit::{hashmap, hashset};
+use meilisearch::Opt;
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
+use tempfile::TempDir;
 use time::{Duration, OffsetDateTime};

-use crate::common::Server;
+use crate::common::{default_settings, Server, Value};
+use crate::json;

 pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
     Lazy::new(|| {

@@ -54,6 +56,7 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
         ("GET", "/indexes/products/stats") => hashset!{"stats.get", "stats.*", "*"},
         ("GET", "/stats") => hashset!{"stats.get", "stats.*", "*"},
         ("POST", "/dumps") => hashset!{"dumps.create", "dumps.*", "*"},
+        ("POST", "/snapshots") => hashset!{"snapshots.create", "snapshots.*", "*"},
         ("GET", "/version") => hashset!{"version", "*"},
         ("GET", "/metrics") => hashset!{"metrics.get", "metrics.*", "*"},
         ("PATCH", "/keys/mykey/") => hashset!{"keys.update", "*"},

@@ -194,7 +197,9 @@ async fn access_authorized_master_key() {

 #[actix_rt::test]
 async fn access_authorized_restricted_index() {
-    let mut server = Server::new_auth().await;
+    let dir = TempDir::new().unwrap();
+    let enable_metrics = Opt { experimental_enable_metrics: true, ..default_settings(dir.path()) };
+    let mut server = Server::new_auth_with_options(enable_metrics, dir).await;
     for ((method, route), actions) in AUTHORIZATIONS.iter() {
         for action in actions {
             // create a new API key letting only the needed action.
@@ -1,8 +1,8 @@
 use meili_snap::*;
-use serde_json::json;
 use uuid::Uuid;

 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn create_api_key_bad_description() {

@@ -90,7 +90,7 @@ async fn create_api_key_bad_actions() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
+      "message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
       "code": "invalid_api_key_actions",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -7,9 +7,9 @@ mod tenant_token;
 mod tenant_token_multi_search;

 use actix_web::http::StatusCode;
-use serde_json::{json, Value};

-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;

 impl Server {
     pub fn use_api_key(&mut self, api_key: impl AsRef<str>) {
@@ -3,11 +3,11 @@ use std::collections::HashMap;
 use ::time::format_description::well_known::Rfc3339;
 use maplit::hashmap;
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 use time::{Duration, OffsetDateTime};

 use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;

 fn generate_tenant_token(
     parent_uid: impl AsRef<str>,

@@ -233,31 +233,31 @@ async fn search_authorized_simple_token() {
         },
         hashmap! {
             "searchRules" => json!({"*": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"*": null}),
+            "exp" => json!(null)
         },
         hashmap! {
             "searchRules" => json!(["*"]),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         hashmap! {
             "searchRules" => json!({"sales": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"sales": null}),
+            "exp" => json!(null)
         },
         hashmap! {
             "searchRules" => json!(["sales"]),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         hashmap! {
             "searchRules" => json!(["sa*"]),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
     ];

@@ -386,7 +386,7 @@ async fn error_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {

@@ -398,7 +398,7 @@ async fn error_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
+            "searchRules" => json!({"sales": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {

@@ -428,15 +428,15 @@ async fn error_search_forbidden_token() {
         },
         hashmap! {
             "searchRules" => json!({"products": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         hashmap! {
-            "searchRules" => json!({"products": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"products": null}),
+            "exp" => json!(null)
         },
         hashmap! {
             "searchRules" => json!(["products"]),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         // expired token
         hashmap! {

@@ -444,7 +444,7 @@ async fn error_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {

@@ -456,7 +456,7 @@ async fn error_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
+            "searchRules" => json!({"sales": null}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -3,11 +3,11 @@ use std::collections::HashMap;
 use ::time::format_description::well_known::Rfc3339;
 use maplit::hashmap;
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 use time::{Duration, OffsetDateTime};

 use super::authorization::ALL_ACTIONS;
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;

 fn generate_tenant_token(
     parent_uid: impl AsRef<str>,

@@ -512,31 +512,31 @@ async fn single_search_authorized_simple_token() {
         },
         hashmap! {
             "searchRules" => json!({"*": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"*": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["*"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!({"sales": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"sales": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["sales"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["sa*"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
     ];

@@ -564,31 +564,31 @@ async fn multi_search_authorized_simple_token() {
         },
         hashmap! {
             "searchRules" => json!({"*": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"*": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["*"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!({"sales": {}, "products": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null, "products": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"sales": null, "products": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["sales", "products"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["sa*", "pro*"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
     ];

@@ -823,7 +823,7 @@ async fn error_single_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {

@@ -835,7 +835,7 @@ async fn error_single_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
+            "searchRules" => json!({"sales": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {

@@ -864,7 +864,7 @@ async fn error_multi_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {

@@ -876,7 +876,7 @@ async fn error_multi_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null, "products": Value::Null}),
+            "searchRules" => json!({"sales": null, "products": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {

@@ -919,15 +919,15 @@ async fn error_single_search_forbidden_token() {
         },
         hashmap! {
             "searchRules" => json!({"products": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"products": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"products": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["products"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         // expired token
         hashmap! {

@@ -935,7 +935,7 @@ async fn error_single_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {

@@ -947,7 +947,7 @@ async fn error_single_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
+            "searchRules" => json!({"sales": null}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {

@@ -978,15 +978,15 @@ async fn error_multi_search_forbidden_token() {
         },
         hashmap! {
             "searchRules" => json!({"products": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"products": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"products": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["products"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!({"sales": {}}),

@@ -998,15 +998,15 @@ async fn error_multi_search_forbidden_token() {
         },
         hashmap! {
             "searchRules" => json!({"sales": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"sales": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["sales"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         // expired token
         hashmap! {

@@ -1014,7 +1014,7 @@ async fn error_multi_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {

@@ -1026,7 +1026,7 @@ async fn error_multi_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null, "products": {}}),
+            "searchRules" => json!({"sales": null, "products": {}}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -3,12 +3,13 @@ use std::panic::{catch_unwind, resume_unwind, UnwindSafe};
 use std::time::Duration;

 use actix_web::http::StatusCode;
-use serde_json::{json, Value};
 use tokio::time::sleep;
 use urlencoding::encode as urlencode;

 use super::encoder::Encoder;
 use super::service::Service;
+use super::Value;
+use crate::json;

 pub struct Index<'a> {
     pub uid: String,

@@ -242,7 +243,9 @@ impl Index<'_> {

     pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/documents/delete-batch", urlencode(self.uid.as_ref()));
-        self.service.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder).await
+        self.service
+            .post_encoded(url, serde_json::to_value(&ids).unwrap().into(), self.encoder)
+            .await
     }

     pub async fn delete_batch_raw(&self, body: Value) -> (Value, StatusCode) {
@@ -3,9 +3,83 @@ pub mod index;
 pub mod server;
 pub mod service;

+use std::fmt::{self, Display};
+
 pub use index::{GetAllDocumentsOptions, GetDocumentOptions};
+use meili_snap::json_string;
+use serde::{Deserialize, Serialize};
 pub use server::{default_settings, Server};

+#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
+pub struct Value(pub serde_json::Value);
+
+impl Value {
+    pub fn uid(&self) -> u64 {
+        if let Some(uid) = self["uid"].as_u64() {
+            uid
+        } else if let Some(uid) = self["taskUid"].as_u64() {
+            uid
+        } else {
+            panic!("Didn't find any task id in: {self}");
+        }
+    }
+}
+
+impl From<serde_json::Value> for Value {
+    fn from(value: serde_json::Value) -> Self {
+        Value(value)
+    }
+}
+
+impl std::ops::Deref for Value {
+    type Target = serde_json::Value;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl PartialEq<serde_json::Value> for Value {
+    fn eq(&self, other: &serde_json::Value) -> bool {
+        &self.0 == other
+    }
+}
+
+impl PartialEq<Value> for serde_json::Value {
+    fn eq(&self, other: &Value) -> bool {
+        self == &other.0
+    }
+}
+
+impl PartialEq<&str> for Value {
+    fn eq(&self, other: &&str) -> bool {
+        self.0.eq(other)
+    }
+}
+
+impl Display for Value {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "{}",
+            json_string!(self, { ".enqueuedAt" => "[date]", ".processedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" })
+        )
+    }
+}
+
+impl From<Vec<Value>> for Value {
+    fn from(value: Vec<Value>) -> Self {
+        Self(value.into_iter().map(|value| value.0).collect::<serde_json::Value>())
+    }
+}
+
+#[macro_export]
+macro_rules! json {
+    ($($json:tt)+) => {
+        $crate::common::Value(serde_json::json!($($json)+))
+    };
+}
+
 /// Performs a search test on both post and get routes
 #[macro_export]
 macro_rules! test_post_get_search {
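In short, the wrapper gives every test a uniform JSON value: the crate-local `json!` macro shadows `serde_json::json!` and returns `common::Value`, `Deref` keeps all of serde_json's indexing available, and `Display` goes through `json_string!` so printed tasks have their volatile date fields redacted. A small sketch of the intended usage, grounded only in the impls shown above:

    // Hedged sketch of how the helpers above compose inside a test.
    use crate::common::Value;
    use crate::json;

    fn value_wrapper_demo() {
        // The overriding json! macro wraps serde_json::json! output in common::Value.
        let task: Value = json!({ "taskUid": 12, "status": "enqueued" });

        // uid() tries "uid" first, then "taskUid", and panics if neither is set.
        assert_eq!(task.uid(), 12);

        // Deref to serde_json::Value keeps indexing working, and the PartialEq
        // impls let assertions compare against plain serde_json values and &str.
        assert_eq!(task["status"], "enqueued");
    }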
@@ -11,13 +11,14 @@ use clap::Parser;
 use meilisearch::option::{IndexerOpts, MaxMemory, Opt};
 use meilisearch::{analytics, create_app, setup_meilisearch};
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 use tempfile::TempDir;
 use tokio::time::sleep;

 use super::index::Index;
 use super::service::Service;
 use crate::common::encoder::Encoder;
+use crate::common::Value;
+use crate::json;

 pub struct Server {
     pub service: Service,

@@ -156,6 +157,10 @@ impl Server {
         self.service.post("/dumps", json!(null)).await
     }

+    pub async fn create_snapshot(&self) -> (Value, StatusCode) {
+        self.service.post("/snapshots", json!(null)).await
+    }
+
     pub async fn index_swap(&self, value: Value) -> (Value, StatusCode) {
         self.service.post("/swap-indexes", value).await
     }

@@ -197,6 +202,10 @@ impl Server {
     pub async fn set_features(&self, value: Value) -> (Value, StatusCode) {
         self.service.patch("/experimental-features", value).await
     }
+
+    pub async fn get_metrics(&self) -> (Value, StatusCode) {
+        self.service.get("/metrics").await
+    }
 }

 pub fn default_settings(dir: impl AsRef<Path>) -> Opt {

@@ -204,7 +213,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
         db_path: dir.as_ref().join("db"),
         dump_dir: dir.as_ref().join("dumps"),
         env: "development".to_owned(),
-        #[cfg(all(not(debug_assertions), feature = "analytics"))]
+        #[cfg(feature = "analytics")]
         no_analytics: true,
         max_index_size: Byte::from_unit(100.0, ByteUnit::MiB).unwrap(),
         max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(),

@@ -216,7 +225,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
             skip_index_budget: true,
             ..Parser::parse_from(None as Option<&str>)
         },
-        experimental_enable_metrics: true,
+        experimental_enable_metrics: false,
         ..Parser::parse_from(None as Option<&str>)
     }
 }
@@ -7,9 +7,9 @@ use actix_web::test::TestRequest;
 use index_scheduler::IndexScheduler;
 use meilisearch::{analytics, create_app, Opt};
 use meilisearch_auth::AuthController;
-use serde_json::Value;

 use crate::common::encoder::Encoder;
+use crate::common::Value;

 pub struct Service {
     pub index_scheduler: Arc<IndexScheduler>,
@@ -3,9 +3,8 @@
 mod common;

 use actix_web::test;
-use serde_json::{json, Value};

-use crate::common::Server;
+use crate::common::{Server, Value};

 enum HttpVerb {
     Put,
@@ -1,11 +1,11 @@
 use actix_web::test;
 use meili_snap::{json_string, snapshot};
-use serde_json::{json, Value};
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;

 use crate::common::encoder::Encoder;
-use crate::common::{GetAllDocumentsOptions, Server};
+use crate::common::{GetAllDocumentsOptions, Server, Value};
+use crate::json;

 /// This is the basic usage of our API and every other tests uses the content-type application/json
 #[actix_rt::test]
@@ -1,7 +1,7 @@
 use meili_snap::{json_string, snapshot};
-use serde_json::json;

 use crate::common::{GetAllDocumentsOptions, Server};
+use crate::json;

 #[actix_rt::test]
 async fn delete_one_document_unexisting_index() {

@@ -154,6 +154,19 @@ async fn delete_document_by_filter() {
     )
     .await;
     index.wait_task(1).await;
+
+    let (stats, _) = index.stats().await;
+    snapshot!(json_string!(stats), @r###"
+    {
+      "numberOfDocuments": 4,
+      "isIndexing": false,
+      "fieldDistribution": {
+        "color": 3,
+        "id": 4
+      }
+    }
+    "###);
+
     let (response, code) =
         index.delete_document_by_filter(json!({ "filter": "color = blue"})).await;
     snapshot!(code, @"202 Accepted");

@@ -188,6 +201,18 @@ async fn delete_document_by_filter() {
     }
     "###);

+    let (stats, _) = index.stats().await;
+    snapshot!(json_string!(stats), @r###"
+    {
+      "numberOfDocuments": 2,
+      "isIndexing": false,
+      "fieldDistribution": {
+        "color": 1,
+        "id": 2
+      }
+    }
+    "###);
+
     let (documents, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
     snapshot!(code, @"200 OK");
     snapshot!(json_string!(documents), @r###"

@@ -241,6 +266,18 @@ async fn delete_document_by_filter() {
     }
     "###);

+    let (stats, _) = index.stats().await;
+    snapshot!(json_string!(stats), @r###"
+    {
+      "numberOfDocuments": 1,
+      "isIndexing": false,
+      "fieldDistribution": {
+        "color": 1,
+        "id": 1
+      }
+    }
+    "###);
+
     let (documents, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
     snapshot!(code, @"200 OK");
     snapshot!(json_string!(documents), @r###"
@@ -1,8 +1,8 @@
 use meili_snap::*;
-use serde_json::json;
 use urlencoding::encode;

 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn get_all_documents_bad_offset() {
@@ -1,11 +1,11 @@
 use actix_web::test;
 use http::header::ACCEPT_ENCODING;
 use meili_snap::*;
-use serde_json::{json, Value};
 use urlencoding::encode as urlencode;

 use crate::common::encoder::Encoder;
-use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
+use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server, Value};
+use crate::json;

 // TODO: partial test since we are testing error, amd error is not yet fully implemented in
 // transplant

@@ -40,7 +40,7 @@ async fn get_document() {
     let server = Server::new().await;
     let index = server.index("test");
     index.create(None).await;
-    let documents = serde_json::json!([
+    let documents = json!([
         {
             "id": 0,
             "nested": { "content": "foobar" },

@@ -53,7 +53,7 @@ async fn get_document() {
     assert_eq!(code, 200);
     assert_eq!(
         response,
-        serde_json::json!({
+        json!({
             "id": 0,
             "nested": { "content": "foobar" },
         })

@@ -64,7 +64,7 @@ async fn get_document() {
     assert_eq!(code, 200);
     assert_eq!(
         response,
-        serde_json::json!({
+        json!({
             "id": 0,
         })
     );

@@ -75,7 +75,7 @@ async fn get_document() {
     assert_eq!(code, 200);
     assert_eq!(
         response,
-        serde_json::json!({
+        json!({
             "nested": { "content": "foobar" },
         })
     );

@@ -122,7 +122,7 @@ async fn get_all_documents_no_options() {
     assert_eq!(code, 200);
     let arr = response["results"].as_array().unwrap();
     assert_eq!(arr.len(), 20);
-    let first = serde_json::json!({
+    let first = json!({
         "id":0,
         "isActive":false,
         "balance":"$2,668.55",
@@ -1,7 +1,8 @@
-use serde_json::json;
+use meili_snap::snapshot;

 use crate::common::encoder::Encoder;
 use crate::common::{GetAllDocumentsOptions, Server};
+use crate::json;

 #[actix_rt::test]
 async fn error_document_update_create_index_bad_uid() {

@@ -84,7 +85,13 @@ async fn update_document() {

     let (response, code) = index.get_document(1, None).await;
     assert_eq!(code, 200);
-    assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
+    snapshot!(response, @r###"
+    {
+      "doc_id": 1,
+      "content": "foo",
+      "other": "bar"
+    }
+    "###);
 }

 #[actix_rt::test]

@@ -122,7 +129,13 @@ async fn update_document_gzip_encoded() {

     let (response, code) = index.get_document(1, None).await;
     assert_eq!(code, 200);
-    assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
+    snapshot!(response, @r###"
+    {
+      "doc_id": 1,
+      "content": "foo",
+      "other": "bar"
+    }
+    "###);
 }

 #[actix_rt::test]
(File diff suppressed because it is too large.)
@@ -1,6 +1,8 @@
-use serde_json::json;
+use meilisearch::Opt;
+use tempfile::TempDir;

-use crate::common::Server;
+use crate::common::{default_settings, Server};
+use crate::json;

 /// Feature name to test against.
 /// This will have to be changed by a different one when that feature is stabilized.

@@ -17,7 +19,9 @@ async fn experimental_features() {
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
       "scoreDetails": false,
-      "vectorStore": false
+      "vectorStore": false,
+      "metrics": false,
+      "exportPuffinReports": false
     }
     "###);

@@ -27,7 +31,9 @@ async fn experimental_features() {
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
       "scoreDetails": false,
-      "vectorStore": true
+      "vectorStore": true,
+      "metrics": false,
+      "exportPuffinReports": false
     }
     "###);

@@ -37,7 +43,9 @@ async fn experimental_features() {
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
       "scoreDetails": false,
-      "vectorStore": true
+      "vectorStore": true,
+      "metrics": false,
+      "exportPuffinReports": false
     }
     "###);

@@ -48,7 +56,9 @@ async fn experimental_features() {
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
       "scoreDetails": false,
-      "vectorStore": true
+      "vectorStore": true,
+      "metrics": false,
+      "exportPuffinReports": false
     }
     "###);

@@ -59,11 +69,73 @@ async fn experimental_features() {
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
       "scoreDetails": false,
-      "vectorStore": true
+      "vectorStore": true,
+      "metrics": false,
+      "exportPuffinReports": false
     }
     "###);
 }

+#[actix_rt::test]
+async fn experimental_feature_metrics() {
+    // instance flag for metrics enables metrics at startup
+    let dir = TempDir::new().unwrap();
+    let enable_metrics = Opt { experimental_enable_metrics: true, ..default_settings(dir.path()) };
+    let server = Server::new_with_options(enable_metrics).await.unwrap();
+
+    let (response, code) = server.get_features().await;
+
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "scoreDetails": false,
+      "vectorStore": false,
+      "metrics": true,
+      "exportPuffinReports": false
+    }
+    "###);
+
+    let (response, code) = server.get_metrics().await;
+    meili_snap::snapshot!(code, @"200 OK");
+
+    // metrics are not returned in json format
+    // so the test server will return null
+    meili_snap::snapshot!(response, @"null");
+
+    // disabling metrics results in invalid request
+    let (response, code) = server.set_features(json!({"metrics": false})).await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(response["metrics"], @"false");
+
+    let (response, code) = server.get_metrics().await;
+    meili_snap::snapshot!(code, @"400 Bad Request");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "message": "Getting metrics requires enabling the `metrics` experimental feature. See https://github.com/meilisearch/product/discussions/625",
+      "code": "feature_not_enabled",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
+    }
+    "###);
+
+    // enabling metrics via HTTP results in valid request
+    let (response, code) = server.set_features(json!({"metrics": true})).await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(response["metrics"], @"true");
+
+    let (response, code) = server.get_metrics().await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(response, @"null");
+
+    // startup without flag respects persisted metrics value
+    let disable_metrics =
+        Opt { experimental_enable_metrics: false, ..default_settings(dir.path()) };
+    let server_no_flag = Server::new_with_options(disable_metrics).await.unwrap();
+    let (response, code) = server_no_flag.get_metrics().await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(response, @"null");
+}
+
 #[actix_rt::test]
 async fn errors() {
     let server = Server::new().await;

@@ -74,7 +146,7 @@ async fn errors() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Unknown field `NotAFeature`: expected one of `scoreDetails`, `vectorStore`",
+      "message": "Unknown field `NotAFeature`: expected one of `scoreDetails`, `vectorStore`, `metrics`, `exportPuffinReports`",
       "code": "bad_request",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#bad_request"
@@ -2,10 +2,10 @@ use actix_web::http::header::ContentType;
 use actix_web::test;
 use http::header::ACCEPT_ENCODING;
 use meili_snap::{json_string, snapshot};
-use serde_json::{json, Value};

 use crate::common::encoder::Encoder;
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;

 #[actix_rt::test]
 async fn create_index_no_primary_key() {

@@ -21,7 +21,7 @@ async fn create_index_no_primary_key() {

     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["type"], "indexCreation");
-    assert_eq!(response["details"]["primaryKey"], Value::Null);
+    assert_eq!(response["details"]["primaryKey"], json!(null));
 }

 #[actix_rt::test]

@@ -38,7 +38,7 @@ async fn create_index_with_gzip_encoded_request() {

     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["type"], "indexCreation");
-    assert_eq!(response["details"]["primaryKey"], Value::Null);
+    assert_eq!(response["details"]["primaryKey"], json!(null));
 }

 #[actix_rt::test]

@@ -86,7 +86,7 @@ async fn create_index_with_zlib_encoded_request() {

     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["type"], "indexCreation");
-    assert_eq!(response["details"]["primaryKey"], Value::Null);
+    assert_eq!(response["details"]["primaryKey"], json!(null));
 }

 #[actix_rt::test]

@@ -103,7 +103,7 @@ async fn create_index_with_brotli_encoded_request() {

     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["type"], "indexCreation");
-    assert_eq!(response["details"]["primaryKey"], Value::Null);
+    assert_eq!(response["details"]["primaryKey"], json!(null));
 }

 #[actix_rt::test]

@@ -136,7 +136,7 @@ async fn create_index_with_invalid_primary_key() {

     let (response, code) = index.get().await;
     assert_eq!(code, 200);
-    assert_eq!(response["primaryKey"], Value::Null);
+    assert_eq!(response["primaryKey"], json!(null));
 }

 #[actix_rt::test]
@@ -1,6 +1,5 @@
-use serde_json::json;

 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn create_and_delete_index() {
@@ -1,7 +1,7 @@
 use meili_snap::*;
-use serde_json::json;

 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn get_indexes_bad_offset() {
@@ -1,6 +1,5 @@
-use serde_json::json;

 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn stats() {
@@ -1,9 +1,9 @@
-use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;

 use crate::common::encoder::Encoder;
 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn update_primary_key() {
meilisearch/tests/search/distinct.rs (new file, +63)
@@ -0,0 +1,63 @@
+use meili_snap::snapshot;
+use once_cell::sync::Lazy;
+
+use crate::common::{Server, Value};
+use crate::json;
+
+pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+    json!([
+        {"productId": 1, "shopId": 1},
+        {"productId": 2, "shopId": 1},
+        {"productId": 3, "shopId": 2},
+        {"productId": 4, "shopId": 2},
+        {"productId": 5, "shopId": 3},
+        {"productId": 6, "shopId": 3},
+        {"productId": 7, "shopId": 4},
+        {"productId": 8, "shopId": 4},
+        {"productId": 9, "shopId": 5},
+        {"productId": 10, "shopId": 5}
+    ])
+});
+
+pub(self) static DOCUMENT_PRIMARY_KEY: &str = "productId";
+pub(self) static DOCUMENT_DISTINCT_KEY: &str = "shopId";
+
+/// testing: https://github.com/meilisearch/meilisearch/issues/4078
+#[actix_rt::test]
+async fn distinct_search_with_offset_no_ranking() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
+    index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
+    index.wait_task(1).await;
+
+    fn get_hits(Value(response): Value) -> Vec<i64> {
+        let hits_array = response["hits"].as_array().unwrap();
+        hits_array.iter().map(|h| h[DOCUMENT_DISTINCT_KEY].as_i64().unwrap()).collect::<Vec<_>>()
+    }
+
+    let (response, code) = index.search_post(json!({"limit": 2, "offset": 0})).await;
+    let hits = get_hits(response);
+    snapshot!(code, @"200 OK");
+    snapshot!(hits.len(), @"2");
+    snapshot!(format!("{:?}", hits), @"[1, 2]");
+
+    let (response, code) = index.search_post(json!({"limit": 2, "offset": 2})).await;
+    let hits = get_hits(response);
+    snapshot!(code, @"200 OK");
+    snapshot!(hits.len(), @"2");
+    snapshot!(format!("{:?}", hits), @"[3, 4]");
+
+    let (response, code) = index.search_post(json!({"limit": 10, "offset": 4})).await;
+    let hits = get_hits(response);
+    snapshot!(code, @"200 OK");
+    snapshot!(hits.len(), @"1");
+    snapshot!(format!("{:?}", hits), @"[5]");
+
+    let (response, code) = index.search_post(json!({"limit": 10, "offset": 5})).await;
+    let hits = get_hits(response);
+    snapshot!(code, @"200 OK");
+    snapshot!(hits.len(), @"0");
+}
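The new `distinct_search_with_offset_no_ranking` test above (added for issue 4078) pins down how `offset` interacts with a distinct attribute: the ten products collapse to five hits, one per `shopId`, and pagination walks the deduplicated hit list rather than the raw documents. A standalone sketch of that expected behavior over the same fixture (plain Rust, illustration only, not the engine's actual implementation):

```rust
use std::collections::HashSet;

fn main() {
    // (productId, shopId) pairs, mirroring the test's DOCUMENTS fixture.
    let docs = [(1, 1), (2, 1), (3, 2), (4, 2), (5, 3), (6, 3), (7, 4), (8, 4), (9, 5), (10, 5)];

    // Keep the first document per shopId, then apply offset/limit to the
    // deduplicated list, returning each hit's shopId as get_hits does.
    let paginate = |offset: usize, limit: usize| -> Vec<i32> {
        let mut seen = HashSet::new();
        docs.iter()
            .filter(|(_, shop)| seen.insert(*shop))
            .map(|(_, shop)| *shop)
            .skip(offset)
            .take(limit)
            .collect()
    };

    assert_eq!(paginate(0, 2), vec![1, 2]); // matches the "[1, 2]" snapshot
    assert_eq!(paginate(2, 2), vec![3, 4]); // matches the "[3, 4]" snapshot
    assert_eq!(paginate(4, 10), vec![5]);   // only one distinct hit remains
    assert_eq!(paginate(5, 10), Vec::<i32>::new());
}
```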
@@ -1,8 +1,8 @@
 use meili_snap::*;
-use serde_json::json;

 use super::DOCUMENTS;
 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn search_unexisting_index() {
@@ -1,8 +1,8 @@
 use meili_snap::snapshot;
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};

-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;

 pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     json!([
@@ -1,8 +1,8 @@
 use insta::{allow_duplicates, assert_json_snapshot};
-use serde_json::json;

 use super::*;
 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn formatted_contain_wildcard() {
@@ -1,8 +1,8 @@
 use meili_snap::{json_string, snapshot};
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};

-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;

 pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     json!([
@@ -1,6 +1,7 @@
 // This modules contains all the test concerning search. Each particular feature of the search
 // should be tested in its own module to isolate tests and keep the tests readable.

+mod distinct;
 mod errors;
 mod facet_search;
 mod formatted;
@@ -10,9 +11,9 @@ mod pagination;
 mod restrict_searchable;

 use once_cell::sync::Lazy;
-use serde_json::{json, Value};

-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;

 pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     json!([
@@ -816,7 +817,7 @@ async fn experimental_feature_score_details() {
         },
         "proximity": {
             "order": 2,
-            "score": 0.875
+            "score": 0.75
         },
         "attribute": {
             "order": 3,
@@ -1104,3 +1105,59 @@ async fn camelcased_words() {
     })
     .await;
 }
+
+#[actix_rt::test]
+async fn simple_search_with_strange_synonyms() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await;
+    let r = index.wait_task(0).await;
+    meili_snap::snapshot!(r["status"], @r###""succeeded""###);
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    index
+        .search(json!({"q": "How to train"}), |response, code| {
+            meili_snap::snapshot!(code, @"200 OK");
+            meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
+            [
+              {
+                "title": "How to Train Your Dragon: The Hidden World",
+                "id": "166428"
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "How & train"}), |response, code| {
+            meili_snap::snapshot!(code, @"200 OK");
+            meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
+            [
+              {
+                "title": "How to Train Your Dragon: The Hidden World",
+                "id": "166428"
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "to"}), |response, code| {
+            meili_snap::snapshot!(code, @"200 OK");
+            meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
+            [
+              {
+                "title": "How to Train Your Dragon: The Hidden World",
+                "id": "166428"
+              }
+            ]
+            "###);
+        })
+        .await;
+}
@@ -1,8 +1,8 @@
 use meili_snap::{json_string, snapshot};
-use serde_json::json;

 use super::{DOCUMENTS, NESTED_DOCUMENTS};
 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn search_empty_list() {
@@ -1,6 +1,5 @@
-use serde_json::json;

 use crate::common::Server;
+use crate::json;
 use crate::search::DOCUMENTS;

 #[actix_rt::test]
@@ -1,9 +1,9 @@
 use meili_snap::{json_string, snapshot};
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};

 use crate::common::index::Index;
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;

 async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
     let index = server.index("test");
@@ -1,6 +1,5 @@
-use serde_json::json;

 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn set_and_reset_distinct_attribute() {
@@ -1,7 +1,7 @@
 use meili_snap::*;
-use serde_json::json;

 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn settings_bad_displayed_attributes() {
@@ -1,21 +1,24 @@
 use std::collections::HashMap;

 use once_cell::sync::Lazy;
-use serde_json::{json, Value};

-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;

 static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|| {
     let mut map = HashMap::new();
     map.insert("displayed_attributes", json!(["*"]));
     map.insert("searchable_attributes", json!(["*"]));
     map.insert("filterable_attributes", json!([]));
-    map.insert("distinct_attribute", json!(Value::Null));
+    map.insert("distinct_attribute", json!(null));
     map.insert(
         "ranking_rules",
         json!(["words", "typo", "proximity", "attribute", "sort", "exactness"]),
     );
     map.insert("stop_words", json!([]));
+    map.insert("non_separator_tokens", json!([]));
+    map.insert("separator_tokens", json!([]));
+    map.insert("dictionary", json!([]));
     map.insert("synonyms", json!({}));
     map.insert(
         "faceting",
@@ -51,7 +54,7 @@ async fn get_settings() {
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     let settings = response.as_object().unwrap();
-    assert_eq!(settings.keys().len(), 11);
+    assert_eq!(settings.keys().len(), 14);
     assert_eq!(settings["displayedAttributes"], json!(["*"]));
     assert_eq!(settings["searchableAttributes"], json!(["*"]));
     assert_eq!(settings["filterableAttributes"], json!([]));
@@ -62,6 +65,9 @@ async fn get_settings() {
         json!(["words", "typo", "proximity", "attribute", "sort", "exactness"])
     );
     assert_eq!(settings["stopWords"], json!([]));
+    assert_eq!(settings["nonSeparatorTokens"], json!([]));
+    assert_eq!(settings["separatorTokens"], json!([]));
+    assert_eq!(settings["dictionary"], json!([]));
     assert_eq!(
         settings["faceting"],
         json!({
@@ -223,7 +229,7 @@ macro_rules! test_setting_routes {
                 .chars()
                 .map(|c| if c == '_' { '-' } else { c })
                 .collect::<String>());
-            let (response, code) = server.service.$write_method(url, serde_json::Value::Null).await;
+            let (response, code) = server.service.$write_method(url, serde_json::Value::Null.into()).await;
             assert_eq!(code, 202, "{}", response);
             server.index("").wait_task(0).await;
             let (response, code) = server.index("test").get().await;
@@ -272,6 +278,9 @@ test_setting_routes!(
     searchable_attributes put,
     distinct_attribute put,
     stop_words put,
+    separator_tokens put,
+    non_separator_tokens put,
+    dictionary put,
     ranking_rules put,
     synonyms put,
     pagination patch,
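Aside: the expected key count in `get_settings` rises from 11 to 14 because this change introduces exactly three new settings, all defaulting to empty arrays. A minimal sketch of their shape (assuming only `serde_json`):

```rust
use serde_json::json;

fn main() {
    // Defaults asserted by the updated test above.
    let new_settings = json!({
        "nonSeparatorTokens": [],
        "separatorTokens": [],
        "dictionary": []
    });
    assert_eq!(new_settings.as_object().unwrap().len(), 3); // 11 + 3 = 14 keys overall
    println!("{new_settings}");
}
```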
@@ -1,3 +1,4 @@
 mod distinct;
 mod errors;
 mod get_settings;
+mod tokenizer_customization;
meilisearch/tests/settings/tokenizer_customization.rs (new file, +467)
@@ -0,0 +1,467 @@
+use meili_snap::{json_string, snapshot};
+
+use crate::common::Server;
+use crate::json;
+
+#[actix_rt::test]
+async fn set_and_reset() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let (_response, _code) = index
+        .update_settings(json!({
+            "nonSeparatorTokens": ["#", "&"],
+            "separatorTokens": ["&sep", "<br/>"],
+            "dictionary": ["J.R.R.", "J. R. R."],
+        }))
+        .await;
+    index.wait_task(0).await;
+
+    let (response, _) = index.settings().await;
+    snapshot!(json_string!(response["nonSeparatorTokens"]), @r###"
+    [
+      "#",
+      "&"
+    ]
+    "###);
+    snapshot!(json_string!(response["separatorTokens"]), @r###"
+    [
+      "&sep",
+      "<br/>"
+    ]
+    "###);
+    snapshot!(json_string!(response["dictionary"]), @r###"
+    [
+      "J. R. R.",
+      "J.R.R."
+    ]
+    "###);
+
+    index
+        .update_settings(json!({
+            "nonSeparatorTokens": null,
+            "separatorTokens": null,
+            "dictionary": null,
+        }))
+        .await;
+
+    index.wait_task(1).await;
+
+    let (response, _) = index.settings().await;
+    snapshot!(json_string!(response["nonSeparatorTokens"]), @"[]");
+    snapshot!(json_string!(response["separatorTokens"]), @"[]");
+    snapshot!(json_string!(response["dictionary"]), @"[]");
+}
+
+#[actix_rt::test]
+async fn set_and_search() {
+    let documents = json!([
+        {
+            "id": 1,
+            "content": "Mac & cheese",
+        },
+        {
+            "id": 2,
+            "content": "G#D#G#D#G#C#D#G#C#",
+        },
+        {
+            "id": 3,
+            "content": "Mac&sep&&sepcheese",
+        },
+    ]);
+
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index.add_documents(documents, None).await;
+    index.wait_task(0).await;
+
+    let (_response, _code) = index
+        .update_settings(json!({
+            "nonSeparatorTokens": ["#", "&"],
+            "separatorTokens": ["<br/>", "&sep"],
+            "dictionary": ["#", "A#", "B#", "C#", "D#", "E#", "F#", "G#"],
+        }))
+        .await;
+    index.wait_task(1).await;
+
+    index
+        .search(json!({"q": "&", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 1,
+                "content": "Mac & cheese",
+                "_formatted": {
+                  "id": "1",
+                  "content": "Mac <em>&</em> cheese"
+                }
+              },
+              {
+                "id": 3,
+                "content": "Mac&sep&&sepcheese",
+                "_formatted": {
+                  "id": "3",
+                  "content": "Mac&sep<em>&</em>&sepcheese"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(
+            json!({"q": "Mac & cheese", "attributesToHighlight": ["content"]}),
+            |response, code| {
+                snapshot!(code, @"200 OK");
+                snapshot!(json_string!(response["hits"]), @r###"
+                [
+                  {
+                    "id": 1,
+                    "content": "Mac & cheese",
+                    "_formatted": {
+                      "id": "1",
+                      "content": "<em>Mac</em> <em>&</em> <em>cheese</em>"
+                    }
+                  },
+                  {
+                    "id": 3,
+                    "content": "Mac&sep&&sepcheese",
+                    "_formatted": {
+                      "id": "3",
+                      "content": "<em>Mac</em>&sep<em>&</em>&sep<em>cheese</em>"
+                    }
+                  }
+                ]
+                "###);
+            },
+        )
+        .await;
+
+    index
+        .search(
+            json!({"q": "Mac&sep&&sepcheese", "attributesToHighlight": ["content"]}),
+            |response, code| {
+                snapshot!(code, @"200 OK");
+                snapshot!(json_string!(response["hits"]), @r###"
+                [
+                  {
+                    "id": 1,
+                    "content": "Mac & cheese",
+                    "_formatted": {
+                      "id": "1",
+                      "content": "<em>Mac</em> <em>&</em> <em>cheese</em>"
+                    }
+                  },
+                  {
+                    "id": 3,
+                    "content": "Mac&sep&&sepcheese",
+                    "_formatted": {
+                      "id": "3",
+                      "content": "<em>Mac</em>&sep<em>&</em>&sep<em>cheese</em>"
+                    }
+                  }
+                ]
+                "###);
+            },
+        )
+        .await;
+
+    index
+        .search(json!({"q": "C#D#G", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 2,
+                "content": "G#D#G#D#G#C#D#G#C#",
+                "_formatted": {
+                  "id": "2",
+                  "content": "<em>G</em>#<em>D#</em><em>G</em>#<em>D#</em><em>G</em>#<em>C#</em><em>D#</em><em>G</em>#<em>C#</em>"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "#", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @"[]");
+        })
+        .await;
+}
+
+#[actix_rt::test]
+async fn advanced_synergies() {
+    let documents = json!([
+        {
+            "id": 1,
+            "content": "J.R.R. Tolkien",
+        },
+        {
+            "id": 2,
+            "content": "J. R. R. Tolkien",
+        },
+        {
+            "id": 3,
+            "content": "jrr Tolkien",
+        },
+        {
+            "id": 4,
+            "content": "J.K. Rowlings",
+        },
+        {
+            "id": 5,
+            "content": "J. K. Rowlings",
+        },
+        {
+            "id": 6,
+            "content": "jk Rowlings",
+        },
+    ]);
+
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index.add_documents(documents, None).await;
+    index.wait_task(0).await;
+
+    let (_response, _code) = index
+        .update_settings(json!({
+            "dictionary": ["J.R.R.", "J. R. R."],
+            "synonyms": {
+                "J.R.R.": ["jrr", "J. R. R."],
+                "J. R. R.": ["jrr", "J.R.R."],
+                "jrr": ["J.R.R.", "J. R. R."],
+                "J.K.": ["jk", "J. K."],
+                "J. K.": ["jk", "J.K."],
+                "jk": ["J.K.", "J. K."],
+            }
+        }))
+        .await;
+    index.wait_task(1).await;
+
+    index
+        .search(json!({"q": "J.R.R.", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 1,
+                "content": "J.R.R. Tolkien",
+                "_formatted": {
+                  "id": "1",
+                  "content": "<em>J.R.R.</em> Tolkien"
+                }
+              },
+              {
+                "id": 2,
+                "content": "J. R. R. Tolkien",
+                "_formatted": {
+                  "id": "2",
+                  "content": "<em>J. R. R.</em> Tolkien"
+                }
+              },
+              {
+                "id": 3,
+                "content": "jrr Tolkien",
+                "_formatted": {
+                  "id": "3",
+                  "content": "<em>jrr</em> Tolkien"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "jrr", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 3,
+                "content": "jrr Tolkien",
+                "_formatted": {
+                  "id": "3",
+                  "content": "<em>jrr</em> Tolkien"
+                }
+              },
+              {
+                "id": 1,
+                "content": "J.R.R. Tolkien",
+                "_formatted": {
+                  "id": "1",
+                  "content": "<em>J.R.R.</em> Tolkien"
+                }
+              },
+              {
+                "id": 2,
+                "content": "J. R. R. Tolkien",
+                "_formatted": {
+                  "id": "2",
+                  "content": "<em>J. R. R.</em> Tolkien"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "J. R. R.", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 2,
+                "content": "J. R. R. Tolkien",
+                "_formatted": {
+                  "id": "2",
+                  "content": "<em>J. R. R.</em> Tolkien"
+                }
+              },
+              {
+                "id": 1,
+                "content": "J.R.R. Tolkien",
+                "_formatted": {
+                  "id": "1",
+                  "content": "<em>J.R.R.</em> Tolkien"
+                }
+              },
+              {
+                "id": 3,
+                "content": "jrr Tolkien",
+                "_formatted": {
+                  "id": "3",
+                  "content": "<em>jrr</em> Tolkien"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    // Only update dictionary, the synonyms should be recomputed.
+    let (_response, _code) = index
+        .update_settings(json!({
+            "dictionary": ["J.R.R.", "J. R. R.", "J.K.", "J. K."],
+        }))
+        .await;
+    index.wait_task(2).await;
+
+    index
+        .search(json!({"q": "jk", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 6,
+                "content": "jk Rowlings",
+                "_formatted": {
+                  "id": "6",
+                  "content": "<em>jk</em> Rowlings"
+                }
+              },
+              {
+                "id": 4,
+                "content": "J.K. Rowlings",
+                "_formatted": {
+                  "id": "4",
+                  "content": "<em>J.K.</em> Rowlings"
+                }
+              },
+              {
+                "id": 5,
+                "content": "J. K. Rowlings",
+                "_formatted": {
+                  "id": "5",
+                  "content": "<em>J. K.</em> Rowlings"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "J.K.", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 4,
+                "content": "J.K. Rowlings",
+                "_formatted": {
+                  "id": "4",
+                  "content": "<em>J.K.</em> Rowlings"
+                }
+              },
+              {
+                "id": 5,
+                "content": "J. K. Rowlings",
+                "_formatted": {
+                  "id": "5",
+                  "content": "<em>J. K.</em> Rowlings"
+                }
+              },
+              {
+                "id": 6,
+                "content": "jk Rowlings",
+                "_formatted": {
+                  "id": "6",
+                  "content": "<em>jk</em> Rowlings"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "J. K.", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 5,
+                "content": "J. K. Rowlings",
+                "_formatted": {
+                  "id": "5",
+                  "content": "<em>J. K.</em> Rowlings"
+                }
+              },
+              {
+                "id": 4,
+                "content": "J.K. Rowlings",
+                "_formatted": {
+                  "id": "4",
+                  "content": "<em>J.K.</em> Rowlings"
+                }
+              },
+              {
+                "id": 6,
+                "content": "jk Rowlings",
+                "_formatted": {
+                  "id": "6",
+                  "content": "<em>jk</em> Rowlings"
+                }
+              },
+              {
+                "id": 2,
+                "content": "J. R. R. Tolkien",
+                "_formatted": {
+                  "id": "2",
+                  "content": "<em>J. R.</em> R. Tolkien"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+}
@@ -1,11 +1,13 @@
 use std::time::Duration;

 use actix_rt::time::sleep;
+use meili_snap::{json_string, snapshot};
 use meilisearch::option::ScheduleSnapshot;
 use meilisearch::Opt;

 use crate::common::server::default_settings;
 use crate::common::{GetAllDocumentsOptions, Server};
+use crate::json;

 macro_rules! verify_snapshot {
     (
@@ -44,7 +46,7 @@ async fn perform_snapshot() {

     let index = server.index("test");
     index
-        .update_settings(serde_json::json! ({
+        .update_settings(json! ({
             "searchableAttributes": [],
         }))
         .await;
@@ -90,3 +92,95 @@ async fn perform_snapshot() {
         server.index("test1").settings(),
     );
 }
+
+#[actix_rt::test]
+async fn perform_on_demand_snapshot() {
+    let temp = tempfile::tempdir().unwrap();
+    let snapshot_dir = tempfile::tempdir().unwrap();
+
+    let options =
+        Opt { snapshot_dir: snapshot_dir.path().to_owned(), ..default_settings(temp.path()) };
+
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let index = server.index("catto");
+    index
+        .update_settings(json! ({
+            "searchableAttributes": [],
+        }))
+        .await;
+
+    index.load_test_set().await;
+
+    server.index("doggo").create(Some("bone")).await;
+    index.wait_task(2).await;
+
+    server.index("doggo").create(Some("bone")).await;
+    index.wait_task(2).await;
+
+    let (task, code) = server.create_snapshot().await;
+    snapshot!(code, @"202 Accepted");
+    snapshot!(json_string!(task, { ".enqueuedAt" => "[date]" }), @r###"
+    {
+      "taskUid": 4,
+      "indexUid": null,
+      "status": "enqueued",
+      "type": "snapshotCreation",
+      "enqueuedAt": "[date]"
+    }
+    "###);
+    let task = index.wait_task(task.uid()).await;
+    snapshot!(json_string!(task, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
+    {
+      "uid": 4,
+      "indexUid": null,
+      "status": "succeeded",
+      "type": "snapshotCreation",
+      "canceledBy": null,
+      "error": null,
+      "duration": "[duration]",
+      "enqueuedAt": "[date]",
+      "startedAt": "[date]",
+      "finishedAt": "[date]"
+    }
+    "###);
+
+    let temp = tempfile::tempdir().unwrap();
+
+    let snapshots: Vec<String> = std::fs::read_dir(&snapshot_dir)
+        .unwrap()
+        .map(|entry| entry.unwrap().path().file_name().unwrap().to_str().unwrap().to_string())
+        .collect();
+    meili_snap::snapshot!(format!("{snapshots:?}"), @r###"["db.snapshot"]"###);
+
+    let snapshot_path = snapshot_dir.path().to_owned().join("db.snapshot");
+    #[cfg_attr(windows, allow(unused))]
+    let snapshot_meta = std::fs::metadata(&snapshot_path).unwrap();
+
+    #[cfg(unix)]
+    {
+        use std::os::unix::fs::PermissionsExt;
+        let mode = snapshot_meta.permissions().mode();
+        // rwxrwxrwx
+        meili_snap::snapshot!(format!("{:b}", mode), @"1000000100100100");
+    }
+
+    let options = Opt { import_snapshot: Some(snapshot_path), ..default_settings(temp.path()) };
+
+    let snapshot_server = Server::new_with_options(options).await.unwrap();
+
+    verify_snapshot!(server, snapshot_server, |server| =>
+        server.list_indexes(None, None),
+        // for some reason the db sizes differ. this may be due to the compaction options we have
+        // set when performing the snapshot
+        //server.stats(),
+
+        // The original instance contains the snapshotCreation task, while the snapshotted-instance does not. For this reason we need to compare the task queue **after** the task 4
+        server.tasks_filter("?from=2"),
+
+        server.index("catto").get_all_documents(GetAllDocumentsOptions::default()),
+        server.index("catto").settings(),
+        server.index("doggo").get_all_documents(GetAllDocumentsOptions::default()),
+        server.index("doggo").settings(),
+    );
+}
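An aside on the permissions check in `perform_on_demand_snapshot`: the inline `// rwxrwxrwx` comment does not match the asserted bit pattern, which actually decodes to a read-only regular file. A quick self-contained check of the arithmetic (plain Rust, no external crates):

```rust
fn main() {
    // The mode snapshotted in the test above, written as a binary literal.
    let mode: u32 = 0b1000000100100100;
    // 0o100444 = S_IFREG (regular file) | 0o444 (r--r--r--): read-only,
    // not rwxrwxrwx as the inline comment suggests.
    assert_eq!(mode, 0o100444);
    println!("{mode:o}"); // prints 100444
}
```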
@@ -1,8 +1,8 @@
-use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;

 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn get_settings_unexisting_index() {
@@ -1,7 +1,7 @@
 use meili_snap::*;
-use serde_json::json;

 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn swap_indexes_bad_format() {
@@ -1,9 +1,9 @@
 mod errors;

 use meili_snap::{json_string, snapshot};
-use serde_json::json;

 use crate::common::{GetAllDocumentsOptions, Server};
+use crate::json;

 #[actix_rt::test]
 async fn swap_indexes() {
@@ -1,11 +1,11 @@
 mod errors;

 use meili_snap::insta::assert_json_snapshot;
-use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;

 use crate::common::Server;
+use crate::json;

 #[actix_rt::test]
 async fn error_get_unexisting_task_status() {
@@ -33,7 +33,7 @@ async fn get_task_status() {
     index.create(None).await;
     index
         .add_documents(
-            serde_json::json!([{
+            json!([{
                 "id": 1,
                 "content": "foobar",
             }]),
@@ -20,7 +20,7 @@ byteorder = "1.4.3"
 charabia = { version = "0.8.3", default-features = false }
 concat-arrays = "0.1.2"
 crossbeam-channel = "0.5.8"
-deserr = "0.5.0"
+deserr = { version = "0.6.0", features = ["actix-web"]}
 either = { version = "1.8.1", features = ["serde"] }
 flatten-serde-json = { path = "../flatten-serde-json" }
 fst = "0.4.7"
@@ -32,18 +32,18 @@ grenad = { version = "0.4.4", default-features = false, features = [
 heed = { git = "https://github.com/meilisearch/heed", tag = "v0.12.7", default-features = false, features = [
     "lmdb", "read-txn-no-tls"
 ] }
-indexmap = { version = "1.9.3", features = ["serde"] }
+indexmap = { version = "2.0.0", features = ["serde"] }
 instant-distance = { version = "0.6.1", features = ["with-serde"] }
 json-depth-checker = { path = "../json-depth-checker" }
 levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] }
-memmap2 = "0.5.10"
+memmap2 = "0.7.1"
 obkv = "0.2.0"
 once_cell = "1.17.1"
 ordered-float = "3.6.0"
 rand_pcg = { version = "0.3.1", features = ["serde1"] }
 rayon = "1.7.0"
 roaring = "0.10.1"
-rstar = { version = "0.10.0", features = ["serde"] }
+rstar = { version = "0.11.0", features = ["serde"] }
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }
 slice-group-by = "0.3.0"
@@ -63,7 +63,10 @@ uuid = { version = "1.3.1", features = ["v4"] }
 filter-parser = { path = "../filter-parser" }

 # documents words self-join
-itertools = "0.10.5"
+itertools = "0.11.0"
+
+# profiling
+puffin = "0.16.0"

 # logging
 log = "0.4.17"
@@ -71,7 +74,7 @@ logging_timer = "1.1.0"
 csv = "1.2.1"

 [dev-dependencies]
-mimalloc = { version = "0.1.29", default-features = false }
+mimalloc = { version = "0.1.37", default-features = false }
 big_s = "1.0.2"
 insta = "1.29.0"
 maplit = "1.0.2"
Some files were not shown because too many files have changed in this diff.