mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-01 10:15:50 +00:00)

Compare commits: v1.3.0...diff-index (199 commits)
Commits (SHA1):

02a40645e2 066221fd2b b8fed737ef c63ff5298b d50408d670 e0dc413521 0f6a0b1ab8
061f490204 5c43ff72c1 c445e9daec 178a9802fa 7d546b9c22 c829feb40b b88fd7994c
096d7705c7 e8f8730467 26ef0b3a07 20394fda04 27161bcd05 04fd44b5e2 9078e60024
8fb96b8274 50ba751244 f36c36e368 c2dcd66d32 d4594306d3 93d0680903 01101d55ac
c0fd3dffb8 c42fd5375f b418c3a756 1cde455758 ca19bae72f 705878ff59 92c280d1c8
181e7a1e53 2e5abb4d2c 44aaf5d9e3 ff0ababf65 c5336af1c5 1567758a56 37953afe1a
de3f992ae4 98f0618065 86b314626d bb79bdb3f8 d429e7da99 584b772248 1806c04a9a
3485e8f1c4 fe697a6685 eb4135f8ae ec4844c3a6 77c3787b78 4f902490b9 1faee92748
5831466525 3cdb3e4eaf 26f34ec7a2 07d36180ad 4c641b79a2 76c05d1b20 ef31ab52a4
34fac115d5 791c5cd874 5bea1092fb 056b2c387d a09686fcbd b4c44603db 393be40179
2c1d60f79b 487d493f49 08af69a33b 9258e5b5bf ddd34a488a 526c2b3602 e8c9367686
9636c5f558 b310830b5d 462b4654c4 abfa7ded25 f2837aaec2 11df155598 651657c03e
b9ad59c969 66aa682e23 256cf33bca 9945cbf9db 03d0f628bd ea78060916 b42d48187a
679c0b0f97 e02d0064bd 7ef3572f11 93285041a9 dc3d9c90d9 287cf25d39 66aa6d5871
8ac5b765bc cea93e9a37 085aad0a94 e9b62aacb3 456960d2c7 3dda176723 af0f6f0bf0
ccf3ba3f32 65528a3e06 6db80b0836 cdb4b3e024 8c0ebd1331 5130e06b41 08e27ef73f
914b125c5f e59d7f238c 717b069907 7ea154673a b947f3bb9d 4c35817c5f c53841e166
fd81945597 794e491152 cab27c2ab4 624fa9052f 359ede4862 60c11dbdbd dacee40ebc
6089083a8e cc2c19d4c3 a5c56fac8a e4e49e63d0 00bd7bd19a ef3d098b4d 8084cf29f3
5a7c1bde84 6b2d671be7 43c13faeda 29adfc2f68 064ee95b1c 604d533b31 44c1900f36
04671d0751 4f4c669d50 8dc5acf998 fc2590fc9d 35758db9ec 4988199bb9 83991ee770
9d061cec26 4a21fecf67 ae8e69c030 fe819a9d80 e338ceb97f 75c87d5391 dd57873f8e
3dda93d50f 117146ec4e 884b4d47b1 023cb0c2de f391039a6f fcdd20b533 b45c36cd71
151c31c18f a8ad0902d3 e917dbdebb ba919b6123 9d5e3457e5 04694071fe b0c1a9504a
d57026cd96 41c9e8856a d4ff59fcf5 9c485f8563 d8d12d5979 0597a97c84 2dfbb6813a
8f589a5cce 0b8bbd8750 eef95de30e 13a13a4862 e691c92ed5 928ab2f9b1 7c18a9375f
05a311f9be 9b1b9b409e 7f555f23e8 a0bfc9f63a 3155264381 42400c381e 08c7dab528
8590687515 8f5d127b1e 2b4160ebb9 8ba1c8f88f 8e7edf8ea7 9daccdf7f0 437ee55c57
b1717865ea 176f716292 40ad19ba9e
.github/workflows/benchmarks-manual.yml (2 changed lines)

```diff
@@ -74,4 +74,4 @@ jobs:
           echo "${{ steps.file.outputs.basename }}.json has just been pushed."
           echo 'How to compare this benchmark with another one?'
           echo ' - Check the available files with: ./benchmarks/scripts/list.sh'
-          echo " - Run the following command: ./benchmaks/scipts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"
+          echo " - Run the following command: ./benchmaks/scripts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"
```
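The fixed echo points contributors at the comparison scripts; in practice the two scripts are used like this (result file names are illustrative, not from the diff):

```sh
./benchmarks/scripts/list.sh     # list the benchmark result files already uploaded
./benchmarks/scripts/compare.sh songs_main_11111111.json songs_my-branch_22222222.json
```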
.github/workflows/dependency-issue.yml (4 changed lines)

```diff
@@ -2,8 +2,8 @@ name: Create issue to upgrade dependencies
 
 on:
   schedule:
-    # Run the first of the month, every 3 month
-    - cron: '0 0 1 */3 *'
+    # Run the first of the month, every 6 month
+    - cron: '0 0 1 */6 *'
   workflow_dispatch:
 
 jobs:
```
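For readers less fluent in cron, the new expression breaks down as follows (annotation only, not part of the workflow):

```sh
# '0 0 1 */6 *'
#  │ │ │  │  └── day-of-week: any
#  │ │ │  └──── month: every sixth month (January and July)
#  │ │ └─────── day-of-month: the 1st
#  │ └───────── hour: 00
#  └─────────── minute: 0
# The previous '*/3' fired in January, April, July, and October.
```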
.github/workflows/publish-apt-brew-pkg.yml (3 changed lines)

```diff
@@ -35,7 +35,7 @@ jobs:
       - name: Build deb package
         run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/debian/meilisearch.deb
@@ -53,5 +53,6 @@ jobs:
         uses: mislav/bump-homebrew-formula-action@v2
         with:
           formula-name: meilisearch
+          formula-path: Formula/m/meilisearch.rb
         env:
           COMMITTER_TOKEN: ${{ secrets.HOMEBREW_COMMITTER_TOKEN }}
```
.github/workflows/publish-binaries.yml (8 changed lines)

```diff
@@ -54,7 +54,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/meilisearch
@@ -87,7 +87,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/${{ matrix.artifact_name }}
@@ -121,7 +121,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
@@ -183,7 +183,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
```
.github/workflows/publish-docker-images.yml (8 changed lines)

```diff
@@ -57,10 +57,10 @@ jobs:
           echo "date=$commit_date" >> $GITHUB_OUTPUT
 
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
 
       - name: Login to Docker Hub
         uses: docker/login-action@v2
@@ -70,7 +70,7 @@ jobs:
 
       - name: Docker meta
         id: meta
-        uses: docker/metadata-action@v4
+        uses: docker/metadata-action@v5
         with:
           images: getmeili/meilisearch
           # Prevent `latest` to be updated for each new tag pushed.
@@ -83,7 +83,7 @@ jobs:
             type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' && steps.check-tag-format.outputs.latest == 'true' }}
 
       - name: Build and push
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           push: true
           platforms: linux/amd64,linux/arm64
```
.github/workflows/sdks-tests.yml (278 changed lines)

```diff
@@ -14,6 +14,7 @@ on:
 env:
   MEILI_MASTER_KEY: 'masterKey'
   MEILI_NO_ANALYTICS: 'true'
+  DISABLE_COVERAGE: 'true'
 
 jobs:
   define-docker-image:
@@ -30,6 +31,117 @@ jobs:
           if [[ $event == 'workflow_dispatch' ]]; then
             echo "docker-image=${{ github.event.inputs.docker_image }}" >> $GITHUB_OUTPUT
           fi
+      - name: Docker image is ${{ steps.define-image.outputs.docker-image }}
+        run: echo "Docker image is ${{ steps.define-image.outputs.docker-image }}"
+
+  ##########
+  ## SDKs ##
+  ##########
+
+  meilisearch-dotnet-tests:
+    needs: define-docker-image
+    name: .NET SDK tests
+    runs-on: ubuntu-latest
+    env:
+      MEILISEARCH_VERSION: ${{ needs.define-docker-image.outputs.docker-image }}
+
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-dotnet
+      - name: Setup .NET Core
+        uses: actions/setup-dotnet@v3
+        with:
+          dotnet-version: "6.0.x"
+      - name: Install dependencies
+        run: dotnet restore
+      - name: Build
+        run: dotnet build --configuration Release --no-restore
+      - name: Meilisearch (latest version) setup with Docker
+        run: docker compose up -d
+      - name: Run tests
+        run: dotnet test --no-restore --verbosity normal
+
+  meilisearch-dart-tests:
+    needs: define-docker-image
+    name: Dart SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-dart
+      - uses: dart-lang/setup-dart@v1
+        with:
+          sdk: 3.1.1
+      - name: Install dependencies
+        run: dart pub get
+      - name: Run integration tests
+        run: dart test --concurrency=4
+
+  meilisearch-go-tests:
+    needs: define-docker-image
+    name: Go SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - name: Set up Go
+        uses: actions/setup-go@v4
+        with:
+          go-version: stable
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-go
+      - name: Get dependencies
+        run: |
+          go get -v -t -d ./...
+          if [ -f Gopkg.toml ]; then
+              curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
+              dep ensure
+          fi
+      - name: Run integration tests
+        run: go test -v ./...
+
+  meilisearch-java-tests:
+    needs: define-docker-image
+    name: Java SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-java
+      - name: Set up Java
+        uses: actions/setup-java@v3
+        with:
+          java-version: 8
+          distribution: 'zulu'
+          cache: gradle
+      - name: Grant execute permission for gradlew
+        run: chmod +x gradlew
+      - name: Build and run unit and integration tests
+        run: ./gradlew build integrationTest
 
   meilisearch-js-tests:
     needs: define-docker-image
@@ -66,33 +178,6 @@ jobs:
       - name: Run Browser env
         run: yarn test:env:browser
 
-  instant-meilisearch-tests:
-    needs: define-docker-image
-    name: instant-meilisearch tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/instant-meilisearch
-      - name: Setup node
-        uses: actions/setup-node@v3
-        with:
-          cache: yarn
-      - name: Install dependencies
-        run: yarn install
-      - name: Run tests
-        run: yarn test
-      - name: Build all the playgrounds and the packages
-        run: yarn build
-
   meilisearch-php-tests:
     needs: define-docker-image
     name: PHP SDK tests
@@ -111,8 +196,6 @@ jobs:
           repository: meilisearch/meilisearch-php
       - name: Install PHP
         uses: shivammathur/setup-php@v2
-        with:
-          coverage: none
       - name: Validate composer.json and composer.lock
         run: composer validate
       - name: Install dependencies
@@ -149,36 +232,6 @@ jobs:
       - name: Test with pytest
         run: pipenv run pytest
 
-  meilisearch-go-tests:
-    needs: define-docker-image
-    name: Go SDK tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - name: Set up Go
-        uses: actions/setup-go@v4
-        with:
-          go-version: stable
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-go
-      - name: Get dependencies
-        run: |
-          go get -v -t -d ./...
-          if [ -f Gopkg.toml ]; then
-              curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
-              dep ensure
-          fi
-      - name: Run integration tests
-        run: go test -v ./...
-
   meilisearch-ruby-tests:
     needs: define-docker-image
     name: Ruby SDK tests
@@ -224,3 +277,110 @@ jobs:
         run: cargo build --verbose
       - name: Run tests
         run: cargo test --verbose
+
+  meilisearch-swift-tests:
+    needs: define-docker-image
+    name: Swift SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-swift
+      - name: Run tests
+        run: swift test
+
+  ########################
+  ## FRONT-END PLUGINS ##
+  ########################
+
+  meilisearch-js-plugins-tests:
+    needs: define-docker-image
+    name: meilisearch-js-plugins tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-js-plugins
+      - name: Setup node
+        uses: actions/setup-node@v3
+        with:
+          cache: yarn
+      - name: Install dependencies
+        run: yarn install
+      - name: Run tests
+        run: yarn test
+      - name: Build all the playgrounds and the packages
+        run: yarn build
+
+  ########################
+  ## BACK-END PLUGINS ###
+  ########################
+
+  meilisearch-rails-tests:
+    needs: define-docker-image
+    name: meilisearch-rails tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-rails
+      - name: Set up Ruby 3
+        uses: ruby/setup-ruby@v1
+        with:
+          ruby-version: 3
+          bundler-cache: true
+      - name: Run tests
+        run: bundle exec rspec
+
+  meilisearch-symfony-tests:
+    needs: define-docker-image
+    name: meilisearch-symfony tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-symfony
+      - name: Install PHP
+        uses: shivammathur/setup-php@v2
+        with:
+          tools: composer:v2, flex
+      - name: Validate composer.json and composer.lock
+        run: composer validate
+      - name: Install dependencies
+        run: composer install --prefer-dist --no-progress --quiet
+      - name: Remove doctrine/annotations
+        run: composer remove --dev doctrine/annotations
+      - name: Run test suite
+        run: composer test:unit
```
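Every SDK job above follows the same pattern: the Meilisearch image resolved by `define-docker-image` runs as a service container on port 7700 with the shared master key. A sketch of reproducing that environment locally, where the `nightly` tag is an assumption about what the job resolves to on a scheduled run:

```sh
docker run -d -p 7700:7700 \
  -e MEILI_MASTER_KEY=masterKey \
  -e MEILI_NO_ANALYTICS=true \
  getmeili/meilisearch:nightly    # tag assumed; the workflow computes the real one
curl -s -H 'Authorization: Bearer masterKey' http://localhost:7700/health
```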
.github/workflows/test-suite.yml (39 changed lines)

```diff
@@ -30,20 +30,20 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - name: Run test with Rust stable
+      - name: Setup test with Rust stable
         if: github.event_name != 'schedule'
         uses: actions-rs/toolchain@v1
         with:
           toolchain: stable
           override: true
-      - name: Run test with Rust nightly
-        if: github.event_name == 'schedule'
+      - name: Setup test with Rust nightly
+        if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
         uses: actions-rs/toolchain@v1
         with:
           toolchain: nightly
           override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.4.0
+        uses: Swatinem/rust-cache@v2.6.2
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -65,7 +65,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.4.0
+        uses: Swatinem/rust-cache@v2.6.2
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -78,12 +78,12 @@ jobs:
           args: --locked --release --all
 
   test-all-features:
-    name: Tests all features on cron schedule only
+    name: Tests all features
     runs-on: ubuntu-latest
     container:
       # Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
       image: ubuntu:18.04
-    if: github.event_name == 'schedule'
+    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
       - uses: actions/checkout@v3
       - name: Install needed dependencies
@@ -110,24 +110,27 @@ jobs:
     runs-on: ubuntu-latest
     container:
       image: ubuntu:18.04
-    if: github.event_name == 'schedule'
+    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
      - uses: actions/checkout@v3
      - name: Install needed dependencies
        run: |
          apt-get update
          apt-get install --assume-yes build-essential curl
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - name: Run cargo tree without default features and check lindera is not present
        run: |
-          cargo tree -f '{p} {f}' -e normal --no-default-features | grep lindera -vqz
+          if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -vqz lindera; then
+            echo "lindera has been found in the sources and it shouldn't"
+            exit 1
+          fi
      - name: Run cargo tree with default features and check lindera is pressent
        run: |
          cargo tree -f '{p} {f}' -e normal | grep lindera -qz
 
   # We run tests in debug also, to make sure that the debug_assertions are hit
   test-debug:
     name: Run tests in debug
@@ -146,7 +149,7 @@ jobs:
           toolchain: stable
           override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.4.0
+        uses: Swatinem/rust-cache@v2.6.2
       - name: Run tests in debug
         uses: actions-rs/cargo@v1
         with:
@@ -161,11 +164,11 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.69.0
+          toolchain: 1.71.1
           override: true
           components: clippy
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.4.0
+        uses: Swatinem/rust-cache@v2.6.2
       - name: Run cargo clippy
         uses: actions-rs/cargo@v1
         with:
@@ -184,7 +187,7 @@ jobs:
           override: true
           components: rustfmt
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.4.0
+        uses: Swatinem/rust-cache@v2.6.2
       - name: Run cargo fmt
         # Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
         # Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
```
.github/workflows/trigger-benchmarks-on-message.yml (new file, 84 lines)

````diff
@@ -0,0 +1,84 @@
+name: Benchmarks (PR)
+on: issue_comment
+permissions:
+  issues: write
+
+env:
+  GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
+
+jobs:
+  run-benchmarks-on-comment:
+    name: Run and upload benchmarks
+    runs-on: benchmarks
+    timeout-minutes: 4320 # 72h
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+
+      - name: Check for Command
+        id: command
+        uses: xt0rted/slash-command-action@v2
+        with:
+          command: benchmark
+          reaction-type: "eyes"
+          repo-token: ${{ env.GH_TOKEN }}
+
+      # Set variables
+      - name: Set current branch name
+        shell: bash
+        run: echo "name=$(echo ${GITHUB_REF#refs/heads/})" >> $GITHUB_OUTPUT
+        id: current_branch
+      - name: Set normalized current branch name # Replace `/` by `_` in branch name to avoid issues when pushing to S3
+        shell: bash
+        run: echo "name=$(echo ${GITHUB_REF#refs/heads/} | tr '/' '_')" >> $GITHUB_OUTPUT
+        id: normalized_current_branch
+      - name: Set shorter commit SHA
+        shell: bash
+        run: echo "short=$(echo $GITHUB_SHA | cut -c1-8)" >> $GITHUB_OUTPUT
+        id: commit_sha
+      - name: Set file basename with format "dataset_branch_commitSHA"
+        shell: bash
+        run: echo "basename=$(echo ${{ steps.command.outputs.command-arguments }}_${{ steps.normalized_current_branch.outputs.name }}_${{ steps.commit_sha.outputs.short }})" >> $GITHUB_OUTPUT
+        id: file
+
+      # Run benchmarks
+      - name: Run benchmarks - Dataset ${{ steps.command.outputs.command-arguments }} - Branch ${{ steps.current_branch.outputs.name }} - Commit ${{ steps.commit_sha.outputs.short }}
+        run: |
+          cd benchmarks
+          cargo bench --bench ${{ steps.command.outputs.command-arguments }} -- --save-baseline ${{ steps.file.outputs.basename }}
+
+      # Generate critcmp files
+      - name: Install critcmp
+        uses: taiki-e/install-action@v2
+        with:
+          tool: critcmp
+      - name: Export cripcmp file
+        run: |
+          critcmp --export ${{ steps.file.outputs.basename }} > ${{ steps.file.outputs.basename }}.json
+
+      # Upload benchmarks
+      - name: Upload ${{ steps.file.outputs.basename }}.json to DO Spaces # DigitalOcean Spaces = S3
+        uses: BetaHuhn/do-spaces-action@v2
+        with:
+          access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }}
+          secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }}
+          space_name: ${{ secrets.DO_SPACES_SPACE_NAME }}
+          space_region: ${{ secrets.DO_SPACES_SPACE_REGION }}
+          source: ${{ steps.file.outputs.basename }}.json
+          out_dir: critcmp_results
+
+      # Compute the diff of the benchmarks and send a message on the GitHub PR
+      - name: Compute and send a message in the PR
+        env:
+          GITHUB_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
+        run: |
+          export base=$(git log --pretty=%p -n 1)
+          echo 'Here are your benchmarks diff 👊' >> body.txt
+          echo '```' >> body.txt
+          ./benchmarks/scripts/compare.sh $base ${{ steps.file.outputs.basename }}.json >> body.txt
+          echo '```' >> body.txt
+          gh pr comment ${GITHUB_REF#refs/heads/} --body-file body.txt
````
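End to end, a `/benchmark <bench-name>` PR comment drives the steps above; the rough shell equivalent, with a hypothetical bench name and basename, is:

```sh
# Triggered by a PR comment such as:  /benchmark search_songs   (bench name hypothetical)
cd benchmarks
cargo bench --bench search_songs -- --save-baseline search_songs_my-branch_abcd1234
critcmp --export search_songs_my-branch_abcd1234 > search_songs_my-branch_abcd1234.json
# The JSON is then pushed to DigitalOcean Spaces and the compare.sh output
# is posted back on the PR as a comment.
```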
Cargo.lock (generated, 768 changed lines): diff suppressed because it is too large.
```diff
@@ -18,7 +18,7 @@ members = [
 ]
 
 [workspace.package]
-version = "1.3.0"
+version = "1.4.0"
 authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"
```
PROFILING.md (new file, 19 lines)

```diff
@@ -0,0 +1,19 @@
+# Profiling Meilisearch
+
+Search engine technologies are complex pieces of software that require thorough profiling tools. We chose to use [Puffin](https://github.com/EmbarkStudios/puffin), which the Rust gaming industry uses extensively. You can export and import the profiling reports using the top bar's _File_ menu options.
+
+![](assets/profiling-example.png)
+
+## Profiling the Indexing Process
+
+When you enable the `profile-with-puffin` feature of Meilisearch, a Puffin HTTP server will run on Meilisearch and listen on the default _0.0.0.0:8585_ address. This server will record a "frame" whenever it executes the `IndexScheduler::tick` method.
+
+Once your Meilisearch is running and awaits new indexation operations, you must [install and run the `puffin_viewer` tool](https://github.com/EmbarkStudios/puffin/tree/main/puffin_viewer) to see the profiling results. I advise you to run the viewer with the `RUST_LOG=puffin_http::client=debug` environment variable to see the client trying to connect to your server.
+
+Another piece of advice on the Puffin viewer UI interface is to consider the _Merge children with same ID_ option. It can hide the exact actual timings at which events were sent. Please turn it off when you see strange gaps on the Flamegraph. It can help.
+
+## Profiling the Search Process
+
+We still need to take the time to profile the search side of the engine with Puffin. It would require time to profile the filtering phase, query parsing, creation, and execution. We could even profile the Actix HTTP server.
+
+The only issue we see is the framing system. Puffin requires a global frame-based profiling phase, which collides with Meilisearch's ability to accept and answer multiple requests on different threads simultaneously.
```
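Condensed into commands, the loop PROFILING.md describes looks roughly like this; the viewer's `--url` flag is assumed from the Puffin project's docs, not from this diff:

```sh
cargo run --release --features profile-with-puffin    # Meilisearch with the Puffin server on 0.0.0.0:8585
cargo install puffin_viewer                           # the viewer ships as a separate crate
RUST_LOG=puffin_http::client=debug puffin_viewer --url 127.0.0.1:8585
```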
README.md (68 changed lines)

```diff
@@ -1,16 +1,20 @@
 <p align="center">
-  <img src="assets/meilisearch-logo-light.svg?sanitize=true#gh-light-mode-only">
-  <img src="assets/meilisearch-logo-dark.svg?sanitize=true#gh-dark-mode-only">
+  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=logo#gh-light-mode-only" target="_blank">
+    <img src="assets/meilisearch-logo-light.svg?sanitize=true#gh-light-mode-only">
+  </a>
+  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=logo#gh-dark-mode-only" target="_blank">
+    <img src="assets/meilisearch-logo-dark.svg?sanitize=true#gh-dark-mode-only">
+  </a>
 </p>
 
 <h4 align="center">
-  <a href="https://www.meilisearch.com">Website</a> |
+  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Website</a> |
   <a href="https://roadmap.meilisearch.com/tabs/1-under-consideration">Roadmap</a> |
-  <a href="https://www.meilisearch.com/pricing?utm_campaign=oss&utm_source=engine&utm_medium=meilisearch">Meilisearch Cloud</a> |
-  <a href="https://blog.meilisearch.com">Blog</a> |
-  <a href="https://www.meilisearch.com/docs">Documentation</a> |
-  <a href="https://www.meilisearch.com/docs/faq">FAQ</a> |
-  <a href="https://discord.meilisearch.com">Discord</a>
+  <a href="https://www.meilisearch.com/pricing?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Meilisearch Cloud</a> |
+  <a href="https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Blog</a> |
+  <a href="https://www.meilisearch.com/docs?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Documentation</a> |
+  <a href="https://www.meilisearch.com/docs/faq?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">FAQ</a> |
+  <a href="https://discord.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Discord</a>
 </h4>
 
 <p align="center">
@@ -24,40 +28,40 @@
 Meilisearch helps you shape a delightful search experience in a snap, offering features that work out-of-the-box to speed up your workflow.
 
 <p align="center" name="demo">
-  <a href="https://where2watch.meilisearch.com/#gh-light-mode-only" target="_blank">
+  <a href="https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-gif#gh-light-mode-only" target="_blank">
     <img src="assets/demo-light.gif#gh-light-mode-only" alt="A bright colored application for finding movies screening near the user">
   </a>
-  <a href="https://where2watch.meilisearch.com/#gh-dark-mode-only" target="_blank">
+  <a href="https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-gif#gh-dark-mode-only" target="_blank">
     <img src="assets/demo-dark.gif#gh-dark-mode-only" alt="A dark colored application for finding movies screening near the user">
   </a>
 </p>
 
-🔥 [**Try it!**](https://where2watch.meilisearch.com/) 🔥
+🔥 [**Try it!**](https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-link) 🔥
 
 ## ✨ Features
 
 - **Search-as-you-type:** find search results in less than 50 milliseconds
-- **[Typo tolerance](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
-- **[Filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering) and [faceted search](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
-- **[Sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting):** sort results based on price, date, or pretty much anything else your users need
-- **[Synonym support](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy#synonyms):** configure synonyms to include more relevant content in your search results
-- **[Geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch):** filter and sort documents based on geographic data
-- **[Extensive language support](https://www.meilisearch.com/docs/learn/what_is_meilisearch/language):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
-- **[Security management](https://www.meilisearch.com/docs/learn/security/master_api_keys):** control which users can access what data with API keys that allow fine-grained permissions handling
-- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/tenant_tokens):** personalize search results for any number of application tenants
+- **[Typo tolerance](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
+- **[Filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) and [faceted search](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
+- **[Sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** sort results based on price, date, or pretty much anything else your users need
+- **[Synonym support](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features#synonyms):** configure synonyms to include more relevant content in your search results
+- **[Geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** filter and sort documents based on geographic data
+- **[Extensive language support](https://www.meilisearch.com/docs/learn/what_is_meilisearch/language?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
+- **[Security management](https://www.meilisearch.com/docs/learn/security/master_api_keys?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** control which users can access what data with API keys that allow fine-grained permissions handling
+- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** personalize search results for any number of application tenants
 - **Highly Customizable:** customize Meilisearch to your specific needs or use our out-of-the-box and hassle-free presets
-- **[RESTful API](https://www.meilisearch.com/docs/reference/api/overview):** integrate Meilisearch in your technical stack with our plugins and SDKs
+- **[RESTful API](https://www.meilisearch.com/docs/reference/api/overview?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** integrate Meilisearch in your technical stack with our plugins and SDKs
 - **Easy to install, deploy, and maintain**
 
 ## 📖 Documentation
 
-You can consult Meilisearch's documentation at [https://www.meilisearch.com/docs](https://www.meilisearch.com/docs/).
+You can consult Meilisearch's documentation at [https://www.meilisearch.com/docs](https://www.meilisearch.com/docs/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=docs).
 
 ## 🚀 Getting started
 
-For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://www.meilisearch.com/docs/learn/getting_started/quick_start) guide.
+For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://www.meilisearch.com/docs/learn/getting_started/quick_start?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=get-started) guide.
 
-You may also want to check out [Meilisearch 101](https://www.meilisearch.com/docs/learn/getting_started/filtering_and_sorting) for an introduction to some of Meilisearch's most popular features.
+You may also want to check out [Meilisearch 101](https://www.meilisearch.com/docs/learn/getting_started/filtering_and_sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=get-started) for an introduction to some of Meilisearch's most popular features.
 
 ## ⚡ Supercharge your Meilisearch experience
 
@@ -67,29 +71,29 @@ Say goodbye to server deployment and manual updates with [Meilisearch Cloud](htt
 
 Install one of our SDKs in your project for seamless integration between Meilisearch and your favorite language or framework!
 
-Take a look at the complete [Meilisearch integration list](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks).
+Take a look at the complete [Meilisearch integration list](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=sdks-link).
 
-[](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks)
+[](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=sdks-logos)
 
 ## ⚙️ Advanced usage
 
-Experienced users will want to keep our [API Reference](https://www.meilisearch.com/docs/reference/api/overview) close at hand.
+Experienced users will want to keep our [API Reference](https://www.meilisearch.com/docs/reference/api/overview?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced) close at hand.
 
-We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering), [sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting), [geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch), [API keys](https://www.meilisearch.com/docs/learn/security/master_api_keys), and [tenant tokens](https://www.meilisearch.com/docs/learn/security/tenant_tokens).
+We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), [sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), [geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), [API keys](https://www.meilisearch.com/docs/learn/security/master_api_keys?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), and [tenant tokens](https://www.meilisearch.com/docs/learn/security/tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced).
 
-Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://www.meilisearch.com/docs/learn/core_concepts/documents) and [indexes](https://www.meilisearch.com/docs/learn/core_concepts/indexes).
+Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://www.meilisearch.com/docs/learn/core_concepts/documents?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced) and [indexes](https://www.meilisearch.com/docs/learn/core_concepts/indexes?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced).
 
 ## 📊 Telemetry
 
-Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry#how-to-disable-data-collection) whenever you want.
+Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) whenever you want.
 
 To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data.
 
-If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry) of our documentation.
+If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) of our documentation.
 
 ## 📫 Get in touch!
 
-Meilisearch is a search engine created by [Meili](https://www.welcometothejungle.com/en/companies/meilisearch), a software development company based in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/)
+Meilisearch is a search engine created by [Meili](https://www.welcometothejungle.com/en/companies/meilisearch), a software development company based in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=contact)
 
 🗞 [Subscribe to our newsletter](https://meilisearch.us2.list-manage.com/subscribe?u=27870f7b71c908a8b359599fb&id=79582d828e) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.
```
assets/profiling-example.png (new binary file, 1.2 MiB; not shown)
```diff
@@ -14,7 +14,7 @@ license.workspace = true
 anyhow = "1.0.70"
 csv = "1.2.1"
 milli = { path = "../milli" }
-mimalloc = { version = "0.1.36", default-features = false }
+mimalloc = { version = "0.1.37", default-features = false }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }
 
 [dev-dependencies]
```
```diff
@@ -262,6 +262,9 @@ pub(crate) mod test {
             sortable_attributes: Setting::Set(btreeset! { S("age") }),
             ranking_rules: Setting::NotSet,
             stop_words: Setting::NotSet,
+            non_separator_tokens: Setting::NotSet,
+            separator_tokens: Setting::NotSet,
+            dictionary: Setting::NotSet,
             synonyms: Setting::NotSet,
             distinct_attribute: Setting::NotSet,
             typo_tolerance: Setting::NotSet,
```
```diff
@@ -340,6 +340,9 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
                 }
             },
             stop_words: settings.stop_words.into(),
+            non_separator_tokens: v6::Setting::NotSet,
+            separator_tokens: v6::Setting::NotSet,
+            dictionary: v6::Setting::NotSet,
             synonyms: settings.synonyms.into(),
             distinct_attribute: settings.distinct_attribute.into(),
             typo_tolerance: match settings.typo_tolerance {
```
```diff
@@ -0,0 +1,24 @@
+---
+source: dump/src/reader/mod.rs
+expression: spells.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "typo",
+    "words",
+    "proximity",
+    "attribute",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
```
```diff
@@ -0,0 +1,38 @@
+---
+source: dump/src/reader/mod.rs
+expression: products.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "typo",
+    "words",
+    "proximity",
+    "attribute",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {
+    "android": [
+      "phone",
+      "smartphone"
+    ],
+    "iphone": [
+      "phone",
+      "smartphone"
+    ],
+    "phone": [
+      "android",
+      "iphone",
+      "smartphone"
+    ]
+  },
+  "distinctAttribute": null
+}
```
```diff
@@ -0,0 +1,31 @@
+---
+source: dump/src/reader/mod.rs
+expression: movies.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [
+    "genres",
+    "id"
+  ],
+  "sortableAttributes": [
+    "genres",
+    "id"
+  ],
+  "rankingRules": [
+    "typo",
+    "words",
+    "proximity",
+    "attribute",
+    "exactness",
+    "release_date:asc"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
```
@@ -14,6 +14,7 @@ license.workspace = true
 [dependencies]
 nom = "7.1.3"
 nom_locate = "4.1.0"
+unescaper = "0.1.2"
 
 [dev-dependencies]
 insta = "1.29.0"
@@ -62,6 +62,7 @@ pub enum ErrorKind<'a> {
     MisusedGeoRadius,
     MisusedGeoBoundingBox,
     InvalidPrimary,
+    InvalidEscapedNumber,
     ExpectedEof,
     ExpectedValue(ExpectedValueKind),
     MalformedValue,
@@ -147,6 +148,9 @@ impl<'a> Display for Error<'a> {
                 let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) };
                 writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
             }
+            ErrorKind::InvalidEscapedNumber => {
+                writeln!(f, "Found an invalid escaped sequence number: `{}`.", escaped_input)?
+            }
             ErrorKind::ExpectedEof => {
                 writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)?
             }
@@ -545,6 +545,8 @@ impl<'a> std::fmt::Display for Token<'a> {
 
 #[cfg(test)]
 pub mod tests {
+    use FilterCondition as Fc;
+
     use super::*;
 
     /// Create a raw [Token]. You must specify the string that appear BEFORE your element followed by your element
@@ -556,14 +558,22 @@ pub mod tests {
         unsafe { Span::new_from_raw_offset(offset, lines as u32, value, "") }.into()
     }
 
+    fn p(s: &str) -> impl std::fmt::Display + '_ {
+        Fc::parse(s).unwrap().unwrap()
+    }
+
+    #[test]
+    fn parse_escaped() {
+        insta::assert_display_snapshot!(p(r#"title = 'foo\\'"#), @r#"{title} = {foo\}"#);
+        insta::assert_display_snapshot!(p(r#"title = 'foo\\\\'"#), @r#"{title} = {foo\\}"#);
+        insta::assert_display_snapshot!(p(r#"title = 'foo\\\\\\'"#), @r#"{title} = {foo\\\}"#);
+        insta::assert_display_snapshot!(p(r#"title = 'foo\\\\\\\\'"#), @r#"{title} = {foo\\\\}"#);
+        // but it also works with other sequencies
+        insta::assert_display_snapshot!(p(r#"title = 'foo\x20\n\t\"\'"'"#), @"{title} = {foo \n\t\"\'\"}");
+    }
+
     #[test]
     fn parse() {
-        use FilterCondition as Fc;
-
-        fn p(s: &str) -> impl std::fmt::Display + '_ {
-            Fc::parse(s).unwrap().unwrap()
-        }
-
         // Test equal
         insta::assert_display_snapshot!(p("channel = Ponce"), @"{channel} = {Ponce}");
         insta::assert_display_snapshot!(p("subscribers = 12"), @"{subscribers} = {12}");
@@ -171,7 +171,24 @@ pub fn parse_value(input: Span) -> IResult<Token> {
         })
     })?;
 
-    Ok((input, value))
+    match unescaper::unescape(value.value()) {
+        Ok(content) => {
+            if content.len() != value.value().len() {
+                Ok((input, Token::new(value.original_span(), Some(content))))
+            } else {
+                Ok((input, value))
+            }
+        }
+        Err(unescaper::Error::IncompleteStr(_)) => Err(nom::Err::Incomplete(nom::Needed::Unknown)),
+        Err(unescaper::Error::ParseIntError { .. }) => Err(nom::Err::Error(Error::new_from_kind(
+            value.original_span(),
+            ErrorKind::InvalidEscapedNumber,
+        ))),
+        Err(unescaper::Error::InvalidChar { .. }) => Err(nom::Err::Error(Error::new_from_kind(
+            value.original_span(),
+            ErrorKind::MalformedValue,
+        ))),
+    }
 }
 
 fn is_value_component(c: char) -> bool {
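
Note: the following is a minimal standalone sketch of the unescaping behavior the hunk above introduces, assuming the `unescaper` crate (v0.1) pulled in by this change. The length comparison mirrors the check in `parse_value` that decides whether a new `Token` must be allocated.

```rust
// Sketch only: how `unescaper::unescape` drives the new Token rewrite.
fn main() {
    let raw = r"foo\\bar"; // the filter value as written by the user
    match unescaper::unescape(raw) {
        Ok(content) => {
            // `\\` collapses to `\`, so the output is shorter than the input;
            // `parse_value` uses exactly this length difference to detect that
            // an escape sequence was consumed and a new Token is needed.
            assert_eq!(content, r"foo\bar");
            assert_ne!(content.len(), raw.len());
        }
        // IncompleteStr / ParseIntError / InvalidChar are mapped to parser
        // errors in the hunk above.
        Err(err) => eprintln!("invalid escape sequence: {err:?}"),
    }
}
```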
@@ -318,17 +335,17 @@ pub mod test {
             ("\"cha'nnel\"", "cha'nnel", false),
             ("I'm tamo", "I", false),
             // escaped thing but not quote
-            (r#""\\""#, r#"\\"#, false),
-            (r#""\\\\\\""#, r#"\\\\\\"#, false),
-            (r#""aa\\aa""#, r#"aa\\aa"#, false),
+            (r#""\\""#, r#"\"#, true),
+            (r#""\\\\\\""#, r#"\\\"#, true),
+            (r#""aa\\aa""#, r#"aa\aa"#, true),
             // with double quote
             (r#""Hello \"world\"""#, r#"Hello "world""#, true),
-            (r#""Hello \\\"world\\\"""#, r#"Hello \\"world\\""#, true),
+            (r#""Hello \\\"world\\\"""#, r#"Hello \"world\""#, true),
             (r#""I'm \"super\" tamo""#, r#"I'm "super" tamo"#, true),
             (r#""\"\"""#, r#""""#, true),
             // with simple quote
             (r#"'Hello \'world\''"#, r#"Hello 'world'"#, true),
-            (r#"'Hello \\\'world\\\''"#, r#"Hello \\'world\\'"#, true),
+            (r#"'Hello \\\'world\\\''"#, r#"Hello \'world\'"#, true),
             (r#"'I\'m "super" tamo'"#, r#"I'm "super" tamo"#, true),
             (r#"'\'\''"#, r#"''"#, true),
         ];
@@ -350,7 +367,14 @@ pub mod test {
                 "Filter `{}` was not supposed to be escaped",
                 input
             );
-            assert_eq!(token.value(), expected, "Filter `{}` failed.", input);
+            assert_eq!(
+                token.value(),
+                expected,
+                "Filter `{}` failed by giving `{}` instead of `{}`.",
+                input,
+                token.value(),
+                expected
+            );
         }
     }
 
@@ -13,7 +13,7 @@ license.workspace = true
 [dependencies]
 arbitrary = { version = "1.3.0", features = ["derive"] }
 clap = { version = "4.3.0", features = ["derive"] }
-fastrand = "1.9.0"
+fastrand = "2.0.0"
 milli = { path = "../milli" }
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }
@@ -22,6 +22,7 @@ log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 page_size = "0.5.0"
+puffin = "0.16.0"
 roaring = { version = "0.10.1", features = ["serde"] }
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }
@@ -67,10 +67,6 @@ pub(crate) enum Batch {
         op: IndexOperation,
         must_create_index: bool,
     },
-    IndexDocumentDeletionByFilter {
-        index_uid: String,
-        task: Task,
-    },
     IndexCreation {
         index_uid: String,
         primary_key: Option<String>,
@@ -114,6 +110,10 @@ pub(crate) enum IndexOperation {
         documents: Vec<Vec<String>>,
         tasks: Vec<Task>,
     },
+    IndexDocumentDeletionByFilter {
+        index_uid: String,
+        task: Task,
+    },
     DocumentClear {
         index_uid: String,
         tasks: Vec<Task>,
@@ -155,7 +155,6 @@ impl Batch {
             | Batch::TaskDeletion(task)
             | Batch::Dump(task)
             | Batch::IndexCreation { task, .. }
-            | Batch::IndexDocumentDeletionByFilter { task, .. }
             | Batch::IndexUpdate { task, .. } => vec![task.uid],
             Batch::SnapshotCreation(tasks) | Batch::IndexDeletion { tasks, .. } => {
                 tasks.iter().map(|task| task.uid).collect()
@@ -167,6 +166,7 @@ impl Batch {
                 | IndexOperation::DocumentClear { tasks, .. } => {
                     tasks.iter().map(|task| task.uid).collect()
                 }
+                IndexOperation::IndexDocumentDeletionByFilter { task, .. } => vec![task.uid],
                 IndexOperation::SettingsAndDocumentOperation {
                     document_import_tasks: tasks,
                     settings_tasks: other,
@@ -194,8 +194,7 @@ impl Batch {
             IndexOperation { op, .. } => Some(op.index_uid()),
             IndexCreation { index_uid, .. }
             | IndexUpdate { index_uid, .. }
-            | IndexDeletion { index_uid, .. }
-            | IndexDocumentDeletionByFilter { index_uid, .. } => Some(index_uid),
+            | IndexDeletion { index_uid, .. } => Some(index_uid),
         }
     }
 }
@@ -205,6 +204,7 @@ impl IndexOperation {
         match self {
             IndexOperation::DocumentOperation { index_uid, .. }
             | IndexOperation::DocumentDeletion { index_uid, .. }
+            | IndexOperation::IndexDocumentDeletionByFilter { index_uid, .. }
             | IndexOperation::DocumentClear { index_uid, .. }
             | IndexOperation::Settings { index_uid, .. }
             | IndexOperation::DocumentClearAndSetting { index_uid, .. }
@@ -239,9 +239,12 @@ impl IndexScheduler {
                 let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
                 match &task.kind {
                     KindWithContent::DocumentDeletionByFilter { index_uid, .. } => {
-                        Ok(Some(Batch::IndexDocumentDeletionByFilter {
-                            index_uid: index_uid.clone(),
-                            task,
+                        Ok(Some(Batch::IndexOperation {
+                            op: IndexOperation::IndexDocumentDeletionByFilter {
+                                index_uid: index_uid.clone(),
+                                task,
+                            },
+                            must_create_index: false,
                         }))
                     }
                     _ => unreachable!(),
@@ -471,6 +474,8 @@ impl IndexScheduler {
         #[cfg(test)]
         self.maybe_fail(crate::tests::FailureLocation::InsideCreateBatch)?;
 
+        puffin::profile_function!();
+
         let enqueued = &self.get_status(rtxn, Status::Enqueued)?;
         let to_cancel = self.get_kind(rtxn, Kind::TaskCancelation)? & enqueued;
 
@@ -575,6 +580,9 @@ impl IndexScheduler {
             self.maybe_fail(crate::tests::FailureLocation::PanicInsideProcessBatch)?;
             self.breakpoint(crate::Breakpoint::InsideProcessBatch);
         }
+
+        puffin::profile_function!(format!("{:?}", batch));
+
         match batch {
             Batch::TaskCancelation { mut task, previous_started_at, previous_processing_tasks } => {
                 // 1. Retrieve the tasks that matched the query at enqueue-time.
@@ -891,51 +899,6 @@ impl IndexScheduler {
 
                 Ok(tasks)
             }
-            Batch::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
-                let (index_uid, filter) =
-                    if let KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr } =
-                        &task.kind
-                    {
-                        (index_uid, filter_expr)
-                    } else {
-                        unreachable!()
-                    };
-                let index = {
-                    let rtxn = self.env.read_txn()?;
-                    self.index_mapper.index(&rtxn, index_uid)?
-                };
-                let deleted_documents = delete_document_by_filter(filter, index);
-                let original_filter = if let Some(Details::DocumentDeletionByFilter {
-                    original_filter,
-                    deleted_documents: _,
-                }) = task.details
-                {
-                    original_filter
-                } else {
-                    // In the case of a `documentDeleteByFilter` the details MUST be set
-                    unreachable!();
-                };
-
-                match deleted_documents {
-                    Ok(deleted_documents) => {
-                        task.status = Status::Succeeded;
-                        task.details = Some(Details::DocumentDeletionByFilter {
-                            original_filter,
-                            deleted_documents: Some(deleted_documents),
-                        });
-                    }
-                    Err(e) => {
-                        task.status = Status::Failed;
-                        task.details = Some(Details::DocumentDeletionByFilter {
-                            original_filter,
-                            deleted_documents: Some(0),
-                        });
-                        task.error = Some(e.into());
-                    }
-                }
-
-                Ok(vec![task])
-            }
             Batch::IndexCreation { index_uid, primary_key, task } => {
                 let wtxn = self.env.write_txn()?;
                 if self.index_mapper.exists(&wtxn, &index_uid)? {
@@ -1111,6 +1074,8 @@ impl IndexScheduler {
         index: &'i Index,
         operation: IndexOperation,
     ) -> Result<Vec<Task>> {
+        puffin::profile_function!();
+
         match operation {
             IndexOperation::DocumentClear { mut tasks, .. } => {
                 let count = milli::update::ClearDocuments::new(index_wtxn, index).execute()?;
@@ -1292,6 +1257,47 @@ impl IndexScheduler {
 
                 Ok(tasks)
             }
+            IndexOperation::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
+                let filter =
+                    if let KindWithContent::DocumentDeletionByFilter { filter_expr, .. } =
+                        &task.kind
+                    {
+                        filter_expr
+                    } else {
+                        unreachable!()
+                    };
+                let deleted_documents = delete_document_by_filter(index_wtxn, filter, index);
+                let original_filter = if let Some(Details::DocumentDeletionByFilter {
+                    original_filter,
+                    deleted_documents: _,
+                }) = task.details
+                {
+                    original_filter
+                } else {
+                    // In the case of a `documentDeleteByFilter` the details MUST be set
+                    unreachable!();
+                };
+
+                match deleted_documents {
+                    Ok(deleted_documents) => {
+                        task.status = Status::Succeeded;
+                        task.details = Some(Details::DocumentDeletionByFilter {
+                            original_filter,
+                            deleted_documents: Some(deleted_documents),
+                        });
+                    }
+                    Err(e) => {
+                        task.status = Status::Failed;
+                        task.details = Some(Details::DocumentDeletionByFilter {
+                            original_filter,
+                            deleted_documents: Some(0),
+                        });
+                        task.error = Some(e.into());
+                    }
+                }
+
+                Ok(vec![task])
+            }
             IndexOperation::Settings { index_uid: _, settings, mut tasks } => {
                 let indexer_config = self.index_mapper.indexer_config();
                 let mut builder = milli::update::Settings::new(index_wtxn, index, indexer_config);
@@ -1491,23 +1497,22 @@ impl IndexScheduler {
         }
     }
 
-fn delete_document_by_filter(filter: &serde_json::Value, index: Index) -> Result<u64> {
+fn delete_document_by_filter<'a>(
+    wtxn: &mut RwTxn<'a, '_>,
+    filter: &serde_json::Value,
+    index: &'a Index,
+) -> Result<u64> {
     let filter = Filter::from_json(filter)?;
     Ok(if let Some(filter) = filter {
-        let mut wtxn = index.write_txn()?;
-        let candidates = filter.evaluate(&wtxn, &index).map_err(|err| match err {
+        let candidates = filter.evaluate(wtxn, index).map_err(|err| match err {
             milli::Error::UserError(milli::UserError::InvalidFilter(_)) => {
                 Error::from(err).with_custom_error_code(Code::InvalidDocumentFilter)
             }
             e => e.into(),
         })?;
-        let mut delete_operation = DeleteDocuments::new(&mut wtxn, &index)?;
+        let mut delete_operation = DeleteDocuments::new(wtxn, index)?;
         delete_operation.delete_documents(&candidates);
-        let deleted_documents =
-            delete_operation.execute().map(|result| result.deleted_documents)?;
-        wtxn.commit()?;
-        deleted_documents
+        delete_operation.execute().map(|result| result.deleted_documents)?
     } else {
         0
     })
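
The signature change above moves transaction ownership out of the helper: the scheduler now threads one write transaction through the whole index operation and commits once at the end. A simplified, self-contained sketch of that pattern follows; `Txn` and `Store` are stand-ins, not the real heed/milli types.

```rust
// Stand-in types; the real code uses heed::RwTxn and milli::Index.
struct Txn {
    committed: bool,
}
struct Store;

// The helper borrows the caller's transaction and never commits it itself.
fn delete_by_filter(_txn: &mut Txn, _store: &Store) -> u64 {
    42 // pretend 42 documents matched the filter
}

fn main() {
    let store = Store;
    let mut txn = Txn { committed: false };
    let deleted = delete_by_filter(&mut txn, &store);
    // the caller commits exactly once, after all operations in the batch
    txn.committed = true;
    assert_eq!(deleted, 42);
    assert!(txn.committed);
}
```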
@@ -790,10 +790,19 @@ impl IndexScheduler {
 
         let mut res = BTreeMap::new();
 
+        let processing_tasks = { self.processing_tasks.read().unwrap().processing.len() };
+
         res.insert(
             "statuses".to_string(),
             enum_iterator::all::<Status>()
-                .map(|s| Ok((s.to_string(), self.get_status(&rtxn, s)?.len())))
+                .map(|s| {
+                    let tasks = self.get_status(&rtxn, s)?.len();
+                    match s {
+                        Status::Enqueued => Ok((s.to_string(), tasks - processing_tasks)),
+                        Status::Processing => Ok((s.to_string(), processing_tasks)),
+                        s => Ok((s.to_string(), tasks)),
+                    }
+                })
                .collect::<Result<BTreeMap<String, u64>>>()?,
         );
         res.insert(
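
A standalone sketch of the accounting rule this hunk encodes: a task being processed is still stored under `enqueued`, so the stats subtract the processing set from the enqueued count and report it separately. The values below match the `basic_get_stats` snapshots later in the diff.

```rust
use std::collections::BTreeMap;

fn statuses(enqueued_total: u64, processing: u64) -> BTreeMap<String, u64> {
    let mut res = BTreeMap::new();
    // tasks picked up by the current batch are subtracted from `enqueued`...
    res.insert("enqueued".to_string(), enqueued_total - processing);
    // ...and surfaced under `processing` instead
    res.insert("processing".to_string(), processing);
    res
}

fn main() {
    // 3 registered index creations, 1 picked up by the current batch:
    let s = statuses(3, 1);
    assert_eq!(s["enqueued"], 2);
    assert_eq!(s["processing"], 1);
}
```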
@@ -1053,6 +1062,8 @@ impl IndexScheduler {
             self.breakpoint(Breakpoint::Start);
         }
 
+        puffin::GlobalProfiler::lock().new_frame();
+
         self.cleanup_task_queue()?;
 
         let rtxn = self.env.read_txn().map_err(Error::HeedTransaction)?;
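
Taken together, the puffin hunks follow the crate's standard pattern; a minimal sketch under the assumption that only the `puffin` crate is available (a viewer such as `puffin_viewer` is needed to see anything):

```rust
fn process_one_batch() {
    // records a scope named after the enclosing function, as in process_batch
    puffin::profile_function!();
    // ... work ...
}

fn main() {
    // scopes are compiled in but inert until explicitly enabled
    puffin::set_scopes_on(true);
    for _ in 0..3 {
        // one frame per scheduler tick, mirroring the hunk above
        puffin::GlobalProfiler::lock().new_frame();
        process_one_batch();
    }
}
```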
@@ -4129,4 +4140,154 @@ mod tests {
         snapshot!(json_string!(tasks, { "[].enqueuedAt" => "[date]", "[].startedAt" => "[date]", "[].finishedAt" => "[date]", ".**.original_filter" => "[filter]", ".**.query" => "[query]" }), name: "everything_has_been_processed");
         drop(rtxn);
     }
+
+    #[test]
+    fn basic_get_stats() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        let kind = index_creation_task("catto", "mouse");
+        let _task = index_scheduler.register(kind).unwrap();
+        let kind = index_creation_task("doggo", "sheep");
+        let _task = index_scheduler.register(kind).unwrap();
+        let kind = index_creation_task("whalo", "fish");
+        let _task = index_scheduler.register(kind).unwrap();
+
+        snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
+        {
+          "indexes": {
+            "catto": 1,
+            "doggo": 1,
+            "whalo": 1
+          },
+          "statuses": {
+            "canceled": 0,
+            "enqueued": 3,
+            "failed": 0,
+            "processing": 0,
+            "succeeded": 0
+          },
+          "types": {
+            "documentAdditionOrUpdate": 0,
+            "documentDeletion": 0,
+            "dumpCreation": 0,
+            "indexCreation": 3,
+            "indexDeletion": 0,
+            "indexSwap": 0,
+            "indexUpdate": 0,
+            "settingsUpdate": 0,
+            "snapshotCreation": 0,
+            "taskCancelation": 0,
+            "taskDeletion": 0
+          }
+        }
+        "###);
+
+        handle.advance_till([Start, BatchCreated]);
+        snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
+        {
+          "indexes": {
+            "catto": 1,
+            "doggo": 1,
+            "whalo": 1
+          },
+          "statuses": {
+            "canceled": 0,
+            "enqueued": 2,
+            "failed": 0,
+            "processing": 1,
+            "succeeded": 0
+          },
+          "types": {
+            "documentAdditionOrUpdate": 0,
+            "documentDeletion": 0,
+            "dumpCreation": 0,
+            "indexCreation": 3,
+            "indexDeletion": 0,
+            "indexSwap": 0,
+            "indexUpdate": 0,
+            "settingsUpdate": 0,
+            "snapshotCreation": 0,
+            "taskCancelation": 0,
+            "taskDeletion": 0
+          }
+        }
+        "###);
+
+        handle.advance_till([
+            InsideProcessBatch,
+            InsideProcessBatch,
+            ProcessBatchSucceeded,
+            AfterProcessing,
+            Start,
+            BatchCreated,
+        ]);
+        snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
+        {
+          "indexes": {
+            "catto": 1,
+            "doggo": 1,
+            "whalo": 1
+          },
+          "statuses": {
+            "canceled": 0,
+            "enqueued": 1,
+            "failed": 0,
+            "processing": 1,
+            "succeeded": 1
+          },
+          "types": {
+            "documentAdditionOrUpdate": 0,
+            "documentDeletion": 0,
+            "dumpCreation": 0,
+            "indexCreation": 3,
+            "indexDeletion": 0,
+            "indexSwap": 0,
+            "indexUpdate": 0,
+            "settingsUpdate": 0,
+            "snapshotCreation": 0,
+            "taskCancelation": 0,
+            "taskDeletion": 0
+          }
+        }
+        "###);
+
+        // now we make one more batch, the started_at field of the new tasks will be past `second_start_time`
+        handle.advance_till([
+            InsideProcessBatch,
+            InsideProcessBatch,
+            ProcessBatchSucceeded,
+            AfterProcessing,
+            Start,
+            BatchCreated,
+        ]);
+        snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
+        {
+          "indexes": {
+            "catto": 1,
+            "doggo": 1,
+            "whalo": 1
+          },
+          "statuses": {
+            "canceled": 0,
+            "enqueued": 0,
+            "failed": 0,
+            "processing": 1,
+            "succeeded": 2
+          },
+          "types": {
+            "documentAdditionOrUpdate": 0,
+            "documentDeletion": 0,
+            "dumpCreation": 0,
+            "indexCreation": 3,
+            "indexDeletion": 0,
+            "indexSwap": 0,
+            "indexUpdate": 0,
+            "settingsUpdate": 0,
+            "snapshotCreation": 0,
+            "taskCancelation": 0,
+            "taskDeletion": 0
+          }
+        }
+        "###);
+    }
 }
@@ -167,7 +167,9 @@ macro_rules! snapshot {
         let (settings, snap_name, _) = $crate::default_snapshot_settings_for_test(test_name, Some(&snap_name));
         settings.bind(|| {
             let snap = format!("{}", $value);
-            meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
+            insta::allow_duplicates! {
+                meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
+            }
         });
     };
     ($value:expr, @$inline:literal) => {
@@ -176,7 +178,9 @@ macro_rules! snapshot {
         let (settings, _, _) = $crate::default_snapshot_settings_for_test("", Some("_dummy_argument"));
         settings.bind(|| {
             let snap = format!("{}", $value);
-            meili_snap::insta::assert_snapshot!(snap, @$inline);
+            insta::allow_duplicates! {
+                meili_snap::insta::assert_snapshot!(snap, @$inline);
+            }
         });
     };
     ($value:expr) => {
@@ -194,7 +198,9 @@ macro_rules! snapshot {
         let (settings, snap_name, _) = $crate::default_snapshot_settings_for_test(test_name, None);
         settings.bind(|| {
             let snap = format!("{}", $value);
-            meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
+            insta::allow_duplicates! {
+                meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
+            }
         });
     };
 }
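
The three macro arms change for the same reason: `insta::allow_duplicates!` lets one assertion site match the same snapshot several times (for example inside a loop), which a plain `assert_snapshot!` would reject. A minimal sketch:

```rust
#[test]
fn repeated_inline_snapshot() {
    insta::allow_duplicates! {
        // the same inline snapshot is asserted three times without insta
        // flagging a duplicate assertion
        for value in [2, 2, 2] {
            insta::assert_snapshot!(value.to_string(), @"2");
        }
    }
}
```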
@@ -129,6 +129,9 @@ impl HeedAuthStore {
                 Action::DumpsAll => {
                     actions.insert(Action::DumpsCreate);
                 }
+                Action::SnapshotsAll => {
+                    actions.insert(Action::SnapshotsCreate);
+                }
                 Action::TasksAll => {
                     actions.extend([Action::TasksGet, Action::TasksDelete, Action::TasksCancel]);
                 }
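
A self-contained sketch of the expansion rule this hunk extends to snapshots: when a key grants a `*` action, the auth store materializes the concrete actions it implies. The two-variant enum below is a simplification, not the real `Action` type.

```rust
use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Action {
    SnapshotsAll,
    SnapshotsCreate,
}

fn expand(granted: &[Action]) -> HashSet<Action> {
    let mut actions: HashSet<Action> = granted.iter().copied().collect();
    // granting `snapshots.*` implies `snapshots.create`
    if actions.contains(&Action::SnapshotsAll) {
        actions.insert(Action::SnapshotsCreate);
    }
    actions
}

fn main() {
    assert!(expand(&[Action::SnapshotsAll]).contains(&Action::SnapshotsCreate));
}
```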
@@ -15,13 +15,13 @@ actix-web = { version = "4.3.1", default-features = false }
 anyhow = "1.0.70"
 convert_case = "0.6.0"
 csv = "1.2.1"
-deserr = "0.5.0"
+deserr = { version = "0.6.0", features = ["actix-web"]}
 either = { version = "1.8.1", features = ["serde"] }
 enum-iterator = "1.4.0"
 file-store = { path = "../file-store" }
 flate2 = "1.0.25"
 fst = "0.4.7"
-memmap2 = "0.5.10"
+memmap2 = "0.7.1"
 milli = { path = "../milli" }
 roaring = { version = "0.10.1", features = ["serde"] }
 serde = { version = "1.0.160", features = ["derive"] }
@@ -1,4 +1,3 @@
-use std::borrow::Borrow;
 use std::fmt::{self, Debug, Display};
 use std::fs::File;
 use std::io::{self, Seek, Write};
@@ -42,7 +41,7 @@ impl Display for DocumentFormatError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             Self::Io(e) => write!(f, "{e}"),
-            Self::MalformedPayload(me, b) => match me.borrow() {
+            Self::MalformedPayload(me, b) => match me {
                 Error::Json(se) => {
                     let mut message = match se.classify() {
                         Category::Data => {
@@ -259,6 +259,9 @@ InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsSortableAttributes   , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsStopWords            , InvalidRequest , BAD_REQUEST ;
+InvalidSettingsNonSeparatorTokens   , InvalidRequest , BAD_REQUEST ;
+InvalidSettingsSeparatorTokens      , InvalidRequest , BAD_REQUEST ;
+InvalidSettingsDictionary           , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsSynonyms             , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsTypoTolerance        , InvalidRequest , BAD_REQUEST ;
 InvalidState                        , Internal       , INTERNAL_SERVER_ERROR ;
@@ -257,6 +257,12 @@ pub enum Action {
     #[serde(rename = "dumps.create")]
     #[deserr(rename = "dumps.create")]
     DumpsCreate,
+    #[serde(rename = "snapshots.*")]
+    #[deserr(rename = "snapshots.*")]
+    SnapshotsAll,
+    #[serde(rename = "snapshots.create")]
+    #[deserr(rename = "snapshots.create")]
+    SnapshotsCreate,
     #[serde(rename = "version")]
     #[deserr(rename = "version")]
     Version,
@@ -309,6 +315,7 @@ impl Action {
             METRICS_GET => Some(Self::MetricsGet),
             DUMPS_ALL => Some(Self::DumpsAll),
             DUMPS_CREATE => Some(Self::DumpsCreate),
+            SNAPSHOTS_CREATE => Some(Self::SnapshotsCreate),
             VERSION => Some(Self::Version),
             KEYS_CREATE => Some(Self::KeysAdd),
             KEYS_GET => Some(Self::KeysGet),
@@ -353,6 +360,7 @@ pub mod actions {
     pub const METRICS_GET: u8 = MetricsGet.repr();
     pub const DUMPS_ALL: u8 = DumpsAll.repr();
     pub const DUMPS_CREATE: u8 = DumpsCreate.repr();
+    pub const SNAPSHOTS_CREATE: u8 = SnapshotsCreate.repr();
     pub const VERSION: u8 = Version.repr();
     pub const KEYS_CREATE: u8 = KeysAdd.repr();
     pub const KEYS_GET: u8 = KeysGet.repr();
@@ -171,6 +171,15 @@ pub struct Settings<T> {
     #[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
     pub stop_words: Setting<BTreeSet<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsNonSeparatorTokens>)]
+    pub non_separator_tokens: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsSeparatorTokens>)]
+    pub separator_tokens: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsDictionary>)]
+    pub dictionary: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
     pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
@@ -201,6 +210,9 @@ impl Settings<Checked> {
             ranking_rules: Setting::Reset,
             stop_words: Setting::Reset,
             synonyms: Setting::Reset,
+            non_separator_tokens: Setting::Reset,
+            separator_tokens: Setting::Reset,
+            dictionary: Setting::Reset,
             distinct_attribute: Setting::Reset,
             typo_tolerance: Setting::Reset,
             faceting: Setting::Reset,
@@ -217,6 +229,9 @@ impl Settings<Checked> {
             sortable_attributes,
             ranking_rules,
             stop_words,
+            non_separator_tokens,
+            separator_tokens,
+            dictionary,
             synonyms,
             distinct_attribute,
             typo_tolerance,
@@ -232,6 +247,9 @@ impl Settings<Checked> {
             sortable_attributes,
             ranking_rules,
             stop_words,
+            non_separator_tokens,
+            separator_tokens,
+            dictionary,
             synonyms,
             distinct_attribute,
             typo_tolerance,
@@ -274,6 +292,9 @@ impl Settings<Unchecked> {
             ranking_rules: self.ranking_rules,
             stop_words: self.stop_words,
             synonyms: self.synonyms,
+            non_separator_tokens: self.non_separator_tokens,
+            separator_tokens: self.separator_tokens,
+            dictionary: self.dictionary,
             distinct_attribute: self.distinct_attribute,
             typo_tolerance: self.typo_tolerance,
             faceting: self.faceting,
@@ -335,6 +356,28 @@ pub fn apply_settings_to_builder(
         Setting::NotSet => (),
     }
 
+    match settings.non_separator_tokens {
+        Setting::Set(ref non_separator_tokens) => {
+            builder.set_non_separator_tokens(non_separator_tokens.clone())
+        }
+        Setting::Reset => builder.reset_non_separator_tokens(),
+        Setting::NotSet => (),
+    }
+
+    match settings.separator_tokens {
+        Setting::Set(ref separator_tokens) => {
+            builder.set_separator_tokens(separator_tokens.clone())
+        }
+        Setting::Reset => builder.reset_separator_tokens(),
+        Setting::NotSet => (),
+    }
+
+    match settings.dictionary {
+        Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
+        Setting::Reset => builder.reset_dictionary(),
+        Setting::NotSet => (),
+    }
+
     match settings.synonyms {
         Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
         Setting::Reset => builder.reset_synonyms(),
@@ -459,15 +502,14 @@ pub fn settings(
         })
         .transpose()?
         .unwrap_or_default();
 
+    let non_separator_tokens = index.non_separator_tokens(rtxn)?.unwrap_or_default();
+    let separator_tokens = index.separator_tokens(rtxn)?.unwrap_or_default();
+    let dictionary = index.dictionary(rtxn)?.unwrap_or_default();
+
     let distinct_field = index.distinct_field(rtxn)?.map(String::from);
 
-    // in milli each word in the synonyms map were split on their separator. Since we lost
-    // this information we are going to put space between words.
-    let synonyms = index
-        .synonyms(rtxn)?
-        .iter()
-        .map(|(key, values)| (key.join(" "), values.iter().map(|value| value.join(" ")).collect()))
-        .collect();
+    let synonyms = index.user_defined_synonyms(rtxn)?;
 
     let min_typo_word_len = MinWordSizeTyposSetting {
         one_typo: Setting::Set(index.min_word_len_one_typo(rtxn)?),
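
All of the settings plumbing above repeats one three-state pattern; a standalone sketch of its semantics follows (the `Setting` enum here is a simplification of the milli type):

```rust
enum Setting<T> {
    Set(T),  // apply this value
    Reset,   // restore the default
    NotSet,  // leave the stored value untouched
}

fn apply(current: &mut Vec<String>, update: Setting<Vec<String>>) {
    match update {
        Setting::Set(v) => *current = v,
        Setting::Reset => current.clear(),
        Setting::NotSet => (),
    }
}

fn main() {
    let mut dictionary = vec!["J. R. R.".to_string()];
    apply(&mut dictionary, Setting::NotSet);
    assert_eq!(dictionary.len(), 1); // untouched
    apply(&mut dictionary, Setting::Reset);
    assert!(dictionary.is_empty()); // back to default
}
```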
@@ -520,6 +562,9 @@ pub fn settings(
         sortable_attributes: Setting::Set(sortable_attributes),
         ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
         stop_words: Setting::Set(stop_words),
+        non_separator_tokens: Setting::Set(non_separator_tokens),
+        separator_tokens: Setting::Set(separator_tokens),
+        dictionary: Setting::Set(dictionary),
         distinct_attribute: match distinct_field {
             Some(field) => Setting::Set(field),
             None => Setting::Reset,
@@ -642,6 +687,9 @@ pub(crate) mod test {
             sortable_attributes: Setting::NotSet,
             ranking_rules: Setting::NotSet,
             stop_words: Setting::NotSet,
+            non_separator_tokens: Setting::NotSet,
+            separator_tokens: Setting::NotSet,
+            dictionary: Setting::NotSet,
             synonyms: Setting::NotSet,
             distinct_attribute: Setting::NotSet,
             typo_tolerance: Setting::NotSet,
@@ -663,6 +711,9 @@ pub(crate) mod test {
             sortable_attributes: Setting::NotSet,
             ranking_rules: Setting::NotSet,
             stop_words: Setting::NotSet,
+            non_separator_tokens: Setting::NotSet,
+            separator_tokens: Setting::NotSet,
+            dictionary: Setting::NotSet,
             synonyms: Setting::NotSet,
             distinct_attribute: Setting::NotSet,
             typo_tolerance: Setting::NotSet,
@@ -39,7 +39,7 @@ byte-unit = { version = "4.0.19", default-features = false, features = [
 bytes = "1.4.0"
 clap = { version = "4.2.1", features = ["derive", "env"] }
 crossbeam-channel = "0.5.8"
-deserr = "0.5.0"
+deserr = { version = "0.6.0", features = ["actix-web"]}
 dump = { path = "../dump" }
 either = "1.8.1"
 env_logger = "0.10.0"
@@ -50,15 +50,15 @@ futures = "0.3.28"
 futures-util = "0.3.28"
 http = "0.2.9"
 index-scheduler = { path = "../index-scheduler" }
-indexmap = { version = "1.9.3", features = ["serde-1"] }
+indexmap = { version = "2.0.0", features = ["serde"] }
 is-terminal = "0.4.8"
-itertools = "0.10.5"
+itertools = "0.11.0"
 jsonwebtoken = "8.3.0"
 lazy_static = "1.4.0"
 log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-mimalloc = { version = "0.1.36", default-features = false }
+mimalloc = { version = "0.1.37", default-features = false }
 mime = "0.3.17"
 num_cpus = "1.15.0"
 obkv = "0.2.0"
@@ -69,6 +69,8 @@ permissive-json-pointer = { path = "../permissive-json-pointer" }
 pin-project-lite = "0.2.9"
 platform-dirs = "0.3.0"
 prometheus = { version = "0.13.3", features = ["process"] }
+puffin = "0.16.0"
+puffin_http = { version = "0.13.0", optional = true }
 rand = "0.8.5"
 rayon = "1.7.0"
 regex = "1.7.3"
@@ -85,7 +87,7 @@ sha2 = "0.10.6"
 siphasher = "0.3.10"
 slice-group-by = "0.3.0"
 static-files = { version = "0.2.3", optional = true }
-sysinfo = "0.28.4"
+sysinfo = "0.29.7"
 tar = "0.4.38"
 tempfile = "3.5.0"
 thiserror = "1.0.40"
@@ -133,7 +135,18 @@ zip = { version = "0.6.4", optional = true }
 [features]
 default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"]
 analytics = ["segment"]
-mini-dashboard = ["actix-web-static-files", "static-files", "anyhow", "cargo_toml", "hex", "reqwest", "sha-1", "tempfile", "zip"]
+profile-with-puffin = ["dep:puffin_http"]
+mini-dashboard = [
+    "actix-web-static-files",
+    "static-files",
+    "anyhow",
+    "cargo_toml",
+    "hex",
+    "reqwest",
+    "sha-1",
+    "tempfile",
+    "zip",
+]
 chinese = ["meilisearch-types/chinese"]
 hebrew = ["meilisearch-types/hebrew"]
 japanese = ["meilisearch-types/japanese"]
@@ -20,7 +20,7 @@ pub struct SearchAggregator;
 #[allow(dead_code)]
 impl SearchAggregator {
     pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
-        Self::default()
+        Self
     }
 
     pub fn succeed(&mut self, _: &dyn Any) {}
@@ -32,7 +32,7 @@ pub struct MultiSearchAggregator;
 #[allow(dead_code)]
 impl MultiSearchAggregator {
     pub fn from_queries(_: &dyn Any, _: &dyn Any) -> Self {
-        Self::default()
+        Self
     }
 
     pub fn succeed(&mut self) {}
@@ -44,7 +44,7 @@ pub struct FacetSearchAggregator;
 #[allow(dead_code)]
 impl FacetSearchAggregator {
     pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
-        Self::default()
+        Self
     }
 
     pub fn succeed(&mut self, _: &dyn Any) {}
@@ -1,6 +1,5 @@
 mod mock_analytics;
-// if we are in release mode and the feature analytics was enabled
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 mod segment_analytics;
 
 use std::fs;
@@ -17,26 +16,25 @@ use serde_json::Value;
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
 use crate::routes::tasks::TasksFilterQuery;
 
-// if we are in debug mode OR the analytics feature is disabled
+// if the analytics feature is disabled
 // the `SegmentAnalytics` point to the mock instead of the real analytics
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type SegmentAnalytics = mock_analytics::MockAnalytics;
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type SearchAggregator = mock_analytics::SearchAggregator;
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type MultiSearchAggregator = mock_analytics::MultiSearchAggregator;
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type FacetSearchAggregator = mock_analytics::FacetSearchAggregator;
 
-// if we are in release mode and the feature analytics was enabled
-// we use the real analytics
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+// if the feature analytics is enabled we use the real analytics
+#[cfg(feature = "analytics")]
 pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 pub type SearchAggregator = segment_analytics::SearchAggregator;
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator;
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 pub type FacetSearchAggregator = segment_analytics::FacetSearchAggregator;
 
 /// The Meilisearch config dir:
(File diff suppressed because it is too large.)
@@ -30,6 +30,10 @@ fn setup(opt: &Opt) -> anyhow::Result<()> {
 async fn main() -> anyhow::Result<()> {
     let (opt, config_read_from) = Opt::try_build()?;
 
+    #[cfg(feature = "profile-with-puffin")]
+    let _server = puffin_http::Server::new(&format!("0.0.0.0:{}", puffin_http::DEFAULT_PORT))?;
+    puffin::set_scopes_on(cfg!(feature = "profile-with-puffin"));
+
     anyhow::ensure!(
         !(cfg!(windows) && opt.experimental_reduce_indexing_memory_usage),
         "The `experimental-reduce-indexing-memory-usage` flag is not supported on Windows"
@@ -28,7 +28,7 @@ const MEILI_DB_PATH: &str = "MEILI_DB_PATH";
 const MEILI_HTTP_ADDR: &str = "MEILI_HTTP_ADDR";
 const MEILI_MASTER_KEY: &str = "MEILI_MASTER_KEY";
 const MEILI_ENV: &str = "MEILI_ENV";
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 const MEILI_NO_ANALYTICS: &str = "MEILI_NO_ANALYTICS";
 const MEILI_HTTP_PAYLOAD_SIZE_LIMIT: &str = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT";
 const MEILI_SSL_CERT_PATH: &str = "MEILI_SSL_CERT_PATH";
@@ -159,7 +159,7 @@ pub struct Opt {
     /// Meilisearch automatically collects data from all instances that do not opt out using this flag.
     /// All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted
     /// at any time.
-    #[cfg(all(not(debug_assertions), feature = "analytics"))]
+    #[cfg(feature = "analytics")]
     #[serde(default)] // we can't send true
     #[clap(long, env = MEILI_NO_ANALYTICS)]
     pub no_analytics: bool,
@@ -390,7 +390,7 @@ impl Opt {
             ignore_missing_dump: _,
             ignore_dump_if_db_exists: _,
             config_file_path: _,
-            #[cfg(all(not(debug_assertions), feature = "analytics"))]
+            #[cfg(feature = "analytics")]
             no_analytics,
             experimental_enable_metrics: enable_metrics_route,
             experimental_reduce_indexing_memory_usage: reduce_indexing_memory_usage,
@@ -401,7 +401,7 @@ impl Opt {
             export_to_env_if_not_present(MEILI_MASTER_KEY, master_key);
         }
         export_to_env_if_not_present(MEILI_ENV, env);
-        #[cfg(all(not(debug_assertions), feature = "analytics"))]
+        #[cfg(feature = "analytics")]
         {
             export_to_env_if_not_present(MEILI_NO_ANALYTICS, no_analytics.to_string());
         }
@@ -310,6 +310,81 @@ make_setting_route!(
     }
 );
 
+make_setting_route!(
+    "/non-separator-tokens",
+    put,
+    std::collections::BTreeSet<String>,
+    meilisearch_types::deserr::DeserrJsonError<
+        meilisearch_types::error::deserr_codes::InvalidSettingsNonSeparatorTokens,
+    >,
+    non_separator_tokens,
+    "nonSeparatorTokens",
+    analytics,
+    |non_separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "nonSeparatorTokens Updated".to_string(),
+            json!({
+                "non_separator_tokens": {
+                    "total": non_separator_tokens.as_ref().map(|non_separator_tokens| non_separator_tokens.len()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
+make_setting_route!(
+    "/separator-tokens",
+    put,
+    std::collections::BTreeSet<String>,
+    meilisearch_types::deserr::DeserrJsonError<
+        meilisearch_types::error::deserr_codes::InvalidSettingsSeparatorTokens,
+    >,
+    separator_tokens,
+    "separatorTokens",
+    analytics,
+    |separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "separatorTokens Updated".to_string(),
+            json!({
+                "separator_tokens": {
+                    "total": separator_tokens.as_ref().map(|separator_tokens| separator_tokens.len()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
+make_setting_route!(
+    "/dictionary",
+    put,
+    std::collections::BTreeSet<String>,
+    meilisearch_types::deserr::DeserrJsonError<
+        meilisearch_types::error::deserr_codes::InvalidSettingsDictionary,
+    >,
+    dictionary,
+    "dictionary",
+    analytics,
+    |dictionary: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "dictionary Updated".to_string(),
+            json!({
+                "dictionary": {
+                    "total": dictionary.as_ref().map(|dictionary| dictionary.len()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
 make_setting_route!(
     "/synonyms",
     put,
@@ -466,6 +541,9 @@ generate_configure!(
|
|||||||
searchable_attributes,
|
searchable_attributes,
|
||||||
distinct_attribute,
|
distinct_attribute,
|
||||||
stop_words,
|
stop_words,
|
||||||
|
separator_tokens,
|
||||||
|
non_separator_tokens,
|
||||||
|
dictionary,
|
||||||
synonyms,
|
synonyms,
|
||||||
ranking_rules,
|
ranking_rules,
|
||||||
typo_tolerance,
|
typo_tolerance,
|
||||||
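Each `make_setting_route!` invocation above expands to a sub-resource under `/indexes/{index_uid}/settings`, accepting the declared `BTreeSet<String>` payload on PUT. A hedged sketch of how a client could exercise the new routes (the base URL is a placeholder and the HTTP client choice is an assumption, not part of the change):

```rust
use reqwest::Client;

// Illustrative only: pushes separator overrides and a custom dictionary to a
// local instance. URL and index name are assumptions for the example.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = Client::new();
    let base = "http://localhost:7700/indexes/products/settings";

    // nonSeparatorTokens: characters the tokenizer should NOT split on.
    client
        .put(format!("{base}/non-separator-tokens"))
        .json(&["@", "#"])
        .send()
        .await?
        .error_for_status()?;

    // dictionary: sequences the tokenizer should keep as single tokens.
    client
        .put(format!("{base}/dictionary"))
        .json(&["J. R. R.", "W. E. B."])
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}
```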
@@ -24,6 +24,7 @@ pub mod features;
 pub mod indexes;
 mod metrics;
 mod multi_search;
+mod snapshot;
 mod swap_indexes;
 pub mod tasks;
 
@@ -32,6 +33,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::resource("/health").route(web::get().to(get_health)))
         .service(web::scope("/keys").configure(api_key::configure))
         .service(web::scope("/dumps").configure(dump::configure))
+        .service(web::scope("/snapshots").configure(snapshot::configure))
         .service(web::resource("/stats").route(web::get().to(get_stats)))
         .service(web::resource("/version").route(web::get().to(get_version)))
         .service(web::scope("/indexes").configure(indexes::configure))
meilisearch/src/routes/snapshot.rs (new file, 32 lines)
@@ -0,0 +1,32 @@
+use actix_web::web::Data;
+use actix_web::{web, HttpRequest, HttpResponse};
+use index_scheduler::IndexScheduler;
+use log::debug;
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::tasks::KindWithContent;
+use serde_json::json;
+
+use crate::analytics::Analytics;
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::GuardedData;
+use crate::extractors::sequential_extractor::SeqHandler;
+use crate::routes::SummarizedTaskView;
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_snapshot))));
+}
+
+pub async fn create_snapshot(
+    index_scheduler: GuardedData<ActionPolicy<{ actions::SNAPSHOTS_CREATE }>, Data<IndexScheduler>>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
+) -> Result<HttpResponse, ResponseError> {
+    analytics.publish("Snapshot Created".to_string(), json!({}), Some(&req));
+
+    let task = KindWithContent::SnapshotCreation;
+    let task: SummarizedTaskView =
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
+
+    debug!("returns: {:?}", task);
+    Ok(HttpResponse::Accepted().json(task))
+}
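The new route does not build the snapshot inline: it registers a `SnapshotCreation` task and answers `202 Accepted` with a summarized task view. A hedged sketch of the round trip from a client's point of view (base URL is a placeholder; the `taskUid` field name matches the summarized task views handled elsewhere in this diff):

```rust
use reqwest::Client;
use serde_json::Value;

// Illustrative only: POST /snapshots enqueues the snapshot; the file itself
// is written later by the scheduler, and the caller polls /tasks/{uid}.
async fn trigger_snapshot(base: &str) -> Result<u64, reqwest::Error> {
    let resp: Value = Client::new()
        .post(format!("{base}/snapshots"))
        .send()
        .await?
        .error_for_status()?
        .json()
        .await?;
    Ok(resp["taskUid"].as_u64().unwrap_or_default())
}
```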
@@ -60,8 +60,7 @@ pub async fn swap_indexes(
     }
 
     let task = KindWithContent::IndexSwap { swaps };
-
-    let task = index_scheduler.register(task)?;
-    let task: SummarizedTaskView = task.into();
+    let task: SummarizedTaskView =
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
     Ok(HttpResponse::Accepted().json(task))
 }
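Registering a task does synchronous database work, so calling it directly on an async worker would stall the executor; `tokio::task::spawn_blocking` moves it to the blocking pool, and the double `??` unwraps first the `JoinError`, then the scheduler's own `Result`. A self-contained sketch of the same pattern, with a stand-in for the blocking call:

```rust
// Hypothetical stand-in for a blocking scheduler registration.
fn register_blocking(task: u32) -> Result<u32, String> {
    Ok(task + 1)
}

async fn register_async(task: u32) -> Result<u32, Box<dyn std::error::Error + Send + Sync>> {
    // First `?` surfaces a JoinError (panic or cancellation in the blocking
    // pool); the second surfaces the inner Result from `register_blocking`.
    let registered = tokio::task::spawn_blocking(move || register_blocking(task)).await??;
    Ok(registered)
}

#[tokio::main]
async fn main() {
    assert_eq!(register_async(7).await.unwrap(), 8);
}
```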
@@ -491,6 +491,20 @@ pub fn perform_search(
         tokenizer_builder.allow_list(&script_lang_map);
     }
 
+    let separators = index.allowed_separators(&rtxn)?;
+    let separators: Option<Vec<_>> =
+        separators.as_ref().map(|x| x.iter().map(String::as_str).collect());
+    if let Some(ref separators) = separators {
+        tokenizer_builder.separators(separators);
+    }
+
+    let dictionary = index.dictionary(&rtxn)?;
+    let dictionary: Option<Vec<_>> =
+        dictionary.as_ref().map(|x| x.iter().map(String::as_str).collect());
+    if let Some(ref dictionary) = dictionary {
+        tokenizer_builder.words_dict(dictionary);
+    }
 
     let mut formatter_builder = MatcherBuilder::new(matching_words, tokenizer_builder.build());
     formatter_builder.crop_marker(query.crop_marker);
     formatter_builder.highlight_prefix(query.highlight_pre_tag);
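The double binding (`Option<BTreeSet<String>>` narrowed to `Option<Vec<&str>>`) exists because the tokenizer builder borrows `&str` slices while the index returns owned `String`s; the owned values must stay alive for the whole build. A standalone sketch of the same borrow dance (the builder type here is a stand-in, not charabia's real API):

```rust
use std::collections::BTreeSet;

// Stand-in builder that borrows its separator list, like the real tokenizer builder.
#[derive(Default)]
struct TokenizerBuilder<'a> {
    separators: Option<&'a [&'a str]>,
}

fn main() {
    // Owned data, as it comes back from the index settings.
    let owned: Option<BTreeSet<String>> =
        Some(["|", "&"].iter().map(|s| s.to_string()).collect());

    // Borrowed view; `owned` must outlive `builder`, which is why the diff
    // keeps both bindings alive in the same scope.
    let borrowed: Option<Vec<&str>> =
        owned.as_ref().map(|x| x.iter().map(String::as_str).collect());

    let mut builder = TokenizerBuilder::default();
    if let Some(ref seps) = borrowed {
        builder.separators = Some(seps.as_slice());
    }
    let _ = builder;
}
```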
@@ -666,6 +680,7 @@ fn compute_semantic_score(query: &[f32], vectors: Value) -> milli::Result<Option
         .map_err(InternalError::SerdeJson)?;
     Ok(vectors
         .into_iter()
+        .flatten()
         .map(|v| OrderedFloat(dot_product_similarity(query, &v)))
         .max()
         .map(OrderedFloat::into_inner))
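A document can carry several embeddings, so the semantic score is the best dot product across all of them; the new `.flatten()` collapses one level of nesting from the deserialized value before the per-vector scores are compared, and `OrderedFloat` is needed because `f32` is not `Ord`. A self-contained sketch of the same max-similarity fold, using the ordered_float crate as the diff does (the exact deserialized shape is an assumption here):

```rust
use ordered_float::OrderedFloat;

fn dot(a: &[f32], b: &[f32]) -> f32 {
    a.iter().zip(b).map(|(x, y)| x * y).sum()
}

fn best_score(query: &[f32], vectors: Vec<Vec<Vec<f32>>>) -> Option<f32> {
    vectors
        .into_iter()
        .flatten() // collapse the nested grouping into a flat stream of vectors
        .map(|v| OrderedFloat(dot(query, &v)))
        .max() // f32 is not Ord, so compare through OrderedFloat
        .map(OrderedFloat::into_inner)
}

fn main() {
    let q = [1.0, 0.0];
    let vs = vec![vec![vec![0.5, 0.5], vec![0.9, 0.1]]];
    assert_eq!(best_score(&q, vs), Some(0.9));
}
```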
@@ -1,8 +1,7 @@
 use std::{thread, time};
 
-use serde_json::{json, Value};
-
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 #[actix_rt::test]
 async fn add_valid_api_key() {
@@ -162,7 +161,7 @@ async fn add_valid_api_key_null_description() {
     server.use_api_key("MASTER_KEY");
 
     let content = json!({
-        "description": Value::Null,
+        "description": json!(null),
         "indexes": ["products"],
         "actions": ["documents.add"],
         "expiresAt": "2050-11-13T00:00:00"
@@ -365,7 +364,7 @@ async fn error_add_api_key_invalid_index_uids() {
     server.use_api_key("MASTER_KEY");
 
     let content = json!({
-        "description": Value::Null,
+        "description": json!(null),
         "indexes": ["invalid index # / \\name with spaces"],
         "actions": [
             "documents.add"
@@ -422,7 +421,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
+      "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
       "code": "invalid_api_key_actions",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -507,7 +506,7 @@ async fn error_add_api_key_invalid_parameters_uid() {
 async fn error_add_api_key_parameters_uid_already_exist() {
     let mut server = Server::new_auth().await;
     server.use_api_key("MASTER_KEY");
-    let content = json!({
+    let content: Value = json!({
         "uid": "4bc0887a-0e41-4f3b-935d-0c451dcee9c8",
         "indexes": ["products"],
         "actions": ["search"],
@@ -1146,7 +1145,7 @@ async fn patch_api_key_description() {
     meili_snap::snapshot!(code, @"200 OK");
 
     // Remove the description
-    let content = json!({ "description": serde_json::Value::Null });
+    let content = json!({ "description": null });
 
     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###"
@@ -3,10 +3,10 @@ use std::collections::{HashMap, HashSet};
 use ::time::format_description::well_known::Rfc3339;
 use maplit::{hashmap, hashset};
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 use time::{Duration, OffsetDateTime};
 
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
     Lazy::new(|| {
@@ -54,6 +54,7 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
         ("GET", "/indexes/products/stats") => hashset!{"stats.get", "stats.*", "*"},
         ("GET", "/stats") => hashset!{"stats.get", "stats.*", "*"},
         ("POST", "/dumps") => hashset!{"dumps.create", "dumps.*", "*"},
+        ("POST", "/snapshots") => hashset!{"snapshots.create", "snapshots.*", "*"},
         ("GET", "/version") => hashset!{"version", "*"},
         ("GET", "/metrics") => hashset!{"metrics.get", "metrics.*", "*"},
        ("PATCH", "/keys/mykey/") => hashset!{"keys.update", "*"},
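The `AUTHORIZATIONS` map is what the generated authorization tests iterate over, so the one new row is enough to assert that `POST /snapshots` is reachable with `snapshots.create`, `snapshots.*`, or `*` and rejected otherwise. A hedged sketch of a key scoped to snapshot creation only (the payload shape follows the `/keys` tests above; `add_api_key` is assumed to be the suite's existing helper):

```rust
#[actix_rt::test]
async fn snapshot_scoped_key() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // Hypothetical key: may only trigger snapshots, on any index.
    let content = json!({
        "description": "snapshot runner",
        "indexes": ["*"],
        "actions": ["snapshots.create"],
        "expiresAt": null
    });
    let (_response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
}
```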
@@ -1,8 +1,8 @@
 use meili_snap::*;
-use serde_json::json;
 use uuid::Uuid;
 
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn create_api_key_bad_description() {
@@ -90,7 +90,7 @@ async fn create_api_key_bad_actions() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
+      "message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
       "code": "invalid_api_key_actions",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -7,9 +7,9 @@ mod tenant_token;
 mod tenant_token_multi_search;
 
 use actix_web::http::StatusCode;
-use serde_json::{json, Value};
 
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 impl Server {
     pub fn use_api_key(&mut self, api_key: impl AsRef<str>) {
@@ -3,11 +3,11 @@ use std::collections::HashMap;
 use ::time::format_description::well_known::Rfc3339;
 use maplit::hashmap;
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 use time::{Duration, OffsetDateTime};
 
 use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 fn generate_tenant_token(
     parent_uid: impl AsRef<str>,
@@ -233,31 +233,31 @@ async fn search_authorized_simple_token() {
         },
         hashmap! {
             "searchRules" => json!({"*": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"*": null}),
+            "exp" => json!(null)
         },
         hashmap! {
             "searchRules" => json!(["*"]),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         hashmap! {
             "searchRules" => json!({"sales": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"sales": null}),
+            "exp" => json!(null)
        },
        hashmap! {
             "searchRules" => json!(["sales"]),
-            "exp" => Value::Null
+            "exp" => json!(null)
        },
        hashmap! {
             "searchRules" => json!(["sa*"]),
-            "exp" => Value::Null
+            "exp" => json!(null)
        },
    ];
 
@@ -386,7 +386,7 @@ async fn error_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -398,7 +398,7 @@ async fn error_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
+            "searchRules" => json!({"sales": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -428,15 +428,15 @@ async fn error_search_forbidden_token() {
         },
         hashmap! {
             "searchRules" => json!({"products": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         hashmap! {
-            "searchRules" => json!({"products": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"products": null}),
+            "exp" => json!(null)
         },
         hashmap! {
             "searchRules" => json!(["products"]),
-            "exp" => Value::Null
+            "exp" => json!(null)
         },
         // expired token
         hashmap! {
@@ -444,7 +444,7 @@ async fn error_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -456,7 +456,7 @@ async fn error_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
+            "searchRules" => json!({"sales": null}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -3,11 +3,11 @@ use std::collections::HashMap;
 use ::time::format_description::well_known::Rfc3339;
 use maplit::hashmap;
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 use time::{Duration, OffsetDateTime};
 
 use super::authorization::ALL_ACTIONS;
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 fn generate_tenant_token(
     parent_uid: impl AsRef<str>,
@@ -512,31 +512,31 @@ async fn single_search_authorized_simple_token() {
         },
         hashmap! {
             "searchRules" => json!({"*": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"*": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["*"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!({"sales": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"sales": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["sales"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["sa*"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
     ];
 
@@ -564,31 +564,31 @@ async fn multi_search_authorized_simple_token() {
         },
         hashmap! {
             "searchRules" => json!({"*": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"*": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["*"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!({"sales": {}, "products": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null, "products": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"sales": null, "products": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["sales", "products"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["sa*", "pro*"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
     ];
 
@@ -823,7 +823,7 @@ async fn error_single_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -835,7 +835,7 @@ async fn error_single_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
+            "searchRules" => json!({"sales": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -864,7 +864,7 @@ async fn error_multi_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -876,7 +876,7 @@ async fn error_multi_search_token_forbidden_parent_key() {
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null, "products": Value::Null}),
+            "searchRules" => json!({"sales": null, "products": null}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -919,15 +919,15 @@ async fn error_single_search_forbidden_token() {
         },
         hashmap! {
             "searchRules" => json!({"products": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"products": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"products": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["products"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         // expired token
         hashmap! {
@@ -935,7 +935,7 @@ async fn error_single_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -947,7 +947,7 @@ async fn error_single_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
+            "searchRules" => json!({"sales": null}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -978,15 +978,15 @@ async fn error_multi_search_forbidden_token() {
         },
         hashmap! {
             "searchRules" => json!({"products": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"products": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"products": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["products"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!({"sales": {}}),
@@ -998,15 +998,15 @@ async fn error_multi_search_forbidden_token() {
         },
         hashmap! {
             "searchRules" => json!({"sales": {}}),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null}),
-            "exp" => Value::Null
+            "searchRules" => json!({"sales": null}),
+            "exp" => json!(null),
         },
         hashmap! {
             "searchRules" => json!(["sales"]),
-            "exp" => Value::Null
+            "exp" => json!(null),
         },
         // expired token
         hashmap! {
@@ -1014,7 +1014,7 @@ async fn error_multi_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"*": Value::Null}),
+            "searchRules" => json!({"*": null}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -1026,7 +1026,7 @@ async fn error_multi_search_forbidden_token() {
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
-            "searchRules" => json!({"sales": Value::Null, "products": {}}),
+            "searchRules" => json!({"sales": null, "products": {}}),
             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
         },
         hashmap! {
@@ -3,12 +3,13 @@ use std::panic::{catch_unwind, resume_unwind, UnwindSafe};
 use std::time::Duration;
 
 use actix_web::http::StatusCode;
-use serde_json::{json, Value};
 use tokio::time::sleep;
 use urlencoding::encode as urlencode;
 
 use super::encoder::Encoder;
 use super::service::Service;
+use super::Value;
+use crate::json;
 
 pub struct Index<'a> {
     pub uid: String,
@@ -242,7 +243,9 @@ impl Index<'_> {
 
     pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/documents/delete-batch", urlencode(self.uid.as_ref()));
-        self.service.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder).await
+        self.service
+            .post_encoded(url, serde_json::to_value(&ids).unwrap().into(), self.encoder)
+            .await
     }
 
     pub async fn delete_batch_raw(&self, body: Value) -> (Value, StatusCode) {
@@ -3,9 +3,83 @@ pub mod index;
 pub mod server;
 pub mod service;
 
+use std::fmt::{self, Display};
+
 pub use index::{GetAllDocumentsOptions, GetDocumentOptions};
+use meili_snap::json_string;
+use serde::{Deserialize, Serialize};
 pub use server::{default_settings, Server};
 
+#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
+pub struct Value(pub serde_json::Value);
+
+impl Value {
+    pub fn uid(&self) -> u64 {
+        if let Some(uid) = self["uid"].as_u64() {
+            uid
+        } else if let Some(uid) = self["taskUid"].as_u64() {
+            uid
+        } else {
+            panic!("Didn't find any task id in: {self}");
+        }
+    }
+}
+
+impl From<serde_json::Value> for Value {
+    fn from(value: serde_json::Value) -> Self {
+        Value(value)
+    }
+}
+
+impl std::ops::Deref for Value {
+    type Target = serde_json::Value;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl PartialEq<serde_json::Value> for Value {
+    fn eq(&self, other: &serde_json::Value) -> bool {
+        &self.0 == other
+    }
+}
+
+impl PartialEq<Value> for serde_json::Value {
+    fn eq(&self, other: &Value) -> bool {
+        self == &other.0
+    }
+}
+
+impl PartialEq<&str> for Value {
+    fn eq(&self, other: &&str) -> bool {
+        self.0.eq(other)
+    }
+}
+
+impl Display for Value {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "{}",
+            json_string!(self, { ".enqueuedAt" => "[date]", ".processedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" })
+        )
+    }
+}
+
+impl From<Vec<Value>> for Value {
+    fn from(value: Vec<Value>) -> Self {
+        Self(value.into_iter().map(|value| value.0).collect::<serde_json::Value>())
+    }
+}
+
+#[macro_export]
+macro_rules! json {
+    ($($json:tt)+) => {
+        $crate::common::Value(serde_json::json!($($json)+))
+    };
+}
 
 /// Performs a search test on both post and get routes
 #[macro_export]
 macro_rules! test_post_get_search {
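The test-local `json!` macro shadows `serde_json::json!` and wraps its output in the new `Value` newtype, so every fixture automatically gains the `uid()` helper, the date-redacting `Display` impl, and comparisons against both `serde_json::Value` and `&str`. A self-contained miniature of the same design, showing why the `Deref` impl keeps call sites unchanged:

```rust
use std::ops::Deref;

#[derive(Debug, Clone)]
pub struct Value(pub serde_json::Value);

impl Deref for Value {
    type Target = serde_json::Value;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// Same trick as the diff: the macro wraps serde_json's output in the newtype.
macro_rules! json {
    ($($json:tt)+) => {
        Value(serde_json::json!($($json)+))
    };
}

fn main() {
    let task = json!({ "taskUid": 3, "status": "enqueued" });
    // Deref lets the wrapper be indexed exactly like a serde_json::Value...
    assert_eq!(task["status"], "enqueued");
    // ...while helpers can normalize over both id fields, like `Value::uid()`.
    let uid = task["taskUid"].as_u64().or_else(|| task["uid"].as_u64());
    assert_eq!(uid, Some(3));
}
```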
@@ -11,13 +11,14 @@ use clap::Parser;
 use meilisearch::option::{IndexerOpts, MaxMemory, Opt};
 use meilisearch::{analytics, create_app, setup_meilisearch};
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 use tempfile::TempDir;
 use tokio::time::sleep;
 
 use super::index::Index;
 use super::service::Service;
 use crate::common::encoder::Encoder;
+use crate::common::Value;
+use crate::json;
 
 pub struct Server {
     pub service: Service,
@@ -156,6 +157,10 @@ impl Server {
         self.service.post("/dumps", json!(null)).await
     }
 
+    pub async fn create_snapshot(&self) -> (Value, StatusCode) {
+        self.service.post("/snapshots", json!(null)).await
+    }
+
     pub async fn index_swap(&self, value: Value) -> (Value, StatusCode) {
         self.service.post("/swap-indexes", value).await
     }
@@ -204,7 +209,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
        db_path: dir.as_ref().join("db"),
        dump_dir: dir.as_ref().join("dumps"),
        env: "development".to_owned(),
-        #[cfg(all(not(debug_assertions), feature = "analytics"))]
+        #[cfg(feature = "analytics")]
        no_analytics: true,
        max_index_size: Byte::from_unit(100.0, ByteUnit::MiB).unwrap(),
        max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(),
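With the `create_snapshot` helper in place, an integration test for the new endpoint stays one line per step. A plausible shape for such a test, not one taken from the PR, using only helpers visible in this diff:

```rust
#[actix_rt::test]
async fn create_snapshot_returns_a_task() {
    let server = Server::new().await;

    // POST /snapshots is asynchronous: the route answers 202 with a summarized task.
    let (task, code) = server.create_snapshot().await;
    assert_eq!(code, 202);
    assert_eq!(task["type"], "snapshotCreation"); // type string assumed from the task kind
    let _uid = task.uid(); // `Value::uid()` reads `taskUid` on summarized views
}
```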
@@ -7,9 +7,9 @@ use actix_web::test::TestRequest;
 use index_scheduler::IndexScheduler;
 use meilisearch::{analytics, create_app, Opt};
 use meilisearch_auth::AuthController;
-use serde_json::Value;
 
 use crate::common::encoder::Encoder;
+use crate::common::Value;
 
 pub struct Service {
     pub index_scheduler: Arc<IndexScheduler>,
@@ -3,9 +3,8 @@
 mod common;
 
 use actix_web::test;
-use serde_json::{json, Value};
 
-use crate::common::Server;
+use crate::common::{Server, Value};
 
 enum HttpVerb {
     Put,
@@ -1,11 +1,11 @@
 use actix_web::test;
 use meili_snap::{json_string, snapshot};
-use serde_json::{json, Value};
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
 use crate::common::encoder::Encoder;
-use crate::common::{GetAllDocumentsOptions, Server};
+use crate::common::{GetAllDocumentsOptions, Server, Value};
+use crate::json;
 
 /// This is the basic usage of our API and every other tests uses the content-type application/json
 #[actix_rt::test]
@@ -1,7 +1,7 @@
 use meili_snap::{json_string, snapshot};
-use serde_json::json;
 
 use crate::common::{GetAllDocumentsOptions, Server};
+use crate::json;
 
 #[actix_rt::test]
 async fn delete_one_document_unexisting_index() {
@@ -154,6 +154,19 @@ async fn delete_document_by_filter() {
     )
     .await;
     index.wait_task(1).await;
+
+    let (stats, _) = index.stats().await;
+    snapshot!(json_string!(stats), @r###"
+    {
+      "numberOfDocuments": 4,
+      "isIndexing": false,
+      "fieldDistribution": {
+        "color": 3,
+        "id": 4
+      }
+    }
+    "###);
+
     let (response, code) =
         index.delete_document_by_filter(json!({ "filter": "color = blue"})).await;
     snapshot!(code, @"202 Accepted");
@@ -188,6 +201,18 @@ async fn delete_document_by_filter() {
     }
     "###);
+
+    let (stats, _) = index.stats().await;
+    snapshot!(json_string!(stats), @r###"
+    {
+      "numberOfDocuments": 2,
+      "isIndexing": false,
+      "fieldDistribution": {
+        "color": 1,
+        "id": 2
+      }
+    }
+    "###);
 
     let (documents, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
     snapshot!(code, @"200 OK");
     snapshot!(json_string!(documents), @r###"
@@ -241,6 +266,18 @@ async fn delete_document_by_filter() {
     }
     "###);
+
+    let (stats, _) = index.stats().await;
+    snapshot!(json_string!(stats), @r###"
+    {
+      "numberOfDocuments": 1,
+      "isIndexing": false,
+      "fieldDistribution": {
+        "color": 1,
+        "id": 1
+      }
+    }
+    "###);
 
     let (documents, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
     snapshot!(code, @"200 OK");
     snapshot!(json_string!(documents), @r###"
@@ -1,8 +1,8 @@
 use meili_snap::*;
-use serde_json::json;
 use urlencoding::encode;
 
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn get_all_documents_bad_offset() {
@@ -1,11 +1,11 @@
 use actix_web::test;
 use http::header::ACCEPT_ENCODING;
 use meili_snap::*;
-use serde_json::{json, Value};
 use urlencoding::encode as urlencode;
 
 use crate::common::encoder::Encoder;
-use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
+use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server, Value};
+use crate::json;
 
 // TODO: partial test since we are testing error, amd error is not yet fully implemented in
 // transplant
@@ -40,7 +40,7 @@ async fn get_document() {
     let server = Server::new().await;
     let index = server.index("test");
     index.create(None).await;
-    let documents = serde_json::json!([
+    let documents = json!([
         {
             "id": 0,
             "nested": { "content": "foobar" },
@@ -53,7 +53,7 @@ async fn get_document() {
     assert_eq!(code, 200);
     assert_eq!(
         response,
-        serde_json::json!({
+        json!({
             "id": 0,
             "nested": { "content": "foobar" },
         })
@@ -64,7 +64,7 @@ async fn get_document() {
     assert_eq!(code, 200);
     assert_eq!(
         response,
-        serde_json::json!({
+        json!({
             "id": 0,
         })
     );
@@ -75,7 +75,7 @@ async fn get_document() {
     assert_eq!(code, 200);
     assert_eq!(
         response,
-        serde_json::json!({
+        json!({
             "nested": { "content": "foobar" },
         })
     );
@@ -122,7 +122,7 @@ async fn get_all_documents_no_options() {
     assert_eq!(code, 200);
     let arr = response["results"].as_array().unwrap();
     assert_eq!(arr.len(), 20);
-    let first = serde_json::json!({
+    let first = json!({
         "id":0,
         "isActive":false,
         "balance":"$2,668.55",
@@ -1,7 +1,8 @@
-use serde_json::json;
+use meili_snap::snapshot;
 
 use crate::common::encoder::Encoder;
 use crate::common::{GetAllDocumentsOptions, Server};
+use crate::json;
 
 #[actix_rt::test]
 async fn error_document_update_create_index_bad_uid() {
@@ -84,7 +85,13 @@ async fn update_document() {
 
     let (response, code) = index.get_document(1, None).await;
     assert_eq!(code, 200);
-    assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
+    snapshot!(response, @r###"
+    {
+      "doc_id": 1,
+      "content": "foo",
+      "other": "bar"
+    }
+    "###);
 }
 
 #[actix_rt::test]
@@ -122,7 +129,13 @@ async fn update_document_gzip_encoded() {
 
     let (response, code) = index.get_document(1, None).await;
     assert_eq!(code, 200);
-    assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
+    snapshot!(response, @r###"
+    {
+      "doc_id": 1,
+      "content": "foo",
+      "other": "bar"
+    }
+    "###);
 }
 
 #[actix_rt::test]
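Swapping the stringified `assert_eq!` for `snapshot!(response, @r###"…"###)` trades a brittle byte-for-byte comparison for an inline snapshot: the expected value lives next to the assertion and can be re-accepted wholesale when serialization details change (meili_snap appears to wrap insta here; that is an inference from the `json_string!` redactions used elsewhere in the diff). A generic example of the same idiom with plain insta:

```rust
#[cfg(test)]
mod tests {
    use insta::assert_json_snapshot;

    #[test]
    fn document_roundtrip() {
        let doc = serde_json::json!({ "doc_id": 1, "content": "foo" });
        // The expected value is stored inline; `cargo insta review` updates it.
        assert_json_snapshot!(doc, @r###"
        {
          "doc_id": 1,
          "content": "foo"
        }
        "###);
    }
}
```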
[File diff suppressed because it is too large]
@@ -1,6 +1,5 @@
-use serde_json::json;
-
 use crate::common::Server;
+use crate::json;
 
 /// Feature name to test against.
 /// This will have to be changed by a different one when that feature is stabilized.
@@ -2,10 +2,10 @@ use actix_web::http::header::ContentType;
 use actix_web::test;
 use http::header::ACCEPT_ENCODING;
 use meili_snap::{json_string, snapshot};
-use serde_json::{json, Value};
 
 use crate::common::encoder::Encoder;
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 #[actix_rt::test]
 async fn create_index_no_primary_key() {
@@ -21,7 +21,7 @@ async fn create_index_no_primary_key() {
 
     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["type"], "indexCreation");
-    assert_eq!(response["details"]["primaryKey"], Value::Null);
+    assert_eq!(response["details"]["primaryKey"], json!(null));
 }
 
 #[actix_rt::test]
@@ -38,7 +38,7 @@ async fn create_index_with_gzip_encoded_request() {
 
     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["type"], "indexCreation");
-    assert_eq!(response["details"]["primaryKey"], Value::Null);
+    assert_eq!(response["details"]["primaryKey"], json!(null));
 }
 
 #[actix_rt::test]
@@ -86,7 +86,7 @@ async fn create_index_with_zlib_encoded_request() {
 
     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["type"], "indexCreation");
-    assert_eq!(response["details"]["primaryKey"], Value::Null);
+    assert_eq!(response["details"]["primaryKey"], json!(null));
 }
 
 #[actix_rt::test]
@@ -103,7 +103,7 @@ async fn create_index_with_brotli_encoded_request() {
 
     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["type"], "indexCreation");
-    assert_eq!(response["details"]["primaryKey"], Value::Null);
+    assert_eq!(response["details"]["primaryKey"], json!(null));
 }
 
 #[actix_rt::test]
@@ -136,7 +136,7 @@ async fn create_index_with_invalid_primary_key() {
 
     let (response, code) = index.get().await;
     assert_eq!(code, 200);
-    assert_eq!(response["primaryKey"], Value::Null);
+    assert_eq!(response["primaryKey"], json!(null));
 }
 
 #[actix_rt::test]
@@ -1,6 +1,5 @@
-use serde_json::json;
-
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn create_and_delete_index() {
@@ -1,7 +1,7 @@
 use meili_snap::*;
-use serde_json::json;
 
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn get_indexes_bad_offset() {
@@ -1,6 +1,5 @@
-use serde_json::json;
-
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn stats() {
@@ -1,9 +1,9 @@
-use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
 use crate::common::encoder::Encoder;
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn update_primary_key() {
@@ -1,8 +1,8 @@
 use meili_snap::*;
-use serde_json::json;
 
 use super::DOCUMENTS;
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn search_unexisting_index() {
@@ -1,8 +1,8 @@
 use meili_snap::snapshot;
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     json!([
@@ -1,8 +1,8 @@
 use insta::{allow_duplicates, assert_json_snapshot};
-use serde_json::json;
 
 use super::*;
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn formatted_contain_wildcard() {
@@ -1,7 +1,8 @@
+use meili_snap::{json_string, snapshot};
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     json!([
@@ -60,3 +61,59 @@ async fn geo_sort_with_geo_strings() {
     )
     .await;
 }
+
+#[actix_rt::test]
+async fn geo_bounding_box_with_string_and_number() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let documents = DOCUMENTS.clone();
+    index.update_settings_filterable_attributes(json!(["_geo"])).await;
+    index.update_settings_sortable_attributes(json!(["_geo"])).await;
+    index.add_documents(documents, None).await;
+    index.wait_task(2).await;
+
+    index
+        .search(
+            json!({
+                "filter": "_geoBoundingBox([89, 179], [-89, -179])",
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
+                {
+                  "hits": [
+                    {
+                      "id": 1,
+                      "name": "Taco Truck",
+                      "address": "444 Salsa Street, Burritoville",
+                      "type": "Mexican",
+                      "rating": 9,
+                      "_geo": {
+                        "lat": 34.0522,
+                        "lng": -118.2437
+                      }
+                    },
+                    {
+                      "id": 2,
+                      "name": "La Bella Italia",
+                      "address": "456 Elm Street, Townsville",
+                      "type": "Italian",
+                      "rating": 9,
+                      "_geo": {
+                        "lat": "45.4777599",
+                        "lng": "9.1967508"
+                      }
+                    }
+                  ],
+                  "query": "",
+                  "processingTimeMs": "[time]",
+                  "limit": 20,
+                  "offset": 0,
+                  "estimatedTotalHits": 2
+                }
+                "###);
+            },
+        )
+        .await;
+}
|
|||||||
@@ -10,9 +10,9 @@ mod pagination;
 mod restrict_searchable;
 
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     json!([
@@ -1104,3 +1104,59 @@ async fn camelcased_words() {
         })
         .await;
 }
+
+#[actix_rt::test]
+async fn simple_search_with_strange_synonyms() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await;
+    let r = index.wait_task(0).await;
+    meili_snap::snapshot!(r["status"], @r###""succeeded""###);
+
+    let documents = DOCUMENTS.clone();
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+
+    index
+        .search(json!({"q": "How to train"}), |response, code| {
+            meili_snap::snapshot!(code, @"200 OK");
+            meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
+            [
+              {
+                "title": "How to Train Your Dragon: The Hidden World",
+                "id": "166428"
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "How & train"}), |response, code| {
+            meili_snap::snapshot!(code, @"200 OK");
+            meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
+            [
+              {
+                "title": "How to Train Your Dragon: The Hidden World",
+                "id": "166428"
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "to"}), |response, code| {
+            meili_snap::snapshot!(code, @"200 OK");
+            meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
+            [
+              {
+                "title": "How to Train Your Dragon: The Hidden World",
+                "id": "166428"
+              }
+            ]
+            "###);
+        })
+        .await;
+}
@@ -1,8 +1,8 @@
 use meili_snap::{json_string, snapshot};
-use serde_json::json;
 
 use super::{DOCUMENTS, NESTED_DOCUMENTS};
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn search_empty_list() {
@@ -1,6 +1,5 @@
-use serde_json::json;
-
 use crate::common::Server;
+use crate::json;
 use crate::search::DOCUMENTS;
 
 #[actix_rt::test]
@@ -1,9 +1,9 @@
 use meili_snap::{json_string, snapshot};
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 
 use crate::common::index::Index;
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
     let index = server.index("test");
@@ -1,6 +1,5 @@
-use serde_json::json;
-
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn set_and_reset_distinct_attribute() {
@@ -1,7 +1,7 @@
 use meili_snap::*;
-use serde_json::json;
 
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn settings_bad_displayed_attributes() {
@@ -1,21 +1,24 @@
 use std::collections::HashMap;
 
 use once_cell::sync::Lazy;
-use serde_json::{json, Value};
 
-use crate::common::Server;
+use crate::common::{Server, Value};
+use crate::json;
 
 static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|| {
     let mut map = HashMap::new();
     map.insert("displayed_attributes", json!(["*"]));
     map.insert("searchable_attributes", json!(["*"]));
     map.insert("filterable_attributes", json!([]));
-    map.insert("distinct_attribute", json!(Value::Null));
+    map.insert("distinct_attribute", json!(null));
     map.insert(
         "ranking_rules",
         json!(["words", "typo", "proximity", "attribute", "sort", "exactness"]),
     );
     map.insert("stop_words", json!([]));
+    map.insert("non_separator_tokens", json!([]));
+    map.insert("separator_tokens", json!([]));
+    map.insert("dictionary", json!([]));
     map.insert("synonyms", json!({}));
     map.insert(
         "faceting",
@@ -51,7 +54,7 @@ async fn get_settings() {
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     let settings = response.as_object().unwrap();
-    assert_eq!(settings.keys().len(), 11);
+    assert_eq!(settings.keys().len(), 14);
     assert_eq!(settings["displayedAttributes"], json!(["*"]));
    assert_eq!(settings["searchableAttributes"], json!(["*"]));
     assert_eq!(settings["filterableAttributes"], json!([]));
@@ -62,6 +65,9 @@ async fn get_settings() {
         json!(["words", "typo", "proximity", "attribute", "sort", "exactness"])
     );
     assert_eq!(settings["stopWords"], json!([]));
+    assert_eq!(settings["nonSeparatorTokens"], json!([]));
+    assert_eq!(settings["separatorTokens"], json!([]));
+    assert_eq!(settings["dictionary"], json!([]));
     assert_eq!(
         settings["faceting"],
         json!({
@@ -223,7 +229,7 @@ macro_rules! test_setting_routes {
                     .chars()
                     .map(|c| if c == '_' { '-' } else { c })
                     .collect::<String>());
-                let (response, code) = server.service.$write_method(url, serde_json::Value::Null).await;
+                let (response, code) = server.service.$write_method(url, serde_json::Value::Null.into()).await;
                 assert_eq!(code, 202, "{}", response);
                 server.index("").wait_task(0).await;
                 let (response, code) = server.index("test").get().await;
@@ -272,6 +278,9 @@ test_setting_routes!(
     searchable_attributes put,
     distinct_attribute put,
     stop_words put,
+    separator_tokens put,
+    non_separator_tokens put,
+    dictionary put,
     ranking_rules put,
     synonyms put,
     pagination patch,
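
Note: each new entry in `test_setting_routes!` gets its URL derived from the setting name via the underscore-to-hyphen mapping visible in the `@@ -223` hunk above, so `non_separator_tokens put` exercises `PUT /indexes/test/settings/non-separator-tokens`. The derivation in isolation (a sketch; the route prefix assumes the standard Meilisearch settings path):

fn setting_route(name: &str) -> String {
    // Same transformation as the macro: snake_case setting name -> kebab-case path.
    let kebab: String = name.chars().map(|c| if c == '_' { '-' } else { c }).collect();
    format!("/indexes/test/settings/{}", kebab)
}

fn main() {
    assert_eq!(
        setting_route("non_separator_tokens"),
        "/indexes/test/settings/non-separator-tokens"
    );
}
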
@@ -1,3 +1,4 @@
 mod distinct;
 mod errors;
 mod get_settings;
+mod tokenizer_customization;
meilisearch/tests/settings/tokenizer_customization.rs (new file, 467 lines)
@@ -0,0 +1,467 @@
+use meili_snap::{json_string, snapshot};
+
+use crate::common::Server;
+use crate::json;
+
+#[actix_rt::test]
+async fn set_and_reset() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let (_response, _code) = index
+        .update_settings(json!({
+            "nonSeparatorTokens": ["#", "&"],
+            "separatorTokens": ["&sep", "<br/>"],
+            "dictionary": ["J.R.R.", "J. R. R."],
+        }))
+        .await;
+    index.wait_task(0).await;
+
+    let (response, _) = index.settings().await;
+    snapshot!(json_string!(response["nonSeparatorTokens"]), @r###"
+    [
+      "#",
+      "&"
+    ]
+    "###);
+    snapshot!(json_string!(response["separatorTokens"]), @r###"
+    [
+      "&sep",
+      "<br/>"
+    ]
+    "###);
+    snapshot!(json_string!(response["dictionary"]), @r###"
+    [
+      "J. R. R.",
+      "J.R.R."
+    ]
+    "###);
+
+    index
+        .update_settings(json!({
+            "nonSeparatorTokens": null,
+            "separatorTokens": null,
+            "dictionary": null,
+        }))
+        .await;
+
+    index.wait_task(1).await;
+
+    let (response, _) = index.settings().await;
+    snapshot!(json_string!(response["nonSeparatorTokens"]), @"[]");
+    snapshot!(json_string!(response["separatorTokens"]), @"[]");
+    snapshot!(json_string!(response["dictionary"]), @"[]");
+}
+
+#[actix_rt::test]
+async fn set_and_search() {
+    let documents = json!([
+        {
+            "id": 1,
+            "content": "Mac & cheese",
+        },
+        {
+            "id": 2,
+            "content": "G#D#G#D#G#C#D#G#C#",
+        },
+        {
+            "id": 3,
+            "content": "Mac&sep&&sepcheese",
+        },
+    ]);
+
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index.add_documents(documents, None).await;
+    index.wait_task(0).await;
+
+    let (_response, _code) = index
+        .update_settings(json!({
+            "nonSeparatorTokens": ["#", "&"],
+            "separatorTokens": ["<br/>", "&sep"],
+            "dictionary": ["#", "A#", "B#", "C#", "D#", "E#", "F#", "G#"],
+        }))
+        .await;
+    index.wait_task(1).await;
+
+    index
+        .search(json!({"q": "&", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 1,
+                "content": "Mac & cheese",
+                "_formatted": {
+                  "id": "1",
+                  "content": "Mac <em>&</em> cheese"
+                }
+              },
+              {
+                "id": 3,
+                "content": "Mac&sep&&sepcheese",
+                "_formatted": {
+                  "id": "3",
+                  "content": "Mac&sep<em>&</em>&sepcheese"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(
+            json!({"q": "Mac & cheese", "attributesToHighlight": ["content"]}),
+            |response, code| {
+                snapshot!(code, @"200 OK");
+                snapshot!(json_string!(response["hits"]), @r###"
+                [
+                  {
+                    "id": 1,
+                    "content": "Mac & cheese",
+                    "_formatted": {
+                      "id": "1",
+                      "content": "<em>Mac</em> <em>&</em> <em>cheese</em>"
+                    }
+                  },
+                  {
+                    "id": 3,
+                    "content": "Mac&sep&&sepcheese",
+                    "_formatted": {
+                      "id": "3",
+                      "content": "<em>Mac</em>&sep<em>&</em>&sep<em>cheese</em>"
+                    }
+                  }
+                ]
+                "###);
+            },
+        )
+        .await;
+
+    index
+        .search(
+            json!({"q": "Mac&sep&&sepcheese", "attributesToHighlight": ["content"]}),
+            |response, code| {
+                snapshot!(code, @"200 OK");
+                snapshot!(json_string!(response["hits"]), @r###"
+                [
+                  {
+                    "id": 1,
+                    "content": "Mac & cheese",
+                    "_formatted": {
+                      "id": "1",
+                      "content": "<em>Mac</em> <em>&</em> <em>cheese</em>"
+                    }
+                  },
+                  {
+                    "id": 3,
+                    "content": "Mac&sep&&sepcheese",
+                    "_formatted": {
+                      "id": "3",
+                      "content": "<em>Mac</em>&sep<em>&</em>&sep<em>cheese</em>"
+                    }
+                  }
+                ]
+                "###);
+            },
+        )
+        .await;
+
+    index
+        .search(json!({"q": "C#D#G", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 2,
+                "content": "G#D#G#D#G#C#D#G#C#",
+                "_formatted": {
+                  "id": "2",
+                  "content": "<em>G</em>#<em>D#</em><em>G</em>#<em>D#</em><em>G</em>#<em>C#</em><em>D#</em><em>G</em>#<em>C#</em>"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "#", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @"[]");
+        })
+        .await;
+}
+
+#[actix_rt::test]
+async fn advanced_synergies() {
+    let documents = json!([
+        {
+            "id": 1,
+            "content": "J.R.R. Tolkien",
+        },
+        {
+            "id": 2,
+            "content": "J. R. R. Tolkien",
+        },
+        {
+            "id": 3,
+            "content": "jrr Tolkien",
+        },
+        {
+            "id": 4,
+            "content": "J.K. Rowlings",
+        },
+        {
+            "id": 5,
+            "content": "J. K. Rowlings",
+        },
+        {
+            "id": 6,
+            "content": "jk Rowlings",
+        },
+    ]);
+
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    index.add_documents(documents, None).await;
+    index.wait_task(0).await;
+
+    let (_response, _code) = index
+        .update_settings(json!({
+            "dictionary": ["J.R.R.", "J. R. R."],
+            "synonyms": {
+                "J.R.R.": ["jrr", "J. R. R."],
+                "J. R. R.": ["jrr", "J.R.R."],
+                "jrr": ["J.R.R.", "J. R. R."],
+                "J.K.": ["jk", "J. K."],
+                "J. K.": ["jk", "J.K."],
+                "jk": ["J.K.", "J. K."],
+            }
+        }))
+        .await;
+    index.wait_task(1).await;
+
+    index
+        .search(json!({"q": "J.R.R.", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 1,
+                "content": "J.R.R. Tolkien",
+                "_formatted": {
+                  "id": "1",
+                  "content": "<em>J.R.R.</em> Tolkien"
+                }
+              },
+              {
+                "id": 2,
+                "content": "J. R. R. Tolkien",
+                "_formatted": {
+                  "id": "2",
+                  "content": "<em>J. R. R.</em> Tolkien"
+                }
+              },
+              {
+                "id": 3,
+                "content": "jrr Tolkien",
+                "_formatted": {
+                  "id": "3",
+                  "content": "<em>jrr</em> Tolkien"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "jrr", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 3,
+                "content": "jrr Tolkien",
+                "_formatted": {
+                  "id": "3",
+                  "content": "<em>jrr</em> Tolkien"
+                }
+              },
+              {
+                "id": 1,
+                "content": "J.R.R. Tolkien",
+                "_formatted": {
+                  "id": "1",
+                  "content": "<em>J.R.R.</em> Tolkien"
+                }
+              },
+              {
+                "id": 2,
+                "content": "J. R. R. Tolkien",
+                "_formatted": {
+                  "id": "2",
+                  "content": "<em>J. R. R.</em> Tolkien"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "J. R. R.", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 2,
+                "content": "J. R. R. Tolkien",
+                "_formatted": {
+                  "id": "2",
+                  "content": "<em>J. R. R.</em> Tolkien"
+                }
+              },
+              {
+                "id": 1,
+                "content": "J.R.R. Tolkien",
+                "_formatted": {
+                  "id": "1",
+                  "content": "<em>J.R.R.</em> Tolkien"
+                }
+              },
+              {
+                "id": 3,
+                "content": "jrr Tolkien",
+                "_formatted": {
+                  "id": "3",
+                  "content": "<em>jrr</em> Tolkien"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    // Only update dictionary, the synonyms should be recomputed.
+    let (_response, _code) = index
+        .update_settings(json!({
+            "dictionary": ["J.R.R.", "J. R. R.", "J.K.", "J. K."],
+        }))
+        .await;
+    index.wait_task(2).await;
+
+    index
+        .search(json!({"q": "jk", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 6,
+                "content": "jk Rowlings",
+                "_formatted": {
+                  "id": "6",
+                  "content": "<em>jk</em> Rowlings"
+                }
+              },
+              {
+                "id": 4,
+                "content": "J.K. Rowlings",
+                "_formatted": {
+                  "id": "4",
+                  "content": "<em>J.K.</em> Rowlings"
+                }
+              },
+              {
+                "id": 5,
+                "content": "J. K. Rowlings",
+                "_formatted": {
+                  "id": "5",
+                  "content": "<em>J. K.</em> Rowlings"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "J.K.", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 4,
+                "content": "J.K. Rowlings",
+                "_formatted": {
+                  "id": "4",
+                  "content": "<em>J.K.</em> Rowlings"
+                }
+              },
+              {
+                "id": 5,
+                "content": "J. K. Rowlings",
+                "_formatted": {
+                  "id": "5",
+                  "content": "<em>J. K.</em> Rowlings"
+                }
+              },
+              {
+                "id": 6,
+                "content": "jk Rowlings",
+                "_formatted": {
+                  "id": "6",
+                  "content": "<em>jk</em> Rowlings"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    index
+        .search(json!({"q": "J. K.", "attributesToHighlight": ["content"]}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "id": 5,
+                "content": "J. K. Rowlings",
+                "_formatted": {
+                  "id": "5",
+                  "content": "<em>J. K.</em> Rowlings"
+                }
+              },
+              {
+                "id": 4,
+                "content": "J.K. Rowlings",
+                "_formatted": {
+                  "id": "4",
+                  "content": "<em>J.K.</em> Rowlings"
+                }
+              },
+              {
+                "id": 6,
+                "content": "jk Rowlings",
+                "_formatted": {
+                  "id": "6",
+                  "content": "<em>jk</em> Rowlings"
+                }
+              },
+              {
+                "id": 2,
+                "content": "J. R. R. Tolkien",
+                "_formatted": {
+                  "id": "2",
+                  "content": "<em>J. R.</em> R. Tolkien"
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+}
@@ -1,11 +1,13 @@
 use std::time::Duration;
 
 use actix_rt::time::sleep;
+use meili_snap::{json_string, snapshot};
 use meilisearch::option::ScheduleSnapshot;
 use meilisearch::Opt;
 
 use crate::common::server::default_settings;
 use crate::common::{GetAllDocumentsOptions, Server};
+use crate::json;
 
 macro_rules! verify_snapshot {
     (
@@ -44,7 +46,7 @@ async fn perform_snapshot() {
 
     let index = server.index("test");
     index
-        .update_settings(serde_json::json! ({
+        .update_settings(json! ({
         "searchableAttributes": [],
         }))
         .await;
@@ -90,3 +92,95 @@ async fn perform_snapshot() {
         server.index("test1").settings(),
     );
 }
+
+#[actix_rt::test]
+async fn perform_on_demand_snapshot() {
+    let temp = tempfile::tempdir().unwrap();
+    let snapshot_dir = tempfile::tempdir().unwrap();
+
+    let options =
+        Opt { snapshot_dir: snapshot_dir.path().to_owned(), ..default_settings(temp.path()) };
+
+    let server = Server::new_with_options(options).await.unwrap();
+
+    let index = server.index("catto");
+    index
+        .update_settings(json! ({
+        "searchableAttributes": [],
+        }))
+        .await;
+
+    index.load_test_set().await;
+
+    server.index("doggo").create(Some("bone")).await;
+    index.wait_task(2).await;
+
+    server.index("doggo").create(Some("bone")).await;
+    index.wait_task(2).await;
+
+    let (task, code) = server.create_snapshot().await;
+    snapshot!(code, @"202 Accepted");
+    snapshot!(json_string!(task, { ".enqueuedAt" => "[date]" }), @r###"
+    {
+      "taskUid": 4,
+      "indexUid": null,
+      "status": "enqueued",
+      "type": "snapshotCreation",
+      "enqueuedAt": "[date]"
+    }
+    "###);
+    let task = index.wait_task(task.uid()).await;
+    snapshot!(json_string!(task, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
+    {
+      "uid": 4,
+      "indexUid": null,
+      "status": "succeeded",
+      "type": "snapshotCreation",
+      "canceledBy": null,
+      "error": null,
+      "duration": "[duration]",
+      "enqueuedAt": "[date]",
+      "startedAt": "[date]",
+      "finishedAt": "[date]"
+    }
+    "###);
+
+    let temp = tempfile::tempdir().unwrap();
+
+    let snapshots: Vec<String> = std::fs::read_dir(&snapshot_dir)
+        .unwrap()
+        .map(|entry| entry.unwrap().path().file_name().unwrap().to_str().unwrap().to_string())
+        .collect();
+    meili_snap::snapshot!(format!("{snapshots:?}"), @r###"["db.snapshot"]"###);
+
+    let snapshot_path = snapshot_dir.path().to_owned().join("db.snapshot");
+    #[cfg_attr(windows, allow(unused))]
+    let snapshot_meta = std::fs::metadata(&snapshot_path).unwrap();
+
+    #[cfg(unix)]
+    {
+        use std::os::unix::fs::PermissionsExt;
+        let mode = snapshot_meta.permissions().mode();
+        // rwxrwxrwx
+        meili_snap::snapshot!(format!("{:b}", mode), @"1000000100100100");
+    }
+
+    let options = Opt { import_snapshot: Some(snapshot_path), ..default_settings(temp.path()) };
+
+    let snapshot_server = Server::new_with_options(options).await.unwrap();
+
+    verify_snapshot!(server, snapshot_server, |server| =>
+        server.list_indexes(None, None),
+        // for some reason the db sizes differ. this may be due to the compaction options we have
+        // set when performing the snapshot
+        //server.stats(),
+
+        // The original instance contains the snapshotCreation task, while the snapshotted-instance does not. For this reason we need to compare the task queue **after** the task 4
+        server.tasks_filter("?from=2"),
+
+        server.index("catto").get_all_documents(GetAllDocumentsOptions::default()),
+        server.index("catto").settings(),
+        server.index("doggo").get_all_documents(GetAllDocumentsOptions::default()),
+        server.index("doggo").settings(),
+    );
+}
@@ -1,8 +1,8 @@
-use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn get_settings_unexisting_index() {
@@ -1,7 +1,7 @@
 use meili_snap::*;
-use serde_json::json;
 
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn swap_indexes_bad_format() {
@@ -1,9 +1,9 @@
 mod errors;
 
 use meili_snap::{json_string, snapshot};
-use serde_json::json;
 
 use crate::common::{GetAllDocumentsOptions, Server};
+use crate::json;
 
 #[actix_rt::test]
 async fn swap_indexes() {
@@ -1,11 +1,11 @@
 mod errors;
 
 use meili_snap::insta::assert_json_snapshot;
-use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
 use crate::common::Server;
+use crate::json;
 
 #[actix_rt::test]
 async fn error_get_unexisting_task_status() {
@@ -33,7 +33,7 @@ async fn get_task_status() {
     index.create(None).await;
     index
         .add_documents(
-            serde_json::json!([{
+            json!([{
                 "id": 1,
                 "content": "foobar",
             }]),
@@ -17,10 +17,10 @@ bincode = "1.3.3"
 bstr = "1.4.0"
 bytemuck = { version = "1.13.1", features = ["extern_crate_alloc"] }
 byteorder = "1.4.3"
-charabia = { version = "0.8.2", default-features = false }
+charabia = { version = "0.8.3", default-features = false }
 concat-arrays = "0.1.2"
 crossbeam-channel = "0.5.8"
-deserr = "0.5.0"
+deserr = { version = "0.6.0", features = ["actix-web"]}
 either = { version = "1.8.1", features = ["serde"] }
 flatten-serde-json = { path = "../flatten-serde-json" }
 fst = "0.4.7"
@@ -32,18 +32,18 @@ grenad = { version = "0.4.4", default-features = false, features = [
 heed = { git = "https://github.com/meilisearch/heed", tag = "v0.12.7", default-features = false, features = [
     "lmdb", "read-txn-no-tls"
 ] }
-indexmap = { version = "1.9.3", features = ["serde"] }
+indexmap = { version = "2.0.0", features = ["serde"] }
 instant-distance = { version = "0.6.1", features = ["with-serde"] }
 json-depth-checker = { path = "../json-depth-checker" }
 levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] }
-memmap2 = "0.5.10"
+memmap2 = "0.7.1"
 obkv = "0.2.0"
 once_cell = "1.17.1"
 ordered-float = "3.6.0"
 rand_pcg = { version = "0.3.1", features = ["serde1"] }
 rayon = "1.7.0"
 roaring = "0.10.1"
-rstar = { version = "0.10.0", features = ["serde"] }
+rstar = { version = "0.11.0", features = ["serde"] }
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }
 slice-group-by = "0.3.0"
@@ -63,7 +63,10 @@ uuid = { version = "1.3.1", features = ["v4"] }
 filter-parser = { path = "../filter-parser" }
 
 # documents words self-join
-itertools = "0.10.5"
+itertools = "0.11.0"
+
+# profiling
+puffin = "0.16.0"
 
 # logging
 log = "0.4.17"
@@ -71,11 +74,12 @@ logging_timer = "1.1.0"
 csv = "1.2.1"
 
 [dev-dependencies]
-mimalloc = { version = "0.1.29", default-features = false }
+mimalloc = { version = "0.1.37", default-features = false }
 big_s = "1.0.2"
 insta = "1.29.0"
 maplit = "1.0.2"
 md5 = "0.7.0"
+meili-snap = { path = "../meili-snap" }
 rand = { version = "0.8.5", features = ["small_rng"] }
 
 [features]
@@ -122,22 +122,28 @@ only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and undersco
         .field,
         match .valid_fields.is_empty() {
             true => "This index does not have configured sortable attributes.".to_string(),
-            false => format!("Available sortable attributes are: `{}`.",
-                valid_fields.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
+            false => format!("Available sortable attributes are: `{}{}`.",
+                valid_fields.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
+                .hidden_fields.then_some(", <..hidden-attributes>").unwrap_or(""),
             ),
         }
     )]
-    InvalidSortableAttribute { field: String, valid_fields: BTreeSet<String> },
+    InvalidSortableAttribute { field: String, valid_fields: BTreeSet<String>, hidden_fields: bool },
     #[error("Attribute `{}` is not facet-searchable. {}",
         .field,
         match .valid_fields.is_empty() {
            true => "This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.".to_string(),
-            false => format!("Available facet-searchable attributes are: `{}`. To make it facet-searchable add it to the `filterableAttributes` index settings.",
-                valid_fields.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
+            false => format!("Available facet-searchable attributes are: `{}{}`. To make it facet-searchable add it to the `filterableAttributes` index settings.",
+                valid_fields.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
+                .hidden_fields.then_some(", <..hidden-attributes>").unwrap_or(""),
            ),
         }
     )]
-    InvalidFacetSearchFacetName { field: String, valid_fields: BTreeSet<String> },
+    InvalidFacetSearchFacetName {
+        field: String,
+        valid_fields: BTreeSet<String>,
+        hidden_fields: bool,
+    },
     #[error("Attribute `{}` is not searchable. Available searchable attributes are: `{}{}`.",
         .field,
         .valid_fields.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
@@ -340,8 +346,11 @@ fn conditionally_lookup_for_error_message() {
     ];
 
     for (list, suffix) in messages {
-        let err =
-            UserError::InvalidSortableAttribute { field: "name".to_string(), valid_fields: list };
+        let err = UserError::InvalidSortableAttribute {
+            field: "name".to_string(),
+            valid_fields: list,
+            hidden_fields: false,
+        };
 
         assert_eq!(err.to_string(), format!("{} {}", prefix, suffix));
     }
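
Note: the new `hidden_fields` flag only changes the rendered suffix of these messages. The `bool::then_some` trick from the format arguments, shown standalone (function name and values are illustrative):

fn sortable_attributes_message(valid_fields: &[&str], hidden_fields: bool) -> String {
    // Joined visible fields, then an optional marker when some valid attributes
    // were withheld because they are not displayed.
    format!(
        "Available sortable attributes are: `{}{}`.",
        valid_fields.join(", "),
        hidden_fields.then_some(", <..hidden-attributes>").unwrap_or(""),
    )
}

fn main() {
    assert_eq!(
        sortable_attributes_message(&["age", "name"], true),
        "Available sortable attributes are: `age, name, <..hidden-attributes>`."
    );
}
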
@@ -60,12 +60,16 @@ impl CboRoaringBitmapCodec {
     /// if the merged values length is under the threshold, values are directly
     /// serialized in the buffer else a RoaringBitmap is created from the
     /// values and is serialized in the buffer.
-    pub fn merge_into(slices: &[Cow<[u8]>], buffer: &mut Vec<u8>) -> io::Result<()> {
+    pub fn merge_into<I, A>(slices: I, buffer: &mut Vec<u8>) -> io::Result<()>
+    where
+        I: IntoIterator<Item = A>,
+        A: AsRef<[u8]>,
+    {
         let mut roaring = RoaringBitmap::new();
         let mut vec = Vec::new();
 
         for bytes in slices {
-            if bytes.len() <= THRESHOLD * size_of::<u32>() {
+            if bytes.as_ref().len() <= THRESHOLD * size_of::<u32>() {
                 let mut reader = bytes.as_ref();
                 while let Ok(integer) = reader.read_u32::<NativeEndian>() {
                     vec.push(integer);
@@ -85,7 +89,7 @@ impl CboRoaringBitmapCodec {
                 }
             } else {
                 // We can unwrap safely because the vector is sorted upper.
-                let roaring = RoaringBitmap::from_sorted_iter(vec.into_iter()).unwrap();
+                let roaring = RoaringBitmap::from_sorted_iter(vec).unwrap();
                 roaring.serialize_into(buffer)?;
             }
         } else {
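
Note: loosening `merge_into` from `&[Cow<[u8]>]` to any `IntoIterator` of `AsRef<[u8]>` items lets callers pass slices of `Cow`, `Vec<Vec<u8>>`, or plain byte slices without first building a `Cow` vector. A sketch of the call-site flexibility (the free function below only mimics the new signature, not the codec's real merge logic):

use std::borrow::Cow;
use std::io;

fn merge_into<I, A>(slices: I, buffer: &mut Vec<u8>) -> io::Result<()>
where
    I: IntoIterator<Item = A>,
    A: AsRef<[u8]>,
{
    // Placeholder body; the real method deduplicates docids and serializes
    // them conditionally, as the hunks above show.
    for bytes in slices {
        buffer.extend_from_slice(bytes.as_ref());
    }
    Ok(())
}

fn main() -> io::Result<()> {
    let mut buffer = Vec::new();
    // All of these now satisfy the bounds, where only `&[Cow<[u8]>]` did before.
    merge_into(&[Cow::Borrowed(&[1u8, 2][..])], &mut buffer)?;
    merge_into(vec![vec![3u8, 4]], &mut buffer)?;
    merge_into([[5u8, 6].as_slice()], &mut buffer)?;
    Ok(())
}
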
@@ -1,5 +1,5 @@
 use std::borrow::Cow;
-use std::collections::{BTreeSet, HashMap, HashSet};
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
 use std::fs::File;
 use std::mem::size_of;
 use std::path::Path;
@@ -61,8 +61,12 @@ pub mod main_key {
     pub const USER_DEFINED_SEARCHABLE_FIELDS_KEY: &str = "user-defined-searchable-fields";
     pub const SOFT_EXTERNAL_DOCUMENTS_IDS_KEY: &str = "soft-external-documents-ids";
     pub const STOP_WORDS_KEY: &str = "stop-words";
+    pub const NON_SEPARATOR_TOKENS_KEY: &str = "non-separator-tokens";
+    pub const SEPARATOR_TOKENS_KEY: &str = "separator-tokens";
+    pub const DICTIONARY_KEY: &str = "dictionary";
     pub const STRING_FACETED_DOCUMENTS_IDS_PREFIX: &str = "string-faceted-documents-ids";
     pub const SYNONYMS_KEY: &str = "synonyms";
+    pub const USER_DEFINED_SYNONYMS_KEY: &str = "user-defined-synonyms";
     pub const WORDS_FST_KEY: &str = "words-fst";
     pub const WORDS_PREFIXES_FST_KEY: &str = "words-prefixes-fst";
     pub const CREATED_AT_KEY: &str = "created-at";
@@ -115,16 +119,16 @@ pub struct Index {
     pub(crate) main: PolyDatabase,
 
     /// A word and all the documents ids containing the word.
-    pub word_docids: Database<Str, RoaringBitmapCodec>,
+    pub word_docids: Database<Str, CboRoaringBitmapCodec>,
 
     /// A word and all the documents ids containing the word, from attributes for which typos are not allowed.
-    pub exact_word_docids: Database<Str, RoaringBitmapCodec>,
+    pub exact_word_docids: Database<Str, CboRoaringBitmapCodec>,
 
     /// A prefix of word and all the documents ids containing this prefix.
-    pub word_prefix_docids: Database<Str, RoaringBitmapCodec>,
+    pub word_prefix_docids: Database<Str, CboRoaringBitmapCodec>,
 
     /// A prefix of word and all the documents ids containing this prefix, from attributes for which typos are not allowed.
-    pub exact_word_prefix_docids: Database<Str, RoaringBitmapCodec>,
+    pub exact_word_prefix_docids: Database<Str, CboRoaringBitmapCodec>,
 
     /// Maps the proximity between a pair of words with all the docids where this relation appears.
     pub word_pair_proximity_docids: Database<U8StrStrCodec, CboRoaringBitmapCodec>,
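
Note: these four databases move from a plain roaring-bitmap codec to the conditional one already used elsewhere in milli — small docid sets are stored as raw native-endian `u32`s and only larger ones pay for a serialized `RoaringBitmap`, which is exactly what the `merge_into` hunk above reads back. A standalone sketch of that encode decision (the threshold value here is illustrative, not milli's):

use byteorder::{NativeEndian, WriteBytesExt};
use roaring::RoaringBitmap;

// Illustrative threshold; the real constant lives next to the codec in milli.
const THRESHOLD: u64 = 7;

/// Serialize a docid set the "conditional" way: raw u32s when small,
/// a full roaring bitmap otherwise.
fn serialize_cbo(ids: &RoaringBitmap, buffer: &mut Vec<u8>) -> std::io::Result<()> {
    if ids.len() <= THRESHOLD {
        for id in ids {
            buffer.write_u32::<NativeEndian>(id)?;
        }
    } else {
        ids.serialize_into(buffer)?;
    }
    Ok(())
}
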
@@ -651,6 +655,26 @@ impl Index {
         }
     }
 
+    /* remove hidden fields */
+    pub fn remove_hidden_fields(
+        &self,
+        rtxn: &RoTxn,
+        fields: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> Result<(BTreeSet<String>, bool)> {
+        let mut valid_fields =
+            fields.into_iter().map(|f| f.as_ref().to_string()).collect::<BTreeSet<String>>();
+
+        let fields_len = valid_fields.len();
+
+        if let Some(dn) = self.displayed_fields(rtxn)? {
+            let displayable_names = dn.iter().map(|s| s.to_string()).collect();
+            valid_fields = &valid_fields & &displayable_names;
+        }
+
+        let hidden_fields = fields_len > valid_fields.len();
+        Ok((valid_fields, hidden_fields))
+    }
+
     /* searchable fields */
 
     /// Write the user defined searchable fields and generate the real searchable fields from the specified fields ids map.
@@ -1055,18 +1079,116 @@ impl Index {
         }
     }
 
+    /* non separator tokens */
+
+    pub(crate) fn put_non_separator_tokens(
+        &self,
+        wtxn: &mut RwTxn,
+        set: &BTreeSet<String>,
+    ) -> heed::Result<()> {
+        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::NON_SEPARATOR_TOKENS_KEY, set)
+    }
+
+    pub(crate) fn delete_non_separator_tokens(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+        self.main.delete::<_, Str>(wtxn, main_key::NON_SEPARATOR_TOKENS_KEY)
+    }
+
+    pub fn non_separator_tokens(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
+        Ok(self.main.get::<_, Str, SerdeBincode<BTreeSet<String>>>(
+            rtxn,
+            main_key::NON_SEPARATOR_TOKENS_KEY,
+        )?)
+    }
+
+    /* separator tokens */
+
+    pub(crate) fn put_separator_tokens(
+        &self,
+        wtxn: &mut RwTxn,
+        set: &BTreeSet<String>,
+    ) -> heed::Result<()> {
+        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::SEPARATOR_TOKENS_KEY, set)
+    }
+
+    pub(crate) fn delete_separator_tokens(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+        self.main.delete::<_, Str>(wtxn, main_key::SEPARATOR_TOKENS_KEY)
+    }
+
+    pub fn separator_tokens(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
+        Ok(self
+            .main
+            .get::<_, Str, SerdeBincode<BTreeSet<String>>>(rtxn, main_key::SEPARATOR_TOKENS_KEY)?)
+    }
+
+    /* separators easing method */
+
+    pub fn allowed_separators(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
+        let default_separators =
+            charabia::separators::DEFAULT_SEPARATORS.iter().map(|s| s.to_string());
+        let mut separators: Option<BTreeSet<_>> = None;
+        if let Some(mut separator_tokens) = self.separator_tokens(rtxn)? {
+            separator_tokens.extend(default_separators.clone());
+            separators = Some(separator_tokens);
+        }
+
+        if let Some(non_separator_tokens) = self.non_separator_tokens(rtxn)? {
+            separators = separators
+                .or_else(|| Some(default_separators.collect()))
+                .map(|separators| &separators - &non_separator_tokens);
+        }
+
+        Ok(separators)
+    }
+
+    /* dictionary */
+
+    pub(crate) fn put_dictionary(
+        &self,
+        wtxn: &mut RwTxn,
+        set: &BTreeSet<String>,
+    ) -> heed::Result<()> {
+        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::DICTIONARY_KEY, set)
+    }
+
+    pub(crate) fn delete_dictionary(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+        self.main.delete::<_, Str>(wtxn, main_key::DICTIONARY_KEY)
+    }
+
+    pub fn dictionary(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
+        Ok(self
+            .main
+            .get::<_, Str, SerdeBincode<BTreeSet<String>>>(rtxn, main_key::DICTIONARY_KEY)?)
+    }
+
     /* synonyms */
 
     pub(crate) fn put_synonyms(
         &self,
         wtxn: &mut RwTxn,
         synonyms: &HashMap<Vec<String>, Vec<Vec<String>>>,
+        user_defined_synonyms: &BTreeMap<String, Vec<String>>,
     ) -> heed::Result<()> {
-        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::SYNONYMS_KEY, synonyms)
+        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::SYNONYMS_KEY, synonyms)?;
+        self.main.put::<_, Str, SerdeBincode<_>>(
+            wtxn,
+            main_key::USER_DEFINED_SYNONYMS_KEY,
+            user_defined_synonyms,
+        )
     }
 
     pub(crate) fn delete_synonyms(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
-        self.main.delete::<_, Str>(wtxn, main_key::SYNONYMS_KEY)
+        self.main.delete::<_, Str>(wtxn, main_key::SYNONYMS_KEY)?;
+        self.main.delete::<_, Str>(wtxn, main_key::USER_DEFINED_SYNONYMS_KEY)
+    }
+
+    pub fn user_defined_synonyms(
+        &self,
+        rtxn: &RoTxn,
+    ) -> heed::Result<BTreeMap<String, Vec<String>>> {
+        Ok(self
+            .main
+            .get::<_, Str, SerdeBincode<_>>(rtxn, main_key::USER_DEFINED_SYNONYMS_KEY)?
+            .unwrap_or_default())
     }
 
     pub fn synonyms(&self, rtxn: &RoTxn) -> heed::Result<HashMap<Vec<String>, Vec<Vec<String>>>> {
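
Note: `allowed_separators` above is plain set algebra — (user separators ∪ charabia defaults) − user non-separators, with `None` meaning the defaults are untouched. A self-contained illustration with `BTreeSet` (the token values are made up):

use std::collections::BTreeSet;

fn main() {
    // Stand-ins for charabia::separators::DEFAULT_SEPARATORS and the two
    // user-provided settings read from the index.
    let defaults: BTreeSet<String> = [" ", "-", "&"].iter().map(|s| s.to_string()).collect();
    let separator_tokens: BTreeSet<String> = ["<br/>"].iter().map(|s| s.to_string()).collect();
    let non_separator_tokens: BTreeSet<String> = ["&"].iter().map(|s| s.to_string()).collect();

    // (user separators ∪ defaults) − user non-separators
    let allowed: BTreeSet<String> = &(&separator_tokens | &defaults) - &non_separator_tokens;

    assert!(allowed.contains("<br/>")); // promoted to a separator by the user
    assert!(!allowed.contains("&")); // demoted to a non-separator
    assert!(allowed.contains("-")); // untouched default
}
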
@@ -1718,11 +1840,11 @@ pub(crate) mod tests {
             .unwrap();
         index
             .add_documents(documents!([
-                { "id": 0, "_geo": { "lat": 0, "lng": 0 } },
-                { "id": 1, "_geo": { "lat": 0, "lng": -175 } },
-                { "id": 2, "_geo": { "lat": 0, "lng": 175 } },
+                { "id": 0, "_geo": { "lat": "0", "lng": "0" } },
+                { "id": 1, "_geo": { "lat": 0, "lng": "-175" } },
+                { "id": 2, "_geo": { "lat": "0", "lng": 175 } },
                 { "id": 3, "_geo": { "lat": 85, "lng": 0 } },
-                { "id": 4, "_geo": { "lat": -85, "lng": 0 } },
+                { "id": 4, "_geo": { "lat": "-85", "lng": "0" } },
             ]))
             .unwrap();
 
@@ -97,7 +97,7 @@ const MAX_LMDB_KEY_LENGTH: usize = 500;
 ///
 /// This number is determined by the keys of the different facet databases
 /// and adding a margin of safety.
-pub const MAX_FACET_VALUE_LENGTH: usize = MAX_LMDB_KEY_LENGTH - 20;
+pub const MAX_FACET_VALUE_LENGTH: usize = MAX_LMDB_KEY_LENGTH - 32;
 
 /// The maximum length a word can be
 pub const MAX_WORD_LENGTH: usize = MAX_LMDB_KEY_LENGTH / 2;
@@ -293,15 +293,15 @@ pub fn normalize_facet(original: &str) -> String {
 #[derive(serde::Serialize, serde::Deserialize, Debug)]
 #[serde(transparent)]
 pub struct VectorOrArrayOfVectors {
-    #[serde(with = "either::serde_untagged")]
-    inner: either::Either<Vec<f32>, Vec<Vec<f32>>>,
+    #[serde(with = "either::serde_untagged_optional")]
+    inner: Option<either::Either<Vec<f32>, Vec<Vec<f32>>>>,
 }
 
 impl VectorOrArrayOfVectors {
-    pub fn into_array_of_vectors(self) -> Vec<Vec<f32>> {
-        match self.inner {
-            either::Either::Left(vector) => vec![vector],
-            either::Either::Right(vectors) => vectors,
+    pub fn into_array_of_vectors(self) -> Option<Vec<Vec<f32>>> {
+        match self.inner? {
+            either::Either::Left(vector) => Some(vec![vector]),
+            either::Either::Right(vectors) => Some(vectors),
         }
     }
 }
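
Note: switching to `either::serde_untagged_optional` and an `Option` field lets an explicit `null` deserialize cleanly instead of erroring, while both vector shapes keep working. A sketch of the three accepted inputs (mirrors the struct above; assumes the `either` crate with its `serde` feature):

use either::Either;

#[derive(serde::Deserialize, Debug)]
#[serde(transparent)]
struct VectorOrArrayOfVectors {
    #[serde(with = "either::serde_untagged_optional")]
    inner: Option<Either<Vec<f32>, Vec<Vec<f32>>>>,
}

fn main() {
    // `null` now maps to `None` instead of a deserialization error.
    let none: VectorOrArrayOfVectors = serde_json::from_str("null").unwrap();
    assert!(none.inner.is_none());

    // A single embedding and an array of embeddings both still parse.
    let single: VectorOrArrayOfVectors = serde_json::from_str("[0.1, 0.2]").unwrap();
    let many: VectorOrArrayOfVectors = serde_json::from_str("[[0.1], [0.2]]").unwrap();
    assert!(matches!(single.inner, Some(Either::Left(_))));
    assert!(matches!(many.inner, Some(Either::Right(_))));
}
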
@@ -280,9 +280,13 @@ impl<'a> SearchForFacetValues<'a> {
 
         let filterable_fields = index.filterable_fields(rtxn)?;
         if !filterable_fields.contains(&self.facet) {
+            let (valid_fields, hidden_fields) =
+                index.remove_hidden_fields(rtxn, filterable_fields)?;
+
             return Err(UserError::InvalidFacetSearchFacetName {
                 field: self.facet.clone(),
-                valid_fields: filterable_fields.into_iter().collect(),
+                valid_fields,
+                hidden_fields,
             }
             .into());
         }
@@ -91,11 +91,12 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
     /// Update the universes accordingly and inform the logger.
     macro_rules! back {
         () => {
-            assert!(
-                ranking_rule_universes[cur_ranking_rule_index].is_empty(),
-                "The ranking rule {} did not sort its bucket exhaustively",
-                ranking_rules[cur_ranking_rule_index].id()
-            );
+            // FIXME: temporarily disabled assert: see <https://github.com/meilisearch/meilisearch/pull/4013>
+            // assert!(
+            //     ranking_rule_universes[cur_ranking_rule_index].is_empty(),
+            //     "The ranking rule {} did not sort its bucket exhaustively",
+            //     ranking_rules[cur_ranking_rule_index].id()
+            // );
             logger.end_iteration_ranking_rule(
                 cur_ranking_rule_index,
                 ranking_rules[cur_ranking_rule_index].as_ref(),
@@ -11,9 +11,7 @@ use super::interner::Interned;
 use super::Word;
 use crate::heed_codec::{BytesDecodeOwned, StrBEU16Codec};
 use crate::update::{merge_cbo_roaring_bitmaps, MergeFn};
-use crate::{
-    CboRoaringBitmapCodec, CboRoaringBitmapLenCodec, Result, RoaringBitmapCodec, SearchContext,
-};
+use crate::{CboRoaringBitmapCodec, CboRoaringBitmapLenCodec, Result, SearchContext};
 
 /// A cache storing pointers to values in the LMDB databases.
 ///
@@ -168,7 +166,7 @@ impl<'ctx> SearchContext<'ctx> {
                 merge_cbo_roaring_bitmaps,
             )
         }
-        None => DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
+        None => DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
            self.txn,
            word,
            self.word_interner.get(word).as_str(),
@@ -182,7 +180,7 @@ impl<'ctx> SearchContext<'ctx> {
         &mut self,
         word: Interned<String>,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
+        DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
             self.txn,
             word,
             self.word_interner.get(word).as_str(),
@@ -230,7 +228,7 @@ impl<'ctx> SearchContext<'ctx> {
                 merge_cbo_roaring_bitmaps,
             )
         }
-        None => DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
+        None => DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
            self.txn,
            prefix,
            self.word_interner.get(prefix).as_str(),
@@ -244,7 +242,7 @@ impl<'ctx> SearchContext<'ctx> {
         &mut self,
         prefix: Interned<String>,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
+        DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
             self.txn,
             prefix,
             self.word_interner.get(prefix).as_str(),
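The recurring change in the four hunks above swaps `RoaringBitmapCodec` for `CboRoaringBitmapCodec` when reading word and prefix docids. As a rough illustration of the "conditional" encoding idea behind the Cbo codec (the threshold and byte layout below are my assumptions for demonstration, not milli's exact format): tiny sets are stored as raw integers, larger ones as a serialized roaring bitmap.

```rust
// Illustrative sketch of a conditional bitmap encoding; the constant and
// layout are assumptions, not milli's real on-disk format.
use roaring::RoaringBitmap;

const SMALL_SET_MAX: u64 = 7;

fn encode(bitmap: &RoaringBitmap) -> std::io::Result<Vec<u8>> {
    if bitmap.len() <= SMALL_SET_MAX {
        // A handful of ids: raw little-endian u32s beat roaring's header.
        Ok(bitmap.iter().flat_map(u32::to_le_bytes).collect())
    } else {
        // Larger sets: fall back to the standard roaring serialization.
        let mut out = Vec::new();
        bitmap.serialize_into(&mut out)?;
        Ok(out)
    }
}
```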
@@ -418,19 +418,11 @@ impl<'t> Matcher<'t, '_> {
         } else {
             match &self.matches {
                 Some((tokens, matches)) => {
-                    // If the text has to be cropped,
-                    // compute the best interval to crop around.
-                    let matches = match format_options.crop {
-                        Some(crop_size) if crop_size > 0 => {
-                            self.find_best_match_interval(matches, crop_size)
-                        }
-                        _ => matches,
-                    };
-
                     // If the text has to be cropped,
                     // crop around the best interval.
                     let (byte_start, byte_end) = match format_options.crop {
                         Some(crop_size) if crop_size > 0 => {
+                            let matches = self.find_best_match_interval(matches, crop_size);
                             self.crop_bounds(tokens, matches, crop_size)
                         }
                         _ => (0, self.text.len()),
@@ -450,6 +442,11 @@ impl<'t> Matcher<'t, '_> {
             for m in matches {
                 let token = &tokens[m.token_position];
 
+                // skip matches out of the crop window.
+                if token.byte_start < byte_start || token.byte_end > byte_end {
+                    continue;
+                }
+
                 if byte_index < token.byte_start {
                     formatted.push(&self.text[byte_index..token.byte_start]);
                 }
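Taken together, the two matcher hunks make two changes: the best match interval is now computed only when a crop is actually requested, and highlight markers are no longer emitted for matches that fall outside the crop window. A toy sketch of the window guard, with simplified types of my own rather than the real `Matcher`:

```rust
// Toy sketch: a match whose token overflows the crop window
// [byte_start, byte_end] is skipped instead of highlighted, so markers
// can no longer straddle the crop boundary.
struct Token {
    byte_start: usize,
    byte_end: usize,
}

fn is_inside_crop_window(token: &Token, byte_start: usize, byte_end: usize) -> bool {
    !(token.byte_start < byte_start || token.byte_end > byte_end)
}
```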
@@ -800,6 +797,37 @@ mod tests {
         );
     }
 
+    #[test]
+    fn format_highlight_crop_phrase_query() {
+        //! testing: https://github.com/meilisearch/meilisearch/issues/3975
+        let temp_index = TempIndex::new();
+        temp_index
+            .add_documents(documents!([
+                { "id": 1, "text": "The groundbreaking invention had the power to split the world between those who embraced progress and those who resisted change!" }
+            ]))
+            .unwrap();
+        let rtxn = temp_index.read_txn().unwrap();
+
+        let format_options = FormatOptions { highlight: true, crop: Some(10) };
+        let text = "The groundbreaking invention had the power to split the world between those who embraced progress and those who resisted change!";
+
+        let builder = MatcherBuilder::new_test(&rtxn, &temp_index, "\"the world\"");
+        let mut matcher = builder.build(text);
+        // should return 10 words with a marker at the start as well the end, and the highlighted matches.
+        insta::assert_snapshot!(
+            matcher.format(format_options),
+            @"…had the power to split <em>the</em> <em>world</em> between those who…"
+        );
+
+        let builder = MatcherBuilder::new_test(&rtxn, &temp_index, "those \"and those\"");
+        let mut matcher = builder.build(text);
+        // should highlight "those" and the phrase "and those".
+        insta::assert_snapshot!(
+            matcher.format(format_options),
+            @"…world between <em>those</em> who embraced progress <em>and</em> <em>those</em> who resisted…"
+        );
+    }
+
     #[test]
     fn smaller_crop_size() {
         //! testing: https://github.com/meilisearch/specifications/pull/120#discussion_r836536295
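The new test relies on insta's inline snapshots: the string after `@` is the expected rendering, stored directly in the source file. A minimal standalone example of the same assertion style, assuming `insta` is available as a dev-dependency:

```rust
// Minimal example of an insta inline snapshot: the value is rendered and
// compared to the literal after `@`; `cargo insta review` can rewrite the
// literal when the output legitimately changes.
#[test]
fn inline_snapshot_example() {
    insta::assert_snapshot!(format!("{} words", 10), @"10 words");
}
```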
@@ -20,7 +20,7 @@ mod sort;
 #[cfg(test)]
 mod tests;
 
-use std::collections::{BTreeSet, HashSet};
+use std::collections::HashSet;
 
 use bucket_sort::{bucket_sort, BucketSortOutput};
 use charabia::TokenizerBuilder;
@@ -108,24 +108,11 @@ impl<'ctx> SearchContext<'ctx> {
                 (None, None) => continue,
                 // The field is not searchable => User error
                 (_fid, Some(false)) => {
-                    let mut valid_fields: BTreeSet<_> =
-                        fids_map.names().map(String::from).collect();
-
-                    // Filter by the searchable names
-                    if let Some(sn) = searchable_names {
-                        let searchable_names = sn.iter().map(|s| s.to_string()).collect();
-                        valid_fields = &valid_fields & &searchable_names;
-                    }
-
-                    let searchable_count = valid_fields.len();
-
-                    // Remove hidden fields
-                    if let Some(dn) = self.index.displayed_fields(self.txn)? {
-                        let displayable_names = dn.iter().map(|s| s.to_string()).collect();
-                        valid_fields = &valid_fields & &displayable_names;
-                    }
-
-                    let hidden_fields = searchable_count > valid_fields.len();
+                    let (valid_fields, hidden_fields) = match searchable_names {
+                        Some(sn) => self.index.remove_hidden_fields(self.txn, sn)?,
+                        None => self.index.remove_hidden_fields(self.txn, fids_map.names())?,
+                    };
+
                     let field = field_name.to_string();
                     return Err(UserError::InvalidSearchableAttribute {
                         field,
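This hunk replaces the inline set intersections with a single `Index::remove_hidden_fields` helper that returns both the visible fields and whether anything was filtered out. A hedged sketch of what such a helper computes; the real method lives on `Index` and reads the displayed fields from LMDB, while the free function below is a simplified stand-in:

```rust
// Simplified stand-in for the helper introduced in this refactor: keep
// only candidates that are also displayable, and report whether any
// field was hidden along the way.
use std::collections::BTreeSet;

fn remove_hidden_fields(
    candidates: impl IntoIterator<Item = String>,
    displayed: Option<&BTreeSet<String>>,
) -> (BTreeSet<String>, bool) {
    let candidates: BTreeSet<String> = candidates.into_iter().collect();
    match displayed {
        Some(displayed) => {
            let visible: BTreeSet<String> =
                candidates.intersection(displayed).cloned().collect();
            let hidden = visible.len() < candidates.len();
            (visible, hidden)
        }
        // No displayedAttributes restriction: every field is visible.
        None => (candidates, false),
    }
}
```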
@@ -488,6 +475,20 @@ pub fn execute_search(
         tokbuilder.stop_words(stop_words);
     }
 
+    let separators = ctx.index.allowed_separators(ctx.txn)?;
+    let separators: Option<Vec<_>> =
+        separators.as_ref().map(|x| x.iter().map(String::as_str).collect());
+    if let Some(ref separators) = separators {
+        tokbuilder.separators(separators);
+    }
+
+    let dictionary = ctx.index.dictionary(ctx.txn)?;
+    let dictionary: Option<Vec<_>> =
+        dictionary.as_ref().map(|x| x.iter().map(String::as_str).collect());
+    if let Some(ref dictionary) = dictionary {
+        tokbuilder.words_dict(dictionary);
+    }
+
     let script_lang_map = ctx.index.script_language(ctx.txn)?;
     if !script_lang_map.is_empty() {
         tokbuilder.allow_list(&script_lang_map);
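The shadowed `let` bindings in this hunk are a lifetime idiom: the index returns owned `Option<Vec<String>>`, while the tokenizer builder borrows string slices, so a borrowed view is collected into its own binding that outlives the builder calls. The same pattern in isolation, with a `println!` standing in for the builder call:

```rust
// The borrow pattern from the hunk above, in isolation. The owned
// settings vector must stay alive, so the `&str` view gets its own
// binding before anything borrows from it.
fn configure_separators(separators: Option<Vec<String>>) {
    let view: Option<Vec<&str>> =
        separators.as_ref().map(|x| x.iter().map(String::as_str).collect());
    if let Some(ref view) = view {
        // In the real code this is `tokbuilder.separators(view)`.
        println!("configured {} separators", view.len());
    }
}
```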
@@ -590,16 +591,24 @@ fn check_sort_criteria(ctx: &SearchContext, sort_criteria: Option<&Vec<AscDesc>>
     for asc_desc in sort_criteria {
         match asc_desc.member() {
             Member::Field(ref field) if !crate::is_faceted(field, &sortable_fields) => {
+                let (valid_fields, hidden_fields) =
+                    ctx.index.remove_hidden_fields(ctx.txn, sortable_fields)?;
+
                 return Err(UserError::InvalidSortableAttribute {
                     field: field.to_string(),
-                    valid_fields: sortable_fields.into_iter().collect(),
-                })?
+                    valid_fields,
+                    hidden_fields,
+                })?;
             }
             Member::Geo(_) if !sortable_fields.contains("_geo") => {
+                let (valid_fields, hidden_fields) =
+                    ctx.index.remove_hidden_fields(ctx.txn, sortable_fields)?;
+
                 return Err(UserError::InvalidSortableAttribute {
                     field: "_geo".to_string(),
-                    valid_fields: sortable_fields.into_iter().collect(),
-                })?
+                    valid_fields,
+                    hidden_fields,
+                })?;
             }
             _ => (),
         }
@@ -2,7 +2,7 @@ use std::io::Cursor;
 
 use big_s::S;
 use heed::EnvOpenOptions;
-use maplit::{hashmap, hashset};
+use maplit::{btreemap, hashset};
 
 use crate::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
 use crate::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings};
@@ -33,7 +33,7 @@ pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
         S("tag"),
         S("asc_desc_rank"),
     });
-    builder.set_synonyms(hashmap! {
+    builder.set_synonyms(btreemap! {
         S("hello") => vec![S("good morning")],
         S("world") => vec![S("earth")],
         S("america") => vec![S("the united states")],
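The `hashmap!` → `btreemap!` switch here, and the matching `HashMap` → `BTreeMap` swaps in the hunks below, trade hashing for sorted keys: `BTreeMap` iterates in key order, so synonym settings serialize and snapshot deterministically across runs. A small self-contained demonstration:

```rust
// BTreeMap iterates in sorted key order regardless of insertion order,
// which keeps settings dumps and test snapshots stable across runs;
// HashMap iteration order changes between executions.
use std::collections::BTreeMap;

fn main() {
    let mut synonyms: BTreeMap<String, Vec<String>> = BTreeMap::new();
    synonyms.insert("world".to_owned(), vec!["earth".to_owned()]);
    synonyms.insert("hello".to_owned(), vec!["good morning".to_owned()]);
    for (word, syns) in &synonyms {
        // Always prints "hello" before "world".
        println!("{word} => {syns:?}");
    }
}
```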
@@ -15,7 +15,7 @@ they store fewer sprximities than the regular word sprximity DB.
 
 */
 
-use std::collections::HashMap;
+use std::collections::BTreeMap;
 
 use crate::index::tests::TempIndex;
 use crate::search::new::tests::collect_field_values;
@@ -336,7 +336,7 @@ fn test_proximity_split_word() {
 
     index
         .update_settings(|s| {
-            let mut syns = HashMap::new();
+            let mut syns = BTreeMap::new();
             syns.insert("xyz".to_owned(), vec!["sun flower".to_owned()]);
             s.set_synonyms(syns);
         })
@@ -13,6 +13,7 @@ This module tests the `sort` ranking rule:
 
 use big_s::S;
 use maplit::hashset;
+use meili_snap::insta;
 
 use crate::index::tests::TempIndex;
 use crate::search::new::tests::collect_field_values;
@@ -18,7 +18,7 @@ if `words` doesn't exist before it.
 14. Synonyms cost nothing according to the typo ranking rule
 */
 
-use std::collections::HashMap;
+use std::collections::BTreeMap;
 
 use crate::index::tests::TempIndex;
 use crate::search::new::tests::collect_field_values;
@@ -591,7 +591,7 @@ fn test_typo_synonyms() {
         .update_settings(|s| {
             s.set_criteria(vec![Criterion::Typo]);
 
-            let mut synonyms = HashMap::new();
+            let mut synonyms = BTreeMap::new();
             synonyms.insert("lackadaisical".to_owned(), vec!["lazy".to_owned()]);
             synonyms.insert("fast brownish".to_owned(), vec!["quick brown".to_owned()]);
 
Some files were not shown because too many files have changed in this diff.