Mirror of https://github.com/meilisearch/meilisearch.git
Synced 2025-11-27 08:12:36 +00:00

Compare commits
33 Commits — release-v1 ... prototype-
(Author and date columns were not captured by this mirror listing; only the commit SHAs are available.)

| Author | SHA1 | Date |
|---|---|---|
|  | 43989fe2e4 |  |
|  | 86b314626d |  |
|  | bb79bdb3f8 |  |
|  | d429e7da99 |  |
|  | 584b772248 |  |
|  | 1806c04a9a |  |
|  | 3485e8f1c4 |  |
|  | fe697a6685 |  |
|  | eb4135f8ae |  |
|  | ec4844c3a6 |  |
|  | 77c3787b78 |  |
|  | 4f902490b9 |  |
|  | 1faee92748 |  |
|  | 5831466525 |  |
|  | 3cdb3e4eaf |  |
|  | 26f34ec7a2 |  |
|  | 07d36180ad |  |
|  | 4c641b79a2 |  |
|  | ef31ab52a4 |  |
|  | 791c5cd874 |  |
|  | 5bea1092fb |  |
|  | 056b2c387d |  |
|  | b4c44603db |  |
|  | 2c1d60f79b |  |
|  | 08af69a33b |  |
|  | ddd34a488a |  |
|  | 526c2b3602 |  |
|  | e8c9367686 |  |
|  | 9636c5f558 |  |
|  | b310830b5d |  |
|  | e9b62aacb3 |  |
|  | 456960d2c7 |  |
|  | e59d7f238c |  |
.github/workflows/dependency-issue.yml (vendored) — 4 changes
@@ -2,8 +2,8 @@ name: Create issue to upgrade dependencies
 on:
   schedule:
-    # Run the first of the month, every 3 month
-    - cron: '0 0 1 */3 *'
+    # Run the first of the month, every 6 month
+    - cron: '0 0 1 */6 *'
   workflow_dispatch:

 jobs:
.github/workflows/sdks-tests.yml (vendored) — 278 changes
@@ -14,6 +14,7 @@ on:
env:
  MEILI_MASTER_KEY: 'masterKey'
  MEILI_NO_ANALYTICS: 'true'
  DISABLE_COVERAGE: 'true'

jobs:
  define-docker-image:
@@ -30,6 +31,117 @@ jobs:
          if [[ $event == 'workflow_dispatch' ]]; then
            echo "docker-image=${{ github.event.inputs.docker_image }}" >> $GITHUB_OUTPUT
          fi
      - name: Docker image is ${{ steps.define-image.outputs.docker-image }}
        run: echo "Docker image is ${{ steps.define-image.outputs.docker-image }}"

  ##########
  ## SDKs ##
  ##########

  meilisearch-dotnet-tests:
    needs: define-docker-image
    name: .NET SDK tests
    runs-on: ubuntu-latest
    env:
      MEILISEARCH_VERSION: ${{ needs.define-docker-image.outputs.docker-image }}

    steps:
      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-dotnet
      - name: Setup .NET Core
        uses: actions/setup-dotnet@v3
        with:
          dotnet-version: "6.0.x"
      - name: Install dependencies
        run: dotnet restore
      - name: Build
        run: dotnet build --configuration Release --no-restore
      - name: Meilisearch (latest version) setup with Docker
        run: docker compose up -d
      - name: Run tests
        run: dotnet test --no-restore --verbosity normal

  meilisearch-dart-tests:
    needs: define-docker-image
    name: Dart SDK tests
    runs-on: ubuntu-latest
    services:
      meilisearch:
        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
        env:
          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
        ports:
          - '7700:7700'
    steps:
      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-dart
      - uses: dart-lang/setup-dart@v1
        with:
          sdk: 3.1.1
      - name: Install dependencies
        run: dart pub get
      - name: Run integration tests
        run: dart test --concurrency=4

  meilisearch-go-tests:
    needs: define-docker-image
    name: Go SDK tests
    runs-on: ubuntu-latest
    services:
      meilisearch:
        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
        env:
          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
        ports:
          - '7700:7700'
    steps:
      - name: Set up Go
        uses: actions/setup-go@v4
        with:
          go-version: stable
      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-go
      - name: Get dependencies
        run: |
          go get -v -t -d ./...
          if [ -f Gopkg.toml ]; then
              curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
              dep ensure
          fi
      - name: Run integration tests
        run: go test -v ./...

  meilisearch-java-tests:
    needs: define-docker-image
    name: Java SDK tests
    runs-on: ubuntu-latest
    services:
      meilisearch:
        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
        env:
          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
        ports:
          - '7700:7700'
    steps:
      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-java
      - name: Set up Java
        uses: actions/setup-java@v3
        with:
          java-version: 8
          distribution: 'zulu'
          cache: gradle
      - name: Grant execute permission for gradlew
        run: chmod +x gradlew
      - name: Build and run unit and integration tests
        run: ./gradlew build integrationTest

  meilisearch-js-tests:
    needs: define-docker-image
@@ -66,33 +178,6 @@ jobs:
      - name: Run Browser env
        run: yarn test:env:browser

  instant-meilisearch-tests:
    needs: define-docker-image
    name: instant-meilisearch tests
    runs-on: ubuntu-latest
    services:
      meilisearch:
        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
        env:
          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
        ports:
          - '7700:7700'
    steps:
      - uses: actions/checkout@v3
        with:
          repository: meilisearch/instant-meilisearch
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          cache: yarn
      - name: Install dependencies
        run: yarn install
      - name: Run tests
        run: yarn test
      - name: Build all the playgrounds and the packages
        run: yarn build

  meilisearch-php-tests:
    needs: define-docker-image
    name: PHP SDK tests
@@ -111,8 +196,6 @@ jobs:
          repository: meilisearch/meilisearch-php
      - name: Install PHP
        uses: shivammathur/setup-php@v2
        with:
          coverage: none
      - name: Validate composer.json and composer.lock
        run: composer validate
      - name: Install dependencies
@@ -149,36 +232,6 @@ jobs:
      - name: Test with pytest
        run: pipenv run pytest

  meilisearch-go-tests:
    needs: define-docker-image
    name: Go SDK tests
    runs-on: ubuntu-latest
    services:
      meilisearch:
        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
        env:
          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
        ports:
          - '7700:7700'
    steps:
      - name: Set up Go
        uses: actions/setup-go@v4
        with:
          go-version: stable
      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-go
      - name: Get dependencies
        run: |
          go get -v -t -d ./...
          if [ -f Gopkg.toml ]; then
              curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
              dep ensure
          fi
      - name: Run integration tests
        run: go test -v ./...

  meilisearch-ruby-tests:
    needs: define-docker-image
    name: Ruby SDK tests
@@ -224,3 +277,110 @@ jobs:
        run: cargo build --verbose
      - name: Run tests
        run: cargo test --verbose

  meilisearch-swift-tests:
    needs: define-docker-image
    name: Swift SDK tests
    runs-on: ubuntu-latest
    services:
      meilisearch:
        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
        env:
          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
        ports:
          - '7700:7700'
    steps:
      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-swift
      - name: Run tests
        run: swift test

  ########################
  ## FRONT-END PLUGINS ##
  ########################

  meilisearch-js-plugins-tests:
    needs: define-docker-image
    name: meilisearch-js-plugins tests
    runs-on: ubuntu-latest
    services:
      meilisearch:
        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
        env:
          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
        ports:
          - '7700:7700'
    steps:
      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-js-plugins
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          cache: yarn
      - name: Install dependencies
        run: yarn install
      - name: Run tests
        run: yarn test
      - name: Build all the playgrounds and the packages
        run: yarn build

  ########################
  ## BACK-END PLUGINS ###
  ########################

  meilisearch-rails-tests:
    needs: define-docker-image
    name: meilisearch-rails tests
    runs-on: ubuntu-latest
    services:
      meilisearch:
        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
        env:
          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
        ports:
          - '7700:7700'
    steps:
      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-rails
      - name: Set up Ruby 3
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 3
          bundler-cache: true
      - name: Run tests
        run: bundle exec rspec

  meilisearch-symfony-tests:
    needs: define-docker-image
    name: meilisearch-symfony tests
    runs-on: ubuntu-latest
    services:
      meilisearch:
        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
        env:
          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
        ports:
          - '7700:7700'
    steps:
      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-symfony
      - name: Install PHP
        uses: shivammathur/setup-php@v2
        with:
          tools: composer:v2, flex
      - name: Validate composer.json and composer.lock
        run: composer validate
      - name: Install dependencies
        run: composer install --prefer-dist --no-progress --quiet
      - name: Remove doctrine/annotations
        run: composer remove --dev doctrine/annotations
      - name: Run test suite
        run: composer test:unit
.github/workflows/test-suite.yml (vendored) — 15 changes
@@ -43,7 +43,7 @@ jobs:
          toolchain: nightly
          override: true
      - name: Cache dependencies
-       uses: Swatinem/rust-cache@v2.5.1
+       uses: Swatinem/rust-cache@v2.6.2
      - name: Run cargo check without any default features
        uses: actions-rs/cargo@v1
        with:
@@ -65,7 +65,7 @@ jobs:
    steps:
      - uses: actions/checkout@v3
      - name: Cache dependencies
-       uses: Swatinem/rust-cache@v2.5.1
+       uses: Swatinem/rust-cache@v2.6.2
      - name: Run cargo check without any default features
        uses: actions-rs/cargo@v1
        with:
@@ -123,7 +123,10 @@ jobs:
          override: true
      - name: Run cargo tree without default features and check lindera is not present
        run: |
-         cargo tree -f '{p} {f}' -e normal --no-default-features | grep lindera -vqz
+         if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -vqz lindera; then
+           echo "lindera has been found in the sources and it shouldn't"
+           exit 1
+         fi
      - name: Run cargo tree with default features and check lindera is pressent
        run: |
          cargo tree -f '{p} {f}' -e normal | grep lindera -qz
@@ -146,7 +149,7 @@ jobs:
          toolchain: stable
          override: true
      - name: Cache dependencies
-       uses: Swatinem/rust-cache@v2.5.1
+       uses: Swatinem/rust-cache@v2.6.2
      - name: Run tests in debug
        uses: actions-rs/cargo@v1
        with:
@@ -165,7 +168,7 @@ jobs:
          override: true
          components: clippy
      - name: Cache dependencies
-       uses: Swatinem/rust-cache@v2.5.1
+       uses: Swatinem/rust-cache@v2.6.2
      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        with:
@@ -184,7 +187,7 @@ jobs:
          override: true
          components: rustfmt
      - name: Cache dependencies
-       uses: Swatinem/rust-cache@v2.5.1
+       uses: Swatinem/rust-cache@v2.6.2
      - name: Run cargo fmt
        # Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
        # Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
Cargo.lock (generated) — 40 changes
@@ -468,7 +468,7 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
|
||||
|
||||
[[package]]
|
||||
name = "benchmarks"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bytes",
|
||||
@@ -1206,7 +1206,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "dump"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"big_s",
|
||||
@@ -1417,7 +1417,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "file-store"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"faux",
|
||||
"tempfile",
|
||||
@@ -1439,7 +1439,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "filter-parser"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"insta",
|
||||
"nom",
|
||||
@@ -1459,7 +1459,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "flatten-serde-json"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"criterion",
|
||||
"serde_json",
|
||||
@@ -1577,7 +1577,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "fuzzers"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"arbitrary",
|
||||
"clap",
|
||||
@@ -1891,7 +1891,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "index-scheduler"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"big_s",
|
||||
@@ -2088,7 +2088,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "json-depth-checker"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"criterion",
|
||||
"serde_json",
|
||||
@@ -2500,7 +2500,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
|
||||
|
||||
[[package]]
|
||||
name = "meili-snap"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"insta",
|
||||
"md5",
|
||||
@@ -2509,7 +2509,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "meilisearch"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"actix-cors",
|
||||
"actix-http",
|
||||
@@ -2600,7 +2600,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "meilisearch-auth"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"base64 0.21.2",
|
||||
"enum-iterator",
|
||||
@@ -2619,7 +2619,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "meilisearch-types"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"actix-web",
|
||||
"anyhow",
|
||||
@@ -2673,7 +2673,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "milli"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"big_s",
|
||||
"bimap",
|
||||
@@ -2995,7 +2995,7 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
|
||||
|
||||
[[package]]
|
||||
name = "permissive-json-pointer"
|
||||
version = "1.4.2"
|
||||
version = "1.4.0"
|
||||
dependencies = [
|
||||
"big_s",
|
||||
"serde_json",
|
||||
@@ -3538,9 +3538,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rustls-webpki"
|
||||
version = "0.100.1"
|
||||
version = "0.100.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d6207cd5ed3d8dca7816f8f3725513a34609c0c765bf652b8c3cb4cfd87db46b"
|
||||
checksum = "e98ff011474fa39949b7e5c0428f9b4937eda7da7848bbb947786b7be0b27dab"
|
||||
dependencies = [
|
||||
"ring",
|
||||
"untrusted",
|
||||
@@ -4248,7 +4248,7 @@ dependencies = [
|
||||
"log",
|
||||
"once_cell",
|
||||
"rustls 0.21.6",
|
||||
"rustls-webpki 0.100.1",
|
||||
"rustls-webpki 0.100.2",
|
||||
"url",
|
||||
"webpki-roots 0.23.1",
|
||||
]
|
||||
@@ -4443,9 +4443,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "webpki"
|
||||
version = "0.22.0"
|
||||
version = "0.22.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd"
|
||||
checksum = "f0e74f82d49d545ad128049b7e88f6576df2da6b02e9ce565c6f533be576957e"
|
||||
dependencies = [
|
||||
"ring",
|
||||
"untrusted",
|
||||
@@ -4466,7 +4466,7 @@ version = "0.23.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338"
|
||||
dependencies = [
|
||||
"rustls-webpki 0.100.1",
|
||||
"rustls-webpki 0.100.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
@@ -18,7 +18,7 @@ members = [
 ]

 [workspace.package]
-version = "1.4.2"
+version = "1.4.0"
 authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"
@@ -129,6 +129,9 @@ impl HeedAuthStore {
             Action::DumpsAll => {
                 actions.insert(Action::DumpsCreate);
             }
+            Action::SnapshotsAll => {
+                actions.insert(Action::SnapshotsCreate);
+            }
             Action::TasksAll => {
                 actions.extend([Action::TasksGet, Action::TasksDelete, Action::TasksCancel]);
             }
@@ -257,6 +257,12 @@ pub enum Action {
     #[serde(rename = "dumps.create")]
     #[deserr(rename = "dumps.create")]
     DumpsCreate,
+    #[serde(rename = "snapshots.*")]
+    #[deserr(rename = "snapshots.*")]
+    SnapshotsAll,
+    #[serde(rename = "snapshots.create")]
+    #[deserr(rename = "snapshots.create")]
+    SnapshotsCreate,
     #[serde(rename = "version")]
     #[deserr(rename = "version")]
     Version,
@@ -309,6 +315,7 @@ impl Action {
             METRICS_GET => Some(Self::MetricsGet),
             DUMPS_ALL => Some(Self::DumpsAll),
             DUMPS_CREATE => Some(Self::DumpsCreate),
+            SNAPSHOTS_CREATE => Some(Self::SnapshotsCreate),
             VERSION => Some(Self::Version),
             KEYS_CREATE => Some(Self::KeysAdd),
             KEYS_GET => Some(Self::KeysGet),
@@ -353,6 +360,7 @@ pub mod actions {
     pub const METRICS_GET: u8 = MetricsGet.repr();
     pub const DUMPS_ALL: u8 = DumpsAll.repr();
     pub const DUMPS_CREATE: u8 = DumpsCreate.repr();
+    pub const SNAPSHOTS_CREATE: u8 = SnapshotsCreate.repr();
     pub const VERSION: u8 = Version.repr();
     pub const KEYS_CREATE: u8 = KeysAdd.repr();
     pub const KEYS_GET: u8 = KeysGet.repr();
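The new `snapshots.*` and `snapshots.create` actions become valid values in API-key payloads. A minimal sketch in the style of the auth integration tests further down this change set; the `Server::new_auth`, `use_api_key`, and `add_api_key` helpers and the 201 status code are assumptions taken from that test suite rather than from this hunk:

```rust
// Sketch only: creating an API key restricted to snapshot creation.
// Helper names (`new_auth`, `use_api_key`, `add_api_key`) are assumed from the test suite.
#[actix_rt::test]
async fn create_key_limited_to_snapshots() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    let content = json!({
        "indexes": ["*"],
        // "snapshots.*" would also work; the store above expands it to `snapshots.create`.
        "actions": ["snapshots.create"],
        "expiresAt": null,
    });
    let (_response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
}
```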
@@ -1,6 +1,5 @@
 mod mock_analytics;
-// if we are in release mode and the feature analytics was enabled
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 mod segment_analytics;

 use std::fs;
@@ -17,26 +16,25 @@ use serde_json::Value;
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
 use crate::routes::tasks::TasksFilterQuery;

-// if we are in debug mode OR the analytics feature is disabled
+// if the analytics feature is disabled
 // the `SegmentAnalytics` point to the mock instead of the real analytics
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type SegmentAnalytics = mock_analytics::MockAnalytics;
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type SearchAggregator = mock_analytics::SearchAggregator;
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type MultiSearchAggregator = mock_analytics::MultiSearchAggregator;
-#[cfg(any(debug_assertions, not(feature = "analytics")))]
+#[cfg(not(feature = "analytics"))]
 pub type FacetSearchAggregator = mock_analytics::FacetSearchAggregator;

-// if we are in release mode and the feature analytics was enabled
-// we use the real analytics
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+// if the feature analytics is enabled we use the real analytics
+#[cfg(feature = "analytics")]
 pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 pub type SearchAggregator = segment_analytics::SearchAggregator;
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator;
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 pub type FacetSearchAggregator = segment_analytics::FacetSearchAggregator;

 /// The Meilisearch config dir:
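The net effect of this hunk is that the mock/real selection now depends only on the `analytics` cargo feature, no longer on `debug_assertions`. A stripped-down sketch of the same cfg-aliasing pattern, with generic stand-in names rather than Meilisearch's actual items:

```rust
// Minimal illustration of the pattern above: one public type name, two possible
// backing implementations, chosen at compile time by a single cargo feature.
pub struct MockAnalytics;   // stand-in for mock_analytics::MockAnalytics
pub struct RealAnalytics;   // stand-in for segment_analytics::SegmentAnalytics

#[cfg(not(feature = "analytics"))]
pub type SegmentAnalytics = MockAnalytics;
#[cfg(feature = "analytics")]
pub type SegmentAnalytics = RealAnalytics;

// Downstream code names `SegmentAnalytics` and never needs its own cfg gates.
```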
(File diff suppressed because it is too large.)
@@ -28,7 +28,7 @@ const MEILI_DB_PATH: &str = "MEILI_DB_PATH";
 const MEILI_HTTP_ADDR: &str = "MEILI_HTTP_ADDR";
 const MEILI_MASTER_KEY: &str = "MEILI_MASTER_KEY";
 const MEILI_ENV: &str = "MEILI_ENV";
-#[cfg(all(not(debug_assertions), feature = "analytics"))]
+#[cfg(feature = "analytics")]
 const MEILI_NO_ANALYTICS: &str = "MEILI_NO_ANALYTICS";
 const MEILI_HTTP_PAYLOAD_SIZE_LIMIT: &str = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT";
 const MEILI_SSL_CERT_PATH: &str = "MEILI_SSL_CERT_PATH";
@@ -159,7 +159,7 @@ pub struct Opt {
     /// Meilisearch automatically collects data from all instances that do not opt out using this flag.
     /// All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted
     /// at any time.
-    #[cfg(all(not(debug_assertions), feature = "analytics"))]
+    #[cfg(feature = "analytics")]
     #[serde(default)] // we can't send true
     #[clap(long, env = MEILI_NO_ANALYTICS)]
     pub no_analytics: bool,
@@ -390,7 +390,7 @@ impl Opt {
             ignore_missing_dump: _,
             ignore_dump_if_db_exists: _,
             config_file_path: _,
-            #[cfg(all(not(debug_assertions), feature = "analytics"))]
+            #[cfg(feature = "analytics")]
             no_analytics,
             experimental_enable_metrics: enable_metrics_route,
             experimental_reduce_indexing_memory_usage: reduce_indexing_memory_usage,
@@ -401,7 +401,7 @@ impl Opt {
             export_to_env_if_not_present(MEILI_MASTER_KEY, master_key);
         }
         export_to_env_if_not_present(MEILI_ENV, env);
-        #[cfg(all(not(debug_assertions), feature = "analytics"))]
+        #[cfg(feature = "analytics")]
         {
             export_to_env_if_not_present(MEILI_NO_ANALYTICS, no_analytics.to_string());
         }
@@ -24,6 +24,7 @@ pub mod features;
 pub mod indexes;
 mod metrics;
 mod multi_search;
+mod snapshot;
 mod swap_indexes;
 pub mod tasks;

@@ -32,6 +33,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::resource("/health").route(web::get().to(get_health)))
         .service(web::scope("/keys").configure(api_key::configure))
         .service(web::scope("/dumps").configure(dump::configure))
+        .service(web::scope("/snapshots").configure(snapshot::configure))
         .service(web::resource("/stats").route(web::get().to(get_stats)))
         .service(web::resource("/version").route(web::get().to(get_version)))
         .service(web::scope("/indexes").configure(indexes::configure))
meilisearch/src/routes/snapshot.rs (new file) — 32 lines
@@ -0,0 +1,32 @@
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::error::ResponseError;
use meilisearch_types::tasks::KindWithContent;
use serde_json::json;

use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::SummarizedTaskView;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_snapshot))));
}

pub async fn create_snapshot(
    index_scheduler: GuardedData<ActionPolicy<{ actions::SNAPSHOTS_CREATE }>, Data<IndexScheduler>>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.publish("Snapshot Created".to_string(), json!({}), Some(&req));

    let task = KindWithContent::SnapshotCreation;
    let task: SummarizedTaskView =
        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();

    debug!("returns: {:?}", task);
    Ok(HttpResponse::Accepted().json(task))
}
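Later in this change set the test helper `Server::create_snapshot()` (added in `tests/common/server.rs`) posts to this route. A hedged sketch of how an integration test could exercise the endpoint; the summarized-task field asserted at the end is an assumption, not taken from this hunk:

```rust
// Sketch in the style of the existing integration tests.
// `Server` and the test-local `json!`/`Value` come from the tests' common module.
#[actix_rt::test]
async fn snapshot_route_returns_a_summarized_task() {
    let server = Server::new().await;
    let (task, code) = server.create_snapshot().await;
    assert_eq!(code, 202); // the handler replies 202 Accepted with the registered task
    assert_eq!(task["type"], "snapshotCreation"); // field name/value assumed from task conventions
}
```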
@@ -1,8 +1,7 @@
|
||||
use std::{thread, time};
|
||||
|
||||
use serde_json::{json, Value};
|
||||
|
||||
use crate::common::Server;
|
||||
use crate::common::{Server, Value};
|
||||
use crate::json;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn add_valid_api_key() {
|
||||
@@ -162,7 +161,7 @@ async fn add_valid_api_key_null_description() {
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
let content = json!({
|
||||
"description": Value::Null,
|
||||
"description": json!(null),
|
||||
"indexes": ["products"],
|
||||
"actions": ["documents.add"],
|
||||
"expiresAt": "2050-11-13T00:00:00"
|
||||
@@ -365,7 +364,7 @@ async fn error_add_api_key_invalid_index_uids() {
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
let content = json!({
|
||||
"description": Value::Null,
|
||||
"description": json!(null),
|
||||
"indexes": ["invalid index # / \\name with spaces"],
|
||||
"actions": [
|
||||
"documents.add"
|
||||
@@ -422,7 +421,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
|
||||
meili_snap::snapshot!(code, @"400 Bad Request");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
|
||||
{
|
||||
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
|
||||
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
|
||||
"code": "invalid_api_key_actions",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
|
||||
@@ -507,7 +506,7 @@ async fn error_add_api_key_invalid_parameters_uid() {
|
||||
async fn error_add_api_key_parameters_uid_already_exist() {
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
let content = json!({
|
||||
let content: Value = json!({
|
||||
"uid": "4bc0887a-0e41-4f3b-935d-0c451dcee9c8",
|
||||
"indexes": ["products"],
|
||||
"actions": ["search"],
|
||||
@@ -1146,7 +1145,7 @@ async fn patch_api_key_description() {
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
|
||||
// Remove the description
|
||||
let content = json!({ "description": serde_json::Value::Null });
|
||||
let content = json!({ "description": null });
|
||||
|
||||
let (response, code) = server.patch_api_key(&uid, content).await;
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###"
|
||||
|
||||
@@ -3,10 +3,10 @@ use std::collections::{HashMap, HashSet};
|
||||
use ::time::format_description::well_known::Rfc3339;
|
||||
use maplit::{hashmap, hashset};
|
||||
use once_cell::sync::Lazy;
|
||||
use serde_json::{json, Value};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
use crate::common::Server;
|
||||
use crate::common::{Server, Value};
|
||||
use crate::json;
|
||||
|
||||
pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
|
||||
Lazy::new(|| {
|
||||
@@ -54,6 +54,7 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
|
||||
("GET", "/indexes/products/stats") => hashset!{"stats.get", "stats.*", "*"},
|
||||
("GET", "/stats") => hashset!{"stats.get", "stats.*", "*"},
|
||||
("POST", "/dumps") => hashset!{"dumps.create", "dumps.*", "*"},
|
||||
("POST", "/snapshots") => hashset!{"snapshots.create", "snapshots.*", "*"},
|
||||
("GET", "/version") => hashset!{"version", "*"},
|
||||
("GET", "/metrics") => hashset!{"metrics.get", "metrics.*", "*"},
|
||||
("PATCH", "/keys/mykey/") => hashset!{"keys.update", "*"},
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use meili_snap::*;
|
||||
use serde_json::json;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::common::Server;
|
||||
use crate::json;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_api_key_bad_description() {
|
||||
@@ -90,7 +90,7 @@ async fn create_api_key_bad_actions() {
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(json_string!(response), @r###"
|
||||
{
|
||||
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
|
||||
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`",
|
||||
"code": "invalid_api_key_actions",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
|
||||
|
||||
@@ -7,9 +7,9 @@ mod tenant_token;
|
||||
mod tenant_token_multi_search;
|
||||
|
||||
use actix_web::http::StatusCode;
|
||||
use serde_json::{json, Value};
|
||||
|
||||
use crate::common::Server;
|
||||
use crate::common::{Server, Value};
|
||||
use crate::json;
|
||||
|
||||
impl Server {
|
||||
pub fn use_api_key(&mut self, api_key: impl AsRef<str>) {
|
||||
|
||||
@@ -3,11 +3,11 @@ use std::collections::HashMap;
|
||||
use ::time::format_description::well_known::Rfc3339;
|
||||
use maplit::hashmap;
|
||||
use once_cell::sync::Lazy;
|
||||
use serde_json::{json, Value};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
|
||||
use crate::common::Server;
|
||||
use crate::common::{Server, Value};
|
||||
use crate::json;
|
||||
|
||||
fn generate_tenant_token(
|
||||
parent_uid: impl AsRef<str>,
|
||||
@@ -233,31 +233,31 @@ async fn search_authorized_simple_token() {
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {}}),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null)
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
"searchRules" => json!({"*": null}),
|
||||
"exp" => json!(null)
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["*"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null)
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {}}),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null)
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
"searchRules" => json!({"sales": null}),
|
||||
"exp" => json!(null)
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sales"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null)
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sa*"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null)
|
||||
},
|
||||
];
|
||||
|
||||
@@ -386,7 +386,7 @@ async fn error_search_token_forbidden_parent_key() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"searchRules" => json!({"*": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
@@ -398,7 +398,7 @@ async fn error_search_token_forbidden_parent_key() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null}),
|
||||
"searchRules" => json!({"sales": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
@@ -428,15 +428,15 @@ async fn error_search_forbidden_token() {
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"products": {}}),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null)
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"products": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
"searchRules" => json!({"products": null}),
|
||||
"exp" => json!(null)
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["products"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null)
|
||||
},
|
||||
// expired token
|
||||
hashmap! {
|
||||
@@ -444,7 +444,7 @@ async fn error_search_forbidden_token() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"searchRules" => json!({"*": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
@@ -456,7 +456,7 @@ async fn error_search_forbidden_token() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null}),
|
||||
"searchRules" => json!({"sales": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
|
||||
@@ -3,11 +3,11 @@ use std::collections::HashMap;
|
||||
use ::time::format_description::well_known::Rfc3339;
|
||||
use maplit::hashmap;
|
||||
use once_cell::sync::Lazy;
|
||||
use serde_json::{json, Value};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
use super::authorization::ALL_ACTIONS;
|
||||
use crate::common::Server;
|
||||
use crate::common::{Server, Value};
|
||||
use crate::json;
|
||||
|
||||
fn generate_tenant_token(
|
||||
parent_uid: impl AsRef<str>,
|
||||
@@ -512,31 +512,31 @@ async fn single_search_authorized_simple_token() {
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {}}),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
"searchRules" => json!({"*": null}),
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["*"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {}}),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
"searchRules" => json!({"sales": null}),
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sales"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sa*"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
];
|
||||
|
||||
@@ -564,31 +564,31 @@ async fn multi_search_authorized_simple_token() {
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {}}),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
"searchRules" => json!({"*": null}),
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["*"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {}, "products": {}}),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null, "products": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
"searchRules" => json!({"sales": null, "products": null}),
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sales", "products"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sa*", "pro*"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
];
|
||||
|
||||
@@ -823,7 +823,7 @@ async fn error_single_search_token_forbidden_parent_key() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"searchRules" => json!({"*": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
@@ -835,7 +835,7 @@ async fn error_single_search_token_forbidden_parent_key() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null}),
|
||||
"searchRules" => json!({"sales": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
@@ -864,7 +864,7 @@ async fn error_multi_search_token_forbidden_parent_key() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"searchRules" => json!({"*": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
@@ -876,7 +876,7 @@ async fn error_multi_search_token_forbidden_parent_key() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null, "products": Value::Null}),
|
||||
"searchRules" => json!({"sales": null, "products": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
@@ -919,15 +919,15 @@ async fn error_single_search_forbidden_token() {
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"products": {}}),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"products": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
"searchRules" => json!({"products": null}),
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["products"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
// expired token
|
||||
hashmap! {
|
||||
@@ -935,7 +935,7 @@ async fn error_single_search_forbidden_token() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"searchRules" => json!({"*": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
@@ -947,7 +947,7 @@ async fn error_single_search_forbidden_token() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null}),
|
||||
"searchRules" => json!({"sales": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
@@ -978,15 +978,15 @@ async fn error_multi_search_forbidden_token() {
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"products": {}}),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"products": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
"searchRules" => json!({"products": null}),
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["products"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {}}),
|
||||
@@ -998,15 +998,15 @@ async fn error_multi_search_forbidden_token() {
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {}}),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
"searchRules" => json!({"sales": null}),
|
||||
"exp" => json!(null),
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sales"]),
|
||||
"exp" => Value::Null
|
||||
"exp" => json!(null),
|
||||
},
|
||||
// expired token
|
||||
hashmap! {
|
||||
@@ -1014,7 +1014,7 @@ async fn error_multi_search_forbidden_token() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"searchRules" => json!({"*": null}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
@@ -1026,7 +1026,7 @@ async fn error_multi_search_forbidden_token() {
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null, "products": {}}),
|
||||
"searchRules" => json!({"sales": null, "products": {}}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
|
||||
@@ -3,12 +3,13 @@ use std::panic::{catch_unwind, resume_unwind, UnwindSafe};
|
||||
use std::time::Duration;
|
||||
|
||||
use actix_web::http::StatusCode;
|
||||
use serde_json::{json, Value};
|
||||
use tokio::time::sleep;
|
||||
use urlencoding::encode as urlencode;
|
||||
|
||||
use super::encoder::Encoder;
|
||||
use super::service::Service;
|
||||
use super::Value;
|
||||
use crate::json;
|
||||
|
||||
pub struct Index<'a> {
|
||||
pub uid: String,
|
||||
@@ -242,7 +243,9 @@ impl Index<'_> {
|
||||
|
||||
pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/documents/delete-batch", urlencode(self.uid.as_ref()));
|
||||
self.service.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder).await
|
||||
self.service
|
||||
.post_encoded(url, serde_json::to_value(&ids).unwrap().into(), self.encoder)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_batch_raw(&self, body: Value) -> (Value, StatusCode) {
|
||||
|
||||
@@ -3,9 +3,83 @@ pub mod index;
pub mod server;
pub mod service;

use std::fmt::{self, Display};

pub use index::{GetAllDocumentsOptions, GetDocumentOptions};
use meili_snap::json_string;
use serde::{Deserialize, Serialize};
pub use server::{default_settings, Server};

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct Value(pub serde_json::Value);

impl Value {
    pub fn uid(&self) -> u64 {
        if let Some(uid) = self["uid"].as_u64() {
            uid
        } else if let Some(uid) = self["taskUid"].as_u64() {
            uid
        } else {
            panic!("Didn't find any task id in: {self}");
        }
    }
}

impl From<serde_json::Value> for Value {
    fn from(value: serde_json::Value) -> Self {
        Value(value)
    }
}

impl std::ops::Deref for Value {
    type Target = serde_json::Value;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl PartialEq<serde_json::Value> for Value {
    fn eq(&self, other: &serde_json::Value) -> bool {
        &self.0 == other
    }
}

impl PartialEq<Value> for serde_json::Value {
    fn eq(&self, other: &Value) -> bool {
        self == &other.0
    }
}

impl PartialEq<&str> for Value {
    fn eq(&self, other: &&str) -> bool {
        self.0.eq(other)
    }
}

impl Display for Value {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "{}",
            json_string!(self, { ".enqueuedAt" => "[date]", ".processedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" })
        )
    }
}

impl From<Vec<Value>> for Value {
    fn from(value: Vec<Value>) -> Self {
        Self(value.into_iter().map(|value| value.0).collect::<serde_json::Value>())
    }
}

#[macro_export]
macro_rules! json {
    ($($json:tt)+) => {
        $crate::common::Value(serde_json::json!($($json)+))
    };
}

/// Performs a search test on both post and get routes
#[macro_export]
macro_rules! test_post_get_search {
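A short usage sketch for the wrapper and macro above, relying only on what this module defines (Deref to `serde_json::Value`, the `uid()` helper, and the test-local `json!` macro):

```rust
use crate::json; // the test-local macro exported above

fn value_wrapper_example() {
    let task = json!({ "taskUid": 42, "status": "enqueued" });
    assert_eq!(task.uid(), 42);             // reads either `uid` or `taskUid`
    assert_eq!(task["status"], "enqueued"); // Deref exposes serde_json indexing and comparisons
}
```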
@@ -11,13 +11,14 @@ use clap::Parser;
|
||||
use meilisearch::option::{IndexerOpts, MaxMemory, Opt};
|
||||
use meilisearch::{analytics, create_app, setup_meilisearch};
|
||||
use once_cell::sync::Lazy;
|
||||
use serde_json::{json, Value};
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::sleep;
|
||||
|
||||
use super::index::Index;
|
||||
use super::service::Service;
|
||||
use crate::common::encoder::Encoder;
|
||||
use crate::common::Value;
|
||||
use crate::json;
|
||||
|
||||
pub struct Server {
|
||||
pub service: Service,
|
||||
@@ -156,6 +157,10 @@ impl Server {
|
||||
self.service.post("/dumps", json!(null)).await
|
||||
}
|
||||
|
||||
pub async fn create_snapshot(&self) -> (Value, StatusCode) {
|
||||
self.service.post("/snapshots", json!(null)).await
|
||||
}
|
||||
|
||||
pub async fn index_swap(&self, value: Value) -> (Value, StatusCode) {
|
||||
self.service.post("/swap-indexes", value).await
|
||||
}
|
||||
@@ -204,7 +209,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
|
||||
db_path: dir.as_ref().join("db"),
|
||||
dump_dir: dir.as_ref().join("dumps"),
|
||||
env: "development".to_owned(),
|
||||
#[cfg(all(not(debug_assertions), feature = "analytics"))]
|
||||
#[cfg(feature = "analytics")]
|
||||
no_analytics: true,
|
||||
max_index_size: Byte::from_unit(100.0, ByteUnit::MiB).unwrap(),
|
||||
max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(),
|
||||
|
||||
@@ -7,9 +7,9 @@ use actix_web::test::TestRequest;
|
||||
use index_scheduler::IndexScheduler;
|
||||
use meilisearch::{analytics, create_app, Opt};
|
||||
use meilisearch_auth::AuthController;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::common::encoder::Encoder;
|
||||
use crate::common::Value;
|
||||
|
||||
pub struct Service {
|
||||
pub index_scheduler: Arc<IndexScheduler>,
|
||||
|
||||
@@ -3,9 +3,8 @@
|
||||
mod common;
|
||||
|
||||
use actix_web::test;
|
||||
use serde_json::{json, Value};
|
||||
|
||||
use crate::common::Server;
|
||||
use crate::common::{Server, Value};
|
||||
|
||||
enum HttpVerb {
|
||||
Put,
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use actix_web::test;
|
||||
use meili_snap::{json_string, snapshot};
|
||||
use serde_json::{json, Value};
|
||||
use time::format_description::well_known::Rfc3339;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
use crate::common::encoder::Encoder;
|
||||
use crate::common::{GetAllDocumentsOptions, Server};
|
||||
use crate::common::{GetAllDocumentsOptions, Server, Value};
|
||||
use crate::json;
|
||||
|
||||
/// This is the basic usage of our API and every other tests uses the content-type application/json
|
||||
#[actix_rt::test]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use meili_snap::{json_string, snapshot};
|
||||
use serde_json::json;
|
||||
|
||||
use crate::common::{GetAllDocumentsOptions, Server};
|
||||
use crate::json;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn delete_one_document_unexisting_index() {
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use meili_snap::*;
|
||||
use serde_json::json;
|
||||
use urlencoding::encode;
|
||||
|
||||
use crate::common::Server;
|
||||
use crate::json;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_all_documents_bad_offset() {
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use actix_web::test;
|
||||
use http::header::ACCEPT_ENCODING;
|
||||
use meili_snap::*;
|
||||
use serde_json::{json, Value};
|
||||
use urlencoding::encode as urlencode;
|
||||
|
||||
use crate::common::encoder::Encoder;
|
||||
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
|
||||
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server, Value};
|
||||
use crate::json;
|
||||
|
||||
// TODO: partial test since we are testing error, amd error is not yet fully implemented in
|
||||
// transplant
|
||||
@@ -40,7 +40,7 @@ async fn get_document() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
index.create(None).await;
|
||||
let documents = serde_json::json!([
|
||||
let documents = json!([
|
||||
{
|
||||
"id": 0,
|
||||
"nested": { "content": "foobar" },
|
||||
@@ -53,7 +53,7 @@ async fn get_document() {
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
response,
|
||||
serde_json::json!({
|
||||
json!({
|
||||
"id": 0,
|
||||
"nested": { "content": "foobar" },
|
||||
})
|
||||
@@ -64,7 +64,7 @@ async fn get_document() {
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
response,
|
||||
serde_json::json!({
|
||||
json!({
|
||||
"id": 0,
|
||||
})
|
||||
);
|
||||
@@ -75,7 +75,7 @@ async fn get_document() {
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
response,
|
||||
serde_json::json!({
|
||||
json!({
|
||||
"nested": { "content": "foobar" },
|
||||
})
|
||||
);
|
||||
@@ -122,7 +122,7 @@ async fn get_all_documents_no_options() {
|
||||
assert_eq!(code, 200);
|
||||
let arr = response["results"].as_array().unwrap();
|
||||
assert_eq!(arr.len(), 20);
|
||||
let first = serde_json::json!({
|
||||
let first = json!({
|
||||
"id":0,
|
||||
"isActive":false,
|
||||
"balance":"$2,668.55",
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
use serde_json::json;
|
||||
use meili_snap::snapshot;
|
||||
|
||||
use crate::common::encoder::Encoder;
|
||||
use crate::common::{GetAllDocumentsOptions, Server};
|
||||
use crate::json;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn error_document_update_create_index_bad_uid() {
|
||||
@@ -84,7 +85,13 @@ async fn update_document() {
|
||||
|
||||
let (response, code) = index.get_document(1, None).await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"doc_id": 1,
|
||||
"content": "foo",
|
||||
"other": "bar"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -122,7 +129,13 @@ async fn update_document_gzip_encoded() {
|
||||
|
||||
let (response, code) = index.get_document(1, None).await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"doc_id": 1,
|
||||
"content": "foo",
|
||||
"other": "bar"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
|
||||
@@ -2,10 +2,10 @@ mod data;
|
||||
|
||||
use meili_snap::{json_string, snapshot};
|
||||
use meilisearch::Opt;
|
||||
use serde_json::json;
|
||||
|
||||
use self::data::GetDump;
|
||||
use crate::common::{default_settings, GetAllDocumentsOptions, Server};
|
||||
use crate::json;
|
||||
|
||||
// all the following test are ignored on windows. See #2364
|
||||
#[actix_rt::test]
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use serde_json::json;
|
||||
|
||||
use crate::common::Server;
|
||||
use crate::json;
|
||||
|
||||
/// Feature name to test against.
|
||||
/// This will have to be changed by a different one when that feature is stabilized.
|
||||
|
||||
@@ -2,10 +2,10 @@ use actix_web::http::header::ContentType;
use actix_web::test;
use http::header::ACCEPT_ENCODING;
use meili_snap::{json_string, snapshot};
use serde_json::{json, Value};

use crate::common::encoder::Encoder;
use crate::common::Server;
use crate::common::{Server, Value};
use crate::json;

#[actix_rt::test]
async fn create_index_no_primary_key() {
@@ -21,7 +21,7 @@ async fn create_index_no_primary_key() {

assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
assert_eq!(response["details"]["primaryKey"], Value::Null);
assert_eq!(response["details"]["primaryKey"], json!(null));
}

#[actix_rt::test]
@@ -38,7 +38,7 @@ async fn create_index_with_gzip_encoded_request() {

assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
assert_eq!(response["details"]["primaryKey"], Value::Null);
assert_eq!(response["details"]["primaryKey"], json!(null));
}

#[actix_rt::test]
@@ -86,7 +86,7 @@ async fn create_index_with_zlib_encoded_request() {

assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
assert_eq!(response["details"]["primaryKey"], Value::Null);
assert_eq!(response["details"]["primaryKey"], json!(null));
}

#[actix_rt::test]
@@ -103,7 +103,7 @@ async fn create_index_with_brotli_encoded_request() {

assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
assert_eq!(response["details"]["primaryKey"], Value::Null);
assert_eq!(response["details"]["primaryKey"], json!(null));
}

#[actix_rt::test]
@@ -136,7 +136,7 @@ async fn create_index_with_invalid_primary_key() {

let (response, code) = index.get().await;
assert_eq!(code, 200);
assert_eq!(response["primaryKey"], Value::Null);
assert_eq!(response["primaryKey"], json!(null));
}

#[actix_rt::test]

@@ -1,6 +1,5 @@
use serde_json::json;

use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn create_and_delete_index() {

@@ -1,7 +1,7 @@
use meili_snap::*;
use serde_json::json;

use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn get_indexes_bad_offset() {

@@ -1,6 +1,5 @@
use serde_json::json;

use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn stats() {

@@ -1,9 +1,9 @@
use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

use crate::common::encoder::Encoder;
use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn update_primary_key() {

@@ -1,241 +0,0 @@
use meili_snap::snapshot;
use once_cell::sync::Lazy;
use serde_json::{json, Value};

use crate::common::Server;

pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{ "id": 1, "description": "Leather Jacket", "brand": "Lee Jeans", "product_id": "123456", "color": "Brown" },
{ "id": 2, "description": "Leather Jacket", "brand": "Lee Jeans", "product_id": "123456", "color": "Black" },
{ "id": 3, "description": "Leather Jacket", "brand": "Lee Jeans", "product_id": "123456", "color": "Blue" },
{ "id": 4, "description": "T-Shirt", "brand": "Nike", "product_id": "789012", "color": "Red" },
{ "id": 5, "description": "T-Shirt", "brand": "Nike", "product_id": "789012", "color": "Blue" },
{ "id": 6, "description": "Running Shoes", "brand": "Adidas", "product_id": "456789", "color": "Black" },
{ "id": 7, "description": "Running Shoes", "brand": "Adidas", "product_id": "456789", "color": "White" },
{ "id": 8, "description": "Hoodie", "brand": "Puma", "product_id": "987654", "color": "Gray" },
{ "id": 9, "description": "Sweater", "brand": "Gap", "product_id": "234567", "color": "Green" },
{ "id": 10, "description": "Sweater", "brand": "Gap", "product_id": "234567", "color": "Red" },
{ "id": 11, "description": "Sweater", "brand": "Gap", "product_id": "234567", "color": "Blue" },
{ "id": 12, "description": "Jeans", "brand": "Levi's", "product_id": "345678", "color": "Indigo" },
{ "id": 13, "description": "Jeans", "brand": "Levi's", "product_id": "345678", "color": "Black" },
{ "id": 14, "description": "Jeans", "brand": "Levi's", "product_id": "345678", "color": "Stone Wash" }
])
});

pub(self) static DOCUMENT_PRIMARY_KEY: &str = "id";
pub(self) static DOCUMENT_DISTINCT_KEY: &str = "product_id";

/// testing: https://github.com/meilisearch/meilisearch/issues/4078
#[actix_rt::test]
async fn distinct_search_with_offset_no_ranking() {
let server = Server::new().await;
let index = server.index("test");

let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(1).await;

fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();
hits_array.iter().map(|h| h[DOCUMENT_DISTINCT_KEY].as_str().unwrap()).collect::<Vec<_>>()
}

let (response, code) = index.search_post(json!({"offset": 0, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
snapshot!(response["estimatedTotalHits"] , @"11");

let (response, code) = index.search_post(json!({"offset": 2, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
snapshot!(response["estimatedTotalHits"], @"10");

let (response, code) = index.search_post(json!({"offset": 4, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
snapshot!(response["estimatedTotalHits"], @"6");

let (response, code) = index.search_post(json!({"offset": 5, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"1");
snapshot!(format!("{:?}", hits), @r#"["345678"]"#);
snapshot!(response["estimatedTotalHits"], @"6");

let (response, code) = index.search_post(json!({"offset": 6, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(response["estimatedTotalHits"], @"6");

let (response, code) = index.search_post(json!({"offset": 7, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(response["estimatedTotalHits"], @"6");
}

/// testing: https://github.com/meilisearch/meilisearch/issues/4130
#[actix_rt::test]
async fn distinct_search_with_pagination_no_ranking() {
let server = Server::new().await;
let index = server.index("test");

let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(1).await;

fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();
hits_array.iter().map(|h| h[DOCUMENT_DISTINCT_KEY].as_str().unwrap()).collect::<Vec<_>>()
}

let (response, code) = index.search_post(json!({"page": 0, "hitsPerPage": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(response["page"], @"0");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");

let (response, code) = index.search_post(json!({"page": 1, "hitsPerPage": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
snapshot!(response["page"], @"1");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");

let (response, code) = index.search_post(json!({"page": 2, "hitsPerPage": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
snapshot!(response["page"], @"2");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");

let (response, code) = index.search_post(json!({"page": 3, "hitsPerPage": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
snapshot!(response["page"], @"3");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");

let (response, code) = index.search_post(json!({"page": 4, "hitsPerPage": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(response["page"], @"4");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");

let (response, code) = index.search_post(json!({"page": 2, "hitsPerPage": 3})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"3");
snapshot!(format!("{:?}", hits), @r#"["987654", "234567", "345678"]"#);
snapshot!(response["page"], @"2");
snapshot!(response["totalPages"], @"2");
snapshot!(response["totalHits"], @"6");
}
@@ -1,8 +1,8 @@
use meili_snap::*;
use serde_json::json;

use super::DOCUMENTS;
use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn search_unexisting_index() {

@@ -1,8 +1,8 @@
use meili_snap::snapshot;
use once_cell::sync::Lazy;
use serde_json::{json, Value};

use crate::common::Server;
use crate::common::{Server, Value};
use crate::json;

pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([

@@ -1,8 +1,8 @@
use insta::{allow_duplicates, assert_json_snapshot};
use serde_json::json;

use super::*;
use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn formatted_contain_wildcard() {

@@ -1,8 +1,8 @@
use meili_snap::{json_string, snapshot};
use once_cell::sync::Lazy;
use serde_json::{json, Value};

use crate::common::Server;
use crate::common::{Server, Value};
use crate::json;

pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([

@@ -1,7 +1,6 @@
// This modules contains all the test concerning search. Each particular feature of the search
// should be tested in its own module to isolate tests and keep the tests readable.

mod distinct;
mod errors;
mod facet_search;
mod formatted;
@@ -11,9 +10,9 @@ mod pagination;
mod restrict_searchable;

use once_cell::sync::Lazy;
use serde_json::{json, Value};

use crate::common::Server;
use crate::common::{Server, Value};
use crate::json;

pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([

@@ -1,8 +1,8 @@
use meili_snap::{json_string, snapshot};
use serde_json::json;

use super::{DOCUMENTS, NESTED_DOCUMENTS};
use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn search_empty_list() {

@@ -1,6 +1,5 @@
use serde_json::json;

use crate::common::Server;
use crate::json;
use crate::search::DOCUMENTS;

#[actix_rt::test]

@@ -1,9 +1,9 @@
use meili_snap::{json_string, snapshot};
use once_cell::sync::Lazy;
use serde_json::{json, Value};

use crate::common::index::Index;
use crate::common::Server;
use crate::common::{Server, Value};
use crate::json;

async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
let index = server.index("test");

@@ -1,6 +1,5 @@
use serde_json::json;

use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn set_and_reset_distinct_attribute() {

@@ -1,7 +1,7 @@
use meili_snap::*;
use serde_json::json;

use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn settings_bad_displayed_attributes() {

@@ -1,16 +1,16 @@
use std::collections::HashMap;

use once_cell::sync::Lazy;
use serde_json::{json, Value};

use crate::common::Server;
use crate::common::{Server, Value};
use crate::json;

static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|| {
let mut map = HashMap::new();
map.insert("displayed_attributes", json!(["*"]));
map.insert("searchable_attributes", json!(["*"]));
map.insert("filterable_attributes", json!([]));
map.insert("distinct_attribute", json!(Value::Null));
map.insert("distinct_attribute", json!(null));
map.insert(
"ranking_rules",
json!(["words", "typo", "proximity", "attribute", "sort", "exactness"]),
@@ -229,7 +229,7 @@ macro_rules! test_setting_routes {
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.$write_method(url, serde_json::Value::Null).await;
let (response, code) = server.service.$write_method(url, serde_json::Value::Null.into()).await;
assert_eq!(code, 202, "{}", response);
server.index("").wait_task(0).await;
let (response, code) = server.index("test").get().await;

@@ -1,7 +1,7 @@
use meili_snap::{json_string, snapshot};
use serde_json::json;

use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn set_and_reset() {

@@ -1,11 +1,13 @@
use std::time::Duration;

use actix_rt::time::sleep;
use meili_snap::{json_string, snapshot};
use meilisearch::option::ScheduleSnapshot;
use meilisearch::Opt;

use crate::common::server::default_settings;
use crate::common::{GetAllDocumentsOptions, Server};
use crate::json;

macro_rules! verify_snapshot {
(
@@ -44,7 +46,7 @@ async fn perform_snapshot() {

let index = server.index("test");
index
.update_settings(serde_json::json! ({
.update_settings(json! ({
"searchableAttributes": [],
}))
.await;
@@ -90,3 +92,95 @@ async fn perform_snapshot() {
server.index("test1").settings(),
);
}

#[actix_rt::test]
async fn perform_on_demand_snapshot() {
let temp = tempfile::tempdir().unwrap();
let snapshot_dir = tempfile::tempdir().unwrap();

let options =
Opt { snapshot_dir: snapshot_dir.path().to_owned(), ..default_settings(temp.path()) };

let server = Server::new_with_options(options).await.unwrap();

let index = server.index("catto");
index
.update_settings(json! ({
"searchableAttributes": [],
}))
.await;

index.load_test_set().await;

server.index("doggo").create(Some("bone")).await;
index.wait_task(2).await;

server.index("doggo").create(Some("bone")).await;
index.wait_task(2).await;

let (task, code) = server.create_snapshot().await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(task, { ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 4,
"indexUid": null,
"status": "enqueued",
"type": "snapshotCreation",
"enqueuedAt": "[date]"
}
"###);
let task = index.wait_task(task.uid()).await;
snapshot!(json_string!(task, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 4,
"indexUid": null,
"status": "succeeded",
"type": "snapshotCreation",
"canceledBy": null,
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);

let temp = tempfile::tempdir().unwrap();

let snapshots: Vec<String> = std::fs::read_dir(&snapshot_dir)
.unwrap()
.map(|entry| entry.unwrap().path().file_name().unwrap().to_str().unwrap().to_string())
.collect();
meili_snap::snapshot!(format!("{snapshots:?}"), @r###"["db.snapshot"]"###);

let snapshot_path = snapshot_dir.path().to_owned().join("db.snapshot");
#[cfg_attr(windows, allow(unused))]
let snapshot_meta = std::fs::metadata(&snapshot_path).unwrap();

#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let mode = snapshot_meta.permissions().mode();
// rwxrwxrwx
meili_snap::snapshot!(format!("{:b}", mode), @"1000000100100100");
}

let options = Opt { import_snapshot: Some(snapshot_path), ..default_settings(temp.path()) };

let snapshot_server = Server::new_with_options(options).await.unwrap();

verify_snapshot!(server, snapshot_server, |server| =>
server.list_indexes(None, None),
// for some reason the db sizes differ. this may be due to the compaction options we have
// set when performing the snapshot
//server.stats(),

// The original instance contains the snapshotCreation task, while the snapshotted-instance does not. For this reason we need to compare the task queue **after** the task 4
server.tasks_filter("?from=2"),

server.index("catto").get_all_documents(GetAllDocumentsOptions::default()),
server.index("catto").settings(),
server.index("doggo").get_all_documents(GetAllDocumentsOptions::default()),
server.index("doggo").settings(),
);
}

@@ -1,8 +1,8 @@
use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn get_settings_unexisting_index() {

@@ -1,7 +1,7 @@
use meili_snap::*;
use serde_json::json;

use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn swap_indexes_bad_format() {

@@ -1,9 +1,9 @@
mod errors;

use meili_snap::{json_string, snapshot};
use serde_json::json;

use crate::common::{GetAllDocumentsOptions, Server};
use crate::json;

#[actix_rt::test]
async fn swap_indexes() {

@@ -1,11 +1,11 @@
mod errors;

use meili_snap::insta::assert_json_snapshot;
use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn error_get_unexisting_task_status() {
@@ -33,7 +33,7 @@ async fn get_task_status() {
index.create(None).await;
index
.add_documents(
serde_json::json!([{
json!([{
"id": 1,
"content": "foobar",
}]),

@@ -1,5 +1,4 @@
use std::fs::File;
use std::io::BufReader;
use std::{io, str};

use obkv::KvReader;
@@ -20,14 +19,14 @@ use crate::FieldId;
pub struct EnrichedDocumentsBatchReader<R> {
documents: DocumentsBatchReader<R>,
primary_key: String,
external_ids: grenad::ReaderCursor<BufReader<File>>,
external_ids: grenad::ReaderCursor<File>,
}

impl<R: io::Read + io::Seek> EnrichedDocumentsBatchReader<R> {
pub fn new(
documents: DocumentsBatchReader<R>,
primary_key: String,
external_ids: grenad::Reader<BufReader<File>>,
external_ids: grenad::Reader<File>,
) -> Result<Self, Error> {
if documents.documents_count() as u64 == external_ids.len() {
Ok(EnrichedDocumentsBatchReader {
@@ -76,7 +75,7 @@ pub struct EnrichedDocument<'a> {
pub struct EnrichedDocumentsBatchCursor<R> {
documents: DocumentsBatchCursor<R>,
primary_key: String,
external_ids: grenad::ReaderCursor<BufReader<File>>,
external_ids: grenad::ReaderCursor<File>,
}

impl<R> EnrichedDocumentsBatchCursor<R> {

@@ -2,7 +2,7 @@ use std::cmp;

use crate::{relative_from_absolute_position, Position};

pub const MAX_DISTANCE: u32 = 8;
pub const MAX_DISTANCE: u32 = 4;

pub fn index_proximity(lhs: u32, rhs: u32) -> u32 {
if lhs <= rhs {

@@ -53,22 +53,11 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
if excluded.contains(docid) {
continue;
}

distinct_single_docid(ctx.index, ctx.txn, distinct_fid, docid, &mut excluded)?;
results.push(docid);
}

let mut all_candidates = universe - excluded;
all_candidates.extend(results.iter().copied());
// drain the results of the skipped elements
// this **must** be done **after** writing the entire results in `all_candidates` to ensure
// e.g. estimatedTotalHits is correct.
if results.len() >= from {
results.drain(..from);
} else {
results.clear();
}

return Ok(BucketSortOutput {
scores: vec![Default::default(); results.len()],
docids: results,

@@ -1,6 +1,7 @@
#![allow(clippy::too_many_arguments)]

use super::ProximityCondition;
use crate::proximity::MAX_DISTANCE;
use crate::search::new::interner::{DedupInterner, Interned};
use crate::search::new::query_term::LocatedQueryTermSubset;
use crate::search::new::SearchContext;
@@ -35,7 +36,7 @@ pub fn build_edges(
}

let mut conditions = vec![];
for cost in right_ngram_max..(7 + right_ngram_max) {
for cost in right_ngram_max..(((MAX_DISTANCE as usize) - 1) + right_ngram_max) {
conditions.push((
cost as u32,
conditions_interner.insert(ProximityCondition::Uninit {
@@ -47,7 +48,7 @@ pub fn build_edges(
}

conditions.push((
(7 + right_ngram_max) as u32,
((MAX_DISTANCE - 1) + (right_ngram_max as u32)),
conditions_interner.insert(ProximityCondition::Term { term: right_term.clone() }),
));

@@ -1,6 +1,5 @@
use std::borrow::Cow;
use std::fs::File;
use std::io::BufReader;

use grenad::CompressionType;
use heed::types::ByteSlice;
@@ -31,7 +30,7 @@ pub struct FacetsUpdateBulk<'i> {
facet_type: FacetType,
field_ids: Vec<FieldId>,
// None if level 0 does not need to be updated
new_data: Option<grenad::Reader<BufReader<File>>>,
new_data: Option<grenad::Reader<File>>,
}

impl<'i> FacetsUpdateBulk<'i> {
@@ -39,7 +38,7 @@ impl<'i> FacetsUpdateBulk<'i> {
index: &'i Index,
field_ids: Vec<FieldId>,
facet_type: FacetType,
new_data: grenad::Reader<BufReader<File>>,
new_data: grenad::Reader<File>,
group_size: u8,
min_level_size: u8,
) -> FacetsUpdateBulk<'i> {
@@ -188,7 +187,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
&self,
field_id: FieldId,
txn: &RoTxn,
) -> Result<(Vec<grenad::Reader<BufReader<File>>>, RoaringBitmap)> {
) -> Result<(Vec<grenad::Reader<File>>, RoaringBitmap)> {
let mut all_docids = RoaringBitmap::new();
let subwriters = self.compute_higher_levels(txn, field_id, 32, &mut |bitmaps, _| {
for bitmap in bitmaps {
@@ -260,7 +259,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
field_id: u16,
level: u8,
handle_group: &mut dyn FnMut(&[RoaringBitmap], &'t [u8]) -> Result<()>,
) -> Result<Vec<grenad::Reader<BufReader<File>>>> {
) -> Result<Vec<grenad::Reader<File>>> {
if level == 0 {
self.read_level_0(rtxn, field_id, handle_group)?;
// Level 0 is already in the database

@@ -1,6 +1,5 @@
use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;

use heed::types::{ByteSlice, DecodeIgnore};
use heed::{BytesDecode, Error, RoTxn, RwTxn};
@@ -35,14 +34,14 @@ pub struct FacetsUpdateIncremental<'i> {
index: &'i Index,
inner: FacetsUpdateIncrementalInner,
facet_type: FacetType,
new_data: grenad::Reader<BufReader<File>>,
new_data: grenad::Reader<File>,
}

impl<'i> FacetsUpdateIncremental<'i> {
pub fn new(
index: &'i Index,
facet_type: FacetType,
new_data: grenad::Reader<BufReader<File>>,
new_data: grenad::Reader<File>,
group_size: u8,
min_level_size: u8,
max_group_size: u8,

@@ -78,7 +78,6 @@ pub const FACET_MIN_LEVEL_SIZE: u8 = 5;

use std::collections::BTreeSet;
use std::fs::File;
use std::io::BufReader;
use std::iter::FromIterator;

use charabia::normalizer::{Normalize, NormalizerOption};
@@ -109,17 +108,13 @@ pub struct FacetsUpdate<'i> {
index: &'i Index,
database: heed::Database<FacetGroupKeyCodec<ByteSliceRefCodec>, FacetGroupValueCodec>,
facet_type: FacetType,
new_data: grenad::Reader<BufReader<File>>,
new_data: grenad::Reader<File>,
group_size: u8,
max_group_size: u8,
min_level_size: u8,
}
impl<'i> FacetsUpdate<'i> {
pub fn new(
index: &'i Index,
facet_type: FacetType,
new_data: grenad::Reader<BufReader<File>>,
) -> Self {
pub fn new(index: &'i Index, facet_type: FacetType, new_data: grenad::Reader<File>) -> Self {
let database = match facet_type {
FacetType::String => index
.facet_id_string_docids

@@ -1,4 +1,4 @@
use std::io::{BufWriter, Read, Seek};
use std::io::{Read, Seek};
use std::result::Result as StdResult;
use std::{fmt, iter};

@@ -35,7 +35,7 @@ pub fn enrich_documents_batch<R: Read + Seek>(

let (mut cursor, mut documents_batch_index) = reader.into_cursor_and_fields_index();

let mut external_ids = tempfile::tempfile().map(BufWriter::new).map(grenad::Writer::new)?;
let mut external_ids = tempfile::tempfile().map(grenad::Writer::new)?;
let mut uuid_buffer = [0; uuid::fmt::Hyphenated::LENGTH];

// The primary key *field id* that has already been set for this index or the one

@@ -1,7 +1,6 @@
use std::collections::{HashMap, HashSet};
use std::convert::TryInto;
use std::fs::File;
use std::io::BufReader;
use std::{io, mem, str};

use charabia::{Language, Script, SeparatorKind, Token, TokenKind, Tokenizer, TokenizerBuilder};
@@ -32,7 +31,7 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
allowed_separators: Option<&[&str]>,
dictionary: Option<&[&str]>,
max_positions_per_attributes: Option<u32>,
) -> Result<(RoaringBitmap, grenad::Reader<BufReader<File>>, ScriptLanguageDocidsMap)> {
) -> Result<(RoaringBitmap, grenad::Reader<File>, ScriptLanguageDocidsMap)> {
puffin::profile_function!();

let max_positions_per_attributes = max_positions_per_attributes

@@ -1,5 +1,5 @@
use std::fs::File;
use std::io::{self, BufReader};
use std::io;

use heed::{BytesDecode, BytesEncode};

@@ -19,7 +19,7 @@ use crate::Result;
pub fn extract_facet_number_docids<R: io::Read + io::Seek>(
docid_fid_facet_number: grenad::Reader<R>,
indexer: GrenadParameters,
) -> Result<grenad::Reader<BufReader<File>>> {
) -> Result<grenad::Reader<File>> {
puffin::profile_function!();

let max_memory = indexer.max_memory_by_thread();

@@ -1,5 +1,5 @@
use std::fs::File;
use std::io::{self, BufReader};
use std::io;

use heed::BytesEncode;

@@ -17,7 +17,7 @@ use crate::{FieldId, Result, MAX_FACET_VALUE_LENGTH};
pub fn extract_facet_string_docids<R: io::Read + io::Seek>(
docid_fid_facet_string: grenad::Reader<R>,
indexer: GrenadParameters,
) -> Result<grenad::Reader<BufReader<File>>> {
) -> Result<grenad::Reader<File>> {
puffin::profile_function!();

let max_memory = indexer.max_memory_by_thread();

@@ -1,7 +1,7 @@
use std::collections::{BTreeMap, HashSet};
use std::convert::TryInto;
use std::fs::File;
use std::io::{self, BufReader};
use std::io;
use std::mem::size_of;

use heed::zerocopy::AsBytes;
@@ -17,11 +17,11 @@ use crate::{CboRoaringBitmapCodec, DocumentId, FieldId, Result, BEU32, MAX_FACET

/// The extracted facet values stored in grenad files by type.
pub struct ExtractedFacetValues {
pub docid_fid_facet_numbers_chunk: grenad::Reader<BufReader<File>>,
pub docid_fid_facet_strings_chunk: grenad::Reader<BufReader<File>>,
pub fid_facet_is_null_docids_chunk: grenad::Reader<BufReader<File>>,
pub fid_facet_is_empty_docids_chunk: grenad::Reader<BufReader<File>>,
pub fid_facet_exists_docids_chunk: grenad::Reader<BufReader<File>>,
pub docid_fid_facet_numbers_chunk: grenad::Reader<File>,
pub docid_fid_facet_strings_chunk: grenad::Reader<File>,
pub fid_facet_is_null_docids_chunk: grenad::Reader<File>,
pub fid_facet_is_empty_docids_chunk: grenad::Reader<File>,
pub fid_facet_exists_docids_chunk: grenad::Reader<File>,
}

/// Extracts the facet values of each faceted field of each document.

@@ -1,6 +1,6 @@
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, BufReader};
use std::io;

use grenad::Sorter;

@@ -21,7 +21,7 @@ use crate::{relative_from_absolute_position, DocumentId, FieldId, Result};
pub fn extract_fid_word_count_docids<R: io::Read + io::Seek>(
docid_word_positions: grenad::Reader<R>,
indexer: GrenadParameters,
) -> Result<grenad::Reader<BufReader<File>>> {
) -> Result<grenad::Reader<File>> {
puffin::profile_function!();

let max_memory = indexer.max_memory_by_thread();

@@ -1,5 +1,5 @@
use std::fs::File;
use std::io::{self, BufReader};
use std::io;

use concat_arrays::concat_arrays;
use serde_json::Value;
@@ -18,7 +18,7 @@ pub fn extract_geo_points<R: io::Read + io::Seek>(
indexer: GrenadParameters,
primary_key_id: FieldId,
(lat_fid, lng_fid): (FieldId, FieldId),
) -> Result<grenad::Reader<BufReader<File>>> {
) -> Result<grenad::Reader<File>> {
puffin::profile_function!();

let mut writer = create_writer(

@@ -1,6 +1,6 @@
use std::convert::TryFrom;
use std::fs::File;
use std::io::{self, BufReader};
use std::io;

use bytemuck::cast_slice;
use serde_json::{from_slice, Value};
@@ -18,7 +18,7 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
indexer: GrenadParameters,
primary_key_id: FieldId,
vectors_fid: FieldId,
) -> Result<grenad::Reader<BufReader<File>>> {
) -> Result<grenad::Reader<File>> {
puffin::profile_function!();

let mut writer = create_writer(

@@ -1,6 +1,6 @@
use std::collections::HashSet;
use std::fs::File;
use std::io::{self, BufReader};
use std::io;
use std::iter::FromIterator;

use roaring::RoaringBitmap;
@@ -26,7 +26,7 @@ pub fn extract_word_docids<R: io::Read + io::Seek>(
docid_word_positions: grenad::Reader<R>,
indexer: GrenadParameters,
exact_attributes: &HashSet<FieldId>,
) -> Result<(grenad::Reader<BufReader<File>>, grenad::Reader<BufReader<File>>)> {
) -> Result<(grenad::Reader<File>, grenad::Reader<File>)> {
puffin::profile_function!();

let max_memory = indexer.max_memory_by_thread();

@@ -1,5 +1,5 @@
use std::fs::File;
use std::io::{self, BufReader};
use std::io;

use super::helpers::{
create_sorter, merge_cbo_roaring_bitmaps, read_u32_ne_bytes, sorter_into_reader,
@@ -14,7 +14,7 @@ use crate::{relative_from_absolute_position, DocumentId, Result};
pub fn extract_word_fid_docids<R: io::Read + io::Seek>(
docid_word_positions: grenad::Reader<R>,
indexer: GrenadParameters,
) -> Result<grenad::Reader<BufReader<File>>> {
) -> Result<grenad::Reader<File>> {
puffin::profile_function!();

let max_memory = indexer.max_memory_by_thread();

@@ -1,7 +1,6 @@
use std::cmp::Ordering;
use std::collections::{BinaryHeap, HashMap};
use std::fs::File;
use std::io::BufReader;
use std::{cmp, io, mem, str, vec};

use super::helpers::{
@@ -21,7 +20,7 @@ use crate::{DocumentId, Result};
pub fn extract_word_pair_proximity_docids<R: io::Read + io::Seek>(
docid_word_positions: grenad::Reader<R>,
indexer: GrenadParameters,
) -> Result<grenad::Reader<BufReader<File>>> {
) -> Result<grenad::Reader<File>> {
puffin::profile_function!();

let max_memory = indexer.max_memory_by_thread();

@@ -1,5 +1,5 @@
use std::fs::File;
use std::io::{self, BufReader};
use std::io;

use super::helpers::{
create_sorter, merge_cbo_roaring_bitmaps, read_u32_ne_bytes, sorter_into_reader,
@@ -17,7 +17,7 @@ use crate::{bucketed_position, relative_from_absolute_position, DocumentId, Resu
pub fn extract_word_position_docids<R: io::Read + io::Seek>(
docid_word_positions: grenad::Reader<R>,
indexer: GrenadParameters,
) -> Result<grenad::Reader<BufReader<File>>> {
) -> Result<grenad::Reader<File>> {
puffin::profile_function!();

let max_memory = indexer.max_memory_by_thread();

@@ -12,7 +12,6 @@ mod extract_word_position_docids;

use std::collections::HashSet;
use std::fs::File;
use std::io::BufReader;

use crossbeam_channel::Sender;
use log::debug;
@@ -40,8 +39,8 @@ use crate::{FieldId, Result};
/// Send data in grenad file over provided Sender.
#[allow(clippy::too_many_arguments)]
pub(crate) fn data_from_obkv_documents(
original_obkv_chunks: impl Iterator<Item = Result<grenad::Reader<BufReader<File>>>> + Send,
flattened_obkv_chunks: impl Iterator<Item = Result<grenad::Reader<BufReader<File>>>> + Send,
original_obkv_chunks: impl Iterator<Item = Result<grenad::Reader<File>>> + Send,
flattened_obkv_chunks: impl Iterator<Item = Result<grenad::Reader<File>>> + Send,
indexer: GrenadParameters,
lmdb_writer_sx: Sender<Result<TypedChunk>>,
searchable_fields: Option<HashSet<FieldId>>,
@@ -153,7 +152,7 @@ pub(crate) fn data_from_obkv_documents(
});
}

spawn_extraction_task::<_, _, Vec<grenad::Reader<BufReader<File>>>>(
spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
docid_word_positions_chunks.clone(),
indexer,
lmdb_writer_sx.clone(),
@@ -163,7 +162,7 @@ pub(crate) fn data_from_obkv_documents(
"word-pair-proximity-docids",
);

spawn_extraction_task::<_, _, Vec<grenad::Reader<BufReader<File>>>>(
spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
docid_word_positions_chunks.clone(),
indexer,
lmdb_writer_sx.clone(),
@@ -173,11 +172,7 @@ pub(crate) fn data_from_obkv_documents(
"field-id-wordcount-docids",
);

spawn_extraction_task::<
_,
_,
Vec<(grenad::Reader<BufReader<File>>, grenad::Reader<BufReader<File>>)>,
>(
spawn_extraction_task::<_, _, Vec<(grenad::Reader<File>, grenad::Reader<File>)>>(
docid_word_positions_chunks.clone(),
indexer,
lmdb_writer_sx.clone(),
@@ -190,7 +185,7 @@ pub(crate) fn data_from_obkv_documents(
"word-docids",
);

spawn_extraction_task::<_, _, Vec<grenad::Reader<BufReader<File>>>>(
spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
docid_word_positions_chunks.clone(),
indexer,
lmdb_writer_sx.clone(),
@@ -199,7 +194,7 @@ pub(crate) fn data_from_obkv_documents(
TypedChunk::WordPositionDocids,
"word-position-docids",
);
spawn_extraction_task::<_, _, Vec<grenad::Reader<BufReader<File>>>>(
spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
docid_word_positions_chunks,
indexer,
lmdb_writer_sx.clone(),
@@ -209,7 +204,7 @@ pub(crate) fn data_from_obkv_documents(
"word-fid-docids",
);

spawn_extraction_task::<_, _, Vec<grenad::Reader<BufReader<File>>>>(
spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
docid_fid_facet_strings_chunks,
indexer,
lmdb_writer_sx.clone(),
@@ -219,7 +214,7 @@ pub(crate) fn data_from_obkv_documents(
"field-id-facet-string-docids",
);

spawn_extraction_task::<_, _, Vec<grenad::Reader<BufReader<File>>>>(
spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
docid_fid_facet_numbers_chunks,
indexer,
lmdb_writer_sx,
@@ -274,7 +269,7 @@ fn spawn_extraction_task<FE, FS, M>(
/// Extract chunked data and send it into lmdb_writer_sx sender:
/// - documents
fn send_original_documents_data(
original_documents_chunk: Result<grenad::Reader<BufReader<File>>>,
original_documents_chunk: Result<grenad::Reader<File>>,
indexer: GrenadParameters,
lmdb_writer_sx: Sender<Result<TypedChunk>>,
vectors_field_id: Option<FieldId>,
@@ -316,7 +311,7 @@ fn send_original_documents_data(
#[allow(clippy::too_many_arguments)]
#[allow(clippy::type_complexity)]
fn send_and_extract_flattened_documents_data(
flattened_documents_chunk: Result<grenad::Reader<BufReader<File>>>,
flattened_documents_chunk: Result<grenad::Reader<File>>,
indexer: GrenadParameters,
lmdb_writer_sx: Sender<Result<TypedChunk>>,
searchable_fields: &Option<HashSet<FieldId>>,
@@ -333,10 +328,7 @@ fn send_and_extract_flattened_documents_data(
grenad::Reader<CursorClonableMmap>,
(
grenad::Reader<CursorClonableMmap>,
(
grenad::Reader<BufReader<File>>,
(grenad::Reader<BufReader<File>>, grenad::Reader<BufReader<File>>),
),
(grenad::Reader<File>, (grenad::Reader<File>, grenad::Reader<File>)),
),
),
)> {

@@ -1,6 +1,6 @@
use std::borrow::Cow;
use std::fs::File;
use std::io::{self, BufReader, BufWriter, Seek};
use std::io::{self, Seek};
use std::time::Instant;

use grenad::{CompressionType, Sorter};
@@ -17,13 +17,13 @@ pub fn create_writer<R: io::Write>(
typ: grenad::CompressionType,
level: Option<u32>,
file: R,
) -> grenad::Writer<BufWriter<R>> {
) -> grenad::Writer<R> {
let mut builder = grenad::Writer::builder();
builder.compression_type(typ);
if let Some(level) = level {
builder.compression_level(level);
}
builder.build(BufWriter::new(file))
builder.build(file)
}

pub fn create_sorter(
@@ -53,7 +53,7 @@ pub fn create_sorter(
pub fn sorter_into_reader(
sorter: grenad::Sorter<MergeFn>,
indexer: GrenadParameters,
) -> Result<grenad::Reader<BufReader<File>>> {
) -> Result<grenad::Reader<File>> {
let mut writer = create_writer(
indexer.chunk_compression_type,
indexer.chunk_compression_level,
@@ -64,18 +64,16 @@ pub fn sorter_into_reader(
writer_into_reader(writer)
}

pub fn writer_into_reader(
writer: grenad::Writer<BufWriter<File>>,
) -> Result<grenad::Reader<BufReader<File>>> {
let mut file = writer.into_inner()?.into_inner().map_err(|err| err.into_error())?;
pub fn writer_into_reader(writer: grenad::Writer<File>) -> Result<grenad::Reader<File>> {
let mut file = writer.into_inner()?;
file.rewind()?;
grenad::Reader::new(BufReader::new(file)).map_err(Into::into)
grenad::Reader::new(file).map_err(Into::into)
}

pub unsafe fn as_cloneable_grenad(
reader: &grenad::Reader<BufReader<File>>,
reader: &grenad::Reader<File>,
) -> Result<grenad::Reader<CursorClonableMmap>> {
let file = reader.get_ref().get_ref();
let file = reader.get_ref();
let mmap = memmap2::Mmap::map(file)?;
let cursor = io::Cursor::new(ClonableMmap::from(mmap));
let reader = grenad::Reader::new(cursor)?;
@@ -91,8 +89,8 @@ where
fn merge(self, merge_fn: MergeFn, indexer: &GrenadParameters) -> Result<Self::Output>;
}

impl MergeableReader for Vec<grenad::Reader<BufReader<File>>> {
type Output = grenad::Reader<BufReader<File>>;
impl MergeableReader for Vec<grenad::Reader<File>> {
type Output = grenad::Reader<File>;

fn merge(self, merge_fn: MergeFn, params: &GrenadParameters) -> Result<Self::Output> {
let mut merger = MergerBuilder::new(merge_fn);
@@ -101,8 +99,8 @@ impl MergeableReader for Vec<grenad::Reader<BufReader<File>>> {
}
}

impl MergeableReader for Vec<(grenad::Reader<BufReader<File>>, grenad::Reader<BufReader<File>>)> {
type Output = (grenad::Reader<BufReader<File>>, grenad::Reader<BufReader<File>>);
impl MergeableReader for Vec<(grenad::Reader<File>, grenad::Reader<File>)> {
type Output = (grenad::Reader<File>, grenad::Reader<File>);

fn merge(self, merge_fn: MergeFn, params: &GrenadParameters) -> Result<Self::Output> {
let mut m1 = MergerBuilder::new(merge_fn);
@@ -127,7 +125,7 @@ impl<R: io::Read + io::Seek> MergerBuilder<R> {
Ok(())
}

fn finish(self, params: &GrenadParameters) -> Result<grenad::Reader<BufReader<File>>> {
fn finish(self, params: &GrenadParameters) -> Result<grenad::Reader<File>> {
let merger = self.0.build();
let mut writer = create_writer(
params.chunk_compression_type,
@@ -178,7 +176,7 @@ pub fn grenad_obkv_into_chunks<R: io::Read + io::Seek>(
reader: grenad::Reader<R>,
indexer: GrenadParameters,
documents_chunk_size: usize,
) -> Result<impl Iterator<Item = Result<grenad::Reader<BufReader<File>>>>> {
) -> Result<impl Iterator<Item = Result<grenad::Reader<File>>>> {
let mut continue_reading = true;
let mut cursor = reader.into_cursor()?;

@@ -659,10 +659,8 @@ impl<'a, 'i> Transform<'a, 'i> {
new_documents_ids: self.new_documents_ids,
replaced_documents_ids: self.replaced_documents_ids,
documents_count: self.documents_count,
original_documents: original_documents.into_inner().map_err(|err| err.into_error())?,
flattened_documents: flattened_documents
.into_inner()
.map_err(|err| err.into_error())?,
original_documents,
flattened_documents,
})
}

@@ -781,10 +779,8 @@ impl<'a, 'i> Transform<'a, 'i> {
new_documents_ids: documents_ids,
replaced_documents_ids: RoaringBitmap::default(),
documents_count,
original_documents: original_documents.into_inner().map_err(|err| err.into_error())?,
flattened_documents: flattened_documents
.into_inner()
.map_err(|err| err.into_error())?,
original_documents,
flattened_documents,
};

let new_facets = output.compute_real_facets(wtxn, self.index)?;

@@ -2,7 +2,7 @@ use std::borrow::Cow;
use std::collections::HashMap;
use std::convert::TryInto;
use std::fs::File;
use std::io::{self, BufReader};
use std::io;

use bytemuck::allocation::pod_collect_to_vec;
use charabia::{Language, Script};
@@ -27,22 +27,22 @@ pub(crate) enum TypedChunk {
FieldIdDocidFacetStrings(grenad::Reader<CursorClonableMmap>),
FieldIdDocidFacetNumbers(grenad::Reader<CursorClonableMmap>),
Documents(grenad::Reader<CursorClonableMmap>),
FieldIdWordcountDocids(grenad::Reader<BufReader<File>>),
FieldIdWordcountDocids(grenad::Reader<File>),
NewDocumentsIds(RoaringBitmap),
WordDocids {
word_docids_reader: grenad::Reader<BufReader<File>>,
exact_word_docids_reader: grenad::Reader<BufReader<File>>,
word_docids_reader: grenad::Reader<File>,
exact_word_docids_reader: grenad::Reader<File>,
},
WordPositionDocids(grenad::Reader<BufReader<File>>),
WordFidDocids(grenad::Reader<BufReader<File>>),
WordPairProximityDocids(grenad::Reader<BufReader<File>>),
FieldIdFacetStringDocids(grenad::Reader<BufReader<File>>),
FieldIdFacetNumberDocids(grenad::Reader<BufReader<File>>),
FieldIdFacetExistsDocids(grenad::Reader<BufReader<File>>),
FieldIdFacetIsNullDocids(grenad::Reader<BufReader<File>>),
FieldIdFacetIsEmptyDocids(grenad::Reader<BufReader<File>>),
GeoPoints(grenad::Reader<BufReader<File>>),
VectorPoints(grenad::Reader<BufReader<File>>),
WordPositionDocids(grenad::Reader<File>),
WordFidDocids(grenad::Reader<File>),
WordPairProximityDocids(grenad::Reader<File>),
FieldIdFacetStringDocids(grenad::Reader<File>),
FieldIdFacetNumberDocids(grenad::Reader<File>),
FieldIdFacetExistsDocids(grenad::Reader<File>),
FieldIdFacetIsNullDocids(grenad::Reader<File>),
FieldIdFacetIsEmptyDocids(grenad::Reader<File>),
GeoPoints(grenad::Reader<File>),
VectorPoints(grenad::Reader<File>),
ScriptLanguageDocids(HashMap<(Script, Language), RoaringBitmap>),
}

@@ -1,6 +1,6 @@
use std::borrow::Cow;
use std::collections::HashSet;
use std::io::{BufReader, BufWriter};
use std::io::BufReader;

use grenad::CompressionType;
use heed::types::ByteSlice;
@@ -119,9 +119,9 @@ pub fn insert_into_database(
pub fn write_into_lmdb_database_without_merging(
wtxn: &mut heed::RwTxn,
database: heed::PolyDatabase,
writer: grenad::Writer<BufWriter<std::fs::File>>,
writer: grenad::Writer<std::fs::File>,
) -> Result<()> {
let file = writer.into_inner()?.into_inner().map_err(|err| err.into_error())?;
let file = writer.into_inner()?;
let reader = grenad::Reader::new(BufReader::new(file))?;
if database.is_empty(wtxn)? {
let mut out_iter = database.iter_mut::<_, ByteSlice, ByteSlice>(wtxn)?;

@@ -8,7 +8,7 @@ use Criterion::*;
use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};

macro_rules! test_distinct {
($func:ident, $distinct:ident, $exhaustive:ident, $limit:expr, $offset:expr, $criteria:expr, $n_res:expr) => {
($func:ident, $distinct:ident, $exhaustive:ident, $limit:expr, $criteria:expr, $n_res:expr) => {
#[test]
fn $func() {
let criteria = $criteria;
@@ -27,7 +27,6 @@ macro_rules! test_distinct {
let mut search = Search::new(&rtxn, &index);
search.query(search::TEST_QUERY);
search.limit($limit);
search.offset($offset);
search.exhaustive_number_hits($exhaustive);

search.terms_matching_strategy(TermsMatchingStrategy::default());
@@ -48,7 +47,6 @@ macro_rules! test_distinct {
Some(d.id)
}
})
.skip($offset)
.take($limit)
.collect();

@@ -63,7 +61,6 @@ test_distinct!(
tag,
true,
1,
0,
vec![Words, Typo, Proximity, Attribute, Exactness],
3
);
@@ -72,7 +69,6 @@ test_distinct!(
asc_desc_rank,
true,
1,
0,
vec![Words, Typo, Proximity, Attribute, Exactness],
7
);
@@ -81,7 +77,6 @@ test_distinct!(
asc_desc_rank,
true,
0,
0,
vec![Desc(S("attribute_rank")), Desc(S("exactness_rank")), Exactness, Typo],
7
);
@@ -91,7 +86,6 @@ test_distinct!(
tag,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words, Typo, Proximity, Attribute, Exactness],
3
);
@@ -100,7 +94,6 @@ test_distinct!(
asc_desc_rank,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words, Typo, Proximity, Attribute, Exactness],
7
);
@@ -109,7 +102,6 @@ test_distinct!(
tag,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words],
3
);
@@ -118,7 +110,6 @@ test_distinct!(
asc_desc_rank,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words],
7
);
@@ -127,7 +118,6 @@ test_distinct!(
tag,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words, Typo],
3
);
@@ -136,7 +126,6 @@ test_distinct!(
asc_desc_rank,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words, Typo],
7
);
@@ -145,7 +134,6 @@ test_distinct!(
tag,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words, Proximity],
3
);
@@ -154,7 +142,6 @@ test_distinct!(
asc_desc_rank,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words, Proximity],
7
);
@@ -163,7 +150,6 @@ test_distinct!(
tag,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words, Attribute],
3
);
@@ -172,7 +158,6 @@ test_distinct!(
asc_desc_rank,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words, Attribute],
7
);
@@ -181,7 +166,6 @@ test_distinct!(
tag,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words, Exactness],
3
);
@@ -190,47 +174,6 @@ test_distinct!(
asc_desc_rank,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
0,
vec![Words, Exactness],
7
);
test_distinct!(
// testing: https://github.com/meilisearch/meilisearch/issues/4078
distinct_string_limit_and_offset,
tag,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
1,
vec![],
3
);
test_distinct!(
// testing: https://github.com/meilisearch/meilisearch/issues/4078
exhaustive_distinct_string_limit_and_offset,
tag,
true,
1,
2,
vec![],
3
);
test_distinct!(
// testing: https://github.com/meilisearch/meilisearch/issues/4078
distinct_number_limit_and_offset,
asc_desc_rank,
false,
EXTERNAL_DOCUMENTS_IDS.len(),
2,
vec![],
7
);
test_distinct!(
// testing: https://github.com/meilisearch/meilisearch/issues/4078
exhaustive_distinct_number_limit_and_offset,
asc_desc_rank,
true,
2,
4,
vec![],
7
);