Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-11-22 04:36:32 +00:00

Compare commits: v1.21.0 ... dependabot (90 commits)
Commits (SHA1):

1b01539fc9  c29bdcae23  75219181a3  a5b5cf7cd1  142ba8ea00  4bc823e07c
db06ca7138  95595a768e  36f649768e  0c6fc243f2  dfc46d5627  11d55f2121
014da57cf6  70a0ff4a8f  dd0d5e4b90  15b3bb1700  077ec2ab11  f25db0795e
c50a337c29  efeae09ce1  ad55b48664  94eabd34e6  6935589f74  4beb452027
b722da303a  ad39263b94  0ffb08b112  ff80b4d0ff  7fb4404928  8405f0bf9c
3a7f9b56fe  61034e2e2e  108d6d3344  35bd00f6a1  69059d67ef  e13783103f
f719665c4e  638f284614  32ac98ed95  46aee695ca  716c67f858  fec10bb2d6
3dac2cf73e  03eca800e6  28fa2e960e  a3b9220f84  c09d48edf2  ae4ab0ebbb
900a9a6d59  fc560e6730  e2a06470b7  ada27323f2  607a1c2395  b56956ea0c
3d21290f7f  4edd4c06bc  566baddc6b  febe3186ce  5dd42c1871  8670793e6e
41a04aa3ab  88f841bc05  d19892d2ea  c0905d6650  576d7d94b1  f4f1334b62
aaff6c3685  42d2af4c84  6be91c824c  6ee0537db8  3fbeff4308  375546b61a
25a1d50763  6f0d26c22c  4fe073cc1a  5cd3d36d20  d7ad76ea1e  e82bb93221
000cb93aad  ad4f5514b9  8d29a29867  d7de819d11  7a6cf30cb2  e43d67591c
134237d1eb  26d9070aa7  f9ffb8ada5  a47888f02c  5bef2f4d86  d52c7dcc94
.github/dependabot.yml (vendored, 1 line changed)

@@ -7,6 +7,5 @@ updates:
     schedule:
       interval: "monthly"
     labels:
-      - 'skip changelog'
       - 'dependencies'
     rebase-strategy: disabled
.github/release-draft-template.yml (vendored, 6 lines changed)

@@ -18,6 +18,7 @@ categories:
     label: 'security'
   - title: '⚙️ Maintenance/misc'
     label:
+      - 'dependencies'
       - 'maintenance'
       - 'documentation'
 template: |
@@ -26,8 +27,3 @@ template: |
   ❤️ Huge thanks to our contributors: $CONTRIBUTORS.
 no-changes-template: 'Changes are coming soon 😎'
 sort-direction: 'ascending'
-replacers:
-  - search: '/(?:and )?@dependabot-preview(?:\[bot\])?,?/g'
-    replace: ''
-  - search: '/(?:and )?@dependabot(?:\[bot\])?,?/g'
-    replace: ''
.github/workflows/bench-manual.yml (vendored, 2 lines changed)

@@ -18,7 +18,7 @@ jobs:
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
.github/workflows/bench-pr.yml (vendored, 2 lines changed)

@@ -66,7 +66,7 @@ jobs:
          fetch-depth: 0 # fetch full history to be able to get main commit sha
          ref: ${{ steps.comment-branch.outputs.head_ref }}

-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
.github/workflows/bench-push-indexing.yml (vendored, 2 lines changed)

@@ -12,7 +12,7 @@ jobs:
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
.github/workflows/benchmarks-manual.yml (vendored, 2 lines changed)

@@ -18,7 +18,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
.github/workflows/benchmarks-pr.yml (vendored, 2 lines changed)

@@ -44,7 +44,7 @@ jobs:
            exit 1
          fi

-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal

.github/workflows/benchmarks-push-indexing.yml (vendored, 2 lines changed)

@@ -16,7 +16,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal

.github/workflows/benchmarks-push-search-geo.yml (vendored, 2 lines changed)

@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal

.github/workflows/benchmarks-push-search-songs.yml (vendored, 2 lines changed)

@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal

.github/workflows/benchmarks-push-search-wiki.yml (vendored, 2 lines changed)

@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
.github/workflows/db-change-comments.yml (vendored, 2 lines changed)

@@ -44,7 +44,7 @@ jobs:
     if: github.event.label.name == 'db change'
     steps:
       - name: Add comment
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
.github/workflows/flaky-tests.yml (vendored, 2 lines changed)

@@ -17,7 +17,7 @@ jobs:
        run: |
          apt-get update && apt-get install -y curl
          apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
      - name: Install cargo-flaky
        run: cargo install cargo-flaky
      - name: Run cargo flaky in the dumps
.github/workflows/fuzzer-indexing.yml (vendored, 2 lines changed)

@@ -12,7 +12,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
.github/workflows/publish-apt-brew-pkg.yml (vendored, 2 lines changed)

@@ -25,7 +25,7 @@ jobs:
        run: |
          apt-get update && apt-get install -y curl
          apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
      - name: Install cargo-deb
        run: cargo install cargo-deb
      - uses: actions/checkout@v5
.github/workflows/publish-docker-images.yml (vendored, 2 lines changed)

@@ -65,7 +65,7 @@ jobs:
        uses: docker/setup-buildx-action@v3

      - name: Install cosign
-        uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # tag=v3.9.2
+        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # tag=v3.10.0

      - name: Login to Docker Hub
        uses: docker/login-action@v3
.github/workflows/publish-release-assets.yml (vendored, 21 lines changed)

@@ -11,7 +11,7 @@ jobs:
  check-version:
    name: Check the version validity
    runs-on: ubuntu-latest
-    # No need to check the version for dry run (cron)
+    # No need to check the version for dry run (cron or workflow_dispatch)
    steps:
      - uses: actions/checkout@v5
      # Check if the tag has the v<nmumber>.<number>.<number> format.
@@ -45,10 +45,10 @@ jobs:
        run: |
          apt-get update && apt-get install -y curl
          apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
      - name: Build
        run: cargo build --release --locked
-      # No need to upload binaries for dry run (cron)
+      # No need to upload binaries for dry run (cron or workflow_dispatch)
      - name: Upload binaries to release
        if: github.event_name == 'release'
        uses: svenstaro/upload-release-action@2.11.2
@@ -75,10 +75,10 @@ jobs:
          asset_name: meilisearch-windows-amd64.exe
    steps:
      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
      - name: Build
        run: cargo build --release --locked
-      # No need to upload binaries for dry run (cron)
+      # No need to upload binaries for dry run (cron or workflow_dispatch)
      - name: Upload binaries to release
        if: github.event_name == 'release'
        uses: svenstaro/upload-release-action@2.11.2
@@ -101,7 +101,7 @@ jobs:
      - name: Checkout repository
        uses: actions/checkout@v5
      - name: Installing Rust toolchain
-        uses: dtolnay/rust-toolchain@1.85
+        uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
          target: ${{ matrix.target }}
@@ -111,7 +111,7 @@ jobs:
          command: build
          args: --release --target ${{ matrix.target }}
      - name: Upload the binary to release
-        # No need to upload binaries for dry run (cron)
+        # No need to upload binaries for dry run (cron or workflow_dispatch)
        if: github.event_name == 'release'
        uses: svenstaro/upload-release-action@2.11.2
        with:
@@ -148,7 +148,7 @@ jobs:
          add-apt-repository "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
          apt-get update -y && apt-get install -y docker-ce
      - name: Installing Rust toolchain
-        uses: dtolnay/rust-toolchain@1.85
+        uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
          target: ${{ matrix.target }}
@@ -176,7 +176,7 @@ jobs:
      - name: List target output files
        run: ls -lR ./target
      - name: Upload the binary to release
-        # No need to upload binaries for dry run (cron)
+        # No need to upload binaries for dry run (cron or workflow_dispatch)
        if: github.event_name == 'release'
        uses: svenstaro/upload-release-action@2.11.2
        with:
@@ -187,6 +187,7 @@ jobs:

  publish-openapi-file:
    name: Publish OpenAPI file
+    needs: check-version
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
@@ -201,7 +202,7 @@ jobs:
          cd crates/openapi-generator
          cargo run --release -- --pretty --output ../../meilisearch.json
      - name: Upload OpenAPI to Release
-        # No need to upload for dry run (cron)
+        # No need to upload for dry run (cron or workflow_dispatch)
        if: github.event_name == 'release'
        uses: svenstaro/upload-release-action@2.11.2
        with:
.github/workflows/sdks-tests.yml (vendored, 16 lines changed)

@@ -50,7 +50,7 @@ jobs:
        with:
          repository: meilisearch/meilisearch-dotnet
      - name: Setup .NET Core
-        uses: actions/setup-dotnet@v4
+        uses: actions/setup-dotnet@v5
        with:
          dotnet-version: "8.0.x"
      - name: Install dependencies
@@ -100,7 +100,7 @@ jobs:
          - '7700:7700'
    steps:
      - name: Set up Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@v6
        with:
          go-version: stable
      - uses: actions/checkout@v5
@@ -135,13 +135,13 @@ jobs:
      - name: Set up Java
        uses: actions/setup-java@v5
        with:
-          java-version: 8
-          distribution: 'zulu'
+          java-version: 17
+          distribution: 'temurin'
          cache: gradle
      - name: Grant execute permission for gradlew
        run: chmod +x gradlew
      - name: Build and run unit and integration tests
-        run: ./gradlew build integrationTest
+        run: ./gradlew build integrationTest --info

  meilisearch-js-tests:
    needs: define-docker-image
@@ -160,7 +160,7 @@ jobs:
        with:
          repository: meilisearch/meilisearch-js
      - name: Setup node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v5
        with:
          cache: 'yarn'
      - name: Install dependencies
@@ -224,7 +224,7 @@ jobs:
        with:
          repository: meilisearch/meilisearch-python
      - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
      - name: Install pipenv
        uses: dschep/install-pipenv-action@v1
      - name: Install dependencies
@@ -318,7 +318,7 @@ jobs:
        with:
          repository: meilisearch/meilisearch-js-plugins
      - name: Setup node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v5
        with:
          cache: yarn
      - name: Install dependencies
.github/workflows/test-suite.yml (vendored, 14 lines changed)

@@ -27,7 +27,7 @@ jobs:
          apt-get update && apt-get install -y curl
          apt-get install build-essential -y
      - name: Setup test with Rust stable
-        uses: dtolnay/rust-toolchain@1.85
+        uses: dtolnay/rust-toolchain@1.89
      - name: Cache dependencies
        uses: Swatinem/rust-cache@v2.8.0
      - name: Run cargo check without any default features
@@ -52,7 +52,7 @@ jobs:
      - uses: actions/checkout@v5
      - name: Cache dependencies
        uses: Swatinem/rust-cache@v2.8.0
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
      - name: Run cargo check without any default features
        uses: actions-rs/cargo@v1
        with:
@@ -77,7 +77,7 @@ jobs:
        run: |
          apt-get update
          apt-get install --assume-yes build-essential curl
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
      - name: Run cargo build with almost all features
        run: |
          cargo build --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"
@@ -129,7 +129,7 @@ jobs:
        run: |
          apt-get update
          apt-get install --assume-yes build-essential curl
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
      - name: Run cargo tree without default features and check lindera is not present
        run: |
          if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then
@@ -153,7 +153,7 @@ jobs:
        run: |
          apt-get update && apt-get install -y curl
          apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
      - name: Cache dependencies
        uses: Swatinem/rust-cache@v2.8.0
      - name: Run tests in debug
@@ -167,7 +167,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
          components: clippy
@@ -184,7 +184,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
          toolchain: nightly-2024-07-09

.github/workflows/update-cargo-toml-version.yml (vendored, 2 lines changed)

@@ -18,7 +18,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          profile: minimal
      - name: Install sd
Cargo.lock (generated, 1095 lines changed; diff suppressed because it is too large)
Cargo.toml

@@ -23,7 +23,7 @@ members = [
 ]

 [workspace.package]
-version = "1.21.0"
+version = "1.22.1"
 authors = [
   "Quentin de Quelen <quentin@dequelen.me>",
   "Clément Renault <clement@meilisearch.com>",
Dockerfile

@@ -1,5 +1,5 @@
 # Compile
-FROM rust:1.85-alpine3.20 AS compiler
+FROM rust:1.89-alpine3.20 AS compiler

 RUN apk add -q --no-cache build-base openssl-dev
README.md

@@ -121,7 +121,7 @@ If you want to know more about the kind of data we collect and what we use it fo

 Meilisearch is a search engine created by [Meili](https://www.meilisearch.com/careers), a software development company headquartered in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=contact)

-🗞 [Subscribe to our newsletter](https://meilisearch.us2.list-manage.com/subscribe?u=27870f7b71c908a8b359599fb&id=79582d828e) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.
+🗞 [Subscribe to our newsletter](https://share-eu1.hsforms.com/1LN5N0x_GQgq7ss7tXmSykwfg3aq) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.

 💌 Want to make a suggestion or give feedback? Here are some of the channels where you can reach us:
dump/src/reader/compat/v2_to_v3.rs

@@ -97,6 +97,7 @@ impl CompatV2ToV3 {
     }
 }

+#[allow(clippy::large_enum_variant)]
 pub enum CompatIndexV2ToV3 {
     V2(v2::V2IndexReader),
     Compat(Box<CompatIndexV1ToV2>),
filter-parser/src/condition.rs

@@ -7,23 +7,14 @@
 use nom::branch::alt;
 use nom::bytes::complete::tag;
-use nom::character::complete::char;
-use nom::character::complete::multispace0;
-use nom::character::complete::multispace1;
-use nom::combinator::cut;
-use nom::combinator::map;
-use nom::combinator::value;
-use nom::sequence::preceded;
-use nom::sequence::{terminated, tuple};
+use nom::character::complete::{char, multispace0, multispace1};
+use nom::combinator::{cut, map, value};
+use nom::sequence::{preceded, terminated, tuple};
 use Condition::*;

-use crate::value::parse_vector_value;
-use crate::value::parse_vector_value_cut;
-use crate::Error;
-use crate::ErrorKind;
-use crate::VectorFilter;
-use crate::{parse_value, FilterCondition, IResult, Span, Token};
+use crate::error::IResultExt;
+use crate::value::{parse_vector_value, parse_vector_value_cut};
+use crate::{parse_value, Error, ErrorKind, FilterCondition, IResult, Span, Token, VectorFilter};

 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum Condition<'a> {
@@ -124,7 +115,7 @@ pub fn parse_not_exists(input: Span) -> IResult<FilterCondition> {
     Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Exists }))))
 }

-fn parse_vectors(input: Span) -> IResult<(Token, Option<Token>, VectorFilter<'_>)> {
+fn parse_vectors(input: Span) -> IResult<(Token, Option<Token>, VectorFilter)> {
     let (input, _) = multispace0(input)?;
     let (input, fid) = tag("_vectors")(input)?;
filter-parser/src/error.rs

@@ -75,7 +75,11 @@ pub enum ExpectedValueKind {
 pub enum ErrorKind<'a> {
     ReservedGeo(&'a str),
     GeoRadius,
+    GeoRadiusArgumentCount(usize),
     GeoBoundingBox,
+    GeoPolygon,
+    GeoPolygonNotEnoughPoints(usize),
+    GeoCoordinatesNotPair(usize),
     MisusedGeoRadius,
     MisusedGeoBoundingBox,
     VectorFilterLeftover,
@@ -189,7 +193,7 @@ impl Display for Error<'_> {
            }
            ErrorKind::InvalidPrimary => {
                let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) };
-                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
+                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` {text}")?
            }
            ErrorKind::InvalidEscapedNumber => {
                writeln!(f, "Found an invalid escaped sequence number: `{}`.", escaped_input)?
@@ -198,11 +202,23 @@ impl Display for Error<'_> {
                writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)?
            }
            ErrorKind::GeoRadius => {
-                writeln!(f, "The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.")?
+                writeln!(f, "The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.")?
            }
+            ErrorKind::GeoRadiusArgumentCount(count) => {
+                writeln!(f, "Was expecting 3 or 4 arguments for `_geoRadius`, but instead found {count}.")?
+            }
            ErrorKind::GeoBoundingBox => {
                writeln!(f, "The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.")?
            }
+            ErrorKind::GeoPolygon => {
+                writeln!(f, "The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.")?
+            }
+            ErrorKind::GeoPolygonNotEnoughPoints(n) => {
+                writeln!(f, "The `_geoPolygon` filter expects at least 3 points but only {n} were specified")?;
+            }
+            ErrorKind::GeoCoordinatesNotPair(number) => {
+                writeln!(f, "Was expecting 2 coordinates but instead found {number}.")?
+            }
            ErrorKind::ReservedGeo(name) => {
                writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.", name.escape_debug())?
            }
filter-parser/src/lib.rs

@@ -19,6 +19,7 @@
 //! word = (alphanumeric | _ | - | .)+
 //! geoRadius = "_geoRadius(" WS* float WS* "," WS* float WS* "," float WS* ")"
 //! geoBoundingBox = "_geoBoundingBox([" WS * float WS* "," WS* float WS* "], [" WS* float WS* "," WS* float WS* "]")
+//! geoPolygon = "_geoPolygon([[" WS* float WS* "," WS* float WS* "],+])"
 //! ```
 //!
 //! Other BNF grammar used to handle some specific errors:
@@ -116,7 +117,7 @@ impl<'a> Token<'a> {
         self.span
     }

-    pub fn parse_finite_float(&self) -> Result<f64, Error> {
+    pub fn parse_finite_float(&self) -> Result<f64, Error<'a>> {
         let value: f64 = self.value().parse().map_err(|e| self.as_external_error(e))?;
         if value.is_finite() {
             Ok(value)
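The `Error<'a>` change above is about where the returned error's borrow comes from. A minimal sketch of the idea with stand-in types (these are not the crate's real definitions):

```rust
struct Token<'a>(&'a str);
struct ParseError<'a>(&'a str);

impl<'a> Token<'a> {
    // With `-> Result<f64, ParseError>` the elided error lifetime would be
    // the `&self` borrow, so the error could not outlive the token. Naming
    // `'a` ties the error to the underlying input instead.
    fn parse_finite_float(&self) -> Result<f64, ParseError<'a>> {
        self.0.parse::<f64>().map_err(|_| ParseError(self.0))
    }
}

fn main() {
    let input = String::from("not-a-float");
    let err = {
        let token = Token(&input);
        token.parse_finite_float().unwrap_err() // outlives `token`, borrows `input`
    };
    println!("failed on `{}`", err.0);
}
```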
@@ -156,8 +157,9 @@ pub enum FilterCondition<'a> {
     Or(Vec<Self>),
     And(Vec<Self>),
     VectorExists { fid: Token<'a>, embedder: Option<Token<'a>>, filter: VectorFilter<'a> },
-    GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a> },
+    GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a>, resolution: Option<Token<'a>> },
     GeoBoundingBox { top_right_point: [Token<'a>; 2], bottom_left_point: [Token<'a>; 2] },
+    GeoPolygon { points: Vec<[Token<'a>; 2]> },
 }

 pub enum TraversedElement<'a> {
@@ -166,7 +168,7 @@ pub enum TraversedElement<'a> {
 }

 impl<'a> FilterCondition<'a> {
-    pub fn use_contains_operator(&self) -> Option<&Token> {
+    pub fn use_contains_operator(&self) -> Option<&Token<'a>> {
         match self {
             FilterCondition::Condition { fid: _, op } => match op {
                 Condition::GreaterThan(_)
@@ -189,11 +191,12 @@ impl<'a> FilterCondition<'a> {
             FilterCondition::VectorExists { .. }
             | FilterCondition::GeoLowerThan { .. }
             | FilterCondition::GeoBoundingBox { .. }
+            | FilterCondition::GeoPolygon { .. }
             | FilterCondition::In { .. } => None,
         }
     }

-    pub fn use_vector_filter(&self) -> Option<&Token> {
+    pub fn use_vector_filter(&self) -> Option<&Token<'a>> {
         match self {
             FilterCondition::Condition { .. } => None,
             FilterCondition::Not(this) => this.use_vector_filter(),
@@ -202,12 +205,13 @@ impl<'a> FilterCondition<'a> {
             }
             FilterCondition::GeoLowerThan { .. }
             | FilterCondition::GeoBoundingBox { .. }
+            | FilterCondition::GeoPolygon { .. }
             | FilterCondition::In { .. } => None,
             FilterCondition::VectorExists { fid, .. } => Some(fid),
         }
     }

-    pub fn fids(&self, depth: usize) -> Box<dyn Iterator<Item = &Token> + '_> {
+    pub fn fids(&self, depth: usize) -> Box<dyn Iterator<Item = &Token<'a>> + '_> {
         if depth == 0 {
             return Box::new(std::iter::empty());
         }
@@ -228,7 +232,7 @@ impl<'a> FilterCondition<'a> {
     }

     /// Returns the first token found at the specified depth, `None` if no token at this depth.
-    pub fn token_at_depth(&self, depth: usize) -> Option<&Token> {
+    pub fn token_at_depth(&self, depth: usize) -> Option<&Token<'a>> {
         match self {
             FilterCondition::Condition { fid, .. } if depth == 0 => Some(fid),
             FilterCondition::Or(subfilters) => {
@@ -396,23 +400,27 @@ fn parse_not(input: Span, depth: usize) -> IResult<FilterCondition> {
 /// If we parse `_geoRadius` we MUST parse the rest of the expression.
 fn parse_geo_radius(input: Span) -> IResult<FilterCondition> {
     // we want to allow space BEFORE the _geoRadius but not after
-    let parsed = preceded(
-        tuple((multispace0, word_exact("_geoRadius"))),
-        // if we were able to parse `_geoRadius` and can't parse the rest of the input we return a failure
-        cut(delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))),
-    )(input)
-    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoRadius)));
+    let (input, _) = tuple((multispace0, word_exact("_geoRadius")))(input)?;
+
+    // if we were able to parse `_geoRadius` and can't parse the rest of the input we return a failure
+    let parsed =
+        delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))(input)
+            .map_cut(ErrorKind::GeoRadius);

     let (input, args) = parsed?;

-    if args.len() != 3 {
-        return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::GeoRadius)));
+    if !(3..=4).contains(&args.len()) {
+        return Err(Error::failure_from_kind(input, ErrorKind::GeoRadiusArgumentCount(args.len())));
     }

     let res = FilterCondition::GeoLowerThan {
         point: [args[0].into(), args[1].into()],
         radius: args[2].into(),
+        resolution: args.get(3).cloned().map(Token::from),
     };

     Ok((input, res))
 }
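At the surface-syntax level, the change means a fourth `_geoRadius` argument is now accepted and captured as the optional resolution. A hedged usage sketch, assuming the crate's public `FilterCondition::parse` entry point that the tests below exercise:

```rust
use filter_parser::FilterCondition;

fn main() {
    // Three arguments still parse as before; a fourth becomes the resolution.
    let filter = FilterCondition::parse("_geoRadius(48.86, 2.35, 200, 1000)")
        .unwrap()
        .unwrap();
    if let FilterCondition::GeoLowerThan { point, radius, resolution } = filter {
        assert_eq!(point[0].value(), "48.86");
        assert_eq!(radius.value(), "200");
        assert_eq!(resolution.map(|r| r.value().to_owned()), Some("1000".into()));
    }
}
```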
@@ -420,26 +428,33 @@ fn parse_geo_radius(input: Span) -> IResult<FilterCondition> {
 /// If we parse `_geoBoundingBox` we MUST parse the rest of the expression.
 fn parse_geo_bounding_box(input: Span) -> IResult<FilterCondition> {
     // we want to allow space BEFORE the _geoBoundingBox but not after
-    let parsed = preceded(
-        tuple((multispace0, word_exact("_geoBoundingBox"))),
-        // if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure
-        cut(delimited(
-            char('('),
-            separated_list1(
-                tag(","),
-                ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
-            ),
-            char(')'),
-        )),
+    let (input, _) = tuple((multispace0, word_exact("_geoBoundingBox")))(input)?;
+
+    // if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure
+    let (input, args) = delimited(
+        char('('),
+        separated_list1(
+            tag(","),
+            ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
+        ),
+        char(')'),
     )(input)
-    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoBoundingBox)));
-
-    let (input, args) = parsed?;
+    .map_cut(ErrorKind::GeoBoundingBox)?;

-    if args.len() != 2 || args[0].len() != 2 || args[1].len() != 2 {
+    if args.len() != 2 {
         return Err(Error::failure_from_kind(input, ErrorKind::GeoBoundingBox));
     }

+    if let Some(offending) = args.iter().find(|a| a.len() != 2) {
+        let context = offending.first().unwrap_or(&input);
+        return Err(Error::failure_from_kind(
+            *context,
+            ErrorKind::GeoCoordinatesNotPair(offending.len()),
+        ));
+    }
+
     let res = FilterCondition::GeoBoundingBox {
         top_right_point: [args[0][0].into(), args[0][1].into()],
         bottom_left_point: [args[1][0].into(), args[1][1].into()],
@@ -447,6 +462,47 @@ fn parse_geo_bounding_box(input: Span) -> IResult<FilterCondition> {
     Ok((input, res))
 }

+/// geoPolygon = "_geoPolygon([[" WS* float WS* "," WS* float WS* "],+])"
+/// If we parse `_geoPolygon` we MUST parse the rest of the expression.
+fn parse_geo_polygon(input: Span) -> IResult<FilterCondition> {
+    // we want to allow space BEFORE the _geoPolygon but not after
+    let (input, _) = tuple((multispace0, word_exact("_geoPolygon")))(input)?;
+
+    // if we were able to parse `_geoPolygon` and can't parse the rest of the input we return a failure
+    let (input, args): (_, Vec<Vec<LocatedSpan<_, _>>>) = delimited(
+        char('('),
+        separated_list1(
+            tag(","),
+            ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
+        ),
+        preceded(opt(ws(char(','))), char(')')), // Tolerate trailing comma
+    )(input)
+    .map_cut(ErrorKind::GeoPolygon)?;
+
+    if args.len() < 3 {
+        let context = args.last().and_then(|a| a.last()).unwrap_or(&input);
+        return Err(Error::failure_from_kind(
+            *context,
+            ErrorKind::GeoPolygonNotEnoughPoints(args.len()),
+        ));
+    }
+
+    if let Some(offending) = args.iter().find(|a| a.len() != 2) {
+        let context = offending.first().unwrap_or(&input);
+        return Err(Error::failure_from_kind(
+            *context,
+            ErrorKind::GeoCoordinatesNotPair(offending.len()),
+        ));
+    }
+
+    let res = FilterCondition::GeoPolygon {
+        points: args.into_iter().map(|a| [a[0].into(), a[1].into()]).collect(),
+    };
+    Ok((input, res))
+}
+
 /// geoPoint = WS* "_geoPoint(float WS* "," WS* float WS* "," WS* float)
 fn parse_geo_point(input: Span) -> IResult<FilterCondition> {
     // we want to forbid space BEFORE the _geoPoint but not after
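A corresponding usage sketch for the new `_geoPolygon` syntax, under the same assumptions as the `_geoRadius` example above: at least three `[lat, lng]` pairs, with a trailing comma tolerated.

```rust
use filter_parser::FilterCondition;

fn main() {
    let filter = FilterCondition::parse("_geoPolygon([12, 13], [14, 15], [16, 17],)")
        .unwrap()
        .unwrap();
    if let FilterCondition::GeoPolygon { points } = filter {
        assert_eq!(points.len(), 3); // fewer than 3 points is GeoPolygonNotEnoughPoints
        assert_eq!(points[2][0].value(), "16");
    }
}
```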
@@ -516,8 +572,8 @@ fn parse_primary(input: Span, depth: usize) -> IResult<FilterCondition> {
                 Error::new_from_kind(input, ErrorKind::MissingClosingDelimiter(c.char()))
             }),
         ),
-        parse_geo_radius,
-        parse_geo_bounding_box,
+        // Made a random block of functions because we reached the maximum number of elements per alt
+        alt((parse_geo_radius, parse_geo_bounding_box, parse_geo_polygon)),
         parse_in,
         parse_not_in,
         parse_condition,
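The nested `alt` is worth a note: nom implements its `Alt` trait only for tuples up to a fixed arity, so once the dispatch list in `parse_primary` outgrew it, grouping several parsers into an inner `alt` makes them count as a single branch. A minimal standalone illustration (assuming nom 7):

```rust
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::IResult;

fn keyword(input: &str) -> IResult<&str, &str> {
    alt((
        tag("alpha"),
        // An inner `alt` counts as one branch of the outer combinator,
        // sidestepping the maximum tuple size.
        alt((tag("beta"), tag("gamma"), tag("delta"))),
    ))(input)
}

fn main() {
    assert_eq!(keyword("gamma rest"), Ok((" rest", "gamma")));
}
```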
@@ -597,9 +653,12 @@ impl std::fmt::Display for FilterCondition<'_> {
                 }
                 write!(f, " EXISTS")
             }
-            FilterCondition::GeoLowerThan { point, radius } => {
+            FilterCondition::GeoLowerThan { point, radius, resolution: None } => {
                 write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
             }
+            FilterCondition::GeoLowerThan { point, radius, resolution: Some(resolution) } => {
+                write!(f, "_geoRadius({}, {}, {}, {})", point[0], point[1], radius, resolution)
+            }
             FilterCondition::GeoBoundingBox {
                 top_right_point: top_left_point,
                 bottom_left_point: bottom_right_point,
@@ -613,6 +672,13 @@ impl std::fmt::Display for FilterCondition<'_> {
                     bottom_right_point[1]
                 )
             }
+            FilterCondition::GeoPolygon { points } => {
+                write!(f, "_geoPolygon([")?;
+                for point in points {
+                    write!(f, "[{}, {}], ", point[0], point[1])?;
+                }
+                write!(f, "])")
+            }
         }
     }
 }
@@ -651,7 +717,7 @@ pub mod tests {
     /// Create a raw [Token]. You must specify the string that appear BEFORE your element followed by your element
     pub fn rtok<'a>(before: &'a str, value: &'a str) -> Token<'a> {
         // if the string is empty we still need to return 1 for the line number
-        let lines = before.is_empty().then_some(1).unwrap_or_else(|| before.lines().count());
+        let lines = if before.is_empty() { 1 } else { before.lines().count() };
         let offset = before.chars().count();
         // the extra field is not checked in the tests so we can set it to nothing
         unsafe { Span::new_from_raw_offset(offset, lines as u32, value, "") }.into()
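The `rtok` tweak swaps a `bool::then_some(..).unwrap_or_else(..)` chain for a plain conditional. The two forms are equivalent; the `if` simply reads more directly, as this small check shows:

```rust
fn main() {
    for before in ["", "one\ntwo"] {
        let via_then_some =
            before.is_empty().then_some(1).unwrap_or_else(|| before.lines().count());
        let via_if = if before.is_empty() { 1 } else { before.lines().count() };
        assert_eq!(via_then_some, via_if);
    }
}
```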
@@ -776,12 +842,17 @@ pub mod tests {
         insta::assert_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})");
         insta::assert_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))");
         insta::assert_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})");
+        insta::assert_snapshot!(p("_geoRadius(12,13,14,1000)"), @"_geoRadius({12}, {13}, {14}, {1000})");

         // Test geo bounding box
         insta::assert_snapshot!(p("_geoBoundingBox([12, 13], [14, 15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
         insta::assert_snapshot!(p("NOT _geoBoundingBox([12, 13], [14, 15])"), @"NOT (_geoBoundingBox([{12}, {13}], [{14}, {15}]))");
         insta::assert_snapshot!(p("_geoBoundingBox([12,13],[14,15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");

+        // Test geo polygon
+        insta::assert_snapshot!(p("_geoPolygon([12, 13], [14, 15], [16, 17])"), @"_geoPolygon([[{12}, {13}], [{14}, {15}], [{16}, {17}], ])");
+        insta::assert_snapshot!(p("_geoPolygon([12, 13], [14, 15], [-1.2,2939.2], [1,1])"), @"_geoPolygon([[{12}, {13}], [{14}, {15}], [{-1.2}, {2939.2}], [{1}, {1}], ])");
+
         // Test OR + AND
         insta::assert_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
         insta::assert_snapshot!(p("channel = ponce OR 'dog race' != 'bernese mountain'"), @"OR[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
@@ -838,50 +909,80 @@ pub mod tests {
         11:12 channel = 🐻 AND followers < 100
         "###);

-        insta::assert_snapshot!(p("'OR'"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
-        1:5 'OR'
-        "###);
+        insta::assert_snapshot!(p("'OR'"), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `\'OR\'`.
+        1:5 'OR'
+        ");

         insta::assert_snapshot!(p("OR"), @r###"
         Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
         1:3 OR
         "###);

-        insta::assert_snapshot!(p("channel Ponce"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
-        1:14 channel Ponce
-        "###);
+        insta::assert_snapshot!(p("channel Ponce"), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `channel Ponce`.
+        1:14 channel Ponce
+        ");

-        insta::assert_snapshot!(p("channel = Ponce OR"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
-        19:19 channel = Ponce OR
-        "###);
+        insta::assert_snapshot!(p("channel = Ponce OR"), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` but instead got nothing.
+        19:19 channel = Ponce OR
+        ");

-        insta::assert_snapshot!(p("_geoRadius"), @r###"
-        The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
-        1:11 _geoRadius
-        "###);
+        insta::assert_snapshot!(p("_geoRadius"), @r"
+        The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.
+        11:11 _geoRadius
+        ");

-        insta::assert_snapshot!(p("_geoRadius = 12"), @r###"
-        The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
-        1:16 _geoRadius = 12
-        "###);
+        insta::assert_snapshot!(p("_geoRadius = 12"), @r"
+        The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.
+        11:16 _geoRadius = 12
+        ");

-        insta::assert_snapshot!(p("_geoBoundingBox"), @r###"
-        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
-        1:16 _geoBoundingBox
-        "###);
+        insta::assert_snapshot!(p("_geoBoundingBox"), @r"
+        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
+        16:16 _geoBoundingBox
+        ");

-        insta::assert_snapshot!(p("_geoBoundingBox = 12"), @r###"
-        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
-        1:21 _geoBoundingBox = 12
-        "###);
+        insta::assert_snapshot!(p("_geoBoundingBox = 12"), @r"
+        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
+        16:21 _geoBoundingBox = 12
+        ");

-        insta::assert_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###"
-        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
-        1:26 _geoBoundingBox(1.0, 1.0)
-        "###);
+        insta::assert_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r"
+        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
+        17:26 _geoBoundingBox(1.0, 1.0)
+        ");

+        insta::assert_snapshot!(p("_geoPolygon([1,2,3])"), @r"
+        The `_geoPolygon` filter expects at least 3 points but only 1 were specified
+        18:19 _geoPolygon([1,2,3])
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon(1,2,3)"), @r"
+        The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
+        13:19 _geoPolygon(1,2,3)
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon([1,2],[1,2],[1,2,3])"), @r"
+        Was expecting 2 coordinates but instead found 3.
+        26:27 _geoPolygon([1,2],[1,2],[1,2,3])
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon([1,2],[1,2,3])"), @r"
+        The `_geoPolygon` filter expects at least 3 points but only 2 were specified
+        24:25 _geoPolygon([1,2],[1,2,3])
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon(1)"), @r"
+        The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
+        13:15 _geoPolygon(1)
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon([1,2)"), @r"
+        The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
+        17:18 _geoPolygon([1,2)
+        ");
+
         insta::assert_snapshot!(p("_geoPoint(12, 13, 14)"), @r###"
         `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
@@ -938,15 +1039,15 @@ pub mod tests {
         34:35 channel = mv OR followers >= 1000)
         "###);

-        insta::assert_snapshot!(p("colour NOT EXIST"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
-        1:17 colour NOT EXIST
-        "###);
+        insta::assert_snapshot!(p("colour NOT EXIST"), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `colour NOT EXIST`.
+        1:17 colour NOT EXIST
+        ");

-        insta::assert_snapshot!(p("subscribers 100 TO1000"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
-        1:23 subscribers 100 TO1000
-        "###);
+        insta::assert_snapshot!(p("subscribers 100 TO1000"), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `subscribers 100 TO1000`.
+        1:23 subscribers 100 TO1000
+        ");

         insta::assert_snapshot!(p("channel = ponce ORdog != 'bernese mountain'"), @r###"
         Found unexpected characters at the end of the filter: `ORdog != \'bernese mountain\'`. You probably forgot an `OR` or an `AND` rule.
@@ -1071,38 +1172,38 @@ pub mod tests {
         5:7 NOT OR EXISTS AND EXISTS NOT EXISTS
         "###);

-        insta::assert_snapshot!(p(r#"value NULL"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`.
-        1:11 value NULL
-        "###);
+        insta::assert_snapshot!(p(r#"value NULL"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NULL`.
+        1:11 value NULL
+        ");
-        insta::assert_snapshot!(p(r#"value NOT NULL"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`.
-        1:15 value NOT NULL
-        "###);
+        insta::assert_snapshot!(p(r#"value NOT NULL"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NOT NULL`.
+        1:15 value NOT NULL
+        ");
-        insta::assert_snapshot!(p(r#"value EMPTY"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`.
-        1:12 value EMPTY
-        "###);
+        insta::assert_snapshot!(p(r#"value EMPTY"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value EMPTY`.
+        1:12 value EMPTY
+        ");
-        insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`.
-        1:16 value NOT EMPTY
-        "###);
+        insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NOT EMPTY`.
+        1:16 value NOT EMPTY
+        ");
-        insta::assert_snapshot!(p(r#"value IS"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS`.
-        1:9 value IS
-        "###);
+        insta::assert_snapshot!(p(r#"value IS"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS`.
+        1:9 value IS
+        ");
-        insta::assert_snapshot!(p(r#"value IS NOT"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`.
-        1:13 value IS NOT
-        "###);
+        insta::assert_snapshot!(p(r#"value IS NOT"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS NOT`.
+        1:13 value IS NOT
+        ");
-        insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`.
-        1:16 value IS EXISTS
-        "###);
+        insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS EXISTS`.
+        1:16 value IS EXISTS
+        ");
-        insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`.
-        1:20 value IS NOT EXISTS
-        "###);
+        insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS NOT EXISTS`.
+        1:20 value IS NOT EXISTS
+        ");
     }

     #[test]
@@ -1,3 +1,5 @@
+#![allow(clippy::result_large_err)]
+
 use std::collections::HashMap;
 use std::io;
index-scheduler/src/lib.rs

@@ -1,3 +1,6 @@
+// The main Error type is large and boxing the large variant make the pattern matching fails
+#![allow(clippy::result_large_err)]
+
 /*!
 This crate defines the index scheduler, which is responsible for:
 1. Keeping references to meilisearch's indexes and mapping them to their
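For context on the lint being silenced here: `clippy::result_large_err` fires when the `Err` variant makes every `Result` of that type large, and its usual suggestion is to box the error, which, as the comment above says, would change every pattern-match site. A self-contained sketch of the shape of the problem (hypothetical types, not the scheduler's):

```rust
#![allow(clippy::result_large_err)] // same crate-wide opt-out the diff takes

// A deliberately large error type: every Result<(), BigError> now carries
// room for 256 bytes even on the Ok path, which is what the lint flags.
struct BigError([u8; 256]);

fn work(fail: bool) -> Result<(), BigError> {
    if fail { Err(BigError([0; 256])) } else { Ok(()) }
}

fn main() {
    // Boxing (`Result<(), Box<BigError>>`) would shrink the Result but force
    // every caller to match through the Box, hence the allow instead.
    assert!(work(false).is_ok());
}
```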
@@ -344,7 +347,7 @@ impl IndexScheduler {
         Ok(this)
     }

-    fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {
+    fn read_txn(&self) -> Result<RoTxn<'_, WithoutTls>> {
         self.env.read_txn().map_err(|e| e.into())
     }

@@ -757,7 +760,7 @@ impl IndexScheduler {
     /// Register a new task coming from a dump in the scheduler.
     /// By taking a mutable ref we're pretty sure no one will ever import a dump while actix is running.
-    pub fn register_dumped_task(&mut self) -> Result<Dump> {
+    pub fn register_dumped_task(&mut self) -> Result<Dump<'_>> {
         Dump::new(self)
     }

@@ -806,10 +809,8 @@ impl IndexScheduler {
             .queue
             .tasks
             .get_task(self.rtxn, task_id)
-            .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?
-            .ok_or_else(|| {
-                io::Error::new(io::ErrorKind::Other, Error::CorruptedTaskQueue)
-            })?;
+            .map_err(io::Error::other)?
+            .ok_or_else(|| io::Error::other(Error::CorruptedTaskQueue))?;

         serde_json::to_writer(&mut self.buffer, &TaskView::from_task(&task))?;
         self.buffer.push(b'\n');
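`io::Error::other`, stable since Rust 1.74, is shorthand for `io::Error::new(io::ErrorKind::Other, ..)`, which is all the refactor above relies on:

```rust
use std::io;

fn main() {
    let verbose = io::Error::new(io::ErrorKind::Other, "corrupted task queue");
    let concise = io::Error::other("corrupted task queue");
    // Same kind, same message; only the call site got shorter.
    assert_eq!(verbose.kind(), concise.kind());
    assert_eq!(verbose.to_string(), concise.to_string());
}
```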
@@ -310,7 +310,8 @@ impl Queue {
             | self.tasks.status.get(wtxn, &Status::Failed)?.unwrap_or_default()
             | self.tasks.status.get(wtxn, &Status::Canceled)?.unwrap_or_default();

-        let to_delete = RoaringBitmap::from_iter(finished.into_iter().rev().take(100_000));
+        let to_delete =
+            RoaringBitmap::from_sorted_iter(finished.into_iter().take(100_000)).unwrap();

         // /!\ the len must be at least 2 or else we might enter an infinite loop where we only delete
         // the deletion tasks we enqueued ourselves.
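`RoaringBitmap` iterates in ascending order, so a `take(n)` prefix is already sorted and `from_sorted_iter` can bulk-load it; the constructor only errs on unsorted input, which is why the `unwrap` is safe. Also note the dropped `.rev()`: the selection moves from the newest to the oldest finished tasks. A small sketch, assuming the `roaring` crate's API:

```rust
use roaring::RoaringBitmap;

fn main() {
    let finished: RoaringBitmap = [3u32, 7, 9, 20, 41].into_iter().collect();
    // Ascending iteration means the prefix is sorted by construction, so
    // the sorted-input precondition of `from_sorted_iter` always holds here.
    let to_delete = RoaringBitmap::from_sorted_iter(finished.into_iter().take(3)).unwrap();
    assert_eq!(to_delete.iter().collect::<Vec<u32>>(), vec![3, 7, 9]);
}
```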
@@ -326,7 +327,7 @@ impl Queue {
         );

         // it's safe to unwrap here because we checked the len above
-        let newest_task_id = to_delete.iter().last().unwrap();
+        let newest_task_id = to_delete.iter().next_back().unwrap();
         let last_task_to_delete =
             self.tasks.get_task(wtxn, newest_task_id)?.ok_or(Error::CorruptedTaskQueue)?;
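`Iterator::last` drives the iterator forward element by element, while `DoubleEndedIterator::next_back` asks for the final element directly, so this is a small efficiency win on the bitmap iterator:

```rust
fn main() {
    // Same result; `next_back` avoids walking the whole sequence.
    assert_eq!((0u32..1_000_000).last(), (0u32..1_000_000).next_back());
}
```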
@@ -66,6 +66,7 @@ pub(crate) enum DocumentOperation {

 /// A [batch](Batch) that combines multiple tasks operating on an index.
 #[derive(Debug)]
+#[allow(clippy::large_enum_variant)]
 pub(crate) enum IndexOperation {
     DocumentOperation {
         index_uid: String,
@@ -370,7 +370,7 @@ fn ureq_error_into_error(error: ureq::Error) -> Error {
             }
             Err(e) => e.into(),
         },
-        ureq::Error::Transport(transport) => io::Error::new(io::ErrorKind::Other, transport).into(),
+        ureq::Error::Transport(transport) => io::Error::other(transport).into(),
     }
 }
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggo` already exists.", error_code: "index_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_already_exists" }, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -57,7 +57,7 @@ girafo: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [4,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.21.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.22.1"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 1 {uid: 1, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
 2 {uid: 2, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
 3 {uid: 3, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 ----------------------------------------------------------------------
 ### Status:
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 ----------------------------------------------------------------------
 ### Status:
@@ -37,7 +37,7 @@ catto [1,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.21.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.22.1"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 ----------------------------------------------------------------------
@@ -40,7 +40,7 @@ doggo [2,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
|
||||
### All Batches:
|
||||
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.21.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
|
||||
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.22.1"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
|
||||
----------------------------------------------------------------------
|
||||
### Batch to tasks mapping:
|
||||
0 [0,]
|
||||
|
||||
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
|
||||
[]
|
||||
----------------------------------------------------------------------
|
||||
### All Tasks:
|
||||
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
|
||||
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
|
||||
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
|
||||
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
|
||||
3 {uid: 3, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
|
||||
@@ -43,7 +43,7 @@ doggo [2,3,]
|
||||
[timestamp] [0,]
|
||||
----------------------------------------------------------------------
|
||||
### All Batches:
|
||||
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.21.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
|
||||
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.22.1"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
|
||||
----------------------------------------------------------------------
|
||||
### Batch to tasks mapping:
|
||||
0 [0,]
|
||||
|
||||
@@ -45,6 +45,7 @@ pub fn upgrade_index_scheduler(
|
||||
(1, 19, _) => 0,
|
||||
(1, 20, _) => 0,
|
||||
(1, 21, _) => 0,
|
||||
(1, 22, _) => 0,
|
||||
(major, minor, patch) => {
|
||||
if major > current_major
|
||||
|| (major == current_major && minor > current_minor)
|
||||
|
||||
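The arm added above extends the upgrade dispatch for the new minor: every known (major, minor) pair maps to a step in the migration list, and anything stamped newer than the running binary is refused. A toy sketch of that shape, with hypothetical names and assumed semantics (the real function lives in crates/index-scheduler):

    // Hypothetical stand-in for the dispatch pattern this hunk extends.
    fn first_migration_step(
        from: (u32, u32, u32),
        (current_major, current_minor): (u32, u32),
    ) -> Result<usize, String> {
        match from {
            // Released minors that need no scheduler-level migration map to 0.
            (1, 21, _) => Ok(0),
            (1, 22, _) => Ok(0),
            (major, minor, patch) => {
                if major > current_major || (major == current_major && minor > current_minor) {
                    // Downgrades are refused, mirroring the guard shown above.
                    Err(format!("cannot open a v{major}.{minor}.{patch} database"))
                } else {
                    Ok(0)
                }
            }
        }
    }
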
@@ -17,7 +17,7 @@ impl<'a> BytesDecode<'a> for UuidCodec {
impl BytesEncode<'_> for UuidCodec {
type EItem = Uuid;

fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
fn bytes_encode(item: &Self::EItem) -> Result<Cow<'_, [u8]>, BoxedError> {
Ok(Cow::Borrowed(item.as_bytes()))
}
}

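The `bytes_encode` hunks in this compare all make the same mechanical change: the borrowed return type `Cow<[u8]>` gains an explicit `'_`. A minimal self-contained sketch of the pattern, in plain Rust outside heed, showing why the elided form is now flagged:

    use std::borrow::Cow;

    // Newer rustc (the `mismatched_lifetime_syntaxes` lint that shipped around
    // Rust 1.89, the toolchain this compare bumps to) warns when a function
    // takes a reference and returns a borrowed type whose lifetime is elided
    // invisibly, as `Cow<[u8]>` did. Writing `Cow<'_, [u8]>` keeps the same
    // meaning but makes the borrow from `item` visible in the signature.
    fn bytes_encode(item: &[u8; 16]) -> Cow<'_, [u8]> {
        Cow::Borrowed(item.as_slice())
    }

    fn main() {
        let raw = [0u8; 16];
        assert!(matches!(bytes_encode(&raw), Cow::Borrowed(_)));
    }
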
@@ -271,9 +271,10 @@ macro_rules! json_string {

#[cfg(test)]
mod tests {
use uuid::Uuid;

use crate as meili_snap;
use crate::UUID_IN_MESSAGE_RE;
use uuid::Uuid;

#[test]
fn snap() {

@@ -315,7 +315,9 @@ impl<'a> heed::BytesDecode<'a> for KeyIdActionCodec {
impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
type EItem = (&'a KeyId, &'a Action, Option<&'a [u8]>);

fn bytes_encode((key_id, action, index): &Self::EItem) -> StdResult<Cow<[u8]>, BoxedError> {
fn bytes_encode(
(key_id, action, index): &'_ Self::EItem,
) -> StdResult<Cow<'_, [u8]>, BoxedError> {
let mut bytes = Vec::new();

bytes.extend_from_slice(key_id.as_bytes());

@@ -5,6 +5,7 @@ use actix_web::{self as aweb, HttpResponseBuilder};
use aweb::http::header;
use aweb::rt::task::JoinError;
use convert_case::Casing;
use milli::cellulite;
use milli::heed::{Error as HeedError, MdbError};
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
@@ -239,6 +240,7 @@ InconsistentDocumentChangeHeaders , InvalidRequest , BAD_REQU
InvalidDocumentFilter , InvalidRequest , BAD_REQUEST ;
InvalidDocumentSort , InvalidRequest , BAD_REQUEST ;
InvalidDocumentGeoField , InvalidRequest , BAD_REQUEST ;
InvalidDocumentGeojsonField , InvalidRequest , BAD_REQUEST ;
InvalidHeaderValue , InvalidRequest , BAD_REQUEST ;
InvalidVectorDimensions , InvalidRequest , BAD_REQUEST ;
InvalidVectorsType , InvalidRequest , BAD_REQUEST ;
@@ -501,7 +503,9 @@ impl ErrorCode for milli::Error {
Code::InvalidFacetSearchFacetName
}
UserError::CriterionError(_) => Code::InvalidSettingsRankingRules,
UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField,
UserError::InvalidGeoField { .. } | UserError::GeoJsonError(_) => {
Code::InvalidDocumentGeoField
}
UserError::InvalidVectorDimensions { .. }
| UserError::InvalidIndexingVectorDimensions { .. } => {
Code::InvalidVectorDimensions
@@ -525,6 +529,17 @@ impl ErrorCode for milli::Error {
| UserError::DocumentEditionCompilationError(_) => {
Code::EditDocumentsByFunctionError
}
UserError::CelluliteError(err) => match err {
cellulite::Error::BuildCanceled
| cellulite::Error::VersionMismatchOnBuild(_)
| cellulite::Error::DatabaseDoesntExists
| cellulite::Error::Heed(_)
| cellulite::Error::InvalidGeometry(_)
| cellulite::Error::InternalDocIdMissing(_, _)
| cellulite::Error::CannotConvertLineToCell(_, _, _) => Code::Internal,
cellulite::Error::InvalidGeoJson(_) => Code::InvalidDocumentGeojsonField,
},
UserError::MalformedGeojson(_) => Code::InvalidDocumentGeojsonField,
}
}
}

@@ -1,3 +1,5 @@
#![allow(clippy::result_large_err)]

pub mod batch_view;
pub mod batches;
pub mod compression;

@@ -12,6 +12,7 @@ use tokio::task::JoinError;
use crate::routes::indexes::{PROXY_ORIGIN_REMOTE_HEADER, PROXY_ORIGIN_TASK_UID_HEADER};

#[derive(Debug, thiserror::Error)]
#[allow(clippy::large_enum_variant)]
pub enum MeilisearchHttpError {
#[error("A Content-Type header is missing. Accepted values for the Content-Type header are: {}",
.0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", "))]

@@ -1,4 +1,6 @@
#![allow(clippy::result_large_err)]
#![allow(rustdoc::private_intra_doc_links)]

#[macro_use]
pub mod error;
pub mod analytics;

@@ -180,12 +180,6 @@ pub async fn get_metrics(

let response = String::from_utf8(buffer).expect("Failed to convert bytes to string");

// We cannot specify the version with ContentType(TEXT_PLAIN_UTF_8) so we have to write everything by hand :(
// see the following for what should be returned: https://prometheus.io/docs/instrumenting/content_negotiation/#content-type-response
let content_type = ("content-type", "text/plain; version=0.0.4; charset=utf-8");

Ok(HttpResponse::Ok()
// .insert_header(header::ContentType(mime::TEXT_PLAIN_UTF_8))
.insert_header(content_type)
.body(response))
let content_type = ("content-type", prometheus::TEXT_FORMAT);
Ok(HttpResponse::Ok().insert_header(content_type).body(response))
}

|
||||
use core::convert::Infallible;
|
||||
use std::collections::BTreeMap;
|
||||
use std::str::FromStr;
|
||||
|
||||
@@ -7,7 +8,6 @@ use actix_http::header::{
|
||||
};
|
||||
use actix_web::web::{self, Data, Path};
|
||||
use actix_web::{HttpRequest, HttpResponse};
|
||||
use core::convert::Infallible;
|
||||
use deserr::actix_web::AwebJson;
|
||||
use deserr::{DeserializeError, Deserr, ValuePointerRef};
|
||||
use index_scheduler::IndexScheduler;
|
||||
@@ -24,12 +24,12 @@ use tracing::debug;
|
||||
use url::Url;
|
||||
use utoipa::{OpenApi, ToSchema};
|
||||
use uuid::Uuid;
|
||||
use WebhooksError::*;
|
||||
|
||||
use crate::analytics::{Aggregate, Analytics};
|
||||
use crate::extractors::authentication::policies::ActionPolicy;
|
||||
use crate::extractors::authentication::GuardedData;
|
||||
use crate::extractors::sequential_extractor::SeqHandler;
|
||||
use WebhooksError::*;
|
||||
|
||||
#[derive(OpenApi)]
|
||||
#[openapi(
|
||||
|
||||
@@ -219,7 +219,7 @@ struct SearchResultByQueryIterItem<'a> {
|
||||
|
||||
fn merge_index_local_results(
|
||||
results_by_query: Vec<SearchResultByQuery<'_>>,
|
||||
) -> impl Iterator<Item = SearchResultByQueryIterItem> + '_ {
|
||||
) -> impl Iterator<Item = SearchResultByQueryIterItem<'_>> + '_ {
|
||||
itertools::kmerge_by(
|
||||
results_by_query.into_iter().map(SearchResultByQueryIter::new),
|
||||
|left: &SearchResultByQueryIterItem, right: &SearchResultByQueryIterItem| {
|
||||
|
||||
@@ -2080,7 +2080,7 @@ pub(crate) fn parse_filter(
|
||||
facets: &Value,
|
||||
filter_parsing_error_code: Code,
|
||||
features: RoFeatures,
|
||||
) -> Result<Option<Filter>, ResponseError> {
|
||||
) -> Result<Option<Filter<'_>>, ResponseError> {
|
||||
let filter = match facets {
|
||||
Value::String(expr) => Filter::from_str(expr).map_err(|e| e.into()),
|
||||
Value::Array(arr) => parse_filter_array(arr).map_err(|e| e.into()),
|
||||
@@ -2117,7 +2117,7 @@ pub(crate) fn parse_filter(
|
||||
Ok(filter)
|
||||
}
|
||||
|
||||
fn parse_filter_array(arr: &[Value]) -> Result<Option<Filter>, MeilisearchHttpError> {
|
||||
fn parse_filter_array(arr: &'_ [Value]) -> Result<Option<Filter<'_>>, MeilisearchHttpError> {
|
||||
let mut ands = Vec::new();
|
||||
for value in arr {
|
||||
match value {
|
||||
|
||||
@@ -13,9 +13,9 @@
|
||||
//! What is going to happen at this point is that you're going to send a oneshot::Sender over an async mpsc channel.
|
||||
//! Then, the queue/scheduler is going to either:
|
||||
//! - Drop your oneshot channel => that means there are too many searches going on, and yours won't be executed.
|
||||
//! You should exit and free all the RAM you use ASAP.
|
||||
//! You should exit and free all the RAM you use ASAP.
|
||||
//! - Sends you a Permit => that will unlock the method, and you will be able to process your search.
|
||||
//! And should drop the Permit only once you have freed all the RAM consumed by the method.
|
||||
//! And should drop the Permit only once you have freed all the RAM consumed by the method.
|
||||
|
||||
use std::num::NonZeroUsize;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
@@ -1040,7 +1040,7 @@ async fn error_single_search_forbidden_token() {
|
||||
];
|
||||
|
||||
let failed_query_indexes: Vec<_> =
|
||||
std::iter::repeat(Some(0)).take(5).chain(std::iter::repeat(None).take(6)).collect();
|
||||
std::iter::repeat_n(Some(0), 5).chain(std::iter::repeat_n(None, 6)).collect();
|
||||
|
||||
let failed_query_indexes = vec![failed_query_indexes; ACCEPTED_KEYS_SINGLE.len()];
|
||||
|
||||
@@ -1118,10 +1118,9 @@ async fn error_multi_search_forbidden_token() {
|
||||
},
|
||||
];
|
||||
|
||||
let failed_query_indexes: Vec<_> = std::iter::repeat(Some(0))
|
||||
.take(5)
|
||||
.chain(std::iter::repeat(Some(1)).take(5))
|
||||
.chain(std::iter::repeat(None).take(6))
|
||||
let failed_query_indexes: Vec<_> = std::iter::repeat_n(Some(0), 5)
|
||||
.chain(std::iter::repeat_n(Some(1), 5))
|
||||
.chain(std::iter::repeat_n(None, 6))
|
||||
.collect();
|
||||
|
||||
let failed_query_indexes = vec![failed_query_indexes; ACCEPTED_KEYS_BOTH.len()];
|
||||
|
||||
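Both test hunks above swap `std::iter::repeat(x).take(n)` for `std::iter::repeat_n(x, n)`, which has been stable since Rust 1.82. The two forms yield the same items; `repeat_n` just skips the infinite intermediate iterator and is exact-sized. A quick equivalence check:

    fn main() {
        // The older pattern: an infinite iterator cut down by `take`.
        let old: Vec<_> =
            std::iter::repeat(Some(0)).take(5).chain(std::iter::repeat(None).take(6)).collect();
        // The replacement: a bounded, exact-sized iterator from the start.
        let new: Vec<_> =
            std::iter::repeat_n(Some(0), 5).chain(std::iter::repeat_n(None, 6)).collect();
        assert_eq!(old, new);
        assert_eq!(new.len(), 11);
    }
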
@@ -522,6 +522,26 @@ pub async fn shared_index_with_geo_documents() -> &'static Index<'static, Shared
.await
}

pub async fn shared_index_geojson_documents() -> &'static Index<'static, Shared> {
static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
INDEX
.get_or_init(|| async {
// Retrieved from https://gitlab-forge.din.developpement-durable.gouv.fr/pub/geomatique/descartes/d-map/-/blob/main/demo/examples/commons/countries.geojson?ref_type=heads
let server = Server::new_shared();
let index = server._index("SHARED_GEOJSON_DOCUMENTS").to_shared();
let countries = include_str!("../documents/geojson/assets/countries.json");
let lille = serde_json::from_str::<serde_json::Value>(countries).unwrap();
let (response, _code) = index._add_documents(Value(lille), Some("name")).await;
server.wait_task(response.uid()).await.succeeded();

let (response, _code) =
index._update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(response.uid()).await.succeeded();
index
})
.await
}

pub async fn shared_index_for_fragments() -> Index<'static, Shared> {
static INDEX: OnceCell<(Server<Shared>, String)> = OnceCell::const_new();
let (server, uid) = INDEX

@@ -1,6 +1,3 @@
use crate::common::encoder::Encoder;
use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value};
use crate::json;
use actix_web::test;
use meili_snap::{json_string, snapshot};
use meilisearch::Opt;
@@ -8,6 +5,10 @@ use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
use uuid::Uuid;

use crate::common::encoder::Encoder;
use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value};
use crate::json;

/// This is the basic usage of our API and every other tests uses the content-type application/json
#[actix_rt::test]
async fn add_documents_test_json_content_types() {

@@ -134,14 +134,14 @@ async fn get_all_documents_bad_filter() {

let (response, code) = index.get_all_documents_raw("?filter=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `doggo`.\n1:6 doggo",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `doggo`.\n1:6 doggo",
"code": "invalid_document_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_document_filter"
}
"###);
"#);

let (response, code) = index.get_all_documents_raw("?filter=doggo=bernese").await;
snapshot!(code, @"400 Bad Request");
@@ -523,14 +523,14 @@ async fn delete_document_by_filter() {
// send bad filter
let (response, code) = index.delete_document_by_filter(json!({ "filter": "hello"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
snapshot!(response, @r#"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `hello`.\n1:6 hello",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `hello`.\n1:6 hello",
"code": "invalid_document_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_document_filter"
}
"###);
"#);

// send empty filter
let (response, code) = index.delete_document_by_filter(json!({ "filter": ""})).await;
@@ -724,14 +724,14 @@ async fn fetch_document_by_filter() {

let (response, code) = index.fetch_documents(json!({ "filter": "cool doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
snapshot!(response, @r#"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `cool doggo`.\n1:11 cool doggo",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `cool doggo`.\n1:11 cool doggo",
"code": "invalid_document_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_document_filter"
}
"###);
"#);

let (response, code) = index.fetch_documents(json!({ "filter": "doggo = bernese" })).await;
snapshot!(code, @"400 Bad Request");

180
crates/meilisearch/tests/documents/geojson/assets/countries.json
Normal file
File diff suppressed because one or more lines are too long
547
crates/meilisearch/tests/documents/geojson/assets/lille.geojson
Normal file
@@ -0,0 +1,547 @@
{
"type": "Polygon",
"coordinates": [
[
[3.11681, 50.63646], [3.11945, 50.63488], [3.12134, 50.63504], [3.12064, 50.63127], [3.12203, 50.62785],
[3.12389, 50.6262], [3.12161, 50.62358], [3.12547, 50.62114], [3.12447, 50.61874], [3.12288, 50.61988],
[3.12054, 50.61846], [3.11846, 50.61754], [3.11482, 50.6207], [3.11232, 50.6188], [3.10936, 50.61727],
[3.10822, 50.61765], [3.10603, 50.61536], [3.1041, 50.61596], [3.10017, 50.6186], [3.09688, 50.61714],
[3.09575, 50.61795], [3.0891, 50.61532], [3.08625, 50.61792], [3.07948, 50.61428], [3.07146, 50.6066],
[3.06819, 50.60918], [3.06502, 50.61046], [3.06223, 50.61223], [3.05925, 50.60659], [3.05463, 50.60077],
[3.04906, 50.6008], [3.04726, 50.6035], [3.04328, 50.60667], [3.04155, 50.60417], [3.03767, 50.60456],
[3.03528, 50.60538], [3.03239, 50.60725], [3.0254, 50.6111], [3.02387, 50.6125], [3.0248, 50.61344],
[3.02779, 50.61418], [3.02414, 50.6169], [3.02312, 50.61975], [3.02172, 50.62082], [3.01953, 50.62484],
[3.01811, 50.62529], [3.01313, 50.62558], [3.01385, 50.62695], [3.00844, 50.62717], [3.0056, 50.6267],
[3.00229, 50.62557], [3.00119, 50.62723], [2.99769, 50.62901], [2.99391, 50.62732], [2.98971, 50.63036],
[2.9862, 50.63328], [2.98178, 50.63404], [2.97917, 50.63499], [2.97284, 50.63429], [2.97174, 50.63365],
[2.97002, 50.63366], [2.96956, 50.63506], [2.97046, 50.6365], [2.96878, 50.63833], [2.97039, 50.6395],
[2.97275, 50.64183], [2.97225, 50.64381], [2.9745, 50.64442], [2.97474, 50.64648], [2.97091, 50.65108],
[2.96975, 50.65361], [2.97061, 50.65513], [2.96929, 50.65739], [2.97072, 50.6581], [2.97973, 50.66048],
[2.98369, 50.66123], [2.9865, 50.65959], [2.9896, 50.65845], [2.9963, 50.65666], [2.99903, 50.65552],
[3.00274, 50.65235], [3.00714, 50.64887], [3.01088, 50.64845], [3.01318, 50.64541], [3.01974, 50.63972],
[3.02317, 50.63813], [3.02639, 50.63613], [3.029, 50.63521], [3.03414, 50.6382], [3.03676, 50.63888],
[3.03686, 50.64147], [3.03791, 50.64379], [3.0409, 50.64577], [3.04582, 50.64807], [3.05132, 50.64866],
[3.05055, 50.64949], [3.05244, 50.65055], [3.05784, 50.64927], [3.0596, 50.65105], [3.06414, 50.65041],
[3.06705, 50.64936], [3.07023, 50.64706], [3.07203, 50.64355], [3.07526, 50.64188], [3.0758, 50.64453],
[3.07753, 50.64381], [3.07861, 50.64542], [3.08299, 50.64725], [3.08046, 50.64912], [3.08349, 50.65082],
[3.08354, 50.65155], [3.08477, 50.65312], [3.08542, 50.65654], [3.08753, 50.65687], [3.09032, 50.65602],
[3.09018, 50.65142], [3.09278, 50.65086], [3.09402, 50.64982], [3.09908, 50.65146], [3.10316, 50.65227],
[3.09726, 50.64723], [3.09387, 50.64358], [3.09357, 50.64095], [3.09561, 50.64133], [3.09675, 50.64018],
[3.09454, 50.63891], [3.09627, 50.63693], [3.09795, 50.63713], [3.09919, 50.63576], [3.10324, 50.6351],
[3.10613, 50.63532], [3.10649, 50.63434], [3.1109, 50.63525], [3.11502, 50.63504], [3.11681, 50.63646]
]
]
}
453
crates/meilisearch/tests/documents/geojson/mod.rs
Normal file
@@ -0,0 +1,453 @@
use meili_snap::{json_string, snapshot};

use crate::common::{shared_index_geojson_documents, Server};
use crate::json;

const LILLE: &str = include_str!("assets/lille.geojson");

#[actix_rt::test]
async fn basic_add_settings_and_geojson_documents() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) =
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(task.uid()).await.succeeded();

let (response, _) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(response,
@r#"
{
"hits": [],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0
}
"#);

let lille: serde_json::Value = serde_json::from_str(LILLE).unwrap();
let documents = json!([
{
"id": "missing",
},
{
"id": "point",
"_geojson": { "type": "Point", "coordinates": [1, 1] },
},
{
"id": "lille",
"_geojson": lille,
},
]);

let (task, _status_code) = index.add_documents(documents, None).await;
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 3,
"indexedDocuments": 3
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"#);

let (response, code) = index.get_all_documents_raw("?ids=missing,point").await;

snapshot!(code, @"200 OK");
snapshot!(response,
@r#"
{
"results": [
{
"id": "missing"
},
{
"id": "point",
"_geojson": {
"type": "Point",
"coordinates": [
1,
1
]
}
}
],
"offset": 0,
"limit": 20,
"total": 2
}
"#);

let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(response,
@r#"
{
"hits": [
{
"id": "point",
"_geojson": {
"type": "Point",
"coordinates": [
1,
1
]
}
}
],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
}
"#);
}

#[actix_rt::test]
async fn basic_add_geojson_documents_and_settings() {
let server = Server::new_shared();
let index = server.unique_index();

let lille: serde_json::Value = serde_json::from_str(LILLE).unwrap();
let documents = json!([
{
"id": "missing",
},
{
"id": "point",
"_geojson": { "type": "Point", "coordinates": [1, 1] },
},
{
"id": "lille",
"_geojson": lille,
},
]);

let (task, _status_code) = index.add_documents(documents, None).await;
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(response,
@r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 3,
"indexedDocuments": 3
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"#);

let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(response,
@r#"
{
"message": "Index `[uuid]`: Attribute `_geojson` is not filterable. This index does not have configured filterable attributes.\n14:15 _geoPolygon([0,0],[0,2],[2,2],[2,0])",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"#);

let (task, _status_code) =
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(response,
@r#"
{
"hits": [
{
"id": "point",
"_geojson": {
"type": "Point",
"coordinates": [
1,
1
]
}
}
],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
}
"#);
}

#[actix_rt::test]
async fn add_and_remove_geojson() {
let server = Server::new_shared();
let index = server.unique_index();
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;

let documents = json!([
{
"id": "missing",
},
{
"id": 0,
"_geojson": { "type": "Point", "coordinates": [1, 1] },
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);

let (task, _) = index.delete_document(0).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);

// add it back
let documents = json!([
{
"id": 0,
"_geojson": { "type": "Point", "coordinates": [1, 1] },
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
}

#[actix_rt::test]
async fn partial_update_geojson() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _) = index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(task.uid()).await.succeeded();

let documents = json!([
{
"id": 0,
"_geojson": { "type": "Point", "coordinates": [1, 1] },
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);

let documents = json!([
{
"id": 0,
"_geojson": { "type": "Point", "coordinates": [0.5, 0.5] },
}
]);
let (task, _status_code) = index.update_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
let (response, _code) =
index.search_get("?filter=_geoPolygon([0.9,0.9],[0.9,2],[2,2],[2,0.9])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
}

#[actix_rt::test]
async fn geo_bounding_box() {
let index = shared_index_geojson_documents().await;

// The bounding box is a polygon over middle Europe
let (response, code) =
index.search_get("?filter=_geoBoundingBox([50.53987503447863,21.43443989912143],[43.76393151539099,0.54979129195425])&attributesToRetrieve=name").await;
snapshot!(code, @"200 OK");
snapshot!(response, @r#"
{
"hits": [
{
"name": "Austria"
},
{
"name": "Belgium"
},
{
"name": "Bosnia_and_Herzegovina"
},
{
"name": "Switzerland"
},
{
"name": "Czech_Republic"
},
{
"name": "Germany"
},
{
"name": "France"
},
{
"name": "Croatia"
},
{
"name": "Hungary"
},
{
"name": "Italy"
},
{
"name": "Luxembourg"
},
{
"name": "Netherlands"
},
{
"name": "Poland"
},
{
"name": "Romania"
},
{
"name": "Republic_of_Serbia"
},
{
"name": "Slovakia"
},
{
"name": "Slovenia"
}
],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 17
}
"#);

// Between Russia and Alaska
let (response, code) = index
.search_get("?filter=_geoBoundingBox([70,-148],[63,152])&attributesToRetrieve=name")
.await;
snapshot!(code, @"200 OK");
snapshot!(response, @r#"
{
"hits": [
{
"name": "Canada"
},
{
"name": "Russia"
},
{
"name": "United_States_of_America"
}
],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3
}
"#);
}

#[actix_rt::test]
async fn geo_radius() {
let index = shared_index_geojson_documents().await;

// 200km around Luxembourg
let (response, code) = index
.search_get("?filter=_geoRadius(49.4369862,6.5576591,200000)&attributesToRetrieve=name")
.await;
snapshot!(code, @"200 OK");
snapshot!(response, @r#"
{
"hits": [
{
"name": "Belgium"
},
{
"name": "Germany"
},
{
"name": "France"
},
{
"name": "Luxembourg"
},
{
"name": "Netherlands"
}
],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 5
}
"#);
}

#[actix_rt::test]
async fn bug_5904() {
// https://github.com/meilisearch/meilisearch/issues/5904

let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) =
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(response.uid()).await.succeeded();

let geojson = json!({
"id": 1,
"_geojson": {
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
4.23914,
48.382893
]
},
"properties": {}
}
]
}
});
let (response, _code) = index.add_documents(geojson, Some("id")).await;
server.wait_task(response.uid()).await.succeeded();
}
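The `bug_5904` test above feeds a `FeatureCollection` document through indexing, the shape that used to crash (issue 5904). A hedged sketch of how such a payload parses with the `geojson` crate (version 0.24, as pinned elsewhere in this compare):

    use std::str::FromStr;

    use geojson::GeoJson;

    fn main() {
        let raw = r#"{
            "type": "FeatureCollection",
            "features": [{
                "type": "Feature",
                "geometry": { "type": "Point", "coordinates": [4.23914, 48.382893] },
                "properties": {}
            }]
        }"#;
        // A valid FeatureCollection must round-trip through the parser
        // instead of aborting the indexing task.
        match GeoJson::from_str(raw) {
            Ok(GeoJson::FeatureCollection(fc)) => assert_eq!(fc.features.len(), 1),
            other => panic!("expected a feature collection, got {other:?}"),
        }
    }
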
@@ -1,5 +1,6 @@
mod add_documents;
mod delete_documents;
mod errors;
mod geojson;
mod get_documents;
mod update_documents;

@@ -1,5 +1,4 @@
use meili_snap::snapshot;

use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

@@ -642,14 +642,14 @@ async fn filter_invalid_syntax_object() {
&json!({"filterableAttributes": ["title"]}),
&json!({"filter": "title & Glass"}),
|response, code| {
snapshot!(response, @r###"
snapshot!(response, @r#"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `title & Glass`.\n1:14 title & Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
snapshot!(code, @"400 Bad Request");
},
)
@@ -663,14 +663,14 @@ async fn filter_invalid_syntax_array() {
&json!({"filterableAttributes": ["title"]}),
&json!({"filter": ["title & Glass"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(response, @r#"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `title & Glass`.\n1:14 title & Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
snapshot!(code, @"400 Bad Request");
},
)

@@ -2,8 +2,7 @@ use std::sync::Arc;

use actix_http::StatusCode;
use meili_snap::{json_string, snapshot};
use wiremock::matchers::method;
use wiremock::matchers::{path, AnyMatcher};
use wiremock::matchers::{method, path, AnyMatcher};
use wiremock::{Mock, MockServer, Request, ResponseTemplate};

use crate::common::{Server, Value, SCORE_DOCUMENTS};

@@ -1,7 +1,8 @@
use meili_snap::{json_string, snapshot};

use super::shared_index_with_documents;
use crate::common::Server;
use crate::json;
use meili_snap::{json_string, snapshot};

#[actix_rt::test]
async fn default_search_should_return_estimated_total_hit() {

@@ -1,6 +1,7 @@
use meili_snap::{json_string, snapshot};

use crate::common::Server;
use crate::json;
use meili_snap::{json_string, snapshot};

#[actix_rt::test]
async fn set_reset_chat_issue_5772() {

@@ -339,14 +339,14 @@ async fn filter_invalid_syntax_object() {

index
.similar(json!({"id": 287947, "filter": "title & Glass", "embedder": "manual"}), |response, code| {
snapshot!(response, @r###"
snapshot!(response, @r#"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `title & Glass`.\n1:14 title & Glass",
"code": "invalid_similar_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}
"###);
"#);
snapshot!(code, @"400 Bad Request");
})
.await;
@@ -377,14 +377,14 @@ async fn filter_invalid_syntax_array() {

index
.similar(json!({"id": 287947, "filter": ["title & Glass"], "embedder": "manual"}), |response, code| {
snapshot!(response, @r###"
snapshot!(response, @r#"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `title & Glass`.\n1:14 title & Glass",
"code": "invalid_similar_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}
"###);
"#);
snapshot!(code, @"400 Bad Request");
})
.await;

@@ -43,7 +43,7 @@ async fn version_too_old() {
std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.21.0");
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.22.1");
}

#[actix_rt::test]
@@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.21.1 is higher than the Meilisearch version 1.21.0. Downgrade is not supported");
snapshot!(err, @"Database version 1.22.2 is higher than the Meilisearch version 1.22.1. Downgrade is not supported");
}

#[actix_rt::test]

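The two tests above pin the upgrade-gate messages to the new release: a database older than v1.12 must go through a dump, and a database stamped newer than the binary is refused. A toy sketch of the downgrade check; tuple comparison is lexicographic (major first, then minor, then patch), and the names are illustrative rather than the actual implementation:

    fn downgrade_forbidden(db: (u32, u32, u32), binary: (u32, u32, u32)) -> bool {
        db > binary
    }

    fn main() {
        // Mirrors the snapshot: a 1.22.2 database cannot open under 1.22.1.
        assert!(downgrade_forbidden((1, 22, 2), (1, 22, 1)));
        // Older databases are not a downgrade; they take the upgrade path.
        assert!(!downgrade_forbidden((1, 12, 0), (1, 22, 1)));
    }
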
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.21.0"
"upgradeTo": "v1.22.1"
},
"stats": {
"totalNbTasks": 1,

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.21.0"
"upgradeTo": "v1.22.1"
},
"stats": {
"totalNbTasks": 1,

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.21.0"
"upgradeTo": "v1.22.1"
},
"stats": {
"totalNbTasks": 1,

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.21.0"
"upgradeTo": "v1.22.1"
},
"error": null,
"duration": "[duration]",

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.21.0"
"upgradeTo": "v1.22.1"
},
"error": null,
"duration": "[duration]",

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.21.0"
"upgradeTo": "v1.22.1"
},
"error": null,
"duration": "[duration]",

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.21.0"
"upgradeTo": "v1.22.1"
},
"stats": {
"totalNbTasks": 1,

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.21.0"
"upgradeTo": "v1.22.1"
},
"error": null,
"duration": "[duration]",

@@ -249,7 +249,7 @@ async fn user_provide_mismatched_embedding_dimension() {
"###);
}

async fn generate_default_user_provided_documents(server: &Server) -> Index {
async fn generate_default_user_provided_documents(server: &Server) -> Index<'_> {
let index = server.index("doggo");

let (response, code) = index

@@ -1,3 +1,5 @@
#![allow(clippy::result_large_err)]

use std::fs::{read_dir, read_to_string, remove_file, File};
use std::io::{BufWriter, Write as _};
use std::path::PathBuf;

@@ -17,7 +17,7 @@ impl<'a> BytesDecode<'a> for UuidCodec {
impl BytesEncode<'_> for UuidCodec {
type EItem = Uuid;

fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
fn bytes_encode(item: &'_ Self::EItem) -> Result<Cow<'_, [u8]>, BoxedError> {
Ok(Cow::Borrowed(item.as_bytes()))
}
}

@@ -19,6 +19,7 @@ bstr = "1.12.0"
bytemuck = { version = "1.23.1", features = ["extern_crate_alloc"] }
byteorder = "1.5.0"
charabia = { version = "0.9.7", default-features = false }
cellulite = "0.3.0"
concat-arrays = "0.1.2"
convert_case = "0.8.0"
crossbeam-channel = "0.5.15"
@@ -27,6 +28,7 @@ either = { version = "1.15.0", features = ["serde"] }
flatten-serde-json = { path = "../flatten-serde-json" }
fst = "0.4.7"
fxhash = "0.2.1"
geojson = "0.24.2"
geoutils = "0.5.1"
grenad = { version = "0.5.0", default-features = false, features = [
"rayon",
@@ -88,7 +90,7 @@ rhai = { version = "1.22.2", features = [
"sync",
] }
arroy = "0.6.3"
hannoy = "0.0.5"
hannoy = { version = "0.0.8", features = ["arroy"] }
rand = "0.8.5"
tracing = "0.1.41"
ureq = { version = "2.12.1", features = ["json"] }
@@ -96,7 +98,7 @@ url = "2.5.4"
hashbrown = "0.15.4"
bumpalo = "3.18.1"
bumparaw-collections = "0.1.4"
steppe = { version = "0.4.0", default-features = false }
steppe = { version = "0.4", default-features = false }
thread_local = "1.1.9"
allocator-api2 = "0.3.0"
rustc-hash = "2.1.1"
@@ -116,6 +118,8 @@ twox-hash = { version = "2.1.1", default-features = false, features = [
"xxhash3_64",
"xxhash64",
] }
geo-types = "0.7.16"
zerometry = "0.3.0"

[dev-dependencies]
mimalloc = { version = "0.1.47", default-features = false }

@@ -11,3 +11,4 @@ const fn parse_u32(s: &str) -> u32 {

pub const RESERVED_VECTORS_FIELD_NAME: &str = "_vectors";
pub const RESERVED_GEO_FIELD_NAME: &str = "_geo";
pub const RESERVED_GEOJSON_FIELD_NAME: &str = "_geojson";

@@ -48,6 +48,7 @@ pub enum PrimaryKey<'a> {
Nested { name: &'a str },
}

#[allow(clippy::large_enum_variant)]
pub enum DocumentIdExtractionError {
InvalidDocumentId(UserError),
MissingDocumentId,

@@ -10,17 +10,26 @@ use rhai::EvalAltResult;
use serde_json::Value;
use thiserror::Error;

use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::constants::{RESERVED_GEOJSON_FIELD_NAME, RESERVED_GEO_FIELD_NAME};
use crate::documents::{self, DocumentsBatchCursorError};
use crate::thread_pool_no_abort::PanicCatched;
use crate::vector::settings::EmbeddingSettings;
use crate::{CriterionError, DocumentId, FieldId, Object, SortError};

pub fn is_reserved_keyword(keyword: &str) -> bool {
[RESERVED_GEO_FIELD_NAME, "_geoDistance", "_geoPoint", "_geoRadius", "_geoBoundingBox"]
.contains(&keyword)
[
RESERVED_GEO_FIELD_NAME,
RESERVED_GEOJSON_FIELD_NAME,
"_geoDistance",
"_geoPoint",
"_geoRadius",
"_geoBoundingBox",
"_geoPolygon",
]
.contains(&keyword)
}

#[allow(clippy::large_enum_variant)]
#[derive(Error, Debug)]
pub enum Error {
#[error("internal: {0}.")]
@@ -80,6 +89,8 @@ pub enum InternalError {
#[error(transparent)]
HannoyError(#[from] hannoy::Error),
#[error(transparent)]
CelluliteError(#[from] cellulite::Error),
#[error(transparent)]
VectorEmbeddingError(#[from] crate::vector::Error),
}

@@ -99,6 +110,12 @@ pub enum SerializationError {
InvalidNumberSerialization,
}

impl From<cellulite::Error> for Error {
fn from(error: cellulite::Error) -> Self {
Self::UserError(UserError::CelluliteError(error))
}
}

#[derive(Error, Debug)]
pub enum FieldIdMapMissingEntry {
#[error("unknown field id {field_id} coming from the {process} process")]
@@ -107,8 +124,13 @@ pub enum FieldIdMapMissingEntry {
FieldName { field_name: String, process: &'static str },
}

#[allow(clippy::large_enum_variant)]
#[derive(Error, Debug)]
pub enum UserError {
#[error(transparent)]
CelluliteError(#[from] cellulite::Error),
#[error("Malformed geojson: {0}")]
MalformedGeojson(serde_json::Error),
#[error("A document cannot contain more than 65,535 fields.")]
AttributeLimitReached,
#[error(transparent)]
@@ -153,6 +175,8 @@ and can not be more than 511 bytes.", .document_id.to_string()
},
#[error(transparent)]
InvalidGeoField(#[from] Box<GeoError>),
#[error(transparent)]
GeoJsonError(#[from] geojson::Error),
#[error("Invalid vector dimensions: expected: `{}`, found: `{}`.", .expected, .found)]
InvalidVectorDimensions { expected: usize, found: usize },
#[error("Invalid vector dimensions in document with id `{document_id}` in `._vectors.{embedder_name}`.\n  - note: embedding #{embedding_index} has dimensions {found}\n  - note: embedder `{embedder_name}` requires {expected}")]
@@ -621,7 +645,7 @@ impl From<HeedError> for Error {
// TODO use the encoding
HeedError::Encoding(_) => InternalError(Serialization(Encoding { db_name: None })),
HeedError::Decoding(_) => InternalError(Serialization(Decoding { db_name: None })),
HeedError::EnvAlreadyOpened { .. } => UserError(EnvAlreadyOpened),
HeedError::EnvAlreadyOpened => UserError(EnvAlreadyOpened),
}
}
}

@@ -6,7 +6,9 @@ use heed::RoTxn;

use super::FieldsIdsMap;
use crate::attribute_patterns::{match_field_legacy, PatternMatch};
-use crate::constants::{RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME};
+use crate::constants::{
+    RESERVED_GEOJSON_FIELD_NAME, RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME,
+};
use crate::{
    is_faceted_by, FieldId, FilterableAttributesFeatures, FilterableAttributesRule, Index,
    LocalizedAttributesRule, Result, Weight,
@@ -24,6 +26,8 @@ pub struct Metadata {
    pub asc_desc: bool,
    /// The field is a geo field (`_geo`, `_geo.lat`, `_geo.lng`).
    pub geo: bool,
+    /// The field is a geo json field (`_geojson`).
+    pub geo_json: bool,
    /// The id of the localized attributes rule if the field is localized.
    pub localized_attributes_rule_id: Option<NonZeroU16>,
    /// The id of the filterable attributes rule if the field is filterable.
@@ -269,6 +273,7 @@ impl MetadataBuilder {
            distinct: false,
            asc_desc: false,
            geo: false,
+            geo_json: false,
            localized_attributes_rule_id: None,
            filterable_attributes_rule_id: None,
        };
@@ -295,6 +300,20 @@ impl MetadataBuilder {
                distinct: false,
                asc_desc: false,
                geo: true,
+                geo_json: false,
                localized_attributes_rule_id: None,
                filterable_attributes_rule_id,
            };
        }
+        if match_field_legacy(RESERVED_GEOJSON_FIELD_NAME, field) == PatternMatch::Match {
+            debug_assert!(!sortable, "geojson fields should not be sortable");
+            return Metadata {
+                searchable: None,
+                sortable,
+                distinct: false,
+                asc_desc: false,
+                geo: false,
+                geo_json: true,
+                localized_attributes_rule_id: None,
+                filterable_attributes_rule_id,
+            };
@@ -328,6 +347,7 @@ impl MetadataBuilder {
            distinct,
            asc_desc,
            geo: false,
+            geo_json: false,
            localized_attributes_rule_id,
            filterable_attributes_rule_id,
        }

@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use utoipa::ToSchema;

use crate::attribute_patterns::{match_distinct_field, match_field_legacy, PatternMatch};
-use crate::constants::RESERVED_GEO_FIELD_NAME;
+use crate::constants::{RESERVED_GEOJSON_FIELD_NAME, RESERVED_GEO_FIELD_NAME};
use crate::AttributePatterns;

#[derive(Serialize, Deserialize, PartialEq, Eq, Clone, Debug, ToSchema)]
@@ -34,6 +34,10 @@ impl FilterableAttributesRule {
        matches!(self, FilterableAttributesRule::Field(field_name) if field_name == RESERVED_GEO_FIELD_NAME)
    }

+    pub fn has_geojson(&self) -> bool {
+        matches!(self, FilterableAttributesRule::Field(field_name) if field_name == RESERVED_GEOJSON_FIELD_NAME)
+    }
+
    /// Get the features of the rule.
    pub fn features(&self) -> FilterableAttributesFeatures {
        match self {

@@ -19,14 +19,14 @@ impl RoaringBitmapLenCodec {
        if cookie == SERIAL_COOKIE_NO_RUNCONTAINER {
            (bytes.read_u32::<LittleEndian>()? as usize, true)
        } else if (cookie as u16) == SERIAL_COOKIE {
-            return Err(io::Error::new(io::ErrorKind::Other, "run containers are unsupported"));
+            return Err(io::Error::other("run containers are unsupported"));
        } else {
-            return Err(io::Error::new(io::ErrorKind::Other, "unknown cookie value"));
+            return Err(io::Error::other("unknown cookie value"));
        }
    };

    if size > u16::MAX as usize + 1 {
-        return Err(io::Error::new(io::ErrorKind::Other, "size is greater than supported"));
+        return Err(io::Error::other("size is greater than supported"));
    }

    let mut description_bytes = vec![0u8; size * 4];

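For context on the rewrite above: `io::Error::other(e)` is the standard-library shorthand for `io::Error::new(io::ErrorKind::Other, e)`, stabilized in Rust 1.74. A minimal standalone sketch:

    use std::io;

    fn main() {
        let a = io::Error::new(io::ErrorKind::Other, "unknown cookie value");
        let b = io::Error::other("unknown cookie value");
        // Same kind, same message; only the spelling is shorter.
        assert_eq!(a.kind(), b.kind());
        assert_eq!(a.to_string(), b.to_string());
    }
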
@@ -5,6 +5,7 @@ use std::fmt;
use std::fs::File;
use std::path::Path;

+use cellulite::Cellulite;
use deserr::Deserr;
use heed::types::*;
use heed::{CompactionOption, Database, DatabaseStat, RoTxn, RwTxn, Unspecified, WithoutTls};
@@ -115,9 +116,10 @@ pub mod db_name {
    pub const FIELD_ID_DOCID_FACET_STRINGS: &str = "field-id-docid-facet-strings";
    pub const VECTOR_EMBEDDER_CATEGORY_ID: &str = "vector-embedder-category-id";
    pub const VECTOR_STORE: &str = "vector-arroy";
+    pub const CELLULITE: &str = "cellulite";
    pub const DOCUMENTS: &str = "documents";
}
-const NUMBER_OF_DBS: u32 = 25;
+const NUMBER_OF_DBS: u32 = 25 + Cellulite::nb_dbs();

#[derive(Clone)]
pub struct Index {
@@ -183,6 +185,9 @@ pub struct Index {
    /// Vector store based on hannoy™.
    pub vector_store: hannoy::Database<Unspecified>,

+    /// Geo store based on cellulite™.
+    pub cellulite: Cellulite,
+
    /// Maps the document id to the document as an obkv store.
    pub(crate) documents: Database<BEU32, ObkvCodec>,
}
@@ -239,6 +244,7 @@ impl Index {
        let embedder_category_id =
            env.create_database(&mut wtxn, Some(VECTOR_EMBEDDER_CATEGORY_ID))?;
        let vector_store = env.create_database(&mut wtxn, Some(VECTOR_STORE))?;
+        let cellulite = cellulite::Cellulite::create_from_env(&env, &mut wtxn, CELLULITE)?;

        let documents = env.create_database(&mut wtxn, Some(DOCUMENTS))?;

@@ -267,6 +273,7 @@ impl Index {
            field_id_docid_facet_strings,
            vector_store,
            embedder_category_id,
+            cellulite,
            documents,
        };
        if this.get_version(&wtxn)?.is_none() && creation {
@@ -1052,6 +1059,13 @@ impl Index {
        Ok(geo_filter)
    }

+    /// Returns true if the geojson filtering feature is enabled.
+    pub fn is_geojson_filtering_enabled(&self, rtxn: &RoTxn<'_>) -> Result<bool> {
+        let geojson_filter =
+            self.filterable_attributes_rules(rtxn)?.iter().any(|field| field.has_geojson());
+        Ok(geojson_filter)
+    }
+
    pub fn asc_desc_fields(&self, rtxn: &RoTxn<'_>) -> Result<HashSet<String>> {
        let asc_desc_fields = self
            .criteria(rtxn)?
@@ -1882,6 +1896,7 @@ impl Index {
            field_id_docid_facet_strings,
            vector_store,
            embedder_category_id,
+            cellulite,
            documents,
        } = self;

@@ -1955,6 +1970,17 @@ impl Index {
        sizes.insert("embedder_category_id", embedder_category_id.stat(rtxn).map(compute_size)?);
        sizes.insert("documents", documents.stat(rtxn).map(compute_size)?);

+        // Cellulite
+        const _CELLULITE_DB_CHECK: () = {
+            if Cellulite::nb_dbs() != 4 {
+                panic!("Cellulite database count has changed, please update the code accordingly.")
+            }
+        };
+        sizes.insert("cellulite_item", cellulite.item_db_stats(rtxn).map(compute_size)?);
+        sizes.insert("cellulite_cell", cellulite.cell_db_stats(rtxn).map(compute_size)?);
+        sizes.insert("cellulite_update", cellulite.update_db_stats(rtxn).map(compute_size)?);
+        sizes.insert("cellulite_metadata", cellulite.metadata_db_stats(rtxn).map(compute_size)?);
+
        Ok(sizes)
    }
}

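A note on the `_CELLULITE_DB_CHECK` constant above: evaluating a `panic!` inside a `const` turns the database-count invariant into a compile-time error rather than a runtime surprise. A minimal sketch of the same pattern, with hypothetical names:

    const fn nb_dbs() -> u32 {
        4
    }

    // Compilation fails (a const-evaluation error) if the count ever drifts
    // from the four per-database stat entries it is meant to mirror.
    const _DB_COUNT_CHECK: () = {
        if nb_dbs() != 4 {
            panic!("database count changed; update the stats collection");
        }
    };

    fn main() {}
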
@@ -1,4 +1,5 @@
#![allow(clippy::type_complexity)]
+#![allow(clippy::result_large_err)]

#[cfg(not(windows))]
#[cfg(test)]
@@ -53,7 +54,7 @@ pub use search::new::{
};
use serde_json::Value;
pub use thread_pool_no_abort::{PanicCatched, ThreadPoolNoAbort, ThreadPoolNoAbortBuilder};
-pub use {arroy, charabia as tokenizer, hannoy, heed, rhai};
+pub use {arroy, cellulite, charabia as tokenizer, hannoy, heed, rhai};

pub use self::asc_desc::{AscDesc, AscDescError, Member, SortError};
pub use self::attribute_patterns::{AttributePatterns, PatternMatch};
@@ -86,7 +87,7 @@ pub use self::search::{
};
pub use self::update::ChannelCongestion;

-pub type Result<T> = std::result::Result<T, error::Error>;
+pub type Result<T, E = error::Error> = std::result::Result<T, E>;

pub type Attribute = u32;
pub type BEU16 = heed::types::U16<heed::byteorder::BE>;

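The `Result` alias change above is backward compatible: `Result<T>` still defaults to `error::Error`, while call sites can now spell `Result<T, OtherError>` without reaching for `std::result::Result`. A minimal sketch with hypothetical error types:

    mod error {
        #[derive(Debug)]
        pub struct Error;
    }

    pub type Result<T, E = error::Error> = std::result::Result<T, E>;

    fn with_default_error() -> Result<u32> {
        Ok(1) // error type defaults to error::Error
    }

    fn with_custom_error() -> Result<u32, String> {
        Err("boom".to_string()) // default overridden per call site
    }

    fn main() {
        assert_eq!(with_default_error().unwrap(), 1);
        assert!(with_custom_error().is_err());
    }
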
@@ -278,30 +278,6 @@ impl<U: Send + Sync + 'static> Step for VariableNameStep<U> {
    }
}

-// Integration with steppe
-
-impl steppe::Progress for Progress {
-    fn update(&self, sub_progress: impl steppe::Step) {
-        self.update_progress(Compat(sub_progress));
-    }
-}
-
-struct Compat<T: steppe::Step>(T);
-
-impl<T: steppe::Step> Step for Compat<T> {
-    fn name(&self) -> Cow<'static, str> {
-        self.0.name()
-    }
-
-    fn current(&self) -> u32 {
-        self.0.current().try_into().unwrap_or(u32::MAX)
-    }
-
-    fn total(&self) -> u32 {
-        self.0.total().try_into().unwrap_or(u32::MAX)
-    }
-}
-
impl Step for arroy::MainStep {
    fn name(&self) -> Cow<'static, str> {
        match self {
@@ -343,3 +319,27 @@ impl Step for arroy::SubStep {
        self.max
    }
}
+
+// Integration with steppe
+
+impl steppe::Progress for Progress {
+    fn update(&self, sub_progress: impl steppe::Step) {
+        self.update_progress(Compat(sub_progress));
+    }
+}
+
+struct Compat<T: steppe::Step>(T);
+
+impl<T: steppe::Step> Step for Compat<T> {
+    fn name(&self) -> Cow<'static, str> {
+        self.0.name()
+    }
+
+    fn current(&self) -> u32 {
+        self.0.current().try_into().unwrap_or(u32::MAX)
+    }
+
+    fn total(&self) -> u32 {
+        self.0.total().try_into().unwrap_or(u32::MAX)
+    }
+}

@@ -304,7 +304,7 @@ impl ArrayView for ParseableArray<'_> {

    fn get(&self, index: i64) -> Option<&dyn ValueView> {
        let index = convert_index(index, self.size());
-        if index <= 0 {
+        if index < 0 {
            return None;
        }
        let v = self.0.get(index as usize)?;

@@ -38,7 +38,7 @@ where
    let highest_level = get_highest_level(rtxn, db, field_id)?;

    if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
-        fd.iterate(candidates, highest_level, first_bound, usize::MAX)?;
+        let _ = fd.iterate(candidates, highest_level, first_bound, usize::MAX)?;
        Ok(())
    } else {
        Ok(())

@@ -12,7 +12,9 @@ use roaring::{MultiOps, RoaringBitmap};
use serde_json::Value;

use super::facet_range_search;
-use crate::constants::{RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME};
+use crate::constants::{
+    RESERVED_GEOJSON_FIELD_NAME, RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME,
+};
use crate::error::{Error, UserError};
use crate::filterable_attributes_rules::{filtered_matching_patterns, matching_features};
use crate::heed_codec::facet::{FacetGroupKey, FacetGroupKeyCodec, FacetGroupValueCodec};
@@ -36,6 +38,7 @@ pub struct Filter<'a> {
pub enum BadGeoError {
    Lat(f64),
    Lng(f64),
+    InvalidResolution(usize),
    BoundingBoxTopIsBelowBottom(f64, f64),
}

@@ -47,16 +50,23 @@ impl Display for BadGeoError {
            Self::BoundingBoxTopIsBelowBottom(top, bottom) => {
                write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`.")
            }
+            Self::InvalidResolution(resolution) => write!(
+                f,
+                "Invalid resolution `{resolution}`. Resolution must be between 3 and 1000."
+            ),
            Self::Lat(lat) => write!(
                f,
-                "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ",
+                "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees.",
                lat
            ),
-            Self::Lng(lng) => write!(
-                f,
-                "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ",
-                lng
-            ),
+            Self::Lng(lng) => {
+                let normalized = (lng + 180.0).rem_euclid(360.0) - 180.0;
+                write!(
+                    f,
+                    "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. Hint: try using `{normalized}` instead.",
+                    lng
+                )
+            }
        }
    }
}
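The hint in the new `Lng` arm comes from folding the value into range with `rem_euclid`; a minimal standalone sketch of that arithmetic:

    // Folds any longitude into [-180, 180); e.g. 250 becomes -110.
    fn normalize_lng(lng: f64) -> f64 {
        (lng + 180.0).rem_euclid(360.0) - 180.0
    }

    fn main() {
        assert_eq!(normalize_lng(250.0), -110.0);
        assert_eq!(normalize_lng(540.0), -180.0);
        assert_eq!(normalize_lng(-110.0), -110.0); // already in range
    }
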
@@ -225,11 +235,11 @@ impl<'a> Filter<'a> {
        Ok(Some(Self { condition }))
    }

-    pub fn use_contains_operator(&self) -> Option<&Token> {
+    pub fn use_contains_operator(&self) -> Option<&Token<'_>> {
        self.condition.use_contains_operator()
    }

-    pub fn use_vector_filter(&self) -> Option<&Token> {
+    pub fn use_vector_filter(&self) -> Option<&Token<'_>> {
        self.condition.use_vector_filter()
    }
}
@@ -612,50 +622,61 @@ impl<'a> Filter<'a> {
            .union(),
        FilterCondition::And(subfilters) => {
            let mut subfilters_iter = subfilters.iter();
-            if let Some(first_subfilter) = subfilters_iter.next() {
-                let mut bitmap = Self::inner_evaluate(
-                    &(first_subfilter.clone()).into(),
+            let Some(first_subfilter) = subfilters_iter.next() else {
+                return Ok(RoaringBitmap::new());
+            };
+
+            let mut bitmap = Self::inner_evaluate(
+                &(first_subfilter.clone()).into(),
                rtxn,
                index,
                field_ids_map,
                filterable_attribute_rules,
                universe,
            )?;
            for f in subfilters_iter {
                if bitmap.is_empty() {
                    return Ok(bitmap);
                }
                // TODO We are doing the intersections two times,
                // it could be more efficient
                // Can't I just replace this `&=` by an `=`?
                bitmap &= Self::inner_evaluate(
                    &(f.clone()).into(),
                    rtxn,
                    index,
                    field_ids_map,
                    filterable_attribute_rules,
                    universe,
                    Some(&bitmap),
                )?;
            for f in subfilters_iter {
                if bitmap.is_empty() {
                    return Ok(bitmap);
                }
                // TODO We are doing the intersections two times,
                // it could be more efficient
                // Can't I just replace this `&=` by an `=`?
                bitmap &= Self::inner_evaluate(
                    &(f.clone()).into(),
                    rtxn,
                    index,
                    field_ids_map,
                    filterable_attribute_rules,
                    Some(&bitmap),
                )?;
            }
            Ok(bitmap)
-            } else {
-                Ok(RoaringBitmap::new())
-            }
            Ok(bitmap)
        }
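The `And` arm above is the old `if let ... else` body rewritten with `let`-`else`, which removes one nesting level by returning early when the conjunction is empty. The shape, in isolation:

    fn first_word_len(words: &[&str]) -> usize {
        let Some(first) = words.first() else {
            return 0; // empty input handled up front, as in the filter code
        };
        first.len()
    }

    fn main() {
        assert_eq!(first_word_len(&["geo", "json"]), 3);
        assert_eq!(first_word_len(&[]), 0);
    }
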
        FilterCondition::VectorExists { fid: _, embedder, filter } => {
            super::filter_vector::evaluate(rtxn, index, universe, embedder.clone(), filter)
        }
-        FilterCondition::GeoLowerThan { point, radius } => {
+        FilterCondition::GeoLowerThan { point, radius, resolution: res_token } => {
+            let base_point: [f64; 2] =
+                [point[0].parse_finite_float()?, point[1].parse_finite_float()?];
+            if !(-90.0..=90.0).contains(&base_point[0]) {
+                return Err(point[0].as_external_error(BadGeoError::Lat(base_point[0])))?;
+            }
+            if !(-180.0..=180.0).contains(&base_point[1]) {
+                return Err(point[1].as_external_error(BadGeoError::Lng(base_point[1])))?;
+            }
+            let radius = radius.parse_finite_float()?;
+            let mut resolution = 125;
+            if let Some(res_token) = res_token {
+                resolution = res_token.parse_finite_float()? as usize;
+                if !(3..=1000).contains(&resolution) {
+                    return Err(
+                        res_token.as_external_error(BadGeoError::InvalidResolution(resolution))
+                    )?;
+                }
+            }
+
+            let mut r1 = None;
            if index.is_geo_filtering_enabled(rtxn)? {
-                let base_point: [f64; 2] =
-                    [point[0].parse_finite_float()?, point[1].parse_finite_float()?];
-                if !(-90.0..=90.0).contains(&base_point[0]) {
-                    return Err(point[0].as_external_error(BadGeoError::Lat(base_point[0])))?;
-                }
-                if !(-180.0..=180.0).contains(&base_point[1]) {
-                    return Err(point[1].as_external_error(BadGeoError::Lng(base_point[1])))?;
-                }
-                let radius = radius.parse_finite_float()?;
                let rtree = match index.geo_rtree(rtxn)? {
                    Some(rtree) => rtree,
                    None => return Ok(RoaringBitmap::new()),
@@ -671,52 +692,72 @@ impl<'a> Filter<'a> {
                    })
                    .map(|point| point.data.0)
                    .collect();
+                r1 = Some(result);
+            }

-                Ok(result)
-            } else {
-                Err(point[0].as_external_error(FilterError::AttributeNotFilterable {
-                    attribute: RESERVED_GEO_FIELD_NAME,
-                    filterable_patterns: filtered_matching_patterns(
-                        filterable_attribute_rules,
-                        &|features| features.is_filterable(),
-                    ),
-                }))?
+            let mut r2 = None;
+            if index.is_geojson_filtering_enabled(rtxn)? {
+                let point = geo_types::Point::new(base_point[1], base_point[0]);
+
+                let result = index.cellulite.in_circle(rtxn, point, radius, resolution)?;
+
+                r2 = Some(RoaringBitmap::from_iter(result)); // TODO: Remove once we update roaring in meilisearch
+            }
+
+            match (r1, r2) {
+                (Some(r1), Some(r2)) => Ok(r1 | r2),
+                (Some(r1), None) => Ok(r1),
+                (None, Some(r2)) => Ok(r2),
+                (None, None) => {
+                    Err(point[0].as_external_error(FilterError::AttributeNotFilterable {
+                        attribute: &format!(
+                            "{RESERVED_GEO_FIELD_NAME}/{RESERVED_GEOJSON_FIELD_NAME}"
+                        ),
+                        filterable_patterns: filtered_matching_patterns(
+                            filterable_attribute_rules,
+                            &|features| features.is_filterable(),
+                        ),
+                    }))?
+                }
+            }
        }
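Both geo backends can be active at once, so the arm above unions their results and only errors when neither `_geo` nor `_geojson` is filterable. A minimal sketch of that merge, with `u64` standing in for the roaring bitmaps:

    fn merge(r1: Option<u64>, r2: Option<u64>) -> Result<u64, &'static str> {
        match (r1, r2) {
            (Some(a), Some(b)) => Ok(a | b), // union of both backends
            (Some(a), None) => Ok(a),
            (None, Some(b)) => Ok(b),
            (None, None) => Err("neither `_geo` nor `_geojson` is filterable"),
        }
    }

    fn main() {
        assert_eq!(merge(Some(0b0110), Some(0b0011)), Ok(0b0111));
        assert!(merge(None, None).is_err());
    }
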
        FilterCondition::GeoBoundingBox { top_right_point, bottom_left_point } => {
-            if index.is_geo_filtering_enabled(rtxn)? {
-                let top_right: [f64; 2] = [
-                    top_right_point[0].parse_finite_float()?,
-                    top_right_point[1].parse_finite_float()?,
-                ];
-                let bottom_left: [f64; 2] = [
-                    bottom_left_point[0].parse_finite_float()?,
-                    bottom_left_point[1].parse_finite_float()?,
-                ];
-                if !(-90.0..=90.0).contains(&top_right[0]) {
-                    return Err(
-                        top_right_point[0].as_external_error(BadGeoError::Lat(top_right[0]))
-                    )?;
-                }
-                if !(-180.0..=180.0).contains(&top_right[1]) {
-                    return Err(
-                        top_right_point[1].as_external_error(BadGeoError::Lng(top_right[1]))
-                    )?;
-                }
-                if !(-90.0..=90.0).contains(&bottom_left[0]) {
-                    return Err(bottom_left_point[0]
-                        .as_external_error(BadGeoError::Lat(bottom_left[0])))?;
-                }
-                if !(-180.0..=180.0).contains(&bottom_left[1]) {
-                    return Err(bottom_left_point[1]
-                        .as_external_error(BadGeoError::Lng(bottom_left[1])))?;
-                }
-                if top_right[0] < bottom_left[0] {
-                    return Err(bottom_left_point[1].as_external_error(
-                        BadGeoError::BoundingBoxTopIsBelowBottom(top_right[0], bottom_left[0]),
-                    ))?;
-                }
+            let top_right: [f64; 2] = [
+                top_right_point[0].parse_finite_float()?,
+                top_right_point[1].parse_finite_float()?,
+            ];
+            let bottom_left: [f64; 2] = [
+                bottom_left_point[0].parse_finite_float()?,
+                bottom_left_point[1].parse_finite_float()?,
+            ];
+            if !(-90.0..=90.0).contains(&top_right[0]) {
+                return Err(
+                    top_right_point[0].as_external_error(BadGeoError::Lat(top_right[0]))
+                )?;
+            }
+            if !(-180.0..=180.0).contains(&top_right[1]) {
+                return Err(
+                    top_right_point[1].as_external_error(BadGeoError::Lng(top_right[1]))
+                )?;
+            }
+            if !(-90.0..=90.0).contains(&bottom_left[0]) {
+                return Err(
+                    bottom_left_point[0].as_external_error(BadGeoError::Lat(bottom_left[0]))
+                )?;
+            }
+            if !(-180.0..=180.0).contains(&bottom_left[1]) {
+                return Err(
+                    bottom_left_point[1].as_external_error(BadGeoError::Lng(bottom_left[1]))
+                )?;
+            }
+            if top_right[0] < bottom_left[0] {
+                return Err(bottom_left_point[1].as_external_error(
+                    BadGeoError::BoundingBoxTopIsBelowBottom(top_right[0], bottom_left[0]),
+                ))?;
+            }
+
+            let mut r1 = None;
+            if index.is_geo_filtering_enabled(rtxn)? {
                // Instead of writing a custom `GeoBoundingBox` filter we're simply going to re-use the range
                // filter to create the following filter;
                // `_geo.lat {top_right[0]} TO {bottom_left[0]} AND _geo.lng {top_right[1]} TO {bottom_left[1]}`
@@ -811,19 +852,76 @@ impl<'a> Filter<'a> {
                    )?
                };

-                Ok(selected_lat & selected_lng)
-            } else {
-                Err(top_right_point[0].as_external_error(
+                r1 = Some(selected_lat & selected_lng);
+            }
+
+            let mut r2 = None;
+            if index.is_geojson_filtering_enabled(rtxn)? {
+                let polygon = geo_types::Polygon::new(
+                    geo_types::LineString(vec![
+                        geo_types::Coord { x: top_right[1], y: top_right[0] },
+                        geo_types::Coord { x: bottom_left[1], y: top_right[0] },
+                        geo_types::Coord { x: bottom_left[1], y: bottom_left[0] },
+                        geo_types::Coord { x: top_right[1], y: bottom_left[0] },
+                    ]),
+                    Vec::new(),
+                );
+
+                let result = index.cellulite.in_shape(rtxn, &polygon)?;
+
+                r2 = Some(RoaringBitmap::from_iter(result)); // TODO: Remove once we update roaring in meilisearch
+            }
+
+            match (r1, r2) {
+                (Some(r1), Some(r2)) => Ok(r1 | r2),
+                (Some(r1), None) => Ok(r1),
+                (None, Some(r2)) => Ok(r2),
+                (None, None) => Err(top_right_point[0].as_external_error(
                    FilterError::AttributeNotFilterable {
-                        attribute: RESERVED_GEO_FIELD_NAME,
+                        attribute: &format!(
+                            "{RESERVED_GEO_FIELD_NAME}/{RESERVED_GEOJSON_FIELD_NAME}"
+                        ),
                        filterable_patterns: filtered_matching_patterns(
                            filterable_attribute_rules,
                            &|features| features.is_filterable(),
                        ),
                    },
-                ))?
+                ))?,
+            }
        }
        FilterCondition::GeoPolygon { points } => {
            if !index.is_geojson_filtering_enabled(rtxn)? {
                return Err(points[0][0].as_external_error(
                    FilterError::AttributeNotFilterable {
                        attribute: RESERVED_GEOJSON_FIELD_NAME,
                        filterable_patterns: filtered_matching_patterns(
                            filterable_attribute_rules,
                            &|features| features.is_filterable(),
                        ),
                    },
                ))?;
            }

            let mut coords = Vec::new();
            for [lat_token, lng_token] in points {
                let lat = lat_token.parse_finite_float()?;
                let lng = lng_token.parse_finite_float()?;
                if !(-90.0..=90.0).contains(&lat) {
                    return Err(lat_token.as_external_error(BadGeoError::Lat(lat)))?;
                }
                if !(-180.0..=180.0).contains(&lng) {
                    return Err(lng_token.as_external_error(BadGeoError::Lng(lng)))?;
                }
                coords.push(geo_types::Coord { x: lng, y: lat });
            }

            let polygon = geo_types::Polygon::new(geo_types::LineString(coords), Vec::new());
            let result = index.cellulite.in_shape(rtxn, &polygon)?;

            let result = roaring::RoaringBitmap::from_iter(result); // TODO: Remove once we update roaring in meilisearch

            Ok(result)
        }
    }
}
}
@@ -962,17 +1060,17 @@ mod tests {
        let rtxn = index.read_txn().unwrap();
        let filter = Filter::from_str("_geoRadius(42, 150, 10)").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        snapshot!(error.to_string(), @r###"
-        Attribute `_geo` is not filterable. This index does not have configured filterable attributes.
+        snapshot!(error.to_string(), @r"
+        Attribute `_geo/_geojson` is not filterable. This index does not have configured filterable attributes.
        12:14 _geoRadius(42, 150, 10)
-        "###);
+        ");

        let filter = Filter::from_str("_geoBoundingBox([42, 150], [30, 10])").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        snapshot!(error.to_string(), @r###"
-        Attribute `_geo` is not filterable. This index does not have configured filterable attributes.
+        snapshot!(error.to_string(), @r"
+        Attribute `_geo/_geojson` is not filterable. This index does not have configured filterable attributes.
        18:20 _geoBoundingBox([42, 150], [30, 10])
-        "###);
+        ");

        let filter = Filter::from_str("dog = \"bernese mountain\"").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
@@ -993,19 +1091,19 @@ mod tests {

        let rtxn = index.read_txn().unwrap();

-        let filter = Filter::from_str("_geoRadius(-100, 150, 10)").unwrap().unwrap();
+        let filter = Filter::from_str("_geoRadius(-90, 150, 10)").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        snapshot!(error.to_string(), @r###"
-        Attribute `_geo` is not filterable. Available filterable attribute patterns are: `title`.
-        12:16 _geoRadius(-100, 150, 10)
-        "###);
+        snapshot!(error.to_string(), @r"
+        Attribute `_geo/_geojson` is not filterable. Available filterable attribute patterns are: `title`.
+        12:15 _geoRadius(-90, 150, 10)
+        ");

        let filter = Filter::from_str("_geoBoundingBox([42, 150], [30, 10])").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        snapshot!(error.to_string(), @r###"
-        Attribute `_geo` is not filterable. Available filterable attribute patterns are: `title`.
+        snapshot!(error.to_string(), @r"
+        Attribute `_geo/_geojson` is not filterable. Available filterable attribute patterns are: `title`.
        18:20 _geoBoundingBox([42, 150], [30, 10])
-        "###);
+        ");

        let filter = Filter::from_str("name = 12").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
@@ -1153,38 +1251,34 @@ mod tests {
        // georadius has a bad latitude
        let filter = Filter::from_str("_geoRadius(-100, 150, 10)").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(
-            error.to_string().starts_with(
-                "Bad latitude `-100`. Latitude must be contained between -90 and 90 degrees."
-            ),
-            "{}",
-            error.to_string()
-        );
+        snapshot!(error.to_string(), @r"
+        Bad latitude `-100`. Latitude must be contained between -90 and 90 degrees.
+        12:16 _geoRadius(-100, 150, 10)
+        ");

        // georadius has a bad latitude
        let filter = Filter::from_str("_geoRadius(-90.0000001, 150, 10)").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(error.to_string().contains(
-            "Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees."
-        ));
+        snapshot!(error.to_string(), @r"
+        Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees.
+        12:23 _geoRadius(-90.0000001, 150, 10)
+        ");

        // georadius has a bad longitude
        let filter = Filter::from_str("_geoRadius(-10, 250, 10)").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(
-            error.to_string().contains(
-                "Bad longitude `250`. Longitude must be contained between -180 and 180 degrees."
-            ),
-            "{}",
-            error.to_string(),
-        );
+        snapshot!(error.to_string(), @r"
+        Bad longitude `250`. Longitude must be contained between -180 and 180 degrees. Hint: try using `-110` instead.
+        17:20 _geoRadius(-10, 250, 10)
+        ");

        // georadius has a bad longitude
        let filter = Filter::from_str("_geoRadius(-10, 180.000001, 10)").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(error.to_string().contains(
-            "Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees."
-        ));
+        snapshot!(error.to_string(), @r"
+        Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees. Hint: try using `-179.999999` instead.
+        17:27 _geoRadius(-10, 180.000001, 10)
+        ");
    }

    #[test]
@@ -1207,73 +1301,73 @@ mod tests {
        let filter =
            Filter::from_str("_geoBoundingBox([-90.0000001, 150], [30, 10])").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(
-            error.to_string().starts_with(
-                "Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees."
-            ),
-            "{}",
-            error.to_string()
-        );
+        snapshot!(error.to_string(), @r"
+        Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees.
+        18:29 _geoBoundingBox([-90.0000001, 150], [30, 10])
+        ");

        // geoboundingbox top left coord has a bad latitude
        let filter =
            Filter::from_str("_geoBoundingBox([90.0000001, 150], [30, 10])").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(
-            error.to_string().starts_with(
-                "Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees."
-            ),
-            "{}",
-            error.to_string()
-        );
+        snapshot!(error.to_string(), @r"
+        Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees.
+        18:28 _geoBoundingBox([90.0000001, 150], [30, 10])
+        ");

        // geoboundingbox bottom right coord has a bad latitude
        let filter =
            Filter::from_str("_geoBoundingBox([30, 10], [-90.0000001, 150])").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(error.to_string().contains(
-            "Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees."
-        ));
+        snapshot!(error.to_string(), @r"
+        Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees.
+        28:39 _geoBoundingBox([30, 10], [-90.0000001, 150])
+        ");

        // geoboundingbox bottom right coord has a bad latitude
        let filter =
            Filter::from_str("_geoBoundingBox([30, 10], [90.0000001, 150])").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(error.to_string().contains(
-            "Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees."
-        ));
+        snapshot!(error.to_string(), @r"
+        Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees.
+        28:38 _geoBoundingBox([30, 10], [90.0000001, 150])
+        ");

        // geoboundingbox top left coord has a bad longitude
        let filter =
            Filter::from_str("_geoBoundingBox([-10, 180.000001], [30, 10])").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(error.to_string().contains(
-            "Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees."
-        ));
+        snapshot!(error.to_string(), @r"
+        Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees. Hint: try using `-179.999999` instead.
+        23:33 _geoBoundingBox([-10, 180.000001], [30, 10])
+        ");

        // geoboundingbox top left coord has a bad longitude
        let filter =
            Filter::from_str("_geoBoundingBox([-10, -180.000001], [30, 10])").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(error.to_string().contains(
-            "Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees."
-        ));
+        snapshot!(error.to_string(), @r"
+        Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees. Hint: try using `179.999999` instead.
+        23:34 _geoBoundingBox([-10, -180.000001], [30, 10])
+        ");

        // geoboundingbox bottom right coord has a bad longitude
        let filter =
            Filter::from_str("_geoBoundingBox([30, 10], [-10, -180.000001])").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(error.to_string().contains(
-            "Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees."
-        ));
+        snapshot!(error.to_string(), @r"
+        Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees. Hint: try using `179.999999` instead.
+        33:44 _geoBoundingBox([30, 10], [-10, -180.000001])
+        ");

        // geoboundingbox bottom right coord has a bad longitude
        let filter =
            Filter::from_str("_geoBoundingBox([30, 10], [-10, 180.000001])").unwrap().unwrap();
        let error = filter.evaluate(&rtxn, &index).unwrap_err();
-        assert!(error.to_string().contains(
-            "Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees."
-        ));
+        snapshot!(error.to_string(), @r"
+        Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees. Hint: try using `-179.999999` instead.
+        33:43 _geoBoundingBox([30, 10], [-10, 180.000001])
+        ");
    }

    #[test]

@@ -137,7 +137,7 @@ impl<'a> SearchForFacetValues<'a> {
        let exact_words_fst = self.search_query.index.exact_words(rtxn)?;
        if exact_words_fst.is_some_and(|fst| fst.contains(query)) {
        if fst.contains(query) {
-            self.fetch_original_facets_using_normalized(
+            let _ = self.fetch_original_facets_using_normalized(
                fid,
                query,
                query,

@@ -354,15 +354,14 @@ fn maybe_add_to_results<'ctx, Q: RankingRuleQueryTrait>(
            logger.add_to_results(&candidates);
            valid_docids.extend_from_slice(&candidates);
            valid_scores
-                .extend(std::iter::repeat(ranking_rule_scores.to_owned()).take(candidates.len()));
+                .extend(std::iter::repeat_n(ranking_rule_scores.to_owned(), candidates.len()));
        }
    } else {
        // if we have passed the offset already, add some of the documents (up to the limit)
        let candidates = candidates.iter().take(length - valid_docids.len()).collect::<Vec<u32>>();
        logger.add_to_results(&candidates);
        valid_docids.extend_from_slice(&candidates);
-        valid_scores
-            .extend(std::iter::repeat(ranking_rule_scores.to_owned()).take(candidates.len()));
+        valid_scores.extend(std::iter::repeat_n(ranking_rule_scores.to_owned(), candidates.len()));
    }

    *cur_offset += candidates.len() as usize;

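`iter::repeat_n(value, n)` (stabilized in Rust 1.82) is the direct spelling of `iter::repeat(value).take(n)`; both yield exactly `n` clones of the value, as this standalone sketch shows:

    use std::iter;

    fn main() {
        let a: Vec<u32> = iter::repeat(7).take(3).collect();
        let b: Vec<u32> = iter::repeat_n(7, 3).collect();
        assert_eq!(a, b);
    }
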
@@ -193,6 +193,7 @@ impl<G: RankingRuleGraphTrait> VisitorState<G> {
        visit: VisitFn<'_, G>,
        ctx: &mut VisitorContext<'_, G>,
    ) -> Result<ControlFlow<(), bool>> {
+        #[allow(clippy::manual_contains)] // there is no method contains on mapped interner
        if !ctx
            .all_costs_from_node
            .get(dest_node)
@@ -243,6 +244,8 @@ impl<G: RankingRuleGraphTrait> VisitorState<G> {

        // Checking that from the destination node, there is at least
        // one cost that we can visit that corresponds to our remaining budget.
+
+        #[allow(clippy::manual_contains)] // there is no contains on MappedInterner
        if !ctx
            .all_costs_from_node
            .get(dest_node)

@@ -401,7 +401,7 @@ impl Iterator for SmallBitmapInternalIter<'_> {
            *cur &= *cur - 1;
            Some(idx + *base)
        } else if next.is_empty() {
-            return None;
+            None
        } else {
            *base += 64;
            *cur = next[0];

@@ -79,7 +79,7 @@ impl<'ctx, Query> Sort<'ctx, Query> {
            return Ok(false);
        };

-        Ok(!displayed_fields.iter().any(|&field| field == field_name))
+        Ok(!displayed_fields.contains(&field_name))
    }
}


@@ -48,6 +48,7 @@ impl<'t, 'i> ClearDocuments<'t, 'i> {
            field_id_docid_facet_strings,
            vector_store,
            embedder_category_id: _,
+            cellulite,
            documents,
        } = self.index;

@@ -90,6 +91,7 @@ impl<'t, 'i> ClearDocuments<'t, 'i> {
        field_id_docid_facet_strings.clear(self.wtxn)?;
        // vector
        vector_store.clear(self.wtxn)?;
+        cellulite.clear(self.wtxn)?;

        documents.clear(self.wtxn)?;


@@ -2,6 +2,7 @@ use std::fs::File;
use std::io::{self, BufReader};

use concat_arrays::concat_arrays;
+use geojson::GeoJson;
use serde_json::Value;

use super::helpers::{create_writer, writer_into_reader, GrenadParameters};
@@ -9,7 +10,7 @@ use crate::error::GeoError;
use crate::update::del_add::{DelAdd, KvReaderDelAdd, KvWriterDelAdd};
use crate::update::index_documents::extract_finite_float_from_value;
use crate::update::settings::{InnerIndexSettings, InnerIndexSettingsDiff};
-use crate::{FieldId, InternalError, Result};
+use crate::{DocumentId, FieldId, InternalError, Result, UserError};

/// Extracts the geographical coordinates contained in each document under the `_geo` field.
///

@@ -107,3 +108,72 @@ fn extract_lat_lng(
        None => Ok(None),
    }
}

/// Extracts the GeoJSON shapes contained in each document under the `_geojson` field.
///
/// Returns the generated grenad reader containing the docid as key associated to its GeoJSON.
#[tracing::instrument(level = "trace", skip_all, target = "indexing::extract")]
pub fn extract_geojson<R: io::Read + io::Seek>(
    obkv_documents: grenad::Reader<R>,
    indexer: GrenadParameters,
    primary_key_id: FieldId,
    settings_diff: &InnerIndexSettingsDiff,
) -> Result<grenad::Reader<BufReader<File>>> {
    let mut writer = create_writer(
        indexer.chunk_compression_type,
        indexer.chunk_compression_level,
        tempfile::tempfile()?,
    );

    let mut cursor = obkv_documents.into_cursor()?;
    while let Some((docid_bytes, value)) = cursor.move_on_next()? {
        let obkv = obkv::KvReader::from_slice(value);
        // Since we only need the primary key when we throw an error,
        // we create this getter to fetch it lazily when needed.
        let document_id = || -> Value {
            let reader = KvReaderDelAdd::from_slice(obkv.get(primary_key_id).unwrap());
            let document_id =
                reader.get(DelAdd::Deletion).or(reader.get(DelAdd::Addition)).unwrap();
            serde_json::from_slice(document_id).unwrap()
        };

        // extract the old version
        let del_geojson =
            extract_geojson_field(obkv, &settings_diff.old, DelAdd::Deletion, document_id)?;
        // extract the new version
        let add_geojson =
            extract_geojson_field(obkv, &settings_diff.new, DelAdd::Addition, document_id)?;

        if del_geojson != add_geojson {
            let mut obkv = KvWriterDelAdd::memory();
            if del_geojson.is_some() {
                // We don't need to store the geojson, we'll just delete it by id
                obkv.insert(DelAdd::Deletion, [])?;
            }
            if let Some(geojson) = add_geojson {
                obkv.insert(DelAdd::Addition, geojson.to_string().as_bytes())?;
            }
            let bytes = obkv.into_inner()?;
            writer.insert(&docid_bytes[0..std::mem::size_of::<DocumentId>()], bytes)?;
        }
    }

    writer_into_reader(writer)
}

fn extract_geojson_field(
    obkv: &obkv::KvReader<FieldId>,
    settings: &InnerIndexSettings,
    deladd: DelAdd,
    _document_id: impl Fn() -> Value,
) -> Result<Option<GeoJson>> {
    match settings.geojson_fid {
        Some(fid) if settings.filterable_attributes_rules.iter().any(|rule| rule.has_geojson()) => {
            let value = obkv.get(fid).map(KvReaderDelAdd::from_slice).and_then(|r| r.get(deladd));
            Ok(value
                .map(|v| GeoJson::from_reader(v).map_err(UserError::MalformedGeojson))
                .transpose()?)
        }
        _ => Ok(None),
    }
}

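A quick sketch of the parsing step `extract_geojson_field` relies on (assuming the `geojson` crate pinned in the Cargo.toml above):

    use geojson::GeoJson;

    fn main() {
        let raw = br#"{ "type": "Point", "coordinates": [2.35, 48.86] }"#;
        // GeoJson::from_reader accepts any io::Read; malformed input is what
        // surfaces as the MalformedGeojson user error in the extractor above.
        let parsed = GeoJson::from_reader(&raw[..]).expect("malformed geojson");
        println!("{parsed}");
    }
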
@@ -31,6 +31,7 @@ use self::extract_word_position_docids::extract_word_position_docids;
use super::helpers::{as_cloneable_grenad, CursorClonableMmap, GrenadParameters};
use super::{helpers, TypedChunk};
use crate::progress::EmbedderStats;
+use crate::update::index_documents::extract::extract_geo_points::extract_geojson;
use crate::update::index_documents::extract::extract_vector_points::extract_embeddings_from_fragments;
use crate::update::settings::InnerIndexSettingsDiff;
use crate::vector::db::EmbedderInfo;
@@ -62,6 +63,7 @@ pub(crate) fn data_from_obkv_documents(
        original_documents_chunk,
        indexer,
        lmdb_writer_sx.clone(),
+        primary_key_id,
        settings_diff.clone(),
        embedder_info.clone(),
        possible_embedding_mistakes.clone(),
@@ -228,10 +230,12 @@ pub fn request_threads() -> &'static ThreadPoolNoAbort {

/// Extract chunked data and send it into lmdb_writer_sx sender:
/// - documents
+#[allow(clippy::too_many_arguments)]
fn send_original_documents_data(
    original_documents_chunk: Result<grenad::Reader<BufReader<File>>>,
    indexer: GrenadParameters,
    lmdb_writer_sx: Sender<Result<TypedChunk>>,
+    primary_key_id: FieldId,
    settings_diff: Arc<InnerIndexSettingsDiff>,
    embedder_info: Arc<Vec<(String, EmbedderInfo)>>,
    possible_embedding_mistakes: Arc<PossibleEmbeddingMistakes>,
@@ -240,6 +244,20 @@ fn send_original_documents_data(
    let original_documents_chunk =
        original_documents_chunk.and_then(|c| unsafe { as_cloneable_grenad(&c) })?;

+    if settings_diff.reindex_geojson() {
+        let documents_chunk_cloned = original_documents_chunk.clone();
+        let lmdb_writer_sx_cloned = lmdb_writer_sx.clone();
+        let settings_diff = settings_diff.clone();
+        rayon::spawn(move || {
+            let result =
+                extract_geojson(documents_chunk_cloned, indexer, primary_key_id, &settings_diff);
+            let _ = match result {
+                Ok(geojson) => lmdb_writer_sx_cloned.send(Ok(TypedChunk::GeoJson(geojson))),
+                Err(error) => lmdb_writer_sx_cloned.send(Err(error)),
+            };
+        });
+    }
+
    let index_vectors = (settings_diff.reindex_vectors() || !settings_diff.settings_update_only())
        // no point in indexing vectors without embedders
        && (!settings_diff.new.runtime_embedders.inner_as_ref().is_empty());

@@ -523,7 +523,7 @@ where
            .is_some_and(|conf| conf.is_quantized);
        let is_quantizing = embedder_config.is_some_and(|action| action.is_being_quantized);

-        pool.install(|| {
+        pool.install(|| -> Result<_> {
            let mut writer =
                VectorStore::new(backend, vector_store, embedder_index, was_quantized);
            writer.build_and_quantize(
@@ -541,6 +541,8 @@ where
        .map_err(InternalError::from)??;
    }

+    self.index.cellulite.build(self.wtxn, &self.should_abort, &Progress::default())?;
+
    self.execute_prefix_databases(
        word_docids.map(MergerBuilder::build),
        exact_word_docids.map(MergerBuilder::build),

Some files were not shown because too many files have changed in this diff.