Mirror of https://github.com/meilisearch/meilisearch.git
Synced 2025-11-28 17:00:32 +00:00

Compare commits: v1.2.1 ... prototype-

59 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e563676ce5 | |
| | 513649a955 | |
| | 61265e2830 | |
| | f5e77419f4 | |
| | fd82ea828c | |
| | 7daa9049d8 | |
| | 42075d3a12 | |
| | efdd5b3551 | |
| | 1ead3d87b5 | |
| | bb6cba8c26 | |
| | 80e038be6c | |
| | 5883e8b621 | |
| | fefa6ae92c | |
| | 46554f764d | |
| | 0709c02307 | |
| | 5e64294d21 | |
| | d5edc38a34 | |
| | 01d2ee5cc1 | |
| | e0c4682758 | |
| | d9b4b39922 | |
| | 4829348d6e | |
| | 047d22fcb1 | |
| | a2a3b8c973 | |
| | 9f37b61666 | |
| | c15c076da9 | |
| | 9dcf1da59d | |
| | 8628a0c856 | |
| | c1e3cc04b0 | |
| | d96d8bb0dd | |
| | 4a3405afec | |
| | 3cfd653db1 | |
| | b6b6a80b76 | |
| | f3e2f79290 | |
| | f517274d1f | |
| | 3f41bc642a | |
| | 672abdb341 | |
| | a13ed4d0b0 | |
| | 4cc2988482 | |
| | 26c7e31f25 | |
| | b2dee07b5e | |
| | da04edff8c | |
| | 85a80f4f4c | |
| | 1213ec7164 | |
| | 51dce9e9d1 | |
| | c9b65677bf | |
| | 35d5556f1f | |
| | c433bdd1cd | |
| | 2db09725f8 | |
| | fdb23132d4 | |
| | 11b95284cd | |
| | 1b601f70c6 | |
| | 8185731bbf | |
| | 840727d76f | |
| | ead07d0b9d | |
| | 44f231d41e | |
| | 3c5d1c93de | |
| | 57d53de402 | |
| | 918ce1dd67 | |
| | 8095f21999 | |
@@ -2,4 +2,3 @@ target
Dockerfile
.dockerignore
.gitignore
**/.git
2  .github/workflows/publish-apt-brew-pkg.yml  (vendored)

@@ -35,7 +35,7 @@ jobs:
       - name: Build deb package
         run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
-        uses: svenstaro/upload-release-action@2.5.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/debian/meilisearch.deb
8  .github/workflows/publish-binaries.yml  (vendored)

@@ -54,7 +54,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.5.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/meilisearch
@@ -87,7 +87,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.5.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/${{ matrix.artifact_name }}
@@ -121,7 +121,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.5.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
@@ -183,7 +183,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.5.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
7  .github/workflows/publish-docker-images.yml  (vendored)

@@ -58,13 +58,9 @@ jobs:

       - name: Set up QEMU
         uses: docker/setup-qemu-action@v2
-        with:
-          platforms: linux/amd64,linux/arm64

       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
-        with:
-          platforms: linux/amd64,linux/arm64

       - name: Login to Docker Hub
         uses: docker/login-action@v2
@@ -92,13 +88,10 @@ jobs:
           push: true
           platforms: linux/amd64,linux/arm64
           tags: ${{ steps.meta.outputs.tags }}
-          builder: ${{ steps.buildx.outputs.name }}
           build-args: |
             COMMIT_SHA=${{ github.sha }}
             COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
             GIT_TAG=${{ github.ref_name }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max

       # /!\ Don't touch this without checking with Cloud team
       - name: Send CI information to Cloud team
42  .github/workflows/sdks-tests.yml  (vendored)

@@ -3,6 +3,11 @@ name: SDKs tests

 on:
   workflow_dispatch:
+    inputs:
+      docker_image:
+        description: 'The Meilisearch Docker image used'
+        required: false
+        default: nightly
   schedule:
     - cron: "0 6 * * MON" # Every Monday at 6:00AM

@@ -11,13 +16,28 @@ env:
   MEILI_NO_ANALYTICS: 'true'

 jobs:
+  define-docker-image:
+    runs-on: ubuntu-latest
+    outputs:
+      docker-image: ${{ steps.define-image.outputs.docker-image }}
+    steps:
+      - uses: actions/checkout@v3
+      - name: Define the Docker image we need to use
+        id: define-image
+        run: |
+          event=${{ github.event.action }}
+          echo "docker-image=nightly" >> $GITHUB_OUTPUT
+          if [[ $event == 'workflow_dispatch' ]]; then
+            echo "docker-image=${{ github.event.inputs.docker_image }}" >> $GITHUB_OUTPUT
+          fi
+
   meilisearch-js-tests:
+    needs: define-docker-image
     name: JS SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -47,11 +67,12 @@ jobs:
         run: yarn test:env:browser

   instant-meilisearch-tests:
+    needs: define-docker-image
     name: instant-meilisearch tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -73,11 +94,12 @@ jobs:
         run: yarn build

   meilisearch-php-tests:
+    needs: define-docker-image
     name: PHP SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -103,11 +125,12 @@ jobs:
           composer remove --dev guzzlehttp/guzzle http-interop/http-factory-guzzle

   meilisearch-python-tests:
+    needs: define-docker-image
     name: Python SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -127,11 +150,12 @@ jobs:
         run: pipenv run pytest

   meilisearch-go-tests:
+    needs: define-docker-image
     name: Go SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -139,7 +163,7 @@ jobs:
         - '7700:7700'
     steps:
       - name: Set up Go
-        uses: actions/setup-go@v3
+        uses: actions/setup-go@v4
         with:
           go-version: stable
       - uses: actions/checkout@v3
@@ -156,11 +180,12 @@ jobs:
         run: go test -v ./...

   meilisearch-ruby-tests:
+    needs: define-docker-image
     name: Ruby SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -180,11 +205,12 @@ jobs:
         run: bundle exec rspec

   meilisearch-rust-tests:
+    needs: define-docker-image
     name: Rust SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
33  .github/workflows/test-suite.yml  (vendored)

@@ -43,7 +43,7 @@ jobs:
           toolchain: nightly
           override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.1
+        uses: Swatinem/rust-cache@v2.4.0
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -65,7 +65,7 @@ jobs:
     steps:
      - uses: actions/checkout@v3
      - name: Cache dependencies
-       uses: Swatinem/rust-cache@v2.2.1
+       uses: Swatinem/rust-cache@v2.4.0
      - name: Run cargo check without any default features
        uses: actions-rs/cargo@v1
        with:
@@ -105,6 +105,29 @@ jobs:
          command: test
          args: --workspace --locked --release --all-features

+  test-disabled-tokenization:
+    name: Test disabled tokenization
+    runs-on: ubuntu-latest
+    container:
+      image: ubuntu:18.04
+    if: github.event_name == 'schedule'
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install needed dependencies
+        run: |
+          apt-get update
+          apt-get install --assume-yes build-essential curl
+      - uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
+      - name: Run cargo tree without default features and check lindera is not present
+        run: |
+          cargo tree -f '{p} {f}' -e normal --no-default-features | grep lindera -vqz
+      - name: Run cargo tree with default features and check lindera is pressent
+        run: |
+          cargo tree -f '{p} {f}' -e normal | grep lindera -qz
+
   # We run tests in debug also, to make sure that the debug_assertions are hit
   test-debug:
     name: Run tests in debug
@@ -123,7 +146,7 @@ jobs:
           toolchain: stable
           override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.1
+        uses: Swatinem/rust-cache@v2.4.0
       - name: Run tests in debug
         uses: actions-rs/cargo@v1
         with:
@@ -142,7 +165,7 @@ jobs:
           override: true
           components: clippy
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.1
+        uses: Swatinem/rust-cache@v2.4.0
       - name: Run cargo clippy
         uses: actions-rs/cargo@v1
         with:
@@ -161,7 +184,7 @@ jobs:
           override: true
           components: rustfmt
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.1
+        uses: Swatinem/rust-cache@v2.4.0
       - name: Run cargo fmt
         # Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
         # Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
26  Cargo.lock  (generated)

@@ -463,7 +463,7 @@ checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf"

 [[package]]
 name = "benchmarks"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "anyhow",
  "bytes",
@@ -1209,7 +1209,7 @@ dependencies = [

 [[package]]
 name = "dump"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "anyhow",
  "big_s",
@@ -1428,7 +1428,7 @@ dependencies = [

 [[package]]
 name = "file-store"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "faux",
  "tempfile",
@@ -1450,7 +1450,7 @@ dependencies = [

 [[package]]
 name = "filter-parser"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "insta",
  "nom",
@@ -1476,7 +1476,7 @@ dependencies = [

 [[package]]
 name = "flatten-serde-json"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "criterion",
  "serde_json",
@@ -1959,7 +1959,7 @@ dependencies = [

 [[package]]
 name = "index-scheduler"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "anyhow",
  "big_s",
@@ -2113,7 +2113,7 @@ dependencies = [

 [[package]]
 name = "json-depth-checker"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "criterion",
  "serde_json",
@@ -2539,7 +2539,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"

 [[package]]
 name = "meili-snap"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "insta",
  "md5",
@@ -2548,7 +2548,7 @@ dependencies = [

 [[package]]
 name = "meilisearch"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "actix-cors",
  "actix-http",
@@ -2636,7 +2636,7 @@ dependencies = [

 [[package]]
 name = "meilisearch-auth"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "base64 0.21.0",
  "enum-iterator",
@@ -2655,7 +2655,7 @@ dependencies = [

 [[package]]
 name = "meilisearch-types"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "actix-web",
  "anyhow",
@@ -2709,7 +2709,7 @@ dependencies = [

 [[package]]
 name = "milli"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "big_s",
  "bimap",
@@ -3064,7 +3064,7 @@ checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"

 [[package]]
 name = "permissive-json-pointer"
-version = "1.2.1"
+version = "1.2.0"
 dependencies = [
  "big_s",
  "serde_json",
@@ -17,7 +17,7 @@ members = [
 ]

 [workspace.package]
-version = "1.2.1"
+version = "1.2.0"
 authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"
@@ -1,4 +1,3 @@
-# syntax=docker/dockerfile:1.4
 # Compile
 FROM rust:alpine3.16 AS compiler

@@ -12,7 +11,7 @@ ARG GIT_TAG
 ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG}
 ENV RUSTFLAGS="-C target-feature=-crt-static"

-COPY --link . .
+COPY . .
 RUN set -eux; \
     apkArch="$(apk --print-arch)"; \
     if [ "$apkArch" = "aarch64" ]; then \
@@ -31,7 +30,7 @@ RUN apk update --quiet \

 # add meilisearch to the `/bin` so you can run it from anywhere and it's easy
 # to find.
-COPY --from=compiler --link /meilisearch/target/release/meilisearch /bin/meilisearch
+COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch
 # To stay compatible with the older version of the container (pre v0.27.0) we're
 # going to symlink the meilisearch binary in the path to `/meilisearch`
 RUN ln -s /bin/meilisearch /meilisearch
1376  assets/grafana-dashboard.json  (new file)

File diff suppressed because it is too large

19  assets/prometheus-basic-scraper.yml  (new file)
@@ -0,0 +1,19 @@
+global:
+  scrape_interval: 15s # By default, scrape targets every 15 seconds.
+
+  # Attach these labels to any time series or alerts when communicating with
+  # external systems (federation, remote storage, Alertmanager).
+  external_labels:
+    monitor: 'codelab-monitor'
+
+# A scrape configuration containing exactly one endpoint to scrape:
+# Here it's Prometheus itself.
+scrape_configs:
+  # The job name is added as a label `job=<job_name>` to any timeseries scraped from this config.
+  - job_name: 'meilisearch'
+
+    # Override the global default and scrape targets from this job every 5 seconds.
+    scrape_interval: 5s
+
+    static_configs:
+      - targets: ['localhost:7700']
54  config.toml

@@ -1,131 +1,131 @@
 # This file shows the default configuration of Meilisearch.
 # All variables are defined here: https://www.meilisearch.com/docs/learn/configuration/instance_options#environment-variables

+db_path = "./data.ms"
 # Designates the location where database files will be created and retrieved.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#database-path
-db_path = "./data.ms"

+env = "development"
 # Configures the instance's environment. Value must be either `production` or `development`.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#environment
-env = "development"

+http_addr = "localhost:7700"
 # The address on which the HTTP server will listen.
-http_addr = "localhost:7700"

+# master_key = "YOUR_MASTER_KEY_VALUE"
 # Sets the instance's master key, automatically protecting all routes except GET /health.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#master-key
-# master_key = "YOUR_MASTER_KEY_VALUE"

+# no_analytics = true
 # Deactivates Meilisearch's built-in telemetry when provided.
 # Meilisearch automatically collects data from all instances that do not opt out using this flag.
 # All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted at any time.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#disable-analytics
-# no_analytics = true

+http_payload_size_limit = "100 MB"
 # Sets the maximum size of accepted payloads.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#payload-limit-size
-http_payload_size_limit = "100 MB"

+log_level = "INFO"
 # Defines how much detail should be present in Meilisearch's logs.
 # Meilisearch currently supports six log levels, listed in order of increasing verbosity: `OFF`, `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#log-level
-log_level = "INFO"

+# max_indexing_memory = "2 GiB"
 # Sets the maximum amount of RAM Meilisearch can use when indexing.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#max-indexing-memory
-# max_indexing_memory = "2 GiB"

+# max_indexing_threads = 4
 # Sets the maximum number of threads Meilisearch can use during indexing.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#max-indexing-threads
-# max_indexing_threads = 4

 #############
 ### DUMPS ###
 #############

+dump_dir = "dumps/"
 # Sets the directory where Meilisearch will create dump files.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#dump-directory
-dump_dir = "dumps/"

+# import_dump = "./path/to/my/file.dump"
 # Imports the dump file located at the specified path. Path must point to a .dump file.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#import-dump
-# import_dump = "./path/to/my/file.dump"

+ignore_missing_dump = false
 # Prevents Meilisearch from throwing an error when `import_dump` does not point to a valid dump file.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-missing-dump
-ignore_missing_dump = false

+ignore_dump_if_db_exists = false
 # Prevents a Meilisearch instance with an existing database from throwing an error when using `import_dump`.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-dump-if-db-exists
-ignore_dump_if_db_exists = false


 #################
 ### SNAPSHOTS ###
 #################

+schedule_snapshot = false
 # Enables scheduled snapshots when true, disable when false (the default).
 # If the value is given as an integer, then enables the scheduled snapshot with the passed value as the interval
 # between each snapshot, in seconds.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#schedule-snapshot-creation
-schedule_snapshot = false

+snapshot_dir = "snapshots/"
 # Sets the directory where Meilisearch will store snapshots.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#snapshot-destination
-snapshot_dir = "snapshots/"

+# import_snapshot = "./path/to/my/snapshot"
 # Launches Meilisearch after importing a previously-generated snapshot at the given filepath.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#import-snapshot
-# import_snapshot = "./path/to/my/snapshot"

+ignore_missing_snapshot = false
 # Prevents a Meilisearch instance from throwing an error when `import_snapshot` does not point to a valid snapshot file.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-missing-snapshot
-ignore_missing_snapshot = false

+ignore_snapshot_if_db_exists = false
 # Prevents a Meilisearch instance with an existing database from throwing an error when using `import_snapshot`.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-snapshot-if-db-exists
-ignore_snapshot_if_db_exists = false


 ###########
 ### SSL ###
 ###########

+# ssl_auth_path = "./path/to/root"
 # Enables client authentication in the specified path.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-authentication-path
-# ssl_auth_path = "./path/to/root"

+# ssl_cert_path = "./path/to/certfile"
 # Sets the server's SSL certificates.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-certificates-path
-# ssl_cert_path = "./path/to/certfile"

+# ssl_key_path = "./path/to/private-key"
 # Sets the server's SSL key files.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-key-path
-# ssl_key_path = "./path/to/private-key"

+# ssl_ocsp_path = "./path/to/ocsp-file"
 # Sets the server's OCSP file.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-ocsp-path
-# ssl_ocsp_path = "./path/to/ocsp-file"

+ssl_require_auth = false
 # Makes SSL authentication mandatory.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-require-auth
-ssl_require_auth = false

+ssl_resumption = false
 # Activates SSL session resumption.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-resumption
-ssl_resumption = false

+ssl_tickets = false
 # Activates SSL tickets.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-tickets
-ssl_tickets = false

 #############################
 ### Experimental features ###
 #############################

+experimental_enable_metrics = false
 # Experimental metrics feature. For more information, see: <https://github.com/meilisearch/meilisearch/discussions/3518>
 # Enables the Prometheus metrics on the `GET /metrics` endpoint.
-experimental_enable_metrics = false

+experimental_reduce_indexing_memory_usage = false
 # Experimental RAM reduction during indexing, do not use in production, see: <https://github.com/meilisearch/product/discussions/652>
-experimental_reduce_indexing_memory_usage = false
File diff suppressed because it is too large
@@ -67,6 +67,10 @@ pub(crate) enum Batch {
         op: IndexOperation,
         must_create_index: bool,
     },
+    IndexDocumentDeletionByFilter {
+        index_uid: String,
+        task: Task,
+    },
     IndexCreation {
         index_uid: String,
         primary_key: Option<String>,
@@ -110,10 +114,6 @@ pub(crate) enum IndexOperation {
         documents: Vec<Vec<String>>,
         tasks: Vec<Task>,
     },
-    IndexDocumentDeletionByFilter {
-        index_uid: String,
-        task: Task,
-    },
     DocumentClear {
         index_uid: String,
         tasks: Vec<Task>,
@@ -155,6 +155,7 @@ impl Batch {
             | Batch::TaskDeletion(task)
             | Batch::Dump(task)
             | Batch::IndexCreation { task, .. }
+            | Batch::IndexDocumentDeletionByFilter { task, .. }
             | Batch::IndexUpdate { task, .. } => vec![task.uid],
             Batch::SnapshotCreation(tasks) | Batch::IndexDeletion { tasks, .. } => {
                 tasks.iter().map(|task| task.uid).collect()
@@ -166,7 +167,6 @@ impl Batch {
                 | IndexOperation::DocumentClear { tasks, .. } => {
                     tasks.iter().map(|task| task.uid).collect()
                 }
-                IndexOperation::IndexDocumentDeletionByFilter { task, .. } => vec![task.uid],
                 IndexOperation::SettingsAndDocumentOperation {
                     document_import_tasks: tasks,
                     settings_tasks: other,
@@ -194,7 +194,8 @@ impl Batch {
             IndexOperation { op, .. } => Some(op.index_uid()),
             IndexCreation { index_uid, .. }
             | IndexUpdate { index_uid, .. }
-            | IndexDeletion { index_uid, .. } => Some(index_uid),
+            | IndexDeletion { index_uid, .. }
+            | IndexDocumentDeletionByFilter { index_uid, .. } => Some(index_uid),
         }
     }
 }
@@ -204,7 +205,6 @@ impl IndexOperation {
         match self {
             IndexOperation::DocumentOperation { index_uid, .. }
             | IndexOperation::DocumentDeletion { index_uid, .. }
-            | IndexOperation::IndexDocumentDeletionByFilter { index_uid, .. }
             | IndexOperation::DocumentClear { index_uid, .. }
             | IndexOperation::Settings { index_uid, .. }
             | IndexOperation::DocumentClearAndSetting { index_uid, .. }
@@ -239,12 +239,9 @@ impl IndexScheduler {
         let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
         match &task.kind {
             KindWithContent::DocumentDeletionByFilter { index_uid, .. } => {
-                Ok(Some(Batch::IndexOperation {
-                    op: IndexOperation::IndexDocumentDeletionByFilter {
-                        index_uid: index_uid.clone(),
-                        task,
-                    },
-                    must_create_index: false,
+                Ok(Some(Batch::IndexDocumentDeletionByFilter {
+                    index_uid: index_uid.clone(),
+                    task,
                 }))
             }
             _ => unreachable!(),
@@ -890,6 +887,51 @@ impl IndexScheduler {

                 Ok(tasks)
             }
+            Batch::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
+                let (index_uid, filter) =
+                    if let KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr } =
+                        &task.kind
+                    {
+                        (index_uid, filter_expr)
+                    } else {
+                        unreachable!()
+                    };
+                let index = {
+                    let rtxn = self.env.read_txn()?;
+                    self.index_mapper.index(&rtxn, index_uid)?
+                };
+                let deleted_documents = delete_document_by_filter(filter, index);
+                let original_filter = if let Some(Details::DocumentDeletionByFilter {
+                    original_filter,
+                    deleted_documents: _,
+                }) = task.details
+                {
+                    original_filter
+                } else {
+                    // In the case of a `documentDeleteByFilter` the details MUST be set
+                    unreachable!();
+                };
+
+                match deleted_documents {
+                    Ok(deleted_documents) => {
+                        task.status = Status::Succeeded;
+                        task.details = Some(Details::DocumentDeletionByFilter {
+                            original_filter,
+                            deleted_documents: Some(deleted_documents),
+                        });
+                    }
+                    Err(e) => {
+                        task.status = Status::Failed;
+                        task.details = Some(Details::DocumentDeletionByFilter {
+                            original_filter,
+                            deleted_documents: Some(0),
+                        });
+                        task.error = Some(e.into());
+                    }
+                }
+
+                Ok(vec![task])
+            }
             Batch::IndexCreation { index_uid, primary_key, task } => {
                 let wtxn = self.env.write_txn()?;
                 if self.index_mapper.exists(&wtxn, &index_uid)? {
@@ -1246,47 +1288,6 @@ impl IndexScheduler {

                 Ok(tasks)
             }
-            IndexOperation::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
-                let filter =
-                    if let KindWithContent::DocumentDeletionByFilter { filter_expr, .. } =
-                        &task.kind
-                    {
-                        filter_expr
-                    } else {
-                        unreachable!()
-                    };
-                let deleted_documents = delete_document_by_filter(index_wtxn, filter, index);
-                let original_filter = if let Some(Details::DocumentDeletionByFilter {
-                    original_filter,
-                    deleted_documents: _,
-                }) = task.details
-                {
-                    original_filter
-                } else {
-                    // In the case of a `documentDeleteByFilter` the details MUST be set
-                    unreachable!();
-                };
-
-                match deleted_documents {
-                    Ok(deleted_documents) => {
-                        task.status = Status::Succeeded;
-                        task.details = Some(Details::DocumentDeletionByFilter {
-                            original_filter,
-                            deleted_documents: Some(deleted_documents),
-                        });
-                    }
-                    Err(e) => {
-                        task.status = Status::Failed;
-                        task.details = Some(Details::DocumentDeletionByFilter {
-                            original_filter,
-                            deleted_documents: Some(0),
-                        });
-                        task.error = Some(e.into());
-                    }
-                }
-
-                Ok(vec![task])
-            }
             IndexOperation::Settings { index_uid: _, settings, mut tasks } => {
                 let indexer_config = self.index_mapper.indexer_config();
                 let mut builder = milli::update::Settings::new(index_wtxn, index, indexer_config);
@@ -1486,22 +1487,23 @@ impl IndexScheduler {
         }
     }

-fn delete_document_by_filter<'a>(
-    wtxn: &mut RwTxn<'a, '_>,
-    filter: &serde_json::Value,
-    index: &'a Index,
-) -> Result<u64> {
+fn delete_document_by_filter(filter: &serde_json::Value, index: Index) -> Result<u64> {
     let filter = Filter::from_json(filter)?;
     Ok(if let Some(filter) = filter {
-        let candidates = filter.evaluate(wtxn, index).map_err(|err| match err {
+        let mut wtxn = index.write_txn()?;
+
+        let candidates = filter.evaluate(&wtxn, &index).map_err(|err| match err {
            milli::Error::UserError(milli::UserError::InvalidFilter(_)) => {
                Error::from(err).with_custom_error_code(Code::InvalidDocumentFilter)
            }
            e => e.into(),
        })?;
-        let mut delete_operation = DeleteDocuments::new(wtxn, index)?;
+        let mut delete_operation = DeleteDocuments::new(&mut wtxn, &index)?;
         delete_operation.delete_documents(&candidates);
-        delete_operation.execute().map(|result| result.deleted_documents)?
+        let deleted_documents =
+            delete_operation.execute().map(|result| result.deleted_documents)?;
+        wtxn.commit()?;
+        deleted_documents
     } else {
         0
     })
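The last hunk changes `delete_document_by_filter` from borrowing the batch's write transaction to owning the whole transaction lifecycle: it now takes the `Index` by value, opens its own `write_txn`, and commits before returning. A minimal sketch of that ownership pattern with stand-in types (`Store` and `Txn` are hypothetical placeholders, not meilisearch's actual API):

```rust
struct Store;
struct Txn;

impl Store {
    fn write_txn(&self) -> Result<Txn, String> {
        Ok(Txn)
    }
}

impl Txn {
    fn commit(self) -> Result<(), String> {
        Ok(())
    }
}

// Before: the helper borrowed a transaction the caller had opened, so the
// deletion ran inside the shared batch transaction.
fn delete_with_borrowed_txn(_txn: &mut Txn, _store: &Store) -> Result<u64, String> {
    Ok(0)
}

// After: the helper takes the store by value and opens, uses, and commits
// its own transaction, as the new `delete_document_by_filter` does.
fn delete_with_owned_txn(store: Store) -> Result<u64, String> {
    let txn = store.write_txn()?;
    let deleted = 0; // evaluate the filter and delete the candidates here
    txn.commit()?;
    Ok(deleted)
}

fn main() {
    let mut txn = Store.write_txn().unwrap();
    let _ = delete_with_borrowed_txn(&mut txn, &Store);
    let _ = delete_with_owned_txn(Store);
}
```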
@@ -223,9 +223,7 @@ impl IndexMap {
         enable_mdb_writemap: bool,
         map_size_growth: usize,
     ) {
-        let Some(index) = self.available.remove(uuid) else {
-            return;
-        };
+        let Some(index) = self.available.remove(uuid) else { return; };
         self.close(*uuid, index, enable_mdb_writemap, map_size_growth);
     }
@@ -90,8 +90,17 @@ pub enum IndexStatus {
 pub struct IndexStats {
     /// Number of documents in the index.
     pub number_of_documents: u64,
-    /// Size of the index' DB, in bytes.
+    /// Size taken up by the index' DB, in bytes.
+    ///
+    /// This includes the size taken by both the used and free pages of the DB, and as the free pages
+    /// are not returned to the disk after a deletion, this number is typically larger than
+    /// `used_database_size` that only includes the size of the used pages.
     pub database_size: u64,
+    /// Size taken by the used pages of the index' DB, in bytes.
+    ///
+    /// As the DB backend does not return to the disk the pages that are not currently used by the DB,
+    /// this value is typically smaller than `database_size`.
+    pub used_database_size: u64,
     /// Association of every field name with the number of times it occurs in the documents.
     pub field_distribution: FieldDistribution,
     /// Creation date of the index.
@@ -107,10 +116,10 @@ impl IndexStats {
     ///
     /// - rtxn: a RO transaction for the index, obtained from `Index::read_txn()`.
     pub fn new(index: &Index, rtxn: &RoTxn) -> Result<Self> {
-        let database_size = index.on_disk_size()?;
         Ok(IndexStats {
             number_of_documents: index.number_of_documents(rtxn)?,
-            database_size,
+            database_size: index.on_disk_size()?,
+            used_database_size: index.used_size()?,
             field_distribution: index.field_distribution(rtxn)?,
             created_at: index.created_at(rtxn)?,
             updated_at: index.updated_at(rtxn)?,
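Per the new doc comments, `database_size` counts both used and free pages (the backend never returns freed pages to the disk), while `used_database_size` counts used pages only, so the first is always at least the second. A small sketch of that relationship with hypothetical page counts:

```rust
fn main() {
    let page_size: u64 = 4096; // hypothetical page size
    let used_pages: u64 = 1_000;
    let free_pages: u64 = 250; // left behind by deletions, still in the file

    let used_database_size = used_pages * page_size;
    let database_size = (used_pages + free_pages) * page_size;

    assert!(used_database_size <= database_size);
    println!("on disk: {database_size} B, actually used: {used_database_size} B");
}
```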
@@ -31,7 +31,7 @@ mod uuid_codec;
 pub type Result<T> = std::result::Result<T, Error>;
 pub type TaskId = u32;

-use std::collections::HashMap;
+use std::collections::{BTreeMap, HashMap};
 use std::ops::{Bound, RangeBounds};
 use std::path::{Path, PathBuf};
 use std::sync::atomic::AtomicBool;
@@ -573,10 +573,16 @@ impl IndexScheduler {
         &self.index_mapper.indexer_config
     }

+    /// Return the real database size (i.e.: The size **with** the free pages)
     pub fn size(&self) -> Result<u64> {
         Ok(self.env.real_disk_size()?)
     }

+    /// Return the used database size (i.e.: The size **without** the free pages)
+    pub fn used_size(&self) -> Result<u64> {
+        Ok(self.env.non_free_pages_size()?)
+    }
+
     /// Return the index corresponding to the name.
     ///
     /// * If the index wasn't opened before, the index will be opened.
@@ -756,6 +762,38 @@ impl IndexScheduler {
         Ok(tasks)
     }

+    /// The returned structure contains:
+    /// 1. The name of the property being observed can be `statuses`, `types`, or `indexes`.
+    /// 2. The name of the specific data related to the property can be `enqueued` for the `statuses`, `settingsUpdate` for the `types`, or the name of the index for the `indexes`, for example.
+    /// 3. The number of times the properties appeared.
+    pub fn get_stats(&self) -> Result<BTreeMap<String, BTreeMap<String, u64>>> {
+        let rtxn = self.read_txn()?;
+
+        let mut res = BTreeMap::new();
+
+        res.insert(
+            "statuses".to_string(),
+            enum_iterator::all::<Status>()
+                .map(|s| Ok((s.to_string(), self.get_status(&rtxn, s)?.len())))
+                .collect::<Result<BTreeMap<String, u64>>>()?,
+        );
+        res.insert(
+            "types".to_string(),
+            enum_iterator::all::<Kind>()
+                .map(|s| Ok((s.to_string(), self.get_kind(&rtxn, s)?.len())))
+                .collect::<Result<BTreeMap<String, u64>>>()?,
+        );
+        res.insert(
+            "indexes".to_string(),
+            self.index_tasks
+                .iter(&rtxn)?
+                .map(|res| Ok(res.map(|(name, bitmap)| (name.to_string(), bitmap.len()))?))
+                .collect::<Result<BTreeMap<String, u64>>>()?,
+        );
+
+        Ok(res)
+    }
+
     /// Return true iff there is at least one task associated with this index
     /// that is processing.
     pub fn is_index_processing(&self, index: &str) -> Result<bool> {
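The doc comment on `get_stats` above describes a two-level map: property name, then specific value, then count. A hypothetical sketch of that shape and of how the `/metrics` route (later in this diff) walks it to feed the `meilisearch_nb_tasks` gauge; the entries below (`enqueued`, `settingsUpdate`, `movies`, and the counts) are made-up example data, not output from a real instance:

```rust
use std::collections::BTreeMap;

fn main() {
    // property -> (specific value -> number of tasks)
    let mut stats: BTreeMap<String, BTreeMap<String, u64>> = BTreeMap::new();
    stats.insert(
        "statuses".into(),
        BTreeMap::from([("enqueued".into(), 2), ("succeeded".into(), 14)]),
    );
    stats.insert(
        "types".into(),
        BTreeMap::from([("settingsUpdate".into(), 3), ("documentAdditionOrUpdate".into(), 13)]),
    );
    stats.insert("indexes".into(), BTreeMap::from([("movies".into(), 16)]));

    // The metrics route iterates the same way, setting one gauge per (kind, value) pair.
    for (kind, values) in &stats {
        for (value, count) in values {
            println!("meilisearch_nb_tasks{{kind=\"{kind}\", value=\"{value}\"}} {count}");
        }
    }
}
```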
@@ -45,6 +45,11 @@ impl AuthController {
         self.store.size()
     }

+    /// Return the used size of the `AuthController` database in bytes.
+    pub fn used_size(&self) -> Result<u64> {
+        self.store.used_size()
+    }
+
     pub fn create_key(&self, create_key: CreateApiKey) -> Result<Key> {
         match self.store.get_api_key(create_key.uid)? {
             Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(create_key.uid.to_string())),
@@ -75,6 +75,11 @@ impl HeedAuthStore {
         Ok(self.env.real_disk_size()?)
     }

+    /// Return the number of bytes actually used in the database
+    pub fn used_size(&self) -> Result<u64> {
+        Ok(self.env.non_free_pages_size()?)
+    }
+
     pub fn set_drop_on_close(&mut self, v: bool) {
         self.should_close_on_drop = v;
     }
@@ -240,6 +240,8 @@ InvalidSearchOffset , InvalidRequest , BAD_REQUEST ;
 InvalidSearchPage , InvalidRequest , BAD_REQUEST ;
 InvalidSearchQ , InvalidRequest , BAD_REQUEST ;
 InvalidSearchShowMatchesPosition , InvalidRequest , BAD_REQUEST ;
+InvalidSearchShowRankingScore , InvalidRequest , BAD_REQUEST ;
+InvalidSearchShowRankingScoreDetails , InvalidRequest , BAD_REQUEST ;
 InvalidSearchSort , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsDisplayedAttributes , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsDistinctAttribute , InvalidRequest , BAD_REQUEST ;
@@ -147,7 +147,9 @@ impl Key {
 fn parse_expiration_date(
     string: Option<String>,
 ) -> std::result::Result<Option<OffsetDateTime>, ParseOffsetDateTimeError> {
-    let Some(string) = string else { return Ok(None) };
+    let Some(string) = string else {
+        return Ok(None)
+    };
     let datetime = if let Ok(datetime) = OffsetDateTime::parse(&string, &Rfc3339) {
         datetime
     } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
@@ -4,20 +4,32 @@ use prometheus::{
     register_int_gauge_vec, HistogramVec, IntCounterVec, IntGauge, IntGaugeVec,
 };

-const HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: &[f64; 14] = &[
-    0.0005, 0.0008, 0.00085, 0.0009, 0.00095, 0.001, 0.00105, 0.0011, 0.00115, 0.0012, 0.0015,
-    0.002, 0.003, 1.0,
-];
+/// Create evenly distributed buckets
+fn create_buckets() -> [f64; 29] {
+    (0..10)
+        .chain((10..100).step_by(10))
+        .chain((100..=1000).step_by(100))
+        .map(|i| i as f64 / 1000.)
+        .collect::<Vec<_>>()
+        .try_into()
+        .unwrap()
+}

 lazy_static! {
-    pub static ref HTTP_REQUESTS_TOTAL: IntCounterVec = register_int_counter_vec!(
-        opts!("http_requests_total", "HTTP requests total"),
+    pub static ref HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: [f64; 29] = create_buckets();
+    pub static ref MEILISEARCH_HTTP_REQUESTS_TOTAL: IntCounterVec = register_int_counter_vec!(
+        opts!("meilisearch_http_requests_total", "Meilisearch HTTP requests total"),
         &["method", "path"]
     )
     .expect("Can't create a metric");
     pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
-        register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch Db Size In Bytes"))
+        register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch DB Size In Bytes"))
             .expect("Can't create a metric");
+    pub static ref MEILISEARCH_USED_DB_SIZE_BYTES: IntGauge = register_int_gauge!(opts!(
+        "meilisearch_used_db_size_bytes",
+        "Meilisearch Used DB Size In Bytes"
+    ))
+    .expect("Can't create a metric");
     pub static ref MEILISEARCH_INDEX_COUNT: IntGauge =
         register_int_gauge!(opts!("meilisearch_index_count", "Meilisearch Index Count"))
             .expect("Can't create a metric");
@@ -26,11 +38,16 @@ lazy_static! {
         &["index"]
     )
     .expect("Can't create a metric");
-    pub static ref HTTP_RESPONSE_TIME_SECONDS: HistogramVec = register_histogram_vec!(
+    pub static ref MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS: HistogramVec = register_histogram_vec!(
         "http_response_time_seconds",
         "HTTP response times",
         &["method", "path"],
         HTTP_RESPONSE_TIME_CUSTOM_BUCKETS.to_vec()
     )
     .expect("Can't create a metric");
+    pub static ref MEILISEARCH_NB_TASKS: IntGaugeVec = register_int_gauge_vec!(
+        opts!("meilisearch_nb_tasks", "Meilisearch Number of tasks"),
+        &["kind", "value"]
+    )
+    .expect("Can't create a metric");
 }
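The `create_buckets` helper above replaces the hand-tuned 14-entry bucket list with 29 evenly distributed boundaries: 1 ms steps up to 10 ms, 10 ms steps up to 100 ms, and 100 ms steps up to 1 s. A minimal standalone sketch of the layout it produces (the helper is copied from the hunk above; the `main` is added here only for illustration):

```rust
/// Create evenly distributed buckets (copied from the hunk above).
fn create_buckets() -> [f64; 29] {
    (0..10)
        .chain((10..100).step_by(10))
        .chain((100..=1000).step_by(100))
        .map(|i| i as f64 / 1000.)
        .collect::<Vec<_>>()
        .try_into()
        .unwrap()
}

fn main() {
    let buckets = create_buckets();
    assert_eq!(buckets.len(), 29);
    assert_eq!(buckets[0], 0.0);   // 0 ms
    assert_eq!(buckets[10], 0.01); // 10 ms, start of the 10 ms steps
    assert_eq!(buckets[28], 1.0);  // 1 s, the last boundary
    println!("{buckets:?}");
}
```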
@@ -52,11 +52,11 @@ where
         if is_registered_resource {
             let request_method = req.method().to_string();
             histogram_timer = Some(
-                crate::metrics::HTTP_RESPONSE_TIME_SECONDS
+                crate::metrics::MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS
                     .with_label_values(&[&request_method, request_path])
                     .start_timer(),
             );
-            crate::metrics::HTTP_REQUESTS_TOTAL
+            crate::metrics::MEILISEARCH_HTTP_REQUESTS_TOTAL
                 .with_label_values(&[&request_method, request_path])
                 .inc();
         }
@@ -56,6 +56,10 @@ pub struct SearchQueryGet {
     sort: Option<String>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchShowMatchesPosition>)]
     show_matches_position: Param<bool>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchShowRankingScore>)]
+    show_ranking_score: Param<bool>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchShowRankingScoreDetails>)]
+    show_ranking_score_details: Param<bool>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchFacets>)]
     facets: Option<CS<String>>,
     #[deserr( default = DEFAULT_HIGHLIGHT_PRE_TAG(), error = DeserrQueryParamError<InvalidSearchHighlightPreTag>)]
@@ -91,6 +95,8 @@ impl From<SearchQueryGet> for SearchQuery {
             filter,
             sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
             show_matches_position: other.show_matches_position.0,
+            show_ranking_score: other.show_ranking_score.0,
+            show_ranking_score_details: other.show_ranking_score_details.0,
             facets: other.facets.map(|o| o.into_iter().collect()),
             highlight_pre_tag: other.highlight_pre_tag,
             highlight_post_tag: other.highlight_post_tag,
@@ -17,7 +17,7 @@ pub fn configure(config: &mut web::ServiceConfig) {

 pub async fn get_metrics(
     index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
-    auth_controller: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<AuthController>>,
+    auth_controller: Data<AuthController>,
 ) -> Result<HttpResponse, ResponseError> {
     let auth_filters = index_scheduler.filters();
     if !auth_filters.all_indexes_authorized() {
@@ -28,10 +28,10 @@ pub async fn get_metrics(
         return Err(error);
     }

-    let response =
-        create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), auth_filters)?;
+    let response = create_all_stats((*index_scheduler).clone(), auth_controller, auth_filters)?;

     crate::metrics::MEILISEARCH_DB_SIZE_BYTES.set(response.database_size as i64);
+    crate::metrics::MEILISEARCH_USED_DB_SIZE_BYTES.set(response.used_database_size as i64);
     crate::metrics::MEILISEARCH_INDEX_COUNT.set(response.indexes.len() as i64);

     for (index, value) in response.indexes.iter() {
@@ -40,6 +40,14 @@ pub async fn get_metrics(
             .set(value.number_of_documents as i64);
     }

+    for (kind, value) in index_scheduler.get_stats()? {
+        for (value, count) in value {
+            crate::metrics::MEILISEARCH_NB_TASKS
+                .with_label_values(&[&kind, &value])
+                .set(count as i64);
+        }
+    }
+
     let encoder = TextEncoder::new();
     let mut buffer = vec![];
     encoder.encode(&prometheus::gather(), &mut buffer).expect("Failed to encode metrics");
@@ -231,6 +231,8 @@ pub async fn running() -> HttpResponse {
 #[serde(rename_all = "camelCase")]
 pub struct Stats {
     pub database_size: u64,
+    #[serde(skip)]
+    pub used_database_size: u64,
     #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
     pub last_update: Option<OffsetDateTime>,
     pub indexes: BTreeMap<String, indexes::IndexStats>,
@@ -259,6 +261,7 @@ pub fn create_all_stats(
     let mut last_task: Option<OffsetDateTime> = None;
     let mut indexes = BTreeMap::new();
     let mut database_size = 0;
+    let mut used_database_size = 0;

     for index_uid in index_scheduler.index_names()? {
         // Accumulate the size of all indexes, even unauthorized ones, so
@@ -266,6 +269,7 @@ pub fn create_all_stats(
         // See <https://github.com/meilisearch/meilisearch/pull/3541#discussion_r1126747643> for context.
         let stats = index_scheduler.index_stats(&index_uid)?;
         database_size += stats.inner_stats.database_size;
+        used_database_size += stats.inner_stats.used_database_size;

         if !filters.is_index_authorized(&index_uid) {
             continue;
@@ -278,10 +282,14 @@ pub fn create_all_stats(
     }

     database_size += index_scheduler.size()?;
+    used_database_size += index_scheduler.used_size()?;
     database_size += auth_controller.size()?;
-    database_size += index_scheduler.compute_update_file_size()?;
+    used_database_size += auth_controller.used_size()?;
+    let update_file_size = index_scheduler.compute_update_file_size()?;
+    database_size += update_file_size;
+    used_database_size += update_file_size;

-    let stats = Stats { database_size, last_update: last_task, indexes };
+    let stats = Stats { database_size, used_database_size, last_update: last_task, indexes };
     Ok(stats)
 }
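`used_database_size` is accumulated alongside `database_size` but marked `#[serde(skip)]`, so it never appears in the public `/stats` payload; it only feeds the new Prometheus gauge. A minimal sketch of that serde behaviour (assuming the `serde` and `serde_json` crates; this is a reduced stand-in, not the full `Stats` struct):

```rust
use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Stats {
    database_size: u64,
    #[serde(skip)]
    used_database_size: u64,
}

fn main() {
    let stats = Stats { database_size: 2048, used_database_size: 1024 };
    // Prints {"databaseSize":2048}: the skipped field is absent from the JSON.
    println!("{}", serde_json::to_string(&stats).unwrap());
}
```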
@@ -9,6 +9,7 @@ use meilisearch_auth::IndexSearchRules;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::index_uid::IndexUid;
+use meilisearch_types::milli::score_details::{ScoreDetails, ScoringStrategy};
 use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
 use meilisearch_types::{milli, Document};
 use milli::tokenizer::TokenizerBuilder;
@@ -54,6 +55,10 @@ pub struct SearchQuery {
     pub attributes_to_highlight: Option<HashSet<String>>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchShowMatchesPosition>, default)]
     pub show_matches_position: bool,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchShowRankingScore>, default)]
+    pub show_ranking_score: bool,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchShowRankingScoreDetails>, default)]
+    pub show_ranking_score_details: bool,
     #[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
     pub filter: Option<Value>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchSort>)]
@@ -103,6 +108,10 @@ pub struct SearchQueryWithIndex {
     pub crop_length: usize,
     #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToHighlight>)]
     pub attributes_to_highlight: Option<HashSet<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchShowRankingScore>, default)]
+    pub show_ranking_score: bool,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchShowRankingScoreDetails>, default)]
+    pub show_ranking_score_details: bool,
     #[deserr(default, error = DeserrJsonError<InvalidSearchShowMatchesPosition>, default)]
     pub show_matches_position: bool,
     #[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
@@ -134,6 +143,8 @@ impl SearchQueryWithIndex {
             attributes_to_crop,
             crop_length,
             attributes_to_highlight,
+            show_ranking_score,
+            show_ranking_score_details,
             show_matches_position,
             filter,
             sort,
@@ -155,6 +166,8 @@ impl SearchQueryWithIndex {
             attributes_to_crop,
             crop_length,
             attributes_to_highlight,
+            show_ranking_score,
+            show_ranking_score_details,
             show_matches_position,
             filter,
             sort,
@@ -194,7 +207,7 @@ impl From<MatchingStrategy> for TermsMatchingStrategy {
     }
 }

-#[derive(Debug, Clone, Serialize, PartialEq, Eq)]
+#[derive(Debug, Clone, Serialize, PartialEq)]
 pub struct SearchHit {
     #[serde(flatten)]
     pub document: Document,
@@ -202,6 +215,10 @@ pub struct SearchHit {
     pub formatted: Document,
     #[serde(rename = "_matchesPosition", skip_serializing_if = "Option::is_none")]
     pub matches_position: Option<MatchesPosition>,
+    #[serde(rename = "_rankingScore", skip_serializing_if = "Option::is_none")]
+    pub ranking_score: Option<f64>,
+    #[serde(rename = "_rankingScoreDetails", skip_serializing_if = "Option::is_none")]
+    pub ranking_score_details: Option<serde_json::Map<String, serde_json::Value>>,
 }

 #[derive(Serialize, Debug, Clone, PartialEq)]
@@ -283,6 +300,11 @@ pub fn perform_search(
         .unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS);

     search.exhaustive_number_hits(is_finite_pagination);
+    search.scoring_strategy(if query.show_ranking_score || query.show_ranking_score_details {
+        ScoringStrategy::Detailed
+    } else {
+        ScoringStrategy::Skip
+    });

     // compute the offset on the limit depending on the pagination mode.
     let (offset, limit) = if is_finite_pagination {
@@ -320,7 +342,8 @@ pub fn perform_search(
         search.sort_criteria(sort);
     }

-    let milli::SearchResult { documents_ids, matching_words, candidates, .. } = search.execute()?;
+    let milli::SearchResult { documents_ids, matching_words, candidates, document_scores, .. } =
+        search.execute()?;

     let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
@@ -392,7 +415,7 @@ pub fn perform_search(

     let documents_iter = index.documents(&rtxn, documents_ids)?;

-    for (_id, obkv) in documents_iter {
+    for ((_id, obkv), score) in documents_iter.into_iter().zip(document_scores.into_iter()) {
         // First generate a document with all the displayed fields
         let displayed_document = make_document(&displayed_ids, &fields_ids_map, obkv)?;
@@ -416,7 +439,18 @@ pub fn perform_search(
             insert_geo_distance(sort, &mut document);
         }

-        let hit = SearchHit { document, formatted, matches_position };
+        let ranking_score =
+            query.show_ranking_score.then(|| ScoreDetails::global_score(score.iter()));
+        let ranking_score_details =
+            query.show_ranking_score_details.then(|| ScoreDetails::to_json_map(score.iter()));
+
+        let hit = SearchHit {
+            document,
+            formatted,
+            matches_position,
+            ranking_score_details,
+            ranking_score,
+        };
         documents.push(hit);
     }
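With the new fields, a hit serializes its optional `_rankingScore` (a global score folded from the per-rule `ScoreDetails`) and `_rankingScoreDetails` only when the matching `showRankingScore*` flag was requested, thanks to `skip_serializing_if = "Option::is_none"`. A hypothetical hit when only `showRankingScore: true` is set; the score value is made up for illustration:

```rust
use serde_json::json;

fn main() {
    let hit = json!({
        "id": 852,
        "cattos": "pésti",
        "_rankingScore": 0.9174 // would come from ScoreDetails::global_score in practice
    });
    // `_rankingScoreDetails` is absent because the flag was not set and it stayed `None`.
    println!("{hit}");
}
```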
@@ -154,19 +154,6 @@ async fn delete_document_by_filter() {
     )
     .await;
     index.wait_task(1).await;

-    let (stats, _) = index.stats().await;
-    snapshot!(json_string!(stats), @r###"
-    {
-      "numberOfDocuments": 4,
-      "isIndexing": false,
-      "fieldDistribution": {
-        "color": 3,
-        "id": 4
-      }
-    }
-    "###);
-
     let (response, code) =
         index.delete_document_by_filter(json!({ "filter": "color = blue"})).await;
     snapshot!(code, @"202 Accepted");
@@ -201,18 +188,6 @@ async fn delete_document_by_filter() {
     }
     "###);

-    let (stats, _) = index.stats().await;
-    snapshot!(json_string!(stats), @r###"
-    {
-      "numberOfDocuments": 2,
-      "isIndexing": false,
-      "fieldDistribution": {
-        "color": 1,
-        "id": 2
-      }
-    }
-    "###);
-
     let (documents, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
     snapshot!(code, @"200 OK");
     snapshot!(json_string!(documents), @r###"
@@ -266,18 +241,6 @@ async fn delete_document_by_filter() {
     }
     "###);

-    let (stats, _) = index.stats().await;
-    snapshot!(json_string!(stats), @r###"
-    {
-      "numberOfDocuments": 1,
-      "isIndexing": false,
-      "fieldDistribution": {
-        "color": 1,
-        "id": 1
-      }
-    }
-    "###);
-
     let (documents, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
     snapshot!(code, @"200 OK");
     snapshot!(json_string!(documents), @r###"
@@ -1,3 +1,4 @@
+use insta::{allow_duplicates, assert_json_snapshot};
 use serde_json::json;

 use super::*;
@@ -18,30 +19,43 @@ async fn formatted_contain_wildcard() {
         |response, code|
         {
             assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "_formatted": {
-                        "id": "852",
-                        "cattos": "<em>pésti</em>",
-                    },
-                    "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]},
-                })
-            );
-        }
+            allow_duplicates! {
+                assert_json_snapshot!(response["hits"][0],
+                    { "._rankingScore" => "[score]" },
+                    @r###"
+                {
+                  "_formatted": {
+                    "id": "852",
+                    "cattos": "<em>pésti</em>"
+                  },
+                  "_matchesPosition": {
+                    "cattos": [
+                      {
+                        "start": 0,
+                        "length": 5
+                      }
+                    ]
+                  }
+                }
+                "###);
+            }
+        }
     )
     .await;

     index
         .search(json!({ "q": "pésti", "attributesToRetrieve": ["*"] }), |response, code| {
             assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "id": 852,
-                    "cattos": "pésti",
-                })
-            );
+            allow_duplicates! {
+                assert_json_snapshot!(response["hits"][0],
+                    { "._rankingScore" => "[score]" },
+                    @r###"
+                {
+                  "id": 852,
+                  "cattos": "pésti"
+                }
+                "###)
+            }
         })
         .await;

@@ -50,20 +64,29 @@ async fn formatted_contain_wildcard() {
             json!({ "q": "pésti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"], "showMatchesPosition": true }),
             |response, code| {
                 assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "cattos": "pésti",
-                        "_formatted": {
-                            "id": "852",
-                            "cattos": "pésti",
-                        },
-                        "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]},
-                    })
-                );
-            }
-        )
+                allow_duplicates! {
+                    assert_json_snapshot!(response["hits"][0],
+                        { "._rankingScore" => "[score]" },
+                        @r###"
+                    {
+                      "id": 852,
+                      "cattos": "pésti",
+                      "_formatted": {
+                        "id": "852",
+                        "cattos": "pésti"
+                      },
+                      "_matchesPosition": {
+                        "cattos": [
+                          {
+                            "start": 0,
+                            "length": 5
+                          }
+                        ]
+                      }
+                    }
+                    "###)
+                }
+            })
         .await;

     index
@@ -71,17 +94,20 @@ async fn formatted_contain_wildcard() {
             json!({ "q": "pésti", "attributesToRetrieve": ["*"], "attributesToCrop": ["*"] }),
             |response, code| {
                 assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "cattos": "pésti",
-                        "_formatted": {
-                            "id": "852",
-                            "cattos": "pésti",
-                        }
-                    })
-                );
+                allow_duplicates! {
+                    assert_json_snapshot!(response["hits"][0],
+                        { "._rankingScore" => "[score]" },
+                        @r###"
+                    {
+                      "id": 852,
+                      "cattos": "pésti",
+                      "_formatted": {
+                        "id": "852",
+                        "cattos": "pésti"
+                      }
+                    }
+                    "###);
+                }
             },
         )
         .await;

@@ -89,17 +115,20 @@ async fn formatted_contain_wildcard() {
     index
         .search(json!({ "q": "pésti", "attributesToCrop": ["*"] }), |response, code| {
             assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "id": 852,
-                    "cattos": "pésti",
-                    "_formatted": {
-                        "id": "852",
-                        "cattos": "pésti",
-                    }
-                })
-            );
+            allow_duplicates! {
+                assert_json_snapshot!(response["hits"][0],
+                    { "._rankingScore" => "[score]" },
+                    @r###"
+                {
+                  "id": 852,
+                  "cattos": "pésti",
+                  "_formatted": {
+                    "id": "852",
+                    "cattos": "pésti"
+                  }
+                }
+                "###)
+            }
         })
         .await;
 }
@@ -116,21 +145,24 @@ async fn format_nested() {
     index
         .search(json!({ "q": "pésti", "attributesToRetrieve": ["doggos"] }), |response, code| {
             assert_eq!(code, 200, "{}", response);
-            assert_eq!(
-                response["hits"][0],
-                json!({
-                    "doggos": [
-                        {
-                            "name": "bobby",
-                            "age": 2,
-                        },
-                        {
-                            "name": "buddy",
-                            "age": 4,
-                        },
-                    ],
-                })
-            );
+            allow_duplicates! {
+                assert_json_snapshot!(response["hits"][0],
+                    { "._rankingScore" => "[score]" },
+                    @r###"
+                {
+                  "doggos": [
+                    {
+                      "name": "bobby",
+                      "age": 2
+                    },
+                    {
+                      "name": "buddy",
+                      "age": 4
+                    }
+                  ]
+                }
+                "###)
+            }
         })
         .await;

@@ -139,19 +171,22 @@ async fn format_nested() {
             json!({ "q": "pésti", "attributesToRetrieve": ["doggos.name"] }),
             |response, code| {
                 assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "doggos": [
-                            {
-                                "name": "bobby",
-                            },
-                            {
-                                "name": "buddy",
-                            },
-                        ],
-                    })
-                );
+                allow_duplicates! {
+                    assert_json_snapshot!(response["hits"][0],
+                        { "._rankingScore" => "[score]" },
+                        @r###"
+                    {
+                      "doggos": [
+                        {
+                          "name": "bobby"
+                        },
+                        {
+                          "name": "buddy"
+                        }
+                      ]
+                    }
+                    "###)
+                }
             },
         )
         .await;
@@ -161,20 +196,30 @@ async fn format_nested() {
             json!({ "q": "bobby", "attributesToRetrieve": ["doggos.name"], "showMatchesPosition": true }),
             |response, code| {
                 assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "doggos": [
-                            {
-                                "name": "bobby",
-                            },
-                            {
-                                "name": "buddy",
-                            },
-                        ],
-                        "_matchesPosition": {"doggos.name": [{"start": 0, "length": 5}]},
-                    })
-                );
+                allow_duplicates! {
+                    assert_json_snapshot!(response["hits"][0],
+                        { "._rankingScore" => "[score]" },
+                        @r###"
+                    {
+                      "doggos": [
+                        {
+                          "name": "bobby"
+                        },
+                        {
+                          "name": "buddy"
+                        }
+                      ],
+                      "_matchesPosition": {
+                        "doggos.name": [
+                          {
+                            "start": 0,
+                            "length": 5
+                          }
+                        ]
+                      }
+                    }
+                    "###)
|
||||
}
|
||||
}
|
||||
)
|
||||
.await;
|
||||
@@ -183,21 +228,24 @@ async fn format_nested() {
|
||||
.search(json!({ "q": "pésti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.name"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"_formatted": {
|
||||
"doggos": [
|
||||
{
|
||||
"name": "bobby",
|
||||
},
|
||||
{
|
||||
"name": "buddy",
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"_formatted": {
|
||||
"doggos": [
|
||||
{
|
||||
"name": "bobby"
|
||||
},
|
||||
{
|
||||
"name": "buddy"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"###)
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -205,21 +253,24 @@ async fn format_nested() {
|
||||
.search(json!({ "q": "pésti", "attributesToRetrieve": [], "attributesToCrop": ["doggos.name"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"_formatted": {
|
||||
"doggos": [
|
||||
{
|
||||
"name": "bobby",
|
||||
},
|
||||
{
|
||||
"name": "buddy",
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"_formatted": {
|
||||
"doggos": [
|
||||
{
|
||||
"name": "bobby"
|
||||
},
|
||||
{
|
||||
"name": "buddy"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"###)
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -227,55 +278,61 @@ async fn format_nested() {
|
||||
.search(json!({ "q": "pésti", "attributesToRetrieve": ["doggos.name"], "attributesToHighlight": ["doggos.age"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"doggos": [
|
||||
{
|
||||
"name": "bobby",
|
||||
},
|
||||
{
|
||||
"name": "buddy",
|
||||
},
|
||||
],
|
||||
"_formatted": {
|
||||
"doggos": [
|
||||
{
|
||||
"name": "bobby",
|
||||
"age": "2",
|
||||
},
|
||||
{
|
||||
"name": "buddy",
|
||||
"age": "4",
|
||||
},
|
||||
],
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"doggos": [
|
||||
{
|
||||
"name": "bobby"
|
||||
},
|
||||
})
|
||||
);
|
||||
})
|
||||
{
|
||||
"name": "buddy"
|
||||
}
|
||||
],
|
||||
"_formatted": {
|
||||
"doggos": [
|
||||
{
|
||||
"name": "bobby",
|
||||
"age": "2"
|
||||
},
|
||||
{
|
||||
"name": "buddy",
|
||||
"age": "4"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"###)
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(json!({ "q": "pésti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.age"], "attributesToCrop": ["doggos.name"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"_formatted": {
|
||||
"doggos": [
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"name": "bobby",
|
||||
"age": "2",
|
||||
},
|
||||
{
|
||||
"name": "buddy",
|
||||
"age": "4",
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
);
|
||||
"_formatted": {
|
||||
"doggos": [
|
||||
{
|
||||
"name": "bobby",
|
||||
"age": "2"
|
||||
},
|
||||
{
|
||||
"name": "buddy",
|
||||
"age": "4"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"###)
|
||||
}
|
||||
}
|
||||
)
|
||||
.await;
|
||||
@@ -297,54 +354,66 @@ async fn displayedattr_2_smol() {
|
||||
.search(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"id": 852
|
||||
}
|
||||
"###)
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(json!({ "attributesToRetrieve": ["id"] }), |response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"id": 852
|
||||
}
|
||||
"###)
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(json!({ "attributesToHighlight": ["id"] }), |response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
"_formatted": {
|
||||
"id": "852",
|
||||
}
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"id": 852,
|
||||
"_formatted": {
|
||||
"id": "852"
|
||||
}
|
||||
}
|
||||
"###)
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(json!({ "attributesToCrop": ["id"] }), |response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
"_formatted": {
|
||||
"id": "852",
|
||||
}
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"id": 852,
|
||||
"_formatted": {
|
||||
"id": "852"
|
||||
}
|
||||
}
|
||||
"###)
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -353,15 +422,18 @@ async fn displayedattr_2_smol() {
|
||||
json!({ "attributesToHighlight": ["id"], "attributesToCrop": ["id"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
"_formatted": {
|
||||
"id": "852",
|
||||
}
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"id": 852,
|
||||
"_formatted": {
|
||||
"id": "852"
|
||||
}
|
||||
}
|
||||
"###)
|
||||
}
|
||||
},
|
||||
)
|
||||
.await;
|
||||
@@ -369,31 +441,41 @@ async fn displayedattr_2_smol() {
|
||||
index
|
||||
.search(json!({ "attributesToHighlight": ["cattos"] }), |response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"id": 852
|
||||
}
|
||||
"###)
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(json!({ "attributesToCrop": ["cattos"] }), |response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"id": 852
|
||||
}
|
||||
"###)
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(json!({ "attributesToRetrieve": ["cattos"] }), |response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(response["hits"][0], json!({}));
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@"{}")
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -402,7 +484,11 @@ async fn displayedattr_2_smol() {
|
||||
json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["cattos"], "attributesToCrop": ["cattos"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(response["hits"][0], json!({}));
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@"{}")
|
||||
}
|
||||
|
||||
}
|
||||
)
|
||||
@@ -413,14 +499,17 @@ async fn displayedattr_2_smol() {
|
||||
json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["id"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"_formatted": {
|
||||
"id": "852",
|
||||
}
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"_formatted": {
|
||||
"id": "852"
|
||||
}
|
||||
}
|
||||
"###)
|
||||
}
|
||||
},
|
||||
)
|
||||
.await;
|
||||
@@ -430,14 +519,17 @@ async fn displayedattr_2_smol() {
|
||||
json!({ "attributesToRetrieve": ["cattos"], "attributesToCrop": ["id"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"_formatted": {
|
||||
"id": "852",
|
||||
}
|
||||
})
|
||||
);
|
||||
allow_duplicates! {
|
||||
assert_json_snapshot!(response["hits"][0],
|
||||
{ "._rankingScore" => "[score]" },
|
||||
@r###"
|
||||
{
|
||||
"_formatted": {
|
||||
"id": "852"
|
||||
}
|
||||
}
|
||||
"###)
|
||||
}
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
@@ -65,7 +65,7 @@ async fn simple_search_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
insta::assert_json_snapshot!(response["results"], { "[].processingTimeMs" => "[time]" }, @r###"
|
||||
insta::assert_json_snapshot!(response["results"], { "[].processingTimeMs" => "[time]", ".**._rankingScore" => "[score]" }, @r###"
|
||||
[
|
||||
{
|
||||
"indexUid": "test",
|
||||
@@ -170,7 +170,7 @@ async fn simple_search_two_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
insta::assert_json_snapshot!(response["results"], { "[].processingTimeMs" => "[time]" }, @r###"
|
||||
insta::assert_json_snapshot!(response["results"], { "[].processingTimeMs" => "[time]", ".**._rankingScore" => "[score]" }, @r###"
|
||||
[
|
||||
{
|
||||
"indexUid": "test",
|
||||
|
||||
@@ -53,6 +53,7 @@ fn main() -> Result<(), Box<dyn Error>> {
|
||||
&mut ctx,
|
||||
&(!query.trim().is_empty()).then(|| query.trim().to_owned()),
|
||||
TermsMatchingStrategy::Last,
|
||||
milli::score_details::ScoringStrategy::Skip,
|
||||
false,
|
||||
&None,
|
||||
&None,
|
||||
|
||||
@@ -49,7 +49,7 @@ impl CboRoaringBitmapCodec {
|
||||
} else {
|
||||
// Otherwise, it means we used the classic RoaringBitmapCodec and
|
||||
// that the header takes threshold integers.
|
||||
RoaringBitmap::deserialize_from(bytes)
|
||||
RoaringBitmap::deserialize_unchecked_from(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -69,7 +69,7 @@ impl CboRoaringBitmapCodec {
|
||||
vec.push(integer);
|
||||
}
|
||||
} else {
|
||||
roaring |= RoaringBitmap::deserialize_from(bytes.as_ref())?;
|
||||
roaring |= RoaringBitmap::deserialize_unchecked_from(bytes.as_ref())?;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ impl heed::BytesDecode<'_> for RoaringBitmapCodec {
|
||||
type DItem = RoaringBitmap;
|
||||
|
||||
fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
|
||||
RoaringBitmap::deserialize_from(bytes).ok()
|
||||
RoaringBitmap::deserialize_unchecked_from(bytes).ok()
|
||||
}
|
||||
}
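The hunks above swap `RoaringBitmap::deserialize_from` for `deserialize_unchecked_from` in every bitmap codec: decoding skips the validation pass, which is acceptable because the bytes always come from Meilisearch's own database. A minimal sketch of the trade-off, assuming the `roaring` crate's serialize/deserialize API (this snippet is illustration, not part of the diff):

// Standalone sketch; assumes a roaring crate version that exposes
// `deserialize_unchecked_from` (e.g. 0.10+).
use roaring::RoaringBitmap;

fn main() -> std::io::Result<()> {
    let bitmap: RoaringBitmap = (0..1_000u32).collect();

    // Serialize with the portable format, as the codecs above do on write.
    let mut bytes = Vec::new();
    bitmap.serialize_into(&mut bytes)?;

    // `deserialize_from` validates the buffer; `deserialize_unchecked_from`
    // skips those checks, which is safe here because the bytes were just
    // produced by `serialize_into` (or, in milli, read back from LMDB).
    let decoded = RoaringBitmap::deserialize_unchecked_from(&bytes[..])?;
    assert_eq!(bitmap, decoded);
    Ok(())
}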


@@ -21,10 +21,9 @@ use crate::heed_codec::facet::{
};
use crate::heed_codec::{ScriptLanguageCodec, StrBEU16Codec, StrRefCodec};
use crate::{
default_criteria, BEU32StrCodec, BoRoaringBitmapCodec, CboRoaringBitmapCodec, Criterion,
DocumentId, ExternalDocumentsIds, FacetDistribution, FieldDistribution, FieldId,
FieldIdWordCountCodec, GeoPoint, ObkvCodec, Result, RoaringBitmapCodec, RoaringBitmapLenCodec,
Search, U8StrStrCodec, BEU16, BEU32,
default_criteria, CboRoaringBitmapCodec, Criterion, DocumentId, ExternalDocumentsIds,
FacetDistribution, FieldDistribution, FieldId, FieldIdWordCountCodec, GeoPoint, ObkvCodec,
Result, RoaringBitmapCodec, RoaringBitmapLenCodec, Search, U8StrStrCodec, BEU16, BEU32,
};

pub const DEFAULT_MIN_WORD_LEN_ONE_TYPO: u8 = 5;
@@ -111,9 +110,6 @@ pub struct Index {
/// A prefix of word and all the documents ids containing this prefix, from attributes for which typos are not allowed.
pub exact_word_prefix_docids: Database<Str, RoaringBitmapCodec>,

/// Maps a word and a document id (u32) to all the positions where the given word appears.
pub docid_word_positions: Database<BEU32StrCodec, BoRoaringBitmapCodec>,

/// Maps the proximity between a pair of words with all the docids where this relation appears.
pub word_pair_proximity_docids: Database<U8StrStrCodec, CboRoaringBitmapCodec>,
/// Maps the proximity between a pair of word and prefix with all the docids where this relation appears.
@@ -177,7 +173,6 @@ impl Index {
let word_prefix_docids = env.create_database(&mut wtxn, Some(WORD_PREFIX_DOCIDS))?;
let exact_word_prefix_docids =
env.create_database(&mut wtxn, Some(EXACT_WORD_PREFIX_DOCIDS))?;
let docid_word_positions = env.create_database(&mut wtxn, Some(DOCID_WORD_POSITIONS))?;
let word_pair_proximity_docids =
env.create_database(&mut wtxn, Some(WORD_PAIR_PROXIMITY_DOCIDS))?;
let script_language_docids =
@@ -220,7 +215,6 @@ impl Index {
exact_word_docids,
word_prefix_docids,
exact_word_prefix_docids,
docid_word_positions,
word_pair_proximity_docids,
script_language_docids,
word_prefix_pair_proximity_docids,
@@ -1472,9 +1466,9 @@ pub(crate) mod tests {

db_snap!(index, field_distribution,
@r###"
age 1
id 2
name 2
age 1 |
id 2 |
name 2 |
"###
);

@@ -1492,9 +1486,9 @@ pub(crate) mod tests {

db_snap!(index, field_distribution,
@r###"
age 1
id 2
name 2
age 1 |
id 2 |
name 2 |
"###
);

@@ -1508,9 +1502,9 @@ pub(crate) mod tests {

db_snap!(index, field_distribution,
@r###"
has_dog 1
id 2
name 2
has_dog 1 |
id 2 |
name 2 |
"###
);
}
@@ -2494,8 +2488,12 @@ pub(crate) mod tests {

let rtxn = index.read_txn().unwrap();
let search = Search::new(&rtxn, &index);
let SearchResult { matching_words: _, candidates: _, mut documents_ids } =
search.execute().unwrap();
let SearchResult {
matching_words: _,
candidates: _,
document_scores: _,
mut documents_ids,
} = search.execute().unwrap();
let primary_key_id = index.fields_ids_map(&rtxn).unwrap().id("primary_key").unwrap();
documents_ids.sort_unstable();
let docs = index.documents(&rtxn, documents_ids).unwrap();

@@ -5,52 +5,6 @@
#[global_allocator]
pub static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;

// #[cfg(test)]
// pub mod allocator {
// use std::alloc::{GlobalAlloc, System};
// use std::sync::atomic::{self, AtomicI64};

// #[global_allocator]
// pub static ALLOC: CountingAlloc = CountingAlloc {
// max_resident: AtomicI64::new(0),
// resident: AtomicI64::new(0),
// allocated: AtomicI64::new(0),
// };

// pub struct CountingAlloc {
// pub max_resident: AtomicI64,
// pub resident: AtomicI64,
// pub allocated: AtomicI64,
// }
// unsafe impl GlobalAlloc for CountingAlloc {
// unsafe fn alloc(&self, layout: std::alloc::Layout) -> *mut u8 {
// self.allocated.fetch_add(layout.size() as i64, atomic::Ordering::SeqCst);
// let old_resident =
// self.resident.fetch_add(layout.size() as i64, atomic::Ordering::SeqCst);

// let resident = old_resident + layout.size() as i64;
// self.max_resident.fetch_max(resident, atomic::Ordering::SeqCst);

// // if layout.size() > 1_000_000 {
// // eprintln!(
// // "allocating {} with new resident size: {resident}",
// // layout.size() / 1_000_000
// // );
// // // let trace = std::backtrace::Backtrace::capture();
// // // let t = trace.to_string();
// // // eprintln!("{t}");
// // }

// System.alloc(layout)
// }

// unsafe fn dealloc(&self, ptr: *mut u8, layout: std::alloc::Layout) {
// self.resident.fetch_sub(layout.size() as i64, atomic::Ordering::Relaxed);
// System.dealloc(ptr, layout)
// }
// }
// }

#[macro_use]
pub mod documents;

@@ -63,6 +17,7 @@ mod fields_ids_map;
pub mod heed_codec;
pub mod index;
pub mod proximity;
pub mod score_details;
mod search;
pub mod update;


milli/src/score_details.rs (new file, 316 lines)
@@ -0,0 +1,316 @@
use serde::Serialize;

use crate::distance_between_two_points;

#[derive(Debug, Clone, PartialEq)]
pub enum ScoreDetails {
Words(Words),
Typo(Typo),
Proximity(Rank),
Fid(Rank),
Position(Rank),
ExactAttribute(ExactAttribute),
Exactness(Rank),
Sort(Sort),
GeoSort(GeoSort),
}

impl ScoreDetails {
pub fn local_score(&self) -> Option<f64> {
self.rank().map(Rank::local_score)
}

pub fn rank(&self) -> Option<Rank> {
match self {
ScoreDetails::Words(details) => Some(details.rank()),
ScoreDetails::Typo(details) => Some(details.rank()),
ScoreDetails::Proximity(details) => Some(*details),
ScoreDetails::Fid(details) => Some(*details),
ScoreDetails::Position(details) => Some(*details),
ScoreDetails::ExactAttribute(details) => Some(details.rank()),
ScoreDetails::Exactness(details) => Some(*details),
ScoreDetails::Sort(_) => None,
ScoreDetails::GeoSort(_) => None,
}
}

pub fn global_score<'a>(details: impl Iterator<Item = &'a Self>) -> f64 {
Rank::global_score(details.filter_map(Self::rank))
}

/// Panics
///
/// - If Position is not preceded by Fid
/// - If Exactness is not preceded by ExactAttribute
pub fn to_json_map<'a>(
details: impl Iterator<Item = &'a Self>,
) -> serde_json::Map<String, serde_json::Value> {
let mut order = 0;
let mut fid_details = None;
let mut details_map = serde_json::Map::default();
for details in details {
match details {
ScoreDetails::Words(words) => {
let words_details = serde_json::json!({
"order": order,
"matchingWords": words.matching_words,
"maxMatchingWords": words.max_matching_words,
"score": words.rank().local_score(),
});
details_map.insert("words".into(), words_details);
order += 1;
}
ScoreDetails::Typo(typo) => {
let typo_details = serde_json::json!({
"order": order,
"typoCount": typo.typo_count,
"maxTypoCount": typo.max_typo_count,
"score": typo.rank().local_score(),
});
details_map.insert("typo".into(), typo_details);
order += 1;
}
ScoreDetails::Proximity(proximity) => {
let proximity_details = serde_json::json!({
"order": order,
"score": proximity.local_score(),
});
details_map.insert("proximity".into(), proximity_details);
order += 1;
}
ScoreDetails::Fid(fid) => {
// copy the rank for future use in Position.
fid_details = Some(*fid);
// For now, fid is a virtual rule always followed by the "position" rule
let fid_details = serde_json::json!({
"order": order,
"attributes_ranking_order": fid.local_score(),
});
details_map.insert("attribute".into(), fid_details);
order += 1;
}
ScoreDetails::Position(position) => {
// For now, position is a virtual rule always preceded by the "fid" rule
let attribute_details = details_map
.get_mut("attribute")
.expect("position not preceded by attribute");
let attribute_details = attribute_details
.as_object_mut()
.expect("attribute details was not an object");
let Some(fid_details) = fid_details
else {
panic!("position not preceded by attribute");
};

attribute_details.insert(
"attributes_query_word_order".into(),
position.local_score().into(),
);
let score = Rank::global_score([fid_details, *position].iter().copied());
attribute_details.insert("score".into(), score.into());

// do not update the order since this was already done by fid
}
ScoreDetails::ExactAttribute(exact_attribute) => {
let exactness_details = serde_json::json!({
"order": order,
"matchType": exact_attribute,
"score": exact_attribute.rank().local_score(),
});
details_map.insert("exactness".into(), exactness_details);
order += 1;
}
ScoreDetails::Exactness(details) => {
// For now, exactness is a virtual rule always preceded by the "ExactAttribute" rule
let exactness_details = details_map
.get_mut("exactness")
.expect("Exactness not preceded by exactAttribute");
let exactness_details = exactness_details
.as_object_mut()
.expect("exactness details was not an object");
if exactness_details.get("matchType").expect("missing 'matchType'")
== &serde_json::json!(ExactAttribute::NoExactMatch)
{
let score = Rank::global_score(
[ExactAttribute::NoExactMatch.rank(), *details].iter().copied(),
);
*exactness_details.get_mut("score").expect("missing score") = score.into();
}
// do not update the order since this was already done by exactAttribute
}
ScoreDetails::Sort(details) => {
let sort = if details.redacted {
format!("<hidden-rule-{order}>")
} else {
format!(
"{}:{}",
details.field_name,
if details.ascending { "asc" } else { "desc" }
)
};
let value =
if details.redacted { "<hidden>".into() } else { details.value.clone() };
let sort_details = serde_json::json!({
"order": order,
"value": value,
});
details_map.insert(sort, sort_details);
order += 1;
}
ScoreDetails::GeoSort(details) => {
let sort = format!(
"_geoPoint({}, {}):{}",
details.target_point[0],
details.target_point[1],
if details.ascending { "asc" } else { "desc" }
);
let point = if let Some(value) = details.value {
serde_json::json!({ "lat": value[0], "lng": value[1]})
} else {
serde_json::Value::Null
};
let sort_details = serde_json::json!({
"order": order,
"value": point,
"distance": details.distance(),
});
details_map.insert(sort, sort_details);
order += 1;
}
}
}
details_map
}
}
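A minimal sketch of consuming `to_json_map`, with made-up rule values; the expected JSON shape follows directly from the branches above:

// Values invented for illustration; both rules are rank-based, so their
// local scores come straight from `Words::rank` and `Typo::rank`.
let details = vec![
    ScoreDetails::Words(Words { matching_words: 2, max_matching_words: 2 }),
    ScoreDetails::Typo(Typo { typo_count: 1, max_typo_count: 3 }),
];
let map = ScoreDetails::to_json_map(details.iter());
// Expected shape (scores derived from the ranks: 2/2 = 1.0 and 3/4 = 0.75):
// {"words": {"order": 0, "matchingWords": 2, "maxMatchingWords": 2, "score": 1.0},
//  "typo":  {"order": 1, "typoCount": 1, "maxTypoCount": 3, "score": 0.75}}
println!("{}", serde_json::Value::Object(map));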

/// The strategy to compute scores.
///
/// It makes sense to pass down this strategy to the internals of the search, because
/// some optimizations (today, mainly skipping ranking rules for universes of a single document)
/// are not correct to do when computing the scores.
///
/// This strategy could feasibly be extended to differentiate between the normalized score and the
/// detailed scores, but it is not useful today as the normalized score is *derived from* the
/// detailed scores.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum ScoringStrategy {
/// Don't compute scores
#[default]
Skip,
/// Compute detailed scores
Detailed,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Words {
pub matching_words: u32,
pub max_matching_words: u32,
}

impl Words {
pub fn rank(&self) -> Rank {
Rank { rank: self.matching_words, max_rank: self.max_matching_words }
}

pub(crate) fn from_rank(rank: Rank) -> Words {
Words { matching_words: rank.rank, max_matching_words: rank.max_rank }
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Typo {
pub typo_count: u32,
pub max_typo_count: u32,
}

impl Typo {
pub fn rank(&self) -> Rank {
Rank {
rank: self.max_typo_count - self.typo_count + 1,
max_rank: (self.max_typo_count + 1),
}
}

// max_rank = max_typo + 1
// max_typo = max_rank - 1
//
// rank = max_typo - typo + 1
// rank = max_rank - 1 - typo + 1
// rank + typo = max_rank
// typo = max_rank - rank
pub fn from_rank(rank: Rank) -> Typo {
Typo { typo_count: rank.max_rank - rank.rank, max_typo_count: rank.max_rank - 1 }
}
}
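A worked instance of the algebra in the comment above: one typo out of a maximum of three gives rank 3 out of a max rank of 4, and `from_rank` inverts it exactly (values chosen for illustration):

// rank = max_typo - typo + 1 = 3 - 1 + 1 = 3; max_rank = max_typo + 1 = 4.
let typo = Typo { typo_count: 1, max_typo_count: 3 };
let rank = typo.rank();
assert_eq!(rank, Rank { rank: 3, max_rank: 4 });
// from_rank recovers the counts: typo = 4 - 3 = 1, max_typo = 4 - 1 = 3.
assert_eq!(Typo::from_rank(rank), typo);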

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Rank {
/// The ordinal rank, such that `max_rank` is the first rank, and 0 is the last rank.
///
/// The higher the better. Documents with a rank of 0 have a score of 0 and are typically never returned
/// (they don't match the query).
pub rank: u32,
/// The maximum possible rank. Documents with this rank have a score of 1.
///
/// The max rank should not be 0.
pub max_rank: u32,
}

impl Rank {
pub fn local_score(self) -> f64 {
self.rank as f64 / self.max_rank as f64
}

pub fn global_score(details: impl Iterator<Item = Self>) -> f64 {
let mut rank = Rank { rank: 1, max_rank: 1 };
for inner_rank in details {
rank.rank -= 1;

rank.rank *= inner_rank.max_rank;
rank.max_rank *= inner_rank.max_rank;

rank.rank += inner_rank.rank;
}
rank.local_score()
}
}
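`global_score` folds the per-rule ranks together like the digits of a mixed-radix number, with the first rule as the most significant digit. A small check of that arithmetic, with invented ranks:

// With ranks 2/3 then 1/2 the loop computes ((2 - 1) * 2 + 1) / (3 * 2) = 3/6.
let score = Rank::global_score(
    [Rank { rank: 2, max_rank: 3 }, Rank { rank: 1, max_rank: 2 }].into_iter(),
);
assert!((score - 0.5).abs() < f64::EPSILON);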

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum ExactAttribute {
ExactMatch,
MatchesStart,
NoExactMatch,
}

impl ExactAttribute {
pub fn rank(&self) -> Rank {
let rank = match self {
ExactAttribute::ExactMatch => 3,
ExactAttribute::MatchesStart => 2,
ExactAttribute::NoExactMatch => 1,
};
Rank { rank, max_rank: 3 }
}
}

#[derive(Debug, Clone, PartialEq)]
pub struct Sort {
pub field_name: String,
pub ascending: bool,
pub redacted: bool,
pub value: serde_json::Value,
}

#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
pub struct GeoSort {
pub target_point: [f64; 2],
pub ascending: bool,
pub value: Option<[f64; 2]>,
}

impl GeoSort {
pub fn distance(&self) -> Option<f64> {
self.value.map(|value| distance_between_two_points(&self.target_point, &value))
}
}
@@ -7,6 +7,7 @@ use roaring::bitmap::RoaringBitmap;
pub use self::facet::{FacetDistribution, Filter, DEFAULT_VALUES_PER_FACET};
pub use self::new::matches::{FormatOptions, MatchBounds, Matcher, MatcherBuilder, MatchingWords};
use self::new::PartialSearchResult;
use crate::score_details::{ScoreDetails, ScoringStrategy};
use crate::{
execute_search, AscDesc, DefaultSearchLogger, DocumentId, Index, Result, SearchContext,
};
@@ -29,6 +30,7 @@ pub struct Search<'a> {
sort_criteria: Option<Vec<AscDesc>>,
geo_strategy: new::GeoSortStrategy,
terms_matching_strategy: TermsMatchingStrategy,
scoring_strategy: ScoringStrategy,
words_limit: usize,
exhaustive_number_hits: bool,
rtxn: &'a heed::RoTxn<'a>,
@@ -45,6 +47,7 @@ impl<'a> Search<'a> {
sort_criteria: None,
geo_strategy: new::GeoSortStrategy::default(),
terms_matching_strategy: TermsMatchingStrategy::default(),
scoring_strategy: Default::default(),
exhaustive_number_hits: false,
words_limit: 10,
rtxn,
@@ -77,6 +80,11 @@ impl<'a> Search<'a> {
self
}

pub fn scoring_strategy(&mut self, value: ScoringStrategy) -> &mut Search<'a> {
self.scoring_strategy = value;
self
}

pub fn words_limit(&mut self, value: usize) -> &mut Search<'a> {
self.words_limit = value;
self
@@ -93,7 +101,7 @@ impl<'a> Search<'a> {
self
}

/// Force the search to exhastivelly compute the number of candidates,
/// Forces the search to exhaustively compute the number of candidates,
/// this will increase the search time but allows finite pagination.
pub fn exhaustive_number_hits(&mut self, exhaustive_number_hits: bool) -> &mut Search<'a> {
self.exhaustive_number_hits = exhaustive_number_hits;
@@ -102,11 +110,12 @@ impl<'a> Search<'a> {

pub fn execute(&self) -> Result<SearchResult> {
let mut ctx = SearchContext::new(self.index, self.rtxn);
let PartialSearchResult { located_query_terms, candidates, documents_ids } =
let PartialSearchResult { located_query_terms, candidates, documents_ids, document_scores } =
execute_search(
&mut ctx,
&self.query,
self.terms_matching_strategy,
self.scoring_strategy,
self.exhaustive_number_hits,
&self.filter,
&self.sort_criteria,
@@ -124,7 +133,7 @@ impl<'a> Search<'a> {
None => MatchingWords::default(),
};

Ok(SearchResult { matching_words, candidates, documents_ids })
Ok(SearchResult { matching_words, candidates, document_scores, documents_ids })
}
}

@@ -138,6 +147,7 @@ impl fmt::Debug for Search<'_> {
sort_criteria,
geo_strategy: _,
terms_matching_strategy,
scoring_strategy,
words_limit,
exhaustive_number_hits,
rtxn: _,
@@ -150,6 +160,7 @@ impl fmt::Debug for Search<'_> {
.field("limit", limit)
.field("sort_criteria", sort_criteria)
.field("terms_matching_strategy", terms_matching_strategy)
.field("scoring_strategy", scoring_strategy)
.field("exhaustive_number_hits", exhaustive_number_hits)
.field("words_limit", words_limit)
.finish()
@@ -160,8 +171,8 @@ impl fmt::Debug for Search<'_> {
pub struct SearchResult {
pub matching_words: MatchingWords,
pub candidates: RoaringBitmap,
// TODO those documents ids should be associated with their criteria scores.
pub documents_ids: Vec<DocumentId>,
pub document_scores: Vec<Vec<ScoreDetails>>,
}
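A hedged sketch of how a caller might combine the new `scoring_strategy` setter with the `document_scores` field; the index and transaction setup are elided and the query string is invented:

// Hypothetical caller; `rtxn` and `index` come from the usual milli setup.
let mut search = Search::new(&rtxn, &index);
search.query("hello world");
search.scoring_strategy(ScoringStrategy::Detailed);
let SearchResult { documents_ids, document_scores, .. } = search.execute()?;
// `document_scores` is parallel to `documents_ids`: one Vec<ScoreDetails>
// (one entry per ranking rule) for each returned document.
for (docid, details) in documents_ids.iter().zip(&document_scores) {
    println!("{docid}: {}", ScoreDetails::global_score(details.iter()));
}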
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
|
||||
@@ -3,14 +3,18 @@ use roaring::RoaringBitmap;
|
||||
use super::logger::SearchLogger;
|
||||
use super::ranking_rules::{BoxRankingRule, RankingRuleQueryTrait};
|
||||
use super::SearchContext;
|
||||
use crate::score_details::{ScoreDetails, ScoringStrategy};
|
||||
use crate::search::new::distinct::{apply_distinct_rule, distinct_single_docid, DistinctOutput};
|
||||
use crate::Result;
|
||||
|
||||
pub struct BucketSortOutput {
|
||||
pub docids: Vec<u32>,
|
||||
pub scores: Vec<Vec<ScoreDetails>>,
|
||||
pub all_candidates: RoaringBitmap,
|
||||
}
|
||||
|
||||
// TODO: would probably be good to regroup some of these inside of a struct?
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
|
||||
ctx: &mut SearchContext<'ctx>,
|
||||
mut ranking_rules: Vec<BoxRankingRule<'ctx, Q>>,
|
||||
@@ -18,6 +22,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
|
||||
universe: &RoaringBitmap,
|
||||
from: usize,
|
||||
length: usize,
|
||||
scoring_strategy: ScoringStrategy,
|
||||
logger: &mut dyn SearchLogger<Q>,
|
||||
) -> Result<BucketSortOutput> {
|
||||
logger.initial_query(query);
|
||||
@@ -31,7 +36,11 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
|
||||
};
|
||||
|
||||
if universe.len() < from as u64 {
|
||||
return Ok(BucketSortOutput { docids: vec![], all_candidates: universe.clone() });
|
||||
return Ok(BucketSortOutput {
|
||||
docids: vec![],
|
||||
scores: vec![],
|
||||
all_candidates: universe.clone(),
|
||||
});
|
||||
}
|
||||
if ranking_rules.is_empty() {
|
||||
if let Some(distinct_fid) = distinct_fid {
|
||||
@@ -49,22 +58,32 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
|
||||
}
|
||||
let mut all_candidates = universe - excluded;
|
||||
all_candidates.extend(results.iter().copied());
|
||||
return Ok(BucketSortOutput { docids: results, all_candidates });
|
||||
return Ok(BucketSortOutput {
|
||||
scores: vec![Default::default(); results.len()],
|
||||
docids: results,
|
||||
all_candidates,
|
||||
});
|
||||
} else {
|
||||
let docids = universe.iter().skip(from).take(length).collect();
|
||||
return Ok(BucketSortOutput { docids, all_candidates: universe.clone() });
|
||||
let docids: Vec<u32> = universe.iter().skip(from).take(length).collect();
|
||||
return Ok(BucketSortOutput {
|
||||
scores: vec![Default::default(); docids.len()],
|
||||
docids,
|
||||
all_candidates: universe.clone(),
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
let ranking_rules_len = ranking_rules.len();
|
||||
|
||||
logger.start_iteration_ranking_rule(0, ranking_rules[0].as_ref(), query, universe);
|
||||
|
||||
ranking_rules[0].start_iteration(ctx, logger, universe, query)?;
|
||||
|
||||
let mut ranking_rule_scores: Vec<ScoreDetails> = vec![];
|
||||
|
||||
let mut ranking_rule_universes: Vec<RoaringBitmap> =
|
||||
vec![RoaringBitmap::default(); ranking_rules_len];
|
||||
ranking_rule_universes[0] = universe.clone();
|
||||
|
||||
let mut cur_ranking_rule_index = 0;
|
||||
|
||||
/// Finish iterating over the current ranking rule, yielding
|
||||
@@ -89,11 +108,16 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
|
||||
} else {
|
||||
cur_ranking_rule_index -= 1;
|
||||
}
|
||||
// FIXME: check off by one
|
||||
if ranking_rule_scores.len() > cur_ranking_rule_index {
|
||||
ranking_rule_scores.pop();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let mut all_candidates = universe.clone();
|
||||
let mut valid_docids = vec![];
|
||||
let mut valid_scores = vec![];
|
||||
let mut cur_offset = 0usize;
|
||||
|
||||
macro_rules! maybe_add_to_results {
|
||||
@@ -104,37 +128,39 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
|
||||
length,
|
||||
logger,
|
||||
&mut valid_docids,
|
||||
&mut valid_scores,
|
||||
&mut all_candidates,
|
||||
&mut ranking_rule_universes,
|
||||
&mut ranking_rules,
|
||||
cur_ranking_rule_index,
|
||||
&mut cur_offset,
|
||||
distinct_fid,
|
||||
&ranking_rule_scores,
|
||||
$candidates,
|
||||
)?;
|
||||
};
|
||||
}
|
||||
|
||||
while valid_docids.len() < length {
|
||||
// The universe for this bucket is zero or one element, so we don't need to sort
|
||||
// anything, just extend the results and go back to the parent ranking rule.
|
||||
if ranking_rule_universes[cur_ranking_rule_index].len() <= 1 {
|
||||
// The universe for this bucket is zero, so we don't need to sort
|
||||
// anything, just go back to the parent ranking rule.
|
||||
if ranking_rule_universes[cur_ranking_rule_index].is_empty()
|
||||
|| (scoring_strategy == ScoringStrategy::Skip
|
||||
&& ranking_rule_universes[cur_ranking_rule_index].len() == 1)
|
||||
{
|
||||
let bucket = std::mem::take(&mut ranking_rule_universes[cur_ranking_rule_index]);
|
||||
maybe_add_to_results!(bucket);
|
||||
back!();
|
||||
continue;
|
||||
}
|
||||
|
||||
let Some(next_bucket) = ranking_rules[cur_ranking_rule_index].next_bucket(
|
||||
ctx,
|
||||
logger,
|
||||
&ranking_rule_universes[cur_ranking_rule_index],
|
||||
)?
|
||||
else {
|
||||
let Some(next_bucket) = ranking_rules[cur_ranking_rule_index].next_bucket(ctx, logger, &ranking_rule_universes[cur_ranking_rule_index])? else {
|
||||
back!();
|
||||
continue;
|
||||
};
|
||||
|
||||
ranking_rule_scores.push(next_bucket.score);
|
||||
|
||||
logger.next_bucket_ranking_rule(
|
||||
cur_ranking_rule_index,
|
||||
ranking_rules[cur_ranking_rule_index].as_ref(),
|
||||
@@ -148,10 +174,12 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
|
||||
ranking_rule_universes[cur_ranking_rule_index] -= &next_bucket.candidates;
|
||||
|
||||
if cur_ranking_rule_index == ranking_rules_len - 1
|
||||
|| next_bucket.candidates.len() <= 1
|
||||
|| (scoring_strategy == ScoringStrategy::Skip && next_bucket.candidates.len() <= 1)
|
||||
|| cur_offset + (next_bucket.candidates.len() as usize) < from
|
||||
{
|
||||
maybe_add_to_results!(next_bucket.candidates);
|
||||
// FIXME: use index based logic like all the other rules so that you don't have to maintain the pop/push?
|
||||
ranking_rule_scores.pop();
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -171,7 +199,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(BucketSortOutput { docids: valid_docids, all_candidates })
|
||||
Ok(BucketSortOutput { docids: valid_docids, scores: valid_scores, all_candidates })
|
||||
}
|
||||
|
||||
/// Add the candidates to the results. Take `distinct`, `from`, `length`, and `cur_offset`
|
||||
@@ -184,14 +212,18 @@ fn maybe_add_to_results<'ctx, Q: RankingRuleQueryTrait>(
|
||||
logger: &mut dyn SearchLogger<Q>,
|
||||
|
||||
valid_docids: &mut Vec<u32>,
|
||||
valid_scores: &mut Vec<Vec<ScoreDetails>>,
|
||||
all_candidates: &mut RoaringBitmap,
|
||||
|
||||
ranking_rule_universes: &mut [RoaringBitmap],
|
||||
ranking_rules: &mut [BoxRankingRule<'ctx, Q>],
|
||||
|
||||
cur_ranking_rule_index: usize,
|
||||
|
||||
cur_offset: &mut usize,
|
||||
|
||||
distinct_fid: Option<u16>,
|
||||
ranking_rule_scores: &[ScoreDetails],
|
||||
candidates: RoaringBitmap,
|
||||
) -> Result<()> {
|
||||
// First apply the distinct rule on the candidates, reducing the universes if necessary
|
||||
@@ -236,13 +268,17 @@ fn maybe_add_to_results<'ctx, Q: RankingRuleQueryTrait>(
|
||||
let candidates =
|
||||
candidates.iter().take(length - valid_docids.len()).copied().collect::<Vec<_>>();
|
||||
logger.add_to_results(&candidates);
|
||||
valid_docids.extend(&candidates);
|
||||
valid_docids.extend_from_slice(&candidates);
|
||||
valid_scores
|
||||
.extend(std::iter::repeat(ranking_rule_scores.to_owned()).take(candidates.len()));
|
||||
}
|
||||
} else {
|
||||
// if we have passed the offset already, add some of the documents (up to the limit)
|
||||
let candidates = candidates.iter().take(length - valid_docids.len()).collect::<Vec<u32>>();
|
||||
logger.add_to_results(&candidates);
|
||||
valid_docids.extend(&candidates);
|
||||
valid_docids.extend_from_slice(&candidates);
|
||||
valid_scores
|
||||
.extend(std::iter::repeat(ranking_rule_scores.to_owned()).take(candidates.len()));
|
||||
}
|
||||
|
||||
*cur_offset += candidates.len() as usize;
|
||||
|
||||
@@ -2,6 +2,7 @@ use roaring::{MultiOps, RoaringBitmap};
|
||||
|
||||
use super::query_graph::QueryGraph;
|
||||
use super::ranking_rules::{RankingRule, RankingRuleOutput};
|
||||
use crate::score_details::{self, ScoreDetails};
|
||||
use crate::search::new::query_graph::QueryNodeData;
|
||||
use crate::search::new::query_term::ExactTerm;
|
||||
use crate::{Result, SearchContext, SearchLogger};
|
||||
@@ -244,7 +245,13 @@ impl State {
|
||||
candidates &= universe;
|
||||
(
|
||||
State::AttributeStarts(query_graph.clone(), candidates_per_attribute),
|
||||
Some(RankingRuleOutput { query: query_graph, candidates }),
|
||||
Some(RankingRuleOutput {
|
||||
query: query_graph,
|
||||
candidates,
|
||||
score: ScoreDetails::ExactAttribute(
|
||||
score_details::ExactAttribute::ExactMatch,
|
||||
),
|
||||
}),
|
||||
)
|
||||
}
|
||||
State::AttributeStarts(query_graph, candidates_per_attribute) => {
|
||||
@@ -257,12 +264,24 @@ impl State {
|
||||
candidates &= universe;
|
||||
(
|
||||
State::Empty(query_graph.clone()),
|
||||
Some(RankingRuleOutput { query: query_graph, candidates }),
|
||||
Some(RankingRuleOutput {
|
||||
query: query_graph,
|
||||
candidates,
|
||||
score: ScoreDetails::ExactAttribute(
|
||||
score_details::ExactAttribute::MatchesStart,
|
||||
),
|
||||
}),
|
||||
)
|
||||
}
|
||||
State::Empty(query_graph) => (
|
||||
State::Empty(query_graph.clone()),
|
||||
Some(RankingRuleOutput { query: query_graph, candidates: universe.clone() }),
|
||||
Some(RankingRuleOutput {
|
||||
query: query_graph,
|
||||
candidates: universe.clone(),
|
||||
score: ScoreDetails::ExactAttribute(
|
||||
score_details::ExactAttribute::NoExactMatch,
|
||||
),
|
||||
}),
|
||||
),
|
||||
};
|
||||
(state, output)
|
||||
|
||||
@@ -8,6 +8,7 @@ use rstar::RTree;
|
||||
|
||||
use super::ranking_rules::{RankingRule, RankingRuleOutput, RankingRuleQueryTrait};
|
||||
use crate::heed_codec::facet::{FieldDocIdFacetCodec, OrderedF64Codec};
|
||||
use crate::score_details::{self, ScoreDetails};
|
||||
use crate::{
|
||||
distance_between_two_points, lat_lng_to_xyz, GeoPoint, Index, Result, SearchContext,
|
||||
SearchLogger,
|
||||
@@ -80,7 +81,7 @@ pub struct GeoSort<Q: RankingRuleQueryTrait> {
|
||||
field_ids: Option<[u16; 2]>,
|
||||
rtree: Option<RTree<GeoPoint>>,
|
||||
|
||||
cached_sorted_docids: VecDeque<u32>,
|
||||
cached_sorted_docids: VecDeque<(u32, [f64; 2])>,
|
||||
geo_candidates: RoaringBitmap,
|
||||
}
|
||||
|
||||
@@ -130,7 +131,7 @@ impl<Q: RankingRuleQueryTrait> GeoSort<Q> {
|
||||
let point = lat_lng_to_xyz(&self.point);
|
||||
for point in rtree.nearest_neighbor_iter(&point) {
|
||||
if self.geo_candidates.contains(point.data.0) {
|
||||
self.cached_sorted_docids.push_back(point.data.0);
|
||||
self.cached_sorted_docids.push_back(point.data);
|
||||
if self.cached_sorted_docids.len() >= cache_size {
|
||||
break;
|
||||
}
|
||||
@@ -142,7 +143,7 @@ impl<Q: RankingRuleQueryTrait> GeoSort<Q> {
|
||||
let point = lat_lng_to_xyz(&opposite_of(self.point));
|
||||
for point in rtree.nearest_neighbor_iter(&point) {
|
||||
if self.geo_candidates.contains(point.data.0) {
|
||||
self.cached_sorted_docids.push_front(point.data.0);
|
||||
self.cached_sorted_docids.push_front(point.data);
|
||||
if self.cached_sorted_docids.len() >= cache_size {
|
||||
break;
|
||||
}
|
||||
@@ -177,7 +178,7 @@ impl<Q: RankingRuleQueryTrait> GeoSort<Q> {
|
||||
// computing the distance between two points is expensive thus we cache the result
|
||||
documents
|
||||
.sort_by_cached_key(|(_, p)| distance_between_two_points(&self.point, p) as usize);
|
||||
self.cached_sorted_docids.extend(documents.into_iter().map(|(doc_id, _)| doc_id));
|
||||
self.cached_sorted_docids.extend(documents.into_iter());
|
||||
};
|
||||
|
||||
Ok(())
|
||||
@@ -220,12 +221,19 @@ impl<'ctx, Q: RankingRuleQueryTrait> RankingRule<'ctx, Q> for GeoSort<Q> {
|
||||
logger: &mut dyn SearchLogger<Q>,
|
||||
universe: &RoaringBitmap,
|
||||
) -> Result<Option<RankingRuleOutput<Q>>> {
|
||||
assert!(universe.len() > 1);
|
||||
let query = self.query.as_ref().unwrap().clone();
|
||||
self.geo_candidates &= universe;
|
||||
|
||||
if self.geo_candidates.is_empty() {
|
||||
return Ok(Some(RankingRuleOutput { query, candidates: universe.clone() }));
|
||||
return Ok(Some(RankingRuleOutput {
|
||||
query,
|
||||
candidates: universe.clone(),
|
||||
score: ScoreDetails::GeoSort(score_details::GeoSort {
|
||||
target_point: self.point,
|
||||
ascending: self.ascending,
|
||||
value: None,
|
||||
}),
|
||||
}));
|
||||
}
|
||||
|
||||
let ascending = self.ascending;
|
||||
@@ -236,11 +244,16 @@ impl<'ctx, Q: RankingRuleQueryTrait> RankingRule<'ctx, Q> for GeoSort<Q> {
|
||||
cache.pop_back()
|
||||
}
|
||||
};
|
||||
while let Some(id) = next(&mut self.cached_sorted_docids) {
|
||||
while let Some((id, point)) = next(&mut self.cached_sorted_docids) {
|
||||
if self.geo_candidates.contains(id) {
|
||||
return Ok(Some(RankingRuleOutput {
|
||||
query,
|
||||
candidates: RoaringBitmap::from_iter([id]),
|
||||
score: ScoreDetails::GeoSort(score_details::GeoSort {
|
||||
target_point: self.point,
|
||||
ascending: self.ascending,
|
||||
value: Some(point),
|
||||
}),
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -50,6 +50,7 @@ use super::ranking_rule_graph::{
|
||||
};
|
||||
use super::small_bitmap::SmallBitmap;
|
||||
use super::{QueryGraph, RankingRule, RankingRuleOutput, SearchContext};
|
||||
use crate::score_details::Rank;
|
||||
use crate::search::new::query_term::LocatedQueryTermSubset;
|
||||
use crate::search::new::ranking_rule_graph::PathVisitor;
|
||||
use crate::{Result, TermsMatchingStrategy};
|
||||
@@ -118,6 +119,8 @@ pub struct GraphBasedRankingRuleState<G: RankingRuleGraphTrait> {
|
||||
all_costs: MappedInterner<QueryNode, Vec<u64>>,
|
||||
/// An index in the first element of `all_distances`, giving the cost of the next bucket
|
||||
cur_cost: u64,
|
||||
/// One above the highest possible cost for this rule
|
||||
next_max_cost: u64,
|
||||
}
|
||||
|
||||
impl<'ctx, G: RankingRuleGraphTrait> RankingRule<'ctx, QueryGraph> for GraphBasedRankingRule<G> {
|
||||
@@ -131,7 +134,20 @@ impl<'ctx, G: RankingRuleGraphTrait> RankingRule<'ctx, QueryGraph> for GraphBase
|
||||
_universe: &RoaringBitmap,
|
||||
query_graph: &QueryGraph,
|
||||
) -> Result<()> {
|
||||
// the `next_max_cost` is the successor integer to the maximum cost of the paths in the graph.
|
||||
//
|
||||
// When there is a matching strategy, it also factors the additional costs of:
|
||||
// 1. The words that are matched in phrases
|
||||
// 2. Skipping words (by adding them to the paths with a cost)
|
||||
let mut next_max_cost = 1;
|
||||
let removal_cost = if let Some(terms_matching_strategy) = self.terms_matching_strategy {
|
||||
// add the cost of the phrase to the next_max_cost
|
||||
next_max_cost += query_graph
|
||||
.words_in_phrases_count(ctx)
|
||||
// remove 1 from the words in phrases count, because when there is a phrase we can now have a document
|
||||
// where only the phrase is matching, and none of the non-phrase words.
|
||||
// With the `1` that `next_max_cost` is initialized with, this gets counted twice.
|
||||
.saturating_sub(1) as u64;
|
||||
match terms_matching_strategy {
|
||||
TermsMatchingStrategy::Last => {
|
||||
let removal_order =
|
||||
@@ -139,13 +155,12 @@ impl<'ctx, G: RankingRuleGraphTrait> RankingRule<'ctx, QueryGraph> for GraphBase
|
||||
let mut forbidden_nodes =
|
||||
SmallBitmap::for_interned_values_in(&query_graph.nodes);
|
||||
let mut costs = query_graph.nodes.map(|_| None);
|
||||
let mut cost = 100;
|
||||
// FIXME: this works because only words uses termsmatchingstrategy at the moment.
|
||||
for ns in removal_order {
|
||||
for n in ns.iter() {
|
||||
*costs.get_mut(n) = Some((cost, forbidden_nodes.clone()));
|
||||
*costs.get_mut(n) = Some((1, forbidden_nodes.clone()));
|
||||
}
|
||||
forbidden_nodes.union(&ns);
|
||||
cost += 100;
|
||||
}
|
||||
costs
|
||||
}
|
||||
@@ -162,12 +177,16 @@ impl<'ctx, G: RankingRuleGraphTrait> RankingRule<'ctx, QueryGraph> for GraphBase
|
||||
// Then pre-compute the cost of all paths from each node to the end node
|
||||
let all_costs = graph.find_all_costs_to_end();
|
||||
|
||||
next_max_cost +=
|
||||
all_costs.get(graph.query_graph.root_node).iter().copied().max().unwrap_or(0);
|
||||
|
||||
let state = GraphBasedRankingRuleState {
|
||||
graph,
|
||||
conditions_cache: condition_docids_cache,
|
||||
dead_ends_cache,
|
||||
all_costs,
|
||||
cur_cost: 0,
|
||||
next_max_cost,
|
||||
};
|
||||
|
||||
self.state = Some(state);
|
||||
@@ -181,22 +200,17 @@ impl<'ctx, G: RankingRuleGraphTrait> RankingRule<'ctx, QueryGraph> for GraphBase
|
||||
logger: &mut dyn SearchLogger<QueryGraph>,
|
||||
universe: &RoaringBitmap,
|
||||
) -> Result<Option<RankingRuleOutput<QueryGraph>>> {
|
||||
// If universe.len() <= 1, the bucket sort algorithm
|
||||
// should not have called this function.
|
||||
assert!(universe.len() > 1);
|
||||
// Will crash if `next_bucket` is called before `start_iteration` or after `end_iteration`,
|
||||
// should never happen
|
||||
let mut state = self.state.take().unwrap();
|
||||
|
||||
let all_costs = state.all_costs.get(state.graph.query_graph.root_node);
|
||||
// Retrieve the cost of the paths to compute
|
||||
let Some(&cost) = state
|
||||
.all_costs
|
||||
.get(state.graph.query_graph.root_node)
|
||||
let Some(&cost) = all_costs
|
||||
.iter()
|
||||
.find(|c| **c >= state.cur_cost)
|
||||
else {
|
||||
self.state = None;
|
||||
return Ok(None);
|
||||
.find(|c| **c >= state.cur_cost) else {
|
||||
self.state = None;
|
||||
return Ok(None);
|
||||
};
|
||||
state.cur_cost = cost + 1;
|
||||
|
||||
@@ -208,8 +222,12 @@ impl<'ctx, G: RankingRuleGraphTrait> RankingRule<'ctx, QueryGraph> for GraphBase
|
||||
dead_ends_cache,
|
||||
all_costs,
|
||||
cur_cost: _,
|
||||
next_max_cost,
|
||||
} = &mut state;
|
||||
|
||||
let rank = *next_max_cost - cost;
|
||||
let score = G::rank_to_score(Rank { rank: rank as u32, max_rank: *next_max_cost as u32 });
|
||||
|
||||
let mut universe = universe.clone();
|
||||
|
||||
let mut used_conditions = SmallBitmap::for_interned_values_in(&graph.conditions_interner);
|
||||
@@ -296,8 +314,6 @@ impl<'ctx, G: RankingRuleGraphTrait> RankingRule<'ctx, QueryGraph> for GraphBase
|
||||
|
||||
// We modify the next query graph so that it only contains the subgraph
|
||||
// that was used to compute this bucket
|
||||
// But we only do it in case the bucket length is >1, because otherwise
|
||||
// we know the child ranking rule won't be called anyway
|
||||
|
||||
let paths: Vec<Vec<(Option<LocatedQueryTermSubset>, LocatedQueryTermSubset)>> = good_paths
|
||||
.into_iter()
|
||||
@@ -326,7 +342,7 @@ impl<'ctx, G: RankingRuleGraphTrait> RankingRule<'ctx, QueryGraph> for GraphBase
|
||||
|
||||
self.state = Some(state);
|
||||
|
||||
Ok(Some(RankingRuleOutput { query: next_query_graph, candidates: bucket }))
|
||||
Ok(Some(RankingRuleOutput { query: next_query_graph, candidates: bucket, score }))
|
||||
}
|
||||
|
||||
fn end_iteration(
|
||||
|
||||
@@ -80,9 +80,7 @@ impl MatchingWords {
|
||||
let word = self.word_interner.get(*word);
|
||||
// if the word is a prefix we match using starts_with.
|
||||
if located_words.is_prefix && token.lemma().starts_with(word) {
|
||||
let Some((char_index, c)) =
|
||||
word.char_indices().take(located_words.original_char_count).last()
|
||||
else {
|
||||
let Some((char_index, c)) = word.char_indices().take(located_words.original_char_count).last() else {
|
||||
continue;
|
||||
};
|
||||
let prefix_length = char_index + c.len_utf8();
|
||||
|
||||
@@ -510,6 +510,7 @@ mod tests {
|
||||
&mut ctx,
|
||||
&Some(query.to_string()),
|
||||
crate::TermsMatchingStrategy::default(),
|
||||
crate::score_details::ScoringStrategy::Skip,
|
||||
false,
|
||||
&None,
|
||||
&None,
|
||||
|
||||
@@ -44,6 +44,7 @@ use self::geo_sort::GeoSort;
pub use self::geo_sort::Strategy as GeoSortStrategy;
use self::graph_based_ranking_rule::Words;
use self::interner::Interned;
use crate::score_details::{ScoreDetails, ScoringStrategy};
use crate::search::new::distinct::apply_distinct_rule;
use crate::{AscDesc, DocumentId, Filter, Index, Member, Result, TermsMatchingStrategy, UserError};

@@ -350,6 +351,7 @@ pub fn execute_search(
ctx: &mut SearchContext,
query: &Option<String>,
terms_matching_strategy: TermsMatchingStrategy,
scoring_strategy: ScoringStrategy,
exhaustive_number_hits: bool,
filters: &Option<Filter>,
sort_criteria: &Option<Vec<AscDesc>>,
@@ -411,7 +413,16 @@ pub fn execute_search(
universe =
resolve_universe(ctx, &universe, &graph, terms_matching_strategy, query_graph_logger)?;

bucket_sort(ctx, ranking_rules, &graph, &universe, from, length, query_graph_logger)?
bucket_sort(
ctx,
ranking_rules,
&graph,
&universe,
from,
length,
scoring_strategy,
query_graph_logger,
)?
} else {
let ranking_rules =
get_ranking_rules_for_placeholder_search(ctx, sort_criteria, geo_strategy)?;
@@ -422,17 +433,20 @@ pub fn execute_search(
&universe,
from,
length,
scoring_strategy,
placeholder_search_logger,
)?
};

let BucketSortOutput { docids, mut all_candidates } = bucket_sort_output;
let BucketSortOutput { docids, scores, mut all_candidates } = bucket_sort_output;

let fields_ids_map = ctx.index.fields_ids_map(ctx.txn)?;

// The candidates is the universe unless the exhaustive number of hits
// is requested and a distinct attribute is set.
if exhaustive_number_hits {
if let Some(f) = ctx.index.distinct_field(ctx.txn)? {
if let Some(distinct_fid) = ctx.index.fields_ids_map(ctx.txn)?.id(f) {
if let Some(distinct_fid) = fields_ids_map.id(f) {
all_candidates = apply_distinct_rule(ctx, distinct_fid, &all_candidates)?.remaining;
}
}
@@ -440,6 +454,7 @@ pub fn execute_search(

Ok(PartialSearchResult {
candidates: all_candidates,
document_scores: scores,
documents_ids: docids,
located_query_terms,
})
@@ -491,4 +506,5 @@ pub struct PartialSearchResult {
pub located_query_terms: Option<Vec<LocatedQueryTerm>>,
pub candidates: RoaringBitmap,
pub documents_ids: Vec<DocumentId>,
pub document_scores: Vec<Vec<ScoreDetails>>,
}
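With `document_scores` now threaded through `execute_search`, a caller opts in via the scoring strategy. A sketch of the call pattern only, assuming a `txn`/`index` pair like the ones set up in the test fixtures later in this diff:

let mut s = Search::new(&txn, &index);
s.query("the quick brown fox");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
// one Vec<ScoreDetails> per returned document
assert_eq!(documents_ids.len(), document_scores.len());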
@@ -342,6 +342,25 @@ impl QueryGraph {
}
res
}

/// Number of words in the phrases in this query graph
pub(crate) fn words_in_phrases_count(&self, ctx: &SearchContext) -> usize {
let mut word_count = 0;
for (_, node) in self.nodes.iter() {
match &node.data {
QueryNodeData::Term(term) => {
let Some(phrase) = term.term_subset.original_phrase(ctx)
else {
continue
};
let phrase = ctx.phrase_interner.get(phrase);
word_count += phrase.words.iter().copied().filter(|a| a.is_some()).count()
}
_ => continue,
}
}
word_count
}
}

fn add_node(nodes_data: &mut Vec<QueryNodeData>, node_data: QueryNodeData) -> u16 {
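A standalone sketch of the counting logic above, assuming phrase words are stored as `Option`s with `None` standing for words dropped from the phrase, as the `filter(|a| a.is_some())` suggests:

fn words_in_phrase(words: &[Option<String>]) -> usize {
    // only concrete words count; `None` placeholders are skipped
    words.iter().filter(|w| w.is_some()).count()
}

fn main() {
    let phrase = vec![Some("quick".into()), None, Some("fox".into())];
    assert_eq!(words_in_phrase(&phrase), 2);
}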
@@ -176,7 +176,9 @@ impl QueryTermSubset {

pub fn use_prefix_db(&self, ctx: &SearchContext) -> Option<Word> {
let original = ctx.term_interner.get(self.original);
let Some(use_prefix_db) = original.zero_typo.use_prefix_db else { return None };
let Some(use_prefix_db) = original.zero_typo.use_prefix_db else {
return None
};
let word = match &self.zero_typo_subset {
NTypoTermSubset::All => Some(use_prefix_db),
NTypoTermSubset::Subset { words, phrases: _ } => {
@@ -262,15 +264,13 @@ impl QueryTermSubset {

match &self.one_typo_subset {
NTypoTermSubset::All => {
let Lazy::Init(OneTypoTerm { split_words: _, one_typo }) = &original.one_typo
else {
let Lazy::Init(OneTypoTerm { split_words: _, one_typo }) = &original.one_typo else {
panic!()
};
result.extend(one_typo.iter().copied().map(Word::Derived))
}
NTypoTermSubset::Subset { words, phrases: _ } => {
let Lazy::Init(OneTypoTerm { split_words: _, one_typo }) = &original.one_typo
else {
let Lazy::Init(OneTypoTerm { split_words: _, one_typo }) = &original.one_typo else {
panic!()
};
result.extend(one_typo.intersection(words).copied().map(Word::Derived));
@@ -280,11 +280,15 @@ impl QueryTermSubset {

match &self.two_typo_subset {
NTypoTermSubset::All => {
let Lazy::Init(TwoTypoTerm { two_typos }) = &original.two_typo else { panic!() };
let Lazy::Init(TwoTypoTerm { two_typos }) = &original.two_typo else {
panic!()
};
result.extend(two_typos.iter().copied().map(Word::Derived));
}
NTypoTermSubset::Subset { words, phrases: _ } => {
let Lazy::Init(TwoTypoTerm { two_typos }) = &original.two_typo else { panic!() };
let Lazy::Init(TwoTypoTerm { two_typos }) = &original.two_typo else {
panic!()
};
result.extend(two_typos.intersection(words).copied().map(Word::Derived));
}
NTypoTermSubset::Nothing => {}
@@ -308,15 +312,13 @@ impl QueryTermSubset {

match &self.one_typo_subset {
NTypoTermSubset::All => {
let Lazy::Init(OneTypoTerm { split_words, one_typo: _ }) = &original.one_typo
else {
let Lazy::Init(OneTypoTerm { split_words, one_typo: _ }) = &original.one_typo else {
panic!();
};
result.extend(split_words.iter().copied());
}
NTypoTermSubset::Subset { phrases, .. } => {
let Lazy::Init(OneTypoTerm { split_words, one_typo: _ }) = &original.one_typo
else {
let Lazy::Init(OneTypoTerm { split_words, one_typo: _ }) = &original.one_typo else {
panic!();
};
if let Some(split_words) = split_words {
@@ -79,7 +79,7 @@ pub fn located_query_terms_from_tokens(
TokenKind::Separator(separator_kind) => {
// add penalty for hard separators
if let SeparatorKind::Hard = separator_kind {
position = position.wrapping_add(1);
position = position.wrapping_add(7);
}

phrase = 'phrase: {
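The one-line change above raises the hard-separator penalty from 1 to 7. A simplified, hypothetical illustration of the effect; this is not the real tokenizer pipeline, just the position bump in isolation:

fn assign_positions(tokens: &[(&str, bool)]) -> Vec<(String, u16)> {
    let mut position: u16 = 0;
    let mut out = Vec::new();
    for &(lemma, is_hard_separator) in tokens {
        if is_hard_separator {
            // the commit raises this penalty from 1 to 7
            position = position.wrapping_add(7);
        } else {
            out.push((lemma.to_string(), position));
            position = position.wrapping_add(1);
        }
    }
    out
}

fn main() {
    let tokens = [("quick", false), (".", true), ("brown", false)];
    // with the old +1 penalty "brown" would sit at position 2; with +7 it lands
    // at 8, pushing word pairs across hard separators into worse cost buckets
    assert_eq!(assign_positions(&tokens), vec![("quick".into(), 0), ("brown".into(), 8)]);
}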
@@ -49,10 +49,15 @@ impl<G: RankingRuleGraphTrait> RankingRuleGraph<G> {
if let Some((cost_of_ignoring, forbidden_nodes)) =
cost_of_ignoring_node.get(dest_idx)
{
let dest = graph_nodes.get(dest_idx);
let dest_size = match &dest.data {
QueryNodeData::Term(term) => term.term_ids.len(),
_ => panic!(),
};
let new_edge_id = edges_store.insert(Some(Edge {
source_node: source_id,
dest_node: dest_idx,
cost: *cost_of_ignoring,
cost: *cost_of_ignoring * dest_size as u32,
condition: None,
nodes_to_skip: forbidden_nodes.clone(),
}));
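The change above scales the cost of skipping a node by its number of term ids, so ignoring an n-gram node is n times as expensive as ignoring a single word. A worked sketch of the arithmetic:

fn skip_edge_cost(cost_of_ignoring: u32, dest_term_ids_len: usize) -> u32 {
    // mirrors `*cost_of_ignoring * dest_size as u32` from the hunk above
    cost_of_ignoring * dest_term_ids_len as u32
}

fn main() {
    assert_eq!(skip_edge_cost(2, 1), 2); // plain word
    assert_eq!(skip_edge_cost(2, 3), 6); // a 3-gram is three times as costly to skip
}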
@@ -1,6 +1,7 @@
use roaring::RoaringBitmap;

use super::{ComputedCondition, RankingRuleGraphTrait};
use crate::score_details::{Rank, ScoreDetails};
use crate::search::new::interner::{DedupInterner, Interned};
use crate::search::new::query_term::{ExactTerm, LocatedQueryTermSubset};
use crate::search::new::resolve_query_graph::compute_query_term_subset_docids;
@@ -84,4 +85,8 @@ impl RankingRuleGraphTrait for ExactnessGraph {

Ok(vec![(0, exact_condition), (dest_node.term_ids.len() as u32, skip_condition)])
}

fn rank_to_score(rank: Rank) -> ScoreDetails {
ScoreDetails::Exactness(rank)
}
}
@@ -2,6 +2,7 @@ use fxhash::FxHashSet;
use roaring::RoaringBitmap;

use super::{ComputedCondition, RankingRuleGraphTrait};
use crate::score_details::{Rank, ScoreDetails};
use crate::search::new::interner::{DedupInterner, Interned};
use crate::search::new::query_term::LocatedQueryTermSubset;
use crate::search::new::resolve_query_graph::compute_query_term_subset_docids_within_field_id;
@@ -68,7 +69,7 @@ impl RankingRuleGraphTrait for FidGraph {
}

let mut edges = vec![];
for fid in all_fields {
for fid in all_fields.iter().copied() {
// TODO: We can improve performances and relevancy by storing
// the term subsets associated to each field ids fetched.
edges.push((
@@ -80,6 +81,35 @@ impl RankingRuleGraphTrait for FidGraph {
));
}

// always lookup the max_fid if we don't already and add an artificial condition for max scoring
let max_fid: Option<u16> = {
if let Some(max_fid) = ctx
.index
.searchable_fields_ids(ctx.txn)?
.map(|field_ids| field_ids.into_iter().max())
{
max_fid
} else {
ctx.index.fields_ids_map(ctx.txn)?.ids().max()
}
};

if let Some(max_fid) = max_fid {
if !all_fields.contains(&max_fid) {
edges.push((
max_fid as u32 * term.term_ids.len() as u32, // TODO improve the fid score i.e. fid^10.
conditions_interner.insert(FidCondition {
term: term.clone(), // TODO remove this ugly clone
fid: max_fid,
}),
));
}
}

Ok(edges)
}

fn rank_to_score(rank: Rank) -> ScoreDetails {
ScoreDetails::Fid(rank)
}
}
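The artificial `FidCondition` above pins the worst attribute cost so a maximum score can be computed even when no document matches in the highest searchable field. Its cost follows the same formula as the real edges; a worked sketch:

fn artificial_fid_cost(max_fid: u16, term_ids_len: usize) -> u32 {
    // mirrors `max_fid as u32 * term.term_ids.len() as u32` above
    max_fid as u32 * term_ids_len as u32
}

fn main() {
    // highest searchable field id 3, two term ids in the node
    assert_eq!(artificial_fid_cost(3, 2), 6);
}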
@@ -41,6 +41,7 @@ use super::interner::{DedupInterner, FixedSizeInterner, Interned, MappedInterner
use super::query_term::LocatedQueryTermSubset;
use super::small_bitmap::SmallBitmap;
use super::{QueryGraph, QueryNode, SearchContext};
use crate::score_details::{Rank, ScoreDetails};
use crate::Result;

pub struct ComputedCondition {
@@ -110,6 +111,9 @@ pub trait RankingRuleGraphTrait: Sized + 'static {
source_node: Option<&LocatedQueryTermSubset>,
dest_node: &LocatedQueryTermSubset,
) -> Result<Vec<(u32, Interned<Self::Condition>)>>;

/// Convert the rank of a path to its corresponding score for the ranking rule
fn rank_to_score(rank: Rank) -> ScoreDetails;
}

/// The graph used by graph-based ranking rules.
@@ -2,6 +2,7 @@ use fxhash::{FxHashMap, FxHashSet};
use roaring::RoaringBitmap;

use super::{ComputedCondition, RankingRuleGraphTrait};
use crate::score_details::{Rank, ScoreDetails};
use crate::search::new::interner::{DedupInterner, Interned};
use crate::search::new::query_term::LocatedQueryTermSubset;
use crate::search::new::resolve_query_graph::compute_query_term_subset_docids_within_position;
@@ -77,6 +78,8 @@ impl RankingRuleGraphTrait for PositionGraph {
let mut positions_for_costs = FxHashMap::<u32, Vec<u16>>::default();

for position in all_positions {
// FIXME: bucketed position???
let distance = position.abs_diff(*term.positions.start());
let cost = {
let mut cost = 0;
for i in 0..term.term_ids.len() {
@@ -84,15 +87,17 @@ impl RankingRuleGraphTrait for PositionGraph {
// Because if two words are in the same bucketed position (e.g. 32) and consecutive,
// then their position cost will be 32+32=64, but an ngram of these two words at the
// same position will have a cost of 32+32+1=65
cost += cost_from_position(position as u32 + i as u32);
cost += cost_from_distance(distance as u32 + i as u32);
}
cost
};
positions_for_costs.entry(cost).or_default().push(position);
}

let mut edges = vec![];
let max_cost = term.term_ids.len() as u32 * 10;
let max_cost_exists = positions_for_costs.contains_key(&max_cost);

let mut edges = vec![];
for (cost, positions) in positions_for_costs {
// TODO: We can improve performances and relevancy by storing
// the term subsets associated to each position fetched
@@ -105,12 +110,25 @@ impl RankingRuleGraphTrait for PositionGraph {
));
}

if !max_cost_exists {
// artificial empty condition for computing max cost
edges.push((
max_cost,
conditions_interner
.insert(PositionCondition { term: term.clone(), positions: Vec::default() }),
));
}

Ok(edges)
}

fn rank_to_score(rank: Rank) -> ScoreDetails {
ScoreDetails::Position(rank)
}
}

fn cost_from_position(sum_positions: u32) -> u32 {
match sum_positions {
fn cost_from_distance(distance: u32) -> u32 {
match distance {
0 => 0,
1 => 1,
2..=4 => 2,
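Only the first arms of the renamed `cost_from_distance` are visible in this hunk. A sketch of the bucketing with a hypothetical fallback for larger distances; the `_` arm is a placeholder, not the real table:

fn cost_from_distance(distance: u32) -> u32 {
    match distance {
        0 => 0,
        1 => 1,
        2..=4 => 2,
        _ => 10, // placeholder: the real table continues with more buckets
    }
}

fn main() {
    assert_eq!(cost_from_distance(3), 2); // distances 2..=4 share one bucket
}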
@@ -12,13 +12,13 @@ pub fn build_edges(
left_term: Option<&LocatedQueryTermSubset>,
right_term: &LocatedQueryTermSubset,
) -> Result<Vec<(u32, Interned<ProximityCondition>)>> {
let right_ngram_length = right_term.term_ids.len();
let right_ngram_max = right_term.term_ids.len().saturating_sub(1);

let Some(left_term) = left_term else {
return Ok(vec![(
(right_ngram_length - 1) as u32,
right_ngram_max as u32,
conditions_interner.insert(ProximityCondition::Term { term: right_term.clone() }),
)]);
)])
};

if left_term.positions.end() + 1 != *right_term.positions.start() {
@@ -29,25 +29,25 @@ pub fn build_edges(
// The remaining query graph represents `the sun .. are beautiful`
// but `sun` and `are` have no proximity condition between them
return Ok(vec![(
(right_ngram_length - 1) as u32,
right_ngram_max as u32,
conditions_interner.insert(ProximityCondition::Term { term: right_term.clone() }),
)]);
}

let mut conditions = vec![];
for cost in right_ngram_length..(7 + right_ngram_length) {
for cost in right_ngram_max..(7 + right_ngram_max) {
conditions.push((
cost as u32,
conditions_interner.insert(ProximityCondition::Uninit {
left_term: left_term.clone(),
right_term: right_term.clone(),
cost: cost as u8,
cost: (cost + 1) as u8,
}),
))
}

conditions.push((
(7 + right_ngram_length) as u32,
(7 + right_ngram_max) as u32,
conditions_interner.insert(ProximityCondition::Term { term: right_term.clone() }),
));
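With `right_ngram_max = term_ids.len() - 1`, the loop's cost window above shifts down by one while the stored proximity becomes `cost + 1`. A sketch of the resulting (edge cost, stored proximity) pairs; for a plain word the edge costs run 0..=6 with proximities 1..=7:

fn proximity_edge_costs(right_term_ids_len: usize) -> Vec<(u32, u8)> {
    let right_ngram_max = right_term_ids_len.saturating_sub(1);
    (right_ngram_max..(7 + right_ngram_max))
        .map(|cost| (cost as u32, (cost + 1) as u8)) // mirrors the loop above
        .collect()
}

fn main() {
    let costs = proximity_edge_costs(1);
    assert_eq!(costs.first(), Some(&(0, 1)));
    assert_eq!(costs.last(), Some(&(6, 7)));
}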
@@ -4,6 +4,7 @@ pub mod compute_docids;
use roaring::RoaringBitmap;

use super::{ComputedCondition, RankingRuleGraphTrait};
use crate::score_details::{Rank, ScoreDetails};
use crate::search::new::interner::{DedupInterner, Interned};
use crate::search::new::query_term::LocatedQueryTermSubset;
use crate::search::new::SearchContext;
@@ -36,4 +37,8 @@ impl RankingRuleGraphTrait for ProximityGraph {
) -> Result<Vec<(u32, Interned<Self::Condition>)>> {
build::build_edges(ctx, conditions_interner, source_term, dest_term)
}

fn rank_to_score(rank: Rank) -> ScoreDetails {
ScoreDetails::Proximity(rank)
}
}
@@ -1,6 +1,7 @@
use roaring::RoaringBitmap;

use super::{ComputedCondition, RankingRuleGraphTrait};
use crate::score_details::{self, Rank, ScoreDetails};
use crate::search::new::interner::{DedupInterner, Interned};
use crate::search::new::query_term::LocatedQueryTermSubset;
use crate::search::new::resolve_query_graph::compute_query_term_subset_docids;
@@ -75,4 +76,8 @@ impl RankingRuleGraphTrait for TypoGraph {
}
Ok(edges)
}

fn rank_to_score(rank: Rank) -> ScoreDetails {
ScoreDetails::Typo(score_details::Typo::from_rank(rank))
}
}
@@ -1,6 +1,7 @@
use roaring::RoaringBitmap;

use super::{ComputedCondition, RankingRuleGraphTrait};
use crate::score_details::{self, Rank, ScoreDetails};
use crate::search::new::interner::{DedupInterner, Interned};
use crate::search::new::query_term::LocatedQueryTermSubset;
use crate::search::new::resolve_query_graph::compute_query_term_subset_docids;
@@ -41,9 +42,10 @@ impl RankingRuleGraphTrait for WordsGraph {
_from: Option<&LocatedQueryTermSubset>,
to_term: &LocatedQueryTermSubset,
) -> Result<Vec<(u32, Interned<Self::Condition>)>> {
Ok(vec![(
to_term.term_ids.len() as u32,
conditions_interner.insert(WordsCondition { term: to_term.clone() }),
)])
Ok(vec![(0, conditions_interner.insert(WordsCondition { term: to_term.clone() }))])
}

fn rank_to_score(rank: Rank) -> ScoreDetails {
ScoreDetails::Words(score_details::Words::from_rank(rank))
}
}
@@ -2,6 +2,7 @@ use roaring::RoaringBitmap;

use super::logger::SearchLogger;
use super::{QueryGraph, SearchContext};
use crate::score_details::ScoreDetails;
use crate::Result;

/// An internal trait implemented by only [`PlaceholderQuery`] and [`QueryGraph`]
@@ -66,4 +67,6 @@ pub struct RankingRuleOutput<Q> {
pub query: Q,
/// The allowed candidates for the child ranking rule
pub candidates: RoaringBitmap,
/// The score for the candidates of the current bucket
pub score: ScoreDetails,
}
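Every ranking rule now emits a `ScoreDetails` next to its bucket. A sketch of the extended construction, reusing the names from the graph-based hunk earlier in this diff; illustrative only, not a new API surface:

Ok(Some(RankingRuleOutput {
    query: next_query_graph,
    candidates: bucket,
    score: G::rank_to_score(Rank { rank, max_rank }),
}))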
@@ -1,9 +1,11 @@
use heed::BytesDecode;
use roaring::RoaringBitmap;

use super::logger::SearchLogger;
use super::{RankingRule, RankingRuleOutput, RankingRuleQueryTrait, SearchContext};
use crate::heed_codec::facet::FacetGroupKeyCodec;
use crate::heed_codec::ByteSliceRefCodec;
use crate::heed_codec::facet::{FacetGroupKeyCodec, OrderedF64Codec};
use crate::heed_codec::{ByteSliceRefCodec, StrRefCodec};
use crate::score_details::{self, ScoreDetails};
use crate::search::facet::{ascending_facet_sort, descending_facet_sort};
use crate::{FieldId, Index, Result};

@@ -49,6 +51,7 @@ pub struct Sort<'ctx, Query> {
is_ascending: bool,
original_query: Option<Query>,
iter: Option<RankingRuleOutputIterWrapper<'ctx, Query>>,
must_redact: bool,
}
impl<'ctx, Query> Sort<'ctx, Query> {
pub fn new(
@@ -59,15 +62,30 @@ impl<'ctx, Query> Sort<'ctx, Query> {
) -> Result<Self> {
let fields_ids_map = index.fields_ids_map(rtxn)?;
let field_id = fields_ids_map.id(&field_name);
let must_redact = Self::must_redact(index, rtxn, &field_name)?;

Ok(Self { field_name, field_id, is_ascending, original_query: None, iter: None })
Ok(Self {
field_name,
field_id,
is_ascending,
original_query: None,
iter: None,
must_redact,
})
}

fn must_redact(index: &Index, rtxn: &'ctx heed::RoTxn, field_name: &str) -> Result<bool> {
let Some(displayed_fields) = index.displayed_fields(rtxn)?
else { return Ok(false); };

Ok(!displayed_fields.iter().any(|&field| field == field_name))
}
}

impl<'ctx, Query: RankingRuleQueryTrait> RankingRule<'ctx, Query> for Sort<'ctx, Query> {
fn id(&self) -> String {
let Self { field_name, is_ascending, .. } = self;
format!("{field_name}:{}", if *is_ascending { "asc" } else { "desc " })
format!("{field_name}:{}", if *is_ascending { "asc" } else { "desc" })
}
fn start_iteration(
&mut self,
@@ -118,12 +136,45 @@ impl<'ctx, Query: RankingRuleQueryTrait> RankingRule<'ctx, Query> for Sort<'ctx,

(itertools::Either::Right(number_iter), itertools::Either::Right(string_iter))
};
let number_iter = number_iter.map(|r| -> Result<_> {
let (docids, bytes) = r?;
Ok((
docids,
serde_json::Value::Number(
serde_json::Number::from_f64(
OrderedF64Codec::bytes_decode(bytes).expect("some number"),
)
.expect("too big float"),
),
))
});
let string_iter = string_iter.map(|r| -> Result<_> {
let (docids, bytes) = r?;
Ok((
docids,
serde_json::Value::String(
StrRefCodec::bytes_decode(bytes).expect("some string").to_owned(),
),
))
});

let query_graph = parent_query.clone();
let ascending = self.is_ascending;
let field_name = self.field_name.clone();
let must_redact = self.must_redact;
RankingRuleOutputIterWrapper::new(Box::new(number_iter.chain(string_iter).map(
move |r| {
let (docids, _) = r?;
Ok(RankingRuleOutput { query: query_graph.clone(), candidates: docids })
let (docids, value) = r?;
Ok(RankingRuleOutput {
query: query_graph.clone(),
candidates: docids,
score: ScoreDetails::Sort(score_details::Sort {
field_name: field_name.clone(),
ascending,
redacted: must_redact,
value,
}),
})
},
)))
}
@@ -150,7 +201,16 @@ impl<'ctx, Query: RankingRuleQueryTrait> RankingRule<'ctx, Query> for Sort<'ctx,
Ok(Some(bucket))
} else {
let query = self.original_query.as_ref().unwrap().clone();
Ok(Some(RankingRuleOutput { query, candidates: universe.clone() }))
Ok(Some(RankingRuleOutput {
query,
candidates: universe.clone(),
score: ScoreDetails::Sort(score_details::Sort {
field_name: self.field_name.clone(),
ascending: self.is_ascending,
redacted: self.must_redact,
value: serde_json::Value::Null,
}),
}))
}
}
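The redaction flag is computed once at construction: a sort field that is not among the displayed fields must not leak its values through the score details. A compact standalone sketch of the rule shown above:

fn must_redact(displayed_fields: Option<&[&str]>, field_name: &str) -> bool {
    match displayed_fields {
        // no explicit displayed-fields list: everything is displayable
        None => false,
        Some(fields) => !fields.iter().any(|&f| f == field_name),
    }
}

fn main() {
    assert!(!must_redact(None, "price"));
    assert!(must_redact(Some(["title"].as_slice()), "price"));
    assert!(!must_redact(Some(["price", "title"].as_slice()), "price"));
}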
@@ -122,8 +122,11 @@ fn test_attribute_fid_simple() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[2, 6, 5, 4, 3, 9, 7, 8, 11, 10, 12, 13, 14, 0]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));
}

#[test]
@@ -135,6 +138,11 @@ fn test_attribute_fid_ngrams() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[2, 6, 5, 4, 3, 9, 7, 8, 11, 10, 12, 13, 14, 0]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));
}
@@ -40,68 +40,68 @@ fn create_index() -> TempIndex {
},
{
"id": 5,
"text": "a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
"text": "a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
the quick brown fox",
},
{
"id": 6,
"text": "quick a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
"text": "quick a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
brown",
},
{
"id": 7,
"text": "a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
"text": "a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
quickbrown",
},
{
"id": 8,
"text": "a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
"text": "a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
quick brown",
},
{
"id": 9,
"text": "a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
"text": "a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
quickbrown",
},
{
@@ -137,8 +137,13 @@ fn test_attribute_position_simple() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.query("quick brown");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[10, 11, 12, 13, 2, 3, 4, 1, 0, 6, 8, 7, 9, 5]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));
}
#[test]
fn test_attribute_position_repeated() {
@@ -149,8 +154,13 @@ fn test_attribute_position_repeated() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.query("a a a a a");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[5, 7, 8, 9, 6]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));
}

#[test]
@@ -162,8 +172,13 @@ fn test_attribute_position_different_fields() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.query("quick brown");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[10, 11, 12, 13, 2, 3, 4, 1, 0, 6, 8, 7, 9, 5]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));
}

#[test]
@@ -175,6 +190,11 @@ fn test_attribute_position_ngrams() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.query("quick brown");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[10, 11, 12, 13, 2, 3, 4, 1, 0, 6, 8, 7, 9, 5]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));
}
@@ -474,8 +474,14 @@ fn test_exactness_simple_ordered() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[9, 8, 7, 6, 5, 4, 3, 2, 1]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));

let texts = collect_field_values(&index, &txn, "text", &documents_ids);
insta::assert_debug_snapshot!(texts, @r###"
[
@@ -501,8 +507,14 @@ fn test_exactness_simple_reversed() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[9, 8, 3, 4, 5, 6, 7]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));

let texts = collect_field_values(&index, &txn, "text", &documents_ids);
insta::assert_debug_snapshot!(texts, @r###"
[
@@ -519,8 +531,14 @@ fn test_exactness_simple_reversed() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[9, 8, 3, 4, 5, 6, 7]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));

let texts = collect_field_values(&index, &txn, "text", &documents_ids);
insta::assert_debug_snapshot!(texts, @r###"
[
@@ -544,8 +562,14 @@ fn test_exactness_simple_random() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[8, 7, 4, 6, 3, 5]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));

let texts = collect_field_values(&index, &txn, "text", &documents_ids);
insta::assert_debug_snapshot!(texts, @r###"
[
@@ -568,8 +592,14 @@ fn test_exactness_attribute_starts_with_simple() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("this balcony");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[2, 1, 0]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));

let texts = collect_field_values(&index, &txn, "text", &documents_ids);
insta::assert_debug_snapshot!(texts, @r###"
[
@@ -589,8 +619,14 @@ fn test_exactness_attribute_starts_with_phrase() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("\"overlooking the sea\" is a beautiful balcony");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[6, 5, 4, 1]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));

let texts = collect_field_values(&index, &txn, "text", &documents_ids);
insta::assert_debug_snapshot!(texts, @r###"
[
@@ -604,8 +640,14 @@ fn test_exactness_attribute_starts_with_phrase() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("overlooking the sea is a beautiful balcony");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[6, 5, 4, 3, 1, 7]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));

let texts = collect_field_values(&index, &txn, "text", &documents_ids);
insta::assert_debug_snapshot!(texts, @r###"
[
@@ -628,8 +670,14 @@ fn test_exactness_all_candidates_with_typo() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("overlocking the sea is a beautiful balcony");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[4, 5, 6, 1, 7]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));

let texts = collect_field_values(&index, &txn, "text", &documents_ids);
// "overlooking" is returned here because the term matching strategy allows it
// but it has the worst exactness score (0 exact words)
@@ -659,8 +707,14 @@ fn test_exactness_after_words() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[19, 9, 18, 8, 17, 16, 6, 7, 15, 5, 14, 4, 13, 3, 12, 2, 1, 11]");
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));

let texts = collect_field_values(&index, &txn, "text", &documents_ids);

insta::assert_debug_snapshot!(texts, @r###"
@@ -702,7 +756,13 @@ fn test_words_after_exactness() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[19, 9, 18, 8, 17, 16, 6, 7, 15, 5, 14, 4, 13, 3, 12, 2, 1, 11]");
let texts = collect_field_values(&index, &txn, "text", &documents_ids);

@@ -745,7 +805,14 @@ fn test_proximity_after_exactness() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));

insta::assert_snapshot!(format!("{documents_ids:?}"), @"[2, 1, 0, 4, 5, 8, 7, 3, 6]");
let texts = collect_field_values(&index, &txn, "text", &documents_ids);

@@ -776,7 +843,13 @@ fn test_proximity_after_exactness() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[0, 1, 2]");
let texts = collect_field_values(&index, &txn, "text", &documents_ids);

@@ -804,7 +877,13 @@ fn test_exactness_followed_by_typo_prefer_no_typo_prefix() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("quick brown fox extra");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[2, 1, 0, 4, 3]");
let texts = collect_field_values(&index, &txn, "text", &documents_ids);

@@ -834,7 +913,13 @@ fn test_typo_followed_by_exactness() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::Last);
s.query("extraordinarily quick brown fox");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();

let document_ids_scores: Vec<_> =
documents_ids.iter().zip(document_scores.into_iter()).collect();
insta::assert_snapshot!(format!("{document_ids_scores:#?}"));
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[1, 0, 4, 3]");
let texts = collect_field_values(&index, &txn, "text", &documents_ids);
@@ -7,6 +7,7 @@ use heed::RoTxn;
use maplit::hashset;

use crate::index::tests::TempIndex;
use crate::score_details::ScoreDetails;
use crate::search::new::tests::collect_field_values;
use crate::{AscDesc, Criterion, GeoSortStrategy, Member, Search, SearchResult};

@@ -28,30 +29,37 @@ fn execute_iterative_and_rtree_returns_the_same<'a>(
rtxn: &RoTxn<'a>,
index: &TempIndex,
search: &mut Search<'a>,
) -> Vec<usize> {
) -> (Vec<usize>, Vec<Vec<ScoreDetails>>) {
search.geo_sort_strategy(GeoSortStrategy::AlwaysIterative(2));
let SearchResult { documents_ids, .. } = search.execute().unwrap();
let SearchResult { documents_ids, document_scores: iterative_scores_bucketed, .. } =
search.execute().unwrap();
let iterative_ids_bucketed = collect_field_values(index, rtxn, "id", &documents_ids);

search.geo_sort_strategy(GeoSortStrategy::AlwaysIterative(1000));
let SearchResult { documents_ids, .. } = search.execute().unwrap();
let SearchResult { documents_ids, document_scores: iterative_scores, .. } =
search.execute().unwrap();
let iterative_ids = collect_field_values(index, rtxn, "id", &documents_ids);

assert_eq!(iterative_ids_bucketed, iterative_ids, "iterative bucket");
assert_eq!(iterative_scores_bucketed, iterative_scores, "iterative bucket score");

search.geo_sort_strategy(GeoSortStrategy::AlwaysRtree(2));
let SearchResult { documents_ids, .. } = search.execute().unwrap();
let SearchResult { documents_ids, document_scores: rtree_scores_bucketed, .. } =
search.execute().unwrap();
let rtree_ids_bucketed = collect_field_values(index, rtxn, "id", &documents_ids);

search.geo_sort_strategy(GeoSortStrategy::AlwaysRtree(1000));
let SearchResult { documents_ids, .. } = search.execute().unwrap();
let SearchResult { documents_ids, document_scores: rtree_scores, .. } =
search.execute().unwrap();
let rtree_ids = collect_field_values(index, rtxn, "id", &documents_ids);

assert_eq!(rtree_ids_bucketed, rtree_ids, "rtree bucket");
assert_eq!(rtree_scores_bucketed, rtree_scores, "rtree bucket score");

assert_eq!(iterative_ids, rtree_ids, "iterative vs rtree");
assert_eq!(iterative_scores, rtree_scores, "iterative vs rtree scores");

iterative_ids.into_iter().map(|id| id.parse().unwrap()).collect()
(iterative_ids.into_iter().map(|id| id.parse().unwrap()).collect(), iterative_scores)
}

#[test]
@@ -73,14 +81,17 @@ fn test_geo_sort() {
let rtxn = index.read_txn().unwrap();

let mut s = Search::new(&rtxn, &index);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

s.sort_criteria(vec![AscDesc::Asc(Member::Geo([0., 0.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[0, 1, 2, 3, 4, 5, 6, 8, 7, 10, 9]");
insta::assert_snapshot!(format!("{scores:#?}"));

s.sort_criteria(vec![AscDesc::Desc(Member::Geo([0., 0.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[5, 4, 3, 2, 1, 0, 6, 8, 7, 10, 9]");
insta::assert_snapshot!(format!("{scores:#?}"));
}

#[test]
@@ -101,52 +112,63 @@ fn test_geo_sort_around_the_edge_of_the_flat_earth() {
let rtxn = index.read_txn().unwrap();

let mut s = Search::new(&rtxn, &index);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);

// --- asc
s.sort_criteria(vec![AscDesc::Asc(Member::Geo([0., 0.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[0, 1, 2, 3, 4]");
insta::assert_snapshot!(format!("{scores:#?}"));

// ensuring the lat doesn't wrap around
s.sort_criteria(vec![AscDesc::Asc(Member::Geo([85., 0.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[1, 0, 3, 4, 2]");
insta::assert_snapshot!(format!("{scores:#?}"));

s.sort_criteria(vec![AscDesc::Asc(Member::Geo([-85., 0.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[2, 0, 3, 4, 1]");
insta::assert_snapshot!(format!("{scores:#?}"));

// ensuring the lng does wrap around
s.sort_criteria(vec![AscDesc::Asc(Member::Geo([0., 175.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[3, 4, 2, 1, 0]");
insta::assert_snapshot!(format!("{scores:#?}"));

s.sort_criteria(vec![AscDesc::Asc(Member::Geo([0., -175.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[4, 3, 2, 1, 0]");
insta::assert_snapshot!(format!("{scores:#?}"));

// --- desc
s.sort_criteria(vec![AscDesc::Desc(Member::Geo([0., 0.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[4, 3, 2, 1, 0]");
insta::assert_snapshot!(format!("{scores:#?}"));

// ensuring the lat doesn't wrap around
s.sort_criteria(vec![AscDesc::Desc(Member::Geo([85., 0.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[2, 4, 3, 0, 1]");
insta::assert_snapshot!(format!("{scores:#?}"));

s.sort_criteria(vec![AscDesc::Desc(Member::Geo([-85., 0.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[1, 4, 3, 0, 2]");
insta::assert_snapshot!(format!("{scores:#?}"));

// ensuring the lng does wrap around
s.sort_criteria(vec![AscDesc::Desc(Member::Geo([0., 175.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[0, 1, 2, 4, 3]");
insta::assert_snapshot!(format!("{scores:#?}"));

s.sort_criteria(vec![AscDesc::Desc(Member::Geo([0., -175.]))]);
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[0, 1, 2, 3, 4]");
insta::assert_snapshot!(format!("{scores:#?}"));
}

#[test]
@@ -166,19 +188,98 @@ fn geo_sort_mixed_with_words() {
let rtxn = index.read_txn().unwrap();

let mut s = Search::new(&rtxn, &index);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.sort_criteria(vec![AscDesc::Asc(Member::Geo([0., 0.]))]);

s.query("jean");
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[0, 2, 3]");
insta::assert_snapshot!(format!("{scores:#?}"));

s.query("bob");
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[2, 4]");
insta::assert_snapshot!(format!("{scores:#?}"), @r###"
[
[
Words(
Words {
matching_words: 1,
max_matching_words: 1,
},
),
GeoSort(
GeoSort {
target_point: [
0.0,
0.0,
],
ascending: true,
value: Some(
[
-89.0,
0.0,
],
),
},
),
],
[
Words(
Words {
matching_words: 1,
max_matching_words: 1,
},
),
GeoSort(
GeoSort {
target_point: [
0.0,
0.0,
],
ascending: true,
value: Some(
[
0.0,
-179.0,
],
),
},
),
],
]
"###);

s.query("intel");
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[1]");
insta::assert_snapshot!(format!("{scores:#?}"), @r###"
[
[
Words(
Words {
matching_words: 1,
max_matching_words: 1,
},
),
GeoSort(
GeoSort {
target_point: [
0.0,
0.0,
],
ascending: true,
value: Some(
[
88.0,
0.0,
],
),
},
),
],
]
"###);
}

#[test]
@@ -198,9 +299,11 @@ fn geo_sort_without_any_geo_faceted_documents() {
let rtxn = index.read_txn().unwrap();

let mut s = Search::new(&rtxn, &index);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.sort_criteria(vec![AscDesc::Asc(Member::Geo([0., 0.]))]);

s.query("jean");
let ids = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
let (ids, scores) = execute_iterative_and_rtree_returns_the_same(&rtxn, &index, &mut s);
insta::assert_snapshot!(format!("{ids:?}"), @"[0, 2, 3]");
insta::assert_snapshot!(format!("{scores:#?}"));
}
@@ -80,10 +80,13 @@ fn test_2gram_simple() {

let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.query("sun flower");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
// will also match documents with "sunflower" + prefix tolerance
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[0, 1, 2, 3, 5]");
// scores are empty because the only rule is Words with All matching strategy
insta::assert_snapshot!(format!("{document_scores:?}"), @"[[], [], [], [], []]");
let texts = collect_field_values(&index, &txn, "text", &documents_ids);
insta::assert_debug_snapshot!(texts, @r###"
[

@@ -122,11 +122,11 @@ fn create_edge_cases_index() -> TempIndex {
sta stb stc ste stf stg sth sti stj stk stl stm stn sto stp stq str stst stt stu stv stw stx sty stz
"
},
// The next 5 documents lay out a trap with the split word, phrase search, or synonym `sun flower`.
// If the search query is "sunflower", the split word "Sun Flower" will match some documents.
// The next 5 documents lay out a trap with the split word, phrase search, or synonym `sun flower`.
// If the search query is "sunflower", the split word "Sun Flower" will match some documents.
// If the query is `sunflower wilting`, then we should make sure that
// the sprximity condition `flower wilting: sprx N` also comes with the condition
// `sun wilting: sprx N+1`. TODO: this is not the exact condition we use for now.
// `sun wilting: sprx N+1`. TODO: this is not the exact condition we use for now.
// We only check that the phrase `sun flower` exists and `flower wilting: sprx N`, which
// is better than nothing but not the best.
{
@@ -139,7 +139,7 @@ fn create_edge_cases_index() -> TempIndex {
},
{
"id": 3,
// This document matches the query `sunflower wilting`, but the sprximity condition
// This document matches the query `sunflower wilting`, but the sprximity condition
// between `sunflower` and `wilting` cannot be through the split-word `Sun Flower`
// which would reduce to only `flower` and `wilting` being in sprximity.
"text": "A flower wilting under the sun, unlike a sunflower"
@@ -270,13 +270,13 @@ fn test_proximity_simple() {
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.query("the quick brown fox jumps over the lazy dog");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[4, 9, 10, 7, 6, 5, 2, 3, 0, 1]");
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[9, 10, 4, 7, 6, 5, 2, 3, 0, 1]");
let texts = collect_field_values(&index, &txn, "text", &documents_ids);
insta::assert_debug_snapshot!(texts, @r###"
[
"\"the quickbrown fox jumps over the lazy dog\"",
"\"the quack brown fox jumps over the lazy dog\"",
"\"the quick brown fox jumps over the lazy dog\"",
"\"the quickbrown fox jumps over the lazy dog\"",
"\"the really quick brown fox jumps over the lazy dog\"",
"\"the really quick brown fox jumps over the very lazy dog\"",
"\"brown quick fox jumps over the lazy dog\"",
@@ -295,9 +295,12 @@ fn test_proximity_split_word() {

let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.query("sunflower wilting");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[2, 4, 5, 1, 3]");
insta::assert_snapshot!(format!("{document_scores:#?}"));

let texts = collect_field_values(&index, &txn, "text", &documents_ids);
// TODO: "2" and "4" should be swapped ideally
insta::assert_debug_snapshot!(texts, @r###"
@@ -312,9 +315,11 @@ fn test_proximity_split_word() {

let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.query("\"sun flower\" wilting");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[2, 4, 1]");
insta::assert_snapshot!(format!("{document_scores:#?}"));
let texts = collect_field_values(&index, &txn, "text", &documents_ids);
// TODO: "2" and "4" should be swapped ideally
insta::assert_debug_snapshot!(texts, @r###"
@@ -337,9 +342,11 @@ fn test_proximity_split_word() {

let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.query("xyz wilting");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[2, 4, 1]");
insta::assert_snapshot!(format!("{document_scores:#?}"));
let texts = collect_field_values(&index, &txn, "text", &documents_ids);
// TODO: "2" and "4" should be swapped ideally
insta::assert_debug_snapshot!(texts, @r###"
@@ -358,9 +365,11 @@ fn test_proximity_prefix_db() {

let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.query("best s");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[10, 13, 9, 12, 8, 6, 7, 11, 15]");
insta::assert_snapshot!(format!("{document_scores:#?}"));
let texts = collect_field_values(&index, &txn, "text", &documents_ids);

// This test illustrates the loss of precision from using the prefix DB
@@ -381,9 +390,11 @@ fn test_proximity_prefix_db() {
// Difference when using the `su` prefix, which is not in the prefix DB
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.query("best su");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[10, 13, 9, 12, 8, 11, 7, 6, 15]");
insta::assert_snapshot!(format!("{document_scores:#?}"));
let texts = collect_field_values(&index, &txn, "text", &documents_ids);

insta::assert_debug_snapshot!(texts, @r###"
@@ -406,9 +417,11 @@ fn test_proximity_prefix_db() {

let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.query("best win");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[15, 16, 17, 18, 19, 20, 21, 22]");
insta::assert_snapshot!(format!("{document_scores:#?}"));
let texts = collect_field_values(&index, &txn, "text", &documents_ids);

insta::assert_debug_snapshot!(texts, @r###"
@@ -428,9 +441,11 @@ fn test_proximity_prefix_db() {

let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.query("best wint");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[19, 22, 18, 21, 17, 20, 16, 15]");
insta::assert_snapshot!(format!("{document_scores:#?}"));
let texts = collect_field_values(&index, &txn, "text", &documents_ids);

insta::assert_debug_snapshot!(texts, @r###"
@@ -450,9 +465,11 @@ fn test_proximity_prefix_db() {

let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
s.query("best wi");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[19, 22, 18, 21, 17, 15, 16, 20]");
insta::assert_snapshot!(format!("{document_scores:#?}"));
let texts = collect_field_values(&index, &txn, "text", &documents_ids);

insta::assert_debug_snapshot!(texts, @r###"

@@ -61,8 +61,41 @@ fn test_trap_basic() {
let mut s = Search::new(&txn, &index);
s.terms_matching_strategy(TermsMatchingStrategy::All);
s.query("summer holiday");
let SearchResult { documents_ids, .. } = s.execute().unwrap();
s.scoring_strategy(crate::score_details::ScoringStrategy::Detailed);
let SearchResult { documents_ids, document_scores, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[0, 1]");
insta::assert_snapshot!(format!("{document_scores:#?}"), @r###"
[
    [
        Proximity(Rank { rank: 8, max_rank: 8 }),
        Typo(Typo { typo_count: 0, max_typo_count: 2 }),
    ],
    [
        Proximity(Rank { rank: 8, max_rank: 8 }),
        Typo(Typo { typo_count: 0, max_typo_count: 2 }),
    ],
]
"###);
let texts = collect_field_values(&index, &txn, "text", &documents_ids);
// TODO: this is incorrect, 1 should come before 0
insta::assert_debug_snapshot!(texts, @r###"

@@ -0,0 +1,244 @@
---
source: milli/src/search/new/tests/attribute_fid.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (2, [Fid(Rank { rank: 19, max_rank: 19 }), Position(Rank { rank: 91, max_rank: 91 })]),
    (6, [Fid(Rank { rank: 15, max_rank: 19 }), Position(Rank { rank: 81, max_rank: 91 })]),
    (5, [Fid(Rank { rank: 14, max_rank: 19 }), Position(Rank { rank: 79, max_rank: 91 })]),
    (4, [Fid(Rank { rank: 13, max_rank: 19 }), Position(Rank { rank: 77, max_rank: 91 })]),
    (3, [Fid(Rank { rank: 12, max_rank: 19 }), Position(Rank { rank: 83, max_rank: 91 })]),
    (9, [Fid(Rank { rank: 11, max_rank: 19 }), Position(Rank { rank: 75, max_rank: 91 })]),
    (8, [Fid(Rank { rank: 10, max_rank: 19 }), Position(Rank { rank: 79, max_rank: 91 })]),
    (7, [Fid(Rank { rank: 10, max_rank: 19 }), Position(Rank { rank: 73, max_rank: 91 })]),
    (11, [Fid(Rank { rank: 7, max_rank: 19 }), Position(Rank { rank: 77, max_rank: 91 })]),
    (10, [Fid(Rank { rank: 6, max_rank: 19 }), Position(Rank { rank: 81, max_rank: 91 })]),
    (13, [Fid(Rank { rank: 6, max_rank: 19 }), Position(Rank { rank: 81, max_rank: 91 })]),
    (12, [Fid(Rank { rank: 6, max_rank: 19 }), Position(Rank { rank: 78, max_rank: 91 })]),
    (14, [Fid(Rank { rank: 5, max_rank: 19 }), Position(Rank { rank: 75, max_rank: 91 })]),
    (0, [Fid(Rank { rank: 1, max_rank: 19 }), Position(Rank { rank: 91, max_rank: 91 })]),
]
@@ -0,0 +1,244 @@
---
source: milli/src/search/new/tests/attribute_fid.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (2, [Fid(Rank { rank: 19, max_rank: 19 }), Position(Rank { rank: 91, max_rank: 91 })]),
    (6, [Fid(Rank { rank: 15, max_rank: 19 }), Position(Rank { rank: 81, max_rank: 91 })]),
    (5, [Fid(Rank { rank: 14, max_rank: 19 }), Position(Rank { rank: 79, max_rank: 91 })]),
    (4, [Fid(Rank { rank: 13, max_rank: 19 }), Position(Rank { rank: 77, max_rank: 91 })]),
    (3, [Fid(Rank { rank: 12, max_rank: 19 }), Position(Rank { rank: 83, max_rank: 91 })]),
    (9, [Fid(Rank { rank: 11, max_rank: 19 }), Position(Rank { rank: 75, max_rank: 91 })]),
    (8, [Fid(Rank { rank: 10, max_rank: 19 }), Position(Rank { rank: 79, max_rank: 91 })]),
    (7, [Fid(Rank { rank: 10, max_rank: 19 }), Position(Rank { rank: 73, max_rank: 91 })]),
    (11, [Fid(Rank { rank: 7, max_rank: 19 }), Position(Rank { rank: 77, max_rank: 91 })]),
    (10, [Fid(Rank { rank: 6, max_rank: 19 }), Position(Rank { rank: 81, max_rank: 91 })]),
    (13, [Fid(Rank { rank: 6, max_rank: 19 }), Position(Rank { rank: 81, max_rank: 91 })]),
    (12, [Fid(Rank { rank: 6, max_rank: 19 }), Position(Rank { rank: 78, max_rank: 91 })]),
    (14, [Fid(Rank { rank: 5, max_rank: 19 }), Position(Rank { rank: 75, max_rank: 91 })]),
    (0, [Fid(Rank { rank: 1, max_rank: 19 }), Position(Rank { rank: 91, max_rank: 91 })]),
]
@@ -0,0 +1,244 @@
---
source: milli/src/search/new/tests/attribute_position.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (10, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 21, max_rank: 21 })]),
    (12, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 21, max_rank: 21 })]),
    (11, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 20, max_rank: 21 })]),
    (13, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 20, max_rank: 21 })]),
    (3, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 19, max_rank: 21 })]),
    (4, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 19, max_rank: 21 })]),
    (2, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 18, max_rank: 21 })]),
    (0, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 15, max_rank: 21 })]),
    (1, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 15, max_rank: 21 })]),
    (6, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 13, max_rank: 21 })]),
    (8, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 5, max_rank: 21 })]),
    (7, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 4, max_rank: 21 })]),
    (9, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 4, max_rank: 21 })]),
    (5, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 1, max_rank: 21 })]),
]
@@ -0,0 +1,244 @@
---
source: milli/src/search/new/tests/attribute_position.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (10, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 21, max_rank: 21 })]),
    (12, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 21, max_rank: 21 })]),
    (11, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 20, max_rank: 21 })]),
    (13, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 20, max_rank: 21 })]),
    (3, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 19, max_rank: 21 })]),
    (4, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 19, max_rank: 21 })]),
    (2, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 18, max_rank: 21 })]),
    (0, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 15, max_rank: 21 })]),
    (1, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 15, max_rank: 21 })]),
    (6, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 13, max_rank: 21 })]),
    (8, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 5, max_rank: 21 })]),
    (7, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 4, max_rank: 21 })]),
    (9, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 4, max_rank: 21 })]),
    (5, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 1, max_rank: 21 })]),
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/attribute_position.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (5, [Fid(Rank { rank: 11, max_rank: 11 }), Position(Rank { rank: 51, max_rank: 51 })]),
    (7, [Fid(Rank { rank: 11, max_rank: 11 }), Position(Rank { rank: 51, max_rank: 51 })]),
    (8, [Fid(Rank { rank: 11, max_rank: 11 }), Position(Rank { rank: 51, max_rank: 51 })]),
    (9, [Fid(Rank { rank: 11, max_rank: 11 }), Position(Rank { rank: 51, max_rank: 51 })]),
    (6, [Fid(Rank { rank: 11, max_rank: 11 }), Position(Rank { rank: 50, max_rank: 51 })]),
]
@@ -0,0 +1,244 @@
---
source: milli/src/search/new/tests/attribute_position.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (10, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 21, max_rank: 21 })]),
    (12, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 21, max_rank: 21 })]),
    (11, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 20, max_rank: 21 })]),
    (13, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 20, max_rank: 21 })]),
    (3, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 19, max_rank: 21 })]),
    (4, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 19, max_rank: 21 })]),
    (2, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 18, max_rank: 21 })]),
    (0, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 15, max_rank: 21 })]),
    (1, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 15, max_rank: 21 })]),
    (6, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 13, max_rank: 21 })]),
    (8, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 5, max_rank: 21 })]),
    (7, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 4, max_rank: 21 })]),
    (9, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 4, max_rank: 21 })]),
    (5, [Fid(Rank { rank: 5, max_rank: 5 }), Position(Rank { rank: 1, max_rank: 21 })]),
]
@@ -0,0 +1,366 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (19, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 10, max_rank: 10 })]),
    (9, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 7, max_rank: 10 })]),
    (18, [Words(Words { matching_words: 8, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 9, max_rank: 9 })]),
    (8, [Words(Words { matching_words: 8, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 6, max_rank: 9 })]),
    (17, [Words(Words { matching_words: 7, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 8, max_rank: 8 })]),
    (16, [Words(Words { matching_words: 7, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 8, max_rank: 8 })]),
    (6, [Words(Words { matching_words: 7, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 5, max_rank: 8 })]),
    (7, [Words(Words { matching_words: 7, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 5, max_rank: 8 })]),
    (15, [Words(Words { matching_words: 5, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 6, max_rank: 6 })]),
    (5, [Words(Words { matching_words: 5, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 3, max_rank: 6 })]),
    (14, [Words(Words { matching_words: 4, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 5, max_rank: 5 })]),
    (4, [Words(Words { matching_words: 4, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 3, max_rank: 5 })]),
    (13, [Words(Words { matching_words: 3, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 4, max_rank: 4 })]),
    (3, [Words(Words { matching_words: 3, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 4 })]),
    (12, [Words(Words { matching_words: 2, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 3, max_rank: 3 })]),
    (2, [Words(Words { matching_words: 2, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 3 })]),
    (1, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
    (11, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
]
@@ -0,0 +1,106 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (4, [Words(Words { matching_words: 7, max_matching_words: 7 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 7, max_rank: 8 })]),
    (5, [Words(Words { matching_words: 7, max_matching_words: 7 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 7, max_rank: 8 })]),
    (6, [Words(Words { matching_words: 7, max_matching_words: 7 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 7, max_rank: 8 })]),
    (1, [Words(Words { matching_words: 4, max_matching_words: 7 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 4, max_rank: 5 })]),
    (7, [Words(Words { matching_words: 1, max_matching_words: 7 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 1, max_rank: 2 })]),
]
@@ -0,0 +1,126 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (6, [Words(Words { matching_words: 7, max_matching_words: 7 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 8, max_rank: 8 })]),
    (5, [Words(Words { matching_words: 7, max_matching_words: 7 }), ExactAttribute(MatchesStart), Exactness(Rank { rank: 8, max_rank: 8 })]),
    (4, [Words(Words { matching_words: 7, max_matching_words: 7 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 8, max_rank: 8 })]),
    (3, [Words(Words { matching_words: 7, max_matching_words: 7 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 7, max_rank: 8 })]),
    (1, [Words(Words { matching_words: 4, max_matching_words: 7 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 5, max_rank: 5 })]),
    (7, [Words(Words { matching_words: 1, max_matching_words: 7 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
]
@@ -0,0 +1,86 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (6, [Words(Words { matching_words: 7, max_matching_words: 7 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 6, max_rank: 6 })]),
    (5, [Words(Words { matching_words: 7, max_matching_words: 7 }), ExactAttribute(MatchesStart), Exactness(Rank { rank: 6, max_rank: 6 })]),
    (4, [Words(Words { matching_words: 7, max_matching_words: 7 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 6, max_rank: 6 })]),
    (1, [Words(Words { matching_words: 4, max_matching_words: 7 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 3, max_rank: 3 })]),
]
@@ -0,0 +1,66 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (2, [Words(Words { matching_words: 2, max_matching_words: 2 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 3, max_rank: 3 })]),
    (1, [Words(Words { matching_words: 2, max_matching_words: 2 }), ExactAttribute(MatchesStart), Exactness(Rank { rank: 3, max_rank: 3 })]),
    (0, [Words(Words { matching_words: 2, max_matching_words: 2 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 3, max_rank: 3 })]),
]
@@ -0,0 +1,136 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (2, [Words(Words { matching_words: 4, max_matching_words: 4 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 5, max_rank: 5 }), Typo(Typo { typo_count: 0, max_typo_count: 1 })]),
    (1, [Words(Words { matching_words: 4, max_matching_words: 4 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 4, max_rank: 5 }), Typo(Typo { typo_count: 0, max_typo_count: 2 })]),
    (0, [Words(Words { matching_words: 4, max_matching_words: 4 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 4, max_rank: 5 }), Typo(Typo { typo_count: 1, max_typo_count: 2 })]),
    (4, [Words(Words { matching_words: 4, max_matching_words: 4 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 4, max_rank: 5 }), Typo(Typo { typo_count: 1, max_typo_count: 2 })]),
    (3, [Words(Words { matching_words: 4, max_matching_words: 4 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 3, max_rank: 5 }), Typo(Typo { typo_count: 2, max_typo_count: 3 })]),
]
@@ -0,0 +1,186 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (9, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 10, max_rank: 10 })]),
    (8, [Words(Words { matching_words: 8, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 9, max_rank: 9 })]),
    (7, [Words(Words { matching_words: 7, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 8, max_rank: 8 })]),
    (6, [Words(Words { matching_words: 7, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 8, max_rank: 8 })]),
    (5, [Words(Words { matching_words: 5, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 6, max_rank: 6 })]),
    (4, [Words(Words { matching_words: 4, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 5, max_rank: 5 })]),
    (3, [Words(Words { matching_words: 3, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 4, max_rank: 4 })]),
    (2, [Words(Words { matching_words: 2, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 3, max_rank: 3 })]),
    (1, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
]
@@ -0,0 +1,126 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (8, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 10, max_rank: 10 })]),
    (7, [Words(Words { matching_words: 3, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 4, max_rank: 4 })]),
    (4, [Words(Words { matching_words: 2, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 3, max_rank: 3 })]),
    (6, [Words(Words { matching_words: 2, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 3, max_rank: 3 })]),
    (3, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
    (5, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
]
@@ -0,0 +1,146 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (9, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 10, max_rank: 10 })]),
    (8, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 10, max_rank: 10 })]),
    (3, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(MatchesStart), Exactness(Rank { rank: 2, max_rank: 2 })]),
    (4, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
    (5, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
    (6, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
    (7, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
]
@@ -0,0 +1,146 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (9, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 10, max_rank: 10 })]),
    (8, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 10, max_rank: 10 })]),
    (3, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(MatchesStart), Exactness(Rank { rank: 2, max_rank: 2 })]),
    (4, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
    (5, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
    (6, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
    (7, [Words(Words { matching_words: 1, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 2, max_rank: 2 })]),
]
@@ -0,0 +1,84 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (0, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 10, max_rank: 10 }), Proximity(Rank { rank: 35, max_rank: 57 })]),
    (1, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 10, max_rank: 10 }), Proximity(Rank { rank: 35, max_rank: 57 })]),
    (2, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 10, max_rank: 10 }), Proximity(Rank { rank: 35, max_rank: 57 })]),
]
@@ -0,0 +1,240 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (2, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(ExactMatch), Exactness(Rank { rank: 10, max_rank: 10 }), Proximity(Rank { rank: 57, max_rank: 57 })]),
    (1, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 10, max_rank: 10 }), Proximity(Rank { rank: 56, max_rank: 57 })]),
    (0, [Words(Words { matching_words: 9, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 10, max_rank: 10 }), Proximity(Rank { rank: 35, max_rank: 57 })]),
    (4, [Words(Words { matching_words: 4, max_matching_words: 9 }), ExactAttribute(MatchesStart), Exactness(Rank { rank: 5, max_rank: 5 }), Proximity(Rank { rank: 22, max_rank: 22 })]),
    (5, [Words(Words { matching_words: 4, max_matching_words: 9 }), ExactAttribute(MatchesStart), Exactness(Rank { rank: 5, max_rank: 5 }), Proximity(Rank { rank: 22, max_rank: 22 })]),
    (8, [Words(Words { matching_words: 4, max_matching_words: 9 }), ExactAttribute(MatchesStart), Exactness(Rank { rank: 5, max_rank: 5 }), Proximity(Rank { rank: 22, max_rank: 22 })]),
    (7, [Words(Words { matching_words: 4, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 5, max_rank: 5 }), Proximity(Rank { rank: 21, max_rank: 22 })]),
    (3, [Words(Words { matching_words: 4, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 5, max_rank: 5 }), Proximity(Rank { rank: 17, max_rank: 22 })]),
    (6, [Words(Words { matching_words: 4, max_matching_words: 9 }), ExactAttribute(NoExactMatch), Exactness(Rank { rank: 5, max_rank: 5 }), Proximity(Rank { rank: 17, max_rank: 22 })]),
]
@@ -0,0 +1,110 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (
        1,
        [
            Words(
                Words {
                    matching_words: 4,
                    max_matching_words: 4,
                },
            ),
            Typo(
                Typo {
                    typo_count: 0,
                    max_typo_count: 5,
                },
            ),
            ExactAttribute(
                ExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 5,
                    max_rank: 5,
                },
            ),
        ],
    ),
    (
        0,
        [
            Words(
                Words {
                    matching_words: 4,
                    max_matching_words: 4,
                },
            ),
            Typo(
                Typo {
                    typo_count: 1,
                    max_typo_count: 5,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 4,
                    max_rank: 5,
                },
            ),
        ],
    ),
    (
        4,
        [
            Words(
                Words {
                    matching_words: 4,
                    max_matching_words: 4,
                },
            ),
            Typo(
                Typo {
                    typo_count: 2,
                    max_typo_count: 5,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 4,
                    max_rank: 5,
                },
            ),
        ],
    ),
    (
        3,
        [
            Words(
                Words {
                    matching_words: 4,
                    max_matching_words: 4,
                },
            ),
            Typo(
                Typo {
                    typo_count: 2,
                    max_typo_count: 5,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 3,
                    max_rank: 5,
                },
            ),
        ],
    ),
]
@@ -0,0 +1,366 @@
---
source: milli/src/search/new/tests/exactness.rs
expression: "format!(\"{document_ids_scores:#?}\")"
---
[
    (
        19,
        [
            Words(
                Words {
                    matching_words: 9,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                ExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 10,
                    max_rank: 10,
                },
            ),
        ],
    ),
    (
        9,
        [
            Words(
                Words {
                    matching_words: 9,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 7,
                    max_rank: 10,
                },
            ),
        ],
    ),
    (
        18,
        [
            Words(
                Words {
                    matching_words: 8,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                ExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 9,
                    max_rank: 9,
                },
            ),
        ],
    ),
    (
        8,
        [
            Words(
                Words {
                    matching_words: 8,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 6,
                    max_rank: 9,
                },
            ),
        ],
    ),
    (
        17,
        [
            Words(
                Words {
                    matching_words: 7,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                ExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 8,
                    max_rank: 8,
                },
            ),
        ],
    ),
    (
        16,
        [
            Words(
                Words {
                    matching_words: 7,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 8,
                    max_rank: 8,
                },
            ),
        ],
    ),
    (
        6,
        [
            Words(
                Words {
                    matching_words: 7,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 5,
                    max_rank: 8,
                },
            ),
        ],
    ),
    (
        7,
        [
            Words(
                Words {
                    matching_words: 7,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 5,
                    max_rank: 8,
                },
            ),
        ],
    ),
    (
        15,
        [
            Words(
                Words {
                    matching_words: 5,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                ExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 6,
                    max_rank: 6,
                },
            ),
        ],
    ),
    (
        5,
        [
            Words(
                Words {
                    matching_words: 5,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 3,
                    max_rank: 6,
                },
            ),
        ],
    ),
    (
        14,
        [
            Words(
                Words {
                    matching_words: 4,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                ExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 5,
                    max_rank: 5,
                },
            ),
        ],
    ),
    (
        4,
        [
            Words(
                Words {
                    matching_words: 4,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 3,
                    max_rank: 5,
                },
            ),
        ],
    ),
    (
        13,
        [
            Words(
                Words {
                    matching_words: 3,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                ExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 4,
                    max_rank: 4,
                },
            ),
        ],
    ),
    (
        3,
        [
            Words(
                Words {
                    matching_words: 3,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 2,
                    max_rank: 4,
                },
            ),
        ],
    ),
    (
        12,
        [
            Words(
                Words {
                    matching_words: 2,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                ExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 3,
                    max_rank: 3,
                },
            ),
        ],
    ),
    (
        2,
        [
            Words(
                Words {
                    matching_words: 2,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                NoExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 2,
                    max_rank: 3,
                },
            ),
        ],
    ),
    (
        1,
        [
            Words(
                Words {
                    matching_words: 1,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                ExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 2,
                    max_rank: 2,
                },
            ),
        ],
    ),
    (
        11,
        [
            Words(
                Words {
                    matching_words: 1,
                    max_matching_words: 9,
                },
            ),
            ExactAttribute(
                ExactMatch,
            ),
            Exactness(
                Rank {
                    rank: 2,
                    max_rank: 2,
                },
            ),
        ],
    ),
]
@@ -0,0 +1,168 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        1.0,
                        1.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        2.0,
                        -1.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        -2.0,
                        -2.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        3.0,
                        5.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        6.0,
                        -5.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: None,
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: None,
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: None,
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: None,
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: None,
            },
        ),
    ],
]
@@ -0,0 +1,168 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        6.0,
                        -5.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        3.0,
                        5.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        -2.0,
                        -2.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        2.0,
                        -1.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        1.0,
                        1.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: None,
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: None,
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: None,
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: None,
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: None,
            },
        ),
    ],
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    -175.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        -179.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    -175.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    -175.0,
                ],
                ascending: true,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    -175.0,
                ],
                ascending: true,
                value: Some(
                    [
                        88.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    -175.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        -179.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        88.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    85.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    85.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        -179.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    85.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    85.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    85.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        88.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    -85.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        88.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    -85.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        -179.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    -85.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    -85.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    -85.0,
                    0.0,
                ],
                ascending: false,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    175.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    175.0,
                ],
                ascending: false,
                value: Some(
                    [
                        88.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    175.0,
                ],
                ascending: false,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    175.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        -179.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    175.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        88.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        -179.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    -175.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    -175.0,
                ],
                ascending: false,
                value: Some(
                    [
                        88.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    -175.0,
                ],
                ascending: false,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    -175.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    -175.0,
                ],
                ascending: false,
                value: Some(
                    [
                        0.0,
                        -179.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    85.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        88.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    85.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    85.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    85.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        -179.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    85.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    -85.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    -85.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    -85.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    -85.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        -179.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    -85.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        88.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,91 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    175.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    175.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        -179.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    175.0,
                ],
                ascending: true,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    175.0,
                ],
                ascending: true,
                value: Some(
                    [
                        88.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    175.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,75 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        Words(
            Words {
                matching_words: 1,
                max_matching_words: 1,
            },
        ),
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        Words(
            Words {
                matching_words: 1,
                max_matching_words: 1,
            },
        ),
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        -89.0,
                        0.0,
                    ],
                ),
            },
        ),
    ],
    [
        Words(
            Words {
                matching_words: 1,
                max_matching_words: 1,
            },
        ),
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: Some(
                    [
                        0.0,
                        178.0,
                    ],
                ),
            },
        ),
    ],
]
@@ -0,0 +1,60 @@
---
source: milli/src/search/new/tests/geo_sort.rs
expression: "format!(\"{scores:#?}\")"
---
[
    [
        Words(
            Words {
                matching_words: 1,
                max_matching_words: 1,
            },
        ),
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: None,
            },
        ),
    ],
    [
        Words(
            Words {
                matching_words: 1,
                max_matching_words: 1,
            },
        ),
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: None,
            },
        ),
    ],
    [
        Words(
            Words {
                matching_words: 1,
                max_matching_words: 1,
            },
        ),
        GeoSort(
            GeoSort {
                target_point: [
                    0.0,
                    0.0,
                ],
                ascending: true,
                value: None,
            },
        ),
    ],
]
@@ -0,0 +1,70 @@
---
source: milli/src/search/new/tests/proximity.rs
expression: "format!(\"{document_scores:#?}\")"
---
[
    [
        Proximity(
            Rank {
                rank: 8,
                max_rank: 8,
            },
        ),
    ],
    [
        Proximity(
            Rank {
                rank: 7,
                max_rank: 8,
            },
        ),
    ],
    [
        Proximity(
            Rank {
                rank: 6,
                max_rank: 8,
            },
        ),
    ],
    [
        Proximity(
            Rank {
                rank: 6,
                max_rank: 8,
            },
        ),
    ],
    [
        Proximity(
            Rank {
                rank: 5,
                max_rank: 8,
            },
        ),
    ],
    [
        Proximity(
            Rank {
                rank: 5,
                max_rank: 8,
            },
        ),
    ],
    [
        Proximity(
            Rank {
                rank: 4,
                max_rank: 8,
            },
        ),
    ],
    [
        Proximity(
            Rank {
                rank: 1,
                max_rank: 8,
            },
        ),
    ],
]
Some files were not shown because too many files have changed in this diff