mirror of https://github.com/meilisearch/meilisearch.git
synced 2025-07-21 05:41:01 +00:00

Compare commits: prototype-... to prototype-... (52 commits)
Commits (SHA1):
fdba8c254b
5f99c497f0
8731f047e2
f6dbd75a6f
680bd2efea
a6cbc5f28e
f21fc84e22
18a0ed9aa3
4829348d6e
047d22fcb1
a2a3b8c973
9f37b61666
c15c076da9
9dcf1da59d
8628a0c856
c1e3cc04b0
d96d8bb0dd
4a3405afec
3cfd653db1
b6b6a80b76
f3e2f79290
f517274d1f
3f41bc642a
672abdb341
a13ed4d0b0
4cc2988482
26c7e31f25
b2dee07b5e
d963b5f85a
2acc3ec5ee
da04edff8c
85a80f4f4c
1213ec7164
0a7817a002
1dfc4038ab
73198179f1
51dce9e9d1
c9b65677bf
35d5556f1f
c433bdd1cd
2db09725f8
fdb23132d4
11b95284cd
1b601f70c6
8185731bbf
840727d76f
ead07d0b9d
44f231d41e
3c5d1c93de
57d53de402
918ce1dd67
8095f21999
.dockerignore
@@ -2,4 +2,3 @@ target
 Dockerfile
 .dockerignore
 .gitignore
-**/.git
.github/workflows/publish-apt-brew-pkg.yml (vendored, 2 lines changed)
@@ -35,7 +35,7 @@ jobs:
       - name: Build deb package
         run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
-        uses: svenstaro/upload-release-action@2.5.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/debian/meilisearch.deb
.github/workflows/publish-binaries.yml (vendored, 8 lines changed)
@@ -54,7 +54,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.5.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/meilisearch
@@ -87,7 +87,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.5.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/${{ matrix.artifact_name }}
@@ -121,7 +121,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.5.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
@@ -183,7 +183,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.5.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
.github/workflows/publish-docker-images.yml (vendored, 7 lines changed)
@@ -58,13 +58,9 @@ jobs:

       - name: Set up QEMU
         uses: docker/setup-qemu-action@v2
-        with:
-          platforms: linux/amd64,linux/arm64

       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
-        with:
-          platforms: linux/amd64,linux/arm64

       - name: Login to Docker Hub
         uses: docker/login-action@v2
@@ -92,13 +88,10 @@ jobs:
           push: true
           platforms: linux/amd64,linux/arm64
           tags: ${{ steps.meta.outputs.tags }}
-          builder: ${{ steps.buildx.outputs.name }}
           build-args: |
             COMMIT_SHA=${{ github.sha }}
             COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
             GIT_TAG=${{ github.ref_name }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max

       # /!\ Don't touch this without checking with Cloud team
       - name: Send CI information to Cloud team
.github/workflows/sdks-tests.yml (vendored, 42 lines changed)
@@ -3,6 +3,11 @@ name: SDKs tests

 on:
   workflow_dispatch:
+    inputs:
+      docker_image:
+        description: 'The Meilisearch Docker image used'
+        required: false
+        default: nightly
   schedule:
     - cron: "0 6 * * MON" # Every Monday at 6:00AM

@@ -11,13 +16,28 @@ env:
   MEILI_NO_ANALYTICS: 'true'

 jobs:
+  define-docker-image:
+    runs-on: ubuntu-latest
+    outputs:
+      docker-image: ${{ steps.define-image.outputs.docker-image }}
+    steps:
+      - uses: actions/checkout@v3
+      - name: Define the Docker image we need to use
+        id: define-image
+        run: |
+          event=${{ github.event.action }}
+          echo "docker-image=nightly" >> $GITHUB_OUTPUT
+          if [[ $event == 'workflow_dispatch' ]]; then
+            echo "docker-image=${{ github.event.inputs.docker_image }}" >> $GITHUB_OUTPUT
+          fi
+
   meilisearch-js-tests:
+    needs: define-docker-image
     name: JS SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -47,11 +67,12 @@ jobs:
         run: yarn test:env:browser

   instant-meilisearch-tests:
+    needs: define-docker-image
     name: instant-meilisearch tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -73,11 +94,12 @@ jobs:
         run: yarn build

   meilisearch-php-tests:
+    needs: define-docker-image
     name: PHP SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -103,11 +125,12 @@ jobs:
           composer remove --dev guzzlehttp/guzzle http-interop/http-factory-guzzle

   meilisearch-python-tests:
+    needs: define-docker-image
     name: Python SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -127,11 +150,12 @@ jobs:
         run: pipenv run pytest

   meilisearch-go-tests:
+    needs: define-docker-image
     name: Go SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -139,7 +163,7 @@ jobs:
       - '7700:7700'
     steps:
       - name: Set up Go
-        uses: actions/setup-go@v3
+        uses: actions/setup-go@v4
         with:
           go-version: stable
       - uses: actions/checkout@v3
@@ -156,11 +180,12 @@ jobs:
         run: go test -v ./...

   meilisearch-ruby-tests:
+    needs: define-docker-image
     name: Ruby SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -180,11 +205,12 @@ jobs:
         run: bundle exec rspec

   meilisearch-rust-tests:
+    needs: define-docker-image
     name: Rust SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:nightly
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
.github/workflows/test-suite.yml (vendored, 33 lines changed)
@@ -43,7 +43,7 @@ jobs:
           toolchain: nightly
           override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.1
+        uses: Swatinem/rust-cache@v2.4.0
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -65,7 +65,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.1
+        uses: Swatinem/rust-cache@v2.4.0
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -105,6 +105,29 @@ jobs:
           command: test
           args: --workspace --locked --release --all-features

+  test-disabled-tokenization:
+    name: Test disabled tokenization
+    runs-on: ubuntu-latest
+    container:
+      image: ubuntu:18.04
+    if: github.event_name == 'schedule'
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install needed dependencies
+        run: |
+          apt-get update
+          apt-get install --assume-yes build-essential curl
+      - uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
+      - name: Run cargo tree without default features and check lindera is not present
+        run: |
+          cargo tree -f '{p} {f}' -e normal --no-default-features | grep lindera -vqz
+      - name: Run cargo tree with default features and check lindera is pressent
+        run: |
+          cargo tree -f '{p} {f}' -e normal | grep lindera -qz
+
   # We run tests in debug also, to make sure that the debug_assertions are hit
   test-debug:
     name: Run tests in debug
@@ -123,7 +146,7 @@ jobs:
           toolchain: stable
           override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.1
+        uses: Swatinem/rust-cache@v2.4.0
       - name: Run tests in debug
         uses: actions-rs/cargo@v1
         with:
@@ -142,7 +165,7 @@ jobs:
           override: true
           components: clippy
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.1
+        uses: Swatinem/rust-cache@v2.4.0
       - name: Run cargo clippy
         uses: actions-rs/cargo@v1
         with:
@@ -161,7 +184,7 @@ jobs:
           override: true
           components: rustfmt
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.1
+        uses: Swatinem/rust-cache@v2.4.0
       - name: Run cargo fmt
         # Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
         # Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
Dockerfile
@@ -1,4 +1,3 @@
-# syntax=docker/dockerfile:1.4
 # Compile
 FROM rust:alpine3.16 AS compiler

@@ -12,7 +11,7 @@ ARG GIT_TAG
 ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG}
 ENV RUSTFLAGS="-C target-feature=-crt-static"

-COPY --link . .
+COPY . .
 RUN set -eux; \
     apkArch="$(apk --print-arch)"; \
     if [ "$apkArch" = "aarch64" ]; then \
@@ -31,7 +30,7 @@ RUN apk update --quiet \

 # add meilisearch to the `/bin` so you can run it from anywhere and it's easy
 # to find.
-COPY --from=compiler --link /meilisearch/target/release/meilisearch /bin/meilisearch
+COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch
 # To stay compatible with the older version of the container (pre v0.27.0) we're
 # going to symlink the meilisearch binary in the path to `/meilisearch`
 RUN ln -s /bin/meilisearch /meilisearch
assets/grafana-dashboard.json (new file, 1376 lines)
File diff suppressed because it is too large
assets/prometheus-basic-scraper.yml (new file, 19 lines)
global:
  scrape_interval: 15s # By default, scrape targets every 15 seconds.

  # Attach these labels to any time series or alerts when communicating with
  # external systems (federation, remote storage, Alertmanager).
  external_labels:
    monitor: 'codelab-monitor'

# A scrape configuration containing exactly one endpoint to scrape:
# Here it's Prometheus itself.
scrape_configs:
  # The job name is added as a label `job=<job_name>` to any timeseries scraped from this config.
  - job_name: 'meilisearch'

    # Override the global default and scrape targets from this job every 5 seconds.
    scrape_interval: 5s

    static_configs:
      - targets: ['localhost:7700']
config.toml (54 lines changed; each setting moved below its comment block)
@@ -1,131 +1,131 @@
 # This file shows the default configuration of Meilisearch.
 # All variables are defined here: https://www.meilisearch.com/docs/learn/configuration/instance_options#environment-variables

-db_path = "./data.ms"
 # Designates the location where database files will be created and retrieved.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#database-path
+db_path = "./data.ms"

-env = "development"
 # Configures the instance's environment. Value must be either `production` or `development`.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#environment
+env = "development"

-http_addr = "localhost:7700"
 # The address on which the HTTP server will listen.
+http_addr = "localhost:7700"

-# master_key = "YOUR_MASTER_KEY_VALUE"
 # Sets the instance's master key, automatically protecting all routes except GET /health.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#master-key
+# master_key = "YOUR_MASTER_KEY_VALUE"

-# no_analytics = true
 # Deactivates Meilisearch's built-in telemetry when provided.
 # Meilisearch automatically collects data from all instances that do not opt out using this flag.
 # All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted at any time.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#disable-analytics
+# no_analytics = true

-http_payload_size_limit = "100 MB"
 # Sets the maximum size of accepted payloads.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#payload-limit-size
+http_payload_size_limit = "100 MB"

-log_level = "INFO"
 # Defines how much detail should be present in Meilisearch's logs.
 # Meilisearch currently supports six log levels, listed in order of increasing verbosity: `OFF`, `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#log-level
+log_level = "INFO"

-# max_indexing_memory = "2 GiB"
 # Sets the maximum amount of RAM Meilisearch can use when indexing.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#max-indexing-memory
+# max_indexing_memory = "2 GiB"

-# max_indexing_threads = 4
 # Sets the maximum number of threads Meilisearch can use during indexing.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#max-indexing-threads
+# max_indexing_threads = 4

 #############
 ### DUMPS ###
 #############

-dump_dir = "dumps/"
 # Sets the directory where Meilisearch will create dump files.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#dump-directory
+dump_dir = "dumps/"

-# import_dump = "./path/to/my/file.dump"
 # Imports the dump file located at the specified path. Path must point to a .dump file.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#import-dump
+# import_dump = "./path/to/my/file.dump"

-ignore_missing_dump = false
 # Prevents Meilisearch from throwing an error when `import_dump` does not point to a valid dump file.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-missing-dump
+ignore_missing_dump = false

-ignore_dump_if_db_exists = false
 # Prevents a Meilisearch instance with an existing database from throwing an error when using `import_dump`.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-dump-if-db-exists
+ignore_dump_if_db_exists = false


 #################
 ### SNAPSHOTS ###
 #################

-schedule_snapshot = false
 # Enables scheduled snapshots when true, disable when false (the default).
 # If the value is given as an integer, then enables the scheduled snapshot with the passed value as the interval
 # between each snapshot, in seconds.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#schedule-snapshot-creation
+schedule_snapshot = false

-snapshot_dir = "snapshots/"
 # Sets the directory where Meilisearch will store snapshots.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#snapshot-destination
+snapshot_dir = "snapshots/"

-# import_snapshot = "./path/to/my/snapshot"
 # Launches Meilisearch after importing a previously-generated snapshot at the given filepath.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#import-snapshot
+# import_snapshot = "./path/to/my/snapshot"

-ignore_missing_snapshot = false
 # Prevents a Meilisearch instance from throwing an error when `import_snapshot` does not point to a valid snapshot file.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-missing-snapshot
+ignore_missing_snapshot = false

-ignore_snapshot_if_db_exists = false
 # Prevents a Meilisearch instance with an existing database from throwing an error when using `import_snapshot`.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-snapshot-if-db-exists
+ignore_snapshot_if_db_exists = false


 ###########
 ### SSL ###
 ###########

-# ssl_auth_path = "./path/to/root"
 # Enables client authentication in the specified path.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-authentication-path
+# ssl_auth_path = "./path/to/root"

-# ssl_cert_path = "./path/to/certfile"
 # Sets the server's SSL certificates.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-certificates-path
+# ssl_cert_path = "./path/to/certfile"

-# ssl_key_path = "./path/to/private-key"
 # Sets the server's SSL key files.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-key-path
+# ssl_key_path = "./path/to/private-key"

-# ssl_ocsp_path = "./path/to/ocsp-file"
 # Sets the server's OCSP file.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-ocsp-path
+# ssl_ocsp_path = "./path/to/ocsp-file"

-ssl_require_auth = false
 # Makes SSL authentication mandatory.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-require-auth
+ssl_require_auth = false

-ssl_resumption = false
 # Activates SSL session resumption.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-resumption
+ssl_resumption = false

-ssl_tickets = false
 # Activates SSL tickets.
 # https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-tickets
+ssl_tickets = false

 #############################
 ### Experimental features ###
 #############################

-experimental_enable_metrics = false
 # Experimental metrics feature. For more information, see: <https://github.com/meilisearch/meilisearch/discussions/3518>
 # Enables the Prometheus metrics on the `GET /metrics` endpoint.
+experimental_enable_metrics = false

-experimental_reduce_indexing_memory_usage = false
 # Experimental RAM reduction during indexing, do not use in production, see: <https://github.com/meilisearch/product/discussions/652>
+experimental_reduce_indexing_memory_usage = false
File diff suppressed because it is too large
@@ -90,8 +90,17 @@ pub enum IndexStatus {
 pub struct IndexStats {
     /// Number of documents in the index.
     pub number_of_documents: u64,
-    /// Size of the index' DB, in bytes.
+    /// Size taken up by the index' DB, in bytes.
+    ///
+    /// This includes the size taken by both the used and free pages of the DB, and as the free pages
+    /// are not returned to the disk after a deletion, this number is typically larger than
+    /// `used_database_size` that only includes the size of the used pages.
     pub database_size: u64,
+    /// Size taken by the used pages of the index' DB, in bytes.
+    ///
+    /// As the DB backend does not return to the disk the pages that are not currently used by the DB,
+    /// this value is typically smaller than `database_size`.
+    pub used_database_size: u64,
     /// Association of every field name with the number of times it occurs in the documents.
     pub field_distribution: FieldDistribution,
     /// Creation date of the index.
@@ -107,10 +116,10 @@ impl IndexStats {
     ///
     /// - rtxn: a RO transaction for the index, obtained from `Index::read_txn()`.
     pub fn new(index: &Index, rtxn: &RoTxn) -> Result<Self> {
-        let database_size = index.on_disk_size()?;
         Ok(IndexStats {
             number_of_documents: index.number_of_documents(rtxn)?,
-            database_size,
+            database_size: index.on_disk_size()?,
+            used_database_size: index.used_size()?,
             field_distribution: index.field_distribution(rtxn)?,
             created_at: index.created_at(rtxn)?,
             updated_at: index.updated_at(rtxn)?,
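Note on the two size fields introduced above: the LMDB-backed stores never shrink their data file, and pages freed by deletions are kept on a freelist for reuse. A minimal sketch of the distinction, assuming only the two heed `Env` accessors that appear elsewhere in this compare (`real_disk_size` and `non_free_pages_size`):

// Sketch, not Meilisearch code: reports both size notions for one environment.
fn report_sizes(env: &heed::Env) -> Result<(), heed::Error> {
    let total = env.real_disk_size()?;     // used + free pages, i.e. `database_size`
    let used = env.non_free_pages_size()?; // used pages only, i.e. `used_database_size`
    // Free pages are recycled rather than returned to the filesystem,
    // so `total >= used`, and the gap grows after large deletions.
    println!("on disk: {total} B, in use: {used} B");
    Ok(())
}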
@@ -31,7 +31,7 @@ mod uuid_codec;
 pub type Result<T> = std::result::Result<T, Error>;
 pub type TaskId = u32;

-use std::collections::HashMap;
+use std::collections::{BTreeMap, HashMap};
 use std::ops::{Bound, RangeBounds};
 use std::path::{Path, PathBuf};
 use std::sync::atomic::AtomicBool;
@@ -573,10 +573,16 @@ impl IndexScheduler {
         &self.index_mapper.indexer_config
     }

+    /// Return the real database size (i.e.: The size **with** the free pages)
     pub fn size(&self) -> Result<u64> {
         Ok(self.env.real_disk_size()?)
     }

+    /// Return the used database size (i.e.: The size **without** the free pages)
+    pub fn used_size(&self) -> Result<u64> {
+        Ok(self.env.non_free_pages_size()?)
+    }
+
     /// Return the index corresponding to the name.
     ///
     /// * If the index wasn't opened before, the index will be opened.
@@ -756,6 +762,38 @@ impl IndexScheduler {
         Ok(tasks)
     }

+    /// The returned structure contains:
+    /// 1. The name of the property being observed can be `statuses`, `types`, or `indexes`.
+    /// 2. The name of the specific data related to the property can be `enqueued` for the `statuses`, `settingsUpdate` for the `types`, or the name of the index for the `indexes`, for example.
+    /// 3. The number of times the properties appeared.
+    pub fn get_stats(&self) -> Result<BTreeMap<String, BTreeMap<String, u64>>> {
+        let rtxn = self.read_txn()?;
+
+        let mut res = BTreeMap::new();
+
+        res.insert(
+            "statuses".to_string(),
+            enum_iterator::all::<Status>()
+                .map(|s| Ok((s.to_string(), self.get_status(&rtxn, s)?.len())))
+                .collect::<Result<BTreeMap<String, u64>>>()?,
+        );
+        res.insert(
+            "types".to_string(),
+            enum_iterator::all::<Kind>()
+                .map(|s| Ok((s.to_string(), self.get_kind(&rtxn, s)?.len())))
+                .collect::<Result<BTreeMap<String, u64>>>()?,
+        );
+        res.insert(
+            "indexes".to_string(),
+            self.index_tasks
+                .iter(&rtxn)?
+                .map(|res| Ok(res.map(|(name, bitmap)| (name.to_string(), bitmap.len()))?))
+                .collect::<Result<BTreeMap<String, u64>>>()?,
+        );
+
+        Ok(res)
+    }
+
     /// Return true iff there is at least one task associated with this index
     /// that is processing.
     pub fn is_index_processing(&self, index: &str) -> Result<bool> {
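For orientation, the map returned by `get_stats` nests property, then value, then count, which is exactly the shape the `/metrics` route iterates further down in this compare. A hypothetical consumer (the function name is invented for the sketch):

use std::collections::BTreeMap;

// Flattens the nested scheduler stats into `property.value = count` lines.
fn print_scheduler_stats(stats: &BTreeMap<String, BTreeMap<String, u64>>) {
    for (property, values) in stats {  // "statuses", "types", "indexes"
        for (value, count) in values { // e.g. "enqueued" -> 3
            println!("{property}.{value} = {count}");
        }
    }
}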
@@ -466,7 +466,7 @@ impl IndexScheduler {
                 }
             }
             Details::DocumentDeletionByFilter { deleted_documents, original_filter: _ } => {
-                assert_eq!(kind.as_kind(), Kind::DocumentDeletionByFilter);
+                assert_eq!(kind.as_kind(), Kind::DocumentDeletion);
                 let (index_uid, _) = if let KindWithContent::DocumentDeletionByFilter {
                     ref index_uid,
                     ref filter_expr,
@@ -45,6 +45,11 @@ impl AuthController {
         self.store.size()
     }

+    /// Return the used size of the `AuthController` database in bytes.
+    pub fn used_size(&self) -> Result<u64> {
+        self.store.used_size()
+    }
+
     pub fn create_key(&self, create_key: CreateApiKey) -> Result<Key> {
         match self.store.get_api_key(create_key.uid)? {
             Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(create_key.uid.to_string())),
@@ -75,6 +75,11 @@ impl HeedAuthStore {
         Ok(self.env.real_disk_size()?)
     }

+    /// Return the number of bytes actually used in the database
+    pub fn used_size(&self) -> Result<u64> {
+        Ok(self.env.non_free_pages_size()?)
+    }
+
     pub fn set_drop_on_close(&mut self, v: bool) {
         self.should_close_on_drop = v;
     }
@@ -224,6 +224,7 @@ InvalidIndexLimit , InvalidRequest , BAD_REQUEST ;
 InvalidIndexOffset                  , InvalidRequest , BAD_REQUEST ;
 InvalidIndexPrimaryKey              , InvalidRequest , BAD_REQUEST ;
 InvalidIndexUid                     , InvalidRequest , BAD_REQUEST ;
+InvalidRestrictSearchableAttributes , InvalidRequest , BAD_REQUEST ;
 InvalidSearchAttributesToCrop       , InvalidRequest , BAD_REQUEST ;
 InvalidSearchAttributesToHighlight  , InvalidRequest , BAD_REQUEST ;
 InvalidSearchAttributesToRetrieve   , InvalidRequest , BAD_REQUEST ;
@@ -395,7 +395,6 @@ impl std::error::Error for ParseTaskStatusError {}
 pub enum Kind {
     DocumentAdditionOrUpdate,
     DocumentDeletion,
-    DocumentDeletionByFilter,
     SettingsUpdate,
     IndexCreation,
     IndexDeletion,
@@ -412,7 +411,6 @@ impl Kind {
         match self {
             Kind::DocumentAdditionOrUpdate
             | Kind::DocumentDeletion
-            | Kind::DocumentDeletionByFilter
             | Kind::SettingsUpdate
             | Kind::IndexCreation
             | Kind::IndexDeletion
@@ -430,7 +428,6 @@ impl Display for Kind {
         match self {
             Kind::DocumentAdditionOrUpdate => write!(f, "documentAdditionOrUpdate"),
             Kind::DocumentDeletion => write!(f, "documentDeletion"),
-            Kind::DocumentDeletionByFilter => write!(f, "documentDeletionByFilter"),
             Kind::SettingsUpdate => write!(f, "settingsUpdate"),
             Kind::IndexCreation => write!(f, "indexCreation"),
             Kind::IndexDeletion => write!(f, "indexDeletion"),
@@ -4,20 +4,32 @@ use prometheus::{
     register_int_gauge_vec, HistogramVec, IntCounterVec, IntGauge, IntGaugeVec,
 };

-const HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: &[f64; 14] = &[
-    0.0005, 0.0008, 0.00085, 0.0009, 0.00095, 0.001, 0.00105, 0.0011, 0.00115, 0.0012, 0.0015,
-    0.002, 0.003, 1.0,
-];
+/// Create evenly distributed buckets
+fn create_buckets() -> [f64; 29] {
+    (0..10)
+        .chain((10..100).step_by(10))
+        .chain((100..=1000).step_by(100))
+        .map(|i| i as f64 / 1000.)
+        .collect::<Vec<_>>()
+        .try_into()
+        .unwrap()
+}

 lazy_static! {
-    pub static ref HTTP_REQUESTS_TOTAL: IntCounterVec = register_int_counter_vec!(
-        opts!("http_requests_total", "HTTP requests total"),
+    pub static ref HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: [f64; 29] = create_buckets();
+    pub static ref MEILISEARCH_HTTP_REQUESTS_TOTAL: IntCounterVec = register_int_counter_vec!(
+        opts!("meilisearch_http_requests_total", "Meilisearch HTTP requests total"),
         &["method", "path"]
     )
     .expect("Can't create a metric");
     pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
-        register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch Db Size In Bytes"))
+        register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch DB Size In Bytes"))
             .expect("Can't create a metric");
+    pub static ref MEILISEARCH_USED_DB_SIZE_BYTES: IntGauge = register_int_gauge!(opts!(
+        "meilisearch_used_db_size_bytes",
+        "Meilisearch Used DB Size In Bytes"
+    ))
+    .expect("Can't create a metric");
     pub static ref MEILISEARCH_INDEX_COUNT: IntGauge =
         register_int_gauge!(opts!("meilisearch_index_count", "Meilisearch Index Count"))
             .expect("Can't create a metric");
@@ -26,11 +38,16 @@ lazy_static! {
         &["index"]
     )
     .expect("Can't create a metric");
-    pub static ref HTTP_RESPONSE_TIME_SECONDS: HistogramVec = register_histogram_vec!(
+    pub static ref MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS: HistogramVec = register_histogram_vec!(
         "http_response_time_seconds",
         "HTTP response times",
         &["method", "path"],
         HTTP_RESPONSE_TIME_CUSTOM_BUCKETS.to_vec()
     )
     .expect("Can't create a metric");
+    pub static ref MEILISEARCH_NB_TASKS: IntGaugeVec = register_int_gauge_vec!(
+        opts!("meilisearch_nb_tasks", "Meilisearch Number of tasks"),
+        &["kind", "value"]
+    )
+    .expect("Can't create a metric");
 }
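The replacement buckets are worth a sanity check: `0..10` yields ten 1 ms steps, `(10..100).step_by(10)` nine 10 ms steps, and `(100..=1000).step_by(100)` ten 100 ms steps, i.e. 10 + 9 + 10 = 29 boundaries spanning 0 s to 1 s. A standalone check of that arithmetic, using the same iterator chain as the diff:

fn main() {
    let buckets: Vec<f64> = (0..10)
        .chain((10..100).step_by(10))
        .chain((100..=1000).step_by(100))
        .map(|ms| ms as f64 / 1000.) // milliseconds -> seconds
        .collect();
    assert_eq!(buckets.len(), 29);
    assert_eq!(buckets.first(), Some(&0.0));
    assert_eq!(buckets.last(), Some(&1.0));
}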
@@ -52,11 +52,11 @@ where
         if is_registered_resource {
             let request_method = req.method().to_string();
             histogram_timer = Some(
-                crate::metrics::HTTP_RESPONSE_TIME_SECONDS
+                crate::metrics::MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS
                     .with_label_values(&[&request_method, request_path])
                     .start_timer(),
             );
-            crate::metrics::HTTP_REQUESTS_TOTAL
+            crate::metrics::MEILISEARCH_HTTP_REQUESTS_TOTAL
                 .with_label_values(&[&request_method, request_path])
                 .inc();
         }
@@ -66,6 +66,8 @@ pub struct SearchQueryGet {
     crop_marker: String,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchMatchingStrategy>)]
     matching_strategy: MatchingStrategy,
+    #[deserr(default, error = DeserrQueryParamError<InvalidRestrictSearchableAttributes>)]
+    pub restrict_searchable_attributes: Option<CS<String>>,
 }

 impl From<SearchQueryGet> for SearchQuery {
@@ -96,6 +98,9 @@ impl From<SearchQueryGet> for SearchQuery {
             highlight_post_tag: other.highlight_post_tag,
             crop_marker: other.crop_marker,
             matching_strategy: other.matching_strategy,
+            restrict_searchable_attributes: other
+                .restrict_searchable_attributes
+                .map(|o| o.into_iter().collect()),
         }
     }
 }
@@ -17,7 +17,7 @@ pub fn configure(config: &mut web::ServiceConfig) {

 pub async fn get_metrics(
     index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
-    auth_controller: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<AuthController>>,
+    auth_controller: Data<AuthController>,
 ) -> Result<HttpResponse, ResponseError> {
     let auth_filters = index_scheduler.filters();
     if !auth_filters.all_indexes_authorized() {
@@ -28,10 +28,10 @@ pub async fn get_metrics(
         return Err(error);
     }

-    let response =
-        create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), auth_filters)?;
+    let response = create_all_stats((*index_scheduler).clone(), auth_controller, auth_filters)?;

     crate::metrics::MEILISEARCH_DB_SIZE_BYTES.set(response.database_size as i64);
+    crate::metrics::MEILISEARCH_USED_DB_SIZE_BYTES.set(response.used_database_size as i64);
     crate::metrics::MEILISEARCH_INDEX_COUNT.set(response.indexes.len() as i64);

     for (index, value) in response.indexes.iter() {
@@ -40,6 +40,14 @@ pub async fn get_metrics(
             .set(value.number_of_documents as i64);
     }

+    for (kind, value) in index_scheduler.get_stats()? {
+        for (value, count) in value {
+            crate::metrics::MEILISEARCH_NB_TASKS
+                .with_label_values(&[&kind, &value])
+                .set(count as i64);
+        }
+    }
+
     let encoder = TextEncoder::new();
     let mut buffer = vec![];
     encoder.encode(&prometheus::gather(), &mut buffer).expect("Failed to encode metrics");
@@ -231,6 +231,8 @@ pub async fn running() -> HttpResponse {
 #[serde(rename_all = "camelCase")]
 pub struct Stats {
     pub database_size: u64,
+    #[serde(skip)]
+    pub used_database_size: u64,
     #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
     pub last_update: Option<OffsetDateTime>,
     pub indexes: BTreeMap<String, indexes::IndexStats>,
@@ -259,6 +261,7 @@ pub fn create_all_stats(
     let mut last_task: Option<OffsetDateTime> = None;
     let mut indexes = BTreeMap::new();
     let mut database_size = 0;
+    let mut used_database_size = 0;

     for index_uid in index_scheduler.index_names()? {
         // Accumulate the size of all indexes, even unauthorized ones, so
@@ -266,6 +269,7 @@ pub fn create_all_stats(
         // See <https://github.com/meilisearch/meilisearch/pull/3541#discussion_r1126747643> for context.
         let stats = index_scheduler.index_stats(&index_uid)?;
         database_size += stats.inner_stats.database_size;
+        used_database_size += stats.inner_stats.used_database_size;

         if !filters.is_index_authorized(&index_uid) {
             continue;
@@ -278,10 +282,14 @@ pub fn create_all_stats(
     }

     database_size += index_scheduler.size()?;
+    used_database_size += index_scheduler.used_size()?;
     database_size += auth_controller.size()?;
-    database_size += index_scheduler.compute_update_file_size()?;
+    used_database_size += auth_controller.used_size()?;
+    let update_file_size = index_scheduler.compute_update_file_size()?;
+    database_size += update_file_size;
+    used_database_size += update_file_size;

-    let stats = Stats { database_size, last_update: last_task, indexes };
+    let stats = Stats { database_size, used_database_size, last_update: last_task, indexes };
     Ok(stats)
 }
@@ -730,7 +730,7 @@ mod tests {
         let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
         snapshot!(meili_snap::json_string!(err), @r###"
         {
-          "message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `documentDeletionByFilter`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
+          "message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
           "code": "invalid_task_types",
           "type": "invalid_request",
           "link": "https://docs.meilisearch.com/errors#invalid_task_types"
@@ -68,6 +68,8 @@ pub struct SearchQuery {
     pub crop_marker: String,
     #[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)]
     pub matching_strategy: MatchingStrategy,
+    #[deserr(default, error = DeserrJsonError<InvalidRestrictSearchableAttributes>, default)]
+    pub restrict_searchable_attributes: Option<Vec<String>>,
 }

 impl SearchQuery {
@@ -119,6 +121,8 @@ pub struct SearchQueryWithIndex {
     pub crop_marker: String,
     #[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)]
     pub matching_strategy: MatchingStrategy,
+    #[deserr(default, error = DeserrJsonError<InvalidRestrictSearchableAttributes>, default)]
+    pub restrict_searchable_attributes: Option<Vec<String>>,
 }

 impl SearchQueryWithIndex {
@@ -142,6 +146,7 @@ impl SearchQueryWithIndex {
             highlight_post_tag,
             crop_marker,
             matching_strategy,
+            restrict_searchable_attributes,
         } = self;
         (
             index_uid,
@@ -163,6 +168,7 @@ impl SearchQueryWithIndex {
                 highlight_post_tag,
                 crop_marker,
                 matching_strategy,
+                restrict_searchable_attributes,
                 // do not use ..Default::default() here,
                 // rather add any missing field from `SearchQuery` to `SearchQueryWithIndex`
             },
@@ -274,6 +280,10 @@ pub fn perform_search(
         search.query(query);
     }

+    if let Some(ref searchable) = query.restrict_searchable_attributes {
+        search.searchable_attributes(searchable);
+    }
+
     let is_finite_pagination = query.is_finite_pagination();
     search.terms_matching_strategy(query.matching_strategy.into());
@@ -5,6 +5,7 @@ mod errors;
 mod formatted;
 mod multi;
 mod pagination;
+mod restrict_searchable;

 use once_cell::sync::Lazy;
 use serde_json::{json, Value};
meilisearch/tests/search/restrict_searchable.rs (new file, 263 lines)
use once_cell::sync::Lazy;
use serde_json::{json, Value};

use crate::common::index::Index;
use crate::common::Server;

async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
    let index = server.index("test");

    index.add_documents(documents.clone(), None).await;
    index.wait_task(0).await;
    index
}

static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
    {
        "title": "Shazam!",
        "desc": "a Captain Marvel ersatz",
        "id": "1",
    },
    {
        "title": "Captain Planet",
        "desc": "He's not part of the Marvel Cinematic Universe",
        "id": "2",
    },
    {
        "title": "Captain Marvel",
        "desc": "a Shazam ersatz",
        "id": "3",
    }])
});

#[actix_rt::test]
async fn simple_search_on_title() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;

    // simple search should return 2 documents (ids: 2 and 3).
    index
        .search(
            json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["title"]}),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 2);
            },
        )
        .await;
}

#[actix_rt::test]
async fn simple_prefix_search_on_title() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;

    // simple search should return 2 documents (ids: 2 and 3).
    index
        .search(
            json!({"q": "Captain Mar", "restrictSearchableAttributes": ["title"]}),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 2);
            },
        )
        .await;
}

#[actix_rt::test]
async fn simple_search_on_title_matching_strategy_all() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    // simple search matching strategy all should only return 1 document (ids: 2).
    index
        .search(json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["title"], "matchingStrategy": "all"}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(response["hits"].as_array().unwrap().len(), 1);
        })
        .await;
}

#[actix_rt::test]
async fn simple_search_on_unknown_field() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    // simple search on unknown field shouldn't return any document.
    index
        .search(
            json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["unknown"]}),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 0);
            },
        )
        .await;
}

#[actix_rt::test]
async fn simple_search_on_no_field() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    // simple search on no field shouldn't return any document.
    index
        .search(
            json!({"q": "Captain Marvel", "restrictSearchableAttributes": []}),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 0);
            },
        )
        .await;
}

#[actix_rt::test]
async fn word_ranking_rule_order() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;

    // Document 3 should appear before document 2.
    index
        .search(
            json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["title"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"],
                    json!([
                        {"id": "3"},
                        {"id": "2"},
                    ])
                );
            },
        )
        .await;
}

#[actix_rt::test]
async fn word_ranking_rule_order_exact_words() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    index.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]})).await;
    index.wait_task(1).await;

    // simple search should return 2 documents (ids: 2 and 3).
    index
        .search(
            json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["title"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"],
                    json!([
                        {"id": "3"},
                        {"id": "2"},
                    ])
                );
            },
        )
        .await;
}

#[actix_rt::test]
async fn typo_ranking_rule_order() {
    let server = Server::new().await;
    let index = index_with_documents(
        &server,
        &json!([
        {
            "title": "Capitain Marivel",
            "desc": "Captain Marvel",
            "id": "1",
        },
        {
            "title": "Captain Marivel",
            "desc": "a Shazam ersatz",
            "id": "2",
        }]),
    )
    .await;

    // Document 2 should appear before document 1.
    index
        .search(json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["title"], "attributesToRetrieve": ["id"]}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(
                response["hits"],
                json!([
                    {"id": "2"},
                    {"id": "1"},
                ])
            );
        })
        .await;
}

#[actix_rt::test]
async fn attributes_ranking_rule_order() {
    let server = Server::new().await;
    let index = index_with_documents(
        &server,
        &json!([
        {
            "title": "Captain Marvel",
            "desc": "a Shazam ersatz",
            "footer": "The story of Captain Marvel",
            "id": "1",
        },
        {
            "title": "The Avengers",
            "desc": "Captain Marvel is far from the earth",
            "footer": "A super hero team",
            "id": "2",
        }]),
    )
    .await;

    // Document 2 should appear before document 1.
    index
        .search(json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["desc", "footer"], "attributesToRetrieve": ["id"]}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(
                response["hits"],
                json!([
                    {"id": "2"},
                    {"id": "1"},
                ])
            );
        })
        .await;
}

#[actix_rt::test]
async fn exactness_ranking_rule_order() {
    let server = Server::new().await;
    let index = index_with_documents(
        &server,
        &json!([
        {
            "title": "Captain Marvel",
            "desc": "Captain Marivel",
            "id": "1",
        },
        {
            "title": "Captain Marvel",
            "desc": "CaptainMarvel",
            "id": "2",
        }]),
    )
    .await;

    // Document 2 should appear before document 1.
    index
        .search(json!({"q": "Captain Marvel", "attributesToRetrieve": ["id"], "restrictSearchableAttributes": ["desc"]}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(
                response["hits"],
                json!([
                    {"id": "2"},
                    {"id": "1"},
                ])
            );
        })
        .await;
}
@@ -97,7 +97,7 @@ async fn task_bad_types() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `documentDeletionByFilter`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
+      "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
       "code": "invalid_task_types",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_task_types"
@@ -108,7 +108,7 @@ async fn task_bad_types() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `documentDeletionByFilter`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
+      "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
       "code": "invalid_task_types",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_task_types"
@@ -119,7 +119,7 @@ async fn task_bad_types() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `documentDeletionByFilter`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
+      "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
       "code": "invalid_task_types",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_task_types"
@@ -23,3 +23,9 @@ pub use self::roaring_bitmap_length::{
 pub use self::script_language_codec::ScriptLanguageCodec;
 pub use self::str_beu32_codec::{StrBEU16Codec, StrBEU32Codec};
 pub use self::str_str_u8_codec::{U8StrStrCodec, UncheckedU8StrStrCodec};
+
+pub trait BytesDecodeOwned {
+    type DItem;
+
+    fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem>;
+}
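heed's `BytesDecode` ties the decoded item to the lifetime of the borrowed bytes; the `BytesDecodeOwned` trait added above is for decoders whose output owns its data, so the input can come from a buffer that does not outlive the call. A self-contained sketch mirroring the pattern every codec below follows (the codec name here is illustrative, not from the repository):

// The trait as added above, plus one toy implementor.
pub trait BytesDecodeOwned {
    type DItem;

    fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem>;
}

struct U32SliceLenCodec; // hypothetical codec, for illustration only

impl BytesDecodeOwned for U32SliceLenCodec {
    type DItem = u64;

    fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
        // Same trick as BoRoaringBitmapLenCodec: the length is implied by byte count.
        Some((bytes.len() / std::mem::size_of::<u32>()) as u64)
    }
}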
@@ -2,8 +2,11 @@ use std::borrow::Cow;
 use std::convert::TryInto;
 use std::mem::size_of;

+use heed::BytesDecode;
 use roaring::RoaringBitmap;

+use crate::heed_codec::BytesDecodeOwned;
+
 pub struct BoRoaringBitmapCodec;

 impl BoRoaringBitmapCodec {
@@ -13,7 +16,7 @@ impl BoRoaringBitmapCodec {
     }
 }

-impl heed::BytesDecode<'_> for BoRoaringBitmapCodec {
+impl BytesDecode<'_> for BoRoaringBitmapCodec {
     type DItem = RoaringBitmap;

     fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
@@ -28,6 +31,14 @@ impl heed::BytesDecode<'_> for BoRoaringBitmapCodec {
     }
 }

+impl BytesDecodeOwned for BoRoaringBitmapCodec {
+    type DItem = RoaringBitmap;
+
+    fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
+        Self::bytes_decode(bytes)
+    }
+}
+
 impl heed::BytesEncode<'_> for BoRoaringBitmapCodec {
     type EItem = RoaringBitmap;
@@ -5,6 +5,8 @@ use std::mem::size_of;
 use byteorder::{NativeEndian, ReadBytesExt, WriteBytesExt};
 use roaring::RoaringBitmap;

+use crate::heed_codec::BytesDecodeOwned;
+
 /// This is the limit where using a byteorder became less size efficient
 /// than using a direct roaring encoding, it is also the point where we are able
 /// to determine the encoding used only by using the array of bytes length.
@@ -49,7 +51,7 @@ impl CboRoaringBitmapCodec {
         } else {
             // Otherwise, it means we used the classic RoaringBitmapCodec and
             // that the header takes threshold integers.
-            RoaringBitmap::deserialize_from(bytes)
+            RoaringBitmap::deserialize_unchecked_from(bytes)
         }
     }

@@ -69,7 +71,7 @@ impl CboRoaringBitmapCodec {
                 vec.push(integer);
             }
         } else {
-            roaring |= RoaringBitmap::deserialize_from(bytes.as_ref())?;
+            roaring |= RoaringBitmap::deserialize_unchecked_from(bytes.as_ref())?;
         }
     }

@@ -103,6 +105,14 @@ impl heed::BytesDecode<'_> for CboRoaringBitmapCodec {
     }
 }

+impl BytesDecodeOwned for CboRoaringBitmapCodec {
+    type DItem = RoaringBitmap;
+
+    fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
+        Self::deserialize_from(bytes).ok()
+    }
+}
+
 impl heed::BytesEncode<'_> for CboRoaringBitmapCodec {
     type EItem = RoaringBitmap;
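The doc comment above describes the "CBO" hybrid layout: up to a threshold of integers the set is stored as raw native-endian u32s (smaller than a roaring header), and the variant is recovered from the byte length alone. A hedged sketch of the encode side of that idea, not the crate's exact code (the THRESHOLD value is assumed for the illustration):

use roaring::RoaringBitmap;

const THRESHOLD: usize = 7; // assumed for the sketch

// Writes small sets as raw u32s and large ones as a serialized bitmap;
// a reader can tell them apart because the raw form is always
// `len * size_of::<u32>()` bytes with `len <= THRESHOLD`.
fn cbo_encode(set: &RoaringBitmap, out: &mut Vec<u8>) -> std::io::Result<()> {
    if set.len() as usize <= THRESHOLD {
        for n in set.iter() {
            out.extend_from_slice(&n.to_ne_bytes());
        }
        Ok(())
    } else {
        set.serialize_into(out)
    }
}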
@@ -2,12 +2,22 @@ use std::borrow::Cow;

 use roaring::RoaringBitmap;

+use crate::heed_codec::BytesDecodeOwned;
+
 pub struct RoaringBitmapCodec;

 impl heed::BytesDecode<'_> for RoaringBitmapCodec {
     type DItem = RoaringBitmap;

     fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
         RoaringBitmap::deserialize_unchecked_from(bytes).ok()
     }
 }

+impl BytesDecodeOwned for RoaringBitmapCodec {
+    type DItem = RoaringBitmap;
+
+    fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
+        RoaringBitmap::deserialize_from(bytes).ok()
+    }
+}
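Several hunks in this compare swap `RoaringBitmap::deserialize_from` for `deserialize_unchecked_from`; the unchecked variant skips the validation the checked one performs while reading, which is safe when the bytes were produced by the matching serializer. A small round-trip showing the two calls agree on well-formed input:

use roaring::RoaringBitmap;

fn main() -> std::io::Result<()> {
    let bitmap: RoaringBitmap = (0u32..1_000).collect();
    let mut bytes = Vec::new();
    bitmap.serialize_into(&mut bytes)?;

    // Checked: validates the serialized structure while reading.
    let checked = RoaringBitmap::deserialize_from(&bytes[..])?;
    // Unchecked: trusts the input; faster, and safe for bytes we just wrote.
    let unchecked = RoaringBitmap::deserialize_unchecked_from(&bytes[..])?;

    assert_eq!(checked, unchecked);
    Ok(())
}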
@@ -1,11 +1,23 @@
 use std::mem;

+use heed::BytesDecode;
+
+use crate::heed_codec::BytesDecodeOwned;
+
 pub struct BoRoaringBitmapLenCodec;

-impl heed::BytesDecode<'_> for BoRoaringBitmapLenCodec {
+impl BytesDecode<'_> for BoRoaringBitmapLenCodec {
     type DItem = u64;

     fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
         Some((bytes.len() / mem::size_of::<u32>()) as u64)
     }
 }
+
+impl BytesDecodeOwned for BoRoaringBitmapLenCodec {
+    type DItem = u64;
+
+    fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
+        Self::bytes_decode(bytes)
+    }
+}
@@ -1,11 +1,14 @@
 use std::mem;

+use heed::BytesDecode;
+
 use super::{BoRoaringBitmapLenCodec, RoaringBitmapLenCodec};
 use crate::heed_codec::roaring_bitmap::cbo_roaring_bitmap_codec::THRESHOLD;
+use crate::heed_codec::BytesDecodeOwned;

 pub struct CboRoaringBitmapLenCodec;

-impl heed::BytesDecode<'_> for CboRoaringBitmapLenCodec {
+impl BytesDecode<'_> for CboRoaringBitmapLenCodec {
     type DItem = u64;

     fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
@@ -20,3 +23,11 @@ impl heed::BytesDecode<'_> for CboRoaringBitmapLenCodec {
     }
 }
+
+impl BytesDecodeOwned for CboRoaringBitmapLenCodec {
+    type DItem = u64;
+
+    fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
+        Self::bytes_decode(bytes)
+    }
+}
@@ -3,6 +3,8 @@ use std::mem;

 use byteorder::{LittleEndian, ReadBytesExt};

+use crate::heed_codec::BytesDecodeOwned;
+
 const SERIAL_COOKIE_NO_RUNCONTAINER: u32 = 12346;
 const SERIAL_COOKIE: u16 = 12347;

@@ -59,6 +61,14 @@ impl heed::BytesDecode<'_> for RoaringBitmapLenCodec {
     }
 }

+impl BytesDecodeOwned for RoaringBitmapLenCodec {
+    type DItem = u64;
+
+    fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
+        RoaringBitmapLenCodec::deserialize_from_slice(bytes).ok()
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use heed::BytesEncode;
@@ -21,10 +21,9 @@ use crate::heed_codec::facet::{
 };
 use crate::heed_codec::{ScriptLanguageCodec, StrBEU16Codec, StrRefCodec};
 use crate::{
-    default_criteria, BEU32StrCodec, BoRoaringBitmapCodec, CboRoaringBitmapCodec, Criterion,
-    DocumentId, ExternalDocumentsIds, FacetDistribution, FieldDistribution, FieldId,
-    FieldIdWordCountCodec, GeoPoint, ObkvCodec, Result, RoaringBitmapCodec, RoaringBitmapLenCodec,
-    Search, U8StrStrCodec, BEU16, BEU32,
+    default_criteria, CboRoaringBitmapCodec, Criterion, DocumentId, ExternalDocumentsIds,
+    FacetDistribution, FieldDistribution, FieldId, FieldIdWordCountCodec, GeoPoint, ObkvCodec,
+    Result, RoaringBitmapCodec, RoaringBitmapLenCodec, Search, U8StrStrCodec, BEU16, BEU32,
 };

 pub const DEFAULT_MIN_WORD_LEN_ONE_TYPO: u8 = 5;
@@ -111,9 +110,6 @@ pub struct Index {
     /// A prefix of word and all the documents ids containing this prefix, from attributes for which typos are not allowed.
     pub exact_word_prefix_docids: Database<Str, RoaringBitmapCodec>,

-    /// Maps a word and a document id (u32) to all the positions where the given word appears.
-    pub docid_word_positions: Database<BEU32StrCodec, BoRoaringBitmapCodec>,
-
     /// Maps the proximity between a pair of words with all the docids where this relation appears.
     pub word_pair_proximity_docids: Database<U8StrStrCodec, CboRoaringBitmapCodec>,
     /// Maps the proximity between a pair of word and prefix with all the docids where this relation appears.
@@ -177,7 +173,6 @@ impl Index {
         let word_prefix_docids = env.create_database(&mut wtxn, Some(WORD_PREFIX_DOCIDS))?;
         let exact_word_prefix_docids =
             env.create_database(&mut wtxn, Some(EXACT_WORD_PREFIX_DOCIDS))?;
-        let docid_word_positions = env.create_database(&mut wtxn, Some(DOCID_WORD_POSITIONS))?;
         let word_pair_proximity_docids =
             env.create_database(&mut wtxn, Some(WORD_PAIR_PROXIMITY_DOCIDS))?;
         let script_language_docids =
@@ -220,7 +215,6 @@ impl Index {
             exact_word_docids,
             word_prefix_docids,
             exact_word_prefix_docids,
-            docid_word_positions,
             word_pair_proximity_docids,
             script_language_docids,
             word_prefix_pair_proximity_docids,
@@ -5,52 +5,6 @@
 #[global_allocator]
 pub static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;

-// #[cfg(test)]
-// pub mod allocator {
-//     use std::alloc::{GlobalAlloc, System};
-//     use std::sync::atomic::{self, AtomicI64};
-
-//     #[global_allocator]
-//     pub static ALLOC: CountingAlloc = CountingAlloc {
-//         max_resident: AtomicI64::new(0),
-//         resident: AtomicI64::new(0),
-//         allocated: AtomicI64::new(0),
-//     };
-
-//     pub struct CountingAlloc {
-//         pub max_resident: AtomicI64,
-//         pub resident: AtomicI64,
-//         pub allocated: AtomicI64,
-//     }
-
-//     unsafe impl GlobalAlloc for CountingAlloc {
-//         unsafe fn alloc(&self, layout: std::alloc::Layout) -> *mut u8 {
-//             self.allocated.fetch_add(layout.size() as i64, atomic::Ordering::SeqCst);
-//             let old_resident =
-//                 self.resident.fetch_add(layout.size() as i64, atomic::Ordering::SeqCst);
-
-//             let resident = old_resident + layout.size() as i64;
-//             self.max_resident.fetch_max(resident, atomic::Ordering::SeqCst);
-
-//             // if layout.size() > 1_000_000 {
-//             //     eprintln!(
-//             //         "allocating {} with new resident size: {resident}",
-//             //         layout.size() / 1_000_000
-//             //     );
-//             //     // let trace = std::backtrace::Backtrace::capture();
-//             //     // let t = trace.to_string();
-//             //     // eprintln!("{t}");
-//             // }
-
-//             System.alloc(layout)
-//         }
-
-//         unsafe fn dealloc(&self, ptr: *mut u8, layout: std::alloc::Layout) {
-//             self.resident.fetch_sub(layout.size() as i64, atomic::Ordering::Relaxed);
-//             System.dealloc(ptr, layout)
-//         }
-//     }
-// }
-
 #[macro_use]
 pub mod documents;

@@ -27,6 +27,7 @@ pub struct Search<'a> {
     offset: usize,
     limit: usize,
     sort_criteria: Option<Vec<AscDesc>>,
+    searchable_attributes: Option<&'a [String]>,
     geo_strategy: new::GeoSortStrategy,
     terms_matching_strategy: TermsMatchingStrategy,
     words_limit: usize,
@@ -43,6 +44,7 @@ impl<'a> Search<'a> {
             offset: 0,
             limit: 20,
             sort_criteria: None,
+            searchable_attributes: None,
             geo_strategy: new::GeoSortStrategy::default(),
             terms_matching_strategy: TermsMatchingStrategy::default(),
             exhaustive_number_hits: false,
@@ -72,6 +74,11 @@ impl<'a> Search<'a> {
         self
     }

+    pub fn searchable_attributes(&mut self, searchable: &'a [String]) -> &mut Search<'a> {
+        self.searchable_attributes = Some(searchable);
+        self
+    }
+
     pub fn terms_matching_strategy(&mut self, value: TermsMatchingStrategy) -> &mut Search<'a> {
         self.terms_matching_strategy = value;
         self
@@ -102,6 +109,11 @@ impl<'a> Search<'a> {

     pub fn execute(&self) -> Result<SearchResult> {
         let mut ctx = SearchContext::new(self.index, self.rtxn);
+
+        if let Some(searchable_attributes) = self.searchable_attributes {
+            ctx.searchable_attributes(searchable_attributes)?;
+        }
+
         let PartialSearchResult { located_query_terms, candidates, documents_ids } =
             execute_search(
                 &mut ctx,
@@ -136,6 +148,7 @@ impl fmt::Debug for Search<'_> {
             offset,
             limit,
             sort_criteria,
+            searchable_attributes,
             geo_strategy: _,
             terms_matching_strategy,
             words_limit,
@@ -149,6 +162,7 @@ impl fmt::Debug for Search<'_> {
             .field("offset", offset)
             .field("limit", limit)
             .field("sort_criteria", sort_criteria)
+            .field("searchable_attributes", searchable_attributes)
             .field("terms_matching_strategy", terms_matching_strategy)
             .field("exhaustive_number_hits", exhaustive_number_hits)
             .field("words_limit", words_limit)
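
A hedged sketch of how the new builder method is meant to be used, based only on the signatures above (`index` and `rtxn` are assumed to come from the caller):

    let attrs = vec!["title".to_string(), "overview".to_string()];
    let mut search = Search::new(&rtxn, &index);
    search.query("hello");
    search.limit(10);
    // Restrict matching to a subset of attributes; `execute` forwards the
    // slice to `SearchContext::searchable_attributes`, as shown above.
    search.searchable_attributes(&attrs);
    let results = search.execute()?;
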
@@ -4,12 +4,13 @@ use std::hash::Hash;

 use fxhash::FxHashMap;
 use heed::types::ByteSlice;
-use heed::{BytesDecode, BytesEncode, Database, RoTxn};
+use heed::{BytesEncode, Database, RoTxn};
 use roaring::RoaringBitmap;

 use super::interner::Interned;
 use super::Word;
-use crate::heed_codec::StrBEU16Codec;
+use crate::heed_codec::{BytesDecodeOwned, StrBEU16Codec};
+use crate::update::{merge_cbo_roaring_bitmaps, MergeFn};
 use crate::{
     CboRoaringBitmapCodec, CboRoaringBitmapLenCodec, Result, RoaringBitmapCodec, SearchContext,
 };
@@ -22,50 +23,110 @@ use crate::{
 #[derive(Default)]
 pub struct DatabaseCache<'ctx> {
     pub word_pair_proximity_docids:
-        FxHashMap<(u8, Interned<String>, Interned<String>), Option<&'ctx [u8]>>,
+        FxHashMap<(u8, Interned<String>, Interned<String>), Option<Cow<'ctx, [u8]>>>,
     pub word_prefix_pair_proximity_docids:
-        FxHashMap<(u8, Interned<String>, Interned<String>), Option<&'ctx [u8]>>,
+        FxHashMap<(u8, Interned<String>, Interned<String>), Option<Cow<'ctx, [u8]>>>,
     pub prefix_word_pair_proximity_docids:
-        FxHashMap<(u8, Interned<String>, Interned<String>), Option<&'ctx [u8]>>,
-    pub word_docids: FxHashMap<Interned<String>, Option<&'ctx [u8]>>,
-    pub exact_word_docids: FxHashMap<Interned<String>, Option<&'ctx [u8]>>,
-    pub word_prefix_docids: FxHashMap<Interned<String>, Option<&'ctx [u8]>>,
-    pub exact_word_prefix_docids: FxHashMap<Interned<String>, Option<&'ctx [u8]>>,
+        FxHashMap<(u8, Interned<String>, Interned<String>), Option<Cow<'ctx, [u8]>>>,
+    pub word_docids: FxHashMap<Interned<String>, Option<Cow<'ctx, [u8]>>>,
+    pub exact_word_docids: FxHashMap<Interned<String>, Option<Cow<'ctx, [u8]>>>,
+    pub word_prefix_docids: FxHashMap<Interned<String>, Option<Cow<'ctx, [u8]>>>,
+    pub exact_word_prefix_docids: FxHashMap<Interned<String>, Option<Cow<'ctx, [u8]>>>,

     pub words_fst: Option<fst::Set<Cow<'ctx, [u8]>>>,
-    pub word_position_docids: FxHashMap<(Interned<String>, u16), Option<&'ctx [u8]>>,
-    pub word_prefix_position_docids: FxHashMap<(Interned<String>, u16), Option<&'ctx [u8]>>,
+    pub word_position_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
+    pub word_prefix_position_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
     pub word_positions: FxHashMap<Interned<String>, Vec<u16>>,
     pub word_prefix_positions: FxHashMap<Interned<String>, Vec<u16>>,

-    pub word_fid_docids: FxHashMap<(Interned<String>, u16), Option<&'ctx [u8]>>,
-    pub word_prefix_fid_docids: FxHashMap<(Interned<String>, u16), Option<&'ctx [u8]>>,
+    pub word_fid_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
+    pub word_prefix_fid_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
     pub word_fids: FxHashMap<Interned<String>, Vec<u16>>,
     pub word_prefix_fids: FxHashMap<Interned<String>, Vec<u16>>,
 }
 impl<'ctx> DatabaseCache<'ctx> {
-    fn get_value<'v, K1, KC>(
+    fn get_value<'v, K1, KC, DC>(
         txn: &'ctx RoTxn,
         cache_key: K1,
         db_key: &'v KC::EItem,
-        cache: &mut FxHashMap<K1, Option<&'ctx [u8]>>,
+        cache: &mut FxHashMap<K1, Option<Cow<'ctx, [u8]>>>,
         db: Database<KC, ByteSlice>,
-    ) -> Result<Option<&'ctx [u8]>>
+    ) -> Result<Option<DC::DItem>>
     where
         K1: Copy + Eq + Hash,
         KC: BytesEncode<'v>,
+        DC: BytesDecodeOwned,
     {
-        let bitmap_ptr = match cache.entry(cache_key) {
-            Entry::Occupied(bitmap_ptr) => *bitmap_ptr.get(),
+        match cache.entry(cache_key) {
+            Entry::Occupied(_) => {}
             Entry::Vacant(entry) => {
-                let bitmap_ptr = db.get(txn, db_key)?;
+                let bitmap_ptr = db.get(txn, db_key)?.map(Cow::Borrowed);
                 entry.insert(bitmap_ptr);
-                bitmap_ptr
             }
-        };
-        Ok(bitmap_ptr)
+        }
+
+        match cache.get(&cache_key).unwrap() {
+            Some(Cow::Borrowed(bytes)) => {
+                DC::bytes_decode_owned(bytes).ok_or(heed::Error::Decoding.into()).map(Some)
+            }
+            Some(Cow::Owned(bytes)) => {
+                DC::bytes_decode_owned(bytes).ok_or(heed::Error::Decoding.into()).map(Some)
+            }
+            None => Ok(None),
+        }
+    }
+
+    fn get_value_from_keys<'v, K1, KC, DC>(
+        txn: &'ctx RoTxn,
+        cache_key: K1,
+        db_keys: &'v [KC::EItem],
+        cache: &mut FxHashMap<K1, Option<Cow<'ctx, [u8]>>>,
+        db: Database<KC, ByteSlice>,
+        merger: MergeFn,
+    ) -> Result<Option<DC::DItem>>
+    where
+        K1: Copy + Eq + Hash,
+        KC: BytesEncode<'v>,
+        DC: BytesDecodeOwned,
+        KC::EItem: Sized,
+    {
+        match cache.entry(cache_key) {
+            Entry::Occupied(_) => {}
+            Entry::Vacant(entry) => {
+                let bitmap_ptr: Option<Cow<'ctx, [u8]>> = match db_keys {
+                    [] => None,
+                    [key] => db.get(txn, key)?.map(Cow::Borrowed),
+                    keys => {
+                        let bitmaps = keys
+                            .iter()
+                            .filter_map(|key| db.get(txn, key).transpose())
+                            .map(|v| v.map(Cow::Borrowed))
+                            .collect::<std::result::Result<Vec<Cow<[u8]>>, _>>()?;
+
+                        if bitmaps.is_empty() {
+                            None
+                        } else {
+                            Some(merger(&[], &bitmaps[..])?)
+                        }
+                    }
+                };
+
+                entry.insert(bitmap_ptr);
+            }
+        }
+
+        match cache.get(&cache_key).unwrap() {
+            Some(Cow::Borrowed(bytes)) => {
+                DC::bytes_decode_owned(bytes).ok_or(heed::Error::Decoding.into()).map(Some)
+            }
+            Some(Cow::Owned(bytes)) => {
+                DC::bytes_decode_owned(bytes).ok_or(heed::Error::Decoding.into()).map(Some)
+            }
+            None => Ok(None),
+        }
+    }
 }

 impl<'ctx> SearchContext<'ctx> {
     pub fn get_words_fst(&mut self) -> Result<fst::Set<Cow<'ctx, [u8]>>> {
         if let Some(fst) = self.db_cache.words_fst.clone() {
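
Side note on the two identical `Cow::Borrowed`/`Cow::Owned` arms above: since `Cow<'_, [u8]>` derefs to `&[u8]`, they could presumably be collapsed into one arm, e.g.:

    match cache.get(&cache_key).unwrap() {
        // `bytes: &Cow<'ctx, [u8]>` deref-coerces to `&[u8]`, covering both
        // the Borrowed and the Owned variant at once.
        Some(bytes) => {
            DC::bytes_decode_owned(bytes).ok_or(heed::Error::Decoding.into()).map(Some)
        }
        None => Ok(None),
    }
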
@@ -99,30 +160,41 @@ impl<'ctx> SearchContext<'ctx> {

     /// Retrieve or insert the given value in the `word_docids` database.
     fn get_db_word_docids(&mut self, word: Interned<String>) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
-            self.txn,
-            word,
-            self.word_interner.get(word).as_str(),
-            &mut self.db_cache.word_docids,
-            self.index.word_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| RoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        match &self.restricted_fids {
+            Some(restricted_fids) => {
+                let interned = self.word_interner.get(word).as_str();
+                let keys: Vec<_> = restricted_fids.iter().map(|fid| (interned, *fid)).collect();
+
+                DatabaseCache::get_value_from_keys::<_, _, CboRoaringBitmapCodec>(
+                    self.txn,
+                    word,
+                    &keys[..],
+                    &mut self.db_cache.word_docids,
+                    self.index.word_fid_docids.remap_data_type::<ByteSlice>(),
+                    merge_cbo_roaring_bitmaps,
+                )
+            }
+            None => DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
+                self.txn,
+                word,
+                self.word_interner.get(word).as_str(),
+                &mut self.db_cache.word_docids,
+                self.index.word_docids.remap_data_type::<ByteSlice>(),
+            ),
+        }
     }

     fn get_db_exact_word_docids(
         &mut self,
         word: Interned<String>,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
+        DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
             self.txn,
             word,
             self.word_interner.get(word).as_str(),
             &mut self.db_cache.exact_word_docids,
             self.index.exact_word_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| RoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        )
     }

     pub fn word_prefix_docids(&mut self, prefix: Word) -> Result<Option<RoaringBitmap>> {
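
Conceptually, the restricted branch above computes the docids of `word` as the union of its per-field posting lists, merged by `merge_cbo_roaring_bitmaps`. The same idea on plain roaring bitmaps (a hypothetical helper, not the milli code path):

    use roaring::RoaringBitmap;

    // Union the per-fid bitmaps; None mirrors `get_value_from_keys`
    // returning Ok(None) when no restricted fid has any match.
    fn union_restricted(bitmaps: Vec<RoaringBitmap>) -> Option<RoaringBitmap> {
        if bitmaps.is_empty() {
            return None;
        }
        Some(bitmaps.into_iter().fold(RoaringBitmap::new(), |acc, b| acc | b))
    }
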
@@ -150,30 +222,41 @@ impl<'ctx> SearchContext<'ctx> {
         &mut self,
         prefix: Interned<String>,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
-            self.txn,
-            prefix,
-            self.word_interner.get(prefix).as_str(),
-            &mut self.db_cache.word_prefix_docids,
-            self.index.word_prefix_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| RoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        match &self.restricted_fids {
+            Some(restricted_fids) => {
+                let interned = self.word_interner.get(prefix).as_str();
+                let keys: Vec<_> = restricted_fids.iter().map(|fid| (interned, *fid)).collect();
+
+                DatabaseCache::get_value_from_keys::<_, _, CboRoaringBitmapCodec>(
+                    self.txn,
+                    prefix,
+                    &keys[..],
+                    &mut self.db_cache.word_prefix_docids,
+                    self.index.word_prefix_fid_docids.remap_data_type::<ByteSlice>(),
+                    merge_cbo_roaring_bitmaps,
+                )
+            }
+            None => DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
+                self.txn,
+                prefix,
+                self.word_interner.get(prefix).as_str(),
+                &mut self.db_cache.word_prefix_docids,
+                self.index.word_prefix_docids.remap_data_type::<ByteSlice>(),
+            ),
+        }
     }

     fn get_db_exact_word_prefix_docids(
         &mut self,
         prefix: Interned<String>,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
+        DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
             self.txn,
             prefix,
             self.word_interner.get(prefix).as_str(),
             &mut self.db_cache.exact_word_prefix_docids,
             self.index.exact_word_prefix_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| RoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        )
     }

     pub fn get_db_word_pair_proximity_docids(
@@ -182,7 +265,7 @@ impl<'ctx> SearchContext<'ctx> {
         word2: Interned<String>,
         proximity: u8,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
+        DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
             self.txn,
             (proximity, word1, word2),
             &(
@@ -192,9 +275,7 @@ impl<'ctx> SearchContext<'ctx> {
             ),
             &mut self.db_cache.word_pair_proximity_docids,
             self.index.word_pair_proximity_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        )
     }

     pub fn get_db_word_pair_proximity_docids_len(
@@ -203,7 +284,7 @@ impl<'ctx> SearchContext<'ctx> {
         word2: Interned<String>,
         proximity: u8,
     ) -> Result<Option<u64>> {
-        DatabaseCache::get_value(
+        DatabaseCache::get_value::<_, _, CboRoaringBitmapLenCodec>(
             self.txn,
             (proximity, word1, word2),
             &(
@@ -213,11 +294,7 @@ impl<'ctx> SearchContext<'ctx> {
             ),
             &mut self.db_cache.word_pair_proximity_docids,
             self.index.word_pair_proximity_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| {
-            CboRoaringBitmapLenCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into())
-        })
-        .transpose()
+        )
     }

     pub fn get_db_word_prefix_pair_proximity_docids(
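
The `CboRoaringBitmapLenCodec` used above keeps cardinality checks cheap: the bitmap is never materialized. For the small-set encoding shown earlier in this diff, the length is pure arithmetic (a sketch of that fast path, assuming the same raw-u32 layout below THRESHOLD):

    use std::mem;

    // Below THRESHOLD the "cbo" format stores raw little-endian u32s, so the
    // cardinality is the byte length divided by 4; larger sets fall back to
    // parsing the roaring header instead.
    fn bo_len(bytes: &[u8]) -> u64 {
        (bytes.len() / mem::size_of::<u32>()) as u64
    }
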
@@ -226,7 +303,7 @@ impl<'ctx> SearchContext<'ctx> {
         prefix2: Interned<String>,
         proximity: u8,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
+        DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
             self.txn,
             (proximity, word1, prefix2),
             &(
@@ -236,9 +313,7 @@ impl<'ctx> SearchContext<'ctx> {
             ),
             &mut self.db_cache.word_prefix_pair_proximity_docids,
             self.index.word_prefix_pair_proximity_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        )
     }
     pub fn get_db_prefix_word_pair_proximity_docids(
         &mut self,
@@ -246,7 +321,7 @@ impl<'ctx> SearchContext<'ctx> {
         right: Interned<String>,
         proximity: u8,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
+        DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
             self.txn,
             (proximity, left_prefix, right),
             &(
@@ -256,9 +331,7 @@ impl<'ctx> SearchContext<'ctx> {
             ),
             &mut self.db_cache.prefix_word_pair_proximity_docids,
             self.index.prefix_word_pair_proximity_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        )
     }

     pub fn get_db_word_fid_docids(
@@ -266,15 +339,18 @@ impl<'ctx> SearchContext<'ctx> {
         word: Interned<String>,
         fid: u16,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
+        // if the requested fid isn't in the restricted list, return None.
+        if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
+            return Ok(None);
+        }
+
+        DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
             self.txn,
             (word, fid),
             &(self.word_interner.get(word).as_str(), fid),
             &mut self.db_cache.word_fid_docids,
             self.index.word_fid_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        )
     }

     pub fn get_db_word_prefix_fid_docids(
@@ -282,15 +358,18 @@ impl<'ctx> SearchContext<'ctx> {
         word_prefix: Interned<String>,
         fid: u16,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
+        // if the requested fid isn't in the restricted list, return None.
+        if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
+            return Ok(None);
+        }
+
+        DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
             self.txn,
             (word_prefix, fid),
             &(self.word_interner.get(word_prefix).as_str(), fid),
             &mut self.db_cache.word_prefix_fid_docids,
             self.index.word_prefix_fid_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        )
     }

     pub fn get_db_word_fids(&mut self, word: Interned<String>) -> Result<Vec<u16>> {
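
The guard added in the two hunks above reads double-negated; as a standalone predicate it says (illustrative only, not milli code):

    // None       -> no restriction: every fid is allowed.
    // Some(fids) -> only the listed fids are allowed.
    fn fid_is_allowed(restricted_fids: &Option<Vec<u16>>, fid: u16) -> bool {
        !restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid))
    }
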
@@ -309,7 +388,7 @@ impl<'ctx> SearchContext<'ctx> {
             for result in remap_key_type {
                 let ((_, fid), value) = result?;
                 // filling other caches to avoid searching for them again
-                self.db_cache.word_fid_docids.insert((word, fid), Some(value));
+                self.db_cache.word_fid_docids.insert((word, fid), Some(Cow::Borrowed(value)));
                 fids.push(fid);
             }
             entry.insert(fids.clone());
@@ -335,7 +414,9 @@ impl<'ctx> SearchContext<'ctx> {
             for result in remap_key_type {
                 let ((_, fid), value) = result?;
                 // filling other caches to avoid searching for them again
-                self.db_cache.word_prefix_fid_docids.insert((word_prefix, fid), Some(value));
+                self.db_cache
+                    .word_prefix_fid_docids
+                    .insert((word_prefix, fid), Some(Cow::Borrowed(value)));
                 fids.push(fid);
             }
             entry.insert(fids.clone());
@@ -350,15 +431,13 @@ impl<'ctx> SearchContext<'ctx> {
         word: Interned<String>,
         position: u16,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
+        DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
             self.txn,
             (word, position),
             &(self.word_interner.get(word).as_str(), position),
             &mut self.db_cache.word_position_docids,
             self.index.word_position_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        )
     }

     pub fn get_db_word_prefix_position_docids(
@@ -366,15 +445,13 @@ impl<'ctx> SearchContext<'ctx> {
         word_prefix: Interned<String>,
         position: u16,
     ) -> Result<Option<RoaringBitmap>> {
-        DatabaseCache::get_value(
+        DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
             self.txn,
             (word_prefix, position),
             &(self.word_interner.get(word_prefix).as_str(), position),
             &mut self.db_cache.word_prefix_position_docids,
             self.index.word_prefix_position_docids.remap_data_type::<ByteSlice>(),
-        )?
-        .map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
-        .transpose()
+        )
     }

     pub fn get_db_word_positions(&mut self, word: Interned<String>) -> Result<Vec<u16>> {
@@ -393,7 +470,9 @@ impl<'ctx> SearchContext<'ctx> {
             for result in remap_key_type {
                 let ((_, position), value) = result?;
                 // filling other caches to avoid searching for them again
-                self.db_cache.word_position_docids.insert((word, position), Some(value));
+                self.db_cache
+                    .word_position_docids
+                    .insert((word, position), Some(Cow::Borrowed(value)));
                 positions.push(position);
             }
             entry.insert(positions.clone());
@@ -424,7 +503,7 @@ impl<'ctx> SearchContext<'ctx> {
                 // filling other caches to avoid searching for them again
                 self.db_cache
                     .word_prefix_position_docids
-                    .insert((word_prefix, position), Some(value));
+                    .insert((word_prefix, position), Some(Cow::Borrowed(value)));
                 positions.push(position);
             }
             entry.insert(positions.clone());
449	milli/src/search/new/matches/.mod.rs.pending-snap	Normal file
@@ -0,0 +1,449 @@
{"run_id":"1683129457-574770000","line":622,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":622,"expression":"matcher.format(format_options)"},"snapshot":"<em>Westfália</em>"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>a"}}
|
||||
{"run_id":"1683133106-100492000","line":738,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":839,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":573,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":640,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":600,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":802,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":746,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":648,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":609,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":811,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":582,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":755,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":657,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":820,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":764,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":666,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":773,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":675,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":684,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":782,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":693,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":702,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":711,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":720,"new":null,"old":null}
|
||||
{"run_id":"1683133106-100492000","line":622,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":622,"expression":"matcher.format(format_options)"},"snapshot":"Westfáliaaaaaa"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>aaaaaaa"}}
|
||||
{"run_id":"1683193451-2793000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1683193451-2793000","line":637,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":637,"expression":"matcher.format(format_options)"},"snapshot":"Westfáliaaaaaa"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>aaaaaaa"}}
|
||||
{"run_id":"1683193542-499542000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1683193542-499542000","line":637,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":637,"expression":"matcher.format(format_options)"},"snapshot":"Westfáliaaaaaa"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>aaaaaaa"}}
|
||||
{"run_id":"1683193590-661809000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1683193590-661809000","line":637,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":637,"expression":"matcher.format(format_options)"},"snapshot":"<em>Westfália</em>"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>a"}}
|
||||
{"run_id":"1683196250-584747000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1683196250-584747000","line":637,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":637,"expression":"matcher.format(format_options)"},"snapshot":"<em>Westfália</em>"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>a"}}
|
||||
{"run_id":"1683196569-36502000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1683196569-36502000","line":616,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":616,"expression":"matcher.format(format_options)"},"snapshot":"<em>Ŵôřlḑ</em>ôle"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Ŵôřlḑôle</em>"}}
|
||||
{"run_id":"1683196614-298348000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":637,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1683196614-298348000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":637,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1683196758-130465000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":637,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1683213265-505594000","line":850,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":850,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_"}}
|
||||
{"run_id":"1683213465-911114000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":637,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1683213465-911114000","line":850,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":850,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_"}}
|
||||
{"run_id":"1683213557-564653000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":637,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1683213557-564653000","line":850,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":850,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he thedoor"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_ _thedoor_"}}
|
||||
{"run_id":"1683213999-273520000","line":657,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":754,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":619,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":593,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":817,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":665,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":762,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":628,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":826,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":602,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":674,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":835,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":771,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":683,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":780,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":692,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":701,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":789,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":640,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":710,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":798,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":719,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":728,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":737,"new":null,"old":null}
|
||||
{"run_id":"1683213999-273520000","line":853,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":853,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he thedoor"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_ _thedoor_"}}
|
||||
{"run_id":"1683710541-379812000","line":754,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":593,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":657,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":817,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":619,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":762,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":665,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":628,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":826,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":602,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":771,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":835,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":674,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":780,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":683,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":789,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":692,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":640,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":798,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":701,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":710,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":719,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":728,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":737,"new":null,"old":null}
|
||||
{"run_id":"1683710541-379812000","line":853,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":853,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he _thedoor_"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_ _thedoor_"}}
|
||||
{"run_id":"1683710687-182342000","line":619,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":657,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":817,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":593,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":754,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":665,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":628,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":762,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":826,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":602,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":835,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":674,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":771,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":780,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":683,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":640,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":692,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":789,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":701,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":798,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":710,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":719,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":728,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":737,"new":null,"old":null}
|
||||
{"run_id":"1683710687-182342000","line":853,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":853,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he _thedoor_"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_ _thedoor_"}}
|
||||
{"run_id":"1684141548-57871000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":637,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1684141548-57871000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":637,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1684141761-300166000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":637,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1684227379-943236000","line":832,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":654,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":662,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":671,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":680,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":689,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":698,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":707,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":716,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":725,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":734,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":590,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":599,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":751,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":759,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":768,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":777,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":786,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":795,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":616,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":625,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":637,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":851,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":814,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":823,"new":null,"old":null}
|
||||
{"run_id":"1686671229-287954000","line":832,"new":null,"old":null}
|
@@ -56,6 +56,7 @@ pub struct SearchContext<'ctx> {
     pub phrase_interner: DedupInterner<Phrase>,
     pub term_interner: Interner<QueryTerm>,
     pub phrase_docids: PhraseDocIdsCache,
+    pub restricted_fids: Option<Vec<u16>>,
 }
 
 impl<'ctx> SearchContext<'ctx> {
@@ -68,8 +69,18 @@ impl<'ctx> SearchContext<'ctx> {
             phrase_interner: <_>::default(),
             term_interner: <_>::default(),
             phrase_docids: <_>::default(),
+            restricted_fids: None,
         }
     }
 
+    pub fn searchable_attributes(&mut self, searchable_attributes: &'ctx [String]) -> Result<()> {
+        let fids_map = self.index.fields_ids_map(self.txn)?;
+        let restricted_fids =
+            searchable_attributes.iter().filter_map(|name| fids_map.id(name)).collect();
+        self.restricted_fids = Some(restricted_fids);
+
+        Ok(())
+    }
 }
 
 #[derive(Clone, Copy, PartialEq, PartialOrd, Ord, Eq)]
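Taken together, these two hunks let a caller restrict matching to a subset of fields before running a search. A minimal sketch of how that could be wired up; the `milli::search::new` import path and its public visibility are assumptions here, while the constructor and method signatures are as shown in the hunks above:

```rust
use milli::search::new::SearchContext; // internal module path, assumed
use milli::{Index, Result};

// Hypothetical helper: build a context limited to user-chosen searchable fields.
fn restricted_context<'a>(
    index: &'a Index,
    rtxn: &'a heed::RoTxn,
    fields: &'a [String],
) -> Result<SearchContext<'a>> {
    let mut ctx = SearchContext::new(index, rtxn);
    // Field names missing from the fields-ids map are silently dropped by `filter_map`.
    ctx.searchable_attributes(fields)?;
    Ok(ctx)
}
```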
@@ -77,13 +77,9 @@ pub fn located_query_terms_from_tokens(
                 }
             }
             TokenKind::Separator(separator_kind) => {
-                match separator_kind {
-                    SeparatorKind::Hard => {
-                        position += 1;
-                    }
-                    SeparatorKind::Soft => {
-                        position += 0;
-                    }
-                }
+                // add penalty for hard separators
+                if let SeparatorKind::Hard = separator_kind {
+                    position = position.wrapping_add(7);
+                }
 
                 phrase = 'phrase: {
@@ -288,3 +284,36 @@ impl PhraseBuilder {
         })
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use charabia::TokenizerBuilder;
+
+    use super::*;
+    use crate::index::tests::TempIndex;
+
+    fn temp_index_with_documents() -> TempIndex {
+        let temp_index = TempIndex::new();
+        temp_index
+            .add_documents(documents!([
+                { "id": 1, "name": "split this world westfali westfalia the Ŵôřlḑôle" },
+                { "id": 2, "name": "Westfália" },
+                { "id": 3, "name": "Ŵôřlḑôle" },
+            ]))
+            .unwrap();
+        temp_index
+    }
+
+    #[test]
+    fn start_with_hard_separator() -> Result<()> {
+        let tokenizer = TokenizerBuilder::new().build();
+        let tokens = tokenizer.tokenize(".");
+        let index = temp_index_with_documents();
+        let rtxn = index.read_txn()?;
+        let mut ctx = SearchContext::new(&index, &rtxn);
+        // panics with `attempt to add with overflow` before <https://github.com/meilisearch/meilisearch/issues/3785>
+        let located_query_terms = located_query_terms_from_tokens(&mut ctx, tokens, None)?;
+        assert!(located_query_terms.is_empty());
+        Ok(())
+    }
+}
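The regression test above exercises the `wrapping_add` fix: with a plain `+`, a position already at the integer's maximum panics in debug builds. A standalone illustration of the difference (not milli code; a `u16` position is assumed for the example):

```rust
fn main() {
    let position: u16 = u16::MAX;
    // `position + 7` would panic here in a debug build:
    // "attempt to add with overflow".
    let wrapped = position.wrapping_add(7);
    // Wrapping arithmetic reduces modulo 2^16 instead of panicking.
    assert_eq!(wrapped, 6);
}
```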
@@ -89,7 +89,6 @@ Create a snapshot test of the given database.
 - `exact_word_docids`
 - `word_prefix_docids`
 - `exact_word_prefix_docids`
-- `docid_word_positions`
 - `word_pair_proximity_docids`
 - `word_prefix_pair_proximity_docids`
 - `word_position_docids`
@@ -217,11 +216,6 @@ pub fn snap_exact_word_prefix_docids(index: &Index) -> String {
         &format!("{s:<16} {}", display_bitmap(&b))
     })
 }
-pub fn snap_docid_word_positions(index: &Index) -> String {
-    make_db_snap_from_iter!(index, docid_word_positions, |((idx, s), b)| {
-        &format!("{idx:<6} {s:<16} {}", display_bitmap(&b))
-    })
-}
 pub fn snap_word_pair_proximity_docids(index: &Index) -> String {
     make_db_snap_from_iter!(index, word_pair_proximity_docids, |((proximity, word1, word2), b)| {
         &format!("{proximity:<2} {word1:<16} {word2:<16} {}", display_bitmap(&b))
@@ -477,9 +471,6 @@ macro_rules! full_snap_of_db {
     ($index:ident, exact_word_prefix_docids) => {{
         $crate::snapshot_tests::snap_exact_word_prefix_docids(&$index)
     }};
-    ($index:ident, docid_word_positions) => {{
-        $crate::snapshot_tests::snap_docid_word_positions(&$index)
-    }};
    ($index:ident, word_pair_proximity_docids) => {{
         $crate::snapshot_tests::snap_word_pair_proximity_docids(&$index)
     }};
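For readers unfamiliar with the snapshot format above: each entry renders a key padded to a fixed column width, followed by its docids bitmap. A hedged stand-in for `display_bitmap` (the real milli helper may differ in detail) that reproduces the `hello [0, ]` style visible in the snapshots later in this diff:

```rust
use roaring::RoaringBitmap;

// Hypothetical stand-in for milli's `display_bitmap` helper.
fn display_bitmap(b: &RoaringBitmap) -> String {
    let mut out = String::from("[");
    for docid in b {
        out.push_str(&format!("{docid}, "));
    }
    out.push(']');
    out
}

fn main() {
    let bitmap: RoaringBitmap = [0u32, 1].into_iter().collect();
    // `{:<16}` left-pads the key to 16 columns, matching the `{s:<16} {}` format above.
    println!("{:<16} {}", "hello", display_bitmap(&bitmap));
}
```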
@@ -23,7 +23,6 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> {
             exact_word_docids,
             word_prefix_docids,
             exact_word_prefix_docids,
-            docid_word_positions,
             word_pair_proximity_docids,
             word_prefix_pair_proximity_docids,
             prefix_word_pair_proximity_docids,
@@ -80,7 +79,6 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> {
         exact_word_docids.clear(self.wtxn)?;
         word_prefix_docids.clear(self.wtxn)?;
         exact_word_prefix_docids.clear(self.wtxn)?;
-        docid_word_positions.clear(self.wtxn)?;
         word_pair_proximity_docids.clear(self.wtxn)?;
         word_prefix_pair_proximity_docids.clear(self.wtxn)?;
         prefix_word_pair_proximity_docids.clear(self.wtxn)?;
@@ -141,7 +139,6 @@ mod tests {
 
         assert!(index.word_docids.is_empty(&rtxn).unwrap());
        assert!(index.word_prefix_docids.is_empty(&rtxn).unwrap());
-        assert!(index.docid_word_positions.is_empty(&rtxn).unwrap());
         assert!(index.word_pair_proximity_docids.is_empty(&rtxn).unwrap());
         assert!(index.field_id_word_count_docids.is_empty(&rtxn).unwrap());
         assert!(index.word_prefix_pair_proximity_docids.is_empty(&rtxn).unwrap());
@@ -1,5 +1,5 @@
 use std::collections::btree_map::Entry;
-use std::collections::{HashMap, HashSet};
+use std::collections::{BTreeSet, HashMap, HashSet};
 
 use fst::IntoStreamer;
 use heed::types::{ByteSlice, DecodeIgnore, Str, UnalignedSlice};
@@ -15,8 +15,7 @@ use crate::facet::FacetType;
 use crate::heed_codec::facet::FieldDocIdFacetCodec;
 use crate::heed_codec::CboRoaringBitmapCodec;
 use crate::{
-    ExternalDocumentsIds, FieldId, FieldIdMapMissingEntry, Index, Result, RoaringBitmapCodec,
-    SmallString32, BEU32,
+    ExternalDocumentsIds, FieldId, FieldIdMapMissingEntry, Index, Result, RoaringBitmapCodec, BEU32,
 };
 
 pub struct DeleteDocuments<'t, 'u, 'i> {
@@ -232,7 +231,6 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
             exact_word_docids,
             word_prefix_docids,
             exact_word_prefix_docids,
-            docid_word_positions,
             word_pair_proximity_docids,
             field_id_word_count_docids,
             word_prefix_pair_proximity_docids,
@@ -251,23 +249,9 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
             facet_id_is_empty_docids,
             documents,
         } = self.index;
 
-        // Retrieve the words contained in the documents.
-        let mut words = Vec::new();
         // Remove from the documents database
         for docid in &self.to_delete_docids {
             documents.delete(self.wtxn, &BEU32::new(docid))?;
-
-            // We iterate through the words positions of the document id, retrieve the word and delete the positions.
-            // We create an iterator to be able to get the content and delete the key-value itself.
-            // It's faster to acquire a cursor to get and delete, as we avoid traversing the LMDB B-Tree two times but only once.
-            let mut iter = docid_word_positions.prefix_iter_mut(self.wtxn, &(docid, ""))?;
-            while let Some(result) = iter.next() {
-                let ((_docid, word), _positions) = result?;
-                // This boolean will indicate if we must remove this word from the words FST.
-                words.push((SmallString32::from(word), false));
-                // safety: we don't keep references from inside the LMDB database.
-                unsafe { iter.del_current()? };
-            }
         }
         // We acquire the current external documents ids map...
         // Note that its soft-deleted document ids field will be equal to the `to_delete_docids`
@@ -278,42 +262,27 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
         let new_external_documents_ids = new_external_documents_ids.into_static();
         self.index.put_external_documents_ids(self.wtxn, &new_external_documents_ids)?;
 
-        // Maybe we can improve the get performance of the words
-        // if we sort the words first, keeping the LMDB pages in cache.
-        words.sort_unstable();
-
+        let mut words_to_keep = BTreeSet::default();
+        let mut words_to_delete = BTreeSet::default();
         // We iterate over the words and delete the documents ids
         // from the word docids database.
-        for (word, must_remove) in &mut words {
-            remove_from_word_docids(
-                self.wtxn,
-                word_docids,
-                word.as_str(),
-                must_remove,
-                &self.to_delete_docids,
-            )?;
-
-            remove_from_word_docids(
-                self.wtxn,
-                exact_word_docids,
-                word.as_str(),
-                must_remove,
-                &self.to_delete_docids,
-            )?;
-        }
+        remove_from_word_docids(
+            self.wtxn,
+            word_docids,
+            &self.to_delete_docids,
+            &mut words_to_keep,
+            &mut words_to_delete,
+        )?;
+        remove_from_word_docids(
+            self.wtxn,
+            exact_word_docids,
+            &self.to_delete_docids,
+            &mut words_to_keep,
+            &mut words_to_delete,
+        )?;
 
         // We construct an FST set that contains the words to delete from the words FST.
-        let words_to_delete =
-            words.iter().filter_map(
-                |(word, must_remove)| {
-                    if *must_remove {
-                        Some(word.as_str())
-                    } else {
-                        None
-                    }
-                },
-            );
-        let words_to_delete = fst::Set::from_iter(words_to_delete)?;
+        let words_to_delete = fst::Set::from_iter(words_to_delete.difference(&words_to_keep))?;
 
         let new_words_fst = {
             // We retrieve the current words FST from the database.
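The new deletion-set construction relies on `BTreeSet` iterating in lexicographic order, which is exactly what `fst::Set::from_iter` requires of its input. A small, self-contained sketch of that final step, with the surrounding state simplified to two hand-filled sets:

```rust
use std::collections::BTreeSet;

fn main() -> Result<(), fst::Error> {
    let words_to_delete: BTreeSet<String> =
        ["door", "hello"].iter().map(|w| w.to_string()).collect();
    let words_to_keep: BTreeSet<String> =
        ["hello"].iter().map(|w| w.to_string()).collect();

    // `difference` yields items in sorted order, satisfying the FST builder.
    let fst_to_delete = fst::Set::from_iter(words_to_delete.difference(&words_to_keep))?;
    assert!(fst_to_delete.contains("door"));
    assert!(!fst_to_delete.contains("hello"));
    Ok(())
}
```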
@@ -532,23 +501,24 @@ fn remove_from_word_prefix_docids(
 fn remove_from_word_docids(
     txn: &mut heed::RwTxn,
     db: &heed::Database<Str, RoaringBitmapCodec>,
-    word: &str,
-    must_remove: &mut bool,
     to_remove: &RoaringBitmap,
+    words_to_keep: &mut BTreeSet<String>,
+    words_to_remove: &mut BTreeSet<String>,
 ) -> Result<()> {
     // We create an iterator to be able to get the content and delete the word docids.
     // It's faster to acquire a cursor to get and delete or put, as we avoid traversing
     // the LMDB B-Tree two times but only once.
-    let mut iter = db.prefix_iter_mut(txn, word)?;
-    if let Some((key, mut docids)) = iter.next().transpose()? {
-        if key == word {
-            let previous_len = docids.len();
-            docids -= to_remove;
-            if docids.is_empty() {
-                // safety: we don't keep references from inside the LMDB database.
-                unsafe { iter.del_current()? };
-                *must_remove = true;
-            } else if docids.len() != previous_len {
+    let mut iter = db.iter_mut(txn)?;
+    while let Some((key, mut docids)) = iter.next().transpose()? {
+        let previous_len = docids.len();
+        docids -= to_remove;
+        if docids.is_empty() {
+            // safety: we don't keep references from inside the LMDB database.
+            unsafe { iter.del_current()? };
+            words_to_remove.insert(key.to_owned());
+        } else {
+            words_to_keep.insert(key.to_owned());
+            if docids.len() != previous_len {
+                let key = key.to_owned();
+                // safety: we don't keep references from inside the LMDB database.
+                unsafe { iter.put_current(&key, &docids)? };
@@ -627,7 +597,7 @@ mod tests {
 
     use super::*;
     use crate::index::tests::TempIndex;
-    use crate::{db_snap, Filter};
+    use crate::{db_snap, Filter, Search};
 
     fn delete_documents<'t>(
         wtxn: &mut RwTxn<'t, '_>,
@@ -1199,4 +1169,52 @@ mod tests {
             DeletionStrategy::AlwaysSoft,
         );
     }
+
+    #[test]
+    fn delete_words_exact_attributes() {
+        let index = TempIndex::new();
+
+        index
+            .update_settings(|settings| {
+                settings.set_primary_key(S("id"));
+                settings.set_searchable_fields(vec![S("text"), S("exact")]);
+                settings.set_exact_attributes(vec![S("exact")].into_iter().collect());
+            })
+            .unwrap();
+
+        index
+            .add_documents(documents!([
+                { "id": 0, "text": "hello" },
+                { "id": 1, "exact": "hello"}
+            ]))
+            .unwrap();
+        db_snap!(index, word_docids, 1, @r###"
+        hello [0, ]
+        "###);
+        db_snap!(index, exact_word_docids, 1, @r###"
+        hello [1, ]
+        "###);
+        db_snap!(index, words_fst, 1, @"300000000000000001084cfcfc2ce1000000016000000090ea47f");
+
+        let mut wtxn = index.write_txn().unwrap();
+        let deleted_internal_ids =
+            delete_documents(&mut wtxn, &index, &["1"], DeletionStrategy::AlwaysHard);
+        wtxn.commit().unwrap();
+
+        db_snap!(index, word_docids, 2, @r###"
+        hello [0, ]
+        "###);
+        db_snap!(index, exact_word_docids, 2, @"");
+        db_snap!(index, words_fst, 2, @"300000000000000001084cfcfc2ce1000000016000000090ea47f");
+
+        insta::assert_snapshot!(format!("{deleted_internal_ids:?}"), @"[1]");
+        let txn = index.read_txn().unwrap();
+        let words = index.words_fst(&txn).unwrap().into_stream().into_strs().unwrap();
+        insta::assert_snapshot!(format!("{words:?}"), @r###"["hello"]"###);
+
+        let mut s = Search::new(&txn, &index);
+        s.query("hello");
+        let crate::SearchResult { documents_ids, .. } = s.execute().unwrap();
+        insta::assert_snapshot!(format!("{documents_ids:?}"), @"[0]");
+    }
 }
@@ -1,6 +1,6 @@
 use std::collections::HashMap;
 use std::fs::File;
-use std::{cmp, io};
+use std::io;
 
 use grenad::Sorter;
 
@@ -54,11 +54,10 @@ pub fn extract_fid_word_count_docids<R: io::Read + io::Seek>(
         }
 
         for position in read_u32_ne_bytes(value) {
-            let (field_id, position) = relative_from_absolute_position(position);
-            let word_count = position as u32 + 1;
+            let (field_id, _) = relative_from_absolute_position(position);
 
             let value = document_fid_wordcount.entry(field_id as FieldId).or_insert(0);
-            *value = cmp::max(*value, word_count);
+            *value += 1;
         }
     }
 
@@ -83,7 +82,7 @@ fn drain_document_fid_wordcount_into_sorter(
     let mut key_buffer = Vec::new();
 
     for (fid, count) in document_fid_wordcount.drain() {
-        if count <= 10 {
+        if count <= 30 {
             key_buffer.clear();
             key_buffer.extend_from_slice(&fid.to_be_bytes());
             key_buffer.push(count as u8);
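The extractor now counts one occurrence per indexed position instead of inferring the count from the highest relative position in a field. A simplified model of the new accumulation; the fid/position bit-split inside `relative_from_absolute_position` is an assumption here, stubbed for illustration:

```rust
use std::collections::HashMap;

type FieldId = u16;

// Assumed encoding: high 16 bits carry the field id, low 16 bits the position.
fn relative_from_absolute_position(absolute: u32) -> (FieldId, u16) {
    ((absolute >> 16) as FieldId, absolute as u16)
}

fn main() {
    // Two positions in field 0, one position in field 1.
    let positions = [0u32, 1, 1 << 16];
    let mut document_fid_wordcount: HashMap<FieldId, u32> = HashMap::new();
    for position in positions {
        let (field_id, _) = relative_from_absolute_position(position);
        // One increment per position, mirroring the new extractor loop.
        *document_fid_wordcount.entry(field_id).or_insert(0) += 1;
    }
    assert_eq!(document_fid_wordcount[&0], 2);
    assert_eq!(document_fid_wordcount[&1], 1);
}
```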
@@ -325,8 +325,6 @@ fn send_and_extract_flattened_documents_data(
-                // send docid_word_positions_chunk to DB writer
                 let docid_word_positions_chunk =
                     unsafe { as_cloneable_grenad(&docid_word_positions_chunk)? };
-                let _ = lmdb_writer_sx
-                    .send(Ok(TypedChunk::DocidWordPositions(docid_word_positions_chunk.clone())));
 
                 let _ =
                     lmdb_writer_sx.send(Ok(TypedChunk::ScriptLanguageDocids(script_language_pair)));
@@ -4,7 +4,6 @@ use std::result::Result as StdResult;
 
 use roaring::RoaringBitmap;
 
-use super::read_u32_ne_bytes;
 use crate::heed_codec::CboRoaringBitmapCodec;
 use crate::update::index_documents::transform::Operation;
 use crate::Result;
@@ -22,10 +21,6 @@ pub fn concat_u32s_array<'a>(_key: &[u8], values: &[Cow<'a, [u8]>]) -> Result<Co
     }
 }
 
-pub fn roaring_bitmap_from_u32s_array(slice: &[u8]) -> RoaringBitmap {
-    read_u32_ne_bytes(slice).collect()
-}
-
 pub fn serialize_roaring_bitmap(bitmap: &RoaringBitmap, buffer: &mut Vec<u8>) -> io::Result<()> {
     buffer.clear();
     buffer.reserve(bitmap.serialized_size());
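For reference, the deleted helper simply collected native-endian `u32`s into a bitmap. A self-contained reconstruction of its behavior; the real `read_u32_ne_bytes` is replaced by an explicit `chunks_exact` loop, which is an assumption about its semantics:

```rust
use roaring::RoaringBitmap;

// Reconstruction of the removed `roaring_bitmap_from_u32s_array` helper.
fn roaring_bitmap_from_u32s_array(slice: &[u8]) -> RoaringBitmap {
    slice
        .chunks_exact(4)
        .map(|bytes| u32::from_ne_bytes(bytes.try_into().unwrap()))
        .collect()
}

fn main() {
    let bytes: Vec<u8> = [1u32, 5, 5, 9].iter().flat_map(|n| n.to_ne_bytes()).collect();
    let bitmap = roaring_bitmap_from_u32s_array(&bytes);
    // A bitmap stores each value once, so the duplicate 5 collapses.
    assert_eq!(bitmap.len(), 3);
}
```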
@@ -14,8 +14,8 @@ pub use grenad_helpers::{
 };
 pub use merge_functions::{
     concat_u32s_array, keep_first, keep_latest_obkv, merge_cbo_roaring_bitmaps,
-    merge_obkvs_and_operations, merge_roaring_bitmaps, merge_two_obkvs,
-    roaring_bitmap_from_u32s_array, serialize_roaring_bitmap, MergeFn,
+    merge_obkvs_and_operations, merge_roaring_bitmaps, merge_two_obkvs, serialize_roaring_bitmap,
+    MergeFn,
 };
 
 use crate::MAX_WORD_LENGTH;
@@ -2471,11 +2471,11 @@ mod tests {
                 {
                     "id": 3,
                     "text": "a a a a a a a a a a a a a a a a a
         a a a a a a a a a a a a a a a a a a a a a a a a a a
         a a a a a a a a a a a a a a a a a a a a a a a a a a
         a a a a a a a a a a a a a a a a a a a a a a a a a a
         a a a a a a a a a a a a a a a a a a a a a a a a a a
         a a a a a a a a a a a a a a a a a a a a a a a a a a
         a a a a a a a a a a a a a a a a a a a a a a a a a a
         a a a a a a a a a a a a a a a a a a a a a a a a a a
         a a a a a a a a a a a a a a a a a a a a a a a a a a
         a a a a a a a a a a a a a a a a a a a a a a a a a a
         a a a a a a a a a a a a a a a a a a a a a "
                 }
             ]))
@@ -2513,6 +2513,5 @@ mod tests {
 
         db_snap!(index, word_fid_docids, 3, @"4c2e2a1832e5802796edc1638136d933");
         db_snap!(index, word_position_docids, 3, @"74f556b91d161d997a89468b4da1cb8f");
-        db_snap!(index, docid_word_positions, 3, @"5287245332627675740b28bd46e1cde1");
     }
 }
@@ -7,24 +7,19 @@ use std::io;
 use charabia::{Language, Script};
 use grenad::MergerBuilder;
 use heed::types::ByteSlice;
-use heed::{BytesDecode, RwTxn};
+use heed::RwTxn;
 use roaring::RoaringBitmap;
 
 use super::helpers::{
-    self, merge_ignore_values, roaring_bitmap_from_u32s_array, serialize_roaring_bitmap,
-    valid_lmdb_key, CursorClonableMmap,
+    self, merge_ignore_values, serialize_roaring_bitmap, valid_lmdb_key, CursorClonableMmap,
 };
 use super::{ClonableMmap, MergeFn};
 use crate::facet::FacetType;
 use crate::update::facet::FacetsUpdate;
 use crate::update::index_documents::helpers::as_cloneable_grenad;
-use crate::{
-    lat_lng_to_xyz, BoRoaringBitmapCodec, CboRoaringBitmapCodec, DocumentId, GeoPoint, Index,
-    Result,
-};
+use crate::{lat_lng_to_xyz, CboRoaringBitmapCodec, DocumentId, GeoPoint, Index, Result};
 
 pub(crate) enum TypedChunk {
-    DocidWordPositions(grenad::Reader<CursorClonableMmap>),
     FieldIdDocidFacetStrings(grenad::Reader<CursorClonableMmap>),
     FieldIdDocidFacetNumbers(grenad::Reader<CursorClonableMmap>),
     Documents(grenad::Reader<CursorClonableMmap>),
@@ -56,29 +51,6 @@ pub(crate) fn write_typed_chunk_into_index(
 ) -> Result<(RoaringBitmap, bool)> {
     let mut is_merged_database = false;
     match typed_chunk {
-        TypedChunk::DocidWordPositions(docid_word_positions_iter) => {
-            write_entries_into_database(
-                docid_word_positions_iter,
-                &index.docid_word_positions,
-                wtxn,
-                index_is_empty,
-                |value, buffer| {
-                    // ensure that values are unique and ordered
-                    let positions = roaring_bitmap_from_u32s_array(value);
-                    BoRoaringBitmapCodec::serialize_into(&positions, buffer);
-                    Ok(buffer)
-                },
-                |new_values, db_values, buffer| {
-                    let new_values = roaring_bitmap_from_u32s_array(new_values);
-                    let positions = match BoRoaringBitmapCodec::bytes_decode(db_values) {
-                        Some(db_values) => new_values | db_values,
-                        None => new_values, // should not happen
-                    };
-                    BoRoaringBitmapCodec::serialize_into(&positions, buffer);
-                    Ok(())
-                },
-            )?;
-        }
         TypedChunk::Documents(obkv_documents_iter) => {
             let mut cursor = obkv_documents_iter.into_cursor()?;
             while let Some((key, value)) = cursor.move_on_next()? {
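The removed merge closure above unioned freshly extracted positions with whatever was already stored in the database; stripped of the codec plumbing, that merge is a plain bitmap union. A standalone sketch:

```rust
use roaring::RoaringBitmap;

fn main() {
    let new_values: RoaringBitmap = (0..3).collect();
    let db_values: RoaringBitmap = (2..5).collect();
    // Mirrors `new_values | db_values` from the removed closure.
    let positions = new_values | db_values;
    assert_eq!(positions.iter().collect::<Vec<u32>>(), vec![0, 1, 2, 3, 4]);
}
```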
@@ -4,7 +4,8 @@ pub use self::delete_documents::{DeleteDocuments, DeletionStrategy, DocumentDele
 pub use self::facet::bulk::FacetsUpdateBulk;
 pub use self::facet::incremental::FacetsUpdateIncrementalInner;
 pub use self::index_documents::{
-    DocumentAdditionResult, DocumentId, IndexDocuments, IndexDocumentsConfig, IndexDocumentsMethod,
+    merge_cbo_roaring_bitmaps, merge_roaring_bitmaps, DocumentAdditionResult, DocumentId,
+    IndexDocuments, IndexDocumentsConfig, IndexDocumentsMethod, MergeFn,
 };
 pub use self::indexer_config::IndexerConfig;
 pub use self::prefix_word_pairs::{