Compare commits

..

9 Commits

Author      SHA1        Date                        Message
Kerollmops  3fa40a3f9c  2023-06-22 18:15:15 +02:00  Change the error codes of the faceting settings
Kerollmops  5675847a5f  2023-06-22 17:40:10 +02:00  Move the sortFacetValuesBy in the faceting settings
Kerollmops  1c77117d02  2023-05-29 16:02:54 +02:00  Make Clippy happy
Kerollmops  26dc415d9e  2023-05-29 15:47:45 +02:00  Replace the BTreeMap by an IndexMap to return values in order
Kerollmops  89a4e7cee4  2023-05-29 15:32:09 +02:00  Expose a sortFacetValuesBy parameter to the user
Kerollmops  f2040e50b2  2023-05-29 15:09:41 +02:00  Clean and make the facet order configurable internally
Kerollmops  2b62e85622  2023-05-29 11:52:57 +02:00  Make the search to always return the facets ordered by count
Kerollmops  c13e3d5c8a  2023-05-25 12:28:26 +02:00  First to-test version of the algorithm
Kerollmops  73a8018eb1  2023-05-25 10:59:04 +02:00  Rename facet distribution to be explicit on the order to find them
65 changed files with 1931 additions and 3018 deletions
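Taken together, these commits add a sortFacetValuesBy option to the faceting settings: each facet's values can be returned in alphabetical order (the default) or by descending count. As a minimal standalone sketch of the JSON shape this produces, here are local mirrors of the FacetValuesSort and FacetingSettings types added in this compare (the Setting<T> wrappers are dropped for brevity, and the "genres" facet name and concrete values are illustrative assumptions):

// deps (assumed): serde = { version = "1", features = ["derive"] }, serde_json = "1"
use std::collections::BTreeMap;

use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
enum FacetValuesSort {
    Alpha, // lexicographic order, the default
    Count, // decreasing number of matching records
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct FacetingSettings {
    max_values_per_facet: usize,
    sort_facet_values_by: BTreeMap<String, FacetValuesSort>,
}

fn main() {
    let mut sort_facet_values_by = BTreeMap::new();
    sort_facet_values_by.insert("*".to_string(), FacetValuesSort::Alpha); // default for all facets
    sort_facet_values_by.insert("genres".to_string(), FacetValuesSort::Count); // hypothetical facet
    let faceting = FacetingSettings { max_values_per_facet: 100, sort_facet_values_by };
    // Prints: {"maxValuesPerFacet":100,"sortFacetValuesBy":{"*":"alpha","genres":"count"}}
    println!("{}", serde_json::to_string(&faceting).unwrap());
}

The "*" key acts as the default order for every facet; the search code later in this compare falls back to it when a facet has no explicit entry.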


@@ -2,3 +2,4 @@ target
Dockerfile
.dockerignore
.gitignore
**/.git


@@ -35,7 +35,7 @@ jobs:
- name: Build deb package
run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
- name: Upload debian pkg to release
uses: svenstaro/upload-release-action@2.6.1
uses: svenstaro/upload-release-action@2.5.0
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/debian/meilisearch.deb


@@ -54,7 +54,7 @@ jobs:
# No need to upload binaries for dry run (cron)
- name: Upload binaries to release
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.6.1
uses: svenstaro/upload-release-action@2.5.0
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/release/meilisearch
@@ -87,7 +87,7 @@ jobs:
# No need to upload binaries for dry run (cron)
- name: Upload binaries to release
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.6.1
uses: svenstaro/upload-release-action@2.5.0
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/release/${{ matrix.artifact_name }}
@@ -121,7 +121,7 @@ jobs:
- name: Upload the binary to release
# No need to upload binaries for dry run (cron)
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.6.1
uses: svenstaro/upload-release-action@2.5.0
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/${{ matrix.target }}/release/meilisearch
@@ -183,7 +183,7 @@ jobs:
- name: Upload the binary to release
# No need to upload binaries for dry run (cron)
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.6.1
uses: svenstaro/upload-release-action@2.5.0
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/${{ matrix.target }}/release/meilisearch


@@ -58,9 +58,13 @@ jobs:
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
with:
platforms: linux/amd64,linux/arm64
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- name: Login to Docker Hub
uses: docker/login-action@v2
@@ -88,10 +92,13 @@ jobs:
push: true
platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}
builder: ${{ steps.buildx.outputs.name }}
build-args: |
COMMIT_SHA=${{ github.sha }}
COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
GIT_TAG=${{ github.ref_name }}
cache-from: type=gha
cache-to: type=gha,mode=max
# /!\ Don't touch this without checking with Cloud team
- name: Send CI information to Cloud team


@@ -3,11 +3,6 @@ name: SDKs tests
on:
workflow_dispatch:
inputs:
docker_image:
description: 'The Meilisearch Docker image used'
required: false
default: nightly
schedule:
- cron: "0 6 * * MON" # Every Monday at 6:00AM
@@ -16,28 +11,13 @@ env:
MEILI_NO_ANALYTICS: 'true'
jobs:
define-docker-image:
runs-on: ubuntu-latest
outputs:
docker-image: ${{ steps.define-image.outputs.docker-image }}
steps:
- uses: actions/checkout@v3
- name: Define the Docker image we need to use
id: define-image
run: |
event=${{ github.event.action }}
echo "docker-image=nightly" >> $GITHUB_OUTPUT
if [[ $event == 'workflow_dispatch' ]]; then
echo "docker-image=${{ github.event.inputs.docker_image }}" >> $GITHUB_OUTPUT
fi
meilisearch-js-tests:
needs: define-docker-image
name: JS SDK tests
runs-on: ubuntu-latest
services:
meilisearch:
image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
image: getmeili/meilisearch:nightly
env:
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -67,12 +47,11 @@ jobs:
run: yarn test:env:browser
instant-meilisearch-tests:
needs: define-docker-image
name: instant-meilisearch tests
runs-on: ubuntu-latest
services:
meilisearch:
image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
image: getmeili/meilisearch:nightly
env:
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -94,12 +73,11 @@ jobs:
run: yarn build
meilisearch-php-tests:
needs: define-docker-image
name: PHP SDK tests
runs-on: ubuntu-latest
services:
meilisearch:
image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
image: getmeili/meilisearch:nightly
env:
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -125,12 +103,11 @@ jobs:
composer remove --dev guzzlehttp/guzzle http-interop/http-factory-guzzle
meilisearch-python-tests:
needs: define-docker-image
name: Python SDK tests
runs-on: ubuntu-latest
services:
meilisearch:
image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
image: getmeili/meilisearch:nightly
env:
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -150,12 +127,11 @@ jobs:
run: pipenv run pytest
meilisearch-go-tests:
needs: define-docker-image
name: Go SDK tests
runs-on: ubuntu-latest
services:
meilisearch:
image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
image: getmeili/meilisearch:nightly
env:
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -163,7 +139,7 @@ jobs:
- '7700:7700'
steps:
- name: Set up Go
uses: actions/setup-go@v4
uses: actions/setup-go@v3
with:
go-version: stable
- uses: actions/checkout@v3
@@ -180,12 +156,11 @@ jobs:
run: go test -v ./...
meilisearch-ruby-tests:
needs: define-docker-image
name: Ruby SDK tests
runs-on: ubuntu-latest
services:
meilisearch:
image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
image: getmeili/meilisearch:nightly
env:
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -205,12 +180,11 @@ jobs:
run: bundle exec rspec
meilisearch-rust-tests:
needs: define-docker-image
name: Rust SDK tests
runs-on: ubuntu-latest
services:
meilisearch:
image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
image: getmeili/meilisearch:nightly
env:
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}


@@ -43,7 +43,7 @@ jobs:
toolchain: nightly
override: true
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.4.0
uses: Swatinem/rust-cache@v2.2.1
- name: Run cargo check without any default features
uses: actions-rs/cargo@v1
with:
@@ -65,7 +65,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.4.0
uses: Swatinem/rust-cache@v2.2.1
- name: Run cargo check without any default features
uses: actions-rs/cargo@v1
with:
@@ -105,29 +105,6 @@ jobs:
command: test
args: --workspace --locked --release --all-features
test-disabled-tokenization:
name: Test disabled tokenization
runs-on: ubuntu-latest
container:
image: ubuntu:18.04
if: github.event_name == 'schedule'
steps:
- uses: actions/checkout@v3
- name: Install needed dependencies
run: |
apt-get update
apt-get install --assume-yes build-essential curl
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- name: Run cargo tree without default features and check lindera is not present
run: |
cargo tree -f '{p} {f}' -e normal --no-default-features | grep lindera -vqz
- name: Run cargo tree with default features and check lindera is present
run: |
cargo tree -f '{p} {f}' -e normal | grep lindera -qz
# We run tests in debug also, to make sure that the debug_assertions are hit
test-debug:
name: Run tests in debug
@@ -146,7 +123,7 @@ jobs:
toolchain: stable
override: true
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.4.0
uses: Swatinem/rust-cache@v2.2.1
- name: Run tests in debug
uses: actions-rs/cargo@v1
with:
@@ -165,7 +142,7 @@ jobs:
override: true
components: clippy
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.4.0
uses: Swatinem/rust-cache@v2.2.1
- name: Run cargo clippy
uses: actions-rs/cargo@v1
with:
@@ -184,7 +161,7 @@ jobs:
override: true
components: rustfmt
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.4.0
uses: Swatinem/rust-cache@v2.2.1
- name: Run cargo fmt
# Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
# Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate

Cargo.lock (generated)

@@ -2730,6 +2730,7 @@ dependencies = [
"geoutils",
"grenad",
"heed",
"indexmap",
"insta",
"itertools",
"json-depth-checker",


@@ -1,3 +1,4 @@
# syntax=docker/dockerfile:1.4
# Compile
FROM rust:alpine3.16 AS compiler
@@ -11,7 +12,7 @@ ARG GIT_TAG
ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG}
ENV RUSTFLAGS="-C target-feature=-crt-static"
COPY . .
COPY --link . .
RUN set -eux; \
apkArch="$(apk --print-arch)"; \
if [ "$apkArch" = "aarch64" ]; then \
@@ -30,7 +31,7 @@ RUN apk update --quiet \
# add meilisearch to the `/bin` so you can run it from anywhere and it's easy
# to find.
COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch
COPY --from=compiler --link /meilisearch/target/release/meilisearch /bin/meilisearch
# To stay compatible with the older version of the container (pre v0.27.0) we're
# going to symlink the meilisearch binary in the path to `/meilisearch`
RUN ln -s /bin/meilisearch /meilisearch

File diff suppressed because it is too large.


@@ -1,19 +0,0 @@
global:
scrape_interval: 15s # By default, scrape targets every 15 seconds.
# Attach these labels to any time series or alerts when communicating with
# external systems (federation, remote storage, Alertmanager).
external_labels:
monitor: 'codelab-monitor'
# A scrape configuration containing exactly one endpoint to scrape:
# Here it's Prometheus itself.
scrape_configs:
# The job name is added as a label `job=<job_name>` to any timeseries scraped from this config.
- job_name: 'meilisearch'
# Override the global default and scrape targets from this job every 5 seconds.
scrape_interval: 5s
static_configs:
- targets: ['localhost:7700']


@@ -1,131 +1,131 @@
# This file shows the default configuration of Meilisearch.
# All variables are defined here: https://www.meilisearch.com/docs/learn/configuration/instance_options#environment-variables
db_path = "./data.ms"
# Designates the location where database files will be created and retrieved.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#database-path
db_path = "./data.ms"
env = "development"
# Configures the instance's environment. Value must be either `production` or `development`.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#environment
env = "development"
# The address on which the HTTP server will listen.
http_addr = "localhost:7700"
# The address on which the HTTP server will listen.
# master_key = "YOUR_MASTER_KEY_VALUE"
# Sets the instance's master key, automatically protecting all routes except GET /health.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#master-key
# master_key = "YOUR_MASTER_KEY_VALUE"
# no_analytics = true
# Deactivates Meilisearch's built-in telemetry when provided.
# Meilisearch automatically collects data from all instances that do not opt out using this flag.
# All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted at any time.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#disable-analytics
# no_analytics = true
http_payload_size_limit = "100 MB"
# Sets the maximum size of accepted payloads.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#payload-limit-size
http_payload_size_limit = "100 MB"
log_level = "INFO"
# Defines how much detail should be present in Meilisearch's logs.
# Meilisearch currently supports six log levels, listed in order of increasing verbosity: `OFF`, `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`
# https://www.meilisearch.com/docs/learn/configuration/instance_options#log-level
log_level = "INFO"
# max_indexing_memory = "2 GiB"
# Sets the maximum amount of RAM Meilisearch can use when indexing.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#max-indexing-memory
# max_indexing_memory = "2 GiB"
# max_indexing_threads = 4
# Sets the maximum number of threads Meilisearch can use during indexing.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#max-indexing-threads
# max_indexing_threads = 4
#############
### DUMPS ###
#############
dump_dir = "dumps/"
# Sets the directory where Meilisearch will create dump files.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#dump-directory
dump_dir = "dumps/"
# import_dump = "./path/to/my/file.dump"
# Imports the dump file located at the specified path. Path must point to a .dump file.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#import-dump
# import_dump = "./path/to/my/file.dump"
ignore_missing_dump = false
# Prevents Meilisearch from throwing an error when `import_dump` does not point to a valid dump file.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-missing-dump
ignore_missing_dump = false
ignore_dump_if_db_exists = false
# Prevents a Meilisearch instance with an existing database from throwing an error when using `import_dump`.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-dump-if-db-exists
ignore_dump_if_db_exists = false
#################
### SNAPSHOTS ###
#################
schedule_snapshot = false
# Enables scheduled snapshots when true, disables them when false (the default).
# If the value is given as an integer, then enables the scheduled snapshot with the passed value as the interval
# between each snapshot, in seconds.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#schedule-snapshot-creation
schedule_snapshot = false
snapshot_dir = "snapshots/"
# Sets the directory where Meilisearch will store snapshots.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#snapshot-destination
snapshot_dir = "snapshots/"
# import_snapshot = "./path/to/my/snapshot"
# Launches Meilisearch after importing a previously-generated snapshot at the given filepath.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#import-snapshot
# import_snapshot = "./path/to/my/snapshot"
ignore_missing_snapshot = false
# Prevents a Meilisearch instance from throwing an error when `import_snapshot` does not point to a valid snapshot file.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-missing-snapshot
ignore_missing_snapshot = false
ignore_snapshot_if_db_exists = false
# Prevents a Meilisearch instance with an existing database from throwing an error when using `import_snapshot`.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-snapshot-if-db-exists
ignore_snapshot_if_db_exists = false
###########
### SSL ###
###########
# ssl_auth_path = "./path/to/root"
# Enables client authentication in the specified path.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-authentication-path
# ssl_auth_path = "./path/to/root"
# ssl_cert_path = "./path/to/certfile"
# Sets the server's SSL certificates.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-certificates-path
# ssl_cert_path = "./path/to/certfile"
# ssl_key_path = "./path/to/private-key"
# Sets the server's SSL key files.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-key-path
# ssl_key_path = "./path/to/private-key"
# ssl_ocsp_path = "./path/to/ocsp-file"
# Sets the server's OCSP file.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-ocsp-path
# ssl_ocsp_path = "./path/to/ocsp-file"
ssl_require_auth = false
# Makes SSL authentication mandatory.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-require-auth
ssl_require_auth = false
ssl_resumption = false
# Activates SSL session resumption.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-resumption
ssl_resumption = false
ssl_tickets = false
# Activates SSL tickets.
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-tickets
ssl_tickets = false
#############################
### Experimental features ###
#############################
experimental_enable_metrics = false
# Experimental metrics feature. For more information, see: <https://github.com/meilisearch/meilisearch/discussions/3518>
# Enables the Prometheus metrics on the `GET /metrics` endpoint.
experimental_enable_metrics = false
# Experimental RAM reduction during indexing, do not use in production, see: <https://github.com/meilisearch/product/discussions/652>
experimental_reduce_indexing_memory_usage = false
# Experimental RAM reduction during indexing, do not use in production, see: <https://github.com/meilisearch/product/discussions/652>


@@ -358,6 +358,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
faceting: match settings.faceting {
v5::Setting::Set(faceting) => v6::Setting::Set(v6::FacetingSettings {
max_values_per_facet: faceting.max_values_per_facet.into(),
sort_facet_values_by: v6::Setting::NotSet,
}),
v5::Setting::Reset => v6::Setting::Reset,
v5::Setting::NotSet => v6::Setting::NotSet,

File diff suppressed because it is too large.


@@ -90,17 +90,8 @@ pub enum IndexStatus {
pub struct IndexStats {
/// Number of documents in the index.
pub number_of_documents: u64,
/// Size taken up by the index' DB, in bytes.
///
/// This includes the size taken by both the used and free pages of the DB, and as the free pages
/// are not returned to the disk after a deletion, this number is typically larger than
/// `used_database_size` that only includes the size of the used pages.
/// Size of the index' DB, in bytes.
pub database_size: u64,
/// Size taken by the used pages of the index' DB, in bytes.
///
/// As the DB backend does not return to the disk the pages that are not currently used by the DB,
/// this value is typically smaller than `database_size`.
pub used_database_size: u64,
/// Association of every field name with the number of times it occurs in the documents.
pub field_distribution: FieldDistribution,
/// Creation date of the index.
@@ -116,10 +107,10 @@ impl IndexStats {
///
/// - rtxn: a RO transaction for the index, obtained from `Index::read_txn()`.
pub fn new(index: &Index, rtxn: &RoTxn) -> Result<Self> {
let database_size = index.on_disk_size()?;
Ok(IndexStats {
number_of_documents: index.number_of_documents(rtxn)?,
database_size: index.on_disk_size()?,
used_database_size: index.used_size()?,
database_size,
field_distribution: index.field_distribution(rtxn)?,
created_at: index.created_at(rtxn)?,
updated_at: index.updated_at(rtxn)?,

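For context on the hunk above: database_size comes from Index::on_disk_size() and counts both used and free pages, while the removed used_database_size came from Index::used_size() and counted used pages only; because an LMDB-style backend never gives freed pages back to the disk, the first figure is always at least as large as the second. A minimal standalone sketch of that invariant (the Env struct and the page figures are illustrative assumptions, not the actual heed API):

// Assumed stand-ins for the env accessors referenced elsewhere in this
// compare (`real_disk_size`, `non_free_pages_size`); not heed's signatures.
struct Env {
    page_size: u64,
    used_pages: u64,
    free_pages: u64, // freed by deletions but never returned to the disk
}

impl Env {
    fn real_disk_size(&self) -> u64 {
        (self.used_pages + self.free_pages) * self.page_size
    }
    fn non_free_pages_size(&self) -> u64 {
        self.used_pages * self.page_size
    }
}

fn main() {
    let env = Env { page_size: 4096, used_pages: 900, free_pages: 100 };
    // database_size (with free pages) is always >= used_database_size.
    assert!(env.real_disk_size() >= env.non_free_pages_size());
}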

@@ -31,7 +31,7 @@ mod uuid_codec;
pub type Result<T> = std::result::Result<T, Error>;
pub type TaskId = u32;
use std::collections::{BTreeMap, HashMap};
use std::collections::HashMap;
use std::ops::{Bound, RangeBounds};
use std::path::{Path, PathBuf};
use std::sync::atomic::AtomicBool;
@@ -573,16 +573,10 @@ impl IndexScheduler {
&self.index_mapper.indexer_config
}
/// Return the real database size (i.e.: The size **with** the free pages)
pub fn size(&self) -> Result<u64> {
Ok(self.env.real_disk_size()?)
}
/// Return the used database size (i.e.: The size **without** the free pages)
pub fn used_size(&self) -> Result<u64> {
Ok(self.env.non_free_pages_size()?)
}
/// Return the index corresponding to the name.
///
/// * If the index wasn't opened before, the index will be opened.
@@ -762,38 +756,6 @@ impl IndexScheduler {
Ok(tasks)
}
/// The returned structure contains:
/// 1. The name of the property being observed can be `statuses`, `types`, or `indexes`.
/// 2. The name of the specific data related to the property can be `enqueued` for the `statuses`, `settingsUpdate` for the `types`, or the name of the index for the `indexes`, for example.
/// 3. The number of times the properties appeared.
pub fn get_stats(&self) -> Result<BTreeMap<String, BTreeMap<String, u64>>> {
let rtxn = self.read_txn()?;
let mut res = BTreeMap::new();
res.insert(
"statuses".to_string(),
enum_iterator::all::<Status>()
.map(|s| Ok((s.to_string(), self.get_status(&rtxn, s)?.len())))
.collect::<Result<BTreeMap<String, u64>>>()?,
);
res.insert(
"types".to_string(),
enum_iterator::all::<Kind>()
.map(|s| Ok((s.to_string(), self.get_kind(&rtxn, s)?.len())))
.collect::<Result<BTreeMap<String, u64>>>()?,
);
res.insert(
"indexes".to_string(),
self.index_tasks
.iter(&rtxn)?
.map(|res| Ok(res.map(|(name, bitmap)| (name.to_string(), bitmap.len()))?))
.collect::<Result<BTreeMap<String, u64>>>()?,
);
Ok(res)
}
/// Return true iff there is at least one task associated with this index
/// that is processing.
pub fn is_index_processing(&self, index: &str) -> Result<bool> {


@@ -466,7 +466,7 @@ impl IndexScheduler {
}
}
Details::DocumentDeletionByFilter { deleted_documents, original_filter: _ } => {
assert_eq!(kind.as_kind(), Kind::DocumentDeletion);
assert_eq!(kind.as_kind(), Kind::DocumentDeletionByFilter);
let (index_uid, _) = if let KindWithContent::DocumentDeletionByFilter {
ref index_uid,
ref filter_expr,


@@ -45,11 +45,6 @@ impl AuthController {
self.store.size()
}
/// Return the used size of the `AuthController` database in bytes.
pub fn used_size(&self) -> Result<u64> {
self.store.used_size()
}
pub fn create_key(&self, create_key: CreateApiKey) -> Result<Key> {
match self.store.get_api_key(create_key.uid)? {
Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(create_key.uid.to_string())),


@@ -75,11 +75,6 @@ impl HeedAuthStore {
Ok(self.env.real_disk_size()?)
}
/// Return the number of bytes actually used in the database
pub fn used_size(&self) -> Result<u64> {
Ok(self.env.non_free_pages_size()?)
}
pub fn set_drop_on_close(&mut self, v: bool) {
self.should_close_on_drop = v;
}


@@ -175,121 +175,122 @@ macro_rules! make_error_codes {
// An exhaustive list of all the error codes used by meilisearch.
make_error_codes! {
ApiKeyAlreadyExists , InvalidRequest , CONFLICT ;
ApiKeyNotFound , InvalidRequest , NOT_FOUND ;
BadParameter , InvalidRequest , BAD_REQUEST;
BadRequest , InvalidRequest , BAD_REQUEST;
DatabaseSizeLimitReached , Internal , INTERNAL_SERVER_ERROR;
DocumentNotFound , InvalidRequest , NOT_FOUND;
DumpAlreadyProcessing , InvalidRequest , CONFLICT;
DumpNotFound , InvalidRequest , NOT_FOUND;
DumpProcessFailed , Internal , INTERNAL_SERVER_ERROR;
DuplicateIndexFound , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyActions , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyCreatedAt , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyExpiresAt , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyIndexes , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyKey , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyUid , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyUpdatedAt , InvalidRequest , BAD_REQUEST;
ImmutableIndexCreatedAt , InvalidRequest , BAD_REQUEST;
ImmutableIndexUid , InvalidRequest , BAD_REQUEST;
ImmutableIndexUpdatedAt , InvalidRequest , BAD_REQUEST;
IndexAlreadyExists , InvalidRequest , CONFLICT ;
IndexCreationFailed , Internal , INTERNAL_SERVER_ERROR;
IndexNotFound , InvalidRequest , NOT_FOUND;
IndexPrimaryKeyAlreadyExists , InvalidRequest , BAD_REQUEST ;
IndexPrimaryKeyMultipleCandidatesFound, InvalidRequest , BAD_REQUEST;
IndexPrimaryKeyNoCandidateFound , InvalidRequest , BAD_REQUEST ;
Internal , Internal , INTERNAL_SERVER_ERROR ;
InvalidApiKey , Auth , FORBIDDEN ;
InvalidApiKeyActions , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyDescription , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyExpiresAt , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyIndexes , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyLimit , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyName , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyOffset , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyUid , InvalidRequest , BAD_REQUEST ;
InvalidContentType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ;
InvalidDocumentCsvDelimiter , InvalidRequest , BAD_REQUEST ;
InvalidDocumentFields , InvalidRequest , BAD_REQUEST ;
MissingDocumentFilter , InvalidRequest , BAD_REQUEST ;
InvalidDocumentFilter , InvalidRequest , BAD_REQUEST ;
InvalidDocumentGeoField , InvalidRequest , BAD_REQUEST ;
InvalidDocumentId , InvalidRequest , BAD_REQUEST ;
InvalidDocumentLimit , InvalidRequest , BAD_REQUEST ;
InvalidDocumentOffset , InvalidRequest , BAD_REQUEST ;
InvalidIndexLimit , InvalidRequest , BAD_REQUEST ;
InvalidIndexOffset , InvalidRequest , BAD_REQUEST ;
InvalidIndexPrimaryKey , InvalidRequest , BAD_REQUEST ;
InvalidIndexUid , InvalidRequest , BAD_REQUEST ;
InvalidRestrictSearchableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToCrop , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToHighlight , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToRetrieve , InvalidRequest , BAD_REQUEST ;
InvalidSearchCropLength , InvalidRequest , BAD_REQUEST ;
InvalidSearchCropMarker , InvalidRequest , BAD_REQUEST ;
InvalidSearchFacets , InvalidRequest , BAD_REQUEST ;
InvalidSearchFilter , InvalidRequest , BAD_REQUEST ;
InvalidSearchHighlightPostTag , InvalidRequest , BAD_REQUEST ;
InvalidSearchHighlightPreTag , InvalidRequest , BAD_REQUEST ;
InvalidSearchHitsPerPage , InvalidRequest , BAD_REQUEST ;
InvalidSearchLimit , InvalidRequest , BAD_REQUEST ;
InvalidSearchMatchingStrategy , InvalidRequest , BAD_REQUEST ;
InvalidSearchOffset , InvalidRequest , BAD_REQUEST ;
InvalidSearchPage , InvalidRequest , BAD_REQUEST ;
InvalidSearchQ , InvalidRequest , BAD_REQUEST ;
InvalidSearchShowMatchesPosition , InvalidRequest , BAD_REQUEST ;
InvalidSearchSort , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDisplayedAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDistinctAttribute , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFaceting , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFilterableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsPagination , InvalidRequest , BAD_REQUEST ;
InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSortableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsStopWords , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSynonyms , InvalidRequest , BAD_REQUEST ;
InvalidSettingsTypoTolerance , InvalidRequest , BAD_REQUEST ;
InvalidState , Internal , INTERNAL_SERVER_ERROR ;
InvalidStoreFile , Internal , INTERNAL_SERVER_ERROR ;
InvalidSwapDuplicateIndexFound , InvalidRequest , BAD_REQUEST ;
InvalidSwapIndexes , InvalidRequest , BAD_REQUEST ;
InvalidTaskAfterEnqueuedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskAfterFinishedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskAfterStartedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskBeforeEnqueuedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskBeforeFinishedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskBeforeStartedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskCanceledBy , InvalidRequest , BAD_REQUEST ;
InvalidTaskFrom , InvalidRequest , BAD_REQUEST ;
InvalidTaskLimit , InvalidRequest , BAD_REQUEST ;
InvalidTaskStatuses , InvalidRequest , BAD_REQUEST ;
InvalidTaskTypes , InvalidRequest , BAD_REQUEST ;
InvalidTaskUids , InvalidRequest , BAD_REQUEST ;
IoError , System , UNPROCESSABLE_ENTITY;
MalformedPayload , InvalidRequest , BAD_REQUEST ;
MaxFieldsLimitExceeded , InvalidRequest , BAD_REQUEST ;
MissingApiKeyActions , InvalidRequest , BAD_REQUEST ;
MissingApiKeyExpiresAt , InvalidRequest , BAD_REQUEST ;
MissingApiKeyIndexes , InvalidRequest , BAD_REQUEST ;
MissingAuthorizationHeader , Auth , UNAUTHORIZED ;
MissingContentType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ;
MissingDocumentId , InvalidRequest , BAD_REQUEST ;
MissingIndexUid , InvalidRequest , BAD_REQUEST ;
MissingMasterKey , Auth , UNAUTHORIZED ;
MissingPayload , InvalidRequest , BAD_REQUEST ;
MissingSwapIndexes , InvalidRequest , BAD_REQUEST ;
MissingTaskFilters , InvalidRequest , BAD_REQUEST ;
NoSpaceLeftOnDevice , System , UNPROCESSABLE_ENTITY;
PayloadTooLarge , InvalidRequest , PAYLOAD_TOO_LARGE ;
TaskNotFound , InvalidRequest , NOT_FOUND ;
TooManyOpenFiles , System , UNPROCESSABLE_ENTITY ;
UnretrievableDocument , Internal , BAD_REQUEST ;
UnretrievableErrorCode , InvalidRequest , BAD_REQUEST ;
UnsupportedMediaType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE
ApiKeyAlreadyExists , InvalidRequest , CONFLICT ;
ApiKeyNotFound , InvalidRequest , NOT_FOUND ;
BadParameter , InvalidRequest , BAD_REQUEST;
BadRequest , InvalidRequest , BAD_REQUEST;
DatabaseSizeLimitReached , Internal , INTERNAL_SERVER_ERROR;
DocumentNotFound , InvalidRequest , NOT_FOUND;
DumpAlreadyProcessing , InvalidRequest , CONFLICT;
DumpNotFound , InvalidRequest , NOT_FOUND;
DumpProcessFailed , Internal , INTERNAL_SERVER_ERROR;
DuplicateIndexFound , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyActions , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyCreatedAt , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyExpiresAt , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyIndexes , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyKey , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyUid , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyUpdatedAt , InvalidRequest , BAD_REQUEST;
ImmutableIndexCreatedAt , InvalidRequest , BAD_REQUEST;
ImmutableIndexUid , InvalidRequest , BAD_REQUEST;
ImmutableIndexUpdatedAt , InvalidRequest , BAD_REQUEST;
IndexAlreadyExists , InvalidRequest , CONFLICT ;
IndexCreationFailed , Internal , INTERNAL_SERVER_ERROR;
IndexNotFound , InvalidRequest , NOT_FOUND;
IndexPrimaryKeyAlreadyExists , InvalidRequest , BAD_REQUEST ;
IndexPrimaryKeyMultipleCandidatesFound , InvalidRequest , BAD_REQUEST;
IndexPrimaryKeyNoCandidateFound , InvalidRequest , BAD_REQUEST ;
Internal , Internal , INTERNAL_SERVER_ERROR ;
InvalidApiKey , Auth , FORBIDDEN ;
InvalidApiKeyActions , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyDescription , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyExpiresAt , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyIndexes , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyLimit , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyName , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyOffset , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyUid , InvalidRequest , BAD_REQUEST ;
InvalidContentType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ;
InvalidDocumentCsvDelimiter , InvalidRequest , BAD_REQUEST ;
InvalidDocumentFields , InvalidRequest , BAD_REQUEST ;
MissingDocumentFilter , InvalidRequest , BAD_REQUEST ;
InvalidDocumentFilter , InvalidRequest , BAD_REQUEST ;
InvalidDocumentGeoField , InvalidRequest , BAD_REQUEST ;
InvalidDocumentId , InvalidRequest , BAD_REQUEST ;
InvalidDocumentLimit , InvalidRequest , BAD_REQUEST ;
InvalidDocumentOffset , InvalidRequest , BAD_REQUEST ;
InvalidIndexLimit , InvalidRequest , BAD_REQUEST ;
InvalidIndexOffset , InvalidRequest , BAD_REQUEST ;
InvalidIndexPrimaryKey , InvalidRequest , BAD_REQUEST ;
InvalidIndexUid , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToCrop , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToHighlight , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToRetrieve , InvalidRequest , BAD_REQUEST ;
InvalidSearchCropLength , InvalidRequest , BAD_REQUEST ;
InvalidSearchCropMarker , InvalidRequest , BAD_REQUEST ;
InvalidSearchFacets , InvalidRequest , BAD_REQUEST ;
InvalidSearchFilter , InvalidRequest , BAD_REQUEST ;
InvalidSearchHighlightPostTag , InvalidRequest , BAD_REQUEST ;
InvalidSearchHighlightPreTag , InvalidRequest , BAD_REQUEST ;
InvalidSearchHitsPerPage , InvalidRequest , BAD_REQUEST ;
InvalidSearchLimit , InvalidRequest , BAD_REQUEST ;
InvalidSearchMatchingStrategy , InvalidRequest , BAD_REQUEST ;
InvalidSearchOffset , InvalidRequest , BAD_REQUEST ;
InvalidSearchPage , InvalidRequest , BAD_REQUEST ;
InvalidSearchQ , InvalidRequest , BAD_REQUEST ;
InvalidSearchShowMatchesPosition , InvalidRequest , BAD_REQUEST ;
InvalidSearchSort , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDisplayedAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDistinctAttribute , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFilterableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsPagination , InvalidRequest , BAD_REQUEST ;
InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSortableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsStopWords , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSynonyms , InvalidRequest , BAD_REQUEST ;
InvalidSettingsTypoTolerance , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFaceting , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFacetingMaxValuesPerFacet , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFacetingSortFacetValuesBy , InvalidRequest , BAD_REQUEST ;
InvalidState , Internal , INTERNAL_SERVER_ERROR ;
InvalidStoreFile , Internal , INTERNAL_SERVER_ERROR ;
InvalidSwapDuplicateIndexFound , InvalidRequest , BAD_REQUEST ;
InvalidSwapIndexes , InvalidRequest , BAD_REQUEST ;
InvalidTaskAfterEnqueuedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskAfterFinishedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskAfterStartedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskBeforeEnqueuedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskBeforeFinishedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskBeforeStartedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskCanceledBy , InvalidRequest , BAD_REQUEST ;
InvalidTaskFrom , InvalidRequest , BAD_REQUEST ;
InvalidTaskLimit , InvalidRequest , BAD_REQUEST ;
InvalidTaskStatuses , InvalidRequest , BAD_REQUEST ;
InvalidTaskTypes , InvalidRequest , BAD_REQUEST ;
InvalidTaskUids , InvalidRequest , BAD_REQUEST ;
IoError , System , UNPROCESSABLE_ENTITY;
MalformedPayload , InvalidRequest , BAD_REQUEST ;
MaxFieldsLimitExceeded , InvalidRequest , BAD_REQUEST ;
MissingApiKeyActions , InvalidRequest , BAD_REQUEST ;
MissingApiKeyExpiresAt , InvalidRequest , BAD_REQUEST ;
MissingApiKeyIndexes , InvalidRequest , BAD_REQUEST ;
MissingAuthorizationHeader , Auth , UNAUTHORIZED ;
MissingContentType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ;
MissingDocumentId , InvalidRequest , BAD_REQUEST ;
MissingIndexUid , InvalidRequest , BAD_REQUEST ;
MissingMasterKey , Auth , UNAUTHORIZED ;
MissingPayload , InvalidRequest , BAD_REQUEST ;
MissingSwapIndexes , InvalidRequest , BAD_REQUEST ;
MissingTaskFilters , InvalidRequest , BAD_REQUEST ;
NoSpaceLeftOnDevice , System , UNPROCESSABLE_ENTITY;
PayloadTooLarge , InvalidRequest , PAYLOAD_TOO_LARGE ;
TaskNotFound , InvalidRequest , NOT_FOUND ;
TooManyOpenFiles , System , UNPROCESSABLE_ENTITY ;
UnretrievableDocument , Internal , BAD_REQUEST ;
UnretrievableErrorCode , InvalidRequest , BAD_REQUEST ;
UnsupportedMediaType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE
}
impl ErrorCode for JoinError {


@@ -0,0 +1,33 @@
use deserr::Deserr;
use milli::OrderBy;
use serde::{Deserialize, Serialize};
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Deserr)]
#[serde(rename_all = "camelCase")]
#[deserr(rename_all = camelCase)]
pub enum FacetValuesSort {
/// Facet values are sorted in alphabetical order, ascending from A to Z.
#[default]
Alpha,
/// Facet values are sorted by decreasing count.
/// The count is the number of records containing this facet value in the results of the query.
Count,
}
impl From<FacetValuesSort> for OrderBy {
fn from(val: FacetValuesSort) -> Self {
match val {
FacetValuesSort::Alpha => OrderBy::Lexicographic,
FacetValuesSort::Count => OrderBy::Count,
}
}
}
impl From<OrderBy> for FacetValuesSort {
fn from(val: OrderBy) -> Self {
match val {
OrderBy::Lexicographic => FacetValuesSort::Alpha,
OrderBy::Count => FacetValuesSort::Count,
}
}
}

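The two From impls above are the whole contract of this new file, so a standalone round-trip check makes the mapping explicit (both enums are mirrored locally here; OrderBy is an assumed stand-in for milli::OrderBy):

#[derive(Debug, Clone, Copy, PartialEq)]
enum OrderBy { Lexicographic, Count } // assumed stand-in for milli::OrderBy

#[derive(Debug, Clone, Copy, PartialEq)]
enum FacetValuesSort { Alpha, Count }

impl From<FacetValuesSort> for OrderBy {
    fn from(val: FacetValuesSort) -> Self {
        match val {
            FacetValuesSort::Alpha => OrderBy::Lexicographic,
            FacetValuesSort::Count => OrderBy::Count,
        }
    }
}

impl From<OrderBy> for FacetValuesSort {
    fn from(val: OrderBy) -> Self {
        match val {
            OrderBy::Lexicographic => FacetValuesSort::Alpha,
            OrderBy::Count => FacetValuesSort::Count,
        }
    }
}

fn main() {
    // Converting to OrderBy and back yields the original variant.
    for v in [FacetValuesSort::Alpha, FacetValuesSort::Count] {
        assert_eq!(FacetValuesSort::from(OrderBy::from(v)), v);
    }
}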

@@ -2,6 +2,7 @@ pub mod compression;
pub mod deserr;
pub mod document_formats;
pub mod error;
pub mod facet_values_sort;
pub mod index_uid;
pub mod index_uid_pattern;
pub mod keys;


@@ -14,6 +14,7 @@ use serde::{Deserialize, Serialize, Serializer};
use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::facet_values_sort::FacetValuesSort;
/// The maximum number of results that the engine
/// will be able to return in one search call.
@@ -97,11 +98,14 @@ pub struct TypoSettings {
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrJsonError<InvalidSettingsFaceting>> + deserr::MergeWithError<DeserrJsonError<InvalidSettingsFacetingMaxValuesPerFacet>> + deserr::MergeWithError<DeserrJsonError<InvalidSettingsFacetingSortFacetValuesBy>>)]
pub struct FacetingSettings {
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
#[deserr(default, error = DeserrJsonError<InvalidSettingsFacetingMaxValuesPerFacet>)]
pub max_values_per_facet: Setting<usize>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsFacetingSortFacetValuesBy>)]
pub sort_facet_values_by: Setting<BTreeMap<String, FacetValuesSort>>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
@@ -398,12 +402,21 @@ pub fn apply_settings_to_builder(
Setting::NotSet => (),
}
match settings.faceting {
Setting::Set(ref value) => match value.max_values_per_facet {
Setting::Set(val) => builder.set_max_values_per_facet(val),
Setting::Reset => builder.reset_max_values_per_facet(),
Setting::NotSet => (),
},
match &settings.faceting {
Setting::Set(FacetingSettings { max_values_per_facet, sort_facet_values_by }) => {
match max_values_per_facet {
Setting::Set(val) => builder.set_max_values_per_facet(*val),
Setting::Reset => builder.reset_max_values_per_facet(),
Setting::NotSet => (),
}
match sort_facet_values_by {
Setting::Set(val) => builder.set_sort_facet_values_by(
val.iter().map(|(name, order)| (name.clone(), (*order).into())).collect(),
),
Setting::Reset => builder.reset_sort_facet_values_by(),
Setting::NotSet => (),
}
}
Setting::Reset => builder.reset_max_values_per_facet(),
Setting::NotSet => (),
}
@@ -476,6 +489,13 @@ pub fn settings(
max_values_per_facet: Setting::Set(
index.max_values_per_facet(rtxn)?.unwrap_or(DEFAULT_VALUES_PER_FACET),
),
sort_facet_values_by: Setting::Set(
index
.sort_facet_values_by(rtxn)?
.into_iter()
.map(|(name, sort)| (name, sort.into()))
.collect(),
),
};
let pagination = PaginationSettings {


@@ -395,6 +395,7 @@ impl std::error::Error for ParseTaskStatusError {}
pub enum Kind {
DocumentAdditionOrUpdate,
DocumentDeletion,
DocumentDeletionByFilter,
SettingsUpdate,
IndexCreation,
IndexDeletion,
@@ -411,6 +412,7 @@ impl Kind {
match self {
Kind::DocumentAdditionOrUpdate
| Kind::DocumentDeletion
| Kind::DocumentDeletionByFilter
| Kind::SettingsUpdate
| Kind::IndexCreation
| Kind::IndexDeletion
@@ -428,6 +430,7 @@ impl Display for Kind {
match self {
Kind::DocumentAdditionOrUpdate => write!(f, "documentAdditionOrUpdate"),
Kind::DocumentDeletion => write!(f, "documentDeletion"),
Kind::DocumentDeletionByFilter => write!(f, "documentDeletionByFilter"),
Kind::SettingsUpdate => write!(f, "settingsUpdate"),
Kind::IndexCreation => write!(f, "indexCreation"),
Kind::IndexDeletion => write!(f, "indexDeletion"),


@@ -14,14 +14,27 @@ default-run = "meilisearch"
[dependencies]
actix-cors = "0.6.4"
actix-http = { version = "3.3.1", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] }
actix-web = { version = "4.3.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }
actix-http = { version = "3.3.1", default-features = false, features = [
"compress-brotli",
"compress-gzip",
"rustls",
] }
actix-web = { version = "4.3.1", default-features = false, features = [
"macros",
"compress-brotli",
"compress-gzip",
"cookies",
"rustls",
] }
actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
anyhow = { version = "1.0.70", features = ["backtrace"] }
async-stream = "0.3.5"
async-trait = "0.1.68"
bstr = "1.4.0"
byte-unit = { version = "4.0.19", default-features = false, features = ["std", "serde"] }
byte-unit = { version = "4.0.19", default-features = false, features = [
"std",
"serde",
] }
bytes = "1.4.0"
clap = { version = "4.2.1", features = ["derive", "env"] }
crossbeam-channel = "0.5.8"
@@ -56,7 +69,10 @@ prometheus = { version = "0.13.3", features = ["process"] }
rand = "0.8.5"
rayon = "1.7.0"
regex = "1.7.3"
reqwest = { version = "0.11.16", features = ["rustls-tls", "json"], default-features = false }
reqwest = { version = "0.11.16", features = [
"rustls-tls",
"json",
], default-features = false }
rustls = "0.20.8"
rustls-pemfile = "1.0.2"
segment = { version = "0.2.2", optional = true }
@@ -70,7 +86,12 @@ sysinfo = "0.28.4"
tar = "0.4.38"
tempfile = "3.5.0"
thiserror = "1.0.40"
time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
time = { version = "0.3.20", features = [
"serde-well-known",
"formatting",
"parsing",
"macros",
] }
tokio = { version = "1.27.0", features = ["full"] }
tokio-stream = "0.1.12"
toml = "0.7.3"
@@ -89,7 +110,7 @@ brotli = "3.3.4"
insta = "1.29.0"
manifest-dir-macros = "0.1.16"
maplit = "1.0.2"
meili-snap = {path = "../meili-snap"}
meili-snap = { path = "../meili-snap" }
temp-env = "0.3.3"
urlencoding = "2.1.2"
yaup = "0.2.1"
@@ -98,7 +119,10 @@ yaup = "0.2.1"
anyhow = { version = "1.0.70", optional = true }
cargo_toml = { version = "0.15.2", optional = true }
hex = { version = "0.4.3", optional = true }
reqwest = { version = "0.11.16", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
reqwest = { version = "0.11.16", features = [
"blocking",
"rustls-tls",
], default-features = false, optional = true }
sha-1 = { version = "0.10.1", optional = true }
static-files = { version = "0.2.3", optional = true }
tempfile = { version = "3.5.0", optional = true }
@@ -108,7 +132,17 @@ zip = { version = "0.6.4", optional = true }
[features]
default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"]
analytics = ["segment"]
mini-dashboard = ["actix-web-static-files", "static-files", "anyhow", "cargo_toml", "hex", "reqwest", "sha-1", "tempfile", "zip"]
mini-dashboard = [
"actix-web-static-files",
"static-files",
"anyhow",
"cargo_toml",
"hex",
"reqwest",
"sha-1",
"tempfile",
"zip",
]
chinese = ["meilisearch-types/chinese"]
hebrew = ["meilisearch-types/hebrew"]
japanese = ["meilisearch-types/japanese"]


@@ -4,32 +4,20 @@ use prometheus::{
register_int_gauge_vec, HistogramVec, IntCounterVec, IntGauge, IntGaugeVec,
};
/// Create evenly distributed buckets
fn create_buckets() -> [f64; 29] {
(0..10)
.chain((10..100).step_by(10))
.chain((100..=1000).step_by(100))
.map(|i| i as f64 / 1000.)
.collect::<Vec<_>>()
.try_into()
.unwrap()
}
const HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: &[f64; 14] = &[
0.0005, 0.0008, 0.00085, 0.0009, 0.00095, 0.001, 0.00105, 0.0011, 0.00115, 0.0012, 0.0015,
0.002, 0.003, 1.0,
];
lazy_static! {
pub static ref HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: [f64; 29] = create_buckets();
pub static ref MEILISEARCH_HTTP_REQUESTS_TOTAL: IntCounterVec = register_int_counter_vec!(
opts!("meilisearch_http_requests_total", "Meilisearch HTTP requests total"),
pub static ref HTTP_REQUESTS_TOTAL: IntCounterVec = register_int_counter_vec!(
opts!("http_requests_total", "HTTP requests total"),
&["method", "path"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch DB Size In Bytes"))
register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch Db Size In Bytes"))
.expect("Can't create a metric");
pub static ref MEILISEARCH_USED_DB_SIZE_BYTES: IntGauge = register_int_gauge!(opts!(
"meilisearch_used_db_size_bytes",
"Meilisearch Used DB Size In Bytes"
))
.expect("Can't create a metric");
pub static ref MEILISEARCH_INDEX_COUNT: IntGauge =
register_int_gauge!(opts!("meilisearch_index_count", "Meilisearch Index Count"))
.expect("Can't create a metric");
@@ -38,16 +26,11 @@ lazy_static! {
&["index"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS: HistogramVec = register_histogram_vec!(
pub static ref HTTP_RESPONSE_TIME_SECONDS: HistogramVec = register_histogram_vec!(
"http_response_time_seconds",
"HTTP response times",
&["method", "path"],
HTTP_RESPONSE_TIME_CUSTOM_BUCKETS.to_vec()
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_NB_TASKS: IntGaugeVec = register_int_gauge_vec!(
opts!("meilisearch_nb_tasks", "Meilisearch Number of tasks"),
&["kind", "value"]
)
.expect("Can't create a metric");
}

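For the record, the replaced create_buckets helper above yields exactly 29 bucket boundaries: 10 values from 0..10, 9 from 10..100 in steps of 10, and 10 from 100..=1000 in steps of 100, all divided by 1000 so the histogram spans 0.000s to 1.000s. A standalone sketch confirming the count:

fn main() {
    // Same construction as the removed create_buckets(), kept runnable on its own.
    let buckets: Vec<f64> = (0..10)
        .chain((10..100).step_by(10))
        .chain((100..=1000).step_by(100))
        .map(|i| i as f64 / 1000.)
        .collect();
    assert_eq!(buckets.len(), 29); // 10 + 9 + 10 boundaries
    assert_eq!(buckets.first().copied(), Some(0.0));
    assert_eq!(buckets.last().copied(), Some(1.0));
}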

@@ -52,11 +52,11 @@ where
if is_registered_resource {
let request_method = req.method().to_string();
histogram_timer = Some(
crate::metrics::MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS
crate::metrics::HTTP_RESPONSE_TIME_SECONDS
.with_label_values(&[&request_method, request_path])
.start_timer(),
);
crate::metrics::MEILISEARCH_HTTP_REQUESTS_TOTAL
crate::metrics::HTTP_REQUESTS_TOTAL
.with_label_values(&[&request_method, request_path])
.inc();
}


@@ -66,8 +66,6 @@ pub struct SearchQueryGet {
crop_marker: String,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchMatchingStrategy>)]
matching_strategy: MatchingStrategy,
#[deserr(default, error = DeserrQueryParamError<InvalidRestrictSearchableAttributes>)]
pub restrict_searchable_attributes: Option<CS<String>>,
}
impl From<SearchQueryGet> for SearchQuery {
@@ -98,9 +96,6 @@ impl From<SearchQueryGet> for SearchQuery {
highlight_post_tag: other.highlight_post_tag,
crop_marker: other.crop_marker,
matching_strategy: other.matching_strategy,
restrict_searchable_attributes: other
.restrict_searchable_attributes
.map(|o| o.into_iter().collect()),
}
}
}


@@ -407,6 +407,7 @@ make_setting_route!(
json!({
"faceting": {
"max_values_per_facet": setting.as_ref().and_then(|s| s.max_values_per_facet.set()),
"sort_facet_values_by": setting.as_ref().and_then(|s| s.sort_facet_values_by.clone().set()),
},
}),
Some(req),
@@ -545,6 +546,10 @@ pub async fn update_all(
.as_ref()
.set()
.and_then(|s| s.max_values_per_facet.as_ref().set()),
"sort_facet_values_by": new_settings.faceting
.as_ref()
.set()
.and_then(|s| s.sort_facet_values_by.as_ref().set()),
},
"pagination": {
"max_total_hits": new_settings.pagination


@@ -17,7 +17,7 @@ pub fn configure(config: &mut web::ServiceConfig) {
pub async fn get_metrics(
index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
auth_controller: Data<AuthController>,
auth_controller: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<AuthController>>,
) -> Result<HttpResponse, ResponseError> {
let auth_filters = index_scheduler.filters();
if !auth_filters.all_indexes_authorized() {
@@ -28,10 +28,10 @@ pub async fn get_metrics(
return Err(error);
}
let response = create_all_stats((*index_scheduler).clone(), auth_controller, auth_filters)?;
let response =
create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), auth_filters)?;
crate::metrics::MEILISEARCH_DB_SIZE_BYTES.set(response.database_size as i64);
crate::metrics::MEILISEARCH_USED_DB_SIZE_BYTES.set(response.used_database_size as i64);
crate::metrics::MEILISEARCH_INDEX_COUNT.set(response.indexes.len() as i64);
for (index, value) in response.indexes.iter() {
@@ -40,14 +40,6 @@ pub async fn get_metrics(
.set(value.number_of_documents as i64);
}
for (kind, value) in index_scheduler.get_stats()? {
for (value, count) in value {
crate::metrics::MEILISEARCH_NB_TASKS
.with_label_values(&[&kind, &value])
.set(count as i64);
}
}
let encoder = TextEncoder::new();
let mut buffer = vec![];
encoder.encode(&prometheus::gather(), &mut buffer).expect("Failed to encode metrics");


@@ -231,8 +231,6 @@ pub async fn running() -> HttpResponse {
#[serde(rename_all = "camelCase")]
pub struct Stats {
pub database_size: u64,
#[serde(skip)]
pub used_database_size: u64,
#[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
pub last_update: Option<OffsetDateTime>,
pub indexes: BTreeMap<String, indexes::IndexStats>,
@@ -261,7 +259,6 @@ pub fn create_all_stats(
let mut last_task: Option<OffsetDateTime> = None;
let mut indexes = BTreeMap::new();
let mut database_size = 0;
let mut used_database_size = 0;
for index_uid in index_scheduler.index_names()? {
// Accumulate the size of all indexes, even unauthorized ones, so
@@ -269,7 +266,6 @@ pub fn create_all_stats(
// See <https://github.com/meilisearch/meilisearch/pull/3541#discussion_r1126747643> for context.
let stats = index_scheduler.index_stats(&index_uid)?;
database_size += stats.inner_stats.database_size;
used_database_size += stats.inner_stats.used_database_size;
if !filters.is_index_authorized(&index_uid) {
continue;
@@ -282,14 +278,10 @@ pub fn create_all_stats(
}
database_size += index_scheduler.size()?;
used_database_size += index_scheduler.used_size()?;
database_size += auth_controller.size()?;
used_database_size += auth_controller.used_size()?;
let update_file_size = index_scheduler.compute_update_file_size()?;
database_size += update_file_size;
used_database_size += update_file_size;
database_size += index_scheduler.compute_update_file_size()?;
let stats = Stats { database_size, used_database_size, last_update: last_task, indexes };
let stats = Stats { database_size, last_update: last_task, indexes };
Ok(stats)
}


@@ -730,7 +730,7 @@ mod tests {
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `documentDeletionByFilter`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"


@@ -5,6 +5,7 @@ use std::time::Instant;
use deserr::Deserr;
use either::Either;
use indexmap::IndexMap;
use meilisearch_auth::IndexSearchRules;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::*;
@@ -68,8 +69,6 @@ pub struct SearchQuery {
pub crop_marker: String,
#[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)]
pub matching_strategy: MatchingStrategy,
#[deserr(default, error = DeserrJsonError<InvalidRestrictSearchableAttributes>, default)]
pub restrict_searchable_attributes: Option<Vec<String>>,
}
impl SearchQuery {
@@ -121,8 +120,6 @@ pub struct SearchQueryWithIndex {
pub crop_marker: String,
#[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)]
pub matching_strategy: MatchingStrategy,
#[deserr(default, error = DeserrJsonError<InvalidRestrictSearchableAttributes>, default)]
pub restrict_searchable_attributes: Option<Vec<String>>,
}
impl SearchQueryWithIndex {
@@ -146,7 +143,6 @@ impl SearchQueryWithIndex {
highlight_post_tag,
crop_marker,
matching_strategy,
restrict_searchable_attributes,
} = self;
(
index_uid,
@@ -168,7 +164,6 @@ impl SearchQueryWithIndex {
highlight_post_tag,
crop_marker,
matching_strategy,
restrict_searchable_attributes,
// do not use ..Default::default() here,
// rather add any missing field from `SearchQuery` to `SearchQueryWithIndex`
},
@@ -219,7 +214,7 @@ pub struct SearchResult {
#[serde(flatten)]
pub hits_info: HitsInfo,
#[serde(skip_serializing_if = "Option::is_none")]
pub facet_distribution: Option<BTreeMap<String, BTreeMap<String, u64>>>,
pub facet_distribution: Option<BTreeMap<String, IndexMap<String, u64>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub facet_stats: Option<BTreeMap<String, FacetStats>>,
}
@@ -280,10 +275,6 @@ pub fn perform_search(
search.query(query);
}
if let Some(ref searchable) = query.restrict_searchable_attributes {
search.searchable_attributes(searchable);
}
let is_finite_pagination = query.is_finite_pagination();
search.terms_matching_strategy(query.matching_strategy.into());
@@ -458,10 +449,30 @@ pub fn perform_search(
.unwrap_or(DEFAULT_VALUES_PER_FACET);
facet_distribution.max_values_per_facet(max_values_by_facet);
let sort_facet_values_by =
index.sort_facet_values_by(&rtxn).map_err(milli::Error::from)?;
let default_sort_facet_values_by =
sort_facet_values_by.get("*").copied().unwrap_or_default();
if fields.iter().all(|f| f != "*") {
let fields: Vec<_> = fields
.into_iter()
.map(|n| {
(
n,
sort_facet_values_by
.get(n)
.copied()
.unwrap_or(default_sort_facet_values_by),
)
})
.collect();
facet_distribution.facets(fields);
}
let distribution = facet_distribution.candidates(candidates).execute()?;
let distribution = facet_distribution
.candidates(candidates)
.default_order_by(default_sort_facet_values_by)
.execute()?;
let stats = facet_distribution.compute_stats()?;
(Some(distribution), Some(stats))
}

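The hunk above resolves an effective order per requested facet: an explicit entry in sortFacetValuesBy wins, otherwise the "*" entry applies, otherwise the lexicographic default. A standalone sketch of that resolution logic (OrderBy is mirrored locally with an assumed Lexicographic default; the facet names are illustrative):

use std::collections::BTreeMap;

// Assumed local stand-in for milli::OrderBy, defaulting to Lexicographic.
#[derive(Debug, Clone, Copy, PartialEq, Default)]
enum OrderBy {
    #[default]
    Lexicographic,
    Count,
}

fn effective_order(field: &str, settings: &BTreeMap<String, OrderBy>) -> OrderBy {
    let default = settings.get("*").copied().unwrap_or_default();
    settings.get(field).copied().unwrap_or(default)
}

fn main() {
    let mut settings = BTreeMap::new();
    settings.insert("*".to_string(), OrderBy::Count); // hypothetical global override
    settings.insert("genres".to_string(), OrderBy::Lexicographic); // hypothetical per-facet entry
    assert_eq!(effective_order("genres", &settings), OrderBy::Lexicographic); // explicit entry wins
    assert_eq!(effective_order("director", &settings), OrderBy::Count); // falls back to "*"
}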

@@ -5,7 +5,6 @@ mod errors;
mod formatted;
mod multi;
mod pagination;
mod restrict_searchable;
use once_cell::sync::Lazy;
use serde_json::{json, Value};


@@ -1,263 +0,0 @@
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use crate::common::index::Index;
use crate::common::Server;
async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
let index = server.index("test");
index.add_documents(documents.clone(), None).await;
index.wait_task(0).await;
index
}
static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
"title": "Shazam!",
"desc": "a Captain Marvel ersatz",
"id": "1",
},
{
"title": "Captain Planet",
"desc": "He's not part of the Marvel Cinematic Universe",
"id": "2",
},
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"id": "3",
}])
});
#[actix_rt::test]
async fn simple_search_on_title() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search should return 2 documents (ids: 2 and 3).
index
.search(
json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["title"]}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
},
)
.await;
}
#[actix_rt::test]
async fn simple_prefix_search_on_title() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search should return 2 documents (ids: 2 and 3).
index
.search(
json!({"q": "Captain Mar", "restrictSearchableAttributes": ["title"]}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
},
)
.await;
}
#[actix_rt::test]
async fn simple_search_on_title_matching_strategy_all() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search matching strategy all should only return 1 document (ids: 2).
index
.search(json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["title"], "matchingStrategy": "all"}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
})
.await;
}
#[actix_rt::test]
async fn simple_search_on_unknown_field() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search on unknown field shouldn't return any document.
index
.search(
json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["unknown"]}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 0);
},
)
.await;
}
#[actix_rt::test]
async fn simple_search_on_no_field() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search on no field shouldn't return any document.
index
.search(
json!({"q": "Captain Marvel", "restrictSearchableAttributes": []}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 0);
},
)
.await;
}
#[actix_rt::test]
async fn word_ranking_rule_order() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// Document 3 should appear before document 2.
index
.search(
json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["title"], "attributesToRetrieve": ["id"]}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"],
json!([
{"id": "3"},
{"id": "2"},
])
);
},
)
.await;
}
#[actix_rt::test]
async fn word_ranking_rule_order_exact_words() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
index.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]})).await;
index.wait_task(1).await;
// simple search should return 2 documents (ids: 2 and 3).
index
.search(
json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["title"], "attributesToRetrieve": ["id"]}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"],
json!([
{"id": "3"},
{"id": "2"},
])
);
},
)
.await;
}
#[actix_rt::test]
async fn typo_ranking_rule_order() {
let server = Server::new().await;
let index = index_with_documents(
&server,
&json!([
{
"title": "Capitain Marivel",
"desc": "Captain Marvel",
"id": "1",
},
{
"title": "Captain Marivel",
"desc": "a Shazam ersatz",
"id": "2",
}]),
)
.await;
// Document 2 should appear before document 1.
index
.search(json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["title"], "attributesToRetrieve": ["id"]}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"],
json!([
{"id": "2"},
{"id": "1"},
])
);
})
.await;
}
#[actix_rt::test]
async fn attributes_ranking_rule_order() {
let server = Server::new().await;
let index = index_with_documents(
&server,
&json!([
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"footer": "The story of Captain Marvel",
"id": "1",
},
{
"title": "The Avengers",
"desc": "Captain Marvel is far from the earth",
"footer": "A super hero team",
"id": "2",
}]),
)
.await;
// Document 2 should appear before document 1.
index
.search(json!({"q": "Captain Marvel", "restrictSearchableAttributes": ["desc", "footer"], "attributesToRetrieve": ["id"]}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"],
json!([
{"id": "2"},
{"id": "1"},
])
);
})
.await;
}
#[actix_rt::test]
async fn exactness_ranking_rule_order() {
let server = Server::new().await;
let index = index_with_documents(
&server,
&json!([
{
"title": "Captain Marvel",
"desc": "Captain Marivel",
"id": "1",
},
{
"title": "Captain Marvel",
"desc": "CaptainMarvel",
"id": "2",
}]),
)
.await;
// Document 2 should appear before document 1.
index
.search(json!({"q": "Captain Marvel", "attributesToRetrieve": ["id"], "restrictSearchableAttributes": ["desc"]}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"],
json!([
{"id": "2"},
{"id": "1"},
])
);
})
.await;
}

View File

@@ -97,7 +97,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `documentDeletionByFilter`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@@ -108,7 +108,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `documentDeletionByFilter`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@@ -119,7 +119,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `documentDeletionByFilter`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

View File

@@ -32,6 +32,7 @@ heed = { git = "https://github.com/meilisearch/heed", tag = "v0.12.6", default-f
"lmdb",
"sync-read-txn",
] }
indexmap = { version = "1.9.3", features = ["serde"] }
json-depth-checker = { path = "../json-depth-checker" }
levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] }
memmap2 = "0.5.10"
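Note: the new indexmap dependency is what lets the facet distribution return values in the order they were inserted (by count or lexicographically), where a BTreeMap would silently re-sort its keys. A minimal illustration of the difference:

    use std::collections::BTreeMap;
    use indexmap::IndexMap;

    let pairs = [("Red", 6000_u64), ("Blue", 4000)];
    let kept: IndexMap<_, _> = pairs.into_iter().collect();
    let re_sorted: BTreeMap<_, _> = pairs.into_iter().collect();
    // IndexMap preserves the insertion order ("Red" first)...
    assert_eq!(kept.keys().copied().collect::<Vec<_>>(), ["Red", "Blue"]);
    // ...while BTreeMap re-sorts lexicographically ("Blue" first).
    assert_eq!(re_sorted.keys().copied().collect::<Vec<_>>(), ["Blue", "Red"]);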

View File

@@ -23,9 +23,3 @@ pub use self::roaring_bitmap_length::{
pub use self::script_language_codec::ScriptLanguageCodec;
pub use self::str_beu32_codec::{StrBEU16Codec, StrBEU32Codec};
pub use self::str_str_u8_codec::{U8StrStrCodec, UncheckedU8StrStrCodec};
pub trait BytesDecodeOwned {
type DItem;
fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem>;
}
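Note: this compare removes the BytesDecodeOwned trait (and, in the files below, all of its implementations). It existed so that a decoder could run against an owned byte buffer, e.g. the result of merging several bitmaps at search time, which heed's lifetime-bound BytesDecode cannot express. A hedged sketch of the gap it filled:

    use std::borrow::Cow;

    pub trait BytesDecodeOwned {
        type DItem;
        fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem>;
    }

    // Works for borrowed and owned buffers alike, because the decoded item
    // is not allowed to borrow from `bytes`.
    fn decode_any<D: BytesDecodeOwned>(bytes: &Cow<'_, [u8]>) -> Option<D::DItem> {
        D::bytes_decode_owned(bytes)
    }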

View File

@@ -2,11 +2,8 @@ use std::borrow::Cow;
use std::convert::TryInto;
use std::mem::size_of;
use heed::BytesDecode;
use roaring::RoaringBitmap;
use crate::heed_codec::BytesDecodeOwned;
pub struct BoRoaringBitmapCodec;
impl BoRoaringBitmapCodec {
@@ -16,7 +13,7 @@ impl BoRoaringBitmapCodec {
}
}
impl BytesDecode<'_> for BoRoaringBitmapCodec {
impl heed::BytesDecode<'_> for BoRoaringBitmapCodec {
type DItem = RoaringBitmap;
fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
@@ -31,14 +28,6 @@ impl BytesDecode<'_> for BoRoaringBitmapCodec {
}
}
impl BytesDecodeOwned for BoRoaringBitmapCodec {
type DItem = RoaringBitmap;
fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
Self::bytes_decode(bytes)
}
}
impl heed::BytesEncode<'_> for BoRoaringBitmapCodec {
type EItem = RoaringBitmap;

View File

@@ -5,8 +5,6 @@ use std::mem::size_of;
use byteorder::{NativeEndian, ReadBytesExt, WriteBytesExt};
use roaring::RoaringBitmap;
use crate::heed_codec::BytesDecodeOwned;
/// This is the limit where using a byteorder encoding becomes less size-efficient
/// than using a direct roaring encoding; it is also the point where we are able
/// to determine the encoding used solely from the length of the byte array.
@@ -51,7 +49,7 @@ impl CboRoaringBitmapCodec {
} else {
// Otherwise, it means we used the classic RoaringBitmapCodec and
// that the header takes threshold integers.
RoaringBitmap::deserialize_unchecked_from(bytes)
RoaringBitmap::deserialize_from(bytes)
}
}
@@ -71,7 +69,7 @@ impl CboRoaringBitmapCodec {
vec.push(integer);
}
} else {
roaring |= RoaringBitmap::deserialize_unchecked_from(bytes.as_ref())?;
roaring |= RoaringBitmap::deserialize_from(bytes.as_ref())?;
}
}
@@ -105,14 +103,6 @@ impl heed::BytesDecode<'_> for CboRoaringBitmapCodec {
}
}
impl BytesDecodeOwned for CboRoaringBitmapCodec {
type DItem = RoaringBitmap;
fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
Self::deserialize_from(bytes).ok()
}
}
impl heed::BytesEncode<'_> for CboRoaringBitmapCodec {
type EItem = RoaringBitmap;
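Note: the THRESHOLD mentioned in the comment above drives the whole "Cbo" (conditional byteorder / roaring) codec: small sets are written as raw native-endian u32s, larger ones as a serialized RoaringBitmap, and the byte length alone tells the reader which format it is looking at. A sketch of the write side, assuming the constant is 7 as in milli's cbo_roaring_bitmap_codec.rs:

    use byteorder::{NativeEndian, WriteBytesExt};
    use roaring::RoaringBitmap;

    const THRESHOLD: usize = 7;

    fn cbo_serialize(bitmap: &RoaringBitmap, out: &mut Vec<u8>) -> std::io::Result<()> {
        if bitmap.len() as usize <= THRESHOLD {
            // At most THRESHOLD u32s: always shorter than a roaring header,
            // so the length disambiguates the two encodings on read.
            for value in bitmap {
                out.write_u32::<NativeEndian>(value)?;
            }
            Ok(())
        } else {
            bitmap.serialize_into(out)
        }
    }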

View File

@@ -2,22 +2,12 @@ use std::borrow::Cow;
use roaring::RoaringBitmap;
use crate::heed_codec::BytesDecodeOwned;
pub struct RoaringBitmapCodec;
impl heed::BytesDecode<'_> for RoaringBitmapCodec {
type DItem = RoaringBitmap;
fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
RoaringBitmap::deserialize_unchecked_from(bytes).ok()
}
}
impl BytesDecodeOwned for RoaringBitmapCodec {
type DItem = RoaringBitmap;
fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
RoaringBitmap::deserialize_from(bytes).ok()
}
}

View File

@@ -1,23 +1,11 @@
use std::mem;
use heed::BytesDecode;
use crate::heed_codec::BytesDecodeOwned;
pub struct BoRoaringBitmapLenCodec;
impl BytesDecode<'_> for BoRoaringBitmapLenCodec {
impl heed::BytesDecode<'_> for BoRoaringBitmapLenCodec {
type DItem = u64;
fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
Some((bytes.len() / mem::size_of::<u32>()) as u64)
}
}
impl BytesDecodeOwned for BoRoaringBitmapLenCodec {
type DItem = u64;
fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
Self::bytes_decode(bytes)
}
}

View File

@@ -1,14 +1,11 @@
use std::mem;
use heed::BytesDecode;
use super::{BoRoaringBitmapLenCodec, RoaringBitmapLenCodec};
use crate::heed_codec::roaring_bitmap::cbo_roaring_bitmap_codec::THRESHOLD;
use crate::heed_codec::BytesDecodeOwned;
pub struct CboRoaringBitmapLenCodec;
impl BytesDecode<'_> for CboRoaringBitmapLenCodec {
impl heed::BytesDecode<'_> for CboRoaringBitmapLenCodec {
type DItem = u64;
fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
@@ -23,11 +20,3 @@ impl BytesDecode<'_> for CboRoaringBitmapLenCodec {
}
}
}
impl BytesDecodeOwned for CboRoaringBitmapLenCodec {
type DItem = u64;
fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
Self::bytes_decode(bytes)
}
}

View File

@@ -3,8 +3,6 @@ use std::mem;
use byteorder::{LittleEndian, ReadBytesExt};
use crate::heed_codec::BytesDecodeOwned;
const SERIAL_COOKIE_NO_RUNCONTAINER: u32 = 12346;
const SERIAL_COOKIE: u16 = 12347;
@@ -61,14 +59,6 @@ impl heed::BytesDecode<'_> for RoaringBitmapLenCodec {
}
}
impl BytesDecodeOwned for RoaringBitmapLenCodec {
type DItem = u64;
fn bytes_decode_owned(bytes: &[u8]) -> Option<Self::DItem> {
RoaringBitmapLenCodec::deserialize_from_slice(bytes).ok()
}
}
#[cfg(test)]
mod tests {
use heed::BytesEncode;

View File

@@ -21,9 +21,10 @@ use crate::heed_codec::facet::{
};
use crate::heed_codec::{ScriptLanguageCodec, StrBEU16Codec, StrRefCodec};
use crate::{
default_criteria, CboRoaringBitmapCodec, Criterion, DocumentId, ExternalDocumentsIds,
FacetDistribution, FieldDistribution, FieldId, FieldIdWordCountCodec, GeoPoint, ObkvCodec,
Result, RoaringBitmapCodec, RoaringBitmapLenCodec, Search, U8StrStrCodec, BEU16, BEU32,
default_criteria, BEU32StrCodec, BoRoaringBitmapCodec, CboRoaringBitmapCodec, Criterion,
DocumentId, ExternalDocumentsIds, FacetDistribution, FieldDistribution, FieldId,
FieldIdWordCountCodec, GeoPoint, ObkvCodec, OrderBy, Result, RoaringBitmapCodec,
RoaringBitmapLenCodec, Search, U8StrStrCodec, BEU16, BEU32,
};
pub const DEFAULT_MIN_WORD_LEN_ONE_TYPO: u8 = 5;
@@ -61,6 +62,7 @@ pub mod main_key {
pub const EXACT_WORDS: &str = "exact-words";
pub const EXACT_ATTRIBUTES: &str = "exact-attributes";
pub const MAX_VALUES_PER_FACET: &str = "max-values-per-facet";
pub const SORT_FACET_VALUES_BY: &str = "sort-facet-values-by";
pub const PAGINATION_MAX_TOTAL_HITS: &str = "pagination-max-total-hits";
}
@@ -110,6 +112,9 @@ pub struct Index {
/// A prefix of a word and all the document ids containing this prefix, from attributes for which typos are not allowed.
pub exact_word_prefix_docids: Database<Str, RoaringBitmapCodec>,
/// Maps a word and a document id (u32) to all the positions where the given word appears.
pub docid_word_positions: Database<BEU32StrCodec, BoRoaringBitmapCodec>,
/// Maps the proximity between a pair of words with all the docids where this relation appears.
pub word_pair_proximity_docids: Database<U8StrStrCodec, CboRoaringBitmapCodec>,
/// Maps the proximity between a pair of word and prefix with all the docids where this relation appears.
@@ -173,6 +178,7 @@ impl Index {
let word_prefix_docids = env.create_database(&mut wtxn, Some(WORD_PREFIX_DOCIDS))?;
let exact_word_prefix_docids =
env.create_database(&mut wtxn, Some(EXACT_WORD_PREFIX_DOCIDS))?;
let docid_word_positions = env.create_database(&mut wtxn, Some(DOCID_WORD_POSITIONS))?;
let word_pair_proximity_docids =
env.create_database(&mut wtxn, Some(WORD_PAIR_PROXIMITY_DOCIDS))?;
let script_language_docids =
@@ -215,6 +221,7 @@ impl Index {
exact_word_docids,
word_prefix_docids,
exact_word_prefix_docids,
docid_word_positions,
word_pair_proximity_docids,
script_language_docids,
word_prefix_pair_proximity_docids,
@@ -1228,6 +1235,31 @@ impl Index {
self.main.delete::<_, Str>(txn, main_key::MAX_VALUES_PER_FACET)
}
pub fn sort_facet_values_by(&self, txn: &RoTxn) -> heed::Result<HashMap<String, OrderBy>> {
let mut orders = self
.main
.get::<_, Str, SerdeJson<HashMap<String, OrderBy>>>(
txn,
main_key::SORT_FACET_VALUES_BY,
)?
.unwrap_or_default();
// Insert the default ordering if it is not already overridden by the user.
orders.entry("*".to_string()).or_insert(OrderBy::Lexicographic);
Ok(orders)
}
pub(crate) fn put_sort_facet_values_by(
&self,
txn: &mut RwTxn,
val: &HashMap<String, OrderBy>,
) -> heed::Result<()> {
self.main.put::<_, Str, SerdeJson<_>>(txn, main_key::SORT_FACET_VALUES_BY, &val)
}
pub(crate) fn delete_sort_facet_values_by(&self, txn: &mut RwTxn) -> heed::Result<bool> {
self.main.delete::<_, Str>(txn, main_key::SORT_FACET_VALUES_BY)
}
pub fn pagination_max_total_hits(&self, txn: &RoTxn) -> heed::Result<Option<usize>> {
self.main.get::<_, Str, OwnedType<usize>>(txn, main_key::PAGINATION_MAX_TOTAL_HITS)
}
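Note: the sort_facet_values_by accessors above follow the usual getter/put/delete pattern for index settings, with one twist: the getter injects the `*` wildcard on read, so callers never have to special-case a missing default. A hedged usage sketch (error handling elided):

    let rtxn = index.read_txn()?;
    let orders = index.sort_facet_values_by(&rtxn)?;
    // Always resolves: the getter inserted `*` if the user never set it.
    let genres_order =
        orders.get("genres").or_else(|| orders.get("*")).copied().unwrap_or_default();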
@@ -1466,9 +1498,9 @@ pub(crate) mod tests {
db_snap!(index, field_distribution,
@r###"
age 1
id 2
name 2
age 1
id 2
name 2
"###
);
@@ -1486,9 +1518,9 @@ pub(crate) mod tests {
db_snap!(index, field_distribution,
@r###"
age 1
id 2
name 2
age 1
id 2
name 2
"###
);
@@ -1502,9 +1534,9 @@ pub(crate) mod tests {
db_snap!(index, field_distribution,
@r###"
has_dog 1
id 2
name 2
has_dog 1
id 2
name 2
"###
);
}

View File

@@ -5,6 +5,52 @@
#[global_allocator]
pub static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
// #[cfg(test)]
// pub mod allocator {
// use std::alloc::{GlobalAlloc, System};
// use std::sync::atomic::{self, AtomicI64};
// #[global_allocator]
// pub static ALLOC: CountingAlloc = CountingAlloc {
// max_resident: AtomicI64::new(0),
// resident: AtomicI64::new(0),
// allocated: AtomicI64::new(0),
// };
// pub struct CountingAlloc {
// pub max_resident: AtomicI64,
// pub resident: AtomicI64,
// pub allocated: AtomicI64,
// }
// unsafe impl GlobalAlloc for CountingAlloc {
// unsafe fn alloc(&self, layout: std::alloc::Layout) -> *mut u8 {
// self.allocated.fetch_add(layout.size() as i64, atomic::Ordering::SeqCst);
// let old_resident =
// self.resident.fetch_add(layout.size() as i64, atomic::Ordering::SeqCst);
// let resident = old_resident + layout.size() as i64;
// self.max_resident.fetch_max(resident, atomic::Ordering::SeqCst);
// // if layout.size() > 1_000_000 {
// // eprintln!(
// // "allocating {} with new resident size: {resident}",
// // layout.size() / 1_000_000
// // );
// // // let trace = std::backtrace::Backtrace::capture();
// // // let t = trace.to_string();
// // // eprintln!("{t}");
// // }
// System.alloc(layout)
// }
// unsafe fn dealloc(&self, ptr: *mut u8, layout: std::alloc::Layout) {
// self.resident.fetch_sub(layout.size() as i64, atomic::Ordering::Relaxed);
// System.dealloc(ptr, layout)
// }
// }
// }
#[macro_use]
pub mod documents;
@@ -53,8 +99,8 @@ pub use self::heed_codec::{
};
pub use self::index::Index;
pub use self::search::{
FacetDistribution, Filter, FormatOptions, MatchBounds, MatcherBuilder, MatchingWords, Search,
SearchResult, TermsMatchingStrategy, DEFAULT_VALUES_PER_FACET,
FacetDistribution, Filter, FormatOptions, MatchBounds, MatcherBuilder, MatchingWords, OrderBy,
Search, SearchResult, TermsMatchingStrategy, DEFAULT_VALUES_PER_FACET,
};
pub type Result<T> = std::result::Result<T, error::Error>;

View File

@@ -1,19 +1,22 @@
use std::collections::{BTreeMap, HashSet};
use std::collections::{BTreeMap, HashMap, HashSet};
use std::ops::ControlFlow;
use std::{fmt, mem};
use heed::types::ByteSlice;
use heed::BytesDecode;
use indexmap::IndexMap;
use roaring::RoaringBitmap;
use serde::{Deserialize, Serialize};
use crate::error::UserError;
use crate::facet::FacetType;
use crate::heed_codec::facet::{
FacetGroupKeyCodec, FacetGroupValueCodec, FieldDocIdFacetF64Codec, FieldDocIdFacetStringCodec,
OrderedF64Codec,
FacetGroupKeyCodec, FieldDocIdFacetF64Codec, FieldDocIdFacetStringCodec, OrderedF64Codec,
};
use crate::heed_codec::{ByteSliceRefCodec, StrRefCodec};
use crate::search::facet::facet_distribution_iter;
use crate::search::facet::facet_distribution_iter::{
count_iterate_over_facet_distribution, lexicographically_iterate_over_facet_distribution,
};
use crate::{FieldId, Index, Result};
/// The default number of values by facets that will
@@ -24,10 +27,21 @@ pub const DEFAULT_VALUES_PER_FACET: usize = 100;
/// the system to choose between one algorithm or another.
const CANDIDATES_THRESHOLD: u64 = 3000;
/// How should we fetch the facets?
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum OrderBy {
/// By lexicographic order...
#[default]
Lexicographic,
/// Or by number of docids in common?
Count,
}
pub struct FacetDistribution<'a> {
facets: Option<HashSet<String>>,
facets: Option<HashMap<String, OrderBy>>,
candidates: Option<RoaringBitmap>,
max_values_per_facet: usize,
default_order_by: OrderBy,
rtxn: &'a heed::RoTxn<'a>,
index: &'a Index,
}
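Note: together with the builder methods added in the hunks that follow, per-field orders and the default compose like this (a hedged sketch, not a call site from this diff; `rtxn`, `index` and `candidates` are assumed to be in scope):

    // Count-ordered distribution over every filterable field:
    let all = FacetDistribution::new(&rtxn, &index)
        .default_order_by(OrderBy::Count)
        .candidates(candidates.clone())
        .execute()?;

    // Or with explicit per-field orders:
    let some = FacetDistribution::new(&rtxn, &index)
        .facets([("genres", OrderBy::Count), ("year", OrderBy::Lexicographic)])
        .execute()?;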
@@ -38,13 +52,22 @@ impl<'a> FacetDistribution<'a> {
facets: None,
candidates: None,
max_values_per_facet: DEFAULT_VALUES_PER_FACET,
default_order_by: OrderBy::default(),
rtxn,
index,
}
}
pub fn facets<I: IntoIterator<Item = A>, A: AsRef<str>>(&mut self, names: I) -> &mut Self {
self.facets = Some(names.into_iter().map(|s| s.as_ref().to_string()).collect());
pub fn facets<I: IntoIterator<Item = (A, OrderBy)>, A: AsRef<str>>(
&mut self,
names_ordered_by: I,
) -> &mut Self {
self.facets = Some(
names_ordered_by
.into_iter()
.map(|(name, order_by)| (name.as_ref().to_string(), order_by))
.collect(),
);
self
}
@@ -53,6 +76,11 @@ impl<'a> FacetDistribution<'a> {
self
}
pub fn default_order_by(&mut self, order_by: OrderBy) -> &mut Self {
self.default_order_by = order_by;
self
}
pub fn candidates(&mut self, candidates: RoaringBitmap) -> &mut Self {
self.candidates = Some(candidates);
self
@@ -65,7 +93,7 @@ impl<'a> FacetDistribution<'a> {
field_id: FieldId,
facet_type: FacetType,
candidates: &RoaringBitmap,
distribution: &mut BTreeMap<String, u64>,
distribution: &mut IndexMap<String, u64>,
) -> heed::Result<()> {
match facet_type {
FacetType::Number => {
@@ -134,9 +162,15 @@ impl<'a> FacetDistribution<'a> {
&self,
field_id: FieldId,
candidates: &RoaringBitmap,
distribution: &mut BTreeMap<String, u64>,
order_by: OrderBy,
distribution: &mut IndexMap<String, u64>,
) -> heed::Result<()> {
facet_distribution_iter::iterate_over_facet_distribution(
let search_function = match order_by {
OrderBy::Lexicographic => lexicographically_iterate_over_facet_distribution,
OrderBy::Count => count_iterate_over_facet_distribution,
};
search_function(
self.rtxn,
self.index
.facet_id_f64_docids
@@ -159,9 +193,15 @@ impl<'a> FacetDistribution<'a> {
&self,
field_id: FieldId,
candidates: &RoaringBitmap,
distribution: &mut BTreeMap<String, u64>,
order_by: OrderBy,
distribution: &mut IndexMap<String, u64>,
) -> heed::Result<()> {
facet_distribution_iter::iterate_over_facet_distribution(
let search_function = match order_by {
OrderBy::Lexicographic => lexicographically_iterate_over_facet_distribution,
OrderBy::Count => count_iterate_over_facet_distribution,
};
search_function(
self.rtxn,
self.index
.facet_id_string_docids
@@ -189,93 +229,48 @@ impl<'a> FacetDistribution<'a> {
)
}
/// Placeholder search, a.k.a. no candidates were specified. We iterate through the
/// facet values one by one and iterate on the facet level 0 for numbers.
fn facet_values_from_raw_facet_database(
fn facet_values(
&self,
field_id: FieldId,
) -> heed::Result<BTreeMap<String, u64>> {
let mut distribution = BTreeMap::new();
let db = self.index.facet_id_f64_docids;
let mut prefix = vec![];
prefix.extend_from_slice(&field_id.to_be_bytes());
prefix.push(0); // read values from level 0 only
let iter = db
.as_polymorph()
.prefix_iter::<_, ByteSlice, ByteSlice>(self.rtxn, prefix.as_slice())?
.remap_types::<FacetGroupKeyCodec<OrderedF64Codec>, FacetGroupValueCodec>();
for result in iter {
let (key, value) = result?;
distribution.insert(key.left_bound.to_string(), value.bitmap.len());
if distribution.len() == self.max_values_per_facet {
break;
}
}
let iter = self
.index
.facet_id_string_docids
.as_polymorph()
.prefix_iter::<_, ByteSlice, ByteSlice>(self.rtxn, prefix.as_slice())?
.remap_types::<FacetGroupKeyCodec<StrRefCodec>, FacetGroupValueCodec>();
for result in iter {
let (key, value) = result?;
let docid = value.bitmap.iter().next().unwrap();
let key: (FieldId, _, &'a str) = (field_id, docid, key.left_bound);
let original_string =
self.index.field_id_docid_facet_strings.get(self.rtxn, &key)?.unwrap().to_owned();
distribution.insert(original_string, value.bitmap.len());
if distribution.len() == self.max_values_per_facet {
break;
}
}
Ok(distribution)
}
fn facet_values(&self, field_id: FieldId) -> heed::Result<BTreeMap<String, u64>> {
order_by: OrderBy,
) -> heed::Result<IndexMap<String, u64>> {
use FacetType::{Number, String};
match self.candidates {
Some(ref candidates) => {
let mut distribution = IndexMap::new();
match (order_by, &self.candidates) {
(OrderBy::Lexicographic, Some(cnd)) if cnd.len() <= CANDIDATES_THRESHOLD => {
// Classic search, candidates were specified, we must return facet values only related
// to those candidates. We also enter here for facet strings for performance reasons.
let mut distribution = BTreeMap::new();
if candidates.len() <= CANDIDATES_THRESHOLD {
self.facet_distribution_from_documents(
field_id,
Number,
candidates,
&mut distribution,
)?;
self.facet_distribution_from_documents(
field_id,
String,
candidates,
&mut distribution,
)?;
} else {
self.facet_numbers_distribution_from_facet_levels(
field_id,
candidates,
&mut distribution,
)?;
self.facet_strings_distribution_from_facet_levels(
field_id,
candidates,
&mut distribution,
)?;
}
Ok(distribution)
self.facet_distribution_from_documents(field_id, Number, cnd, &mut distribution)?;
self.facet_distribution_from_documents(field_id, String, cnd, &mut distribution)?;
}
None => self.facet_values_from_raw_facet_database(field_id),
}
_ => {
let universe;
let candidates;
match &self.candidates {
Some(cnd) => candidates = cnd,
None => {
universe = self.index.documents_ids(self.rtxn)?;
candidates = &universe;
}
}
self.facet_numbers_distribution_from_facet_levels(
field_id,
candidates,
order_by,
&mut distribution,
)?;
self.facet_strings_distribution_from_facet_levels(
field_id,
candidates,
order_by,
&mut distribution,
)?;
}
};
Ok(distribution)
}
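Note: the rewritten facet_values boils down to one decision, compressed here for readability (a restatement of the match above, not new behavior):

    match (order_by, &self.candidates) {
        // Small candidate set + lexicographic order: scanning the matching
        // documents directly is cheaper than walking the facet levels.
        (OrderBy::Lexicographic, Some(cnd)) if cnd.len() <= CANDIDATES_THRESHOLD => { /* ... */ }
        // Count order, a large candidate set, or no candidates at all:
        // walk the precomputed facet levels, with the candidate set
        // defaulting to every document in the index.
        _ => { /* ... */ }
    }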
pub fn compute_stats(&self) -> Result<BTreeMap<String, (f64, f64)>> {
@@ -291,6 +286,7 @@ impl<'a> FacetDistribution<'a> {
Some(facets) => {
let invalid_fields: HashSet<_> = facets
.iter()
.map(|(name, _)| name)
.filter(|facet| !crate::is_faceted(facet, &filterable_fields))
.collect();
if !invalid_fields.is_empty() {
@@ -300,7 +296,7 @@ impl<'a> FacetDistribution<'a> {
}
.into());
} else {
facets.clone()
facets.into_iter().map(|(name, _)| name).cloned().collect()
}
}
None => filterable_fields,
@@ -337,7 +333,7 @@ impl<'a> FacetDistribution<'a> {
Ok(distribution)
}
pub fn execute(&self) -> Result<BTreeMap<String, BTreeMap<String, u64>>> {
pub fn execute(&self) -> Result<BTreeMap<String, IndexMap<String, u64>>> {
let fields_ids_map = self.index.fields_ids_map(self.rtxn)?;
let filterable_fields = self.index.filterable_fields(self.rtxn)?;
@@ -345,6 +341,7 @@ impl<'a> FacetDistribution<'a> {
Some(ref facets) => {
let invalid_fields: HashSet<_> = facets
.iter()
.map(|(name, _)| name)
.filter(|facet| !crate::is_faceted(facet, &filterable_fields))
.collect();
if !invalid_fields.is_empty() {
@@ -354,7 +351,7 @@ impl<'a> FacetDistribution<'a> {
}
.into());
} else {
facets.clone()
facets.into_iter().map(|(name, _)| name).cloned().collect()
}
}
None => filterable_fields,
@@ -363,7 +360,13 @@ impl<'a> FacetDistribution<'a> {
let mut distribution = BTreeMap::new();
for (fid, name) in fields_ids_map.iter() {
if crate::is_faceted(name, &fields) {
let values = self.facet_values(fid)?;
let order_by = self
.facets
.as_ref()
.map(|facets| facets.get(name).copied())
.flatten()
.unwrap_or(self.default_order_by);
let values = self.facet_values(fid, order_by)?;
distribution.insert(name.to_string(), values);
}
}
@@ -374,13 +377,20 @@ impl<'a> FacetDistribution<'a> {
impl fmt::Debug for FacetDistribution<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let FacetDistribution { facets, candidates, max_values_per_facet, rtxn: _, index: _ } =
self;
let FacetDistribution {
facets,
candidates,
max_values_per_facet,
default_order_by,
rtxn: _,
index: _,
} = self;
f.debug_struct("FacetDistribution")
.field("facets", facets)
.field("candidates", candidates)
.field("max_values_per_facet", max_values_per_facet)
.field("default_order_by", default_order_by)
.finish()
}
}
@@ -392,7 +402,7 @@ mod tests {
use crate::documents::documents_batch_reader_from_objects;
use crate::index::tests::TempIndex;
use crate::{milli_snap, FacetDistribution};
use crate::{milli_snap, FacetDistribution, OrderBy};
#[test]
fn few_candidates_few_facet_values() {
@@ -417,14 +427,14 @@ mod tests {
let txn = index.read_txn().unwrap();
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.execute()
.unwrap();
milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 2, "RED": 1}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates([0, 1, 2].iter().copied().collect())
.execute()
.unwrap();
@@ -432,7 +442,7 @@ mod tests {
milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 2, "RED": 1}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates([1, 2].iter().copied().collect())
.execute()
.unwrap();
@@ -443,7 +453,7 @@ mod tests {
milli_snap!(format!("{map:?}"), @r###"{"colour": {" blue": 1, "RED": 1}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates([2].iter().copied().collect())
.execute()
.unwrap();
@@ -451,7 +461,7 @@ mod tests {
milli_snap!(format!("{map:?}"), @r###"{"colour": {"RED": 1}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates([0, 1, 2].iter().copied().collect())
.max_values_per_facet(1)
.execute()
@@ -489,14 +499,14 @@ mod tests {
let txn = index.read_txn().unwrap();
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.execute()
.unwrap();
milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 4000, "Red": 6000}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.max_values_per_facet(1)
.execute()
.unwrap();
@@ -504,7 +514,7 @@ mod tests {
milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 4000}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((0..10_000).collect())
.execute()
.unwrap();
@@ -512,7 +522,7 @@ mod tests {
milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 4000, "Red": 6000}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((0..5_000).collect())
.execute()
.unwrap();
@@ -520,7 +530,7 @@ mod tests {
milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 2000, "Red": 3000}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((0..5_000).collect())
.execute()
.unwrap();
@@ -528,7 +538,7 @@ mod tests {
milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 2000, "Red": 3000}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((0..5_000).collect())
.max_values_per_facet(1)
.execute()
@@ -566,14 +576,14 @@ mod tests {
let txn = index.read_txn().unwrap();
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.execute()
.unwrap();
milli_snap!(format!("{map:?}"), "no_candidates", @"ac9229ed5964d893af96a7076e2f8af5");
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.max_values_per_facet(2)
.execute()
.unwrap();
@@ -581,7 +591,7 @@ mod tests {
milli_snap!(format!("{map:?}"), "no_candidates_with_max_2", @r###"{"colour": {"0": 10, "1": 10}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((0..10_000).collect())
.execute()
.unwrap();
@@ -589,7 +599,7 @@ mod tests {
milli_snap!(format!("{map:?}"), "candidates_0_10_000", @"ac9229ed5964d893af96a7076e2f8af5");
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((0..5_000).collect())
.execute()
.unwrap();
@@ -626,14 +636,14 @@ mod tests {
let txn = index.read_txn().unwrap();
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.compute_stats()
.unwrap();
milli_snap!(format!("{map:?}"), "no_candidates", @"{}");
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((0..1000).collect())
.compute_stats()
.unwrap();
@@ -641,7 +651,7 @@ mod tests {
milli_snap!(format!("{map:?}"), "candidates_0_1000", @r###"{"colour": (0.0, 999.0)}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((217..777).collect())
.compute_stats()
.unwrap();
@@ -678,14 +688,14 @@ mod tests {
let txn = index.read_txn().unwrap();
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.compute_stats()
.unwrap();
milli_snap!(format!("{map:?}"), "no_candidates", @"{}");
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((0..1000).collect())
.compute_stats()
.unwrap();
@@ -693,7 +703,7 @@ mod tests {
milli_snap!(format!("{map:?}"), "candidates_0_1000", @r###"{"colour": (0.0, 1999.0)}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((217..777).collect())
.compute_stats()
.unwrap();
@@ -730,14 +740,14 @@ mod tests {
let txn = index.read_txn().unwrap();
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.compute_stats()
.unwrap();
milli_snap!(format!("{map:?}"), "no_candidates", @"{}");
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((0..1000).collect())
.compute_stats()
.unwrap();
@@ -745,7 +755,7 @@ mod tests {
milli_snap!(format!("{map:?}"), "candidates_0_1000", @r###"{"colour": (0.0, 999.0)}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((217..777).collect())
.compute_stats()
.unwrap();
@@ -786,14 +796,14 @@ mod tests {
let txn = index.read_txn().unwrap();
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.compute_stats()
.unwrap();
milli_snap!(format!("{map:?}"), "no_candidates", @"{}");
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((0..1000).collect())
.compute_stats()
.unwrap();
@@ -801,7 +811,7 @@ mod tests {
milli_snap!(format!("{map:?}"), "candidates_0_1000", @r###"{"colour": (0.0, 1998.0)}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(std::iter::once("colour"))
.facets(std::iter::once(("colour", OrderBy::default())))
.candidates((217..777).collect())
.compute_stats()
.unwrap();
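Note: every snapshot call site above just pins OrderBy::default() so the old lexicographic behavior is preserved; exercising the new ordering is a one-token change, e.g.:

    let map = FacetDistribution::new(&txn, &index)
        .facets(std::iter::once(("colour", OrderBy::Count)))
        .execute()
        .unwrap();
    // Values now come back most-frequent first,
    // e.g. {"colour": {"Red": 6000, "Blue": 4000}} for the 6000/4000 dataset above.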

View File

@@ -1,3 +1,5 @@
use std::cmp::Reverse;
use std::collections::BinaryHeap;
use std::ops::ControlFlow;
use heed::Result;
@@ -19,7 +21,7 @@ use crate::DocumentId;
///
/// The return value of the closure is a `ControlFlow<()>` which indicates whether we should
/// keep iterating over the different facet values or stop.
pub fn iterate_over_facet_distribution<'t, CB>(
pub fn lexicographically_iterate_over_facet_distribution<'t, CB>(
rtxn: &'t heed::RoTxn<'t>,
db: heed::Database<FacetGroupKeyCodec<ByteSliceRefCodec>, FacetGroupValueCodec>,
field_id: u16,
@@ -29,7 +31,7 @@ pub fn iterate_over_facet_distribution<'t, CB>(
where
CB: FnMut(&'t [u8], u64, DocumentId) -> Result<ControlFlow<()>>,
{
let mut fd = FacetDistribution { rtxn, db, field_id, callback };
let mut fd = LexicographicFacetDistribution { rtxn, db, field_id, callback };
let highest_level = get_highest_level(
rtxn,
db.remap_key_type::<FacetGroupKeyCodec<ByteSliceRefCodec>>(),
@@ -44,7 +46,99 @@ where
}
}
struct FacetDistribution<'t, CB>
pub fn count_iterate_over_facet_distribution<'t, CB>(
rtxn: &'t heed::RoTxn<'t>,
db: heed::Database<FacetGroupKeyCodec<ByteSliceRefCodec>, FacetGroupValueCodec>,
field_id: u16,
candidates: &RoaringBitmap,
mut callback: CB,
) -> Result<()>
where
CB: FnMut(&'t [u8], u64, DocumentId) -> Result<ControlFlow<()>>,
{
#[derive(Debug, PartialOrd, Ord, PartialEq, Eq)]
struct LevelEntry<'t> {
/// The number of candidates in this entry.
count: u64,
/// The key level of the entry.
level: Reverse<u8>,
/// The left bound key.
left_bound: &'t [u8],
/// The number of keys we must look for after `left_bound`.
group_size: u8,
/// Any docid in the set of matching documents. Used to find the original facet string.
any_docid: u32,
}
// Represents the list of keys that we must explore.
let mut heap = BinaryHeap::new();
let highest_level = get_highest_level(
rtxn,
db.remap_key_type::<FacetGroupKeyCodec<ByteSliceRefCodec>>(),
field_id,
)?;
if let Some(first_bound) = get_first_facet_value::<ByteSliceRefCodec>(rtxn, db, field_id)? {
// We first fill the heap with values from the highest level
let starting_key =
FacetGroupKey { field_id, level: highest_level, left_bound: first_bound };
for el in db.range(rtxn, &(&starting_key..)).unwrap().take(usize::MAX) {
let (key, value) = el.unwrap();
// The range is unbounded on the right and the group size for the highest level is MAX,
// so we need to check that we are not iterating over the next field id
if key.field_id != field_id {
break;
}
let intersection = value.bitmap & candidates;
let count = intersection.len();
if count != 0 {
heap.push(LevelEntry {
count,
level: Reverse(key.level),
left_bound: key.left_bound,
group_size: value.size,
any_docid: intersection.min().unwrap(),
});
}
}
while let Some(LevelEntry { count, level, left_bound, group_size, any_docid }) = heap.pop()
{
if let Reverse(0) = level {
match (callback)(left_bound, count, any_docid)? {
ControlFlow::Continue(_) => (),
ControlFlow::Break(_) => return Ok(()),
}
} else {
let starting_key = FacetGroupKey { field_id, level: level.0 - 1, left_bound };
for el in db.range(rtxn, &(&starting_key..)).unwrap().take(group_size as usize) {
let (key, value) = el.unwrap();
// The range is unbounded on the right, so even with `take(group_size)` we must
// check that we are not iterating over the next field id.
if key.field_id != field_id {
break;
}
let intersection = value.bitmap & candidates;
let count = intersection.len();
if count != 0 {
heap.push(LevelEntry {
count,
level: Reverse(key.level),
left_bound: key.left_bound,
group_size: value.size,
any_docid: intersection.min().unwrap(),
});
}
}
}
}
}
Ok(())
}
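Note: the derived Ord on LevelEntry is what turns the BinaryHeap into a best-first search: entries compare by count first, and Reverse(level) breaks ties in favour of deeper (lower-level) nodes, so a level-0 value is only reported once no coarser group can still outrank it. The tie-breaking in isolation:

    use std::cmp::Reverse;
    use std::collections::BinaryHeap;

    let mut heap = BinaryHeap::new();
    heap.push((42_u64, Reverse(3_u8), "big group"));
    heap.push((10, Reverse(2), "coarse group"));
    heap.push((10, Reverse(0), "leaf"));
    assert_eq!(heap.pop().unwrap().2, "big group"); // highest count first
    assert_eq!(heap.pop().unwrap().2, "leaf"); // count tie: lowest level wins
    assert_eq!(heap.pop().unwrap().2, "coarse group");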
/// Iterate over the facets values by lexicographic order.
struct LexicographicFacetDistribution<'t, CB>
where
CB: FnMut(&'t [u8], u64, DocumentId) -> Result<ControlFlow<()>>,
{
@@ -54,7 +148,7 @@ where
callback: CB,
}
impl<'t, CB> FacetDistribution<'t, CB>
impl<'t, CB> LexicographicFacetDistribution<'t, CB>
where
CB: FnMut(&'t [u8], u64, DocumentId) -> Result<ControlFlow<()>>,
{
@@ -86,6 +180,7 @@ where
}
Ok(ControlFlow::Continue(()))
}
fn iterate(
&mut self,
candidates: &RoaringBitmap,
@@ -116,7 +211,7 @@ where
value.size as usize,
)?;
match cf {
ControlFlow::Continue(_) => {}
ControlFlow::Continue(_) => (),
ControlFlow::Break(_) => return Ok(ControlFlow::Break(())),
}
}
@@ -132,7 +227,7 @@ mod tests {
use heed::BytesDecode;
use roaring::RoaringBitmap;
use super::iterate_over_facet_distribution;
use super::lexicographically_iterate_over_facet_distribution;
use crate::heed_codec::facet::OrderedF64Codec;
use crate::milli_snap;
use crate::search::facet::tests::{get_random_looking_index, get_simple_index};
@@ -144,7 +239,7 @@ mod tests {
let txn = index.env.read_txn().unwrap();
let candidates = (0..=255).collect::<RoaringBitmap>();
let mut results = String::new();
iterate_over_facet_distribution(
lexicographically_iterate_over_facet_distribution(
&txn,
index.content,
0,
@@ -161,6 +256,7 @@ mod tests {
txn.commit().unwrap();
}
}
#[test]
fn filter_distribution_all_stop_early() {
let indexes = [get_simple_index(), get_random_looking_index()];
@@ -169,7 +265,7 @@ mod tests {
let candidates = (0..=255).collect::<RoaringBitmap>();
let mut results = String::new();
let mut nbr_facets = 0;
iterate_over_facet_distribution(
lexicographically_iterate_over_facet_distribution(
&txn,
index.content,
0,

View File

@@ -4,7 +4,7 @@ use heed::types::{ByteSlice, DecodeIgnore};
use heed::{BytesDecode, RoTxn};
use roaring::RoaringBitmap;
pub use self::facet_distribution::{FacetDistribution, DEFAULT_VALUES_PER_FACET};
pub use self::facet_distribution::{FacetDistribution, OrderBy, DEFAULT_VALUES_PER_FACET};
pub use self::filter::{BadGeoError, Filter};
use crate::heed_codec::facet::{FacetGroupKeyCodec, FacetGroupValueCodec, OrderedF64Codec};
use crate::heed_codec::ByteSliceRefCodec;

View File

@@ -4,7 +4,7 @@ use levenshtein_automata::{LevenshteinAutomatonBuilder as LevBuilder, DFA};
use once_cell::sync::Lazy;
use roaring::bitmap::RoaringBitmap;
pub use self::facet::{FacetDistribution, Filter, DEFAULT_VALUES_PER_FACET};
pub use self::facet::{FacetDistribution, Filter, OrderBy, DEFAULT_VALUES_PER_FACET};
pub use self::new::matches::{FormatOptions, MatchBounds, Matcher, MatcherBuilder, MatchingWords};
use self::new::PartialSearchResult;
use crate::{
@@ -27,7 +27,6 @@ pub struct Search<'a> {
offset: usize,
limit: usize,
sort_criteria: Option<Vec<AscDesc>>,
searchable_attributes: Option<&'a [String]>,
geo_strategy: new::GeoSortStrategy,
terms_matching_strategy: TermsMatchingStrategy,
words_limit: usize,
@@ -44,7 +43,6 @@ impl<'a> Search<'a> {
offset: 0,
limit: 20,
sort_criteria: None,
searchable_attributes: None,
geo_strategy: new::GeoSortStrategy::default(),
terms_matching_strategy: TermsMatchingStrategy::default(),
exhaustive_number_hits: false,
@@ -74,11 +72,6 @@ impl<'a> Search<'a> {
self
}
pub fn searchable_attributes(&mut self, searchable: &'a [String]) -> &mut Search<'a> {
self.searchable_attributes = Some(searchable);
self
}
pub fn terms_matching_strategy(&mut self, value: TermsMatchingStrategy) -> &mut Search<'a> {
self.terms_matching_strategy = value;
self
@@ -109,11 +102,6 @@ impl<'a> Search<'a> {
pub fn execute(&self) -> Result<SearchResult> {
let mut ctx = SearchContext::new(self.index, self.rtxn);
if let Some(searchable_attributes) = self.searchable_attributes {
ctx.searchable_attributes(searchable_attributes)?;
}
let PartialSearchResult { located_query_terms, candidates, documents_ids } =
execute_search(
&mut ctx,
@@ -148,7 +136,6 @@ impl fmt::Debug for Search<'_> {
offset,
limit,
sort_criteria,
searchable_attributes,
geo_strategy: _,
terms_matching_strategy,
words_limit,
@@ -162,7 +149,6 @@ impl fmt::Debug for Search<'_> {
.field("offset", offset)
.field("limit", limit)
.field("sort_criteria", sort_criteria)
.field("searchable_attributes", searchable_attributes)
.field("terms_matching_strategy", terms_matching_strategy)
.field("exhaustive_number_hits", exhaustive_number_hits)
.field("words_limit", words_limit)

View File

@@ -4,13 +4,12 @@ use std::hash::Hash;
use fxhash::FxHashMap;
use heed::types::ByteSlice;
use heed::{BytesEncode, Database, RoTxn};
use heed::{BytesDecode, BytesEncode, Database, RoTxn};
use roaring::RoaringBitmap;
use super::interner::Interned;
use super::Word;
use crate::heed_codec::{BytesDecodeOwned, StrBEU16Codec};
use crate::update::{merge_cbo_roaring_bitmaps, MergeFn};
use crate::heed_codec::StrBEU16Codec;
use crate::{
CboRoaringBitmapCodec, CboRoaringBitmapLenCodec, Result, RoaringBitmapCodec, SearchContext,
};
@@ -23,110 +22,50 @@ use crate::{
#[derive(Default)]
pub struct DatabaseCache<'ctx> {
pub word_pair_proximity_docids:
FxHashMap<(u8, Interned<String>, Interned<String>), Option<Cow<'ctx, [u8]>>>,
FxHashMap<(u8, Interned<String>, Interned<String>), Option<&'ctx [u8]>>,
pub word_prefix_pair_proximity_docids:
FxHashMap<(u8, Interned<String>, Interned<String>), Option<Cow<'ctx, [u8]>>>,
FxHashMap<(u8, Interned<String>, Interned<String>), Option<&'ctx [u8]>>,
pub prefix_word_pair_proximity_docids:
FxHashMap<(u8, Interned<String>, Interned<String>), Option<Cow<'ctx, [u8]>>>,
pub word_docids: FxHashMap<Interned<String>, Option<Cow<'ctx, [u8]>>>,
pub exact_word_docids: FxHashMap<Interned<String>, Option<Cow<'ctx, [u8]>>>,
pub word_prefix_docids: FxHashMap<Interned<String>, Option<Cow<'ctx, [u8]>>>,
pub exact_word_prefix_docids: FxHashMap<Interned<String>, Option<Cow<'ctx, [u8]>>>,
FxHashMap<(u8, Interned<String>, Interned<String>), Option<&'ctx [u8]>>,
pub word_docids: FxHashMap<Interned<String>, Option<&'ctx [u8]>>,
pub exact_word_docids: FxHashMap<Interned<String>, Option<&'ctx [u8]>>,
pub word_prefix_docids: FxHashMap<Interned<String>, Option<&'ctx [u8]>>,
pub exact_word_prefix_docids: FxHashMap<Interned<String>, Option<&'ctx [u8]>>,
pub words_fst: Option<fst::Set<Cow<'ctx, [u8]>>>,
pub word_position_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
pub word_prefix_position_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
pub word_position_docids: FxHashMap<(Interned<String>, u16), Option<&'ctx [u8]>>,
pub word_prefix_position_docids: FxHashMap<(Interned<String>, u16), Option<&'ctx [u8]>>,
pub word_positions: FxHashMap<Interned<String>, Vec<u16>>,
pub word_prefix_positions: FxHashMap<Interned<String>, Vec<u16>>,
pub word_fid_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
pub word_prefix_fid_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
pub word_fid_docids: FxHashMap<(Interned<String>, u16), Option<&'ctx [u8]>>,
pub word_prefix_fid_docids: FxHashMap<(Interned<String>, u16), Option<&'ctx [u8]>>,
pub word_fids: FxHashMap<Interned<String>, Vec<u16>>,
pub word_prefix_fids: FxHashMap<Interned<String>, Vec<u16>>,
}
impl<'ctx> DatabaseCache<'ctx> {
fn get_value<'v, K1, KC, DC>(
fn get_value<'v, K1, KC>(
txn: &'ctx RoTxn,
cache_key: K1,
db_key: &'v KC::EItem,
cache: &mut FxHashMap<K1, Option<Cow<'ctx, [u8]>>>,
cache: &mut FxHashMap<K1, Option<&'ctx [u8]>>,
db: Database<KC, ByteSlice>,
) -> Result<Option<DC::DItem>>
) -> Result<Option<&'ctx [u8]>>
where
K1: Copy + Eq + Hash,
KC: BytesEncode<'v>,
DC: BytesDecodeOwned,
{
match cache.entry(cache_key) {
Entry::Occupied(_) => {}
let bitmap_ptr = match cache.entry(cache_key) {
Entry::Occupied(bitmap_ptr) => *bitmap_ptr.get(),
Entry::Vacant(entry) => {
let bitmap_ptr = db.get(txn, db_key)?.map(Cow::Borrowed);
entry.insert(bitmap_ptr);
}
}
match cache.get(&cache_key).unwrap() {
Some(Cow::Borrowed(bytes)) => {
DC::bytes_decode_owned(bytes).ok_or(heed::Error::Decoding.into()).map(Some)
}
Some(Cow::Owned(bytes)) => {
DC::bytes_decode_owned(bytes).ok_or(heed::Error::Decoding.into()).map(Some)
}
None => Ok(None),
}
}
fn get_value_from_keys<'v, K1, KC, DC>(
txn: &'ctx RoTxn,
cache_key: K1,
db_keys: &'v [KC::EItem],
cache: &mut FxHashMap<K1, Option<Cow<'ctx, [u8]>>>,
db: Database<KC, ByteSlice>,
merger: MergeFn,
) -> Result<Option<DC::DItem>>
where
K1: Copy + Eq + Hash,
KC: BytesEncode<'v>,
DC: BytesDecodeOwned,
KC::EItem: Sized,
{
match cache.entry(cache_key) {
Entry::Occupied(_) => {}
Entry::Vacant(entry) => {
let bitmap_ptr: Option<Cow<'ctx, [u8]>> = match db_keys {
[] => None,
[key] => db.get(txn, key)?.map(Cow::Borrowed),
keys => {
let bitmaps = keys
.iter()
.filter_map(|key| db.get(txn, key).transpose())
.map(|v| v.map(Cow::Borrowed))
.collect::<std::result::Result<Vec<Cow<[u8]>>, _>>()?;
if bitmaps.is_empty() {
None
} else {
Some(merger(&[], &bitmaps[..])?)
}
}
};
let bitmap_ptr = db.get(txn, db_key)?;
entry.insert(bitmap_ptr);
bitmap_ptr
}
};
match cache.get(&cache_key).unwrap() {
Some(Cow::Borrowed(bytes)) => {
DC::bytes_decode_owned(bytes).ok_or(heed::Error::Decoding.into()).map(Some)
}
Some(Cow::Owned(bytes)) => {
DC::bytes_decode_owned(bytes).ok_or(heed::Error::Decoding.into()).map(Some)
}
None => Ok(None),
}
Ok(bitmap_ptr)
}
}
impl<'ctx> SearchContext<'ctx> {
pub fn get_words_fst(&mut self) -> Result<fst::Set<Cow<'ctx, [u8]>>> {
if let Some(fst) = self.db_cache.words_fst.clone() {
@@ -160,41 +99,30 @@ impl<'ctx> SearchContext<'ctx> {
/// Retrieve or insert the given value in the `word_docids` database.
fn get_db_word_docids(&mut self, word: Interned<String>) -> Result<Option<RoaringBitmap>> {
match &self.restricted_fids {
Some(restricted_fids) => {
let interned = self.word_interner.get(word).as_str();
let keys: Vec<_> = restricted_fids.iter().map(|fid| (interned, *fid)).collect();
DatabaseCache::get_value_from_keys::<_, _, CboRoaringBitmapCodec>(
self.txn,
word,
&keys[..],
&mut self.db_cache.word_docids,
self.index.word_fid_docids.remap_data_type::<ByteSlice>(),
merge_cbo_roaring_bitmaps,
)
}
None => DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
self.txn,
word,
self.word_interner.get(word).as_str(),
&mut self.db_cache.word_docids,
self.index.word_docids.remap_data_type::<ByteSlice>(),
),
}
DatabaseCache::get_value(
self.txn,
word,
self.word_interner.get(word).as_str(),
&mut self.db_cache.word_docids,
self.index.word_docids.remap_data_type::<ByteSlice>(),
)?
.map(|bytes| RoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
fn get_db_exact_word_docids(
&mut self,
word: Interned<String>,
) -> Result<Option<RoaringBitmap>> {
DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
DatabaseCache::get_value(
self.txn,
word,
self.word_interner.get(word).as_str(),
&mut self.db_cache.exact_word_docids,
self.index.exact_word_docids.remap_data_type::<ByteSlice>(),
)
)?
.map(|bytes| RoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
pub fn word_prefix_docids(&mut self, prefix: Word) -> Result<Option<RoaringBitmap>> {
@@ -222,41 +150,30 @@ impl<'ctx> SearchContext<'ctx> {
&mut self,
prefix: Interned<String>,
) -> Result<Option<RoaringBitmap>> {
match &self.restricted_fids {
Some(restricted_fids) => {
let interned = self.word_interner.get(prefix).as_str();
let keys: Vec<_> = restricted_fids.iter().map(|fid| (interned, *fid)).collect();
DatabaseCache::get_value_from_keys::<_, _, CboRoaringBitmapCodec>(
self.txn,
prefix,
&keys[..],
&mut self.db_cache.word_prefix_docids,
self.index.word_prefix_fid_docids.remap_data_type::<ByteSlice>(),
merge_cbo_roaring_bitmaps,
)
}
None => DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
self.txn,
prefix,
self.word_interner.get(prefix).as_str(),
&mut self.db_cache.word_prefix_docids,
self.index.word_prefix_docids.remap_data_type::<ByteSlice>(),
),
}
DatabaseCache::get_value(
self.txn,
prefix,
self.word_interner.get(prefix).as_str(),
&mut self.db_cache.word_prefix_docids,
self.index.word_prefix_docids.remap_data_type::<ByteSlice>(),
)?
.map(|bytes| RoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
fn get_db_exact_word_prefix_docids(
&mut self,
prefix: Interned<String>,
) -> Result<Option<RoaringBitmap>> {
DatabaseCache::get_value::<_, _, RoaringBitmapCodec>(
DatabaseCache::get_value(
self.txn,
prefix,
self.word_interner.get(prefix).as_str(),
&mut self.db_cache.exact_word_prefix_docids,
self.index.exact_word_prefix_docids.remap_data_type::<ByteSlice>(),
)
)?
.map(|bytes| RoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
pub fn get_db_word_pair_proximity_docids(
@@ -265,7 +182,7 @@ impl<'ctx> SearchContext<'ctx> {
word2: Interned<String>,
proximity: u8,
) -> Result<Option<RoaringBitmap>> {
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value(
self.txn,
(proximity, word1, word2),
&(
@@ -275,7 +192,9 @@ impl<'ctx> SearchContext<'ctx> {
),
&mut self.db_cache.word_pair_proximity_docids,
self.index.word_pair_proximity_docids.remap_data_type::<ByteSlice>(),
)
)?
.map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
pub fn get_db_word_pair_proximity_docids_len(
@@ -284,7 +203,7 @@ impl<'ctx> SearchContext<'ctx> {
word2: Interned<String>,
proximity: u8,
) -> Result<Option<u64>> {
DatabaseCache::get_value::<_, _, CboRoaringBitmapLenCodec>(
DatabaseCache::get_value(
self.txn,
(proximity, word1, word2),
&(
@@ -294,7 +213,11 @@ impl<'ctx> SearchContext<'ctx> {
),
&mut self.db_cache.word_pair_proximity_docids,
self.index.word_pair_proximity_docids.remap_data_type::<ByteSlice>(),
)
)?
.map(|bytes| {
CboRoaringBitmapLenCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into())
})
.transpose()
}
pub fn get_db_word_prefix_pair_proximity_docids(
@@ -303,7 +226,7 @@ impl<'ctx> SearchContext<'ctx> {
prefix2: Interned<String>,
proximity: u8,
) -> Result<Option<RoaringBitmap>> {
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value(
self.txn,
(proximity, word1, prefix2),
&(
@@ -313,7 +236,9 @@ impl<'ctx> SearchContext<'ctx> {
),
&mut self.db_cache.word_prefix_pair_proximity_docids,
self.index.word_prefix_pair_proximity_docids.remap_data_type::<ByteSlice>(),
)
)?
.map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
pub fn get_db_prefix_word_pair_proximity_docids(
&mut self,
@@ -321,7 +246,7 @@ impl<'ctx> SearchContext<'ctx> {
right: Interned<String>,
proximity: u8,
) -> Result<Option<RoaringBitmap>> {
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value(
self.txn,
(proximity, left_prefix, right),
&(
@@ -331,7 +256,9 @@ impl<'ctx> SearchContext<'ctx> {
),
&mut self.db_cache.prefix_word_pair_proximity_docids,
self.index.prefix_word_pair_proximity_docids.remap_data_type::<ByteSlice>(),
)
)?
.map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
pub fn get_db_word_fid_docids(
@@ -339,18 +266,15 @@ impl<'ctx> SearchContext<'ctx> {
word: Interned<String>,
fid: u16,
) -> Result<Option<RoaringBitmap>> {
// if the requested fid isn't in the restricted list, return None.
if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
return Ok(None);
}
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value(
self.txn,
(word, fid),
&(self.word_interner.get(word).as_str(), fid),
&mut self.db_cache.word_fid_docids,
self.index.word_fid_docids.remap_data_type::<ByteSlice>(),
)
)?
.map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
pub fn get_db_word_prefix_fid_docids(
@@ -358,18 +282,15 @@ impl<'ctx> SearchContext<'ctx> {
word_prefix: Interned<String>,
fid: u16,
) -> Result<Option<RoaringBitmap>> {
// if the requested fid isn't in the restricted list, return None.
if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
return Ok(None);
}
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value(
self.txn,
(word_prefix, fid),
&(self.word_interner.get(word_prefix).as_str(), fid),
&mut self.db_cache.word_prefix_fid_docids,
self.index.word_prefix_fid_docids.remap_data_type::<ByteSlice>(),
)
)?
.map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
pub fn get_db_word_fids(&mut self, word: Interned<String>) -> Result<Vec<u16>> {
@@ -388,7 +309,7 @@ impl<'ctx> SearchContext<'ctx> {
for result in remap_key_type {
let ((_, fid), value) = result?;
// filling other caches to avoid searching for them again
self.db_cache.word_fid_docids.insert((word, fid), Some(Cow::Borrowed(value)));
self.db_cache.word_fid_docids.insert((word, fid), Some(value));
fids.push(fid);
}
entry.insert(fids.clone());
@@ -414,9 +335,7 @@ impl<'ctx> SearchContext<'ctx> {
for result in remap_key_type {
let ((_, fid), value) = result?;
// filling other caches to avoid searching for them again
self.db_cache
.word_prefix_fid_docids
.insert((word_prefix, fid), Some(Cow::Borrowed(value)));
self.db_cache.word_prefix_fid_docids.insert((word_prefix, fid), Some(value));
fids.push(fid);
}
entry.insert(fids.clone());
@@ -431,13 +350,15 @@ impl<'ctx> SearchContext<'ctx> {
word: Interned<String>,
position: u16,
) -> Result<Option<RoaringBitmap>> {
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value(
self.txn,
(word, position),
&(self.word_interner.get(word).as_str(), position),
&mut self.db_cache.word_position_docids,
self.index.word_position_docids.remap_data_type::<ByteSlice>(),
)
)?
.map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
pub fn get_db_word_prefix_position_docids(
@@ -445,13 +366,15 @@ impl<'ctx> SearchContext<'ctx> {
word_prefix: Interned<String>,
position: u16,
) -> Result<Option<RoaringBitmap>> {
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value(
self.txn,
(word_prefix, position),
&(self.word_interner.get(word_prefix).as_str(), position),
&mut self.db_cache.word_prefix_position_docids,
self.index.word_prefix_position_docids.remap_data_type::<ByteSlice>(),
)
)?
.map(|bytes| CboRoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding.into()))
.transpose()
}
pub fn get_db_word_positions(&mut self, word: Interned<String>) -> Result<Vec<u16>> {
@@ -470,9 +393,7 @@ impl<'ctx> SearchContext<'ctx> {
for result in remap_key_type {
let ((_, position), value) = result?;
// filling other caches to avoid searching for them again
self.db_cache
.word_position_docids
.insert((word, position), Some(Cow::Borrowed(value)));
self.db_cache.word_position_docids.insert((word, position), Some(value));
positions.push(position);
}
entry.insert(positions.clone());
@@ -503,7 +424,7 @@ impl<'ctx> SearchContext<'ctx> {
// filling other caches to avoid searching for them again
self.db_cache
.word_prefix_position_docids
.insert((word_prefix, position), Some(Cow::Borrowed(value)));
.insert((word_prefix, position), Some(value));
positions.push(position);
}
entry.insert(positions.clone());

View File

@@ -1,449 +0,0 @@
{"run_id":"1683129457-574770000","line":622,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":622,"expression":"matcher.format(format_options)"},"snapshot":"<em>Westfália</em>"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>a"}}
{"run_id":"1683133106-100492000","line":738,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":839,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":573,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":640,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":600,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":802,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":746,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":648,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":609,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":811,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":582,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":755,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":657,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":820,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":764,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":666,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":773,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":675,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":684,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":782,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":693,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":702,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":711,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":720,"new":null,"old":null}
{"run_id":"1683133106-100492000","line":622,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":622,"expression":"matcher.format(format_options)"},"snapshot":"Westfáliaaaaaa"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>aaaaaaa"}}
{"run_id":"1683193451-2793000","line":654,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":616,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":590,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":851,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":814,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":751,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":759,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":662,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":625,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":823,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":599,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":832,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":671,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":768,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":680,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":777,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":689,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":786,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":698,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":795,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":707,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":716,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":725,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":734,"new":null,"old":null}
{"run_id":"1683193451-2793000","line":637,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":637,"expression":"matcher.format(format_options)"},"snapshot":"Westfáliaaaaaa"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>aaaaaaa"}}
{"run_id":"1683193542-499542000","line":851,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":751,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":616,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":814,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":590,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":654,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":759,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":662,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":625,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":823,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":599,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":832,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":768,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":671,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":777,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":680,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":786,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":689,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":698,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":795,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":707,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":716,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":725,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":734,"new":null,"old":null}
{"run_id":"1683193542-499542000","line":637,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":637,"expression":"matcher.format(format_options)"},"snapshot":"Westfáliaaaaaa"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>aaaaaaa"}}
{"run_id":"1683193590-661809000","line":814,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":851,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":590,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":751,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":616,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":654,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":662,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":759,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":625,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":823,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":599,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":832,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":671,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":768,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":680,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":777,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":689,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":786,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":698,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":795,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":707,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":716,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":725,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":734,"new":null,"old":null}
{"run_id":"1683193590-661809000","line":637,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":637,"expression":"matcher.format(format_options)"},"snapshot":"<em>Westfália</em>"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>a"}}
{"run_id":"1683196250-584747000","line":654,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":616,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":851,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":814,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":590,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":751,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":662,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":759,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":625,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":823,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":599,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":671,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":832,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":768,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":680,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":777,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":689,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":786,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":698,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":795,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":707,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":716,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":725,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":734,"new":null,"old":null}
{"run_id":"1683196250-584747000","line":637,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode-3","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":637,"expression":"matcher.format(format_options)"},"snapshot":"<em>Westfália</em>"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Westfáli</em>a"}}
{"run_id":"1683196569-36502000","line":851,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":751,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":654,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":590,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":814,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":759,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":662,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":823,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":599,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":768,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":832,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":671,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":777,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":680,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":786,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":689,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":795,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":698,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":707,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":716,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":725,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":734,"new":null,"old":null}
{"run_id":"1683196569-36502000","line":616,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"highlight_unicode","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":616,"expression":"matcher.format(format_options)"},"snapshot":"<em>Ŵôřlḑ</em>ôle"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"<em>Ŵôřlḑôle</em>"}}
{"run_id":"1683196614-298348000","line":654,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":814,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":851,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":590,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":616,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":751,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":662,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":759,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":625,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":823,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":599,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":832,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":671,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":768,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":680,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":777,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":689,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":637,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":786,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":698,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":795,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":707,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":716,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":725,"new":null,"old":null}
{"run_id":"1683196614-298348000","line":734,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":751,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":590,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":654,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":616,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":814,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":851,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":662,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":759,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":625,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":823,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":599,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":671,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":832,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":768,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":680,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":777,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":689,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":786,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":637,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":698,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":795,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":707,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":716,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":725,"new":null,"old":null}
{"run_id":"1683196758-130465000","line":734,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":616,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":814,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":590,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":654,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":751,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":662,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":625,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":759,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":823,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":599,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":832,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":671,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":768,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":680,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":777,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":689,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":786,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":637,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":698,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":795,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":707,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":716,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":725,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":734,"new":null,"old":null}
{"run_id":"1683213265-505594000","line":850,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":850,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_"}}
{"run_id":"1683213465-911114000","line":751,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":654,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":814,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":590,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":616,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":759,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":662,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":625,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":823,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":599,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":832,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":768,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":671,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":777,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":680,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":786,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":689,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":637,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":698,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":795,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":707,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":716,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":725,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":734,"new":null,"old":null}
{"run_id":"1683213465-911114000","line":850,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":850,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_"}}
{"run_id":"1683213557-564653000","line":751,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":590,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":814,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":616,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":654,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":759,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":625,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":662,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":823,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":599,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":832,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":768,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":671,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":777,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":680,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":786,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":689,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":637,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":795,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":698,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":707,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":716,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":725,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":734,"new":null,"old":null}
{"run_id":"1683213557-564653000","line":850,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":850,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he thedoor"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_ _thedoor_"}}
{"run_id":"1683213999-273520000","line":657,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":754,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":619,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":593,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":817,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":665,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":762,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":628,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":826,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":602,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":674,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":835,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":771,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":683,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":780,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":692,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":701,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":789,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":640,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":710,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":798,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":719,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":728,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":737,"new":null,"old":null}
{"run_id":"1683213999-273520000","line":853,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":853,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he thedoor"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_ _thedoor_"}}
{"run_id":"1683710541-379812000","line":754,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":593,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":657,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":817,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":619,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":762,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":665,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":628,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":826,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":602,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":771,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":835,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":674,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":780,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":683,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":789,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":692,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":640,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":798,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":701,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":710,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":719,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":728,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":737,"new":null,"old":null}
{"run_id":"1683710541-379812000","line":853,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":853,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he _thedoor_"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_ _thedoor_"}}
{"run_id":"1683710687-182342000","line":619,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":657,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":817,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":593,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":754,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":665,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":628,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":762,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":826,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":602,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":835,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":674,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":771,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":780,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":683,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":640,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":692,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":789,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":701,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":798,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":710,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":719,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":728,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":737,"new":null,"old":null}
{"run_id":"1683710687-182342000","line":853,"new":{"module_name":"milli__search__new__matches__tests","snapshot_name":"partial_matches","metadata":{"source":"milli/src/search/new/matches/mod.rs","assertion_line":853,"expression":"matcher.format(format_options)"},"snapshot":"_the_ do or die can't be he do and or isn't he _thedoor_"},"old":{"module_name":"milli__search__new__matches__tests","metadata":{},"snapshot":"_the_ _do_ _or_ die can't be he do and or isn'_t_ _he_ _thedoor_"}}
{"run_id":"1684141548-57871000","line":654,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":662,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":671,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":680,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":689,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":698,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":707,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":716,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":725,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":734,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":590,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":599,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":751,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":759,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":768,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":777,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":786,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":795,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":616,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":625,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":637,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":851,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":814,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":823,"new":null,"old":null}
{"run_id":"1684141548-57871000","line":832,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":654,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":662,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":671,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":680,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":689,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":698,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":707,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":716,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":725,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":734,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":590,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":599,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":751,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":759,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":768,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":777,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":786,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":795,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":616,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":625,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":637,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":851,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":814,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":823,"new":null,"old":null}
{"run_id":"1684141761-300166000","line":832,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":654,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":662,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":671,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":680,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":689,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":698,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":707,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":716,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":725,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":734,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":590,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":599,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":751,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":759,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":768,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":777,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":786,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":795,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":616,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":625,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":637,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":851,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":814,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":823,"new":null,"old":null}
{"run_id":"1684227379-943236000","line":832,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":654,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":662,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":671,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":680,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":689,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":698,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":707,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":716,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":725,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":734,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":590,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":599,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":751,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":759,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":768,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":777,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":786,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":795,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":616,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":625,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":637,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":851,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":814,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":823,"new":null,"old":null}
{"run_id":"1686671229-287954000","line":832,"new":null,"old":null}

View File

@@ -56,7 +56,6 @@ pub struct SearchContext<'ctx> {
pub phrase_interner: DedupInterner<Phrase>,
pub term_interner: Interner<QueryTerm>,
pub phrase_docids: PhraseDocIdsCache,
pub restricted_fids: Option<Vec<u16>>,
}
impl<'ctx> SearchContext<'ctx> {
@@ -69,18 +68,8 @@ impl<'ctx> SearchContext<'ctx> {
phrase_interner: <_>::default(),
term_interner: <_>::default(),
phrase_docids: <_>::default(),
restricted_fids: None,
}
}
pub fn searchable_attributes(&mut self, searchable_attributes: &'ctx [String]) -> Result<()> {
let fids_map = self.index.fields_ids_map(self.txn)?;
let restricted_fids =
searchable_attributes.iter().filter_map(|name| fids_map.id(name)).collect();
self.restricted_fids = Some(restricted_fids);
Ok(())
}
}
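
A sketch of the name-to-field-id mapping that `searchable_attributes` performs, with a plain HashMap standing in for the index's FieldsIdsMap; as in the `filter_map` above, unknown attribute names are silently dropped:

use std::collections::HashMap;

fn restricted_fids(fids_map: &HashMap<String, u16>, searchable: &[String]) -> Vec<u16> {
    searchable.iter().filter_map(|name| fids_map.get(name).copied()).collect()
}

fn main() {
    let mut fids_map = HashMap::new();
    fids_map.insert("title".to_string(), 0);
    fids_map.insert("overview".to_string(), 1);
    // "unknown" has no field id and is skipped rather than reported.
    let restricted = restricted_fids(&fids_map, &["overview".to_string(), "unknown".to_string()]);
    assert_eq!(restricted, vec![1]);
}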
#[derive(Clone, Copy, PartialEq, PartialOrd, Ord, Eq)]

View File

@@ -77,9 +77,13 @@ pub fn located_query_terms_from_tokens(
}
}
TokenKind::Separator(separator_kind) => {
// add penalty for hard separators
if let SeparatorKind::Hard = separator_kind {
position = position.wrapping_add(7);
match separator_kind {
SeparatorKind::Hard => {
position += 1;
}
SeparatorKind::Soft => {
position += 0;
}
}
phrase = 'phrase: {
@@ -284,36 +288,3 @@ impl PhraseBuilder {
})
}
}
#[cfg(test)]
mod tests {
use charabia::TokenizerBuilder;
use super::*;
use crate::index::tests::TempIndex;
fn temp_index_with_documents() -> TempIndex {
let temp_index = TempIndex::new();
temp_index
.add_documents(documents!([
{ "id": 1, "name": "split this world westfali westfalia the Ŵôřlḑôle" },
{ "id": 2, "name": "Westfália" },
{ "id": 3, "name": "Ŵôřlḑôle" },
]))
.unwrap();
temp_index
}
#[test]
fn start_with_hard_separator() -> Result<()> {
let tokenizer = TokenizerBuilder::new().build();
let tokens = tokenizer.tokenize(".");
let index = temp_index_with_documents();
let rtxn = index.read_txn()?;
let mut ctx = SearchContext::new(&index, &rtxn);
// panics with `attempt to add with overflow` before <https://github.com/meilisearch/meilisearch/issues/3785>
let located_query_terms = located_query_terms_from_tokens(&mut ctx, tokens, None)?;
assert!(located_query_terms.is_empty());
Ok(())
}
}
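
The removed test documents why the surviving code uses `wrapping_add`: a plain `+` on a position close to the integer's maximum panics in debug builds. A tiny illustration, assuming the position is a u16 as in the query-term code:

fn main() {
    let position: u16 = u16::MAX - 3;
    // `position + 7` would panic in debug builds with
    // "attempt to add with overflow"; wrapping_add is always defined.
    assert_eq!(position.wrapping_add(7), 3);
    // A checked add makes the failure explicit instead of wrapping:
    assert_eq!(position.checked_add(7), None);
}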

View File

@@ -89,6 +89,7 @@ Create a snapshot test of the given database.
- `exact_word_docids`
- `word_prefix_docids`
- `exact_word_prefix_docids`
- `docid_word_positions`
- `word_pair_proximity_docids`
- `word_prefix_pair_proximity_docids`
- `word_position_docids`
@@ -216,6 +217,11 @@ pub fn snap_exact_word_prefix_docids(index: &Index) -> String {
&format!("{s:<16} {}", display_bitmap(&b))
})
}
pub fn snap_docid_word_positions(index: &Index) -> String {
make_db_snap_from_iter!(index, docid_word_positions, |((idx, s), b)| {
&format!("{idx:<6} {s:<16} {}", display_bitmap(&b))
})
}
pub fn snap_word_pair_proximity_docids(index: &Index) -> String {
make_db_snap_from_iter!(index, word_pair_proximity_docids, |((proximity, word1, word2), b)| {
&format!("{proximity:<2} {word1:<16} {word2:<16} {}", display_bitmap(&b))
@@ -471,6 +477,9 @@ macro_rules! full_snap_of_db {
($index:ident, exact_word_prefix_docids) => {{
$crate::snapshot_tests::snap_exact_word_prefix_docids(&$index)
}};
($index:ident, docid_word_positions) => {{
$crate::snapshot_tests::snap_docid_word_positions(&$index)
}};
($index:ident, word_pair_proximity_docids) => {{
$crate::snapshot_tests::snap_word_pair_proximity_docids(&$index)
}};
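
For reference, the line produced by `snap_docid_word_positions` is a fixed-width table row. A minimal sketch of the same formatting, with plain values in place of the database iterator:

fn render_line(idx: u32, word: &str, bitmap: &str) -> String {
    // Mirrors the `{idx:<6} {s:<16} {}` format string above.
    format!("{idx:<6} {word:<16} {bitmap}")
}

fn main() {
    let line = render_line(0, "hello", "[0, 1]");
    // Columns are padded to widths 6 and 16, plus the two separating spaces.
    assert_eq!(line.len(), 6 + 1 + 16 + 1 + "[0, 1]".len());
}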

View File

@@ -23,6 +23,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> {
exact_word_docids,
word_prefix_docids,
exact_word_prefix_docids,
docid_word_positions,
word_pair_proximity_docids,
word_prefix_pair_proximity_docids,
prefix_word_pair_proximity_docids,
@@ -79,6 +80,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> {
exact_word_docids.clear(self.wtxn)?;
word_prefix_docids.clear(self.wtxn)?;
exact_word_prefix_docids.clear(self.wtxn)?;
docid_word_positions.clear(self.wtxn)?;
word_pair_proximity_docids.clear(self.wtxn)?;
word_prefix_pair_proximity_docids.clear(self.wtxn)?;
prefix_word_pair_proximity_docids.clear(self.wtxn)?;
@@ -139,6 +141,7 @@ mod tests {
assert!(index.word_docids.is_empty(&rtxn).unwrap());
assert!(index.word_prefix_docids.is_empty(&rtxn).unwrap());
assert!(index.docid_word_positions.is_empty(&rtxn).unwrap());
assert!(index.word_pair_proximity_docids.is_empty(&rtxn).unwrap());
assert!(index.field_id_word_count_docids.is_empty(&rtxn).unwrap());
assert!(index.word_prefix_pair_proximity_docids.is_empty(&rtxn).unwrap());

View File

@@ -1,5 +1,5 @@
use std::collections::btree_map::Entry;
use std::collections::{BTreeSet, HashMap, HashSet};
use std::collections::{HashMap, HashSet};
use fst::IntoStreamer;
use heed::types::{ByteSlice, DecodeIgnore, Str, UnalignedSlice};
@@ -15,7 +15,8 @@ use crate::facet::FacetType;
use crate::heed_codec::facet::FieldDocIdFacetCodec;
use crate::heed_codec::CboRoaringBitmapCodec;
use crate::{
ExternalDocumentsIds, FieldId, FieldIdMapMissingEntry, Index, Result, RoaringBitmapCodec, BEU32,
ExternalDocumentsIds, FieldId, FieldIdMapMissingEntry, Index, Result, RoaringBitmapCodec,
SmallString32, BEU32,
};
pub struct DeleteDocuments<'t, 'u, 'i> {
@@ -231,6 +232,7 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
exact_word_docids,
word_prefix_docids,
exact_word_prefix_docids,
docid_word_positions,
word_pair_proximity_docids,
field_id_word_count_docids,
word_prefix_pair_proximity_docids,
@@ -249,9 +251,23 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
facet_id_is_empty_docids,
documents,
} = self.index;
// Remove from the documents database
// Retrieve the words contained in the documents.
let mut words = Vec::new();
for docid in &self.to_delete_docids {
documents.delete(self.wtxn, &BEU32::new(docid))?;
// We iterate through the word positions of the document id, retrieve each word, and delete its positions.
// We create an iterator so we can both read the content and delete the key-value itself.
// Acquiring a single cursor for the get-and-delete is faster, as it traverses the LMDB B-Tree once instead of twice.
let mut iter = docid_word_positions.prefix_iter_mut(self.wtxn, &(docid, ""))?;
while let Some(result) = iter.next() {
let ((_docid, word), _positions) = result?;
// This boolean will indicate if we must remove this word from the words FST.
words.push((SmallString32::from(word), false));
// safety: we don't keep references from inside the LMDB database.
unsafe { iter.del_current()? };
}
}
// We acquire the current external documents ids map...
// Note that its soft-deleted document ids field will be equal to the `to_delete_docids`
@@ -262,27 +278,42 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
let new_external_documents_ids = new_external_documents_ids.into_static();
self.index.put_external_documents_ids(self.wtxn, &new_external_documents_ids)?;
let mut words_to_keep = BTreeSet::default();
let mut words_to_delete = BTreeSet::default();
// Sorting the words first may improve lookup performance
// by keeping the relevant LMDB pages in cache.
words.sort_unstable();
// We iterate over the words and delete the documents ids
// from the word docids database.
remove_from_word_docids(
self.wtxn,
word_docids,
&self.to_delete_docids,
&mut words_to_keep,
&mut words_to_delete,
)?;
remove_from_word_docids(
self.wtxn,
exact_word_docids,
&self.to_delete_docids,
&mut words_to_keep,
&mut words_to_delete,
)?;
for (word, must_remove) in &mut words {
remove_from_word_docids(
self.wtxn,
word_docids,
word.as_str(),
must_remove,
&self.to_delete_docids,
)?;
remove_from_word_docids(
self.wtxn,
exact_word_docids,
word.as_str(),
must_remove,
&self.to_delete_docids,
)?;
}
// We construct an FST set that contains the words to delete from the words FST.
let words_to_delete = fst::Set::from_iter(words_to_delete.difference(&words_to_keep))?;
let words_to_delete =
words.iter().filter_map(
|(word, must_remove)| {
if *must_remove {
Some(word.as_str())
} else {
None
}
},
);
let words_to_delete = fst::Set::from_iter(words_to_delete)?;
let new_words_fst = {
// We retrieve the current words FST from the database.
@@ -501,24 +532,23 @@ fn remove_from_word_prefix_docids(
fn remove_from_word_docids(
txn: &mut heed::RwTxn,
db: &heed::Database<Str, RoaringBitmapCodec>,
word: &str,
must_remove: &mut bool,
to_remove: &RoaringBitmap,
words_to_keep: &mut BTreeSet<String>,
words_to_remove: &mut BTreeSet<String>,
) -> Result<()> {
// We create an iterator so we can both read the word docids and delete or update them in place.
// Acquiring a single cursor for the get followed by the delete or put is faster,
// as it traverses the LMDB B-Tree once instead of twice.
let mut iter = db.iter_mut(txn)?;
while let Some((key, mut docids)) = iter.next().transpose()? {
let previous_len = docids.len();
docids -= to_remove;
if docids.is_empty() {
// safety: we don't keep references from inside the LMDB database.
unsafe { iter.del_current()? };
words_to_remove.insert(key.to_owned());
} else {
words_to_keep.insert(key.to_owned());
if docids.len() != previous_len {
let mut iter = db.prefix_iter_mut(txn, word)?;
if let Some((key, mut docids)) = iter.next().transpose()? {
if key == word {
let previous_len = docids.len();
docids -= to_remove;
if docids.is_empty() {
// safety: we don't keep references from inside the LMDB database.
unsafe { iter.del_current()? };
*must_remove = true;
} else if docids.len() != previous_len {
let key = key.to_owned();
// safety: we don't keep references from inside the LMDB database.
unsafe { iter.put_current(&key, &docids)? };
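
The hunk above narrows `remove_from_word_docids` from a full-database scan to a single-word prefix lookup. A self-contained sketch of the per-word logic, with a BTreeMap of BTreeSets standing in for the LMDB database and RoaringBitmap:

use std::collections::{BTreeMap, BTreeSet};

fn remove_from_word_docids(
    db: &mut BTreeMap<String, BTreeSet<u32>>,
    word: &str,
    to_remove: &BTreeSet<u32>,
    must_remove: &mut bool,
) {
    // Subtract the deleted document ids from this word's posting list; in
    // the LMDB version a shrunk but non-empty list is written back with
    // `iter.put_current`.
    let delete_entry = if let Some(docids) = db.get_mut(word) {
        docids.retain(|id| !to_remove.contains(id));
        docids.is_empty()
    } else {
        false
    };
    if delete_entry {
        // The word no longer points to any document: drop the entry and
        // remember to strip it from the words FST.
        db.remove(word);
        *must_remove = true;
    }
}

fn main() {
    let mut db = BTreeMap::new();
    db.insert("hello".to_string(), BTreeSet::from([0u32, 1]));
    let mut must_remove = false;
    remove_from_word_docids(&mut db, "hello", &BTreeSet::from([0u32, 1]), &mut must_remove);
    assert!(must_remove);
    assert!(!db.contains_key("hello"));
}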
@@ -597,7 +627,7 @@ mod tests {
use super::*;
use crate::index::tests::TempIndex;
use crate::{db_snap, Filter, Search};
use crate::{db_snap, Filter};
fn delete_documents<'t>(
wtxn: &mut RwTxn<'t, '_>,
@@ -1169,52 +1199,4 @@ mod tests {
DeletionStrategy::AlwaysSoft,
);
}
#[test]
fn delete_words_exact_attributes() {
let index = TempIndex::new();
index
.update_settings(|settings| {
settings.set_primary_key(S("id"));
settings.set_searchable_fields(vec![S("text"), S("exact")]);
settings.set_exact_attributes(vec![S("exact")].into_iter().collect());
})
.unwrap();
index
.add_documents(documents!([
{ "id": 0, "text": "hello" },
{ "id": 1, "exact": "hello"}
]))
.unwrap();
db_snap!(index, word_docids, 1, @r###"
hello [0, ]
"###);
db_snap!(index, exact_word_docids, 1, @r###"
hello [1, ]
"###);
db_snap!(index, words_fst, 1, @"300000000000000001084cfcfc2ce1000000016000000090ea47f");
let mut wtxn = index.write_txn().unwrap();
let deleted_internal_ids =
delete_documents(&mut wtxn, &index, &["1"], DeletionStrategy::AlwaysHard);
wtxn.commit().unwrap();
db_snap!(index, word_docids, 2, @r###"
hello [0, ]
"###);
db_snap!(index, exact_word_docids, 2, @"");
db_snap!(index, words_fst, 2, @"300000000000000001084cfcfc2ce1000000016000000090ea47f");
insta::assert_snapshot!(format!("{deleted_internal_ids:?}"), @"[1]");
let txn = index.read_txn().unwrap();
let words = index.words_fst(&txn).unwrap().into_stream().into_strs().unwrap();
insta::assert_snapshot!(format!("{words:?}"), @r###"["hello"]"###);
let mut s = Search::new(&txn, &index);
s.query("hello");
let crate::SearchResult { documents_ids, .. } = s.execute().unwrap();
insta::assert_snapshot!(format!("{documents_ids:?}"), @"[0]");
}
}

View File

@@ -1,6 +1,6 @@
use std::collections::HashMap;
use std::fs::File;
use std::io;
use std::{cmp, io};
use grenad::Sorter;
@@ -54,10 +54,11 @@ pub fn extract_fid_word_count_docids<R: io::Read + io::Seek>(
}
for position in read_u32_ne_bytes(value) {
let (field_id, _) = relative_from_absolute_position(position);
let (field_id, position) = relative_from_absolute_position(position);
let word_count = position as u32 + 1;
let value = document_fid_wordcount.entry(field_id as FieldId).or_insert(0);
*value += 1;
*value = cmp::max(*value, word_count);
}
}
@@ -82,7 +83,7 @@ fn drain_document_fid_wordcount_into_sorter(
let mut key_buffer = Vec::new();
for (fid, count) in document_fid_wordcount.drain() {
if count <= 30 {
if count <= 10 {
key_buffer.clear();
key_buffer.extend_from_slice(&fid.to_be_bytes());
key_buffer.push(count as u8);
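
The extractor above relies on `relative_from_absolute_position` to split one u32 token position into a field id and an in-field word position. A sketch under the assumption that the field id lives in the high 16 bits and the relative position in the low 16 bits; the exact packing is not shown in this diff:

fn relative_from_absolute_position(absolute: u32) -> (u16, u16) {
    ((absolute >> 16) as u16, (absolute & 0xFFFF) as u16)
}

fn main() {
    // Field 2, word position 5, under the assumed 16/16 packing.
    let absolute = (2u32 << 16) | 5;
    let (field_id, position) = relative_from_absolute_position(absolute);
    assert_eq!((field_id, position), (2, 5));
    // The per-field word count kept by the extractor is `position + 1`,
    // i.e. the highest word index seen so far in that field.
    assert_eq!(position as u32 + 1, 6);
}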

View File

@@ -325,6 +325,8 @@ fn send_and_extract_flattened_documents_data(
// send docid_word_positions_chunk to DB writer
let docid_word_positions_chunk =
unsafe { as_cloneable_grenad(&docid_word_positions_chunk)? };
let _ = lmdb_writer_sx
.send(Ok(TypedChunk::DocidWordPositions(docid_word_positions_chunk.clone())));
let _ =
lmdb_writer_sx.send(Ok(TypedChunk::ScriptLanguageDocids(script_language_pair)));

View File

@@ -4,6 +4,7 @@ use std::result::Result as StdResult;
use roaring::RoaringBitmap;
use super::read_u32_ne_bytes;
use crate::heed_codec::CboRoaringBitmapCodec;
use crate::update::index_documents::transform::Operation;
use crate::Result;
@@ -21,6 +22,10 @@ pub fn concat_u32s_array<'a>(_key: &[u8], values: &[Cow<'a, [u8]>]) -> Result<Co
}
}
pub fn roaring_bitmap_from_u32s_array(slice: &[u8]) -> RoaringBitmap {
read_u32_ne_bytes(slice).collect()
}
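
`roaring_bitmap_from_u32s_array` simply reinterprets a byte slice as native-endian u32s and collects them. A dependency-free sketch that collects into a Vec instead of a RoaringBitmap (a bitmap would additionally deduplicate and order the values):

fn u32s_from_ne_bytes(slice: &[u8]) -> Vec<u32> {
    slice
        .chunks_exact(4)
        .map(|chunk| u32::from_ne_bytes(chunk.try_into().unwrap()))
        .collect()
}

fn main() {
    let mut bytes = Vec::new();
    for n in [3u32, 1, 2] {
        bytes.extend_from_slice(&n.to_ne_bytes());
    }
    assert_eq!(u32s_from_ne_bytes(&bytes), vec![3, 1, 2]);
}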
pub fn serialize_roaring_bitmap(bitmap: &RoaringBitmap, buffer: &mut Vec<u8>) -> io::Result<()> {
buffer.clear();
buffer.reserve(bitmap.serialized_size());

View File

@@ -14,8 +14,8 @@ pub use grenad_helpers::{
};
pub use merge_functions::{
concat_u32s_array, keep_first, keep_latest_obkv, merge_cbo_roaring_bitmaps,
merge_obkvs_and_operations, merge_roaring_bitmaps, merge_two_obkvs, serialize_roaring_bitmap,
MergeFn,
merge_obkvs_and_operations, merge_roaring_bitmaps, merge_two_obkvs,
roaring_bitmap_from_u32s_array, serialize_roaring_bitmap, MergeFn,
};
use crate::MAX_WORD_LENGTH;

View File

@@ -2471,11 +2471,11 @@ mod tests {
{
"id": 3,
"text": "a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a "
}
]))
@@ -2513,5 +2513,6 @@ mod tests {
db_snap!(index, word_fid_docids, 3, @"4c2e2a1832e5802796edc1638136d933");
db_snap!(index, word_position_docids, 3, @"74f556b91d161d997a89468b4da1cb8f");
db_snap!(index, docid_word_positions, 3, @"5287245332627675740b28bd46e1cde1");
}
}

View File

@@ -7,19 +7,24 @@ use std::io;
use charabia::{Language, Script};
use grenad::MergerBuilder;
use heed::types::ByteSlice;
use heed::RwTxn;
use heed::{BytesDecode, RwTxn};
use roaring::RoaringBitmap;
use super::helpers::{
self, merge_ignore_values, serialize_roaring_bitmap, valid_lmdb_key, CursorClonableMmap,
self, merge_ignore_values, roaring_bitmap_from_u32s_array, serialize_roaring_bitmap,
valid_lmdb_key, CursorClonableMmap,
};
use super::{ClonableMmap, MergeFn};
use crate::facet::FacetType;
use crate::update::facet::FacetsUpdate;
use crate::update::index_documents::helpers::as_cloneable_grenad;
use crate::{lat_lng_to_xyz, CboRoaringBitmapCodec, DocumentId, GeoPoint, Index, Result};
use crate::{
lat_lng_to_xyz, BoRoaringBitmapCodec, CboRoaringBitmapCodec, DocumentId, GeoPoint, Index,
Result,
};
pub(crate) enum TypedChunk {
DocidWordPositions(grenad::Reader<CursorClonableMmap>),
FieldIdDocidFacetStrings(grenad::Reader<CursorClonableMmap>),
FieldIdDocidFacetNumbers(grenad::Reader<CursorClonableMmap>),
Documents(grenad::Reader<CursorClonableMmap>),
@@ -51,6 +56,29 @@ pub(crate) fn write_typed_chunk_into_index(
) -> Result<(RoaringBitmap, bool)> {
let mut is_merged_database = false;
match typed_chunk {
TypedChunk::DocidWordPositions(docid_word_positions_iter) => {
write_entries_into_database(
docid_word_positions_iter,
&index.docid_word_positions,
wtxn,
index_is_empty,
|value, buffer| {
// ensure that values are unique and ordered
let positions = roaring_bitmap_from_u32s_array(value);
BoRoaringBitmapCodec::serialize_into(&positions, buffer);
Ok(buffer)
},
|new_values, db_values, buffer| {
let new_values = roaring_bitmap_from_u32s_array(new_values);
let positions = match BoRoaringBitmapCodec::bytes_decode(db_values) {
Some(db_values) => new_values | db_values,
None => new_values, // should not happen
};
BoRoaringBitmapCodec::serialize_into(&positions, buffer);
Ok(())
},
)?;
}
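
The merge closure in the arm above unions freshly extracted positions with whatever is already stored, falling back to the new values if the stored bytes fail to decode. A minimal sketch with BTreeSet standing in for the BoRoaringBitmapCodec-encoded bitmap:

use std::collections::BTreeSet;

fn merge_positions(new_values: BTreeSet<u32>, db_values: Option<BTreeSet<u32>>) -> BTreeSet<u32> {
    match db_values {
        // Union keeps every position seen on either side.
        Some(db_values) => &new_values | &db_values,
        None => new_values, // should not happen, as the original comment notes
    }
}

fn main() {
    let merged = merge_positions(BTreeSet::from([1u32, 5]), Some(BTreeSet::from([2u32, 5])));
    assert_eq!(merged, BTreeSet::from([1u32, 2, 5]));
}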
TypedChunk::Documents(obkv_documents_iter) => {
let mut cursor = obkv_documents_iter.into_cursor()?;
while let Some((key, value)) = cursor.move_on_next()? {

View File

@@ -4,8 +4,7 @@ pub use self::delete_documents::{DeleteDocuments, DeletionStrategy, DocumentDele
pub use self::facet::bulk::FacetsUpdateBulk;
pub use self::facet::incremental::FacetsUpdateIncrementalInner;
pub use self::index_documents::{
merge_cbo_roaring_bitmaps, merge_roaring_bitmaps, DocumentAdditionResult, DocumentId,
IndexDocuments, IndexDocumentsConfig, IndexDocumentsMethod, MergeFn,
DocumentAdditionResult, DocumentId, IndexDocuments, IndexDocumentsConfig, IndexDocumentsMethod,
};
pub use self::indexer_config::IndexerConfig;
pub use self::prefix_word_pairs::{

View File

@@ -14,7 +14,7 @@ use crate::error::UserError;
use crate::index::{DEFAULT_MIN_WORD_LEN_ONE_TYPO, DEFAULT_MIN_WORD_LEN_TWO_TYPOS};
use crate::update::index_documents::IndexDocumentsMethod;
use crate::update::{IndexDocuments, UpdateIndexingStep};
use crate::{FieldsIdsMap, Index, Result};
use crate::{FieldsIdsMap, Index, OrderBy, Result};
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
pub enum Setting<T> {
@@ -122,6 +122,7 @@ pub struct Settings<'a, 't, 'u, 'i> {
/// Attributes on which typo tolerance is disabled.
exact_attributes: Setting<HashSet<String>>,
max_values_per_facet: Setting<usize>,
sort_facet_values_by: Setting<HashMap<String, OrderBy>>,
pagination_max_total_hits: Setting<usize>,
}
@@ -149,6 +150,7 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
min_word_len_one_typo: Setting::NotSet,
exact_attributes: Setting::NotSet,
max_values_per_facet: Setting::NotSet,
sort_facet_values_by: Setting::NotSet,
pagination_max_total_hits: Setting::NotSet,
indexer_config,
}
@@ -275,6 +277,14 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
self.max_values_per_facet = Setting::Reset;
}
pub fn set_sort_facet_values_by(&mut self, value: HashMap<String, OrderBy>) {
self.sort_facet_values_by = Setting::Set(value);
}
pub fn reset_sort_facet_values_by(&mut self) {
self.sort_facet_values_by = Setting::Reset;
}
pub fn set_pagination_max_total_hits(&mut self, value: usize) {
self.pagination_max_total_hits = Setting::Set(value);
}
@@ -680,6 +690,20 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
Ok(())
}
fn update_sort_facet_values_by(&mut self) -> Result<()> {
match self.sort_facet_values_by.as_ref() {
Setting::Set(value) => {
self.index.put_sort_facet_values_by(self.wtxn, value)?;
}
Setting::Reset => {
self.index.delete_sort_facet_values_by(self.wtxn)?;
}
Setting::NotSet => (),
}
Ok(())
}
fn update_pagination_max_total_hits(&mut self) -> Result<()> {
match self.pagination_max_total_hits {
Setting::Set(max) => {
@@ -714,6 +738,7 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
self.update_min_typo_word_len()?;
self.update_exact_words()?;
self.update_max_values_per_facet()?;
self.update_sort_facet_values_by()?;
self.update_pagination_max_total_hits()?;
// If there is new faceted fields we indicate that we must reindex as we must
@@ -1515,6 +1540,7 @@ mod tests {
exact_words,
exact_attributes,
max_values_per_facet,
sort_facet_values_by,
pagination_max_total_hits,
} = settings;
assert!(matches!(searchable_fields, Setting::NotSet));
@@ -1532,6 +1558,7 @@ mod tests {
assert!(matches!(exact_words, Setting::NotSet));
assert!(matches!(exact_attributes, Setting::NotSet));
assert!(matches!(max_values_per_facet, Setting::NotSet));
assert!(matches!(sort_facet_values_by, Setting::NotSet));
assert!(matches!(pagination_max_total_hits, Setting::NotSet));
})
.unwrap();

View File

@@ -5,7 +5,7 @@ use heed::EnvOpenOptions;
use maplit::hashset;
use milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use milli::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings};
use milli::{FacetDistribution, Index, Object};
use milli::{FacetDistribution, Index, Object, OrderBy};
use serde_json::Deserializer;
#[test]
@@ -63,12 +63,12 @@ fn test_facet_distribution_with_no_facet_values() {
let txn = index.read_txn().unwrap();
let mut distrib = FacetDistribution::new(&txn, &index);
distrib.facets(vec!["genres"]);
distrib.facets(vec![("genres", OrderBy::default())]);
let result = distrib.execute().unwrap();
assert_eq!(result["genres"].len(), 0);
let mut distrib = FacetDistribution::new(&txn, &index);
distrib.facets(vec!["tags"]);
distrib.facets(vec![("tags", OrderBy::default())]);
let result = distrib.execute().unwrap();
assert_eq!(result["tags"].len(), 2);
}
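
The test above passes `OrderBy::default()` for both facets; the point of the PR is that a facet can instead opt into count ordering. A self-contained sketch contrasting the two orders, assuming lexicographic is the default and that ties under count ordering fall back to the value itself:

fn order_facets(mut values: Vec<(&str, u64)>, by_count: bool) -> Vec<(&str, u64)> {
    if by_count {
        // Highest count first; ties fall back to the facet value.
        values.sort_by(|a, b| b.1.cmp(&a.1).then(a.0.cmp(b.0)));
    } else {
        values.sort_by(|a, b| a.0.cmp(b.0));
    }
    values
}

fn main() {
    let values = vec![("drama", 12u64), ("action", 3), ("comedy", 12)];
    assert_eq!(
        order_facets(values.clone(), true),
        vec![("comedy", 12), ("drama", 12), ("action", 3)]
    );
    assert_eq!(
        order_facets(values, false),
        vec![("action", 3), ("comedy", 12), ("drama", 12)]
    );
}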