Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-11-22 04:36:32 +00:00)
Compare commits
18 Commits: v1.21.0...prototype-
| SHA1 |
|---|
| ad804b60fc |
| 9fddbff916 |
| 7fe2b0a177 |
| 060562d156 |
| 44fdc8b350 |
| a1a7643554 |
| 1d78637872 |
| 4fe073cc1a |
| 5cd3d36d20 |
| d7ad76ea1e |
| e82bb93221 |
| 000cb93aad |
| ad4f5514b9 |
| 8d29a29867 |
| d7de819d11 |
| e43d67591c |
| 134237d1eb |
| 26d9070aa7 |
.github/workflows/bench-manual.yml (vendored): 2 changes

@@ -18,7 +18,7 @@ jobs:
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
.github/workflows/bench-pr.yml (vendored): 2 changes

@@ -66,7 +66,7 @@ jobs:
fetch-depth: 0 # fetch full history to be able to get main commit sha
ref: ${{ steps.comment-branch.outputs.head_ref }}

-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
.github/workflows/bench-push-indexing.yml (vendored): 2 changes

@@ -12,7 +12,7 @@ jobs:
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
.github/workflows/benchmarks-manual.yml (vendored): 2 changes

@@ -18,7 +18,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
.github/workflows/benchmarks-pr.yml (vendored): 2 changes

@@ -44,7 +44,7 @@ jobs:
exit 1
fi

-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
@@ -16,7 +16,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
.github/workflows/flaky-tests.yml (vendored): 2 changes

@@ -17,7 +17,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
- name: Install cargo-flaky
run: cargo install cargo-flaky
- name: Run cargo flaky in the dumps
.github/workflows/fuzzer-indexing.yml (vendored): 2 changes

@@ -12,7 +12,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
.github/workflows/publish-apt-brew-pkg.yml (vendored): 2 changes

@@ -25,7 +25,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
- name: Install cargo-deb
run: cargo install cargo-deb
- uses: actions/checkout@v5
.github/workflows/publish-release-assets.yml (vendored): 8 changes

@@ -45,7 +45,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
- name: Build
run: cargo build --release --locked
# No need to upload binaries for dry run (cron)

@@ -75,7 +75,7 @@ jobs:
asset_name: meilisearch-windows-amd64.exe
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
- name: Build
run: cargo build --release --locked
# No need to upload binaries for dry run (cron)

@@ -101,7 +101,7 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v5
- name: Installing Rust toolchain
-uses: dtolnay/rust-toolchain@1.85
+uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
target: ${{ matrix.target }}

@@ -148,7 +148,7 @@ jobs:
add-apt-repository "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
apt-get update -y && apt-get install -y docker-ce
- name: Installing Rust toolchain
-uses: dtolnay/rust-toolchain@1.85
+uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
target: ${{ matrix.target }}
.github/workflows/test-suite.yml (vendored): 14 changes

@@ -27,7 +27,7 @@ jobs:
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- name: Setup test with Rust stable
-uses: dtolnay/rust-toolchain@1.85
+uses: dtolnay/rust-toolchain@1.89
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.8.0
- name: Run cargo check without any default features

@@ -52,7 +52,7 @@ jobs:
- uses: actions/checkout@v5
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.8.0
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
- name: Run cargo check without any default features
uses: actions-rs/cargo@v1
with:

@@ -77,7 +77,7 @@ jobs:
run: |
apt-get update
apt-get install --assume-yes build-essential curl
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
- name: Run cargo build with almost all features
run: |
cargo build --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"

@@ -129,7 +129,7 @@ jobs:
run: |
apt-get update
apt-get install --assume-yes build-essential curl
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
- name: Run cargo tree without default features and check lindera is not present
run: |
if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then

@@ -153,7 +153,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.8.0
- name: Run tests in debug

@@ -167,7 +167,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
components: clippy

@@ -184,7 +184,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
toolchain: nightly-2024-07-09

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
-- uses: dtolnay/rust-toolchain@1.85
+- uses: dtolnay/rust-toolchain@1.89
with:
profile: minimal
- name: Install sd
Cargo.lock (generated): 14 changes

@@ -2613,14 +2613,15 @@ dependencies = [

[[package]]
name = "hannoy"
-version = "0.0.5"
+version = "0.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4b6a412d145918473a8257706599a1088c505047eef9cc6c63c494c95786044f"
+checksum = "0dba13a271c49a119a97862ebf0a74131d879832868400d9fcd937b790058fdd"
dependencies = [
"bytemuck",
"byteorder",
"hashbrown 0.15.5",
"heed",
+"madvise",
"min-max-heap",
"page_size",
"papaya",

@@ -3749,6 +3750,15 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "670fdfda89751bc4a84ac13eaa63e205cf0fd22b4c9a5fbfa085b63c1f1d3a30"

+[[package]]
+name = "madvise"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e1e75c3c34c2b34cec9f127418cb35240c7ebee5de36a51437e6b382c161b86"
+dependencies = [
+"libc",
+]

[[package]]
name = "manifest-dir-macros"
version = "0.1.18"
@@ -1,5 +1,5 @@
# Compile
-FROM rust:1.85-alpine3.20 AS compiler
+FROM rust:1.89-alpine3.20 AS compiler

RUN apk add -q --no-cache build-base openssl-dev
@@ -97,6 +97,7 @@ impl CompatV2ToV3 {
}
}

+#[allow(clippy::large_enum_variant)]
pub enum CompatIndexV2ToV3 {
V2(v2::V2IndexReader),
Compat(Box<CompatIndexV1ToV2>),
@@ -124,7 +124,7 @@ pub fn parse_not_exists(input: Span) -> IResult<FilterCondition> {
Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Exists }))))
}

-fn parse_vectors(input: Span) -> IResult<(Token, Option<Token>, VectorFilter<'_>)> {
+fn parse_vectors(input: Span) -> IResult<(Token, Option<Token>, VectorFilter)> {
let (input, _) = multispace0(input)?;
let (input, fid) = tag("_vectors")(input)?;
@@ -116,7 +116,7 @@ impl<'a> Token<'a> {
self.span
}

-pub fn parse_finite_float(&self) -> Result<f64, Error> {
+pub fn parse_finite_float(&self) -> Result<f64, Error<'a>> {
let value: f64 = self.value().parse().map_err(|e| self.as_external_error(e))?;
if value.is_finite() {
Ok(value)

@@ -166,7 +166,7 @@ pub enum TraversedElement<'a> {
}

impl<'a> FilterCondition<'a> {
-pub fn use_contains_operator(&self) -> Option<&Token> {
+pub fn use_contains_operator(&self) -> Option<&Token<'a>> {
match self {
FilterCondition::Condition { fid: _, op } => match op {
Condition::GreaterThan(_)

@@ -193,7 +193,7 @@ impl<'a> FilterCondition<'a> {
}
}

-pub fn use_vector_filter(&self) -> Option<&Token> {
+pub fn use_vector_filter(&self) -> Option<&Token<'a>> {
match self {
FilterCondition::Condition { .. } => None,
FilterCondition::Not(this) => this.use_vector_filter(),

@@ -207,7 +207,7 @@ impl<'a> FilterCondition<'a> {
}
}

-pub fn fids(&self, depth: usize) -> Box<dyn Iterator<Item = &Token> + '_> {
+pub fn fids(&self, depth: usize) -> Box<dyn Iterator<Item = &Token<'a>> + '_> {
if depth == 0 {
return Box::new(std::iter::empty());
}

@@ -228,7 +228,7 @@ impl<'a> FilterCondition<'a> {
}

/// Returns the first token found at the specified depth, `None` if no token at this depth.
-pub fn token_at_depth(&self, depth: usize) -> Option<&Token> {
+pub fn token_at_depth(&self, depth: usize) -> Option<&Token<'a>> {
match self {
FilterCondition::Condition { fid, .. } if depth == 0 => Some(fid),
FilterCondition::Or(subfilters) => {

@@ -651,7 +651,7 @@ pub mod tests {
/// Create a raw [Token]. You must specify the string that appears BEFORE your element, followed by your element
pub fn rtok<'a>(before: &'a str, value: &'a str) -> Token<'a> {
// if the string is empty we still need to return 1 for the line number
-let lines = before.is_empty().then_some(1).unwrap_or_else(|| before.lines().count());
+let lines = if before.is_empty() { 1 } else { before.lines().count() };
let offset = before.chars().count();
// the extra field is not checked in the tests so we can set it to nothing
unsafe { Span::new_from_raw_offset(offset, lines as u32, value, "") }.into()
@@ -1,3 +1,5 @@
+#![allow(clippy::result_large_err)]
+
use std::collections::HashMap;
use std::io;
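The crate-level `#![allow(clippy::result_large_err)]` added here (and in the other crates further down) silences Clippy's warning about `Result` types whose error variant is much larger than the success variant. A minimal sketch of what the lint flags, using hypothetical types rather than Meilisearch's real error enum:

```rust
// Minimal sketch of clippy::result_large_err (hypothetical types, not from this repo).
// The lint fires when the Err variant of a returned Result is large (128 bytes by
// default), because the whole Result is then moved around at that size.
pub enum BigError {
    Io(std::io::Error),
    // A large inline payload makes the enum, and every Result carrying it, big.
    Corrupted { backtrace: [u64; 64] },
}

// Without the allow, Clippy suggests boxing the error, e.g. Result<(), Box<BigError>>.
#[allow(clippy::result_large_err)]
pub fn check() -> Result<(), BigError> {
    Ok(())
}
```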
@@ -45,7 +45,6 @@ impl From<DateField> for Code {
}
}

-#[allow(clippy::large_enum_variant)]
#[derive(Error, Debug)]
pub enum Error {
#[error("{1}")]
@@ -1,3 +1,6 @@
+// The main Error type is large and boxing the large variant makes the pattern matching fail
+#![allow(clippy::result_large_err)]
+
/*!
This crate defines the index scheduler, which is responsible for:
1. Keeping references to meilisearch's indexes and mapping them to their

@@ -344,7 +347,7 @@ impl IndexScheduler {
Ok(this)
}

-fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {
+fn read_txn(&self) -> Result<RoTxn<'_, WithoutTls>> {
self.env.read_txn().map_err(|e| e.into())
}
@@ -757,7 +760,7 @@ impl IndexScheduler {

/// Register a new task coming from a dump in the scheduler.
/// By taking a mutable ref we're pretty sure no one will ever import a dump while actix is running.
-pub fn register_dumped_task(&mut self) -> Result<Dump> {
+pub fn register_dumped_task(&mut self) -> Result<Dump<'_>> {
Dump::new(self)
}
@@ -806,10 +809,8 @@ impl IndexScheduler {
.queue
.tasks
.get_task(self.rtxn, task_id)
-.map_err(|err| io::Error::new(io::ErrorKind::Other, err))?
-.ok_or_else(|| {
-io::Error::new(io::ErrorKind::Other, Error::CorruptedTaskQueue)
-})?;
+.map_err(io::Error::other)?
+.ok_or_else(|| io::Error::other(Error::CorruptedTaskQueue))?;

serde_json::to_writer(&mut self.buffer, &TaskView::from_task(&task))?;
self.buffer.push(b'\n');
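This hunk (and the ureq one below) swaps `io::Error::new(io::ErrorKind::Other, ...)` for the shorter `io::Error::other(...)`. A small standalone sketch of the equivalence, assuming Rust 1.74 or later where `io::Error::other` is stable:

```rust
use std::io;

// Two equivalent ways to wrap an arbitrary error into std::io::Error.
fn old_style(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error {
    io::Error::new(io::ErrorKind::Other, e)
}

// The shorter form the diff migrates to.
fn new_style(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error {
    io::Error::other(e)
}
```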
@@ -326,7 +326,7 @@ impl Queue {
);

// it's safe to unwrap here because we checked the len above
-let newest_task_id = to_delete.iter().last().unwrap();
+let newest_task_id = to_delete.iter().next_back().unwrap();
let last_task_to_delete =
self.tasks.get_task(wtxn, newest_task_id)?.ok_or(Error::CorruptedTaskQueue)?;
@@ -66,6 +66,7 @@ pub(crate) enum DocumentOperation {

/// A [batch](Batch) that combines multiple tasks operating on an index.
#[derive(Debug)]
+#[allow(clippy::large_enum_variant)]
pub(crate) enum IndexOperation {
DocumentOperation {
index_uid: String,
@@ -370,7 +370,7 @@ fn ureq_error_into_error(error: ureq::Error) -> Error {
}
Err(e) => e.into(),
},
-ureq::Error::Transport(transport) => io::Error::new(io::ErrorKind::Other, transport).into(),
+ureq::Error::Transport(transport) => io::Error::other(transport).into(),
}
}
@@ -17,7 +17,7 @@ impl<'a> BytesDecode<'a> for UuidCodec {
impl BytesEncode<'_> for UuidCodec {
type EItem = Uuid;

-fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+fn bytes_encode(item: &Self::EItem) -> Result<Cow<'_, [u8]>, BoxedError> {
Ok(Cow::Borrowed(item.as_bytes()))
}
}
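Several hunks in this range only spell out previously elided lifetimes (`RoTxn<'_, WithoutTls>`, `Cow<'_, [u8]>`, `Error<'a>`, `Token<'a>`, and so on). This is presumably to satisfy the stricter lifetime-elision warnings that come with the toolchain bump to 1.89 (the `mismatched_lifetime_syntaxes` lint, if memory serves). A minimal sketch of the pattern, with a hypothetical codec type:

```rust
use std::borrow::Cow;

struct Codec;

impl Codec {
    // Before: the returned Cow borrows from `item`, but the signature hides the borrow.
    // fn encode(item: &[u8]) -> Cow<[u8]> { Cow::Borrowed(item) }

    // After: `'_` makes the elided lifetime visible without naming it.
    fn encode(item: &[u8]) -> Cow<'_, [u8]> {
        Cow::Borrowed(item)
    }
}
```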
@@ -315,7 +315,9 @@ impl<'a> heed::BytesDecode<'a> for KeyIdActionCodec {
impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
type EItem = (&'a KeyId, &'a Action, Option<&'a [u8]>);

-fn bytes_encode((key_id, action, index): &Self::EItem) -> StdResult<Cow<[u8]>, BoxedError> {
+fn bytes_encode(
+(key_id, action, index): &'_ Self::EItem,
+) -> StdResult<Cow<'_, [u8]>, BoxedError> {
let mut bytes = Vec::new();

bytes.extend_from_slice(key_id.as_bytes());
@@ -91,7 +91,7 @@ time = { version = "0.3.41", features = [
] }
tokio = { version = "1.45.1", features = ["full"] }
toml = "0.8.23"
-uuid = { version = "1.17.0", features = ["serde", "v4"] }
+uuid = { version = "1.18.0", features = ["serde", "v4", "v7"] }
serde_urlencoded = "0.7.1"
termcolor = "1.4.1"
url = { version = "2.5.4", features = ["serde"] }
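The `uuid` bump adds the `v7` feature, which the new `requestUid`/`queryUid` values in the search responses rely on via `Uuid::now_v7()`. A minimal sketch, assuming `uuid = { version = "1.18", features = ["v7"] }`:

```rust
use uuid::Uuid;

fn main() {
    // UUIDv7 values embed a timestamp, so request/query uids sort roughly by
    // creation time, which is convenient for correlating logs.
    let request_uid = Uuid::now_v7();
    println!("requestUid: {request_uid}");
}
```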
@@ -12,6 +12,7 @@ use tokio::task::JoinError;
use crate::routes::indexes::{PROXY_ORIGIN_REMOTE_HEADER, PROXY_ORIGIN_TASK_UID_HEADER};

#[derive(Debug, thiserror::Error)]
+#[allow(clippy::large_enum_variant)]
pub enum MeilisearchHttpError {
#[error("A Content-Type header is missing. Accepted values for the Content-Type header are: {}",
.0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", "))]
@@ -1,4 +1,6 @@
+#![allow(clippy::result_large_err)]
#![allow(rustdoc::private_intra_doc_links)]

#[macro_use]
pub mod error;
pub mod analytics;
@@ -13,6 +13,7 @@ use meilisearch_types::serde_cs::vec::CS;
use serde_json::Value;
use tracing::debug;
use utoipa::{IntoParams, OpenApi};
+use uuid::Uuid;

use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;

@@ -25,7 +26,7 @@ use crate::search::{
add_search_rules, perform_search, HybridQuery, MatchingStrategy, RankingScoreThreshold,
RetrieveVectors, SearchKind, SearchQuery, SearchResult, SemanticRatio, DEFAULT_CROP_LENGTH,
DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
-DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET, DEFAULT_SEMANTIC_RATIO,
+DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET, DEFAULT_SEMANTIC_RATIO, INCLUDE_METADATA_HEADER,
};
use crate::search_queue::SearchQueue;

@@ -325,7 +326,8 @@ pub async fn search_with_url_query(
req: HttpRequest,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
-debug!(parameters = ?params, "Search get");
+let request_uid = Uuid::now_v7();
+debug!(request_uid = ?request_uid, parameters = ?params, "Search get");
let index_uid = IndexUid::try_from(index_uid.into_inner())?;

let mut query: SearchQuery = params.into_inner().try_into()?;

@@ -343,6 +345,11 @@
search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors);
let permit = search_queue.try_get_search_permit().await?;
+let include_metadata = req
+.headers()
+.get(INCLUDE_METADATA_HEADER)
+.is_some();

let search_result = tokio::task::spawn_blocking(move || {
perform_search(
index_uid.to_string(),

@@ -351,6 +358,8 @@
search_kind,
retrieve_vector,
index_scheduler.features(),
+request_uid,
+include_metadata,
)
})
.await;

@@ -363,7 +372,7 @@

let search_result = search_result?;

-debug!(returns = ?search_result, "Search get");
+debug!(request_uid = ?request_uid, returns = ?search_result, "Search get");
Ok(HttpResponse::Ok().json(search_result))
}

@@ -432,9 +441,10 @@ pub async fn search_with_post(
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
+let request_uid = Uuid::now_v7();

let mut query = params.into_inner();
-debug!(parameters = ?query, "Search post");
+debug!(request_uid = ?request_uid, parameters = ?query, "Search post");

// Tenant token search_rules.
if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {

@@ -449,6 +459,11 @@
search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors);

+let include_metadata = req
+.headers()
+.get(INCLUDE_METADATA_HEADER)
+.is_some();
+
let permit = search_queue.try_get_search_permit().await?;
let search_result = tokio::task::spawn_blocking(move || {
perform_search(

@@ -458,6 +473,8 @@
search_kind,
retrieve_vectors,
index_scheduler.features(),
+request_uid,
+include_metadata,
)
})
.await;

@@ -473,7 +490,7 @@

let search_result = search_result?;

-debug!(returns = ?search_result, "Search post");
+debug!(request_uid = ?request_uid, returns = ?search_result, "Search post");
Ok(HttpResponse::Ok().json(search_result))
}
@@ -234,6 +234,8 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
facet_stats: _,
degraded,
used_negative_operator,
+request_uid: _,
+metadata: _,
} = result;

self.total_succeeded = self.total_succeeded.saturating_add(1);
@@ -180,12 +180,6 @@ pub async fn get_metrics(

let response = String::from_utf8(buffer).expect("Failed to convert bytes to string");

-// We cannot specify the version with ContentType(TEXT_PLAIN_UTF_8) so we have to write everything by hand :(
-// see the following for what should be returned: https://prometheus.io/docs/instrumenting/content_negotiation/#content-type-response
-let content_type = ("content-type", "text/plain; version=0.0.4; charset=utf-8");
-
-Ok(HttpResponse::Ok()
-// .insert_header(header::ContentType(mime::TEXT_PLAIN_UTF_8))
-.insert_header(content_type)
-.body(response))
+let content_type = ("content-type", prometheus::TEXT_FORMAT);
+Ok(HttpResponse::Ok().insert_header(content_type).body(response))
}
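The rewritten handler reuses the `prometheus` crate's `TEXT_FORMAT` constant instead of hand-writing the content-type string. A minimal sketch of the idea, assuming the `prometheus` crate is a dependency as it is in this handler:

```rust
// prometheus::TEXT_FORMAT is the crate-provided content type for the Prometheus
// text exposition format (roughly "text/plain; version=0.0.4"), so the version
// string no longer has to be maintained by hand.
fn content_type_header() -> (&'static str, &'static str) {
    ("content-type", prometheus::TEXT_FORMAT)
}
```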
@@ -9,6 +9,7 @@ use meilisearch_types::keys::actions;
|
||||
use serde::Serialize;
|
||||
use tracing::debug;
|
||||
use utoipa::{OpenApi, ToSchema};
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::multi_search_analytics::MultiSearchAggregator;
|
||||
use crate::analytics::Analytics;
|
||||
@@ -20,7 +21,7 @@ use crate::routes::indexes::search::search_kind;
|
||||
use crate::search::{
|
||||
add_search_rules, perform_federated_search, perform_search, FederatedSearch,
|
||||
FederatedSearchResult, RetrieveVectors, SearchQueryWithIndex, SearchResultWithIndex,
|
||||
PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE,
|
||||
PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE, INCLUDE_METADATA_HEADER,
|
||||
};
|
||||
use crate::search_queue::SearchQueue;
|
||||
|
||||
@@ -151,6 +152,7 @@ pub async fn multi_search_with_post(
|
||||
// Since we don't want to process half of the search requests and then get a permit refused
|
||||
// we're going to get one permit for the whole duration of the multi-search request.
|
||||
let permit = search_queue.try_get_search_permit().await?;
|
||||
let request_uid = Uuid::now_v7();
|
||||
|
||||
let federated_search = params.into_inner();
|
||||
|
||||
@@ -188,14 +190,32 @@ pub async fn multi_search_with_post(
|
||||
|
||||
let response = match federation {
|
||||
Some(federation) => {
|
||||
debug!(
|
||||
request_uid = ?request_uid,
|
||||
federation = ?federation,
|
||||
parameters = ?queries,
|
||||
"Federated-search"
|
||||
);
|
||||
|
||||
// check remote header
|
||||
let is_proxy = req
|
||||
.headers()
|
||||
.get(PROXY_SEARCH_HEADER)
|
||||
.is_some_and(|value| value.as_bytes() == PROXY_SEARCH_HEADER_VALUE.as_bytes());
|
||||
let search_result =
|
||||
perform_federated_search(&index_scheduler, queries, federation, features, is_proxy)
|
||||
.await;
|
||||
let include_metadata = req
|
||||
.headers()
|
||||
.get(INCLUDE_METADATA_HEADER)
|
||||
.is_some();
|
||||
let search_result = perform_federated_search(
|
||||
&index_scheduler,
|
||||
queries,
|
||||
federation,
|
||||
features,
|
||||
is_proxy,
|
||||
request_uid,
|
||||
include_metadata,
|
||||
)
|
||||
.await;
|
||||
permit.drop().await;
|
||||
|
||||
if search_result.is_ok() {
|
||||
@@ -203,9 +223,21 @@ pub async fn multi_search_with_post(
|
||||
}
|
||||
|
||||
analytics.publish(multi_aggregate, &req);
|
||||
|
||||
debug!(
|
||||
request_uid = ?request_uid,
|
||||
returns = ?search_result,
|
||||
"Federated-search"
|
||||
);
|
||||
|
||||
HttpResponse::Ok().json(search_result?)
|
||||
}
|
||||
None => {
|
||||
let include_metadata = req
|
||||
.headers()
|
||||
.get(INCLUDE_METADATA_HEADER)
|
||||
.is_some();
|
||||
|
||||
// Explicitly expect a `(ResponseError, usize)` for the error type rather than `ResponseError` only,
|
||||
// so that `?` doesn't work if it doesn't use `with_index`, ensuring that it is not forgotten in case of code
|
||||
// changes.
|
||||
@@ -216,7 +248,12 @@ pub async fn multi_search_with_post(
|
||||
.map(SearchQueryWithIndex::into_index_query_federation)
|
||||
.enumerate()
|
||||
{
|
||||
debug!(on_index = query_index, parameters = ?query, "Multi-search");
|
||||
debug!(
|
||||
request_uid = ?request_uid,
|
||||
on_index = query_index,
|
||||
parameters = ?query,
|
||||
"Multi-search"
|
||||
);
|
||||
|
||||
if federation_options.is_some() {
|
||||
return Err((
|
||||
@@ -258,6 +295,8 @@ pub async fn multi_search_with_post(
|
||||
search_kind,
|
||||
retrieve_vector,
|
||||
features,
|
||||
request_uid,
|
||||
include_metadata,
|
||||
)
|
||||
})
|
||||
.await
|
||||
@@ -286,7 +325,11 @@ pub async fn multi_search_with_post(
|
||||
err
|
||||
})?;
|
||||
|
||||
debug!(returns = ?search_results, "Multi-search");
|
||||
debug!(
|
||||
request_uid = ?request_uid,
|
||||
returns = ?search_results,
|
||||
"Multi-search"
|
||||
);
|
||||
|
||||
HttpResponse::Ok().json(SearchResults { results: search_results })
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ mod types;
|
||||
mod weighted_scores;
|
||||
|
||||
pub use perform::perform_federated_search;
|
||||
pub use proxy::{PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE};
|
||||
pub use proxy::{PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE, INCLUDE_METADATA_HEADER};
|
||||
pub use types::{
|
||||
FederatedSearch, FederatedSearchResult, Federation, FederationOptions, MergeFacets,
|
||||
};
|
||||
|
||||
@@ -17,8 +17,10 @@ use meilisearch_types::milli::vector::Embedding;
|
||||
use meilisearch_types::milli::{self, DocumentId, OrderBy, TimeBudget, DEFAULT_VALUES_PER_FACET};
|
||||
use roaring::RoaringBitmap;
|
||||
use tokio::task::JoinHandle;
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::super::ranking_rules::{self, RankingRules};
|
||||
use super::super::SearchMetadata;
|
||||
use super::super::{
|
||||
compute_facet_distribution_stats, prepare_search, AttributesFormat, ComputedFacets, HitMaker,
|
||||
HitsInfo, RetrieveVectors, SearchHit, SearchKind, SearchQuery, SearchQueryWithIndex,
|
||||
@@ -39,6 +41,8 @@ pub async fn perform_federated_search(
|
||||
federation: Federation,
|
||||
features: RoFeatures,
|
||||
is_proxy: bool,
|
||||
request_uid: Uuid,
|
||||
include_metadata: bool,
|
||||
) -> Result<FederatedSearchResult, ResponseError> {
|
||||
if is_proxy {
|
||||
features.check_network("Performing a remote federated search")?;
|
||||
@@ -57,14 +61,22 @@ pub async fn perform_federated_search(
|
||||
|
||||
// 1. partition queries by host and index
|
||||
let mut partitioned_queries = PartitionedQueries::new();
|
||||
|
||||
// Store the original queries order for later metadata building
|
||||
let original_queries = queries.clone();
|
||||
|
||||
for (query_index, federated_query) in queries.into_iter().enumerate() {
|
||||
partitioned_queries.partition(federated_query, query_index, &network, features)?
|
||||
}
|
||||
|
||||
// 2. perform queries, merge and make hits index by index
|
||||
// 2.1. start remote queries
|
||||
let remote_search =
|
||||
RemoteSearch::start(partitioned_queries.remote_queries_by_host, &federation, deadline);
|
||||
let remote_search = RemoteSearch::start(
|
||||
partitioned_queries.remote_queries_by_host.clone(),
|
||||
&federation,
|
||||
deadline,
|
||||
include_metadata,
|
||||
);
|
||||
|
||||
// 2.2. concurrently execute local queries
|
||||
let params = SearchByIndexParams {
|
||||
@@ -110,6 +122,56 @@ pub async fn perform_federated_search(
|
||||
let (estimated_total_hits, degraded, used_negative_operator, facets, max_remote_duration) =
|
||||
merge_metadata(&mut results_by_index, &remote_results);
|
||||
|
||||
// 3.1.1. Build metadata in the same order as the original queries
|
||||
let query_metadata = if include_metadata {
|
||||
let mut query_metadata = Vec::new();
|
||||
|
||||
// Create a map of remote results by index_uid for quick lookup
|
||||
let mut remote_results_by_index = std::collections::BTreeMap::new();
|
||||
for remote_result in &remote_results {
|
||||
if let Some(remote_metadata) = &remote_result.metadata {
|
||||
for remote_meta in remote_metadata {
|
||||
remote_results_by_index
|
||||
.insert(remote_meta.index_uid.clone(), remote_meta.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Build metadata in the same order as the original queries
|
||||
for original_query in original_queries {
|
||||
let query_uid = Uuid::now_v7();
|
||||
let index_uid = original_query.index_uid.to_string();
|
||||
|
||||
// Determine if this is a remote query
|
||||
let (_, _, federation_options) = original_query.into_index_query_federation();
|
||||
let remote = federation_options.and_then(|options| options.remote);
|
||||
|
||||
// Get primary key for this index
|
||||
let mut primary_key = None;
|
||||
|
||||
if remote.is_some() {
|
||||
// For remote queries, try to get primary key from remote results
|
||||
if let Some(remote_meta) = remote_results_by_index.get(&index_uid) {
|
||||
primary_key = remote_meta.primary_key.clone();
|
||||
}
|
||||
} else {
|
||||
// For local queries, get primary key from local index
|
||||
primary_key = index_scheduler.index(&index_uid).ok().and_then(|index| {
|
||||
index.read_txn().ok().and_then(|rtxn| {
|
||||
let pk = index.primary_key(&rtxn).ok().flatten().map(|pk| pk.to_string());
|
||||
drop(rtxn);
|
||||
pk
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
query_metadata.push(SearchMetadata { query_uid, index_uid, primary_key, remote });
|
||||
}
|
||||
Some(query_metadata)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// 3.2. merge hits
|
||||
let merged_hits: Vec<_> = merge_index_global_results(results_by_index, &mut remote_results)
|
||||
.skip(federation.offset)
|
||||
@@ -170,6 +232,8 @@ pub async fn perform_federated_search(
|
||||
facet_stats,
|
||||
facets_by_index,
|
||||
remote_errors: partitioned_queries.has_remote.then_some(remote_errors),
|
||||
request_uid: Some(request_uid),
|
||||
metadata: query_metadata,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -219,7 +283,7 @@ struct SearchResultByQueryIterItem<'a> {
|
||||
|
||||
fn merge_index_local_results(
|
||||
results_by_query: Vec<SearchResultByQuery<'_>>,
|
||||
) -> impl Iterator<Item = SearchResultByQueryIterItem> + '_ {
|
||||
) -> impl Iterator<Item = SearchResultByQueryIterItem<'_>> + '_ {
|
||||
itertools::kmerge_by(
|
||||
results_by_query.into_iter().map(SearchResultByQueryIter::new),
|
||||
|left: &SearchResultByQueryIterItem, right: &SearchResultByQueryIterItem| {
|
||||
@@ -439,6 +503,8 @@ fn merge_metadata(
|
||||
degraded: degraded_for_host,
|
||||
used_negative_operator: host_used_negative_operator,
|
||||
remote_errors: _,
|
||||
metadata: _,
|
||||
request_uid: _,
|
||||
} in remote_results
|
||||
{
|
||||
let this_remote_duration = Duration::from_millis(*processing_time_ms as u64);
|
||||
@@ -566,7 +632,12 @@ struct RemoteSearch {
|
||||
}
|
||||
|
||||
impl RemoteSearch {
|
||||
fn start(queries: RemoteQueriesByHost, federation: &Federation, deadline: Instant) -> Self {
|
||||
fn start(
|
||||
queries: RemoteQueriesByHost,
|
||||
federation: &Federation,
|
||||
deadline: Instant,
|
||||
include_metadata: bool,
|
||||
) -> Self {
|
||||
let mut in_flight_remote_queries = BTreeMap::new();
|
||||
let client = reqwest::ClientBuilder::new()
|
||||
.connect_timeout(std::time::Duration::from_millis(200))
|
||||
@@ -586,7 +657,10 @@ impl RemoteSearch {
|
||||
// never merge distant facets
|
||||
proxy_federation.merge_facets = None;
|
||||
let params = params.clone();
|
||||
async move { proxy_search(&node, queries, proxy_federation, ¶ms).await }
|
||||
async move {
|
||||
proxy_search(&node, queries, proxy_federation, ¶ms, include_metadata)
|
||||
.await
|
||||
}
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ use crate::search::SearchQueryWithIndex;

pub const PROXY_SEARCH_HEADER: &str = "Meili-Proxy-Search";
pub const PROXY_SEARCH_HEADER_VALUE: &str = "true";
+pub const INCLUDE_METADATA_HEADER: &str = "Meili-Include-Metadata";

mod error {
use meilisearch_types::error::ResponseError;
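`Meili-Include-Metadata` is opt-in: the metadata is only computed and returned when the request carries this header, and the proxy forwards it to remote nodes. A hedged sketch of how a client might set it on a search request; the URL, index name, and API key are placeholders, not values from this diff (assumes `reqwest` with the `json` feature and `serde_json`):

```rust
use reqwest::Client;
use serde_json::json;

// Hypothetical client call that opts into the new response metadata by sending
// the Meili-Include-Metadata header introduced above.
async fn search_with_metadata() -> Result<serde_json::Value, reqwest::Error> {
    Client::new()
        .post("http://localhost:7700/indexes/movies/search")
        .bearer_auth("MASTER_KEY")
        .header("Meili-Include-Metadata", "true")
        .json(&json!({ "q": "hello" }))
        .send()
        .await?
        .json()
        .await
}
```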
@@ -98,6 +99,7 @@ pub async fn proxy_search(
|
||||
queries: Vec<SearchQueryWithIndex>,
|
||||
federation: Federation,
|
||||
params: &ProxySearchParams,
|
||||
include_metadata: bool,
|
||||
) -> Result<FederatedSearchResult, ProxySearchError> {
|
||||
let url = format!("{}/multi-search", node.url);
|
||||
|
||||
@@ -114,7 +116,16 @@ pub async fn proxy_search(
|
||||
};
|
||||
|
||||
for i in 0..params.try_count {
|
||||
match try_proxy_search(&url, search_api_key, &federated, ¶ms.client, deadline).await {
|
||||
match try_proxy_search(
|
||||
&url,
|
||||
search_api_key,
|
||||
&federated,
|
||||
¶ms.client,
|
||||
deadline,
|
||||
include_metadata,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(response) => return Ok(response),
|
||||
Err(retry) => {
|
||||
let duration = retry.into_duration(i)?;
|
||||
@@ -122,7 +133,7 @@ pub async fn proxy_search(
|
||||
}
|
||||
}
|
||||
}
|
||||
try_proxy_search(&url, search_api_key, &federated, ¶ms.client, deadline)
|
||||
try_proxy_search(&url, search_api_key, &federated, ¶ms.client, deadline, include_metadata)
|
||||
.await
|
||||
.map_err(Retry::into_error)
|
||||
}
|
||||
@@ -133,6 +144,7 @@ async fn try_proxy_search(
|
||||
federated: &FederatedSearch,
|
||||
client: &Client,
|
||||
deadline: std::time::Instant,
|
||||
include_metadata: bool,
|
||||
) -> Result<FederatedSearchResult, Retry> {
|
||||
let timeout = deadline.saturating_duration_since(std::time::Instant::now());
|
||||
|
||||
@@ -143,6 +155,8 @@ async fn try_proxy_search(
|
||||
request
|
||||
};
|
||||
let request = request.header(PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE);
|
||||
let request =
|
||||
if include_metadata { request.header(INCLUDE_METADATA_HEADER, "true") } else { request };
|
||||
|
||||
let response = request.send().await;
|
||||
let response = match response {
|
||||
|
||||
@@ -16,6 +16,9 @@ use meilisearch_types::milli::order_by_map::OrderByMap;
|
||||
use meilisearch_types::milli::OrderBy;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use utoipa::ToSchema;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::search::SearchMetadata;
|
||||
|
||||
use super::super::{ComputedFacets, FacetStats, HitsInfo, SearchHit, SearchQueryWithIndex};
|
||||
use crate::milli::vector::Embedding;
|
||||
@@ -131,6 +134,10 @@ pub struct FederatedSearchResult {
|
||||
pub facet_stats: Option<BTreeMap<String, FacetStats>>,
|
||||
#[serde(default, skip_serializing_if = "FederatedFacets::is_empty")]
|
||||
pub facets_by_index: FederatedFacets,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub request_uid: Option<Uuid>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub metadata: Option<Vec<SearchMetadata>>,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub remote_errors: Option<BTreeMap<String, ResponseError>>,
|
||||
@@ -156,6 +163,8 @@ impl fmt::Debug for FederatedSearchResult {
|
||||
facet_stats,
|
||||
facets_by_index,
|
||||
remote_errors,
|
||||
request_uid,
|
||||
metadata,
|
||||
} = self;
|
||||
|
||||
let mut debug = f.debug_struct("SearchResult");
|
||||
@@ -188,6 +197,12 @@ impl fmt::Debug for FederatedSearchResult {
|
||||
if let Some(remote_errors) = remote_errors {
|
||||
debug.field("remote_errors", &remote_errors);
|
||||
}
|
||||
if let Some(request_uid) = request_uid {
|
||||
debug.field("request_uid", &request_uid);
|
||||
}
|
||||
if let Some(metadata) = metadata {
|
||||
debug.field("metadata", &metadata);
|
||||
}
|
||||
|
||||
debug.finish()
|
||||
}
|
||||
|
||||
@@ -36,13 +36,14 @@ use serde_json::{json, Value};
#[cfg(test)]
mod mod_test;
use utoipa::ToSchema;
+use uuid::Uuid;

use crate::error::MeilisearchHttpError;

mod federated;
pub use federated::{
perform_federated_search, FederatedSearch, FederatedSearchResult, Federation,
-FederationOptions, MergeFacets, PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE,
+FederationOptions, MergeFacets, PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE, INCLUDE_METADATA_HEADER,
};

mod ranking_rules;

@@ -835,6 +836,18 @@ pub struct SearchHit {
pub ranking_score_details: Option<serde_json::Map<String, serde_json::Value>>,
}

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, ToSchema)]
+#[serde(rename_all = "camelCase")]
+#[schema(rename_all = "camelCase")]
+pub struct SearchMetadata {
+pub query_uid: Uuid,
+pub index_uid: String,
+#[serde(skip_serializing_if = "Option::is_none")]
+pub primary_key: Option<String>,
+#[serde(skip_serializing_if = "Option::is_none")]
+pub remote: Option<String>,
+}
+
#[derive(Serialize, Clone, PartialEq, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
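Given the serde attributes on the `SearchMetadata` struct introduced above, one metadata entry should serialize with camelCase keys and omit `primaryKey`/`remote` when they are `None`. An illustrative sketch of the expected shape; the values are made up:

```rust
use serde_json::json;

// Illustrative shape of a single SearchMetadata entry as implied by the struct
// above (a local query, so "remote" is absent). Not taken from a real response.
fn example_metadata() -> serde_json::Value {
    json!({
        "queryUid": "0190a6a0-1234-7abc-9def-0123456789ab",
        "indexUid": "movies",
        "primaryKey": "id"
    })
}
```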
@@ -851,6 +864,10 @@ pub struct SearchResult {
|
||||
pub facet_distribution: Option<BTreeMap<String, IndexMap<String, u64>>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub facet_stats: Option<BTreeMap<String, FacetStats>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub request_uid: Option<Uuid>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub metadata: Option<SearchMetadata>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub semantic_hit_count: Option<u32>,
|
||||
@@ -872,6 +889,8 @@ impl fmt::Debug for SearchResult {
|
||||
hits_info,
|
||||
facet_distribution,
|
||||
facet_stats,
|
||||
request_uid,
|
||||
metadata,
|
||||
semantic_hit_count,
|
||||
degraded,
|
||||
used_negative_operator,
|
||||
@@ -901,6 +920,12 @@ impl fmt::Debug for SearchResult {
|
||||
if let Some(semantic_hit_count) = semantic_hit_count {
|
||||
debug.field("semantic_hit_count", &semantic_hit_count);
|
||||
}
|
||||
if let Some(request_uid) = request_uid {
|
||||
debug.field("request_uid", &request_uid);
|
||||
}
|
||||
if let Some(metadata) = metadata {
|
||||
debug.field("metadata", &metadata);
|
||||
}
|
||||
|
||||
debug.finish()
|
||||
}
|
||||
@@ -1120,8 +1145,11 @@ pub fn perform_search(
|
||||
search_kind: SearchKind,
|
||||
retrieve_vectors: RetrieveVectors,
|
||||
features: RoFeatures,
|
||||
request_uid: Uuid,
|
||||
include_metadata: bool,
|
||||
) -> Result<SearchResult, ResponseError> {
|
||||
let before_search = Instant::now();
|
||||
let index_uid_for_metadata = index_uid.clone();
|
||||
let rtxn = index.read_txn()?;
|
||||
let time_budget = match index.search_cutoff(&rtxn)? {
|
||||
Some(cutoff) => TimeBudget::new(Duration::from_millis(cutoff)),
|
||||
@@ -1142,7 +1170,20 @@ pub fn perform_search(
|
||||
query_vector,
|
||||
},
|
||||
semantic_hit_count,
|
||||
) = search_from_kind(index_uid, search_kind, search)?;
|
||||
) = search_from_kind(index_uid.clone(), search_kind, search)?;
|
||||
|
||||
let metadata = if include_metadata {
|
||||
let query_uid = Uuid::now_v7();
|
||||
let primary_key = index.primary_key(&rtxn)?.map(|pk| pk.to_string());
|
||||
Some(SearchMetadata {
|
||||
query_uid,
|
||||
index_uid: index_uid_for_metadata,
|
||||
primary_key,
|
||||
remote: None, // Local searches don't have a remote
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let SearchQuery {
|
||||
q,
|
||||
@@ -1225,7 +1266,6 @@ pub fn perform_search(
|
||||
.transpose()?
|
||||
.map(|ComputedFacets { distribution, stats }| (distribution, stats))
|
||||
.unzip();
|
||||
|
||||
let result = SearchResult {
|
||||
hits: documents,
|
||||
hits_info,
|
||||
@@ -1237,6 +1277,8 @@ pub fn perform_search(
|
||||
degraded,
|
||||
used_negative_operator,
|
||||
semantic_hit_count,
|
||||
request_uid: Some(request_uid),
|
||||
metadata,
|
||||
};
|
||||
Ok(result)
|
||||
}
|
||||
@@ -2080,7 +2122,7 @@ pub(crate) fn parse_filter(
|
||||
facets: &Value,
|
||||
filter_parsing_error_code: Code,
|
||||
features: RoFeatures,
|
||||
) -> Result<Option<Filter>, ResponseError> {
|
||||
) -> Result<Option<Filter<'_>>, ResponseError> {
|
||||
let filter = match facets {
|
||||
Value::String(expr) => Filter::from_str(expr).map_err(|e| e.into()),
|
||||
Value::Array(arr) => parse_filter_array(arr).map_err(|e| e.into()),
|
||||
@@ -2117,7 +2159,7 @@ pub(crate) fn parse_filter(
|
||||
Ok(filter)
|
||||
}
|
||||
|
||||
fn parse_filter_array(arr: &[Value]) -> Result<Option<Filter>, MeilisearchHttpError> {
|
||||
fn parse_filter_array(arr: &'_ [Value]) -> Result<Option<Filter<'_>>, MeilisearchHttpError> {
|
||||
let mut ands = Vec::new();
|
||||
for value in arr {
|
||||
match value {
|
||||
|
||||
@@ -13,9 +13,9 @@
//! What is going to happen at this point is that you're going to send a oneshot::Sender over an async mpsc channel.
//! Then, the queue/scheduler is going to either:
//! - Drop your oneshot channel => that means there are too many searches going on, and yours won't be executed.
-//! You should exit and free all the RAM you use ASAP.
+//! You should exit and free all the RAM you use ASAP.
//! - Sends you a Permit => that will unlock the method, and you will be able to process your search.
-//! And should drop the Permit only once you have freed all the RAM consumed by the method.
+//! And should drop the Permit only once you have freed all the RAM consumed by the method.

use std::num::NonZeroUsize;
use std::sync::atomic::{AtomicUsize, Ordering};
@@ -1040,7 +1040,7 @@ async fn error_single_search_forbidden_token() {
];

let failed_query_indexes: Vec<_> =
-std::iter::repeat(Some(0)).take(5).chain(std::iter::repeat(None).take(6)).collect();
+std::iter::repeat_n(Some(0), 5).chain(std::iter::repeat_n(None, 6)).collect();

let failed_query_indexes = vec![failed_query_indexes; ACCEPTED_KEYS_SINGLE.len()];

@@ -1118,10 +1118,9 @@ async fn error_multi_search_forbidden_token() {
},
];

-let failed_query_indexes: Vec<_> = std::iter::repeat(Some(0))
-.take(5)
-.chain(std::iter::repeat(Some(1)).take(5))
-.chain(std::iter::repeat(None).take(6))
+let failed_query_indexes: Vec<_> = std::iter::repeat_n(Some(0), 5)
+.chain(std::iter::repeat_n(Some(1), 5))
+.chain(std::iter::repeat_n(None, 6))
.collect();

let failed_query_indexes = vec![failed_query_indexes; ACCEPTED_KEYS_BOTH.len()];
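These tests now build the expected index lists with `std::iter::repeat_n`, which replaces the older `repeat(x).take(n)` chains. A minimal equivalence sketch (`repeat_n` is stable since Rust 1.82):

```rust
fn main() {
    // repeat_n(x, n) yields x exactly n times, so the two constructions below
    // produce the same Vec<Option<i32>>.
    let old: Vec<_> =
        std::iter::repeat(Some(0)).take(5).chain(std::iter::repeat(None).take(6)).collect();
    let new: Vec<_> =
        std::iter::repeat_n(Some(0), 5).chain(std::iter::repeat_n(None, 6)).collect();
    assert_eq!(old, new);
}
```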
@@ -1852,7 +1852,7 @@ async fn add_documents_with_geo_field() {
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
// we are expecting docs 4 and 3 first as they have geo
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }),
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }),
|
||||
@r###"
|
||||
{
|
||||
"hits": [
|
||||
@@ -1884,7 +1884,8 @@ async fn add_documents_with_geo_field() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1939,7 +1940,7 @@ async fn update_documents_with_geo_field() {
|
||||
let (response, code) = index.search_post(json!({"sort": ["_geoPoint(10,0):asc"]})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
// we are expecting docs 4 and 3 first as they have geo
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }),
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }),
|
||||
@r###"
|
||||
{
|
||||
"hits": [
|
||||
@@ -1971,7 +1972,8 @@ async fn update_documents_with_geo_field() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -2043,7 +2045,7 @@ async fn update_documents_with_geo_field() {
|
||||
let (response, code) = index.search_post(json!({"sort": ["_geoPoint(10,0):asc"]})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
// the search response should not have changed: we are expecting docs 4 and 3 first as they have geo
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }),
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }),
|
||||
@r###"
|
||||
{
|
||||
"hits": [
|
||||
@@ -2076,7 +2078,8 @@ async fn update_documents_with_geo_field() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -742,7 +742,7 @@ async fn vector_filter_all_embedders() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -762,9 +762,10 @@ async fn vector_filter_all_embedders() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -839,7 +840,7 @@ async fn vector_filter_specific_embedder() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -859,9 +860,10 @@ async fn vector_filter_specific_embedder() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -874,7 +876,7 @@ async fn vector_filter_user_provided() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -885,9 +887,10 @@ async fn vector_filter_user_provided() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -900,7 +903,7 @@ async fn vector_filter_specific_fragment() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -914,9 +917,10 @@ async fn vector_filter_specific_fragment() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
|
||||
let (value, _code) = index
|
||||
.search_post(json!({
|
||||
@@ -924,7 +928,7 @@ async fn vector_filter_specific_fragment() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -941,9 +945,10 @@ async fn vector_filter_specific_fragment() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -976,16 +981,17 @@ async fn vector_filter_document_template_but_fragments_used() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -1023,7 +1029,7 @@ async fn vector_filter_document_template() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1040,9 +1046,10 @@ async fn vector_filter_document_template() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -1075,7 +1082,7 @@ async fn vector_filter_negation() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1092,9 +1099,10 @@ async fn vector_filter_negation() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -1107,7 +1115,7 @@ async fn vector_filter_or_combination() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1124,9 +1132,10 @@ async fn vector_filter_or_combination() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -1139,7 +1148,7 @@ async fn vector_filter_regenerate() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1156,7 +1165,8 @@ async fn vector_filter_regenerate() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -33,7 +33,7 @@ async fn geo_bounding_box_with_string_and_number() {
|
||||
}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -63,7 +63,8 @@ async fn geo_bounding_box_with_string_and_number() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
},
|
||||
@@ -84,7 +85,7 @@ async fn bug_4640() {
|
||||
}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -123,7 +124,8 @@ async fn bug_4640() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
},
|
||||
@@ -147,7 +149,7 @@ async fn geo_asc_with_words() {
|
||||
&json!({"q": "jean"}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -179,7 +181,8 @@ async fn geo_asc_with_words() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
},
|
||||
@@ -192,7 +195,7 @@ async fn geo_asc_with_words() {
|
||||
&json!({"q": "bob"}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -216,7 +219,8 @@ async fn geo_asc_with_words() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
},
|
||||
@@ -229,7 +233,7 @@ async fn geo_asc_with_words() {
|
||||
&json!({"q": "intel"}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -245,7 +249,8 @@ async fn geo_asc_with_words() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
},
|
||||
@@ -269,7 +274,7 @@ async fn geo_sort_with_words() {
&json!({"q": "jean", "sort": ["_geoPoint(0.0, 0.0):asc"]}),
|response, code| {
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -304,7 +309,8 @@ async fn geo_sort_with_words() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
}
"###);
},

@@ -1,4 +1,4 @@
use meili_snap::snapshot;
use meili_snap::{json_string, snapshot};
use once_cell::sync::Lazy;

use crate::common::index::Index;
@@ -148,7 +148,7 @@ async fn simple_search() {
)
.await;
snapshot!(code, @"200 OK");
snapshot!(response, @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -209,9 +209,10 @@ async fn simple_search() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"semanticHitCount": 0
}
"#);
"###);
snapshot!(response["semanticHitCount"], @"0");

let (response, code) = index
@@ -220,7 +221,7 @@ async fn simple_search() {
)
.await;
snapshot!(code, @"200 OK");
snapshot!(response, @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -284,9 +285,10 @@ async fn simple_search() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"semanticHitCount": 2
}
"#);
"###);
snapshot!(response["semanticHitCount"], @"2");

let (response, code) = index
@@ -295,7 +297,7 @@ async fn simple_search() {
)
.await;
snapshot!(code, @"200 OK");
snapshot!(response, @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -359,9 +361,10 @@ async fn simple_search() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"semanticHitCount": 3
}
"#);
"###);
snapshot!(response["semanticHitCount"], @"3");
}

@@ -104,7 +104,7 @@ async fn simple_search() {
// english
index
.search(json!({"q": "Atta", "attributesToRetrieve": ["id"]}), |response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -115,7 +115,8 @@ async fn simple_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -125,7 +126,7 @@ async fn simple_search() {
// japanese
index
.search(json!({"q": "進撃", "attributesToRetrieve": ["id"]}), |response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -136,7 +137,8 @@ async fn simple_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -147,7 +149,7 @@ async fn simple_search() {
.search(
json!({"q": "進撃", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -158,9 +160,10 @@ async fn simple_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"#);
"###);
snapshot!(code, @"200 OK");
},
)
@@ -169,7 +172,7 @@ async fn simple_search() {
// chinese
index
.search(json!({"q": "进击", "attributesToRetrieve": ["id"]}), |response, code| {
snapshot!(response, @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -180,9 +183,10 @@ async fn simple_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"#);
"###);
snapshot!(code, @"200 OK");
})
.await;
@@ -222,7 +226,7 @@ async fn force_locales() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -233,7 +237,8 @@ async fn force_locales() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -246,7 +251,7 @@ async fn force_locales() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -257,7 +262,8 @@ async fn force_locales() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -300,7 +306,7 @@ async fn force_locales_with_pattern() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -311,7 +317,8 @@ async fn force_locales_with_pattern() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -324,7 +331,7 @@ async fn force_locales_with_pattern() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -335,7 +342,8 @@ async fn force_locales_with_pattern() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -376,14 +384,15 @@ async fn force_locales_with_pattern_nested() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -396,7 +405,7 @@ async fn force_locales_with_pattern_nested() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -407,7 +416,8 @@ async fn force_locales_with_pattern_nested() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -451,14 +461,15 @@ async fn force_different_locales_with_pattern() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -471,7 +482,7 @@ async fn force_different_locales_with_pattern() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -482,7 +493,8 @@ async fn force_different_locales_with_pattern() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -529,14 +541,15 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -549,7 +562,7 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"], "attributesToSearchOn": ["name_zh", "description_zh"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -560,7 +573,8 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -602,7 +616,7 @@ async fn auto_infer_locales_at_search() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -613,7 +627,8 @@ async fn auto_infer_locales_at_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -625,30 +640,7 @@ async fn auto_infer_locales_at_search() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
{
"hits": [
{
"id": 853
}
],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
},
)
.await;

index
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -659,7 +651,32 @@ async fn auto_infer_locales_at_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
},
)
.await;

index
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
"id": 853
}
],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -702,14 +719,15 @@ async fn force_different_locales_with_pattern_nested() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -722,31 +740,7 @@ async fn force_different_locales_with_pattern_nested() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
{
"hits": [
{
"id": 852
}
],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
},
)
.await;

// force japanese
index
.search(
json!({"q": "\"进击的巨人\"", "locales": ["ja"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -757,7 +751,33 @@ async fn force_different_locales_with_pattern_nested() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
},
)
.await;

// force japanese
index
.search(
json!({"q": "\"进击的巨人\"", "locales": ["ja"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
"id": 852
}
],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -799,14 +819,15 @@ async fn settings_change() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -819,14 +840,15 @@ async fn settings_change() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -862,14 +884,15 @@ async fn settings_change() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -882,14 +905,15 @@ async fn settings_change() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -1164,7 +1188,7 @@ async fn swedish_search() {
// infer swedish
index
.search(json!({"q": "trä", "attributesToRetrieve": ["product"]}), |response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1178,7 +1202,8 @@ async fn swedish_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -1187,7 +1212,7 @@ async fn swedish_search() {

index
.search(json!({"q": "tra", "attributesToRetrieve": ["product"]}), |response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1201,7 +1226,8 @@ async fn swedish_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -1213,7 +1239,7 @@ async fn swedish_search() {
.search(
json!({"q": "trä", "locales": ["swe"], "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1227,7 +1253,8 @@ async fn swedish_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -1238,7 +1265,7 @@ async fn swedish_search() {
.search(
json!({"q": "tra", "locales": ["swe"], "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1252,7 +1279,8 @@ async fn swedish_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");
@@ -1287,20 +1315,21 @@ async fn german_search() {
.search(
json!({"q": "kulturalität", "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(response, @r###"
{
"hits": [
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"product": "Interkulturalität"
"hits": [
{
"product": "Interkulturalität"
}
],
"query": "kulturalität",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
],
"query": "kulturalität",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
}
"###);
"###);
snapshot!(code, @"200 OK");
},
)
@@ -1310,7 +1339,7 @@ async fn german_search() {
.search(
json!({"q": "organisation", "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1321,7 +1350,8 @@ async fn german_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
snapshot!(code, @"200 OK");

@@ -1044,7 +1044,7 @@ async fn test_degraded_score_details() {
}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1103,7 +1103,8 @@ async fn test_degraded_score_details() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
}
"###);
},

|
||||
|
||||
let (response, code) = server.multi_search(json!({"federation": {}, "queries": []})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(response, @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -164,7 +165,7 @@ async fn simple_search_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
[
|
||||
{
|
||||
"indexUid": "SHARED_DOCUMENTS",
|
||||
@@ -182,7 +183,8 @@ async fn simple_search_single_index() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
},
|
||||
{
|
||||
"indexUid": "SHARED_DOCUMENTS",
|
||||
@@ -200,7 +202,8 @@ async fn simple_search_single_index() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
]
|
||||
"###);
|
||||
@@ -217,7 +220,7 @@ async fn federation_single_search_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -237,7 +240,8 @@ async fn federation_single_search_single_index() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -256,7 +260,7 @@ async fn federation_multiple_search_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -308,7 +312,8 @@ async fn federation_multiple_search_single_index() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 5
|
||||
"estimatedTotalHits": 5,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -325,7 +330,7 @@ async fn federation_two_search_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -358,7 +363,8 @@ async fn federation_two_search_single_index() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -457,7 +463,7 @@ async fn simple_search_two_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
[
|
||||
{
|
||||
"indexUid": "SHARED_DOCUMENTS",
|
||||
@@ -475,7 +481,8 @@ async fn simple_search_two_indexes() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
},
|
||||
{
|
||||
"indexUid": "SHARED_NESTED_DOCUMENTS",
|
||||
@@ -516,7 +523,8 @@ async fn simple_search_two_indexes() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
]
|
||||
"###);
|
||||
@@ -535,7 +543,7 @@ async fn federation_two_search_two_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -596,7 +604,8 @@ async fn federation_two_search_two_indexes() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -626,7 +635,7 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -795,7 +804,8 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 12
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1101,7 +1111,7 @@ async fn federation_filter() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(response, @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1140,7 +1150,8 @@ async fn federation_filter() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1177,7 +1188,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1266,7 +1277,8 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -1278,7 +1290,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1353,7 +1365,8 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1449,7 +1462,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1538,7 +1551,8 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -1551,7 +1565,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1626,7 +1640,8 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1704,7 +1719,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1831,7 +1846,8 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 10
|
||||
"estimatedTotalHits": 10,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -1844,7 +1860,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1915,7 +1931,8 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 6
|
||||
"estimatedTotalHits": 6,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1936,7 +1953,7 @@ async fn federation_sort_different_ranking_rules() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2063,7 +2080,8 @@ async fn federation_sort_different_ranking_rules() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 10
|
||||
"estimatedTotalHits": 10,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -2142,7 +2160,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2269,7 +2287,8 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 10
|
||||
"estimatedTotalHits": 10,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -2282,7 +2301,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2353,7 +2372,8 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 6
|
||||
"estimatedTotalHits": 6,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2424,7 +2444,7 @@ async fn federation_limit_offset() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2527,7 +2547,8 @@ async fn federation_limit_offset() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 12
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2549,7 +2570,7 @@ async fn federation_limit_offset() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2564,7 +2585,8 @@ async fn federation_limit_offset() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 1,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 12
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2586,7 +2608,7 @@ async fn federation_limit_offset() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2673,7 +2695,8 @@ async fn federation_limit_offset() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 2,
|
||||
"estimatedTotalHits": 12
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2695,13 +2718,14 @@ async fn federation_limit_offset() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 12,
|
||||
"estimatedTotalHits": 12
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2731,7 +2755,7 @@ async fn federation_formatting() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2861,7 +2885,8 @@ async fn federation_formatting() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 12
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2883,7 +2908,7 @@ async fn federation_formatting() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2898,7 +2923,8 @@ async fn federation_formatting() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 1,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 12
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2920,7 +2946,7 @@ async fn federation_formatting() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -3007,7 +3033,8 @@ async fn federation_formatting() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 2,
|
||||
"estimatedTotalHits": 12
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -3029,13 +3056,14 @@ async fn federation_formatting() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 12,
|
||||
"estimatedTotalHits": 12
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -3098,7 +3126,7 @@ async fn federation_null_weight() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -3137,7 +3165,8 @@ async fn federation_null_weight() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -3244,7 +3273,7 @@ async fn federation_federated_contains_facets() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -3280,7 +3309,8 @@ async fn federation_federated_contains_facets() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -3488,7 +3518,7 @@ async fn federation_vector_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -3532,7 +3562,8 @@ async fn federation_vector_single_index() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4,
|
||||
"semanticHitCount": 4
|
||||
"semanticHitCount": 4,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -3545,7 +3576,7 @@ async fn federation_vector_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -3589,7 +3620,8 @@ async fn federation_vector_single_index() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4,
|
||||
"semanticHitCount": 4
|
||||
"semanticHitCount": 4,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -3603,7 +3635,7 @@ async fn federation_vector_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -3651,7 +3683,8 @@ async fn federation_vector_single_index() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4,
|
||||
"semanticHitCount": 3
|
||||
"semanticHitCount": 3,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -3703,7 +3736,7 @@ async fn federation_vector_two_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r#"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -3922,9 +3955,10 @@ async fn federation_vector_two_indexes() {
|
||||
0.6
|
||||
]
|
||||
},
|
||||
"semanticHitCount": 6
|
||||
"semanticHitCount": 6,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
"###);
|
||||
|
||||
// hybrid search, distinct embedder
|
||||
let (response, code) = server
|
||||
@@ -3934,7 +3968,7 @@ async fn federation_vector_two_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r#"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -4161,7 +4195,8 @@ async fn federation_vector_two_indexes() {
|
||||
0.6
|
||||
]
|
||||
},
|
||||
"semanticHitCount": 8
|
||||
"semanticHitCount": 8,
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"#);
|
||||
}
|
||||
@@ -4209,7 +4244,7 @@ async fn federation_facets_different_indexes_same_facet() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -4380,7 +4415,8 @@ async fn federation_facets_different_indexes_same_facet() {
|
||||
},
|
||||
"stats": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -4399,7 +4435,7 @@ async fn federation_facets_different_indexes_same_facet() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -4541,7 +4577,8 @@ async fn federation_facets_different_indexes_same_facet() {
|
||||
"Shazam!": 1
|
||||
}
|
||||
},
|
||||
"facetStats": {}
|
||||
"facetStats": {},
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -4561,7 +4598,7 @@ async fn federation_facets_different_indexes_same_facet() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -4686,7 +4723,8 @@ async fn federation_facets_different_indexes_same_facet() {
|
||||
"distribution": {},
|
||||
"stats": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -4748,7 +4786,7 @@ async fn federation_facets_same_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -4806,7 +4844,8 @@ async fn federation_facets_same_indexes() {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -4822,7 +4861,7 @@ async fn federation_facets_same_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -4908,7 +4947,8 @@ async fn federation_facets_same_indexes() {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -4925,7 +4965,7 @@ async fn federation_facets_same_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -4987,7 +5027,8 @@ async fn federation_facets_same_indexes() {
|
||||
"min": 2.0,
|
||||
"max": 6.0
|
||||
}
|
||||
}
|
||||
},
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -5040,7 +5081,7 @@ async fn federation_inconsistent_merge_order() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -5217,7 +5258,8 @@ async fn federation_inconsistent_merge_order() {
|
||||
},
|
||||
"stats": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -5264,7 +5306,7 @@ async fn federation_inconsistent_merge_order() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -5404,7 +5446,8 @@ async fn federation_inconsistent_merge_order() {
|
||||
"Batman Returns": 1
|
||||
}
|
||||
},
|
||||
"facetStats": {}
|
||||
"facetStats": {},
|
||||
"requestUid": "[uuid]"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -230,7 +230,7 @@ async fn remote_sharding() {
|
||||
|
||||
let (response, _status_code) = ms0.multi_search(request.clone()).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -288,12 +288,13 @@ async fn remote_sharding() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 5,
|
||||
"requestUid": "[uuid]",
|
||||
"remoteErrors": {}
|
||||
}
|
||||
"###);
|
||||
let (response, _status_code) = ms1.multi_search(request.clone()).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -351,12 +352,13 @@ async fn remote_sharding() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 5,
|
||||
"requestUid": "[uuid]",
|
||||
"remoteErrors": {}
|
||||
}
|
||||
"###);
|
||||
let (response, _status_code) = ms2.multi_search(request.clone()).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -414,6 +416,7 @@ async fn remote_sharding() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 5,
|
||||
"requestUid": "[uuid]",
|
||||
"remoteErrors": {}
|
||||
}
|
||||
"###);
|
||||
@@ -595,7 +598,7 @@ async fn remote_sharding_retrieve_vectors() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -620,9 +623,10 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"#);
"###);

// multi vector search: two local queries, one remote

@@ -670,7 +674,7 @@ async fn remote_sharding_retrieve_vectors() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -695,6 +699,7 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"#);
@@ -745,7 +750,7 @@ async fn remote_sharding_retrieve_vectors() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -770,6 +775,7 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"#);
@@ -820,7 +826,7 @@ async fn remote_sharding_retrieve_vectors() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -840,9 +846,10 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"#);
"###);

// multi vector search: no local queries, all remote

@@ -890,7 +897,7 @@ async fn remote_sharding_retrieve_vectors() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -914,9 +921,10 @@ async fn remote_sharding_retrieve_vectors() {
0.2
]
},
"requestUid": "[uuid]",
"remoteErrors": {}
}
"#);
"###);
}

#[actix_rt::test]
@@ -1134,7 +1142,7 @@ async fn error_no_weighted_score() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1162,6 +1170,7 @@ async fn error_no_weighted_score() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote hit does not contain `._federation.weightedScoreValues`\n - hint: check that the remote instance is a Meilisearch instance running the same version",
@@ -1273,7 +1282,7 @@ async fn error_bad_response() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");

snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1301,6 +1310,7 @@ async fn error_bad_response() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "could not parse response from the remote host as a federated search response:\n - response from remote: <html>Returning an HTML page</html>\n - hint: check that the remote instance is a Meilisearch instance running the same version",
@@ -1405,7 +1415,7 @@ async fn error_bad_request() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");

snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1433,6 +1443,7 @@ async fn error_bad_request() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.queries[1]`: Index `nottest` not found.\",\"code\":\"index_not_found\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#index_not_found\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -1542,7 +1553,7 @@ async fn error_bad_request_facets_by_index() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");

snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1576,6 +1587,7 @@ async fn error_bad_request_facets_by_index() {
"stats": {}
}
},
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test0`: Index `test0` not found.\\n - Note: index `test0` is not used in queries\",\"code\":\"index_not_found\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#index_not_found\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -1688,7 +1700,7 @@ async fn error_bad_request_facets_by_index_facet() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");

snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -1727,6 +1739,7 @@ async fn error_bad_request_facets_by_index_facet() {
"stats": {}
}
},
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test`: Invalid facet distribution: Attribute `id` is not filterable. This index does not have configured filterable attributes.\\n - Note: index `test` used in `.queries[1]`\",\"code\":\"invalid_multi_search_facets\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#invalid_multi_search_facets\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -2036,7 +2049,7 @@ async fn error_remote_404() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -2064,6 +2077,7 @@ async fn error_remote_404() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 404:\n - response from remote: null\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -2076,7 +2090,7 @@ async fn error_remote_404() {
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -2114,6 +2128,7 @@ async fn error_remote_404() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -2245,7 +2260,7 @@ async fn error_remote_sharding_auth() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -2273,6 +2288,7 @@ async fn error_remote_sharding_auth() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1-notsearch": {
"message": "could not authenticate against the remote host\n - hint: check that the remote instance was registered with a valid API key having the `search` action",
@@ -2406,7 +2422,7 @@ async fn remote_sharding_auth() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -2454,6 +2470,7 @@ async fn remote_sharding_auth() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -2556,7 +2573,7 @@ async fn error_remote_500() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -2584,6 +2601,7 @@ async fn error_remote_500() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 500:\n - response from remote: {\"error\":\"provoked error\",\"code\":\"test_error\",\"link\":\"https://docs.meilisearch.com/errors#test_error\"}",
@@ -2597,7 +2615,7 @@ async fn error_remote_500() {
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
// the response if full because we queried the instance that works
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -2635,6 +2653,7 @@ async fn error_remote_500() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -2738,7 +2757,7 @@ async fn error_remote_500_once() {
// Meilisearch is tolerant to a single failure
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -2776,12 +2795,13 @@ async fn error_remote_500_once() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -2819,6 +2839,7 @@ async fn error_remote_500_once() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);

@@ -174,7 +174,8 @@ async fn test_issue_5274() {

snapshot!(json_string!(rep, {
".processingTimeMs" => "[ignored]",
}), @r#"
".requestUid" => "[uuid]"
}), @r###"
{
"hits": [
{
@@ -188,7 +189,8 @@ async fn test_issue_5274() {
"hitsPerPage": 1,
"page": 1,
"totalPages": 1,
"totalHits": 1
"totalHits": 1,
"requestUid": "[uuid]"
}
"#);
"###);
}

@@ -1,6 +1,5 @@
---
source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
snapshot_kind: text
---
{
"hits": [
@@ -21,5 +20,6 @@ snapshot_kind: text
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}

@@ -294,7 +294,7 @@ async fn check_the_index_features(server: &Server) {

let (results, _status) =
kefir.search_post(json!({ "sort": ["age:asc"], "filter": "surname = kefirounet" })).await;
snapshot!(results, name: "search_with_sort_and_filter");
snapshot!(json_string!(results, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), name: "search_with_sort_and_filter");

// ensuring we can get the vectors and their `regenerate` is still good.
let (results, _status) = kefir.search_post(json!({"retrieveVectors": true})).await;

@@ -323,7 +323,7 @@ async fn binary_quantize_clear_documents() {
// Make sure the vector DB has been cleared
let (documents, _code) =
index.search_post(json!({ "hybrid": { "embedder": "manual" }, "vector": [1, 1, 1] })).await;
snapshot!(documents, @r#"
snapshot!(json_string!(documents, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"query": "",
@@ -331,9 +331,10 @@ async fn binary_quantize_clear_documents() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]",
"semanticHitCount": 0
}
"#);
"###);
}

#[actix_rt::test]

@@ -257,7 +257,7 @@ async fn search_with_vector() {
json!({"vector": [1.0, 1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "rest"}, "limit": 1}
)).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r#"
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -270,9 +270,10 @@ async fn search_with_vector() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"#);
"###);
}

#[actix_rt::test]
@@ -288,7 +289,7 @@ async fn search_with_media() {
))
.await;
snapshot!(code, @"200 OK");
snapshot!(value, @r#"
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -302,9 +303,10 @@ async fn search_with_media() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"#);
"###);
}

#[actix_rt::test]
@@ -390,7 +392,7 @@ async fn search_with_query() {
))
.await;
snapshot!(code, @"200 OK");
snapshot!(value, @r#"
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -404,9 +406,10 @@ async fn search_with_query() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"#);
"###);
}

#[actix_rt::test]
@@ -2076,7 +2079,7 @@ async fn composite() {
json!({"vector": [1.0, 1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "rest"}, "limit": 1}
)).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r#"
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -2089,9 +2092,10 @@ async fn composite() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"#);
"###);

let (value, code) = index
.search_post(
@@ -2100,7 +2104,7 @@ async fn composite() {
)
.await;
snapshot!(code, @"200 OK");
snapshot!(value, @r#"
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r#"
{
"hits": [
{
@@ -2114,6 +2118,7 @@ async fn composite() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"#);

@@ -249,7 +249,7 @@ async fn user_provide_mismatched_embedding_dimension() {
"###);
}

async fn generate_default_user_provided_documents(server: &Server) -> Index {
async fn generate_default_user_provided_documents(server: &Server) -> Index<'_> {
let index = server.index("doggo");

let (response, code) = index
@@ -689,7 +689,7 @@ async fn clear_documents() {
// Make sure the vector DB has been cleared
let (documents, _code) =
index.search_post(json!({ "vector": [1, 1, 1], "hybrid": {"embedder": "manual"} })).await;
snapshot!(documents, @r#"
snapshot!(json_string!(documents, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [],
"query": "",
@@ -697,9 +697,10 @@ async fn clear_documents() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]",
"semanticHitCount": 0
}
"#);
"###);
}

#[actix_rt::test]
@@ -743,7 +744,7 @@ async fn add_remove_one_vector_4588() {
json!({"vector": [1, 1, 1], "hybrid": {"semanticRatio": 1.0, "embedder": "manual"} }),
)
.await;
snapshot!(documents, @r#"
snapshot!(json_string!(documents, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
@@ -756,9 +757,10 @@ async fn add_remove_one_vector_4588() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"#);
"###);

let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })

@@ -17,7 +17,7 @@ impl<'a> BytesDecode<'a> for UuidCodec {
impl BytesEncode<'_> for UuidCodec {
type EItem = Uuid;

fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
fn bytes_encode(item: &'_ Self::EItem) -> Result<Cow<'_, [u8]>, BoxedError> {
Ok(Cow::Borrowed(item.as_bytes()))
}
}

@@ -88,7 +88,7 @@ rhai = { version = "1.22.2", features = [
"sync",
] }
arroy = "0.6.3"
hannoy = "0.0.5"
hannoy = { version = "0.0.8", features = ["arroy"] }
rand = "0.8.5"
tracing = "0.1.41"
ureq = { version = "2.12.1", features = ["json"] }

@@ -621,7 +621,7 @@ impl From<HeedError> for Error {
// TODO use the encoding
HeedError::Encoding(_) => InternalError(Serialization(Encoding { db_name: None })),
HeedError::Decoding(_) => InternalError(Serialization(Decoding { db_name: None })),
HeedError::EnvAlreadyOpened { .. } => UserError(EnvAlreadyOpened),
HeedError::EnvAlreadyOpened => UserError(EnvAlreadyOpened),
}
}
}

@@ -19,14 +19,14 @@ impl RoaringBitmapLenCodec {
if cookie == SERIAL_COOKIE_NO_RUNCONTAINER {
(bytes.read_u32::<LittleEndian>()? as usize, true)
} else if (cookie as u16) == SERIAL_COOKIE {
return Err(io::Error::new(io::ErrorKind::Other, "run containers are unsupported"));
return Err(io::Error::other("run containers are unsupported"));
} else {
return Err(io::Error::new(io::ErrorKind::Other, "unknown cookie value"));
return Err(io::Error::other("unknown cookie value"));
}
};

if size > u16::MAX as usize + 1 {
return Err(io::Error::new(io::ErrorKind::Other, "size is greater than supported"));
return Err(io::Error::other("size is greater than supported"));
}

let mut description_bytes = vec![0u8; size * 4];

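// The hunk above swaps `io::Error::new(io::ErrorKind::Other, msg)` for the shorter
// `io::Error::other(msg)` constructor (stable in std since Rust 1.74), a cleanup likely
// prompted by the newer clippy shipped with the toolchain bump at the end of this diff.
// A minimal sketch of the equivalence:
use std::io;

fn fail_old(msg: &'static str) -> io::Result<()> {
    // Before: spell out the catch-all error kind explicitly.
    Err(io::Error::new(io::ErrorKind::Other, msg))
}

fn fail_new(msg: &'static str) -> io::Result<()> {
    // After: same `ErrorKind::Other`, built with the dedicated shorthand.
    Err(io::Error::other(msg))
}

fn io_other_sketch() {
    assert_eq!(
        fail_old("unknown cookie value").unwrap_err().kind(),
        fail_new("unknown cookie value").unwrap_err().kind(),
    );
}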
@@ -1,4 +1,5 @@
#![allow(clippy::type_complexity)]
#![allow(clippy::result_large_err)]

#[cfg(not(windows))]
#[cfg(test)]

@@ -304,7 +304,7 @@ impl ArrayView for ParseableArray<'_> {

fn get(&self, index: i64) -> Option<&dyn ValueView> {
let index = convert_index(index, self.size());
if index <= 0 {
if index < 0 {
return None;
}
let v = self.0.get(index as usize)?;

@@ -38,7 +38,7 @@ where
let highest_level = get_highest_level(rtxn, db, field_id)?;

if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
fd.iterate(candidates, highest_level, first_bound, usize::MAX)?;
let _ = fd.iterate(candidates, highest_level, first_bound, usize::MAX)?;
Ok(())
} else {
Ok(())

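// The `let _ = fd.iterate(...)?` change above keeps `?` for error propagation but
// explicitly discards the non-error part of the return value, which newer rustc/clippy
// versions would otherwise flag as an unused `#[must_use]` result. A small sketch of the
// pattern, assuming a must-use return type such as `ControlFlow` (the real return type of
// `iterate` is not shown in this diff):
use std::ops::ControlFlow;

fn iterate_sketch() -> Result<ControlFlow<()>, std::io::Error> {
    Ok(ControlFlow::Continue(()))
}

fn caller_sketch() -> Result<(), std::io::Error> {
    // `?` unwraps the Result; `let _ =` states that the ControlFlow value is
    // intentionally ignored instead of triggering an unused-result warning.
    let _ = iterate_sketch()?;
    Ok(())
}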
@@ -225,11 +225,11 @@ impl<'a> Filter<'a> {
Ok(Some(Self { condition }))
}

pub fn use_contains_operator(&self) -> Option<&Token> {
pub fn use_contains_operator(&self) -> Option<&Token<'_>> {
self.condition.use_contains_operator()
}

pub fn use_vector_filter(&self) -> Option<&Token> {
pub fn use_vector_filter(&self) -> Option<&Token<'_>> {
self.condition.use_vector_filter()
}
}

@@ -137,7 +137,7 @@ impl<'a> SearchForFacetValues<'a> {
let exact_words_fst = self.search_query.index.exact_words(rtxn)?;
if exact_words_fst.is_some_and(|fst| fst.contains(query)) {
if fst.contains(query) {
self.fetch_original_facets_using_normalized(
let _ = self.fetch_original_facets_using_normalized(
fid,
query,
query,

@@ -354,15 +354,14 @@ fn maybe_add_to_results<'ctx, Q: RankingRuleQueryTrait>(
logger.add_to_results(&candidates);
valid_docids.extend_from_slice(&candidates);
valid_scores
.extend(std::iter::repeat(ranking_rule_scores.to_owned()).take(candidates.len()));
.extend(std::iter::repeat_n(ranking_rule_scores.to_owned(), candidates.len()));
}
} else {
// if we have passed the offset already, add some of the documents (up to the limit)
let candidates = candidates.iter().take(length - valid_docids.len()).collect::<Vec<u32>>();
logger.add_to_results(&candidates);
valid_docids.extend_from_slice(&candidates);
valid_scores
.extend(std::iter::repeat(ranking_rule_scores.to_owned()).take(candidates.len()));
valid_scores.extend(std::iter::repeat_n(ranking_rule_scores.to_owned(), candidates.len()));
}

*cur_offset += candidates.len() as usize;

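// The hunk above replaces `repeat(x).take(n)` with `std::iter::repeat_n(x, n)`, the
// dedicated adapter stabilized in Rust 1.82 that newer clippy versions suggest for
// bounded repetition; both yield exactly `n` clones of the value. A minimal sketch:
fn repeat_n_sketch() {
    let score = vec![1u32, 2, 3];
    let n = 4;

    let old: Vec<Vec<u32>> = std::iter::repeat(score.clone()).take(n).collect();
    let new: Vec<Vec<u32>> = std::iter::repeat_n(score, n).collect();

    // Same output; `repeat_n` also reports its exact length up front.
    assert_eq!(old, new);
}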
@@ -193,6 +193,7 @@ impl<G: RankingRuleGraphTrait> VisitorState<G> {
visit: VisitFn<'_, G>,
ctx: &mut VisitorContext<'_, G>,
) -> Result<ControlFlow<(), bool>> {
#[allow(clippy::manual_contains)] // there is no method contains on mapped interner
if !ctx
.all_costs_from_node
.get(dest_node)
@@ -243,6 +244,8 @@ impl<G: RankingRuleGraphTrait> VisitorState<G> {

// Checking that from the destination node, there is at least
// one cost that we can visit that corresponds to our remaining budget.

#[allow(clippy::manual_contains)] // there is no contains on MappedInterner
if !ctx
.all_costs_from_node
.get(dest_node)

@@ -401,7 +401,7 @@ impl Iterator for SmallBitmapInternalIter<'_> {
*cur &= *cur - 1;
Some(idx + *base)
} else if next.is_empty() {
return None;
None
} else {
*base += 64;
*cur = next[0];

@@ -79,7 +79,7 @@ impl<'ctx, Query> Sort<'ctx, Query> {
return Ok(false);
};

Ok(!displayed_fields.iter().any(|&field| field == field_name))
Ok(!displayed_fields.contains(&field_name))
}
}

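// Two of the clippy-driven cleanups above belong to the same lint family: when a slice
// is only scanned for equality, `slice.contains(&needle)` is preferred over
// `slice.iter().any(|&v| v == needle)`, and the `#[allow(clippy::manual_contains)]`
// attributes mark the one interner type that has no `contains` method. A sketch over
// string slices (the concrete field types are an assumption, not shown in this diff):
fn contains_sketch(displayed_fields: &[&str], field_name: &str) -> bool {
    // Lint-flagged form: manual equality scan over the iterator.
    let manual = displayed_fields.iter().any(|&field| field == field_name);
    // Preferred form, as in the `Sort` hunk above: `contains` expresses the same test.
    let with_contains = displayed_fields.contains(&field_name);
    debug_assert_eq!(manual, with_contains);
    with_contains
}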
@@ -52,10 +52,10 @@ const MAX_FRAME_HEADER_SIZE: usize = 9;
/// a message in this queue only if it is empty to avoid filling
/// the channel *and* the BBQueue.
pub fn extractor_writer_bbqueue(
bbbuffers: &mut Vec<BBBuffer>,
bbbuffers: &'_ mut Vec<BBBuffer>,
total_bbbuffer_capacity: usize,
channel_capacity: usize,
) -> (ExtractorBbqueueSender, WriterBbqueueReceiver) {
) -> (ExtractorBbqueueSender<'_>, WriterBbqueueReceiver<'_>) {
let current_num_threads = rayon::current_num_threads();
let bbbuffer_capacity = total_bbbuffer_capacity.checked_div(current_num_threads).unwrap();
bbbuffers.resize_with(current_num_threads, || BBBuffer::new(bbbuffer_capacity));

@@ -6,9 +6,7 @@ use rustc_hash::FxBuildHasher;
use serde::de::{DeserializeSeed, Deserializer as _, Visitor};
use serde_json::value::RawValue;

use crate::documents::{
validate_document_id_str, DocumentIdExtractionError, FieldIdMapper, PrimaryKey,
};
use crate::documents::{validate_document_id_str, DocumentIdExtractionError, PrimaryKey};
use crate::fields_ids_map::MutFieldIdMapper;
use crate::{FieldId, UserError};

@@ -235,28 +233,6 @@ impl<'de, 'a, Mapper: MutFieldIdMapper> Visitor<'de> for MutFieldIdMapVisitor<'a
}
}

pub struct FieldIdMapVisitor<'a, Mapper: FieldIdMapper>(pub &'a Mapper);

impl<'de, Mapper: FieldIdMapper> Visitor<'de> for FieldIdMapVisitor<'_, Mapper> {
type Value = Option<FieldId>;

fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "expecting a string")
}
fn visit_borrowed_str<E>(self, v: &'de str) -> std::result::Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(self.0.id(v))
}

fn visit_str<E>(self, v: &str) -> std::result::Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(self.0.id(v))
}
}
pub struct DocumentIdVisitor<'indexer>(pub &'indexer Bump);

impl<'de, 'indexer: 'de> Visitor<'de> for DocumentIdVisitor<'indexer> {

@@ -138,7 +138,7 @@ impl<T: MostlySend> ThreadLocal<T> {
self.inner.get_or_default().as_ref()
}

pub fn iter_mut(&mut self) -> IterMut<T> {
pub fn iter_mut(&mut self) -> IterMut<'_, T> {
IterMut(self.inner.iter_mut())
}
}

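// Several hunks in this diff (Index<'_>, Token<'_>, Cow<'_, [u8]>, IterMut<'_, T>,
// ExtractorBbqueueSender<'_>) make the same mechanical change: a lifetime that used to be
// silently elided in a return type is now written as the anonymous `'_`, a style that
// newer rustc lints push toward (the toolchain is bumped to 1.89 at the end of this
// diff). A minimal sketch of the pattern:
struct Wrapper(Vec<u32>);

impl Wrapper {
    // Before: `fn iter_mut(&mut self) -> std::slice::IterMut<u32>` hides that the
    // returned iterator borrows from `self`.
    // After: the borrow is visible in the signature without naming a lifetime.
    fn iter_mut(&mut self) -> std::slice::IterMut<'_, u32> {
        self.0.iter_mut()
    }
}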
@@ -11,6 +11,7 @@ use crate::ThreadPoolNoAbort;
pub(in crate::vector) const MAX_COMPOSITE_DISTANCE: f32 = 0.01;

#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub enum SubEmbedder {
/// An embedder based on running local models, fetched from the Hugging Face Hub.
HuggingFace(hf::Embedder),

@@ -19,6 +19,7 @@ use crate::ThreadPoolNoAbort;

/// An embedder can be used to transform text into embeddings.
#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub enum Embedder {
/// An embedder based on running local models, fetched from the Hugging Face Hub.
HuggingFace(hf::Embedder),
@@ -64,6 +65,7 @@ impl EmbeddingConfig {
/// This type is serialized in and deserialized from the DB, any modification should either go
/// through dumpless upgrade or be backward-compatible
#[derive(Debug, Clone, Hash, PartialEq, Eq, serde::Deserialize, serde::Serialize)]
#[allow(clippy::large_enum_variant)]
pub enum EmbedderOptions {
HuggingFace(hf::EmbedderOptions),
OpenAi(openai::EmbedderOptions),

@@ -705,7 +705,7 @@ impl VectorStore {
&'a self,
rtxn: &'a RoTxn<'a>,
db: hannoy::Database<D>,
) -> impl Iterator<Item = Result<hannoy::Reader<'a, D>, hannoy::Error>> + 'a {
) -> impl Iterator<Item = Result<hannoy::Reader<D>, hannoy::Error>> + 'a {
vector_store_range_for_embedder(self.embedder_index).filter_map(move |index| {
match hannoy::Reader::open(rtxn, index, db) {
Ok(reader) => match reader.is_empty(rtxn) {

@@ -1,3 +1,3 @@
[toolchain]
channel = "1.85.1"
channel = "1.89.0"
components = ["clippy"]