Compare commits

5 Commits

Author      SHA1        Message                                       Date
Kerollmops  18ba2e39c0  Multithread word prefix position docids       2025-09-23 11:02:57 +02:00
Kerollmops  4fc9c3cd22  Make clippy happy                             2025-09-22 15:22:16 +02:00
Kerollmops  8ce3aa431e  Fix the algorithm                             2025-09-22 15:04:30 +02:00
Kerollmops  a9c8ad57db  Multi-thread the facet bulk processing       2025-09-22 15:04:30 +02:00
Kerollmops  9146122047  Patch heed to create multiple nested RoTxns  2025-09-22 15:04:30 +02:00
130 changed files with 1623 additions and 4577 deletions

View File

@@ -7,5 +7,6 @@ updates:
schedule:
interval: "monthly"
labels:
- 'skip changelog'
- 'dependencies'
rebase-strategy: disabled

View File

@@ -18,7 +18,6 @@ categories:
label: 'security'
- title: '⚙️ Maintenance/misc'
label:
- 'dependencies'
- 'maintenance'
- 'documentation'
template: |
@@ -27,3 +26,8 @@ template: |
❤️ Huge thanks to our contributors: $CONTRIBUTORS.
no-changes-template: 'Changes are coming soon 😎'
sort-direction: 'ascending'
replacers:
- search: '/(?:and )?@dependabot-preview(?:\[bot\])?,?/g'
replace: ''
- search: '/(?:and )?@dependabot(?:\[bot\])?,?/g'
replace: ''

View File

@@ -65,7 +65,7 @@ jobs:
uses: docker/setup-buildx-action@v3
- name: Install cosign
uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # tag=v3.10.0
uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # tag=v3.9.2
- name: Login to Docker Hub
uses: docker/login-action@v3

View File

@@ -11,7 +11,7 @@ jobs:
check-version:
name: Check the version validity
runs-on: ubuntu-latest
# No need to check the version for dry run (cron or workflow_dispatch)
# No need to check the version for dry run (cron)
steps:
- uses: actions/checkout@v5
# Check if the tag has the v<number>.<number>.<number> format.
@@ -48,7 +48,7 @@ jobs:
- uses: dtolnay/rust-toolchain@1.89
- name: Build
run: cargo build --release --locked
# No need to upload binaries for dry run (cron or workflow_dispatch)
# No need to upload binaries for dry run (cron)
- name: Upload binaries to release
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.11.2
@@ -78,7 +78,7 @@ jobs:
- uses: dtolnay/rust-toolchain@1.89
- name: Build
run: cargo build --release --locked
# No need to upload binaries for dry run (cron or workflow_dispatch)
# No need to upload binaries for dry run (cron)
- name: Upload binaries to release
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.11.2
@@ -111,7 +111,7 @@ jobs:
command: build
args: --release --target ${{ matrix.target }}
- name: Upload the binary to release
# No need to upload binaries for dry run (cron or workflow_dispatch)
# No need to upload binaries for dry run (cron)
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.11.2
with:
@@ -176,7 +176,7 @@ jobs:
- name: List target output files
run: ls -lR ./target
- name: Upload the binary to release
# No need to upload binaries for dry run (cron or workflow_dispatch)
# No need to upload binaries for dry run (cron)
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.11.2
with:
@@ -187,7 +187,6 @@ jobs:
publish-openapi-file:
name: Publish OpenAPI file
needs: check-version
runs-on: ubuntu-latest
steps:
- name: Checkout code
@@ -202,7 +201,7 @@ jobs:
cd crates/openapi-generator
cargo run --release -- --pretty --output ../../meilisearch.json
- name: Upload OpenAPI to Release
# No need to upload for dry run (cron or workflow_dispatch)
# No need to upload for dry run (cron)
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.11.2
with:

View File

@@ -50,7 +50,7 @@ jobs:
with:
repository: meilisearch/meilisearch-dotnet
- name: Setup .NET Core
uses: actions/setup-dotnet@v5
uses: actions/setup-dotnet@v4
with:
dotnet-version: "8.0.x"
- name: Install dependencies
@@ -100,7 +100,7 @@ jobs:
- '7700:7700'
steps:
- name: Set up Go
uses: actions/setup-go@v6
uses: actions/setup-go@v5
with:
go-version: stable
- uses: actions/checkout@v5
@@ -135,13 +135,13 @@ jobs:
- name: Set up Java
uses: actions/setup-java@v5
with:
java-version: 17
distribution: 'temurin'
java-version: 8
distribution: 'zulu'
cache: gradle
- name: Grant execute permission for gradlew
run: chmod +x gradlew
- name: Build and run unit and integration tests
run: ./gradlew build integrationTest --info
run: ./gradlew build integrationTest
meilisearch-js-tests:
needs: define-docker-image
@@ -160,7 +160,7 @@ jobs:
with:
repository: meilisearch/meilisearch-js
- name: Setup node
uses: actions/setup-node@v5
uses: actions/setup-node@v4
with:
cache: 'yarn'
- name: Install dependencies
@@ -224,7 +224,7 @@ jobs:
with:
repository: meilisearch/meilisearch-python
- name: Set up Python
uses: actions/setup-python@v6
uses: actions/setup-python@v5
- name: Install pipenv
uses: dschep/install-pipenv-action@v1
- name: Install dependencies
@@ -318,7 +318,7 @@ jobs:
with:
repository: meilisearch/meilisearch-js-plugins
- name: Setup node
uses: actions/setup-node@v5
uses: actions/setup-node@v4
with:
cache: yarn
- name: Install dependencies

Cargo.lock (generated, 1749 lines changed)

File diff suppressed because it is too large

View File

@@ -23,7 +23,7 @@ members = [
]
[workspace.package]
version = "1.22.1"
version = "1.21.0"
authors = [
"Quentin de Quelen <quentin@dequelen.me>",
"Clément Renault <clement@meilisearch.com>",
@@ -50,3 +50,6 @@ opt-level = 3
opt-level = 3
[profile.dev.package.roaring]
opt-level = 3
[patch.crates-io]
heed = { git = "https://github.com/meilisearch/heed", branch = "allow-nested-rtxn-from-wtxn" }
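
The `[patch.crates-io]` entry above is what the rest of this comparison builds on: the `allow-nested-rtxn-from-wtxn` branch of heed allows read-only transactions to be opened while a write transaction is still live, which is how the facet bulk processing and word-prefix-position-docids commits spread their work across threads. A minimal sketch of the intended pattern, assuming a hypothetical `nested_read_txn` constructor (the name actually exposed by the patched branch may differ):

use std::thread;

// Sketch only: `nested_read_txn` stands in for whatever the
// `allow-nested-rtxn-from-wtxn` branch exposes, presumably an LMDB
// nested transaction whose parent is the write transaction.
fn parallel_pass(env: &heed::Env, chunks: Vec<Vec<u32>>) -> heed::Result<()> {
    let wtxn = env.write_txn()?;
    thread::scope(|s| {
        for chunk in chunks {
            // Hypothetical: a read-only view parented to `wtxn`, letting a
            // worker observe its uncommitted writes without write access.
            let rtxn = env.nested_read_txn(&wtxn)?;
            s.spawn(move || {
                for _id in chunk { /* read-side work against `rtxn` */ }
                drop(rtxn);
            });
        }
        Ok::<_, heed::Error>(())
    })?;
    wtxn.commit()
}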

View File

@@ -121,7 +121,7 @@ If you want to know more about the kind of data we collect and what we use it fo
Meilisearch is a search engine created by [Meili](https://www.meilisearch.com/careers), a software development company headquartered in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=contact)
🗞 [Subscribe to our newsletter](https://share-eu1.hsforms.com/1LN5N0x_GQgq7ss7tXmSykwfg3aq) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.
🗞 [Subscribe to our newsletter](https://meilisearch.us2.list-manage.com/subscribe?u=27870f7b71c908a8b359599fb&id=79582d828e) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.
💌 Want to make a suggestion or give feedback? Here are some of the channels where you can reach us:

View File

@@ -158,9 +158,6 @@ pub enum KindDump {
UpgradeDatabase {
from: (u32, u32, u32),
},
IndexCompaction {
index_uid: String,
},
}
impl From<Task> for TaskDump {
@@ -243,9 +240,6 @@ impl From<KindWithContent> for KindDump {
KindWithContent::UpgradeDatabase { from: version } => {
KindDump::UpgradeDatabase { from: version }
}
KindWithContent::IndexCompaction { index_uid } => {
KindDump::IndexCompaction { index_uid }
}
}
}
}

View File

@@ -7,14 +7,23 @@
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::character::complete::{char, multispace0, multispace1};
use nom::combinator::{cut, map, value};
use nom::sequence::{preceded, terminated, tuple};
use nom::character::complete::char;
use nom::character::complete::multispace0;
use nom::character::complete::multispace1;
use nom::combinator::cut;
use nom::combinator::map;
use nom::combinator::value;
use nom::sequence::preceded;
use nom::sequence::{terminated, tuple};
use Condition::*;
use crate::error::IResultExt;
use crate::value::{parse_vector_value, parse_vector_value_cut};
use crate::{parse_value, Error, ErrorKind, FilterCondition, IResult, Span, Token, VectorFilter};
use crate::value::parse_vector_value;
use crate::value::parse_vector_value_cut;
use crate::Error;
use crate::ErrorKind;
use crate::VectorFilter;
use crate::{parse_value, FilterCondition, IResult, Span, Token};
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Condition<'a> {

View File

@@ -75,11 +75,7 @@ pub enum ExpectedValueKind {
pub enum ErrorKind<'a> {
ReservedGeo(&'a str),
GeoRadius,
GeoRadiusArgumentCount(usize),
GeoBoundingBox,
GeoPolygon,
GeoPolygonNotEnoughPoints(usize),
GeoCoordinatesNotPair(usize),
MisusedGeoRadius,
MisusedGeoBoundingBox,
VectorFilterLeftover,
@@ -193,7 +189,7 @@ impl Display for Error<'_> {
}
ErrorKind::InvalidPrimary => {
let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) };
writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` {text}")?
writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
}
ErrorKind::InvalidEscapedNumber => {
writeln!(f, "Found an invalid escaped sequence number: `{}`.", escaped_input)?
@@ -202,23 +198,11 @@ impl Display for Error<'_> {
writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)?
}
ErrorKind::GeoRadius => {
writeln!(f, "The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.")?
}
ErrorKind::GeoRadiusArgumentCount(count) => {
writeln!(f, "Was expecting 3 or 4 arguments for `_geoRadius`, but instead found {count}.")?
writeln!(f, "The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.")?
}
ErrorKind::GeoBoundingBox => {
writeln!(f, "The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.")?
}
ErrorKind::GeoPolygon => {
writeln!(f, "The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.")?
}
ErrorKind::GeoPolygonNotEnoughPoints(n) => {
writeln!(f, "The `_geoPolygon` filter expects at least 3 points but only {n} were specified")?;
}
ErrorKind::GeoCoordinatesNotPair(number) => {
writeln!(f, "Was expecting 2 coordinates but instead found {number}.")?
}
ErrorKind::ReservedGeo(name) => {
writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.", name.escape_debug())?
}

View File

@@ -19,7 +19,6 @@
//! word = (alphanumeric | _ | - | .)+
//! geoRadius = "_geoRadius(" WS* float WS* "," WS* float WS* "," float WS* ")"
//! geoBoundingBox = "_geoBoundingBox([" WS * float WS* "," WS* float WS* "], [" WS* float WS* "," WS* float WS* "]")
//! geoPolygon = "_geoPolygon([[" WS* float WS* "," WS* float WS* "],+])"
//! ```
//!
//! Other BNF grammar used to handle some specific errors:
@@ -157,9 +156,8 @@ pub enum FilterCondition<'a> {
Or(Vec<Self>),
And(Vec<Self>),
VectorExists { fid: Token<'a>, embedder: Option<Token<'a>>, filter: VectorFilter<'a> },
GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a>, resolution: Option<Token<'a>> },
GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a> },
GeoBoundingBox { top_right_point: [Token<'a>; 2], bottom_left_point: [Token<'a>; 2] },
GeoPolygon { points: Vec<[Token<'a>; 2]> },
}
pub enum TraversedElement<'a> {
@@ -191,7 +189,6 @@ impl<'a> FilterCondition<'a> {
FilterCondition::VectorExists { .. }
| FilterCondition::GeoLowerThan { .. }
| FilterCondition::GeoBoundingBox { .. }
| FilterCondition::GeoPolygon { .. }
| FilterCondition::In { .. } => None,
}
}
@@ -205,7 +202,6 @@ impl<'a> FilterCondition<'a> {
}
FilterCondition::GeoLowerThan { .. }
| FilterCondition::GeoBoundingBox { .. }
| FilterCondition::GeoPolygon { .. }
| FilterCondition::In { .. } => None,
FilterCondition::VectorExists { fid, .. } => Some(fid),
}
@@ -400,27 +396,23 @@ fn parse_not(input: Span, depth: usize) -> IResult<FilterCondition> {
/// If we parse `_geoRadius` we MUST parse the rest of the expression.
fn parse_geo_radius(input: Span) -> IResult<FilterCondition> {
// we want to allow space BEFORE the _geoRadius but not after
let (input, _) = tuple((multispace0, word_exact("_geoRadius")))(input)?;
// if we were able to parse `_geoRadius` and can't parse the rest of the input we return a failure
let parsed =
delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))(input)
.map_cut(ErrorKind::GeoRadius);
let parsed = preceded(
tuple((multispace0, word_exact("_geoRadius"))),
// if we were able to parse `_geoRadius` and can't parse the rest of the input we return a failure
cut(delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))),
)(input)
.map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoRadius)));
let (input, args) = parsed?;
if !(3..=4).contains(&args.len()) {
return Err(Error::failure_from_kind(input, ErrorKind::GeoRadiusArgumentCount(args.len())));
if args.len() != 3 {
return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::GeoRadius)));
}
let res = FilterCondition::GeoLowerThan {
point: [args[0].into(), args[1].into()],
radius: args[2].into(),
resolution: args.get(3).cloned().map(Token::from),
};
Ok((input, res))
}
@@ -428,33 +420,26 @@ fn parse_geo_radius(input: Span) -> IResult<FilterCondition> {
/// If we parse `_geoBoundingBox` we MUST parse the rest of the expression.
fn parse_geo_bounding_box(input: Span) -> IResult<FilterCondition> {
// we want to allow space BEFORE the _geoBoundingBox but not after
let (input, _) = tuple((multispace0, word_exact("_geoBoundingBox")))(input)?;
// if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure
let (input, args) = delimited(
char('('),
separated_list1(
tag(","),
ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
),
char(')'),
let parsed = preceded(
tuple((multispace0, word_exact("_geoBoundingBox"))),
// if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure
cut(delimited(
char('('),
separated_list1(
tag(","),
ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
),
char(')'),
)),
)(input)
.map_cut(ErrorKind::GeoBoundingBox)?;
.map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoBoundingBox)));
if args.len() != 2 {
let (input, args) = parsed?;
if args.len() != 2 || args[0].len() != 2 || args[1].len() != 2 {
return Err(Error::failure_from_kind(input, ErrorKind::GeoBoundingBox));
}
if let Some(offending) = args.iter().find(|a| a.len() != 2) {
let context = offending.first().unwrap_or(&input);
return Err(Error::failure_from_kind(
*context,
ErrorKind::GeoCoordinatesNotPair(offending.len()),
));
}
let res = FilterCondition::GeoBoundingBox {
top_right_point: [args[0][0].into(), args[0][1].into()],
bottom_left_point: [args[1][0].into(), args[1][1].into()],
@@ -462,47 +447,6 @@ fn parse_geo_bounding_box(input: Span) -> IResult<FilterCondition> {
Ok((input, res))
}
/// geoPolygon = "_geoPolygon([[" WS* float WS* "," WS* float WS* "],+])"
/// If we parse `_geoPolygon` we MUST parse the rest of the expression.
fn parse_geo_polygon(input: Span) -> IResult<FilterCondition> {
// we want to allow space BEFORE the _geoPolygon but not after
let (input, _) = tuple((multispace0, word_exact("_geoPolygon")))(input)?;
// if we were able to parse `_geoPolygon` and can't parse the rest of the input we return a failure
let (input, args): (_, Vec<Vec<LocatedSpan<_, _>>>) = delimited(
char('('),
separated_list1(
tag(","),
ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
),
preceded(opt(ws(char(','))), char(')')), // Tolerate trailing comma
)(input)
.map_cut(ErrorKind::GeoPolygon)?;
if args.len() < 3 {
let context = args.last().and_then(|a| a.last()).unwrap_or(&input);
return Err(Error::failure_from_kind(
*context,
ErrorKind::GeoPolygonNotEnoughPoints(args.len()),
));
}
if let Some(offending) = args.iter().find(|a| a.len() != 2) {
let context = offending.first().unwrap_or(&input);
return Err(Error::failure_from_kind(
*context,
ErrorKind::GeoCoordinatesNotPair(offending.len()),
));
}
let res = FilterCondition::GeoPolygon {
points: args.into_iter().map(|a| [a[0].into(), a[1].into()]).collect(),
};
Ok((input, res))
}
/// geoPoint = WS* "_geoPoint(float WS* "," WS* float WS* "," WS* float)
fn parse_geo_point(input: Span) -> IResult<FilterCondition> {
// we want to forbid space BEFORE the _geoPoint but not after
@@ -572,8 +516,8 @@ fn parse_primary(input: Span, depth: usize) -> IResult<FilterCondition> {
Error::new_from_kind(input, ErrorKind::MissingClosingDelimiter(c.char()))
}),
),
// Made a random block of functions because we reached the maximum number of elements per alt
alt((parse_geo_radius, parse_geo_bounding_box, parse_geo_polygon)),
parse_geo_radius,
parse_geo_bounding_box,
parse_in,
parse_not_in,
parse_condition,
@@ -653,12 +597,9 @@ impl std::fmt::Display for FilterCondition<'_> {
}
write!(f, " EXISTS")
}
FilterCondition::GeoLowerThan { point, radius, resolution: None } => {
FilterCondition::GeoLowerThan { point, radius } => {
write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
}
FilterCondition::GeoLowerThan { point, radius, resolution: Some(resolution) } => {
write!(f, "_geoRadius({}, {}, {}, {})", point[0], point[1], radius, resolution)
}
FilterCondition::GeoBoundingBox {
top_right_point: top_left_point,
bottom_left_point: bottom_right_point,
@@ -672,13 +613,6 @@ impl std::fmt::Display for FilterCondition<'_> {
bottom_right_point[1]
)
}
FilterCondition::GeoPolygon { points } => {
write!(f, "_geoPolygon([")?;
for point in points {
write!(f, "[{}, {}], ", point[0], point[1])?;
}
write!(f, "])")
}
}
}
}
@@ -842,17 +776,12 @@ pub mod tests {
insta::assert_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})");
insta::assert_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))");
insta::assert_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})");
insta::assert_snapshot!(p("_geoRadius(12,13,14,1000)"), @"_geoRadius({12}, {13}, {14}, {1000})");
// Test geo bounding box
insta::assert_snapshot!(p("_geoBoundingBox([12, 13], [14, 15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
insta::assert_snapshot!(p("NOT _geoBoundingBox([12, 13], [14, 15])"), @"NOT (_geoBoundingBox([{12}, {13}], [{14}, {15}]))");
insta::assert_snapshot!(p("_geoBoundingBox([12,13],[14,15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
// Test geo polygon
insta::assert_snapshot!(p("_geoPolygon([12, 13], [14, 15], [16, 17])"), @"_geoPolygon([[{12}, {13}], [{14}, {15}], [{16}, {17}], ])");
insta::assert_snapshot!(p("_geoPolygon([12, 13], [14, 15], [-1.2,2939.2], [1,1])"), @"_geoPolygon([[{12}, {13}], [{14}, {15}], [{-1.2}, {2939.2}], [{1}, {1}], ])");
// Test OR + AND
insta::assert_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
insta::assert_snapshot!(p("channel = ponce OR 'dog race' != 'bernese mountain'"), @"OR[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
@@ -909,80 +838,50 @@ pub mod tests {
11:12 channel = 🐻 AND followers < 100
"###);
insta::assert_snapshot!(p("'OR'"), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `\'OR\'`.
insta::assert_snapshot!(p("'OR'"), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
1:5 'OR'
");
"###);
insta::assert_snapshot!(p("OR"), @r###"
Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
1:3 OR
"###);
insta::assert_snapshot!(p("channel Ponce"), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `channel Ponce`.
insta::assert_snapshot!(p("channel Ponce"), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
1:14 channel Ponce
");
"###);
insta::assert_snapshot!(p("channel = Ponce OR"), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` but instead got nothing.
insta::assert_snapshot!(p("channel = Ponce OR"), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
19:19 channel = Ponce OR
");
"###);
insta::assert_snapshot!(p("_geoRadius"), @r"
The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.
11:11 _geoRadius
");
insta::assert_snapshot!(p("_geoRadius"), @r###"
The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
1:11 _geoRadius
"###);
insta::assert_snapshot!(p("_geoRadius = 12"), @r"
The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.
11:16 _geoRadius = 12
");
insta::assert_snapshot!(p("_geoRadius = 12"), @r###"
The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
1:16 _geoRadius = 12
"###);
insta::assert_snapshot!(p("_geoBoundingBox"), @r"
insta::assert_snapshot!(p("_geoBoundingBox"), @r###"
The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
16:16 _geoBoundingBox
");
1:16 _geoBoundingBox
"###);
insta::assert_snapshot!(p("_geoBoundingBox = 12"), @r"
insta::assert_snapshot!(p("_geoBoundingBox = 12"), @r###"
The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
16:21 _geoBoundingBox = 12
");
1:21 _geoBoundingBox = 12
"###);
insta::assert_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r"
insta::assert_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###"
The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
17:26 _geoBoundingBox(1.0, 1.0)
");
insta::assert_snapshot!(p("_geoPolygon([1,2,3])"), @r"
The `_geoPolygon` filter expects at least 3 points but only 1 were specified
18:19 _geoPolygon([1,2,3])
");
insta::assert_snapshot!(p("_geoPolygon(1,2,3)"), @r"
The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
13:19 _geoPolygon(1,2,3)
");
insta::assert_snapshot!(p("_geoPolygon([1,2],[1,2],[1,2,3])"), @r"
Was expecting 2 coordinates but instead found 3.
26:27 _geoPolygon([1,2],[1,2],[1,2,3])
");
insta::assert_snapshot!(p("_geoPolygon([1,2],[1,2,3])"), @r"
The `_geoPolygon` filter expects at least 3 points but only 2 were specified
24:25 _geoPolygon([1,2],[1,2,3])
");
insta::assert_snapshot!(p("_geoPolygon(1)"), @r"
The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
13:15 _geoPolygon(1)
");
insta::assert_snapshot!(p("_geoPolygon([1,2)"), @r"
The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
17:18 _geoPolygon([1,2)
");
1:26 _geoBoundingBox(1.0, 1.0)
"###);
insta::assert_snapshot!(p("_geoPoint(12, 13, 14)"), @r###"
`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
@@ -1039,15 +938,15 @@ pub mod tests {
34:35 channel = mv OR followers >= 1000)
"###);
insta::assert_snapshot!(p("colour NOT EXIST"), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `colour NOT EXIST`.
insta::assert_snapshot!(p("colour NOT EXIST"), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
1:17 colour NOT EXIST
");
"###);
insta::assert_snapshot!(p("subscribers 100 TO1000"), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `subscribers 100 TO1000`.
insta::assert_snapshot!(p("subscribers 100 TO1000"), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
1:23 subscribers 100 TO1000
");
"###);
insta::assert_snapshot!(p("channel = ponce ORdog != 'bernese mountain'"), @r###"
Found unexpected characters at the end of the filter: `ORdog != \'bernese mountain\'`. You probably forgot an `OR` or an `AND` rule.
@@ -1172,38 +1071,38 @@ pub mod tests {
5:7 NOT OR EXISTS AND EXISTS NOT EXISTS
"###);
insta::assert_snapshot!(p(r#"value NULL"#), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NULL`.
insta::assert_snapshot!(p(r#"value NULL"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`.
1:11 value NULL
");
insta::assert_snapshot!(p(r#"value NOT NULL"#), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NOT NULL`.
"###);
insta::assert_snapshot!(p(r#"value NOT NULL"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`.
1:15 value NOT NULL
");
insta::assert_snapshot!(p(r#"value EMPTY"#), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value EMPTY`.
"###);
insta::assert_snapshot!(p(r#"value EMPTY"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`.
1:12 value EMPTY
");
insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NOT EMPTY`.
"###);
insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`.
1:16 value NOT EMPTY
");
insta::assert_snapshot!(p(r#"value IS"#), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS`.
"###);
insta::assert_snapshot!(p(r#"value IS"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS`.
1:9 value IS
");
insta::assert_snapshot!(p(r#"value IS NOT"#), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS NOT`.
"###);
insta::assert_snapshot!(p(r#"value IS NOT"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`.
1:13 value IS NOT
");
insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS EXISTS`.
"###);
insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`.
1:16 value IS EXISTS
");
insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS NOT EXISTS`.
"###);
insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`.
1:20 value IS NOT EXISTS
");
"###);
}
#[test]

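Both sides of the geo-filter rewrite shown above lean on the same nom idiom: once the literal keyword (`_geoRadius`, `_geoBoundingBox`, ...) has matched, the rest of the expression is mandatory, so the argument parser is wrapped in `cut` to turn a recoverable `Err::Error` into a terminal `Err::Failure` that `alt` will not backtrack over. A standalone sketch of that idiom, using plain `&str` input instead of the crate's `Span`/`IResult` aliases:

use nom::bytes::complete::tag;
use nom::character::complete::{char, digit1, multispace0};
use nom::combinator::cut;
use nom::multi::separated_list1;
use nom::sequence::{delimited, preceded};
use nom::IResult;

// Once `_geoRadius` has matched, the parenthesized argument list MUST
// follow: `cut` promotes any error inside it to a failure, so callers
// report "malformed _geoRadius" instead of trying other alternatives.
fn geo_radius(input: &str) -> IResult<&str, Vec<&str>> {
    preceded(
        preceded(multispace0, tag("_geoRadius")),
        cut(delimited(
            char('('),
            separated_list1(char(','), preceded(multispace0, digit1)),
            char(')'),
        )),
    )(input)
}

fn main() {
    assert!(geo_radius("_geoRadius(12, 13, 14)").is_ok());
    // The keyword matched but the argument list is malformed:
    // this is a Failure, not a plain Error.
    assert!(matches!(geo_radius("_geoRadius = 12"), Err(nom::Err::Failure(_))));
}
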
View File

@@ -234,9 +234,6 @@ impl<'a> Dump<'a> {
}
}
KindDump::UpgradeDatabase { from } => KindWithContent::UpgradeDatabase { from },
KindDump::IndexCompaction { index_uid } => {
KindWithContent::IndexCompaction { index_uid }
}
},
};

View File

@@ -45,7 +45,6 @@ impl From<DateField> for Code {
}
}
#[allow(clippy::large_enum_variant)]
#[derive(Error, Debug)]
pub enum Error {
#[error("{1}")]

View File

@@ -341,26 +341,6 @@ impl IndexMapper {
Ok(())
}
/// Closes the specified index.
///
/// This operation involves closing the underlying environment and so can take a long time to complete.
///
/// # Panics
///
/// - If the Index corresponding to the passed name is concurrently being deleted/resized or cannot be found in the
/// in memory hash map.
pub fn close_index(&self, rtxn: &RoTxn, name: &str) -> Result<()> {
let uuid = self
.index_mapping
.get(rtxn, name)?
.ok_or_else(|| Error::IndexNotFound(name.to_string()))?;
// We remove the index from the in-memory index map.
self.index_map.write().unwrap().close_for_resize(&uuid, self.enable_mdb_writemap, 0);
Ok(())
}
/// Return an index, may open it if it wasn't already opened.
pub fn index(&self, rtxn: &RoTxn, name: &str) -> Result<Index> {
if let Some((current_name, current_index)) =

View File

@@ -317,9 +317,6 @@ fn snapshot_details(d: &Details) -> String {
Details::UpgradeDatabase { from, to } => {
format!("{{ from: {from:?}, to: {to:?} }}")
}
Details::IndexCompaction { index_uid, pre_compaction_size, post_compaction_size } => {
format!("{{ index_uid: {index_uid:?}, pre_compaction_size: {pre_compaction_size:?}, post_compaction_size: {post_compaction_size:?} }}")
}
}
}

View File

@@ -138,17 +138,6 @@ make_enum_progress! {
}
}
make_enum_progress! {
pub enum IndexCompaction {
RetrieveTheIndex,
CreateTemporaryFile,
CopyAndCompactTheIndex,
PersistTheCompactedIndex,
CloseTheIndex,
ReopenTheIndex,
}
}
make_enum_progress! {
pub enum InnerSwappingTwoIndexes {
RetrieveTheTasks,

View File

@@ -310,8 +310,7 @@ impl Queue {
| self.tasks.status.get(wtxn, &Status::Failed)?.unwrap_or_default()
| self.tasks.status.get(wtxn, &Status::Canceled)?.unwrap_or_default();
let to_delete =
RoaringBitmap::from_sorted_iter(finished.into_iter().take(100_000)).unwrap();
let to_delete = RoaringBitmap::from_iter(finished.into_iter().rev().take(100_000));
// /!\ the len must be at least 2 or else we might enter an infinite loop where we only delete
// the deletion tasks we enqueued ourselves.
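
The two sides here differ in which 100,000 finished tasks get collected and how: `from_sorted_iter` is the cheap constructor but demands strictly ascending ids (it returns a `Result`), while iterating the bitmap in reverse keeps the most recent ids and produces a descending sequence that only the order-agnostic `FromIterator` path accepts. A small illustration of that constraint with the roaring crate:

use roaring::RoaringBitmap;

fn main() {
    let finished: Vec<u32> = (0..10).collect();

    // Ascending ids: the sorted constructor accepts them.
    let oldest =
        RoaringBitmap::from_sorted_iter(finished.iter().copied().take(5)).unwrap();
    assert_eq!(oldest.len(), 5);

    // Reversed iteration keeps the *newest* ids, but the sequence is now
    // descending, so `from_sorted_iter` would refuse it; `collect` (the
    // `FromIterator` impl) accepts any order.
    let newest: RoaringBitmap = finished.iter().copied().rev().take(5).collect();
    assert_eq!(newest.min(), Some(5));
}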

View File

@@ -25,7 +25,6 @@ enum AutobatchKind {
IndexDeletion,
IndexUpdate,
IndexSwap,
IndexCompaction,
}
impl AutobatchKind {
@@ -69,7 +68,6 @@ impl From<KindWithContent> for AutobatchKind {
KindWithContent::IndexCreation { .. } => AutobatchKind::IndexCreation,
KindWithContent::IndexUpdate { .. } => AutobatchKind::IndexUpdate,
KindWithContent::IndexSwap { .. } => AutobatchKind::IndexSwap,
KindWithContent::IndexCompaction { .. } => AutobatchKind::IndexCompaction,
KindWithContent::TaskCancelation { .. }
| KindWithContent::TaskDeletion { .. }
| KindWithContent::DumpCreation { .. }
@@ -120,9 +118,6 @@ pub enum BatchKind {
IndexSwap {
id: TaskId,
},
IndexCompaction {
id: TaskId,
},
}
impl BatchKind {
@@ -188,13 +183,6 @@ impl BatchKind {
)),
false,
),
K::IndexCompaction => (
Break((
BatchKind::IndexCompaction { id: task_id },
BatchStopReason::TaskCannotBeBatched { kind, id: task_id },
)),
false,
),
K::DocumentClear => (Continue(BatchKind::DocumentClear { ids: vec![task_id] }), false),
K::DocumentImport { allow_index_creation, primary_key: pk }
if primary_key.is_none() || pk.is_none() || primary_key == pk.as_deref() =>
@@ -299,10 +287,8 @@ impl BatchKind {
};
match (self, autobatch_kind) {
// We don't batch any of these operations
(this, K::IndexCreation | K::IndexUpdate | K::IndexSwap | K::DocumentEdition | K::IndexCompaction) => {
Break((this, BatchStopReason::TaskCannotBeBatched { kind, id }))
},
// We don't batch any of these operations
(this, K::IndexCreation | K::IndexUpdate | K::IndexSwap | K::DocumentEdition) => Break((this, BatchStopReason::TaskCannotBeBatched { kind, id })),
// We must not batch tasks that don't have the same index creation rights if the index doesn't already exist.
(this, kind) if !index_already_exists && this.allow_index_creation() == Some(false) && kind.allow_index_creation() == Some(true) => {
Break((this, BatchStopReason::IndexCreationMismatch { id }))
@@ -497,7 +483,6 @@ impl BatchKind {
| BatchKind::IndexDeletion { .. }
| BatchKind::IndexUpdate { .. }
| BatchKind::IndexSwap { .. }
| BatchKind::IndexCompaction { .. }
| BatchKind::DocumentEdition { .. },
_,
) => {

View File

@@ -55,10 +55,6 @@ pub(crate) enum Batch {
UpgradeDatabase {
tasks: Vec<Task>,
},
IndexCompaction {
index_uid: String,
task: Task,
},
}
#[derive(Debug)]
@@ -114,8 +110,7 @@ impl Batch {
| Batch::Dump(task)
| Batch::IndexCreation { task, .. }
| Batch::Export { task }
| Batch::IndexUpdate { task, .. }
| Batch::IndexCompaction { task, .. } => {
| Batch::IndexUpdate { task, .. } => {
RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
}
Batch::SnapshotCreation(tasks)
@@ -160,8 +155,7 @@ impl Batch {
IndexOperation { op, .. } => Some(op.index_uid()),
IndexCreation { index_uid, .. }
| IndexUpdate { index_uid, .. }
| IndexDeletion { index_uid, .. }
| IndexCompaction { index_uid, .. } => Some(index_uid),
| IndexDeletion { index_uid, .. } => Some(index_uid),
}
}
}
@@ -181,7 +175,6 @@ impl fmt::Display for Batch {
Batch::IndexUpdate { .. } => f.write_str("IndexUpdate")?,
Batch::IndexDeletion { .. } => f.write_str("IndexDeletion")?,
Batch::IndexSwap { .. } => f.write_str("IndexSwap")?,
Batch::IndexCompaction { .. } => f.write_str("IndexCompaction")?,
Batch::Export { .. } => f.write_str("Export")?,
Batch::UpgradeDatabase { .. } => f.write_str("UpgradeDatabase")?,
};
@@ -437,12 +430,6 @@ impl IndexScheduler {
current_batch.processing(Some(&mut task));
Ok(Some(Batch::IndexSwap { task }))
}
BatchKind::IndexCompaction { id } => {
let mut task =
self.queue.tasks.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
current_batch.processing(Some(&mut task));
Ok(Some(Batch::IndexCompaction { index_uid, task }))
}
}
}

View File

@@ -1,26 +1,22 @@
use std::collections::{BTreeSet, HashMap, HashSet};
use std::io::{Seek, SeekFrom};
use std::panic::{catch_unwind, AssertUnwindSafe};
use std::sync::atomic::Ordering;
use byte_unit::Byte;
use meilisearch_types::batches::{BatchEnqueuedAt, BatchId};
use meilisearch_types::heed::{RoTxn, RwTxn};
use meilisearch_types::milli::heed::CompactionOption;
use meilisearch_types::milli::progress::{Progress, VariableNameStep};
use meilisearch_types::milli::{self, ChannelCongestion};
use meilisearch_types::tasks::{Details, IndexSwap, Kind, KindWithContent, Status, Task};
use meilisearch_types::versioning::{VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
use milli::update::Settings as MilliSettings;
use roaring::RoaringBitmap;
use tempfile::PersistError;
use time::OffsetDateTime;
use super::create_batch::Batch;
use crate::processing::{
AtomicBatchStep, AtomicTaskStep, CreateIndexProgress, DeleteIndexProgress, FinalizingIndexStep,
IndexCompaction, InnerSwappingTwoIndexes, SwappingTheIndexes, TaskCancelationProgress,
TaskDeletionProgress, UpdateIndexProgress,
InnerSwappingTwoIndexes, SwappingTheIndexes, TaskCancelationProgress, TaskDeletionProgress,
UpdateIndexProgress,
};
use crate::utils::{
self, remove_n_tasks_datetime_earlier_than, remove_task_datetime, swap_index_uid_in_task,
@@ -422,47 +418,6 @@ impl IndexScheduler {
task.status = Status::Succeeded;
Ok((vec![task], ProcessBatchInfo::default()))
}
Batch::IndexCompaction { index_uid: _, mut task } => {
let KindWithContent::IndexCompaction { index_uid } = &task.kind else {
unreachable!()
};
let rtxn = self.env.read_txn()?;
let ret = catch_unwind(AssertUnwindSafe(|| {
self.apply_compaction(&rtxn, &progress, index_uid)
}));
let (pre_size, post_size) = match ret {
Ok(Ok(stats)) => stats,
Ok(Err(Error::AbortedTask)) => return Err(Error::AbortedTask),
Ok(Err(e)) => return Err(e),
Err(e) => {
let msg = match e.downcast_ref::<&'static str>() {
Some(s) => *s,
None => match e.downcast_ref::<String>() {
Some(s) => &s[..],
None => "Box<dyn Any>",
},
};
return Err(Error::Export(Box::new(Error::ProcessBatchPanicked(
msg.to_string(),
))));
}
};
task.status = Status::Succeeded;
if let Some(Details::IndexCompaction {
index_uid: _,
pre_compaction_size,
post_compaction_size,
}) = task.details.as_mut()
{
*pre_compaction_size = Some(Byte::from_u64(pre_size));
*post_compaction_size = Some(Byte::from_u64(post_size));
}
Ok((vec![task], ProcessBatchInfo::default()))
}
Batch::Export { mut task } => {
let KindWithContent::Export { url, api_key, payload_size, indexes } = &task.kind
else {
@@ -538,91 +493,6 @@ impl IndexScheduler {
}
}
fn apply_compaction(
&self,
rtxn: &RoTxn,
progress: &Progress,
index_uid: &str,
) -> Result<(u64, u64)> {
// 1. Verify that the index exists
if !self.index_mapper.index_exists(rtxn, index_uid)? {
return Err(Error::IndexNotFound(index_uid.to_owned()));
}
// 2. We retrieve the index and create a temporary file in the index directory
progress.update_progress(IndexCompaction::RetrieveTheIndex);
let index = self.index_mapper.index(rtxn, index_uid)?;
// the index operation can take a long time, so save this handle to make it available to the search for the duration of the tick
self.index_mapper
.set_currently_updating_index(Some((index_uid.to_string(), index.clone())));
progress.update_progress(IndexCompaction::CreateTemporaryFile);
let pre_size = std::fs::metadata(index.path().join("data.mdb"))?.len();
let mut file = tempfile::Builder::new()
.suffix("data.")
.prefix(".mdb.cpy")
.tempfile_in(index.path())?;
// 3. We copy the index data to the temporary file
progress.update_progress(IndexCompaction::CopyAndCompactTheIndex);
index
.copy_to_file(file.as_file_mut(), CompactionOption::Enabled)
.map_err(|error| Error::Milli { error, index_uid: Some(index_uid.to_string()) })?;
// ...and reset the file position as specified in the documentation
file.seek(SeekFrom::Start(0))?;
// 4. We replace the index data file with the temporary file
progress.update_progress(IndexCompaction::PersistTheCompactedIndex);
match file.persist(index.path().join("data.mdb")) {
Ok(file) => file.sync_all()?,
// TODO see if we have a _resource busy_ error and probably handle this by:
// 1. closing the index, 2. replacing and 3. reopening it
Err(PersistError { error, file: _ }) => return Err(Error::IoError(error)),
};
// 5. Prepare to close the index
progress.update_progress(IndexCompaction::CloseTheIndex);
// unmark that the index is the processing one so we don't keep a handle to it, preventing its closing
self.index_mapper.set_currently_updating_index(None);
self.index_mapper.close_index(rtxn, index_uid)?;
drop(index);
progress.update_progress(IndexCompaction::ReopenTheIndex);
// 6. Reopen the index
// The index will use the compacted data file when being reopened
let index = self.index_mapper.index(rtxn, index_uid)?;
// if the update processed successfully, we're going to store the new
// stats of the index. Since the tasks have already been processed and
// this is a non-critical operation. If it fails, we should not fail
// the entire batch.
let res = || -> Result<_> {
let mut wtxn = self.env.write_txn()?;
let index_rtxn = index.read_txn()?;
let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn)
.map_err(|e| Error::from_milli(e, Some(index_uid.to_string())))?;
self.index_mapper.store_stats_of(&mut wtxn, index_uid, &stats)?;
wtxn.commit()?;
Ok(stats.database_size)
}();
let post_size = match res {
Ok(post_size) => post_size,
Err(e) => {
tracing::error!(
error = &e as &dyn std::error::Error,
"Could not write the stats of the index"
);
0
}
};
Ok((pre_size, post_size))
}
/// Swap the index `lhs` with the index `rhs`.
fn apply_index_swap(
&self,

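One detail of the removed compaction handler worth keeping in mind is the `catch_unwind` wrapper: a panic payload is a `Box<dyn Any + Send>`, so recovering a printable message takes the two-step downcast seen above. A standalone sketch of that recovery:

use std::panic::{catch_unwind, AssertUnwindSafe};

// Panic payloads are `Box<dyn Any + Send>`: `panic!("literal")` stores a
// `&'static str`, while `panic!("{x}")` formatting stores a `String`.
fn panic_message(payload: Box<dyn std::any::Any + Send>) -> String {
    match payload.downcast_ref::<&'static str>() {
        Some(s) => (*s).to_string(),
        None => match payload.downcast_ref::<String>() {
            Some(s) => s.clone(),
            None => "Box<dyn Any>".to_string(),
        },
    }
}

fn main() {
    let res = catch_unwind(AssertUnwindSafe(|| panic!("compaction failed")));
    assert_eq!(panic_message(res.unwrap_err()), "compaction failed");
}
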
View File

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggo` already exists.", error_code: "index_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_already_exists" }, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -57,7 +57,7 @@ girafo: { number_of_documents: 0, field_distribution: {} }
[timestamp] [4,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.22.1"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.21.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
1 {uid: 1, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }

View File

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
----------------------------------------------------------------------
### Status:
enqueued [0,]

View File

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
----------------------------------------------------------------------
### Status:

View File

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
----------------------------------------------------------------------
### Status:
@@ -37,7 +37,7 @@ catto [1,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.22.1"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.21.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

View File

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
----------------------------------------------------------------------
@@ -40,7 +40,7 @@ doggo [2,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.22.1"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.21.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

View File

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 22, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 21, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
3 {uid: 3, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -43,7 +43,7 @@ doggo [2,3,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.22.1"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.21.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

View File

@@ -722,7 +722,7 @@ fn basic_get_stats() {
let kind = index_creation_task("whalo", "fish");
let _task = index_scheduler.register(kind, None, false).unwrap();
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
{
"indexes": {
"catto": 1,
@@ -742,7 +742,6 @@ fn basic_get_stats() {
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCompaction": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@@ -754,10 +753,10 @@ fn basic_get_stats() {
"upgradeDatabase": 0
}
}
"###);
"#);
handle.advance_till([Start, BatchCreated]);
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
{
"indexes": {
"catto": 1,
@@ -777,7 +776,6 @@ fn basic_get_stats() {
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCompaction": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@@ -789,7 +787,7 @@ fn basic_get_stats() {
"upgradeDatabase": 0
}
}
"###);
"#);
handle.advance_till([
InsideProcessBatch,
@@ -799,7 +797,7 @@ fn basic_get_stats() {
Start,
BatchCreated,
]);
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
{
"indexes": {
"catto": 1,
@@ -819,7 +817,6 @@ fn basic_get_stats() {
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCompaction": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@@ -831,7 +828,7 @@ fn basic_get_stats() {
"upgradeDatabase": 0
}
}
"###);
"#);
// now we make one more batch, the started_at field of the new tasks will be past `second_start_time`
handle.advance_till([
@@ -842,7 +839,7 @@ fn basic_get_stats() {
Start,
BatchCreated,
]);
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
{
"indexes": {
"catto": 1,
@@ -862,7 +859,6 @@ fn basic_get_stats() {
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCompaction": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@@ -874,7 +870,7 @@ fn basic_get_stats() {
"upgradeDatabase": 0
}
}
"###);
"#);
}
#[test]

View File

@@ -45,7 +45,6 @@ pub fn upgrade_index_scheduler(
(1, 19, _) => 0,
(1, 20, _) => 0,
(1, 21, _) => 0,
(1, 22, _) => 0,
(major, minor, patch) => {
if major > current_major
|| (major == current_major && minor > current_minor)

View File

@@ -256,15 +256,14 @@ pub fn swap_index_uid_in_task(task: &mut Task, swap: (&str, &str)) {
use KindWithContent as K;
let mut index_uids = vec![];
match &mut task.kind {
K::DocumentAdditionOrUpdate { index_uid, .. }
| K::DocumentEdition { index_uid, .. }
| K::DocumentDeletion { index_uid, .. }
| K::DocumentDeletionByFilter { index_uid, .. }
| K::DocumentClear { index_uid }
| K::SettingsUpdate { index_uid, .. }
| K::IndexDeletion { index_uid }
| K::IndexCreation { index_uid, .. }
| K::IndexCompaction { index_uid, .. } => index_uids.push(index_uid),
K::DocumentAdditionOrUpdate { index_uid, .. } => index_uids.push(index_uid),
K::DocumentEdition { index_uid, .. } => index_uids.push(index_uid),
K::DocumentDeletion { index_uid, .. } => index_uids.push(index_uid),
K::DocumentDeletionByFilter { index_uid, .. } => index_uids.push(index_uid),
K::DocumentClear { index_uid } => index_uids.push(index_uid),
K::SettingsUpdate { index_uid, .. } => index_uids.push(index_uid),
K::IndexDeletion { index_uid } => index_uids.push(index_uid),
K::IndexCreation { index_uid, .. } => index_uids.push(index_uid),
K::IndexUpdate { index_uid, new_index_uid, .. } => {
index_uids.push(index_uid);
if let Some(new_uid) = new_index_uid {
@@ -619,13 +618,6 @@ impl crate::IndexScheduler {
Details::UpgradeDatabase { from: _, to: _ } => {
assert_eq!(kind.as_kind(), Kind::UpgradeDatabase);
}
Details::IndexCompaction {
index_uid: _,
pre_compaction_size: _,
post_compaction_size: _,
} => {
assert_eq!(kind.as_kind(), Kind::IndexCompaction);
}
}
}

View File
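
The rewrite above re-expands a grouped match arm into per-variant arms because the `IndexCompaction` alternative is gone. The grouped form relies on or-patterns binding the same field in every alternative; a minimal sketch with a hypothetical two-variant enum:

    enum Kind {
        DocumentClear { index_uid: String },
        IndexDeletion { index_uid: String },
    }

    // One arm suffices as long as each alternative binds `index_uid`.
    fn index_uid(kind: &Kind) -> &str {
        match kind {
            Kind::DocumentClear { index_uid } | Kind::IndexDeletion { index_uid } => index_uid,
        }
    }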

@@ -271,10 +271,9 @@ macro_rules! json_string {
#[cfg(test)]
mod tests {
use uuid::Uuid;
use crate as meili_snap;
use crate::UUID_IN_MESSAGE_RE;
use uuid::Uuid;
#[test]
fn snap() {

View File

@@ -109,7 +109,6 @@ impl HeedAuthStore {
Action::IndexesGet,
Action::IndexesUpdate,
Action::IndexesSwap,
Action::IndexesCompact,
]
.iter(),
);

View File

@@ -5,7 +5,6 @@ use actix_web::{self as aweb, HttpResponseBuilder};
use aweb::http::header;
use aweb::rt::task::JoinError;
use convert_case::Casing;
use milli::cellulite;
use milli::heed::{Error as HeedError, MdbError};
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
@@ -240,7 +239,6 @@ InconsistentDocumentChangeHeaders , InvalidRequest , BAD_REQU
InvalidDocumentFilter , InvalidRequest , BAD_REQUEST ;
InvalidDocumentSort , InvalidRequest , BAD_REQUEST ;
InvalidDocumentGeoField , InvalidRequest , BAD_REQUEST ;
InvalidDocumentGeojsonField , InvalidRequest , BAD_REQUEST ;
InvalidHeaderValue , InvalidRequest , BAD_REQUEST ;
InvalidVectorDimensions , InvalidRequest , BAD_REQUEST ;
InvalidVectorsType , InvalidRequest , BAD_REQUEST ;
@@ -503,9 +501,7 @@ impl ErrorCode for milli::Error {
Code::InvalidFacetSearchFacetName
}
UserError::CriterionError(_) => Code::InvalidSettingsRankingRules,
UserError::InvalidGeoField { .. } | UserError::GeoJsonError(_) => {
Code::InvalidDocumentGeoField
}
UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField,
UserError::InvalidVectorDimensions { .. }
| UserError::InvalidIndexingVectorDimensions { .. } => {
Code::InvalidVectorDimensions
@@ -529,17 +525,6 @@ impl ErrorCode for milli::Error {
| UserError::DocumentEditionCompilationError(_) => {
Code::EditDocumentsByFunctionError
}
UserError::CelluliteError(err) => match err {
cellulite::Error::BuildCanceled
| cellulite::Error::VersionMismatchOnBuild(_)
| cellulite::Error::DatabaseDoesntExists
| cellulite::Error::Heed(_)
| cellulite::Error::InvalidGeometry(_)
| cellulite::Error::InternalDocIdMissing(_, _)
| cellulite::Error::CannotConvertLineToCell(_, _, _) => Code::Internal,
cellulite::Error::InvalidGeoJson(_) => Code::InvalidDocumentGeojsonField,
},
UserError::MalformedGeojson(_) => Code::InvalidDocumentGeojsonField,
}
}
}

View File

@@ -380,9 +380,6 @@ pub enum Action {
#[serde(rename = "webhooks.*")]
#[deserr(rename = "webhooks.*")]
WebhooksAll,
#[serde(rename = "indexes.compact")]
#[deserr(rename = "indexes.compact")]
IndexesCompact,
}
impl Action {
@@ -401,7 +398,6 @@ impl Action {
INDEXES_UPDATE => Some(Self::IndexesUpdate),
INDEXES_DELETE => Some(Self::IndexesDelete),
INDEXES_SWAP => Some(Self::IndexesSwap),
INDEXES_COMPACT => Some(Self::IndexesCompact),
TASKS_ALL => Some(Self::TasksAll),
TASKS_CANCEL => Some(Self::TasksCancel),
TASKS_DELETE => Some(Self::TasksDelete),
@@ -466,7 +462,6 @@ impl Action {
IndexesUpdate => false,
IndexesDelete => false,
IndexesSwap => false,
IndexesCompact => false,
TasksCancel => false,
TasksDelete => false,
TasksGet => true,
@@ -518,7 +513,6 @@ pub mod actions {
pub const INDEXES_UPDATE: u8 = IndexesUpdate.repr();
pub const INDEXES_DELETE: u8 = IndexesDelete.repr();
pub const INDEXES_SWAP: u8 = IndexesSwap.repr();
pub const INDEXES_COMPACT: u8 = IndexesCompact.repr();
pub const TASKS_ALL: u8 = TasksAll.repr();
pub const TASKS_CANCEL: u8 = TasksCancel.repr();
pub const TASKS_DELETE: u8 = TasksDelete.repr();
@@ -620,7 +614,6 @@ pub(crate) mod test {
assert!(WebhooksDelete.repr() == 47 && WEBHOOKS_DELETE == 47);
assert!(WebhooksCreate.repr() == 48 && WEBHOOKS_CREATE == 48);
assert!(WebhooksAll.repr() == 49 && WEBHOOKS_ALL == 49);
assert!(IndexesCompact.repr() == 50 && INDEXES_COMPACT == 50);
}
#[test]

View File
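
The assert deleted at the end of the hunk above comes from a guard test: each `Action` pins its `u8` discriminant next to a same-valued const so that API keys persisted with older binaries keep their meaning. A minimal sketch of that guard, with hypothetical numbers:

    #[repr(u8)]
    enum Action {
        IndexesSwap = 11,
    }
    const INDEXES_SWAP: u8 = Action::IndexesSwap as u8;

    #[test]
    fn discriminants_are_stable() {
        // Reordering variants breaks this loudly instead of silently
        // remapping persisted key permissions.
        assert!(Action::IndexesSwap as u8 == 11 && INDEXES_SWAP == 11);
    }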

@@ -1,5 +1,3 @@
#![allow(clippy::result_large_err)]
pub mod batch_view;
pub mod batches;
pub mod compression;

View File

@@ -142,11 +142,6 @@ pub struct DetailsView {
pub old_index_uid: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub new_index_uid: Option<String>,
// index compaction
#[serde(skip_serializing_if = "Option::is_none")]
pub pre_compaction_size: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub post_compaction_size: Option<String>,
}
impl DetailsView {
@@ -319,24 +314,6 @@ impl DetailsView {
// We should never be able to batch multiple renames at the same time.
(Some(left), Some(_right)) => Some(left),
},
pre_compaction_size: match (
self.pre_compaction_size.clone(),
other.pre_compaction_size.clone(),
) {
(None, None) => None,
(None, Some(size)) | (Some(size), None) => Some(size),
// We should never be able to batch multiple compactions at the same time.
(Some(left), Some(_right)) => Some(left),
},
post_compaction_size: match (
self.post_compaction_size.clone(),
other.post_compaction_size.clone(),
) {
(None, None) => None,
(None, Some(size)) | (Some(size), None) => Some(size),
// We should never be able to batch multiple compactions at the same time.
(Some(left), Some(_right)) => Some(left),
},
}
}
}
@@ -438,15 +415,6 @@ impl From<Details> for DetailsView {
upgrade_to: Some(format!("v{}.{}.{}", to.0, to.1, to.2)),
..Default::default()
},
Details::IndexCompaction { pre_compaction_size, post_compaction_size, .. } => {
DetailsView {
pre_compaction_size: pre_compaction_size
.map(|size| size.get_appropriate_unit(UnitType::Both).to_string()),
post_compaction_size: post_compaction_size
.map(|size| size.get_appropriate_unit(UnitType::Both).to_string()),
..Default::default()
}
}
}
}
}

View File
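
Each merge arm removed above has the same shape: keep whichever side is `Some`, and prefer the left when both are set. That is precisely `Option::or`; a minimal equivalent sketch:

    // (None, None)                      => None
    // (None, Some(s)) | (Some(s), None) => Some(s)
    // (Some(left), Some(_right))        => Some(left)
    fn merge(left: Option<String>, right: Option<String>) -> Option<String> {
        left.or(right)
    }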

@@ -67,8 +67,7 @@ impl Task {
| SettingsUpdate { index_uid, .. }
| IndexCreation { index_uid, .. }
| IndexUpdate { index_uid, .. }
| IndexDeletion { index_uid }
| IndexCompaction { index_uid } => Some(index_uid),
| IndexDeletion { index_uid } => Some(index_uid),
}
}
@@ -95,8 +94,7 @@ impl Task {
| KindWithContent::DumpCreation { .. }
| KindWithContent::SnapshotCreation
| KindWithContent::Export { .. }
| KindWithContent::UpgradeDatabase { .. }
| KindWithContent::IndexCompaction { .. } => None,
| KindWithContent::UpgradeDatabase { .. } => None,
}
}
}
@@ -172,9 +170,6 @@ pub enum KindWithContent {
UpgradeDatabase {
from: (u32, u32, u32),
},
IndexCompaction {
index_uid: String,
},
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, ToSchema)]
@@ -211,7 +206,6 @@ impl KindWithContent {
KindWithContent::SnapshotCreation => Kind::SnapshotCreation,
KindWithContent::Export { .. } => Kind::Export,
KindWithContent::UpgradeDatabase { .. } => Kind::UpgradeDatabase,
KindWithContent::IndexCompaction { .. } => Kind::IndexCompaction,
}
}
@@ -232,8 +226,7 @@ impl KindWithContent {
| DocumentClear { index_uid }
| SettingsUpdate { index_uid, .. }
| IndexCreation { index_uid, .. }
| IndexDeletion { index_uid }
| IndexCompaction { index_uid } => vec![index_uid],
| IndexDeletion { index_uid } => vec![index_uid],
IndexUpdate { index_uid, new_index_uid, .. } => {
let mut indexes = vec![index_uid.as_str()];
if let Some(new_uid) = new_index_uid {
@@ -332,11 +325,6 @@ impl KindWithContent {
versioning::VERSION_PATCH,
),
}),
KindWithContent::IndexCompaction { index_uid } => Some(Details::IndexCompaction {
index_uid: index_uid.clone(),
pre_compaction_size: None,
post_compaction_size: None,
}),
}
}
@@ -419,11 +407,6 @@ impl KindWithContent {
versioning::VERSION_PATCH,
),
}),
KindWithContent::IndexCompaction { index_uid } => Some(Details::IndexCompaction {
index_uid: index_uid.clone(),
pre_compaction_size: None,
post_compaction_size: None,
}),
}
}
}
@@ -486,11 +469,6 @@ impl From<&KindWithContent> for Option<Details> {
versioning::VERSION_PATCH,
),
}),
KindWithContent::IndexCompaction { index_uid } => Some(Details::IndexCompaction {
index_uid: index_uid.clone(),
pre_compaction_size: None,
post_compaction_size: None,
}),
}
}
}
@@ -601,7 +579,6 @@ pub enum Kind {
SnapshotCreation,
Export,
UpgradeDatabase,
IndexCompaction,
}
impl Kind {
@@ -613,8 +590,7 @@ impl Kind {
| Kind::SettingsUpdate
| Kind::IndexCreation
| Kind::IndexDeletion
| Kind::IndexUpdate
| Kind::IndexCompaction => true,
| Kind::IndexUpdate => true,
Kind::IndexSwap
| Kind::TaskCancelation
| Kind::TaskDeletion
@@ -642,7 +618,6 @@ impl Display for Kind {
Kind::SnapshotCreation => write!(f, "snapshotCreation"),
Kind::Export => write!(f, "export"),
Kind::UpgradeDatabase => write!(f, "upgradeDatabase"),
Kind::IndexCompaction => write!(f, "indexCompaction"),
}
}
}
@@ -678,8 +653,6 @@ impl FromStr for Kind {
Ok(Kind::Export)
} else if kind.eq_ignore_ascii_case("upgradeDatabase") {
Ok(Kind::UpgradeDatabase)
} else if kind.eq_ignore_ascii_case("indexCompaction") {
Ok(Kind::IndexCompaction)
} else {
Err(ParseTaskKindError(kind.to_owned()))
}
@@ -765,11 +738,6 @@ pub enum Details {
from: (u32, u32, u32),
to: (u32, u32, u32),
},
IndexCompaction {
index_uid: String,
pre_compaction_size: Option<Byte>,
post_compaction_size: Option<Byte>,
},
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, ToSchema)]
@@ -832,10 +800,6 @@ impl Details {
Self::ClearAll { deleted_documents } => *deleted_documents = Some(0),
Self::TaskCancelation { canceled_tasks, .. } => *canceled_tasks = Some(0),
Self::TaskDeletion { deleted_tasks, .. } => *deleted_tasks = Some(0),
Self::IndexCompaction { pre_compaction_size, post_compaction_size, .. } => {
*pre_compaction_size = None;
*post_compaction_size = None;
}
Self::SettingsUpdate { .. }
| Self::IndexInfo { .. }
| Self::Dump { .. }

View File

@@ -91,7 +91,7 @@ time = { version = "0.3.41", features = [
] }
tokio = { version = "1.45.1", features = ["full"] }
toml = "0.8.23"
uuid = { version = "1.18.0", features = ["serde", "v4", "v7"] }
uuid = { version = "1.17.0", features = ["serde", "v4"] }
serde_urlencoded = "0.7.1"
termcolor = "1.4.1"
url = { version = "2.5.4", features = ["serde"] }

View File
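
The Cargo.toml hunk drops the uuid `v7` feature along with the version bump; that feature gates `Uuid::now_v7()`, the constructor behind the request ids removed later in this diff. A minimal sketch, assuming uuid 1.x with the `v7` feature enabled:

    use uuid::Uuid;

    fn main() {
        // Version-7 UUIDs embed a millisecond timestamp in their high bits,
        // so ids sort roughly by creation time in logs.
        let id = Uuid::now_v7();
        assert_eq!(id.get_version_num(), 7);
    }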

@@ -205,8 +205,6 @@ struct Infos {
experimental_no_snapshot_compaction: bool,
experimental_no_edition_2024_for_dumps: bool,
experimental_no_edition_2024_for_settings: bool,
experimental_no_edition_2024_for_prefix_post_processing: bool,
experimental_no_edition_2024_for_facet_post_processing: bool,
experimental_vector_store_setting: bool,
gpu_enabled: bool,
db_path: bool,
@@ -298,8 +296,6 @@ impl Infos {
skip_index_budget: _,
experimental_no_edition_2024_for_settings,
experimental_no_edition_2024_for_dumps,
experimental_no_edition_2024_for_prefix_post_processing,
experimental_no_edition_2024_for_facet_post_processing,
} = indexer_options;
let RuntimeTogglableFeatures {
@@ -369,8 +365,6 @@ impl Infos {
ssl_resumption,
ssl_tickets,
experimental_no_edition_2024_for_settings,
experimental_no_edition_2024_for_prefix_post_processing,
experimental_no_edition_2024_for_facet_post_processing,
}
}
}

View File

@@ -55,10 +55,6 @@ const MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE: &str = "MEILI_EXPERIMENTAL_ENABLE_LO
const MEILI_EXPERIMENTAL_CONTAINS_FILTER: &str = "MEILI_EXPERIMENTAL_CONTAINS_FILTER";
const MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS: &str =
"MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS";
const MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_FACET_POST_PROCESSING: &str =
"MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_FACET_POST_PROCESSING";
const MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_PREFIX_POST_PROCESSING: &str =
"MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_PREFIX_POST_PROCESSING";
const MEILI_EXPERIMENTAL_ENABLE_METRICS: &str = "MEILI_EXPERIMENTAL_ENABLE_METRICS";
const MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE: &str = "MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE";
const MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER: &str = "MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER";
@@ -776,22 +772,6 @@ pub struct IndexerOpts {
#[clap(long, env = MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_DUMPS)]
#[serde(default)]
pub experimental_no_edition_2024_for_dumps: bool,
/// Experimental no edition 2024 to compute prefixes. For more information,
/// see: <https://github.com/orgs/meilisearch/discussions/862>
///
/// Enables the experimental no edition 2024 to compute prefixes.
#[clap(long, env = MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_PREFIX_POST_PROCESSING)]
#[serde(default)]
pub experimental_no_edition_2024_for_prefix_post_processing: bool,
/// Experimental no edition 2024 to compute facets. For more information,
/// see: <https://github.com/orgs/meilisearch/discussions/862>
///
/// Enables the experimental no edition 2024 to compute facets.
#[clap(long, env = MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_FACET_POST_PROCESSING)]
#[serde(default)]
pub experimental_no_edition_2024_for_facet_post_processing: bool,
}
impl IndexerOpts {
@@ -803,8 +783,6 @@ impl IndexerOpts {
skip_index_budget: _,
experimental_no_edition_2024_for_settings,
experimental_no_edition_2024_for_dumps,
experimental_no_edition_2024_for_prefix_post_processing,
experimental_no_edition_2024_for_facet_post_processing,
} = self;
if let Some(max_indexing_memory) = max_indexing_memory.0 {
export_to_env_if_not_present(
@@ -830,18 +808,6 @@ impl IndexerOpts {
experimental_no_edition_2024_for_dumps.to_string(),
);
}
if experimental_no_edition_2024_for_prefix_post_processing {
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_PREFIX_POST_PROCESSING,
experimental_no_edition_2024_for_prefix_post_processing.to_string(),
);
}
if experimental_no_edition_2024_for_facet_post_processing {
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_FACET_POST_PROCESSING,
experimental_no_edition_2024_for_facet_post_processing.to_string(),
);
}
}
}
@@ -867,10 +833,6 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
chunk_compression_level: Default::default(),
documents_chunk_size: Default::default(),
max_nb_chunks: Default::default(),
experimental_no_edition_2024_for_prefix_post_processing: other
.experimental_no_edition_2024_for_prefix_post_processing,
experimental_no_edition_2024_for_facet_post_processing: other
.experimental_no_edition_2024_for_facet_post_processing,
})
}
}

View File
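
All of the deleted blocks above funnel through one helper: copy the flag into the environment only when it is set and the variable is absent, so an operator's explicit export always wins. The helper name comes from the diff; the body below is a hedged guess at its behavior:

    fn export_to_env_if_not_present(key: &str, value: String) {
        // Respect anything already present in the environment.
        if std::env::var_os(key).is_none() {
            std::env::set_var(key, value);
        }
    }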

@@ -1,84 +0,0 @@
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::actions;
use meilisearch_types::tasks::KindWithContent;
use tracing::debug;
use utoipa::OpenApi;
use super::ActionPolicy;
use crate::analytics::Analytics;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::SummarizedTaskView;
#[derive(OpenApi)]
#[openapi(
paths(compact),
tags(
(
name = "Compact an index",
description = "The /compact route compacts the database to reorganize it and make it smaller and more efficient.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/compact"),
),
),
)]
pub struct CompactApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(compact))));
}
/// Compact an index
#[utoipa::path(
post,
path = "{indexUid}/compact",
tag = "Compact an index",
security(("Bearer" = ["search", "*"])),
params(("indexUid" = String, Path, example = "movies", description = "Index Unique Identifier", nullable = false)),
responses(
(status = ACCEPTED, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "indexCompaction",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn compact(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_COMPACT }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
req: HttpRequest,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
analytics.publish(IndexCompacted::default(), &req);
let task = KindWithContent::IndexCompaction { index_uid: index_uid.to_string() };
let task =
match tokio::task::spawn_blocking(move || index_scheduler.register(task, None, false))
.await?
{
Ok(task) => task,
Err(e) => return Err(e.into()),
};
debug!(returns = ?task, "Compact the {index_uid} index");
Ok(HttpResponse::Accepted().json(SummarizedTaskView::from(task)))
}
crate::empty_analytics!(IndexCompacted, "Index Compacted");

View File
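
One detail of the deleted handler worth noting: `register` performs blocking writes, so the route moves it off the async executor with `spawn_blocking`. A simplified sketch of that enqueue shape (names from the diff, types simplified):

    async fn enqueue(
        index_scheduler: actix_web::web::Data<index_scheduler::IndexScheduler>,
        task: meilisearch_types::tasks::KindWithContent,
    ) -> Result<meilisearch_types::tasks::Task, meilisearch_types::error::ResponseError> {
        // The first `?` surfaces a JoinError if the blocking task panicked;
        // the match surfaces the scheduler's own error.
        match tokio::task::spawn_blocking(move || index_scheduler.register(task, None, false))
            .await?
        {
            Ok(task) => Ok(task),
            Err(e) => Err(e.into()),
        }
    }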

@@ -28,7 +28,6 @@ use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::is_dry_run;
use crate::Opt;
pub mod compact;
pub mod documents;
mod enterprise_edition;
pub mod facet_search;
@@ -50,9 +49,8 @@ pub use enterprise_edition::proxy::{PROXY_ORIGIN_REMOTE_HEADER, PROXY_ORIGIN_TAS
(path = "/", api = facet_search::FacetSearchApi),
(path = "/", api = similar::SimilarApi),
(path = "/", api = settings::SettingsApi),
(path = "/", api = compact::CompactApi),
),
paths(list_indexes, create_index, get_index, update_index, delete_index, get_index_stats, compact::compact),
paths(list_indexes, create_index, get_index, update_index, delete_index, get_index_stats),
tags(
(
name = "Indexes",
@@ -82,8 +80,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/search").configure(search::configure))
.service(web::scope("/facet-search").configure(facet_search::configure))
.service(web::scope("/similar").configure(similar::configure))
.service(web::scope("/settings").configure(settings::configure))
.service(web::scope("/compact").configure(compact::configure)),
.service(web::scope("/settings").configure(settings::configure)),
);
}

View File

@@ -13,7 +13,6 @@ use meilisearch_types::serde_cs::vec::CS;
use serde_json::Value;
use tracing::debug;
use utoipa::{IntoParams, OpenApi};
use uuid::Uuid;
use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
@@ -326,8 +325,7 @@ pub async fn search_with_url_query(
req: HttpRequest,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let request_uid = Uuid::now_v7();
debug!(request_uid = ?request_uid, parameters = ?params, "Search get");
debug!(parameters = ?params, "Search get");
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let mut query: SearchQuery = params.into_inner().try_into()?;
@@ -353,7 +351,6 @@ pub async fn search_with_url_query(
search_kind,
retrieve_vector,
index_scheduler.features(),
request_uid,
)
})
.await;
@@ -366,7 +363,7 @@ pub async fn search_with_url_query(
let search_result = search_result?;
debug!(request_uid = ?request_uid, returns = ?search_result, "Search get");
debug!(returns = ?search_result, "Search get");
Ok(HttpResponse::Ok().json(search_result))
}
@@ -435,10 +432,9 @@ pub async fn search_with_post(
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let request_uid = Uuid::now_v7();
let mut query = params.into_inner();
debug!(request_uid = ?request_uid, parameters = ?query, "Search post");
debug!(parameters = ?query, "Search post");
// Tenant token search_rules.
if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
@@ -462,7 +458,6 @@ pub async fn search_with_post(
search_kind,
retrieve_vectors,
index_scheduler.features(),
request_uid,
)
})
.await;
@@ -478,7 +473,7 @@ pub async fn search_with_post(
let search_result = search_result?;
debug!(request_uid = ?request_uid, returns = ?search_result, "Search post");
debug!(returns = ?search_result, "Search post");
Ok(HttpResponse::Ok().json(search_result))
}

View File
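
In the hunks above, `request_uid = ?request_uid` is tracing's structured-field syntax: `?value` records the field via its `Debug` impl (`%value` would use `Display`). Deleting the field therefore changes the log schema, not just a message string. A minimal sketch:

    use tracing::debug;

    fn log_query(query: &str) {
        debug!(parameters = ?query, "Search post"); // Debug formatting
        debug!(query = %query, "Search post");      // Display formatting
    }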

@@ -234,7 +234,6 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
facet_stats: _,
degraded,
used_negative_operator,
request_uid: _,
} = result;
self.total_succeeded = self.total_succeeded.saturating_add(1);

View File

@@ -9,7 +9,6 @@ use meilisearch_types::keys::actions;
use serde::Serialize;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};
use uuid::Uuid;
use super::multi_search_analytics::MultiSearchAggregator;
use crate::analytics::Analytics;
@@ -152,7 +151,6 @@ pub async fn multi_search_with_post(
// Since we don't want to process half of the search requests and then get a permit refused
// we're going to get one permit for the whole duration of the multi-search request.
let permit = search_queue.try_get_search_permit().await?;
let request_uid = Uuid::now_v7();
let federated_search = params.into_inner();
@@ -190,27 +188,14 @@ pub async fn multi_search_with_post(
let response = match federation {
Some(federation) => {
debug!(
request_uid = ?request_uid,
federation = ?federation,
parameters = ?queries,
"Federated-search"
);
// check remote header
let is_proxy = req
.headers()
.get(PROXY_SEARCH_HEADER)
.is_some_and(|value| value.as_bytes() == PROXY_SEARCH_HEADER_VALUE.as_bytes());
let search_result = perform_federated_search(
&index_scheduler,
queries,
federation,
features,
is_proxy,
request_uid,
)
.await;
let search_result =
perform_federated_search(&index_scheduler, queries, federation, features, is_proxy)
.await;
permit.drop().await;
if search_result.is_ok() {
@@ -218,13 +203,6 @@ pub async fn multi_search_with_post(
}
analytics.publish(multi_aggregate, &req);
debug!(
request_uid = ?request_uid,
returns = ?search_result,
"Federated-search"
);
HttpResponse::Ok().json(search_result?)
}
None => {
@@ -238,12 +216,7 @@ pub async fn multi_search_with_post(
.map(SearchQueryWithIndex::into_index_query_federation)
.enumerate()
{
debug!(
request_uid = ?request_uid,
on_index = query_index,
parameters = ?query,
"Multi-search"
);
debug!(on_index = query_index, parameters = ?query, "Multi-search");
if federation_options.is_some() {
return Err((
@@ -285,7 +258,6 @@ pub async fn multi_search_with_post(
search_kind,
retrieve_vector,
features,
request_uid,
)
})
.await
@@ -314,11 +286,7 @@ pub async fn multi_search_with_post(
err
})?;
debug!(
request_uid = ?request_uid,
returns = ?search_results,
"Multi-search"
);
debug!(returns = ?search_results, "Multi-search");
HttpResponse::Ok().json(SearchResults { results: search_results })
}

View File

@@ -226,14 +226,14 @@ mod tests {
{
let params = "types=createIndex";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
snapshot!(meili_snap::json_string!(err), @r#"
{
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`, `indexCompaction`.",
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
}
"###);
"#);
}
}
#[test]

View File

@@ -1,4 +1,3 @@
use core::convert::Infallible;
use std::collections::BTreeMap;
use std::str::FromStr;
@@ -8,6 +7,7 @@ use actix_http::header::{
};
use actix_web::web::{self, Data, Path};
use actix_web::{HttpRequest, HttpResponse};
use core::convert::Infallible;
use deserr::actix_web::AwebJson;
use deserr::{DeserializeError, Deserr, ValuePointerRef};
use index_scheduler::IndexScheduler;
@@ -24,12 +24,12 @@ use tracing::debug;
use url::Url;
use utoipa::{OpenApi, ToSchema};
use uuid::Uuid;
use WebhooksError::*;
use crate::analytics::{Aggregate, Analytics};
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use WebhooksError::*;
#[derive(OpenApi)]
#[openapi(

View File

@@ -17,7 +17,6 @@ use meilisearch_types::milli::vector::Embedding;
use meilisearch_types::milli::{self, DocumentId, OrderBy, TimeBudget, DEFAULT_VALUES_PER_FACET};
use roaring::RoaringBitmap;
use tokio::task::JoinHandle;
use uuid::Uuid;
use super::super::ranking_rules::{self, RankingRules};
use super::super::{
@@ -40,7 +39,6 @@ pub async fn perform_federated_search(
federation: Federation,
features: RoFeatures,
is_proxy: bool,
request_uid: Uuid,
) -> Result<FederatedSearchResult, ResponseError> {
if is_proxy {
features.check_network("Performing a remote federated search")?;
@@ -172,7 +170,6 @@ pub async fn perform_federated_search(
facet_stats,
facets_by_index,
remote_errors: partitioned_queries.has_remote.then_some(remote_errors),
request_uid: Some(request_uid),
})
}
@@ -442,7 +439,6 @@ fn merge_metadata(
degraded: degraded_for_host,
used_negative_operator: host_used_negative_operator,
remote_errors: _,
request_uid: _,
} in remote_results
{
let this_remote_duration = Duration::from_millis(*processing_time_ms as u64);

View File

@@ -16,7 +16,6 @@ use meilisearch_types::milli::order_by_map::OrderByMap;
use meilisearch_types::milli::OrderBy;
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
use uuid::Uuid;
use super::super::{ComputedFacets, FacetStats, HitsInfo, SearchHit, SearchQueryWithIndex};
use crate::milli::vector::Embedding;
@@ -132,8 +131,6 @@ pub struct FederatedSearchResult {
pub facet_stats: Option<BTreeMap<String, FacetStats>>,
#[serde(default, skip_serializing_if = "FederatedFacets::is_empty")]
pub facets_by_index: FederatedFacets,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub request_uid: Option<Uuid>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub remote_errors: Option<BTreeMap<String, ResponseError>>,
@@ -159,7 +156,6 @@ impl fmt::Debug for FederatedSearchResult {
facet_stats,
facets_by_index,
remote_errors,
request_uid,
} = self;
let mut debug = f.debug_struct("SearchResult");
@@ -192,9 +188,6 @@ impl fmt::Debug for FederatedSearchResult {
if let Some(remote_errors) = remote_errors {
debug.field("remote_errors", &remote_errors);
}
if let Some(request_uid) = request_uid {
debug.field("request_uid", &request_uid);
}
debug.finish()
}

View File
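
Both response structs above use the same serde idiom for optional fields, which is why dropping `request_uid` stays wire-compatible for clients that tolerate a missing key. A minimal sketch:

    #[derive(serde::Serialize, serde::Deserialize)]
    struct SearchResultSketch {
        // Omitted from the serialized JSON when None, and accepted as
        // absent on deserialization thanks to `default`.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        request_uid: Option<uuid::Uuid>,
    }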

@@ -36,7 +36,6 @@ use serde_json::{json, Value};
#[cfg(test)]
mod mod_test;
use utoipa::ToSchema;
use uuid::Uuid;
use crate::error::MeilisearchHttpError;
@@ -852,8 +851,6 @@ pub struct SearchResult {
pub facet_distribution: Option<BTreeMap<String, IndexMap<String, u64>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub facet_stats: Option<BTreeMap<String, FacetStats>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub request_uid: Option<Uuid>,
#[serde(skip_serializing_if = "Option::is_none")]
pub semantic_hit_count: Option<u32>,
@@ -875,7 +872,6 @@ impl fmt::Debug for SearchResult {
hits_info,
facet_distribution,
facet_stats,
request_uid,
semantic_hit_count,
degraded,
used_negative_operator,
@@ -905,9 +901,6 @@ impl fmt::Debug for SearchResult {
if let Some(semantic_hit_count) = semantic_hit_count {
debug.field("semantic_hit_count", &semantic_hit_count);
}
if let Some(request_uid) = request_uid {
debug.field("request_uid", &request_uid);
}
debug.finish()
}
@@ -1127,7 +1120,6 @@ pub fn perform_search(
search_kind: SearchKind,
retrieve_vectors: RetrieveVectors,
features: RoFeatures,
request_uid: Uuid,
) -> Result<SearchResult, ResponseError> {
let before_search = Instant::now();
let rtxn = index.read_txn()?;
@@ -1245,7 +1237,6 @@ pub fn perform_search(
degraded,
used_negative_operator,
semantic_hit_count,
request_uid: Some(request_uid),
};
Ok(result)
}

View File

@@ -419,14 +419,14 @@ async fn error_add_api_key_invalid_parameters_actions() {
let (response, code) = server.add_api_key(content).await;
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r#"
{
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`, `indexes.compact`",
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
}
"###);
"#);
}
#[actix_rt::test]

View File

@@ -91,14 +91,14 @@ async fn create_api_key_bad_actions() {
// can't parse
let (response, code) = server.add_api_key(json!({ "actions": ["doggo"] })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`, `indexes.compact`",
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
}
"###);
"#);
}
#[actix_rt::test]

View File

@@ -40,14 +40,14 @@ async fn batch_bad_types() {
let (response, code) = server.batches_filter("types=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`, `indexCompaction`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
}
"###);
"#);
}
#[actix_rt::test]

View File

@@ -522,26 +522,6 @@ pub async fn shared_index_with_geo_documents() -> &'static Index<'static, Shared
.await
}
pub async fn shared_index_geojson_documents() -> &'static Index<'static, Shared> {
static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
INDEX
.get_or_init(|| async {
// Retrieved from https://gitlab-forge.din.developpement-durable.gouv.fr/pub/geomatique/descartes/d-map/-/blob/main/demo/examples/commons/countries.geojson?ref_type=heads
let server = Server::new_shared();
let index = server._index("SHARED_GEOJSON_DOCUMENTS").to_shared();
let countries = include_str!("../documents/geojson/assets/countries.json");
let countries = serde_json::from_str::<serde_json::Value>(countries).unwrap();
let (response, _code) = index._add_documents(Value(countries), Some("name")).await;
server.wait_task(response.uid()).await.succeeded();
let (response, _code) =
index._update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(response.uid()).await.succeeded();
index
})
.await
}
pub async fn shared_index_for_fragments() -> Index<'static, Shared> {
static INDEX: OnceCell<(Server<Shared>, String)> = OnceCell::const_new();
let (server, uid) = INDEX

View File
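
The deleted `shared_index_geojson_documents` follows the suite's shared-fixture pattern: a static `tokio::sync::OnceCell` builds the index once, and every test that awaits it reuses the same instance. A minimal sketch of the pattern with simplified types:

    use tokio::sync::OnceCell;

    static INDEX: OnceCell<String> = OnceCell::const_new();

    async fn shared_index() -> &'static String {
        INDEX
            .get_or_init(|| async {
                // Build once: add documents, update settings, await tasks.
                "SHARED_GEOJSON_DOCUMENTS".to_string()
            })
            .await
    }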

@@ -490,8 +490,6 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
max_indexing_threads: MaxThreads::from_str("2").unwrap(),
experimental_no_edition_2024_for_settings: false,
experimental_no_edition_2024_for_dumps: false,
experimental_no_edition_2024_for_prefix_post_processing: false,
experimental_no_edition_2024_for_facet_post_processing: false,
},
experimental_enable_metrics: false,
..Parser::parse_from(None as Option<&str>)

View File

@@ -1,3 +1,6 @@
use crate::common::encoder::Encoder;
use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value};
use crate::json;
use actix_web::test;
use meili_snap::{json_string, snapshot};
use meilisearch::Opt;
@@ -5,10 +8,6 @@ use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
use uuid::Uuid;
use crate::common::encoder::Encoder;
use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value};
use crate::json;
/// This is the basic usage of our API and every other tests uses the content-type application/json
#[actix_rt::test]
async fn add_documents_test_json_content_types() {
@@ -1853,7 +1852,7 @@ async fn add_documents_with_geo_field() {
.await;
snapshot!(code, @"200 OK");
// we are expecting docs 4 and 3 first as they have geo
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }),
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }),
@r###"
{
"hits": [
@@ -1885,8 +1884,7 @@ async fn add_documents_with_geo_field() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]"
"estimatedTotalHits": 4
}
"###);
}
@@ -1941,7 +1939,7 @@ async fn update_documents_with_geo_field() {
let (response, code) = index.search_post(json!({"sort": ["_geoPoint(10,0):asc"]})).await;
snapshot!(code, @"200 OK");
// we are expecting docs 4 and 3 first as they have geo
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }),
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }),
@r###"
{
"hits": [
@@ -1973,8 +1971,7 @@ async fn update_documents_with_geo_field() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]"
"estimatedTotalHits": 4
}
"###);
@@ -2046,7 +2043,7 @@ async fn update_documents_with_geo_field() {
let (response, code) = index.search_post(json!({"sort": ["_geoPoint(10,0):asc"]})).await;
snapshot!(code, @"200 OK");
// the search response should not have changed: we are expecting docs 4 and 3 first as they have geo
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }),
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }),
@r###"
{
"hits": [
@@ -2079,8 +2076,7 @@ async fn update_documents_with_geo_field() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]"
"estimatedTotalHits": 4
}
"###);
}

View File
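
The `{ ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }` arguments in the hunks above are snapshot redactions: volatile fields are rewritten to fixed placeholders before comparison, so removing the `requestUid` field also removes its redaction. A minimal sketch, assuming json_string! applies insta-style selectors:

    // Redact the non-deterministic field, then snapshot the stable rest.
    snapshot!(
        json_string!(response, { ".processingTimeMs" => "[time]" }),
        @r###"{"processingTimeMs":"[time]","estimatedTotalHits":4}"###
    );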

@@ -134,14 +134,14 @@ async fn get_all_documents_bad_filter() {
let (response, code) = index.get_all_documents_raw("?filter=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
snapshot!(json_string!(response), @r###"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `doggo`.\n1:6 doggo",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `doggo`.\n1:6 doggo",
"code": "invalid_document_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_document_filter"
}
"#);
"###);
let (response, code) = index.get_all_documents_raw("?filter=doggo=bernese").await;
snapshot!(code, @"400 Bad Request");
@@ -523,14 +523,14 @@ async fn delete_document_by_filter() {
// send bad filter
let (response, code) = index.delete_document_by_filter(json!({ "filter": "hello"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r#"
snapshot!(response, @r###"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `hello`.\n1:6 hello",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `hello`.\n1:6 hello",
"code": "invalid_document_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_document_filter"
}
"#);
"###);
// send empty filter
let (response, code) = index.delete_document_by_filter(json!({ "filter": ""})).await;
@@ -724,14 +724,14 @@ async fn fetch_document_by_filter() {
let (response, code) = index.fetch_documents(json!({ "filter": "cool doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r#"
snapshot!(response, @r###"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `cool doggo`.\n1:11 cool doggo",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `cool doggo`.\n1:11 cool doggo",
"code": "invalid_document_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_document_filter"
}
"#);
"###);
let (response, code) = index.fetch_documents(json!({ "filter": "doggo = bernese" })).await;
snapshot!(code, @"400 Bad Request");

File diff suppressed because one or more lines are too long

View File

@@ -1,547 +0,0 @@
{
"type": "Polygon",
"coordinates": [
[
[
3.11681,
50.63646
],
[
3.11945,
50.63488
],
[
3.12134,
50.63504
],
[
3.12064,
50.63127
],
[
3.12203,
50.62785
],
[
3.12389,
50.6262
],
[
3.12161,
50.62358
],
[
3.12547,
50.62114
],
[
3.12447,
50.61874
],
[
3.12288,
50.61988
],
[
3.12054,
50.61846
],
[
3.11846,
50.61754
],
[
3.11482,
50.6207
],
[
3.11232,
50.6188
],
[
3.10936,
50.61727
],
[
3.10822,
50.61765
],
[
3.10603,
50.61536
],
[
3.1041,
50.61596
],
[
3.10017,
50.6186
],
[
3.09688,
50.61714
],
[
3.09575,
50.61795
],
[
3.0891,
50.61532
],
[
3.08625,
50.61792
],
[
3.07948,
50.61428
],
[
3.07146,
50.6066
],
[
3.06819,
50.60918
],
[
3.06502,
50.61046
],
[
3.06223,
50.61223
],
[
3.05925,
50.60659
],
[
3.05463,
50.60077
],
[
3.04906,
50.6008
],
[
3.04726,
50.6035
],
[
3.04328,
50.60667
],
[
3.04155,
50.60417
],
[
3.03767,
50.60456
],
[
3.03528,
50.60538
],
[
3.03239,
50.60725
],
[
3.0254,
50.6111
],
[
3.02387,
50.6125
],
[
3.0248,
50.61344
],
[
3.02779,
50.61418
],
[
3.02414,
50.6169
],
[
3.02312,
50.61975
],
[
3.02172,
50.62082
],
[
3.01953,
50.62484
],
[
3.01811,
50.62529
],
[
3.01313,
50.62558
],
[
3.01385,
50.62695
],
[
3.00844,
50.62717
],
[
3.0056,
50.6267
],
[
3.00229,
50.62557
],
[
3.00119,
50.62723
],
[
2.99769,
50.62901
],
[
2.99391,
50.62732
],
[
2.98971,
50.63036
],
[
2.9862,
50.63328
],
[
2.98178,
50.63404
],
[
2.97917,
50.63499
],
[
2.97284,
50.63429
],
[
2.97174,
50.63365
],
[
2.97002,
50.63366
],
[
2.96956,
50.63506
],
[
2.97046,
50.6365
],
[
2.96878,
50.63833
],
[
2.97039,
50.6395
],
[
2.97275,
50.64183
],
[
2.97225,
50.64381
],
[
2.9745,
50.64442
],
[
2.97474,
50.64648
],
[
2.97091,
50.65108
],
[
2.96975,
50.65361
],
[
2.97061,
50.65513
],
[
2.96929,
50.65739
],
[
2.97072,
50.6581
],
[
2.97973,
50.66048
],
[
2.98369,
50.66123
],
[
2.9865,
50.65959
],
[
2.9896,
50.65845
],
[
2.9963,
50.65666
],
[
2.99903,
50.65552
],
[
3.00274,
50.65235
],
[
3.00714,
50.64887
],
[
3.01088,
50.64845
],
[
3.01318,
50.64541
],
[
3.01974,
50.63972
],
[
3.02317,
50.63813
],
[
3.02639,
50.63613
],
[
3.029,
50.63521
],
[
3.03414,
50.6382
],
[
3.03676,
50.63888
],
[
3.03686,
50.64147
],
[
3.03791,
50.64379
],
[
3.0409,
50.64577
],
[
3.04582,
50.64807
],
[
3.05132,
50.64866
],
[
3.05055,
50.64949
],
[
3.05244,
50.65055
],
[
3.05784,
50.64927
],
[
3.0596,
50.65105
],
[
3.06414,
50.65041
],
[
3.06705,
50.64936
],
[
3.07023,
50.64706
],
[
3.07203,
50.64355
],
[
3.07526,
50.64188
],
[
3.0758,
50.64453
],
[
3.07753,
50.64381
],
[
3.07861,
50.64542
],
[
3.08299,
50.64725
],
[
3.08046,
50.64912
],
[
3.08349,
50.65082
],
[
3.08354,
50.65155
],
[
3.08477,
50.65312
],
[
3.08542,
50.65654
],
[
3.08753,
50.65687
],
[
3.09032,
50.65602
],
[
3.09018,
50.65142
],
[
3.09278,
50.65086
],
[
3.09402,
50.64982
],
[
3.09908,
50.65146
],
[
3.10316,
50.65227
],
[
3.09726,
50.64723
],
[
3.09387,
50.64358
],
[
3.09357,
50.64095
],
[
3.09561,
50.64133
],
[
3.09675,
50.64018
],
[
3.09454,
50.63891
],
[
3.09627,
50.63693
],
[
3.09795,
50.63713
],
[
3.09919,
50.63576
],
[
3.10324,
50.6351
],
[
3.10613,
50.63532
],
[
3.10649,
50.63434
],
[
3.1109,
50.63525
],
[
3.11502,
50.63504
],
[
3.11681,
50.63646
]
]
]
}

View File

@@ -1,421 +0,0 @@
use meili_snap::{json_string, snapshot};
use crate::common::{shared_index_geojson_documents, Server};
use crate::json;
const LILLE: &str = include_str!("assets/lille.geojson");
#[actix_rt::test]
async fn basic_add_settings_and_geojson_documents() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) =
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }),
@r###"
{
"hits": [],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
}
"###);
let lille: serde_json::Value = serde_json::from_str(LILLE).unwrap();
let documents = json!([
{
"id": "missing",
},
{
"id": "point",
"_geojson": { "type": "Point", "coordinates": [1, 1] },
},
{
"id": "lille",
"_geojson": lille,
},
]);
let (task, _status_code) = index.add_documents(documents, None).await;
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 3,
"indexedDocuments": 3
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"#);
let (response, code) = index.get_all_documents_raw("?ids=missing,point").await;
snapshot!(code, @"200 OK");
snapshot!(response,
@r#"
{
"results": [
{
"id": "missing"
},
{
"id": "point",
"_geojson": {
"type": "Point",
"coordinates": [
1,
1
]
}
}
],
"offset": 0,
"limit": 20,
"total": 2
}
"#);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }),
@r###"
{
"hits": [
{
"id": "point",
"_geojson": {
"type": "Point",
"coordinates": [
1,
1
]
}
}
],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
}
#[actix_rt::test]
async fn basic_add_geojson_documents_and_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let lille: serde_json::Value = serde_json::from_str(LILLE).unwrap();
let documents = json!([
{
"id": "missing",
},
{
"id": "point",
"_geojson": { "type": "Point", "coordinates": [1, 1] },
},
{
"id": "lille",
"_geojson": lille,
},
]);
let (task, _status_code) = index.add_documents(documents, None).await;
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(response,
@r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 3,
"indexedDocuments": 3
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"#);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(response,
@r#"
{
"message": "Index `[uuid]`: Attribute `_geojson` is not filterable. This index does not have configured filterable attributes.\n14:15 _geoPolygon([0,0],[0,2],[2,2],[2,0])",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"#);
let (task, _status_code) =
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }),
@r###"
{
"hits": [
{
"id": "point",
"_geojson": {
"type": "Point",
"coordinates": [
1,
1
]
}
}
],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
}
#[actix_rt::test]
async fn add_and_remove_geojson() {
let server = Server::new_shared();
let index = server.unique_index();
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
let documents = json!([
{
"id": "missing",
},
{
"id": 0,
"_geojson": { "type": "Point", "coordinates": [1, 1] },
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
let (task, _) = index.delete_document(0).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
// add it back
let documents = json!([
{
"id": 0,
"_geojson": { "type": "Point", "coordinates": [1, 1] },
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
}
#[actix_rt::test]
async fn partial_update_geojson() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _) = index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
"id": 0,
"_geojson": { "type": "Point", "coordinates": [1, 1] },
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
let documents = json!([
{
"id": 0,
"_geojson": { "type": "Point", "coordinates": [0.5, 0.5] },
}
]);
let (task, _status_code) = index.update_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
let (response, _code) =
index.search_get("?filter=_geoPolygon([0.9,0.9],[0.9,2],[2,2],[2,0.9])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
}
#[actix_rt::test]
async fn geo_bounding_box() {
let index = shared_index_geojson_documents().await;
// The bounding box covers central Europe
let (response, code) =
index.search_get("?filter=_geoBoundingBox([50.53987503447863,21.43443989912143],[43.76393151539099,0.54979129195425])&attributesToRetrieve=name").await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
"name": "Austria"
},
{
"name": "Belgium"
},
{
"name": "Bosnia_and_Herzegovina"
},
{
"name": "Switzerland"
},
{
"name": "Czech_Republic"
},
{
"name": "Germany"
},
{
"name": "France"
},
{
"name": "Croatia"
},
{
"name": "Hungary"
},
{
"name": "Italy"
},
{
"name": "Luxembourg"
},
{
"name": "Netherlands"
},
{
"name": "Poland"
},
{
"name": "Romania"
},
{
"name": "Republic_of_Serbia"
},
{
"name": "Slovakia"
},
{
"name": "Slovenia"
}
],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 17,
"requestUid": "[uuid]"
}
"###);
// Between Russia and Alaska
let (response, code) = index
.search_get("?filter=_geoBoundingBox([70,-148],[63,152])&attributesToRetrieve=name")
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
{
"name": "Canada"
},
{
"name": "Russia"
},
{
"name": "United_States_of_America"
}
],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
}
"###);
}
#[actix_rt::test]
async fn bug_5904() {
// https://github.com/meilisearch/meilisearch/issues/5904
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) =
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(response.uid()).await.succeeded();
let geojson = json!({
"id": 1,
"_geojson": {
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
4.23914,
48.382893
]
},
"properties": {}
}
]
}
});
let (response, _code) = index.add_documents(geojson, Some("id")).await;
server.wait_task(response.uid()).await.succeeded();
}

View File
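
For reference on the deleted Lille asset above: a GeoJSON `Polygon` carries an array of linear rings, the first being the exterior boundary, and every ring must close by repeating its first position, which is why `[3.11681, 50.63646]` appears both first and last. A minimal sketch with serde_json:

    let polygon = serde_json::json!({
        "type": "Polygon",
        "coordinates": [[
            [3.11681, 50.63646],
            [3.11945, 50.63488],
            // ... remaining boundary positions ...
            [3.11681, 50.63646] // the ring closes on its starting position
        ]]
    });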

@@ -1,6 +1,5 @@
mod add_documents;
mod delete_documents;
mod errors;
mod geojson;
mod get_documents;
mod update_documents;

View File

@@ -1,4 +1,5 @@
use meili_snap::snapshot;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

View File

@@ -642,14 +642,14 @@ async fn filter_invalid_syntax_object() {
&json!({"filterableAttributes": ["title"]}),
&json!({"filter": "title & Glass"}),
|response, code| {
snapshot!(response, @r#"
snapshot!(response, @r###"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `title & Glass`.\n1:14 title & Glass",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"#);
"###);
snapshot!(code, @"400 Bad Request");
},
)
@@ -663,14 +663,14 @@ async fn filter_invalid_syntax_array() {
&json!({"filterableAttributes": ["title"]}),
&json!({"filter": ["title & Glass"]}),
|response, code| {
snapshot!(response, @r#"
snapshot!(response, @r###"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `title & Glass`.\n1:14 title & Glass",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"#);
"###);
snapshot!(code, @"400 Bad Request");
},
)
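The two hunks above track a change in the filter grammar's error message: `_geoPolygon` drops out of the list of operations the parser expects at that position. The failing input `title & Glass` stays the same; for contrast, a well-formed filter for that intent uses one of the listed operations (sketch):

    // Sketch: `=` is one of the accepted operations at that position.
    let (response, code) = index
        .search_post(json!({ "filter": "title = Glass" }))
        .await;
    snapshot!(code, @"200 OK");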


@@ -742,7 +742,7 @@ async fn vector_filter_all_embedders() {
"attributesToRetrieve": ["name"]
}))
.await;
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -762,10 +762,9 @@ async fn vector_filter_all_embedders() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]"
"estimatedTotalHits": 4
}
"###);
"#);
}
#[actix_rt::test]
@@ -840,7 +839,7 @@ async fn vector_filter_specific_embedder() {
"attributesToRetrieve": ["name"]
}))
.await;
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -860,10 +859,9 @@ async fn vector_filter_specific_embedder() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]"
"estimatedTotalHits": 4
}
"###);
"#);
}
#[actix_rt::test]
@@ -876,7 +874,7 @@ async fn vector_filter_user_provided() {
"attributesToRetrieve": ["name"]
}))
.await;
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -887,10 +885,9 @@ async fn vector_filter_user_provided() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
"#);
}
#[actix_rt::test]
@@ -903,7 +900,7 @@ async fn vector_filter_specific_fragment() {
"attributesToRetrieve": ["name"]
}))
.await;
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -917,10 +914,9 @@ async fn vector_filter_specific_fragment() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
"estimatedTotalHits": 2
}
"###);
"#);
let (value, _code) = index
.search_post(json!({
@@ -928,7 +924,7 @@ async fn vector_filter_specific_fragment() {
"attributesToRetrieve": ["name"]
}))
.await;
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -945,10 +941,9 @@ async fn vector_filter_specific_fragment() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
"#);
}
#[actix_rt::test]
@@ -981,17 +976,16 @@ async fn vector_filter_document_template_but_fragments_used() {
"attributesToRetrieve": ["name"]
}))
.await;
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(value, @r#"
{
"hits": [],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
"#);
}
#[actix_rt::test]
@@ -1029,7 +1023,7 @@ async fn vector_filter_document_template() {
"attributesToRetrieve": ["name"]
}))
.await;
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -1046,10 +1040,9 @@ async fn vector_filter_document_template() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
"#);
}
#[actix_rt::test]
@@ -1082,7 +1075,7 @@ async fn vector_filter_negation() {
"attributesToRetrieve": ["name"]
}))
.await;
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -1099,10 +1092,9 @@ async fn vector_filter_negation() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
"#);
}
#[actix_rt::test]
@@ -1115,7 +1107,7 @@ async fn vector_filter_or_combination() {
"attributesToRetrieve": ["name"]
}))
.await;
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -1132,10 +1124,9 @@ async fn vector_filter_or_combination() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
"#);
}
#[actix_rt::test]
@@ -1148,7 +1139,7 @@ async fn vector_filter_regenerate() {
"attributesToRetrieve": ["name"]
}))
.await;
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -1165,8 +1156,7 @@ async fn vector_filter_regenerate() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
"#);
}
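The recurring change in this file swaps `snapshot!(json_string!(value, { ... }), ...)` for a plain `snapshot!(value, ...)` while dropping the `requestUid` line from the expected JSON. Note that `processingTimeMs` still appears as `"[duration]"` in the plain form, which suggests the test `Value` wrapper applies a default set of redactions on its own; only extra per-test entries then need an explicit `json_string!` map. A sketch of the explicit form, for reference (field paths on the left are replaced by the placeholder on the right before comparison):

    // Sketch: redact volatile fields by JSON path before snapshotting.
    snapshot!(json_string!(response, {
        ".processingTimeMs" => "[duration]", // varies between runs
        ".requestUid" => "[uuid]",           // random per request
    }), @r#"..."#);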


@@ -33,7 +33,7 @@ async fn geo_bounding_box_with_string_and_number() {
}),
|response, code| {
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -63,8 +63,7 @@ async fn geo_bounding_box_with_string_and_number() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
"estimatedTotalHits": 2
}
"###);
},
@@ -85,7 +84,7 @@ async fn bug_4640() {
}),
|response, code| {
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -124,8 +123,7 @@ async fn bug_4640() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
},
@@ -149,7 +147,7 @@ async fn geo_asc_with_words() {
&json!({"q": "jean"}),
|response, code| {
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -181,8 +179,7 @@ async fn geo_asc_with_words() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
},
@@ -195,7 +192,7 @@ async fn geo_asc_with_words() {
&json!({"q": "bob"}),
|response, code| {
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -219,8 +216,7 @@ async fn geo_asc_with_words() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
"estimatedTotalHits": 2
}
"###);
},
@@ -233,7 +229,7 @@ async fn geo_asc_with_words() {
&json!({"q": "intel"}),
|response, code| {
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -249,8 +245,7 @@ async fn geo_asc_with_words() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
},
@@ -274,7 +269,7 @@ async fn geo_sort_with_words() {
&json!({"q": "jean", "sort": ["_geoPoint(0.0, 0.0):asc"]}),
|response, code| {
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -309,8 +304,7 @@ async fn geo_sort_with_words() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
},


@@ -1,4 +1,4 @@
use meili_snap::{json_string, snapshot};
use meili_snap::snapshot;
use once_cell::sync::Lazy;
use crate::common::index::Index;
@@ -148,7 +148,7 @@ async fn simple_search() {
)
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r#"
{
"hits": [
{
@@ -209,10 +209,9 @@ async fn simple_search() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"semanticHitCount": 0
}
"###);
"#);
snapshot!(response["semanticHitCount"], @"0");
let (response, code) = index
@@ -221,7 +220,7 @@ async fn simple_search() {
)
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r#"
{
"hits": [
{
@@ -285,10 +284,9 @@ async fn simple_search() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"semanticHitCount": 2
}
"###);
"#);
snapshot!(response["semanticHitCount"], @"2");
let (response, code) = index
@@ -297,7 +295,7 @@ async fn simple_search() {
)
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r#"
{
"hits": [
{
@@ -361,10 +359,9 @@ async fn simple_search() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"semanticHitCount": 3
}
"###);
"#);
snapshot!(response["semanticHitCount"], @"3");
}
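The three requests in this test appear to step a hybrid ratio so that 0, 2, and then all 3 of the returned hits come from the semantic side, as the trailing `semanticHitCount` assertions confirm. A sketch of a hybrid request (parameter values illustrative; `semanticRatio` and `embedder` are the documented hybrid options):

    // Sketch: blend keyword and semantic ranking 80/20.
    let (response, code) = index
        .search_post(json!({
            "q": "Captain",
            "hybrid": { "semanticRatio": 0.2, "embedder": "default" },
        }))
        .await;
    snapshot!(code, @"200 OK");
    // `semanticHitCount` then reports how many hits came from the semantic side.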


@@ -104,7 +104,7 @@ async fn simple_search() {
// english
index
.search(json!({"q": "Atta", "attributesToRetrieve": ["id"]}), |response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -115,8 +115,7 @@ async fn simple_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -126,7 +125,7 @@ async fn simple_search() {
// japanese
index
.search(json!({"q": "進撃", "attributesToRetrieve": ["id"]}), |response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -137,8 +136,7 @@ async fn simple_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -149,7 +147,7 @@ async fn simple_search() {
.search(
json!({"q": "進撃", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r#"
{
"hits": [
{
@@ -160,10 +158,9 @@ async fn simple_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
"#);
snapshot!(code, @"200 OK");
},
)
@@ -172,7 +169,7 @@ async fn simple_search() {
// chinese
index
.search(json!({"q": "进击", "attributesToRetrieve": ["id"]}), |response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r#"
{
"hits": [
{
@@ -183,10 +180,9 @@ async fn simple_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
"#);
snapshot!(code, @"200 OK");
})
.await;
@@ -226,7 +222,7 @@ async fn force_locales() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -237,8 +233,7 @@ async fn force_locales() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -251,7 +246,7 @@ async fn force_locales() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -262,8 +257,7 @@ async fn force_locales() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -306,7 +300,7 @@ async fn force_locales_with_pattern() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -317,8 +311,7 @@ async fn force_locales_with_pattern() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -331,7 +324,7 @@ async fn force_locales_with_pattern() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -342,8 +335,7 @@ async fn force_locales_with_pattern() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -384,15 +376,14 @@ async fn force_locales_with_pattern_nested() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
snapshot!(code, @"200 OK");
@@ -405,7 +396,7 @@ async fn force_locales_with_pattern_nested() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -416,8 +407,7 @@ async fn force_locales_with_pattern_nested() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -461,15 +451,14 @@ async fn force_different_locales_with_pattern() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
snapshot!(code, @"200 OK");
@@ -482,7 +471,7 @@ async fn force_different_locales_with_pattern() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -493,8 +482,7 @@ async fn force_different_locales_with_pattern() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -541,15 +529,14 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
snapshot!(code, @"200 OK");
@@ -562,7 +549,7 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"], "attributesToSearchOn": ["name_zh", "description_zh"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -573,8 +560,7 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -616,7 +602,7 @@ async fn auto_infer_locales_at_search() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -627,8 +613,7 @@ async fn auto_infer_locales_at_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -640,21 +625,20 @@ async fn auto_infer_locales_at_search() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
snapshot!(response, @r###"
{
"id": 853
"hits": [
{
"id": 853
}
],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
}
],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
"###);
snapshot!(code, @"200 OK");
},
)
@@ -664,7 +648,7 @@ async fn auto_infer_locales_at_search() {
.search(
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -675,8 +659,7 @@ async fn auto_infer_locales_at_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -719,15 +702,14 @@ async fn force_different_locales_with_pattern_nested() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
snapshot!(code, @"200 OK");
@@ -740,21 +722,20 @@ async fn force_different_locales_with_pattern_nested() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
{
"hits": [
snapshot!(response, @r###"
{
"id": 852
"hits": [
{
"id": 852
}
],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
}
],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
}
"###);
"###);
snapshot!(code, @"200 OK");
},
)
@@ -765,7 +746,7 @@ async fn force_different_locales_with_pattern_nested() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["ja"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -776,8 +757,7 @@ async fn force_different_locales_with_pattern_nested() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
@@ -819,15 +799,14 @@ async fn settings_change() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
snapshot!(code, @"200 OK");
@@ -840,15 +819,14 @@ async fn settings_change() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
snapshot!(code, @"200 OK");
@@ -884,15 +862,14 @@ async fn settings_change() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
snapshot!(code, @"200 OK");
@@ -905,15 +882,14 @@ async fn settings_change() {
.search(
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
snapshot!(code, @"200 OK");
@@ -1188,7 +1164,7 @@ async fn swedish_search() {
// infer swedish
index
.search(json!({"q": "trä", "attributesToRetrieve": ["product"]}), |response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -1202,8 +1178,7 @@ async fn swedish_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
"estimatedTotalHits": 2
}
"###);
snapshot!(code, @"200 OK");
@@ -1212,7 +1187,7 @@ async fn swedish_search() {
index
.search(json!({"q": "tra", "attributesToRetrieve": ["product"]}), |response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -1226,8 +1201,7 @@ async fn swedish_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
"estimatedTotalHits": 2
}
"###);
snapshot!(code, @"200 OK");
@@ -1239,7 +1213,7 @@ async fn swedish_search() {
.search(
json!({"q": "trä", "locales": ["swe"], "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -1253,8 +1227,7 @@ async fn swedish_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
"estimatedTotalHits": 2
}
"###);
snapshot!(code, @"200 OK");
@@ -1265,7 +1238,7 @@ async fn swedish_search() {
.search(
json!({"q": "tra", "locales": ["swe"], "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -1279,8 +1252,7 @@ async fn swedish_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
"estimatedTotalHits": 2
}
"###);
snapshot!(code, @"200 OK");
@@ -1315,21 +1287,20 @@ async fn german_search() {
.search(
json!({"q": "kulturalität", "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
"hits": [
{
"product": "Interkulturalität"
}
],
"query": "kulturalität",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"product": "Interkulturalität"
}
"###);
],
"query": "kulturalität",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
},
)
@@ -1339,7 +1310,7 @@ async fn german_search() {
.search(
json!({"q": "organisation", "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -1350,8 +1321,7 @@ async fn german_search() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
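All of the hunks in this file drive the `locales` search parameter (and its inference from the query) against localized attributes configured in the settings. The settings side, for reference, pairs attribute patterns with locales; the sketch below is illustrative and not copied from the elided parts of this diff:

    // Sketch: route *_zh attributes through the Mandarin tokenizer.
    let (task, _code) = index
        .update_settings(json!({
            "localizedAttributes": [
                { "attributePatterns": ["*_zh"], "locales": ["cmn"] }
            ]
        }))
        .await;
    server.wait_task(task.uid()).await.succeeded();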


@@ -1044,7 +1044,7 @@ async fn test_degraded_score_details() {
}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -1103,8 +1103,7 @@ async fn test_degraded_score_details() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
},
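`test_degraded_score_details` covers the search-cutoff path, where ranking is interrupted partway and the score details report the skipped rules. The cutoff is an index setting; a sketch of forcing degradation in a test follows (a budget of 0 ms makes the cutoff deterministic, though that specific value is an assumption here):

    // Sketch: make every search hit the time budget immediately.
    let (task, _code) = index
        .update_settings(json!({ "searchCutoffMs": 0 }))
        .await;
    server.wait_task(task.uid()).await.succeeded();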


@@ -93,14 +93,13 @@ async fn federation_empty_list() {
let (response, code) = server.multi_search(json!({"federation": {}, "queries": []})).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [],
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
}
@@ -165,7 +164,7 @@ async fn simple_search_single_index() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
[
{
"indexUid": "SHARED_DOCUMENTS",
@@ -183,8 +182,7 @@ async fn simple_search_single_index() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
},
{
"indexUid": "SHARED_DOCUMENTS",
@@ -202,8 +200,7 @@ async fn simple_search_single_index() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
]
"###);
@@ -220,7 +217,7 @@ async fn federation_single_search_single_index() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -240,8 +237,7 @@ async fn federation_single_search_single_index() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
}
@@ -260,7 +256,7 @@ async fn federation_multiple_search_single_index() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -312,8 +308,7 @@ async fn federation_multiple_search_single_index() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 5,
"requestUid": "[uuid]"
"estimatedTotalHits": 5
}
"###);
}
@@ -330,7 +325,7 @@ async fn federation_two_search_single_index() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -363,8 +358,7 @@ async fn federation_two_search_single_index() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
"estimatedTotalHits": 2
}
"###);
}
@@ -463,7 +457,7 @@ async fn simple_search_two_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
[
{
"indexUid": "SHARED_DOCUMENTS",
@@ -481,8 +475,7 @@ async fn simple_search_two_indexes() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
},
{
"indexUid": "SHARED_NESTED_DOCUMENTS",
@@ -523,8 +516,7 @@ async fn simple_search_two_indexes() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]"
"estimatedTotalHits": 2
}
]
"###);
@@ -543,7 +535,7 @@ async fn federation_two_search_two_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -604,8 +596,7 @@ async fn federation_two_search_two_indexes() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
}
@@ -635,7 +626,7 @@ async fn federation_multiple_search_multiple_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -804,8 +795,7 @@ async fn federation_multiple_search_multiple_indexes() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 12,
"requestUid": "[uuid]"
"estimatedTotalHits": 12
}
"###);
}
@@ -1111,7 +1101,7 @@ async fn federation_filter() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
@@ -1150,8 +1140,7 @@ async fn federation_filter() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
}
@@ -1188,7 +1177,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -1277,8 +1266,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]"
"estimatedTotalHits": 4
}
"###);
@@ -1290,7 +1278,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -1365,8 +1353,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
}
@@ -1462,7 +1449,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -1551,8 +1538,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]"
"estimatedTotalHits": 4
}
"###);
@@ -1565,7 +1551,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -1640,8 +1626,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
}
@@ -1719,7 +1704,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -1846,8 +1831,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 10,
"requestUid": "[uuid]"
"estimatedTotalHits": 10
}
"###);
@@ -1860,7 +1844,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -1931,8 +1915,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 6,
"requestUid": "[uuid]"
"estimatedTotalHits": 6
}
"###);
}
@@ -1953,7 +1936,7 @@ async fn federation_sort_different_ranking_rules() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -2080,8 +2063,7 @@ async fn federation_sort_different_ranking_rules() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 10,
"requestUid": "[uuid]"
"estimatedTotalHits": 10
}
"###);
@@ -2160,7 +2142,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -2287,8 +2269,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 10,
"requestUid": "[uuid]"
"estimatedTotalHits": 10
}
"###);
@@ -2301,7 +2282,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -2372,8 +2353,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 6,
"requestUid": "[uuid]"
"estimatedTotalHits": 6
}
"###);
}
@@ -2444,7 +2424,7 @@ async fn federation_limit_offset() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -2547,8 +2527,7 @@ async fn federation_limit_offset() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 12,
"requestUid": "[uuid]"
"estimatedTotalHits": 12
}
"###);
}
@@ -2570,7 +2549,7 @@ async fn federation_limit_offset() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -2585,8 +2564,7 @@ async fn federation_limit_offset() {
"processingTimeMs": "[duration]",
"limit": 1,
"offset": 0,
"estimatedTotalHits": 12,
"requestUid": "[uuid]"
"estimatedTotalHits": 12
}
"###);
}
@@ -2608,7 +2586,7 @@ async fn federation_limit_offset() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -2695,8 +2673,7 @@ async fn federation_limit_offset() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 2,
"estimatedTotalHits": 12,
"requestUid": "[uuid]"
"estimatedTotalHits": 12
}
"###);
}
@@ -2718,14 +2695,13 @@ async fn federation_limit_offset() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [],
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 12,
"estimatedTotalHits": 12,
"requestUid": "[uuid]"
"estimatedTotalHits": 12
}
"###);
}
@@ -2755,7 +2731,7 @@ async fn federation_formatting() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -2885,8 +2861,7 @@ async fn federation_formatting() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 12,
"requestUid": "[uuid]"
"estimatedTotalHits": 12
}
"###);
}
@@ -2908,7 +2883,7 @@ async fn federation_formatting() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -2923,8 +2898,7 @@ async fn federation_formatting() {
"processingTimeMs": "[duration]",
"limit": 1,
"offset": 0,
"estimatedTotalHits": 12,
"requestUid": "[uuid]"
"estimatedTotalHits": 12
}
"###);
}
@@ -2946,7 +2920,7 @@ async fn federation_formatting() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -3033,8 +3007,7 @@ async fn federation_formatting() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 2,
"estimatedTotalHits": 12,
"requestUid": "[uuid]"
"estimatedTotalHits": 12
}
"###);
}
@@ -3056,14 +3029,13 @@ async fn federation_formatting() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [],
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 12,
"estimatedTotalHits": 12,
"requestUid": "[uuid]"
"estimatedTotalHits": 12
}
"###);
}
@@ -3126,7 +3098,7 @@ async fn federation_null_weight() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -3165,8 +3137,7 @@ async fn federation_null_weight() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
}
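`federation_null_weight` relies on per-query federation options: a query's ranking scores are scaled by its `weight`, so `0.0` ranks its hits last without removing them. Request sketch (index name and queries illustrative):

    // Sketch: demote the second query's hits via a zero weight.
    let (response, code) = server
        .multi_search(json!({
            "federation": {},
            "queries": [
                { "indexUid": "movies", "q": "batman" },
                { "indexUid": "movies", "q": "badman",
                  "federationOptions": { "weight": 0.0 } },
            ]
        }))
        .await;
    snapshot!(code, @"200 OK");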
@@ -3273,7 +3244,7 @@ async fn federation_federated_contains_facets() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -3309,8 +3280,7 @@ async fn federation_federated_contains_facets() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
@@ -3518,7 +3488,7 @@ async fn federation_vector_single_index() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -3562,8 +3532,7 @@ async fn federation_vector_single_index() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"semanticHitCount": 4,
"requestUid": "[uuid]"
"semanticHitCount": 4
}
"###);
@@ -3576,7 +3545,7 @@ async fn federation_vector_single_index() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -3620,8 +3589,7 @@ async fn federation_vector_single_index() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"semanticHitCount": 4,
"requestUid": "[uuid]"
"semanticHitCount": 4
}
"###);
@@ -3635,7 +3603,7 @@ async fn federation_vector_single_index() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -3683,8 +3651,7 @@ async fn federation_vector_single_index() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"semanticHitCount": 3,
"requestUid": "[uuid]"
"semanticHitCount": 3
}
"###);
}
@@ -3736,7 +3703,7 @@ async fn federation_vector_two_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r#"
{
"hits": [
{
@@ -3955,10 +3922,9 @@ async fn federation_vector_two_indexes() {
0.6
]
},
"semanticHitCount": 6,
"requestUid": "[uuid]"
"semanticHitCount": 6
}
"###);
"#);
// hybrid search, distinct embedder
let (response, code) = server
@@ -3968,7 +3934,7 @@ async fn federation_vector_two_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r#"
{
"hits": [
{
@@ -4195,8 +4161,7 @@ async fn federation_vector_two_indexes() {
0.6
]
},
"semanticHitCount": 8,
"requestUid": "[uuid]"
"semanticHitCount": 8
}
"#);
}
@@ -4244,7 +4209,7 @@ async fn federation_facets_different_indexes_same_facet() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -4415,8 +4380,7 @@ async fn federation_facets_different_indexes_same_facet() {
},
"stats": {}
}
},
"requestUid": "[uuid]"
}
}
"###);
@@ -4435,7 +4399,7 @@ async fn federation_facets_different_indexes_same_facet() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -4577,8 +4541,7 @@ async fn federation_facets_different_indexes_same_facet() {
"Shazam!": 1
}
},
"facetStats": {},
"requestUid": "[uuid]"
"facetStats": {}
}
"###);
@@ -4598,7 +4561,7 @@ async fn federation_facets_different_indexes_same_facet() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -4723,8 +4686,7 @@ async fn federation_facets_different_indexes_same_facet() {
"distribution": {},
"stats": {}
}
},
"requestUid": "[uuid]"
}
}
"###);
}
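`facetsByIndex` asks the federation layer to return facet distributions and stats per index; the flat `facetDistribution`/`facetStats` pair in the second snapshot above is presumably the merged form, enabled by `federation.mergeFacets` in the (elided) request. Request sketch for the per-index shape (index and facet names illustrative):

    // Sketch: per-index facets for a federated search.
    let (response, code) = server
        .multi_search(json!({
            "federation": {
                "facetsByIndex": {
                    "movies": ["title"],
                    "comics": ["badge"],
                }
            },
            "queries": [
                { "indexUid": "movies", "q": "" },
                { "indexUid": "comics", "q": "" },
            ]
        }))
        .await;
    snapshot!(code, @"200 OK");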
@@ -4786,7 +4748,7 @@ async fn federation_facets_same_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -4844,8 +4806,7 @@ async fn federation_facets_same_indexes() {
}
}
}
},
"requestUid": "[uuid]"
}
}
"###);
@@ -4861,7 +4822,7 @@ async fn federation_facets_same_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -4947,8 +4908,7 @@ async fn federation_facets_same_indexes() {
}
}
}
},
"requestUid": "[uuid]"
}
}
"###);
@@ -4965,7 +4925,7 @@ async fn federation_facets_same_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -5027,8 +4987,7 @@ async fn federation_facets_same_indexes() {
"min": 2.0,
"max": 6.0
}
},
"requestUid": "[uuid]"
}
}
"###);
}
@@ -5081,7 +5040,7 @@ async fn federation_inconsistent_merge_order() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -5258,8 +5217,7 @@ async fn federation_inconsistent_merge_order() {
},
"stats": {}
}
},
"requestUid": "[uuid]"
}
}
"###);
@@ -5306,7 +5264,7 @@ async fn federation_inconsistent_merge_order() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -5446,8 +5404,7 @@ async fn federation_inconsistent_merge_order() {
"Batman Returns": 1
}
},
"facetStats": {},
"requestUid": "[uuid]"
"facetStats": {}
}
"###);
}


@@ -2,7 +2,8 @@ use std::sync::Arc;
use actix_http::StatusCode;
use meili_snap::{json_string, snapshot};
use wiremock::matchers::{method, path, AnyMatcher};
use wiremock::matchers::method;
use wiremock::matchers::{path, AnyMatcher};
use wiremock::{Mock, MockServer, Request, ResponseTemplate};
use crate::common::{Server, Value, SCORE_DOCUMENTS};
@@ -229,7 +230,7 @@ async fn remote_sharding() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -287,13 +288,12 @@ async fn remote_sharding() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 5,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -351,13 +351,12 @@ async fn remote_sharding() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 5,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
let (response, _status_code) = ms2.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -415,7 +414,6 @@ async fn remote_sharding() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 5,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -597,7 +595,7 @@ async fn remote_sharding_retrieve_vectors() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -622,10 +620,9 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
"#);
// multi vector search: two local queries, one remote
@@ -673,7 +670,7 @@ async fn remote_sharding_retrieve_vectors() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -698,7 +695,6 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"#);
@@ -749,7 +745,7 @@ async fn remote_sharding_retrieve_vectors() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -774,7 +770,6 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"#);
@@ -825,7 +820,7 @@ async fn remote_sharding_retrieve_vectors() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -845,10 +840,9 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
"#);
// multi vector search: no local queries, all remote
@@ -896,7 +890,7 @@ async fn remote_sharding_retrieve_vectors() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -920,10 +914,9 @@ async fn remote_sharding_retrieve_vectors() {
0.2
]
},
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
"#);
}
#[actix_rt::test]
@@ -1141,7 +1134,7 @@ async fn error_no_weighted_score() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -1169,7 +1162,6 @@ async fn error_no_weighted_score() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote hit does not contain `._federation.weightedScoreValues`\n - hint: check that the remote instance is a Meilisearch instance running the same version",
@@ -1281,7 +1273,7 @@ async fn error_bad_response() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -1309,7 +1301,6 @@ async fn error_bad_response() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "could not parse response from the remote host as a federated search response:\n - response from remote: <html>Returning an HTML page</html>\n - hint: check that the remote instance is a Meilisearch instance running the same version",
@@ -1414,7 +1405,7 @@ async fn error_bad_request() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -1442,7 +1433,6 @@ async fn error_bad_request() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.queries[1]`: Index `nottest` not found.\",\"code\":\"index_not_found\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#index_not_found\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -1552,7 +1542,7 @@ async fn error_bad_request_facets_by_index() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -1586,7 +1576,6 @@ async fn error_bad_request_facets_by_index() {
"stats": {}
}
},
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test0`: Index `test0` not found.\\n - Note: index `test0` is not used in queries\",\"code\":\"index_not_found\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#index_not_found\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -1699,7 +1688,7 @@ async fn error_bad_request_facets_by_index_facet() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -1738,7 +1727,6 @@ async fn error_bad_request_facets_by_index_facet() {
"stats": {}
}
},
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test`: Invalid facet distribution: Attribute `id` is not filterable. This index does not have configured filterable attributes.\\n - Note: index `test` used in `.queries[1]`\",\"code\":\"invalid_multi_search_facets\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#invalid_multi_search_facets\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -2048,7 +2036,7 @@ async fn error_remote_404() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2076,7 +2064,6 @@ async fn error_remote_404() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 404:\n - response from remote: null\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -2089,7 +2076,7 @@ async fn error_remote_404() {
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2127,7 +2114,6 @@ async fn error_remote_404() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -2259,7 +2245,7 @@ async fn error_remote_sharding_auth() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2287,7 +2273,6 @@ async fn error_remote_sharding_auth() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1-notsearch": {
"message": "could not authenticate against the remote host\n - hint: check that the remote instance was registered with a valid API key having the `search` action",
@@ -2421,7 +2406,7 @@ async fn remote_sharding_auth() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2469,7 +2454,6 @@ async fn remote_sharding_auth() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -2572,7 +2556,7 @@ async fn error_remote_500() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2600,7 +2584,6 @@ async fn error_remote_500() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 500:\n - response from remote: {\"error\":\"provoked error\",\"code\":\"test_error\",\"link\":\"https://docs.meilisearch.com/errors#test_error\"}",
@@ -2614,7 +2597,7 @@ async fn error_remote_500() {
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
// the response is full because we queried the instance that works
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2652,7 +2635,6 @@ async fn error_remote_500() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -2756,7 +2738,7 @@ async fn error_remote_500_once() {
// Meilisearch is tolerant of a single failure
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2794,13 +2776,12 @@ async fn error_remote_500_once() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2838,7 +2819,6 @@ async fn error_remote_500_once() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);

View File

@@ -1,8 +1,7 @@
use meili_snap::{json_string, snapshot};
use super::shared_index_with_documents;
use crate::common::Server;
use crate::json;
use meili_snap::{json_string, snapshot};
#[actix_rt::test]
async fn default_search_should_return_estimated_total_hit() {
@@ -175,8 +174,7 @@ async fn test_issue_5274() {
snapshot!(json_string!(rep, {
".processingTimeMs" => "[ignored]",
".requestUid" => "[uuid]"
}), @r###"
}), @r#"
{
"hits": [
{
@@ -190,8 +188,7 @@ async fn test_issue_5274() {
"hitsPerPage": 1,
"page": 1,
"totalPages": 1,
"totalHits": 1,
"requestUid": "[uuid]"
"totalHits": 1
}
"###);
"#);
}

View File

@@ -1,7 +1,6 @@
use meili_snap::{json_string, snapshot};
use crate::common::Server;
use crate::json;
use meili_snap::{json_string, snapshot};
#[actix_rt::test]
async fn set_reset_chat_issue_5772() {

View File

@@ -339,14 +339,14 @@ async fn filter_invalid_syntax_object() {
index
.similar(json!({"id": 287947, "filter": "title & Glass", "embedder": "manual"}), |response, code| {
snapshot!(response, @r#"
snapshot!(response, @r###"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `title & Glass`.\n1:14 title & Glass",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
"code": "invalid_similar_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}
"#);
"###);
snapshot!(code, @"400 Bad Request");
})
.await;
@@ -377,14 +377,14 @@ async fn filter_invalid_syntax_array() {
index
.similar(json!({"id": 287947, "filter": ["title & Glass"], "embedder": "manual"}), |response, code| {
snapshot!(response, @r#"
snapshot!(response, @r###"
{
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `title & Glass`.\n1:14 title & Glass",
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
"code": "invalid_similar_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}
"#);
"###);
snapshot!(code, @"400 Bad Request");
})
.await;

View File

@@ -97,7 +97,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`, `indexCompaction`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@@ -108,7 +108,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`, `indexCompaction`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@@ -119,7 +119,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`, `indexCompaction`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

View File

@@ -43,7 +43,7 @@ async fn version_too_old() {
std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.22.1");
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.21.0");
}
#[actix_rt::test]
@@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.22.2 is higher than the Meilisearch version 1.22.1. Downgrade is not supported");
snapshot!(err, @"Database version 1.21.1 is higher than the Meilisearch version 1.21.0. Downgrade is not supported");
}
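The two tests above pin the boundaries of the dumpless upgrade: a database below the supported floor must go through a dump, and a database version higher than the binary's is refused outright. A small sketch of the MAJOR.MINOR.PATCH comparison involved (illustrative only, not Meilisearch's actual `parse_version`):

fn parse_version(s: &str) -> Option<(u32, u32, u32)> {
    let mut parts = s.splitn(3, '.').map(|p| p.parse::<u32>().ok());
    Some((parts.next()??, parts.next()??, parts.next()??))
}

fn main() {
    assert_eq!(parse_version("1.11.9999"), Some((1, 11, 9999)));
    // Lexicographic tuple order gives the expected semver comparison:
    assert!(parse_version("1.22.2") > parse_version("1.22.1")); // downgrade refused
}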
#[actix_rt::test]

View File

@@ -1,5 +1,6 @@
---
source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
snapshot_kind: text
---
{
"hits": [
@@ -20,6 +21,5 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}

View File

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.22.1"
"upgradeTo": "v1.21.0"
},
"stats": {
"totalNbTasks": 1,

View File

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.22.1"
"upgradeTo": "v1.21.0"
},
"stats": {
"totalNbTasks": 1,

View File

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.22.1"
"upgradeTo": "v1.21.0"
},
"stats": {
"totalNbTasks": 1,

View File

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.22.1"
"upgradeTo": "v1.21.0"
},
"error": null,
"duration": "[duration]",

View File

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.22.1"
"upgradeTo": "v1.21.0"
},
"error": null,
"duration": "[duration]",

View File

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.22.1"
"upgradeTo": "v1.21.0"
},
"error": null,
"duration": "[duration]",

View File

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.22.1"
"upgradeTo": "v1.21.0"
},
"stats": {
"totalNbTasks": 1,

View File

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.22.1"
"upgradeTo": "v1.21.0"
},
"error": null,
"duration": "[duration]",

View File

@@ -294,7 +294,7 @@ async fn check_the_index_features(server: &Server) {
let (results, _status) =
kefir.search_post(json!({ "sort": ["age:asc"], "filter": "surname = kefirounet" })).await;
snapshot!(json_string!(results, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), name: "search_with_sort_and_filter");
snapshot!(results, name: "search_with_sort_and_filter");
// ensuring we can get the vectors and their `regenerate` is still good.
let (results, _status) = kefir.search_post(json!({"retrieveVectors": true})).await;

View File

@@ -323,7 +323,7 @@ async fn binary_quantize_clear_documents() {
// Make sure the vector DB has been cleared
let (documents, _code) =
index.search_post(json!({ "hybrid": { "embedder": "manual" }, "vector": [1, 1, 1] })).await;
snapshot!(json_string!(documents, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(documents, @r#"
{
"hits": [],
"query": "",
@@ -331,10 +331,9 @@ async fn binary_quantize_clear_documents() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]",
"semanticHitCount": 0
}
"###);
"#);
}
#[actix_rt::test]

View File

@@ -257,7 +257,7 @@ async fn search_with_vector() {
json!({"vector": [1.0, 1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "rest"}, "limit": 1}
)).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -270,10 +270,9 @@ async fn search_with_vector() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"###);
"#);
}
#[actix_rt::test]
@@ -289,7 +288,7 @@ async fn search_with_media() {
))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -303,10 +302,9 @@ async fn search_with_media() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"###);
"#);
}
#[actix_rt::test]
@@ -392,7 +390,7 @@ async fn search_with_query() {
))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -406,10 +404,9 @@ async fn search_with_query() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"###);
"#);
}
#[actix_rt::test]
@@ -2079,7 +2076,7 @@ async fn composite() {
json!({"vector": [1.0, 1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "rest"}, "limit": 1}
)).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -2092,10 +2089,9 @@ async fn composite() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"###);
"#);
let (value, code) = index
.search_post(
@@ -2104,7 +2100,7 @@ async fn composite() {
)
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r#"
snapshot!(value, @r#"
{
"hits": [
{
@@ -2118,7 +2114,6 @@ async fn composite() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"#);

View File

@@ -689,7 +689,7 @@ async fn clear_documents() {
// Make sure the vector DB has been cleared
let (documents, _code) =
index.search_post(json!({ "vector": [1, 1, 1], "hybrid": {"embedder": "manual"} })).await;
snapshot!(json_string!(documents, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(documents, @r#"
{
"hits": [],
"query": "",
@@ -697,10 +697,9 @@ async fn clear_documents() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]",
"semanticHitCount": 0
}
"###);
"#);
}
#[actix_rt::test]
@@ -744,7 +743,7 @@ async fn add_remove_one_vector_4588() {
json!({"vector": [1, 1, 1], "hybrid": {"semanticRatio": 1.0, "embedder": "manual"} }),
)
.await;
snapshot!(json_string!(documents, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(documents, @r#"
{
"hits": [
{
@@ -757,10 +756,9 @@ async fn add_remove_one_vector_4588() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"###);
"#);
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })

View File

@@ -1,5 +1,3 @@
#![allow(clippy::result_large_err)]
use std::fs::{read_dir, read_to_string, remove_file, File};
use std::io::{BufWriter, Write as _};
use std::path::PathBuf;
@@ -126,7 +124,7 @@ enum Command {
/// before running the copy and compaction. This way the current indexation must finish before
/// the compaction operation can start. Once the compaction is done, the big index is replaced
/// by the compacted one and the mutable transaction is released.
IndexCompaction { index_name: String },
CompactIndex { index_name: String },
/// Uses the hair dryer to make the dedicated pages hot in cache
///
@@ -165,7 +163,7 @@ fn main() -> anyhow::Result<()> {
let target_version = parse_version(&target_version).context("While parsing `--target-version`. Make sure `--target-version` is in the format MAJOR.MINOR.PATCH")?;
OfflineUpgrade { db_path, current_version: detected_version, target_version }.upgrade()
}
Command::IndexCompaction { index_name } => compact_index(db_path, &index_name),
Command::CompactIndex { index_name } => compact_index(db_path, &index_name),
Command::HairDryer { index_name, index_part } => {
hair_dryer(db_path, &index_name, &index_part)
}

View File

@@ -19,7 +19,6 @@ bstr = "1.12.0"
bytemuck = { version = "1.23.1", features = ["extern_crate_alloc"] }
byteorder = "1.5.0"
charabia = { version = "0.9.7", default-features = false }
cellulite = "0.3.1-nested-rtxns"
concat-arrays = "0.1.2"
convert_case = "0.8.0"
crossbeam-channel = "0.5.15"
@@ -28,13 +27,12 @@ either = { version = "1.15.0", features = ["serde"] }
flatten-serde-json = { path = "../flatten-serde-json" }
fst = "0.4.7"
fxhash = "0.2.1"
geojson = "0.24.2"
geoutils = "0.5.1"
grenad = { version = "0.5.0", default-features = false, features = [
"rayon",
"tempfile",
] }
heed = { version = "0.22.1-nested-rtxns", default-features = false, features = [
heed = { version = "0.22.0", default-features = false, features = [
"serde-json",
"serde-bincode",
] }
@@ -89,8 +87,8 @@ rhai = { version = "1.22.2", features = [
"no_time",
"sync",
] }
arroy = "0.6.4-nested-rtxns"
hannoy = { version = "0.0.9-nested-rtxns", features = ["arroy"] }
arroy = "0.6.3"
hannoy = { version = "0.0.8", features = ["arroy"] }
rand = "0.8.5"
tracing = "0.1.41"
ureq = { version = "2.12.1", features = ["json"] }
@@ -98,7 +96,7 @@ url = "2.5.4"
hashbrown = "0.15.4"
bumpalo = "3.18.1"
bumparaw-collections = "0.1.4"
steppe = { version = "0.4", default-features = false }
steppe = { version = "0.4.0", default-features = false }
thread_local = "1.1.9"
allocator-api2 = "0.3.0"
rustc-hash = "2.1.1"
@@ -118,8 +116,6 @@ twox-hash = { version = "2.1.1", default-features = false, features = [
"xxhash3_64",
"xxhash64",
] }
geo-types = "0.7.16"
zerometry = "0.3.0"
[dev-dependencies]
mimalloc = { version = "0.1.47", default-features = false }

View File

@@ -11,4 +11,3 @@ const fn parse_u32(s: &str) -> u32 {
pub const RESERVED_VECTORS_FIELD_NAME: &str = "_vectors";
pub const RESERVED_GEO_FIELD_NAME: &str = "_geo";
pub const RESERVED_GEOJSON_FIELD_NAME: &str = "_geojson";

View File

@@ -48,7 +48,6 @@ pub enum PrimaryKey<'a> {
Nested { name: &'a str },
}
#[allow(clippy::large_enum_variant)]
pub enum DocumentIdExtractionError {
InvalidDocumentId(UserError),
MissingDocumentId,

View File

@@ -10,26 +10,17 @@ use rhai::EvalAltResult;
use serde_json::Value;
use thiserror::Error;
use crate::constants::{RESERVED_GEOJSON_FIELD_NAME, RESERVED_GEO_FIELD_NAME};
use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::documents::{self, DocumentsBatchCursorError};
use crate::thread_pool_no_abort::PanicCatched;
use crate::vector::settings::EmbeddingSettings;
use crate::{CriterionError, DocumentId, FieldId, Object, SortError};
pub fn is_reserved_keyword(keyword: &str) -> bool {
[
RESERVED_GEO_FIELD_NAME,
RESERVED_GEOJSON_FIELD_NAME,
"_geoDistance",
"_geoPoint",
"_geoRadius",
"_geoBoundingBox",
"_geoPolygon",
]
.contains(&keyword)
[RESERVED_GEO_FIELD_NAME, "_geoDistance", "_geoPoint", "_geoRadius", "_geoBoundingBox"]
.contains(&keyword)
}
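Both versions of `is_reserved_keyword` are plain membership tests over the fields and pseudo-fields the engine claims for itself. With the constants inlined (`RESERVED_GEO_FIELD_NAME` is "_geo", `RESERVED_GEOJSON_FIELD_NAME` is "_geojson", per the `constants.rs` hunk elsewhere in this diff), the newer list behaves like this self-contained sketch:

fn is_reserved_keyword(keyword: &str) -> bool {
    [
        "_geo",
        "_geojson",
        "_geoDistance",
        "_geoPoint",
        "_geoRadius",
        "_geoBoundingBox",
        "_geoPolygon",
    ]
    .contains(&keyword)
}

fn main() {
    assert!(is_reserved_keyword("_geoPolygon"));
    assert!(!is_reserved_keyword("title"));
}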
#[allow(clippy::large_enum_variant)]
#[derive(Error, Debug)]
pub enum Error {
#[error("internal: {0}.")]
@@ -89,8 +80,6 @@ pub enum InternalError {
#[error(transparent)]
HannoyError(#[from] hannoy::Error),
#[error(transparent)]
CelluliteError(#[from] cellulite::Error),
#[error(transparent)]
VectorEmbeddingError(#[from] crate::vector::Error),
}
@@ -110,12 +99,6 @@ pub enum SerializationError {
InvalidNumberSerialization,
}
impl From<cellulite::Error> for Error {
fn from(error: cellulite::Error) -> Self {
Self::UserError(UserError::CelluliteError(error))
}
}
#[derive(Error, Debug)]
pub enum FieldIdMapMissingEntry {
#[error("unknown field id {field_id} coming from the {process} process")]
@@ -124,13 +107,8 @@ pub enum FieldIdMapMissingEntry {
FieldName { field_name: String, process: &'static str },
}
#[allow(clippy::large_enum_variant)]
#[derive(Error, Debug)]
pub enum UserError {
#[error(transparent)]
CelluliteError(#[from] cellulite::Error),
#[error("Malformed geojson: {0}")]
MalformedGeojson(serde_json::Error),
#[error("A document cannot contain more than 65,535 fields.")]
AttributeLimitReached,
#[error(transparent)]
@@ -175,8 +153,6 @@ and can not be more than 511 bytes.", .document_id.to_string()
},
#[error(transparent)]
InvalidGeoField(#[from] Box<GeoError>),
#[error(transparent)]
GeoJsonError(#[from] geojson::Error),
#[error("Invalid vector dimensions: expected: `{}`, found: `{}`.", .expected, .found)]
InvalidVectorDimensions { expected: usize, found: usize },
#[error("Invalid vector dimensions in document with id `{document_id}` in `._vectors.{embedder_name}`.\n - note: embedding #{embedding_index} has dimensions {found}\n - note: embedder `{embedder_name}` requires {expected}")]

View File

@@ -6,9 +6,7 @@ use heed::RoTxn;
use super::FieldsIdsMap;
use crate::attribute_patterns::{match_field_legacy, PatternMatch};
use crate::constants::{
RESERVED_GEOJSON_FIELD_NAME, RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME,
};
use crate::constants::{RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME};
use crate::{
is_faceted_by, FieldId, FilterableAttributesFeatures, FilterableAttributesRule, Index,
LocalizedAttributesRule, Result, Weight,
@@ -26,8 +24,6 @@ pub struct Metadata {
pub asc_desc: bool,
/// The field is a geo field (`_geo`, `_geo.lat`, `_geo.lng`).
pub geo: bool,
/// The field is a geo json field (`_geojson`).
pub geo_json: bool,
/// The id of the localized attributes rule if the field is localized.
pub localized_attributes_rule_id: Option<NonZeroU16>,
/// The id of the filterable attributes rule if the field is filterable.
@@ -273,7 +269,6 @@ impl MetadataBuilder {
distinct: false,
asc_desc: false,
geo: false,
geo_json: false,
localized_attributes_rule_id: None,
filterable_attributes_rule_id: None,
};
@@ -300,20 +295,6 @@ impl MetadataBuilder {
distinct: false,
asc_desc: false,
geo: true,
geo_json: false,
localized_attributes_rule_id: None,
filterable_attributes_rule_id,
};
}
if match_field_legacy(RESERVED_GEOJSON_FIELD_NAME, field) == PatternMatch::Match {
debug_assert!(!sortable, "geojson fields should not be sortable");
return Metadata {
searchable: None,
sortable,
distinct: false,
asc_desc: false,
geo: false,
geo_json: true,
localized_attributes_rule_id: None,
filterable_attributes_rule_id,
};
@@ -347,7 +328,6 @@ impl MetadataBuilder {
distinct,
asc_desc,
geo: false,
geo_json: false,
localized_attributes_rule_id,
filterable_attributes_rule_id,
}

View File

@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
use crate::attribute_patterns::{match_distinct_field, match_field_legacy, PatternMatch};
use crate::constants::{RESERVED_GEOJSON_FIELD_NAME, RESERVED_GEO_FIELD_NAME};
use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::AttributePatterns;
#[derive(Serialize, Deserialize, PartialEq, Eq, Clone, Debug, ToSchema)]
@@ -34,10 +34,6 @@ impl FilterableAttributesRule {
matches!(self, FilterableAttributesRule::Field(field_name) if field_name == RESERVED_GEO_FIELD_NAME)
}
pub fn has_geojson(&self) -> bool {
matches!(self, FilterableAttributesRule::Field(field_name) if field_name == RESERVED_GEOJSON_FIELD_NAME)
}
/// Get the features of the rule.
pub fn features(&self) -> FilterableAttributesFeatures {
match self {

View File

@@ -5,7 +5,6 @@ use std::fmt;
use std::fs::File;
use std::path::Path;
use cellulite::Cellulite;
use deserr::Deserr;
use heed::types::*;
use heed::{CompactionOption, Database, DatabaseStat, RoTxn, RwTxn, Unspecified, WithoutTls};
@@ -116,10 +115,9 @@ pub mod db_name {
pub const FIELD_ID_DOCID_FACET_STRINGS: &str = "field-id-docid-facet-strings";
pub const VECTOR_EMBEDDER_CATEGORY_ID: &str = "vector-embedder-category-id";
pub const VECTOR_STORE: &str = "vector-arroy";
pub const CELLULITE: &str = "cellulite";
pub const DOCUMENTS: &str = "documents";
}
const NUMBER_OF_DBS: u32 = 25 + Cellulite::nb_dbs();
const NUMBER_OF_DBS: u32 = 25;
#[derive(Clone)]
pub struct Index {
@@ -185,9 +183,6 @@ pub struct Index {
/// Vector store based on hannoy™.
pub vector_store: hannoy::Database<Unspecified>,
/// Geo store based on cellulite™.
pub cellulite: Cellulite,
/// Maps the document id to the document as an obkv store.
pub(crate) documents: Database<BEU32, ObkvCodec>,
}
@@ -244,7 +239,6 @@ impl Index {
let embedder_category_id =
env.create_database(&mut wtxn, Some(VECTOR_EMBEDDER_CATEGORY_ID))?;
let vector_store = env.create_database(&mut wtxn, Some(VECTOR_STORE))?;
let cellulite = cellulite::Cellulite::create_from_env(&env, &mut wtxn, CELLULITE)?;
let documents = env.create_database(&mut wtxn, Some(DOCUMENTS))?;
@@ -273,7 +267,6 @@ impl Index {
field_id_docid_facet_strings,
vector_store,
embedder_category_id,
cellulite,
documents,
};
if this.get_version(&wtxn)?.is_none() && creation {
@@ -1059,13 +1052,6 @@ impl Index {
Ok(geo_filter)
}
/// Returns true if the geojson filtering feature is enabled.
pub fn is_geojson_filtering_enabled(&self, rtxn: &RoTxn<'_>) -> Result<bool> {
let geojson_filter =
self.filterable_attributes_rules(rtxn)?.iter().any(|field| field.has_geojson());
Ok(geojson_filter)
}
pub fn asc_desc_fields(&self, rtxn: &RoTxn<'_>) -> Result<HashSet<String>> {
let asc_desc_fields = self
.criteria(rtxn)?
@@ -1896,7 +1882,6 @@ impl Index {
field_id_docid_facet_strings,
vector_store,
embedder_category_id,
cellulite,
documents,
} = self;
@@ -1970,17 +1955,6 @@ impl Index {
sizes.insert("embedder_category_id", embedder_category_id.stat(rtxn).map(compute_size)?);
sizes.insert("documents", documents.stat(rtxn).map(compute_size)?);
// Cellulite
const _CELLULITE_DB_CHECK: () = {
if Cellulite::nb_dbs() != 4 {
panic!("Cellulite database count has changed, please update the code accordingly.")
}
};
sizes.insert("cellulite_item", cellulite.item_db_stats(rtxn).map(compute_size)?);
sizes.insert("cellulite_cell", cellulite.cell_db_stats(rtxn).map(compute_size)?);
sizes.insert("cellulite_update", cellulite.update_db_stats(rtxn).map(compute_size)?);
sizes.insert("cellulite_metadata", cellulite.metadata_db_stats(rtxn).map(compute_size)?);
Ok(sizes)
}
}
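The `_CELLULITE_DB_CHECK` above uses a small const-evaluation trick: a `const` item whose initializer panics turns a drifted invariant into a compile error instead of a runtime surprise. A standalone sketch of the pattern:

// If the count ever changes, the build fails with the panic message below.
const fn nb_dbs() -> u32 {
    4
}

const _DB_COUNT_CHECK: () = {
    if nb_dbs() != 4 {
        panic!("database count has changed, please update the code accordingly");
    }
};

fn main() {}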

View File

@@ -54,7 +54,7 @@ pub use search::new::{
};
use serde_json::Value;
pub use thread_pool_no_abort::{PanicCatched, ThreadPoolNoAbort, ThreadPoolNoAbortBuilder};
pub use {arroy, cellulite, charabia as tokenizer, hannoy, heed, rhai};
pub use {arroy, charabia as tokenizer, hannoy, heed, rhai};
pub use self::asc_desc::{AscDesc, AscDescError, Member, SortError};
pub use self::attribute_patterns::{AttributePatterns, PatternMatch};
@@ -87,7 +87,7 @@ pub use self::search::{
};
pub use self::update::ChannelCongestion;
pub type Result<T, E = error::Error> = std::result::Result<T, E>;
pub type Result<T> = std::result::Result<T, error::Error>;
pub type Attribute = u32;
pub type BEU16 = heed::types::U16<heed::byteorder::BE>;
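The change to the `Result` alias is behavior-preserving for existing call sites: a defaulted error parameter lets `Result<T>` keep meaning `Result<T, error::Error>` while allowing an explicit override. A minimal demonstration, with standard-library error types standing in:

pub type Result<T, E = std::io::Error> = std::result::Result<T, E>;

fn read() -> Result<u8> {
    Ok(1) // uses the defaulted error type
}

fn parse() -> Result<u8, std::num::ParseIntError> {
    "7".parse() // overrides the default
}

fn main() {
    assert!(read().is_ok() && parse().is_ok());
}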

View File

@@ -278,6 +278,30 @@ impl<U: Send + Sync + 'static> Step for VariableNameStep<U> {
}
}
// Integration with steppe
impl steppe::Progress for Progress {
fn update(&self, sub_progress: impl steppe::Step) {
self.update_progress(Compat(sub_progress));
}
}
struct Compat<T: steppe::Step>(T);
impl<T: steppe::Step> Step for Compat<T> {
fn name(&self) -> Cow<'static, str> {
self.0.name()
}
fn current(&self) -> u32 {
self.0.current().try_into().unwrap_or(u32::MAX)
}
fn total(&self) -> u32 {
self.0.total().try_into().unwrap_or(u32::MAX)
}
}
impl Step for arroy::MainStep {
fn name(&self) -> Cow<'static, str> {
match self {
@@ -319,27 +343,3 @@ impl Step for arroy::SubStep {
self.max
}
}
// Integration with steppe
impl steppe::Progress for Progress {
fn update(&self, sub_progress: impl steppe::Step) {
self.update_progress(Compat(sub_progress));
}
}
struct Compat<T: steppe::Step>(T);
impl<T: steppe::Step> Step for Compat<T> {
fn name(&self) -> Cow<'static, str> {
self.0.name()
}
fn current(&self) -> u32 {
self.0.current().try_into().unwrap_or(u32::MAX)
}
fn total(&self) -> u32 {
self.0.total().try_into().unwrap_or(u32::MAX)
}
}
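This hunk only relocates the steppe integration; the `Compat` adapter itself is unchanged. Its one subtlety is the counter conversion: steppe reports wider integers than the local `Step` trait's `u32`, so out-of-range values saturate instead of panicking, as this tiny sketch (assuming 64-bit counters) shows:

fn saturate(n: u64) -> u32 {
    n.try_into().unwrap_or(u32::MAX)
}

fn main() {
    assert_eq!(saturate(42), 42);
    assert_eq!(saturate(u64::MAX), u32::MAX); // clamped, no panic
}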

View File

@@ -304,7 +304,7 @@ impl ArrayView for ParseableArray<'_> {
fn get(&self, index: i64) -> Option<&dyn ValueView> {
let index = convert_index(index, self.size());
if index < 0 {
if index <= 0 {
return None;
}
let v = self.0.get(index as usize)?;
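The guard flipped in this hunk matters because `convert_index` maps negative indices to offsets from the end; after conversion, `0` is the valid first element and only still-negative values are out of range, so `index <= 0` would silently drop `array[0]` and `array[-len]` lookups. A self-contained sketch of the convention (the helper names are illustrative):

fn convert_index(index: i64, size: i64) -> i64 {
    if index < 0 {
        size + index // count from the end
    } else {
        index
    }
}

fn get<T>(slice: &[T], index: i64) -> Option<&T> {
    let index = convert_index(index, slice.len() as i64);
    if index < 0 {
        return None; // `<= 0` here would wrongly reject the first element
    }
    slice.get(index as usize)
}

fn main() {
    let xs = [10, 20, 30];
    assert_eq!(get(&xs, 0), Some(&10));  // first element stays reachable
    assert_eq!(get(&xs, -1), Some(&30)); // negative indices count backwards
    assert_eq!(get(&xs, -4), None);      // still negative after conversion
}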

View File

@@ -12,9 +12,7 @@ use roaring::{MultiOps, RoaringBitmap};
use serde_json::Value;
use super::facet_range_search;
use crate::constants::{
RESERVED_GEOJSON_FIELD_NAME, RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME,
};
use crate::constants::{RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME};
use crate::error::{Error, UserError};
use crate::filterable_attributes_rules::{filtered_matching_patterns, matching_features};
use crate::heed_codec::facet::{FacetGroupKey, FacetGroupKeyCodec, FacetGroupValueCodec};
@@ -38,7 +36,6 @@ pub struct Filter<'a> {
pub enum BadGeoError {
Lat(f64),
Lng(f64),
InvalidResolution(usize),
BoundingBoxTopIsBelowBottom(f64, f64),
}
@@ -50,23 +47,16 @@ impl Display for BadGeoError {
Self::BoundingBoxTopIsBelowBottom(top, bottom) => {
write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`.")
}
Self::InvalidResolution(resolution) => write!(
f,
"Invalid resolution `{resolution}`. Resolution must be between 3 and 1000."
),
Self::Lat(lat) => write!(
f,
"Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees.",
"Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ",
lat
),
Self::Lng(lng) => {
let normalized = (lng + 180.0).rem_euclid(360.0) - 180.0;
write!(
f,
"Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. Hint: try using `{normalized}` instead.",
lng
)
}
Self::Lng(lng) => write!(
f,
"Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ",
lng
),
}
}
}
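The rewritten `Lng` arm goes beyond reporting the bad value: it computes an in-range equivalent by shifting into [0, 360), taking the Euclidean remainder, and shifting back, which folds any longitude into [-180, 180). This self-contained check confirms the formula for the values asserted in the tests further down:

fn normalize_lng(lng: f64) -> f64 {
    (lng + 180.0).rem_euclid(360.0) - 180.0
}

fn main() {
    assert_eq!(normalize_lng(250.0), -110.0); // the hint for `_geoRadius(-10, 250, 10)`
    assert!((normalize_lng(180.000001) - (-179.999999)).abs() < 1e-9);
    assert_eq!(normalize_lng(-110.0), -110.0); // in-range values pass through
}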
@@ -622,61 +612,50 @@ impl<'a> Filter<'a> {
.union(),
FilterCondition::And(subfilters) => {
let mut subfilters_iter = subfilters.iter();
let Some(first_subfilter) = subfilters_iter.next() else {
return Ok(RoaringBitmap::new());
};
let mut bitmap = Self::inner_evaluate(
&(first_subfilter.clone()).into(),
rtxn,
index,
field_ids_map,
filterable_attribute_rules,
universe,
)?;
for f in subfilters_iter {
if bitmap.is_empty() {
return Ok(bitmap);
}
// TODO We are doing the intersections two times,
// it could be more efficient
// Can't I just replace this `&=` by an `=`?
bitmap &= Self::inner_evaluate(
&(f.clone()).into(),
if let Some(first_subfilter) = subfilters_iter.next() {
let mut bitmap = Self::inner_evaluate(
&(first_subfilter.clone()).into(),
rtxn,
index,
field_ids_map,
filterable_attribute_rules,
Some(&bitmap),
universe,
)?;
for f in subfilters_iter {
if bitmap.is_empty() {
return Ok(bitmap);
}
// TODO We are doing the intersections two times,
// it could be more efficient
// Can't I just replace this `&=` by an `=`?
bitmap &= Self::inner_evaluate(
&(f.clone()).into(),
rtxn,
index,
field_ids_map,
filterable_attribute_rules,
Some(&bitmap),
)?;
}
Ok(bitmap)
} else {
Ok(RoaringBitmap::new())
}
Ok(bitmap)
}
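Both shapes of the `And` arm implement the same plan, the new one just without the `if let` nesting: evaluate the first subfilter, intersect each following result into the accumulator, and stop early once it is empty, since no later `AND` operand can add documents back. The core loop, reduced to plain bitmaps:

use roaring::RoaringBitmap;

fn intersect_all(mut results: impl Iterator<Item = RoaringBitmap>) -> RoaringBitmap {
    let Some(mut acc) = results.next() else {
        return RoaringBitmap::new(); // `AND` over zero subfilters selects nothing
    };
    for bitmap in results {
        if acc.is_empty() {
            return acc; // early exit: the intersection can only shrink
        }
        acc &= bitmap;
    }
    acc
}

fn main() {
    let a: RoaringBitmap = [1u32, 2, 3].into_iter().collect();
    let b: RoaringBitmap = [2u32, 3, 4].into_iter().collect();
    let expected: RoaringBitmap = [2u32, 3].into_iter().collect();
    assert_eq!(intersect_all([a, b].into_iter()), expected);
}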
FilterCondition::VectorExists { fid: _, embedder, filter } => {
super::filter_vector::evaluate(rtxn, index, universe, embedder.clone(), filter)
}
FilterCondition::GeoLowerThan { point, radius, resolution: res_token } => {
let base_point: [f64; 2] =
[point[0].parse_finite_float()?, point[1].parse_finite_float()?];
if !(-90.0..=90.0).contains(&base_point[0]) {
return Err(point[0].as_external_error(BadGeoError::Lat(base_point[0])))?;
}
if !(-180.0..=180.0).contains(&base_point[1]) {
return Err(point[1].as_external_error(BadGeoError::Lng(base_point[1])))?;
}
let radius = radius.parse_finite_float()?;
let mut resolution = 125;
if let Some(res_token) = res_token {
resolution = res_token.parse_finite_float()? as usize;
if !(3..=1000).contains(&resolution) {
return Err(
res_token.as_external_error(BadGeoError::InvalidResolution(resolution))
)?;
}
}
let mut r1 = None;
FilterCondition::GeoLowerThan { point, radius } => {
if index.is_geo_filtering_enabled(rtxn)? {
let base_point: [f64; 2] =
[point[0].parse_finite_float()?, point[1].parse_finite_float()?];
if !(-90.0..=90.0).contains(&base_point[0]) {
return Err(point[0].as_external_error(BadGeoError::Lat(base_point[0])))?;
}
if !(-180.0..=180.0).contains(&base_point[1]) {
return Err(point[1].as_external_error(BadGeoError::Lng(base_point[1])))?;
}
let radius = radius.parse_finite_float()?;
let rtree = match index.geo_rtree(rtxn)? {
Some(rtree) => rtree,
None => return Ok(RoaringBitmap::new()),
@@ -692,72 +671,52 @@ impl<'a> Filter<'a> {
})
.map(|point| point.data.0)
.collect();
r1 = Some(result);
}
let mut r2 = None;
if index.is_geojson_filtering_enabled(rtxn)? {
let point = geo_types::Point::new(base_point[1], base_point[0]);
let result = index.cellulite.in_circle(rtxn, point, radius, resolution)?;
r2 = Some(RoaringBitmap::from_iter(result)); // TODO: Remove once we update roaring in meilisearch
}
match (r1, r2) {
(Some(r1), Some(r2)) => Ok(r1 | r2),
(Some(r1), None) => Ok(r1),
(None, Some(r2)) => Ok(r2),
(None, None) => {
Err(point[0].as_external_error(FilterError::AttributeNotFilterable {
attribute: &format!(
"{RESERVED_GEO_FIELD_NAME}/{RESERVED_GEOJSON_FIELD_NAME}"
),
filterable_patterns: filtered_matching_patterns(
filterable_attribute_rules,
&|features| features.is_filterable(),
),
}))?
}
Ok(result)
} else {
Err(point[0].as_external_error(FilterError::AttributeNotFilterable {
attribute: RESERVED_GEO_FIELD_NAME,
filterable_patterns: filtered_matching_patterns(
filterable_attribute_rules,
&|features| features.is_filterable(),
),
}))?
}
}
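The `(r1, r2)` match above is the crux of the new `_geoRadius` arm: `r1` holds candidates from the `_geo` rtree and `r2` candidates from the cellulite `_geojson` store, each populated only when that attribute is filterable; enabled backends are unioned, and neither being enabled is the "not filterable" error case. Reduced to its skeleton:

use roaring::RoaringBitmap;

fn merge_geo_results(
    r1: Option<RoaringBitmap>, // from `_geo`, when geo filtering is enabled
    r2: Option<RoaringBitmap>, // from `_geojson`, when geojson filtering is enabled
) -> Result<RoaringBitmap, &'static str> {
    match (r1, r2) {
        (Some(r1), Some(r2)) => Ok(r1 | r2),
        (Some(r1), None) => Ok(r1),
        (None, Some(r2)) => Ok(r2),
        (None, None) => Err("Attribute `_geo/_geojson` is not filterable"),
    }
}

fn main() {
    let geo: RoaringBitmap = [1u32, 2].into_iter().collect();
    let geojson: RoaringBitmap = [2u32, 3].into_iter().collect();
    let merged = merge_geo_results(Some(geo), Some(geojson)).unwrap();
    assert_eq!(merged, [1u32, 2, 3].into_iter().collect::<RoaringBitmap>());
}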
FilterCondition::GeoBoundingBox { top_right_point, bottom_left_point } => {
let top_right: [f64; 2] = [
top_right_point[0].parse_finite_float()?,
top_right_point[1].parse_finite_float()?,
];
let bottom_left: [f64; 2] = [
bottom_left_point[0].parse_finite_float()?,
bottom_left_point[1].parse_finite_float()?,
];
if !(-90.0..=90.0).contains(&top_right[0]) {
return Err(
top_right_point[0].as_external_error(BadGeoError::Lat(top_right[0]))
)?;
}
if !(-180.0..=180.0).contains(&top_right[1]) {
return Err(
top_right_point[1].as_external_error(BadGeoError::Lng(top_right[1]))
)?;
}
if !(-90.0..=90.0).contains(&bottom_left[0]) {
return Err(
bottom_left_point[0].as_external_error(BadGeoError::Lat(bottom_left[0]))
)?;
}
if !(-180.0..=180.0).contains(&bottom_left[1]) {
return Err(
bottom_left_point[1].as_external_error(BadGeoError::Lng(bottom_left[1]))
)?;
}
if top_right[0] < bottom_left[0] {
return Err(bottom_left_point[1].as_external_error(
BadGeoError::BoundingBoxTopIsBelowBottom(top_right[0], bottom_left[0]),
))?;
}
let mut r1 = None;
if index.is_geo_filtering_enabled(rtxn)? {
let top_right: [f64; 2] = [
top_right_point[0].parse_finite_float()?,
top_right_point[1].parse_finite_float()?,
];
let bottom_left: [f64; 2] = [
bottom_left_point[0].parse_finite_float()?,
bottom_left_point[1].parse_finite_float()?,
];
if !(-90.0..=90.0).contains(&top_right[0]) {
return Err(
top_right_point[0].as_external_error(BadGeoError::Lat(top_right[0]))
)?;
}
if !(-180.0..=180.0).contains(&top_right[1]) {
return Err(
top_right_point[1].as_external_error(BadGeoError::Lng(top_right[1]))
)?;
}
if !(-90.0..=90.0).contains(&bottom_left[0]) {
return Err(bottom_left_point[0]
.as_external_error(BadGeoError::Lat(bottom_left[0])))?;
}
if !(-180.0..=180.0).contains(&bottom_left[1]) {
return Err(bottom_left_point[1]
.as_external_error(BadGeoError::Lng(bottom_left[1])))?;
}
if top_right[0] < bottom_left[0] {
return Err(bottom_left_point[1].as_external_error(
BadGeoError::BoundingBoxTopIsBelowBottom(top_right[0], bottom_left[0]),
))?;
}
// Instead of writing a custom `GeoBoundingBox` filter we're simply going to re-use the range
// filter to create the following filter;
// `_geo.lat {top_right[0]} TO {bottom_left[0]} AND _geo.lng {top_right[1]} TO {bottom_left[1]}`
@@ -852,76 +811,19 @@ impl<'a> Filter<'a> {
)?
};
r1 = Some(selected_lat & selected_lng);
}
let mut r2 = None;
if index.is_geojson_filtering_enabled(rtxn)? {
let polygon = geo_types::Polygon::new(
geo_types::LineString(vec![
geo_types::Coord { x: top_right[1], y: top_right[0] },
geo_types::Coord { x: bottom_left[1], y: top_right[0] },
geo_types::Coord { x: bottom_left[1], y: bottom_left[0] },
geo_types::Coord { x: top_right[1], y: bottom_left[0] },
]),
Vec::new(),
);
let result = index.cellulite.in_shape(rtxn, &polygon)?;
r2 = Some(RoaringBitmap::from_iter(result)); // TODO: Remove once we update roaring in meilisearch
}
match (r1, r2) {
(Some(r1), Some(r2)) => Ok(r1 | r2),
(Some(r1), None) => Ok(r1),
(None, Some(r2)) => Ok(r2),
(None, None) => Err(top_right_point[0].as_external_error(
Ok(selected_lat & selected_lng)
} else {
Err(top_right_point[0].as_external_error(
FilterError::AttributeNotFilterable {
attribute: &format!(
"{RESERVED_GEO_FIELD_NAME}/{RESERVED_GEOJSON_FIELD_NAME}"
),
attribute: RESERVED_GEO_FIELD_NAME,
filterable_patterns: filtered_matching_patterns(
filterable_attribute_rules,
&|features| features.is_filterable(),
),
},
))?,
))?
}
}
FilterCondition::GeoPolygon { points } => {
if !index.is_geojson_filtering_enabled(rtxn)? {
return Err(points[0][0].as_external_error(
FilterError::AttributeNotFilterable {
attribute: RESERVED_GEOJSON_FIELD_NAME,
filterable_patterns: filtered_matching_patterns(
filterable_attribute_rules,
&|features| features.is_filterable(),
),
},
))?;
}
let mut coords = Vec::new();
for [lat_token, lng_token] in points {
let lat = lat_token.parse_finite_float()?;
let lng = lng_token.parse_finite_float()?;
if !(-90.0..=90.0).contains(&lat) {
return Err(lat_token.as_external_error(BadGeoError::Lat(lat)))?;
}
if !(-180.0..=180.0).contains(&lng) {
return Err(lng_token.as_external_error(BadGeoError::Lng(lng)))?;
}
coords.push(geo_types::Coord { x: lng, y: lat });
}
let polygon = geo_types::Polygon::new(geo_types::LineString(coords), Vec::new());
let result = index.cellulite.in_shape(rtxn, &polygon)?;
let result = roaring::RoaringBitmap::from_iter(result); // TODO: Remove once we update roaring in meilisearch
Ok(result)
}
}
}
}
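One detail of the `GeoPolygon` arm worth calling out: filter points arrive as `[lat, lng]` pairs, but `geo_types` coordinates are `x`/`y`, i.e. longitude first, hence the swap when the `Coord`s are built. A self-contained sketch of the shape construction (validation omitted):

use geo_types::{Coord, LineString, Polygon};

fn polygon_from_points(points: &[[f64; 2]]) -> Polygon<f64> {
    let coords: Vec<Coord<f64>> =
        points.iter().map(|&[lat, lng]| Coord { x: lng, y: lat }).collect();
    Polygon::new(LineString(coords), Vec::new())
}

fn main() {
    let triangle = polygon_from_points(&[[48.8, 2.3], [45.7, 4.8], [43.3, 5.4]]);
    assert_eq!(triangle.exterior().0.len(), 4); // geo_types closes the ring
}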
@@ -1060,17 +962,17 @@ mod tests {
let rtxn = index.read_txn().unwrap();
let filter = Filter::from_str("_geoRadius(42, 150, 10)").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Attribute `_geo/_geojson` is not filterable. This index does not have configured filterable attributes.
snapshot!(error.to_string(), @r###"
Attribute `_geo` is not filterable. This index does not have configured filterable attributes.
12:14 _geoRadius(42, 150, 10)
");
"###);
let filter = Filter::from_str("_geoBoundingBox([42, 150], [30, 10])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Attribute `_geo/_geojson` is not filterable. This index does not have configured filterable attributes.
snapshot!(error.to_string(), @r###"
Attribute `_geo` is not filterable. This index does not have configured filterable attributes.
18:20 _geoBoundingBox([42, 150], [30, 10])
");
"###);
let filter = Filter::from_str("dog = \"bernese mountain\"").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
@@ -1091,19 +993,19 @@ mod tests {
let rtxn = index.read_txn().unwrap();
let filter = Filter::from_str("_geoRadius(-90, 150, 10)").unwrap().unwrap();
let filter = Filter::from_str("_geoRadius(-100, 150, 10)").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Attribute `_geo/_geojson` is not filterable. Available filterable attribute patterns are: `title`.
12:15 _geoRadius(-90, 150, 10)
");
snapshot!(error.to_string(), @r###"
Attribute `_geo` is not filterable. Available filterable attribute patterns are: `title`.
12:16 _geoRadius(-100, 150, 10)
"###);
let filter = Filter::from_str("_geoBoundingBox([42, 150], [30, 10])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Attribute `_geo/_geojson` is not filterable. Available filterable attribute patterns are: `title`.
snapshot!(error.to_string(), @r###"
Attribute `_geo` is not filterable. Available filterable attribute patterns are: `title`.
18:20 _geoBoundingBox([42, 150], [30, 10])
");
"###);
let filter = Filter::from_str("name = 12").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
@@ -1251,34 +1153,38 @@ mod tests {
// georadius have a bad latitude
let filter = Filter::from_str("_geoRadius(-100, 150, 10)").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad latitude `-100`. Latitude must be contained between -90 and 90 degrees.
12:16 _geoRadius(-100, 150, 10)
");
assert!(
error.to_string().starts_with(
"Bad latitude `-100`. Latitude must be contained between -90 and 90 degrees."
),
"{}",
error.to_string()
);
// georadius have a bad latitude
let filter = Filter::from_str("_geoRadius(-90.0000001, 150, 10)").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees.
12:23 _geoRadius(-90.0000001, 150, 10)
");
assert!(error.to_string().contains(
"Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees."
));
// georadius have a bad longitude
let filter = Filter::from_str("_geoRadius(-10, 250, 10)").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad longitude `250`. Longitude must be contained between -180 and 180 degrees. Hint: try using `-110` instead.
17:20 _geoRadius(-10, 250, 10)
");
assert!(
error.to_string().contains(
"Bad longitude `250`. Longitude must be contained between -180 and 180 degrees."
),
"{}",
error.to_string(),
);
// georadius have a bad longitude
let filter = Filter::from_str("_geoRadius(-10, 180.000001, 10)").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees. Hint: try using `-179.999999` instead.
17:27 _geoRadius(-10, 180.000001, 10)
");
assert!(error.to_string().contains(
"Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees."
));
}
#[test]
@@ -1301,73 +1207,73 @@ mod tests {
let filter =
Filter::from_str("_geoBoundingBox([-90.0000001, 150], [30, 10])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees.
18:29 _geoBoundingBox([-90.0000001, 150], [30, 10])
");
assert!(
error.to_string().starts_with(
"Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees."
),
"{}",
error.to_string()
);
// geoboundingbox top left coord have a bad latitude
let filter =
Filter::from_str("_geoBoundingBox([90.0000001, 150], [30, 10])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees.
18:28 _geoBoundingBox([90.0000001, 150], [30, 10])
");
assert!(
error.to_string().starts_with(
"Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees."
),
"{}",
error.to_string()
);
// geoboundingbox bottom right coord have a bad latitude
let filter =
Filter::from_str("_geoBoundingBox([30, 10], [-90.0000001, 150])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees.
28:39 _geoBoundingBox([30, 10], [-90.0000001, 150])
");
assert!(error.to_string().contains(
"Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees."
));
// geoboundingbox bottom right coord have a bad latitude
let filter =
Filter::from_str("_geoBoundingBox([30, 10], [90.0000001, 150])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees.
28:38 _geoBoundingBox([30, 10], [90.0000001, 150])
");
assert!(error.to_string().contains(
"Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees."
));
// geoboundingbox top left coord have a bad longitude
let filter =
Filter::from_str("_geoBoundingBox([-10, 180.000001], [30, 10])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees. Hint: try using `-179.999999` instead.
23:33 _geoBoundingBox([-10, 180.000001], [30, 10])
");
assert!(error.to_string().contains(
"Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees."
));
// geoboundingbox top left coord have a bad longitude
let filter =
Filter::from_str("_geoBoundingBox([-10, -180.000001], [30, 10])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees. Hint: try using `179.999999` instead.
23:34 _geoBoundingBox([-10, -180.000001], [30, 10])
");
assert!(error.to_string().contains(
"Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees."
));
// geoboundingbox bottom right coord have a bad longitude
let filter =
Filter::from_str("_geoBoundingBox([30, 10], [-10, -180.000001])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees. Hint: try using `179.999999` instead.
33:44 _geoBoundingBox([30, 10], [-10, -180.000001])
");
assert!(error.to_string().contains(
"Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees."
));
// geoboundingbox bottom right coord have a bad longitude
let filter =
Filter::from_str("_geoBoundingBox([30, 10], [-10, 180.000001])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
snapshot!(error.to_string(), @r"
Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees. Hint: try using `-179.999999` instead.
33:43 _geoBoundingBox([30, 10], [-10, 180.000001])
");
assert!(error.to_string().contains(
"Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees."
));
}
#[test]

Some files were not shown because too many files have changed in this diff Show More