Compare commits


1 Commit

Author:  Kerollmops
SHA1:    15321ce924
Message: Replace the hand-made VecDequeue by a FutureUnordered
Date:    2025-11-12 09:12:37 +01:00
51 changed files with 145 additions and 492 deletions
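
The single commit here swaps a hand-rolled queue of in-flight S3 part uploads (a VecDeque of JoinHandles and their buffers, always awaited oldest-first via pop_front) for futures::stream::FuturesUnordered, which resolves whichever upload completes first. A minimal sketch of the resulting pattern, assuming tokio and futures as dependencies, with a hypothetical upload_part function and MAX_IN_FLIGHT limit (not the actual Meilisearch code):

```rust
use futures::stream::{FuturesUnordered, StreamExt};
use tokio::task::JoinHandle;

const MAX_IN_FLIGHT: usize = 4; // stands in for s3_max_in_flight_parts

// Hypothetical stand-in for one S3 part upload; returns an etag-like string.
async fn upload_part(part_number: u16, body: Vec<u8>) -> Result<String, String> {
    Ok(format!("etag-{part_number}-{}", body.len()))
}

#[tokio::main]
async fn main() {
    let mut in_flight = FuturesUnordered::new();
    let mut etags: Vec<String> = Vec::new();

    for part_number in 1u16..=10 {
        // Backpressure: never keep more than MAX_IN_FLIGHT uploads running.
        if in_flight.len() >= MAX_IN_FLIGHT {
            // `next()` resolves as soon as *any* spawned upload finishes,
            // unlike `VecDeque::pop_front`, which always waits for the oldest one.
            let joined = in_flight.next().await.expect("at least one in-flight upload");
            etags.push(joined.expect("task panicked").expect("upload failed"));
        }
        let handle: JoinHandle<Result<String, String>> =
            tokio::spawn(upload_part(part_number, vec![0u8; 1024]));
        in_flight.push(handle);
    }

    // Drain the remaining uploads before completing the multipart upload.
    // (A real implementation must also track which part number each etag belongs to.)
    while let Some(joined) = in_flight.next().await {
        etags.push(joined.expect("task panicked").expect("upload failed"));
    }
    println!("collected {} etags", etags.len());
}
```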


@@ -65,9 +65,9 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [macos-14, windows-2022]
os: [macos-13, windows-2022]
include:
- os: macos-14
- os: macos-13
artifact_name: meilisearch
asset_name: meilisearch-macos-amd64
- os: windows-2022
@@ -90,7 +90,7 @@ jobs:
publish-macos-apple-silicon:
name: Publish binary for macOS silicon
runs-on: macos-14
runs-on: macos-13
needs: check-version
strategy:
matrix:


@@ -47,7 +47,7 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [macos-14, windows-2022]
os: [macos-13, windows-2022]
steps:
- uses: actions/checkout@v5
- name: Cache dependencies

Cargo.lock (generated)

@@ -345,6 +345,12 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "allocator-api2"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c583acf993cf4245c4acb0a2cc2ab1f9cc097de73411bb6d3647ff6af2b1013d"
[[package]]
name = "anes"
version = "0.1.6"
@@ -584,7 +590,7 @@ source = "git+https://github.com/meilisearch/bbqueue#cbb87cc707b5af415ef203bdaf2
[[package]]
name = "benchmarks"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"anyhow",
"bumpalo",
@@ -794,7 +800,7 @@ dependencies = [
[[package]]
name = "build-info"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"anyhow",
"time",
@@ -807,7 +813,7 @@ version = "3.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
dependencies = [
"allocator-api2",
"allocator-api2 0.2.21",
"serde",
]
@@ -817,7 +823,7 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ce682bdc86c2e25ef5cd95881d9d6a1902214eddf74cf9ffea88fe1464377e8"
dependencies = [
"allocator-api2",
"allocator-api2 0.2.21",
"bitpacking",
"bumpalo",
"hashbrown 0.15.5",
@@ -1784,7 +1790,7 @@ dependencies = [
[[package]]
name = "dump"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"anyhow",
"big_s",
@@ -2027,7 +2033,7 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "file-store"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"tempfile",
"thiserror 2.0.16",
@@ -2049,7 +2055,7 @@ dependencies = [
[[package]]
name = "filter-parser"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"insta",
"levenshtein_automata",
@@ -2077,7 +2083,7 @@ dependencies = [
[[package]]
name = "flatten-serde-json"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"criterion",
"serde_json",
@@ -2234,7 +2240,7 @@ dependencies = [
[[package]]
name = "fuzzers"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"arbitrary",
"bumpalo",
@@ -2760,7 +2766,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
dependencies = [
"ahash 0.8.12",
"allocator-api2",
"allocator-api2 0.2.21",
]
[[package]]
@@ -2769,7 +2775,7 @@ version = "0.15.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
dependencies = [
"allocator-api2",
"allocator-api2 0.2.21",
"equivalent",
"foldhash",
"serde",
@@ -3188,7 +3194,7 @@ dependencies = [
[[package]]
name = "index-scheduler"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"anyhow",
"backoff",
@@ -3206,6 +3212,7 @@ dependencies = [
"enum-iterator",
"file-store",
"flate2",
"futures",
"indexmap",
"insta",
"maplit",
@@ -3461,7 +3468,7 @@ dependencies = [
[[package]]
name = "json-depth-checker"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"criterion",
"serde_json",
@@ -3980,7 +3987,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
[[package]]
name = "meili-snap"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"insta",
"md5",
@@ -3991,7 +3998,7 @@ dependencies = [
[[package]]
name = "meilisearch"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"actix-cors",
"actix-http",
@@ -4088,7 +4095,7 @@ dependencies = [
[[package]]
name = "meilisearch-auth"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"base64 0.22.1",
"enum-iterator",
@@ -4107,7 +4114,7 @@ dependencies = [
[[package]]
name = "meilisearch-types"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"actix-web",
"anyhow",
@@ -4142,7 +4149,7 @@ dependencies = [
[[package]]
name = "meilitool"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"anyhow",
"clap",
@@ -4176,8 +4183,9 @@ dependencies = [
[[package]]
name = "milli"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"allocator-api2 0.3.1",
"arroy",
"bbqueue",
"big_s",
@@ -4757,7 +4765,7 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "permissive-json-pointer"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"big_s",
"serde_json",
@@ -7879,7 +7887,7 @@ dependencies = [
[[package]]
name = "xtask"
version = "1.27.0"
version = "1.25.0"
dependencies = [
"anyhow",
"build-info",


@@ -23,7 +23,7 @@ members = [
]
[workspace.package]
version = "1.27.0"
version = "1.25.0"
authors = [
"Quentin de Quelen <quentin@dequelen.me>",
"Clément Renault <clement@meilisearch.com>",


@@ -317,7 +317,6 @@ pub(crate) mod test {
FilterableAttributesRule::Field(S("race")),
FilterableAttributesRule::Field(S("age")),
]),
foreign_keys: Setting::NotSet,
sortable_attributes: Setting::Set(btreeset! { S("age") }),
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,


@@ -349,7 +349,6 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
v5::settings::Setting::Reset => v6::Setting::Reset,
v5::settings::Setting::NotSet => v6::Setting::NotSet,
},
foreign_keys: v6::Setting::NotSet,
sortable_attributes: settings.sortable_attributes.into(),
ranking_rules: {
match settings.ranking_rules {


@@ -46,10 +46,11 @@ time = { version = "0.3.41", features = [
tracing = "0.1.41"
ureq = "2.12.1"
uuid = { version = "1.17.0", features = ["serde", "v4"] }
backoff = "0.4.0"
backoff = { version = "0.4.0", features = ["tokio"] }
reqwest = { version = "0.12.23", features = ["rustls-tls", "http2"], default-features = false }
rusty-s3 = "0.8.1"
tokio = { version = "1.47.1", features = ["full"] }
futures = "0.3.31"
[dev-dependencies]
big_s = "1.0.2"

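The `tokio` feature added to `backoff` above is what enables its async retry helper on the tokio runtime, and the new `futures` dependency is where `FuturesUnordered` comes from. A rough sketch of the retry shape the upload code relies on, assuming the `backoff::future::retry` helper and a hypothetical `send_part` function (not the real call site):

```rust
use backoff::ExponentialBackoff;

// Hypothetical stand-in for a single HTTP part upload; returns a status code.
async fn send_part() -> Result<u16, String> {
    Ok(200)
}

#[tokio::main]
async fn main() -> Result<(), String> {
    // `backoff::future::retry` re-runs the closure with an exponential delay on
    // `Error::transient(..)` and gives up immediately on `Error::Permanent(..)`.
    let status = backoff::future::retry(ExponentialBackoff::default(), || async {
        match send_part().await {
            // A 4xx answer will not improve on retry: surface it right away.
            Ok(status) if (400..500).contains(&status) => {
                Err(backoff::Error::Permanent(format!("client error {status}")))
            }
            Ok(status) => Ok(status),
            // Network-level failures are worth retrying.
            Err(e) => Err(backoff::Error::transient(e)),
        }
    })
    .await?;
    println!("part uploaded with status {status}");
    Ok(())
}
```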

@@ -171,19 +171,6 @@ impl RoFeatures {
.into())
}
}
pub fn check_foreign_keys_setting(&self, disabled_action: &'static str) -> Result<()> {
if self.runtime.foreign_keys {
Ok(())
} else {
Err(FeatureNotEnabledError {
disabled_action,
feature: "foreign_keys",
issue_link: "https://github.com/orgs/meilisearch/discussions/873",
}
.into())
}
}
}
impl FeatureData {


@@ -438,16 +438,13 @@ async fn multipart_stream_to_s3(
db_name: String,
reader: std::io::PipeReader,
) -> Result<(), Error> {
use std::collections::VecDeque;
use std::io;
use std::os::fd::OwnedFd;
use std::path::PathBuf;
use std::{os::fd::OwnedFd, path::PathBuf};
use bytes::{Bytes, BytesMut};
use reqwest::{Client, Response};
use rusty_s3::actions::CreateMultipartUpload;
use rusty_s3::{Bucket, BucketError, Credentials, S3Action as _, UrlStyle};
use tokio::task::JoinHandle;
use bytes::BytesMut;
use futures::stream::{FuturesUnordered, StreamExt};
use reqwest::Client;
use rusty_s3::S3Action as _;
use rusty_s3::{actions::CreateMultipartUpload, Bucket, BucketError, Credentials, UrlStyle};
let reader = OwnedFd::from(reader);
let reader = tokio::net::unix::pipe::Receiver::from_owned_fd(reader)?;
@@ -485,9 +482,7 @@ async fn multipart_stream_to_s3(
// We use this bumpalo for etags strings.
let bump = bumpalo::Bump::new();
let mut etags = Vec::<&str>::new();
let mut in_flight = VecDeque::<(JoinHandle<reqwest::Result<Response>>, Bytes)>::with_capacity(
s3_max_in_flight_parts.get(),
);
let mut in_flight = FuturesUnordered::new();
// Part numbers start at 1 and cannot be larger than 10k
for part_number in 1u16.. {
@@ -501,8 +496,21 @@ async fn multipart_stream_to_s3(
// Wait for a buffer to be ready if there are in-flight parts that landed
let mut buffer = if in_flight.len() >= s3_max_in_flight_parts.get() {
let (handle, buffer) = in_flight.pop_front().expect("At least one in flight request");
let resp = join_and_map_error(handle).await?;
let (join_result, buffer): (
Result<reqwest::Result<reqwest::Response>, tokio::task::JoinError>,
bytes::Bytes,
) = in_flight.next().await.expect("At least one in flight request");
// safety: Panic happens if the task (JoinHandle) was aborted, cancelled, or panicked
let resp = join_result.unwrap().map_err(Error::S3HttpError)?;
let resp = match resp.error_for_status_ref() {
Ok(_) => resp,
Err(_) => {
return Err(Error::S3Error {
status: resp.status(),
body: resp.text().await.unwrap_or_default(),
})
}
};
extract_and_append_etag(&bump, &mut etags, resp.headers())?;
let mut buffer = match buffer.try_into_mut() {
@@ -520,6 +528,7 @@ async fn multipart_stream_to_s3(
while buffer.len() < (s3_multipart_part_size as usize / 2) {
// Wait for the pipe to be readable
use std::io;
reader.readable().await?;
match reader.try_read_buf(&mut buffer) {
@@ -558,11 +567,24 @@ async fn multipart_stream_to_s3(
}
})
});
in_flight.push_back((task, body));
// Wrap the task to return both the result and the buffer
let task_with_buffer = async move { (task.await, body) };
in_flight.push(task_with_buffer);
}
for (handle, _buffer) in in_flight {
let resp = join_and_map_error(handle).await?;
while let Some((join_result, _buffer)) = in_flight.next().await {
// safety: Panic happens if the task (JoinHandle) was aborted, cancelled, or panicked
let resp = join_result.unwrap().map_err(Error::S3HttpError)?;
let resp = match resp.error_for_status_ref() {
Ok(_) => resp,
Err(_) => {
return Err(Error::S3Error {
status: resp.status(),
body: resp.text().await.unwrap_or_default(),
})
}
};
extract_and_append_etag(&bump, &mut etags, resp.headers())?;
}
@@ -583,17 +605,15 @@ async fn multipart_stream_to_s3(
async move {
match client.post(url).body(body).send().await {
Ok(resp) if resp.status().is_client_error() => {
Err(backoff::Error::Permanent(Error::S3Error {
status: resp.status(),
body: resp.text().await.unwrap_or_default(),
}))
resp.error_for_status().map_err(backoff::Error::Permanent)
}
Ok(resp) => Ok(resp),
Err(e) => Err(backoff::Error::transient(Error::S3HttpError(e))),
Err(e) => Err(backoff::Error::transient(e)),
}
}
})
.await?;
.await
.map_err(Error::S3HttpError)?;
let status = resp.status();
let body = resp.text().await.map_err(|e| Error::S3Error { status, body: e.to_string() })?;
@@ -604,22 +624,6 @@ async fn multipart_stream_to_s3(
}
}
#[cfg(unix)]
async fn join_and_map_error(
join_handle: tokio::task::JoinHandle<Result<reqwest::Response, reqwest::Error>>,
) -> Result<reqwest::Response> {
// safety: Panic happens if the task (JoinHandle) was aborted, cancelled, or panicked
let request = join_handle.await.unwrap();
let resp = request.map_err(Error::S3HttpError)?;
match resp.error_for_status_ref() {
Ok(_) => Ok(resp),
Err(_) => Err(Error::S3Error {
status: resp.status(),
body: resp.text().await.unwrap_or_default(),
}),
}
}
#[cfg(unix)]
fn extract_and_append_etag<'b>(
bump: &'b bumpalo::Bump,

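One detail in the hunk above: the inlined status handling calls `error_for_status_ref()` rather than `error_for_status()`, because the by-reference variant leaves the `Response` intact so its body can still be read into the error. A minimal sketch of that pattern, with a hypothetical `UploadError` type standing in for the scheduler's `Error`:

```rust
use reqwest::{Response, StatusCode};

// Hypothetical error type mirroring the shape used in the diff above.
#[derive(Debug)]
enum UploadError {
    Http(reqwest::Error),
    S3 { status: StatusCode, body: String },
}

// Turn a non-2xx response into an error that still carries the response body.
async fn check_status(resp: Response) -> Result<Response, UploadError> {
    // `error_for_status_ref()` only borrows the response, so on failure we can
    // still consume it with `.text()` to keep the S3 error message.
    match resp.error_for_status_ref() {
        Ok(_) => Ok(resp),
        Err(_) => Err(UploadError::S3 {
            status: resp.status(),
            body: resp.text().await.unwrap_or_default(),
        }),
    }
}

#[tokio::main]
async fn main() -> Result<(), UploadError> {
    let resp = reqwest::Client::new()
        .get("https://example.org/")
        .send()
        .await
        .map_err(UploadError::Http)?;
    let resp = check_status(resp).await?;
    println!("status: {}", resp.status());
    Ok(())
}
```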

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, foreign_keys: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, foreign_keys: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,]


@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, foreign_keys: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, foreign_keys: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []


@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 27, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 25, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggo` already exists.", error_code: "index_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_already_exists" }, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -57,7 +57,7 @@ girafo: { number_of_documents: 0, field_distribution: {} }
[timestamp] [4,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.27.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.25.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
1 {uid: 1, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }


@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 27, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 25, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
----------------------------------------------------------------------
### Status:
enqueued [0,]


@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 27, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 25, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
----------------------------------------------------------------------
### Status:


@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 27, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 25, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
----------------------------------------------------------------------
### Status:
@@ -37,7 +37,7 @@ catto [1,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.27.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.25.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]


@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 27, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 25, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
----------------------------------------------------------------------
@@ -40,7 +40,7 @@ doggo [2,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.27.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.25.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]


@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 27, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 25, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
3 {uid: 3, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -43,7 +43,7 @@ doggo [2,3,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.27.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.25.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]


@@ -48,9 +48,6 @@ pub fn upgrade_index_scheduler(
(1, 22, _) => 0,
(1, 23, _) => 0,
(1, 24, _) => 0,
(1, 25, _) => 0,
(1, 26, _) => 0,
(1, 27, _) => 0,
(major, minor, patch) => {
if major > current_major
|| (major == current_major && minor > current_minor)


@@ -327,7 +327,6 @@ InvalidSettingsFacetSearch , InvalidRequest , BAD_REQU
InvalidSettingsPrefixSearch , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFaceting , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFilterableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsForeignKeys , InvalidRequest , BAD_REQUEST ;
InvalidSettingsPagination , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSearchCutoffMs , InvalidRequest , BAD_REQUEST ;
InvalidSettingsEmbedders , InvalidRequest , BAD_REQUEST ;


@@ -22,7 +22,6 @@ pub struct RuntimeTogglableFeatures {
pub chat_completions: bool,
pub multimodal: bool,
pub vector_store_setting: bool,
pub foreign_keys: bool,
}
#[derive(Default, Debug, Clone, Copy)]


@@ -15,10 +15,7 @@ pub use milli::update::ChatSettings;
use milli::update::Setting;
use milli::vector::db::IndexEmbeddingConfig;
use milli::vector::VectorStoreBackend;
use milli::{
Criterion, CriterionError, FilterableAttributesRule, ForeignKey, Index,
DEFAULT_VALUES_PER_FACET,
};
use milli::{Criterion, CriterionError, FilterableAttributesRule, Index, DEFAULT_VALUES_PER_FACET};
use serde::{Deserialize, Serialize, Serializer};
use utoipa::ToSchema;
@@ -224,12 +221,6 @@ pub struct Settings<T> {
#[schema(value_type = Option<Vec<String>>, example = json!(["release_date"]))]
pub sortable_attributes: Setting<BTreeSet<String>>,
/// Foreign keys to use for cross-index filtering search.
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsForeignKeys>)]
#[schema(value_type = Option<Vec<ForeignKey>>, example = json!([{"foreignIndexUid": "products", "fieldName": "productId"}]))]
pub foreign_keys: Setting<Vec<ForeignKey>>,
/// List of ranking rules sorted by order of importance. The order is customizable.
/// [A list of ordered built-in ranking rules](https://www.meilisearch.com/docs/learn/relevancy/relevancy).
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
@@ -385,7 +376,6 @@ impl Settings<Checked> {
displayed_attributes: Setting::Reset.into(),
searchable_attributes: Setting::Reset.into(),
filterable_attributes: Setting::Reset,
foreign_keys: Setting::Reset,
sortable_attributes: Setting::Reset,
ranking_rules: Setting::Reset,
stop_words: Setting::Reset,
@@ -414,7 +404,6 @@ impl Settings<Checked> {
displayed_attributes,
searchable_attributes,
filterable_attributes,
foreign_keys,
sortable_attributes,
ranking_rules,
stop_words,
@@ -442,7 +431,6 @@ impl Settings<Checked> {
searchable_attributes,
filterable_attributes,
sortable_attributes,
foreign_keys,
ranking_rules,
stop_words,
non_separator_tokens,
@@ -494,7 +482,6 @@ impl Settings<Unchecked> {
displayed_attributes: displayed_attributes.into(),
searchable_attributes: searchable_attributes.into(),
filterable_attributes: self.filterable_attributes,
foreign_keys: self.foreign_keys,
sortable_attributes: self.sortable_attributes,
ranking_rules: self.ranking_rules,
stop_words: self.stop_words,
@@ -556,7 +543,6 @@ impl Settings<Unchecked> {
.sortable_attributes
.clone()
.or(self.sortable_attributes.clone()),
foreign_keys: other.foreign_keys.clone().or(self.foreign_keys.clone()),
ranking_rules: other.ranking_rules.clone().or(self.ranking_rules.clone()),
stop_words: other.stop_words.clone().or(self.stop_words.clone()),
non_separator_tokens: other
@@ -618,7 +604,6 @@ pub fn apply_settings_to_builder(
searchable_attributes,
filterable_attributes,
sortable_attributes,
foreign_keys,
ranking_rules,
stop_words,
non_separator_tokens,
@@ -666,12 +651,6 @@ pub fn apply_settings_to_builder(
Setting::NotSet => (),
}
match foreign_keys {
Setting::Set(ref keys) => builder.set_foreign_keys(keys.clone().into_iter().collect()),
Setting::Reset => builder.reset_foreign_keys(),
Setting::NotSet => (),
}
match ranking_rules {
Setting::Set(ref criteria) => {
builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
@@ -889,8 +868,6 @@ pub fn settings(
let sortable_attributes = index.sortable_fields(rtxn)?.into_iter().collect();
let foreign_keys = index.foreign_keys(rtxn)?.into_iter().collect();
let criteria = index.criteria(rtxn)?;
let stop_words = index
@@ -988,7 +965,6 @@ pub fn settings(
.into(),
filterable_attributes: Setting::Set(filterable_attributes),
sortable_attributes: Setting::Set(sortable_attributes),
foreign_keys: Setting::Set(foreign_keys),
ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
stop_words: Setting::Set(stop_words),
non_separator_tokens: Setting::Set(non_separator_tokens),
@@ -1231,7 +1207,6 @@ pub(crate) mod test {
searchable_attributes: Setting::Set(vec![String::from("hello")]).into(),
filterable_attributes: Setting::NotSet,
sortable_attributes: Setting::NotSet,
foreign_keys: Setting::NotSet,
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,
@@ -1265,7 +1240,6 @@ pub(crate) mod test {
.into(),
filterable_attributes: Setting::NotSet,
sortable_attributes: Setting::NotSet,
foreign_keys: Setting::NotSet,
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,


@@ -195,7 +195,7 @@ struct Infos {
experimental_enable_logs_route: bool,
experimental_reduce_indexing_memory_usage: bool,
experimental_max_number_of_batched_tasks: usize,
experimental_limit_batched_tasks_total_size: Option<u64>,
experimental_limit_batched_tasks_total_size: u64,
experimental_network: bool,
experimental_multimodal: bool,
experimental_chat_completions: bool,
@@ -208,7 +208,6 @@ struct Infos {
experimental_no_edition_2024_for_prefix_post_processing: bool,
experimental_no_edition_2024_for_facet_post_processing: bool,
experimental_vector_store_setting: bool,
experimental_foreign_keys: bool,
experimental_personalization: bool,
gpu_enabled: bool,
db_path: bool,
@@ -318,7 +317,6 @@ impl Infos {
chat_completions,
multimodal,
vector_store_setting,
foreign_keys,
} = features;
// We're going to override every sensible information.
@@ -345,7 +343,6 @@ impl Infos {
experimental_no_snapshot_compaction,
experimental_no_edition_2024_for_dumps,
experimental_vector_store_setting: vector_store_setting,
experimental_foreign_keys: foreign_keys,
gpu_enabled: meilisearch_types::milli::vector::is_cuda_enabled(),
db_path: db_path != PathBuf::from("./data.ms"),
import_dump: import_dump.is_some(),
@@ -362,7 +359,7 @@ impl Infos {
http_payload_size_limit,
experimental_max_number_of_batched_tasks,
experimental_limit_batched_tasks_total_size:
experimental_limit_batched_tasks_total_size.map(|size| size.as_u64()),
experimental_limit_batched_tasks_total_size.into(),
task_queue_webhook: task_webhook_url.is_some(),
task_webhook_authorization_header: task_webhook_authorization_header.is_some(),
log_level: log_level.to_string(),


@@ -230,17 +230,7 @@ pub fn setup_meilisearch(
cleanup_enabled: !opt.experimental_replication_parameters,
max_number_of_tasks: 1_000_000,
max_number_of_batched_tasks: opt.experimental_max_number_of_batched_tasks,
batched_tasks_size_limit: opt.experimental_limit_batched_tasks_total_size.map_or_else(
|| {
opt.indexer_options
.max_indexing_memory
// By default, we use half of the available memory to determine the size of batched tasks
.map_or(u64::MAX, |mem| mem.as_u64() / 2)
// And never exceed 10 GiB when we infer the limit
.min(10 * 1024 * 1024 * 1024)
},
|size| size.as_u64(),
),
batched_tasks_size_limit: opt.experimental_limit_batched_tasks_total_size.into(),
index_growth_amount: byte_unit::Byte::from_str("10GiB").unwrap().as_u64() as usize,
index_count: DEFAULT_INDEX_COUNT,
instance_features: opt.to_instance_features(),


@@ -473,14 +473,11 @@ pub struct Opt {
#[serde(default = "default_limit_batched_tasks")]
pub experimental_max_number_of_batched_tasks: usize,
/// Experimentally controls the maximum total size, in bytes, of tasks that will be processed
/// simultaneously. When unspecified, defaults to half of the maximum indexing memory and
/// clamped to 10 GiB.
///
/// See: <https://github.com/orgs/meilisearch/discussions/801>
#[clap(long, env = MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE)]
#[serde(default)]
pub experimental_limit_batched_tasks_total_size: Option<Byte>,
/// Experimentally reduces the maximum total size, in bytes, of tasks that will be processed at once,
/// see: <https://github.com/orgs/meilisearch/discussions/801>
#[clap(long, env = MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE, default_value_t = default_limit_batched_tasks_total_size())]
#[serde(default = "default_limit_batched_tasks_total_size")]
pub experimental_limit_batched_tasks_total_size: Byte,
/// Enables experimental caching of search query embeddings. The value represents the maximal number of entries in the cache of each
/// distinct embedder.
@@ -704,12 +701,10 @@ impl Opt {
MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS,
experimental_max_number_of_batched_tasks.to_string(),
);
if let Some(limit) = experimental_limit_batched_tasks_total_size {
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE,
limit.to_string(),
);
}
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE,
experimental_limit_batched_tasks_total_size.to_string(),
);
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES,
experimental_embedding_cache_entries.to_string(),
@@ -1278,6 +1273,10 @@ fn default_limit_batched_tasks() -> usize {
usize::MAX
}
fn default_limit_batched_tasks_total_size() -> Byte {
Byte::from_u64(u64::MAX)
}
fn default_embedding_cache_entries() -> usize {
0
}


@@ -1,14 +1,14 @@
use std::time::Duration;
use meilisearch_types::error::{Code, ErrorCode, ResponseError};
use meilisearch_types::milli::TimeBudget;
use crate::search::{Personalize, SearchResult};
use meilisearch_types::{
error::{Code, ErrorCode, ResponseError},
milli::TimeBudget,
};
use rand::Rng;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use std::time::Duration;
use tracing::{debug, info, warn};
use crate::search::{Personalize, SearchResult};
const COHERE_API_URL: &str = "https://api.cohere.ai/v1/rerank";
const MAX_RETRIES: u32 = 10;


@@ -56,7 +56,6 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
chat_completions: Some(false),
multimodal: Some(false),
vector_store_setting: Some(false),
foreign_keys: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
@@ -107,8 +106,6 @@ pub struct RuntimeTogglableFeatures {
pub multimodal: Option<bool>,
#[deserr(default)]
pub vector_store_setting: Option<bool>,
#[deserr(default)]
pub foreign_keys: Option<bool>,
}
impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogglableFeatures {
@@ -124,7 +121,6 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
chat_completions,
multimodal,
vector_store_setting,
foreign_keys,
} = value;
Self {
@@ -138,7 +134,6 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
chat_completions: Some(chat_completions),
multimodal: Some(multimodal),
vector_store_setting: Some(vector_store_setting),
foreign_keys: Some(foreign_keys),
}
}
}
@@ -155,7 +150,6 @@ pub struct PatchExperimentalFeatureAnalytics {
chat_completions: bool,
multimodal: bool,
vector_store_setting: bool,
foreign_keys: bool,
}
impl Aggregate for PatchExperimentalFeatureAnalytics {
@@ -175,7 +169,6 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
chat_completions: new.chat_completions,
multimodal: new.multimodal,
vector_store_setting: new.vector_store_setting,
foreign_keys: new.foreign_keys,
})
}
@@ -204,7 +197,6 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
chat_completions: Some(false),
multimodal: Some(false),
vector_store_setting: Some(false),
foreign_keys: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
@@ -252,7 +244,6 @@ async fn patch_features(
.0
.vector_store_setting
.unwrap_or(old_features.vector_store_setting),
foreign_keys: new_features.0.foreign_keys.unwrap_or(old_features.foreign_keys),
};
// explicitly destructure for analytics rather than using the `Serialize` implementation, because
@@ -269,7 +260,6 @@ async fn patch_features(
chat_completions,
multimodal,
vector_store_setting,
foreign_keys,
} = new_features;
analytics.publish(
@@ -284,7 +274,6 @@ async fn patch_features(
chat_completions,
multimodal,
vector_store_setting,
foreign_keys,
},
&req,
);


@@ -531,17 +531,6 @@ make_setting_routes!(
camelcase_attr: "vectorStore",
analytics: VectorStoreAnalytics
},
{
route: "/foreign-keys",
update_verb: put,
value_type: Vec<meilisearch_types::milli::ForeignKey>,
err_type: meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsForeignKeys,
>,
attr: foreign_keys,
camelcase_attr: "foreignKeys",
analytics: ForeignKeysAnalytics
},
);
#[utoipa::path(
@@ -606,7 +595,6 @@ pub async fn update_all(
filterable_attributes: FilterableAttributesAnalytics::new(
new_settings.filterable_attributes.as_ref().set(),
),
foreign_keys: ForeignKeysAnalytics::new(new_settings.foreign_keys.as_ref().set()),
distinct_attribute: DistinctAttributeAnalytics::new(
new_settings.distinct_attribute.as_ref().set(),
),
@@ -700,10 +688,6 @@ pub async fn get_all(
new_settings.vector_store = Setting::NotSet;
}
if features.check_foreign_keys_setting("showing index `foreignKeys` settings").is_err() {
new_settings.foreign_keys = Setting::NotSet;
}
debug!(returns = ?new_settings, "Get all settings");
Ok(HttpResponse::Ok().json(new_settings))
}
@@ -809,9 +793,5 @@ fn validate_settings(
features.check_vector_store_setting("setting `vectorStore` in the index settings")?;
}
if let Setting::Set(_) = &settings.foreign_keys {
features.check_foreign_keys_setting("setting `foreignKeys` in the index settings")?;
}
Ok(settings.validate()?)
}


@@ -9,7 +9,7 @@ use meilisearch_types::facet_values_sort::FacetValuesSort;
use meilisearch_types::locales::{Locale, LocalizedAttributesRuleView};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::vector::VectorStoreBackend;
use meilisearch_types::milli::{FilterableAttributesRule, ForeignKey};
use meilisearch_types::milli::FilterableAttributesRule;
use meilisearch_types::settings::{
ChatSettings, FacetingSettings, PaginationSettings, PrefixSearchSettings,
ProximityPrecisionView, RankingRuleView, SettingEmbeddingSettings, TypoSettings,
@@ -25,7 +25,6 @@ pub struct SettingsAnalytics {
pub displayed_attributes: DisplayedAttributesAnalytics,
pub sortable_attributes: SortableAttributesAnalytics,
pub filterable_attributes: FilterableAttributesAnalytics,
pub foreign_keys: ForeignKeysAnalytics,
pub distinct_attribute: DistinctAttributeAnalytics,
pub proximity_precision: ProximityPrecisionAnalytics,
pub typo_tolerance: TypoToleranceAnalytics,
@@ -99,10 +98,6 @@ impl Aggregate for SettingsAnalytics {
.has_patterns
.or(self.filterable_attributes.has_patterns),
},
foreign_keys: ForeignKeysAnalytics {
set: new.foreign_keys.set | self.foreign_keys.set,
total: new.foreign_keys.total.or(self.foreign_keys.total),
},
distinct_attribute: DistinctAttributeAnalytics {
set: self.distinct_attribute.set | new.distinct_attribute.set,
},
@@ -367,22 +362,6 @@ impl FilterableAttributesAnalytics {
}
}
#[derive(Serialize, Default)]
pub struct ForeignKeysAnalytics {
pub set: bool,
pub total: Option<usize>,
}
impl ForeignKeysAnalytics {
pub fn new(settings: Option<&Vec<ForeignKey>>) -> Self {
Self { set: settings.is_some(), total: settings.as_ref().map(|s| s.len()) }
}
pub fn into_settings(self) -> SettingsAnalytics {
SettingsAnalytics { foreign_keys: self, ..Default::default() }
}
}
#[derive(Serialize, Default)]
pub struct DistinctAttributeAnalytics {
pub set: bool,


@@ -18,9 +18,10 @@ use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
use uuid::Uuid;
use crate::search::SearchMetadata;
use super::super::{ComputedFacets, FacetStats, HitsInfo, SearchHit, SearchQueryWithIndex};
use crate::milli::vector::Embedding;
use crate::search::SearchMetadata;
pub const DEFAULT_FEDERATED_WEIGHT: f64 = 1.0;


@@ -1339,117 +1339,3 @@ async fn get_document_with_vectors() {
}
"###);
}
#[actix_rt::test]
async fn test_fetch_documents_pagination_with_sorting() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
// Set name as sortable attribute
let (task, code) = index.update_settings_sortable_attributes(json!(["name"])).await;
assert_eq!(code, 202);
server.wait_task(task.uid()).await.succeeded();
let documents = json!((0..50)
.map(|i| json!({"id": i, "name": format!("doc_{:05}", std::cmp::min(i, 5))}))
.collect::<Vec<_>>());
// Add documents as described in the bug report
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
server.wait_task(task.uid()).await.succeeded();
// Request 1 (first page): offset 0, limit 2
let (response, code) = index
.fetch_documents(json!({
"offset": 0,
"limit": 2,
"sort": ["name:asc"]
}))
.await;
assert_eq!(code, 200);
let results = response["results"].as_array().unwrap();
snapshot!(json_string!(results), @r###"
[
{
"id": 0,
"name": "doc_00000"
},
{
"id": 1,
"name": "doc_00001"
}
]
"###);
// Request 2 (second page): offset 2, limit 2
let (response, code) = index
.fetch_documents(json!({
"offset": 2,
"limit": 2,
"sort": ["name:asc"]
}))
.await;
assert_eq!(code, 200);
let results = response["results"].as_array().unwrap();
snapshot!(json_string!(results), @r###"
[
{
"id": 2,
"name": "doc_00002"
},
{
"id": 3,
"name": "doc_00003"
}
]
"###);
// Request 3 (third page): offset 4, limit 2
let (response, code) = index
.fetch_documents(json!({
"offset": 4,
"limit": 2,
"sort": ["name:asc"]
}))
.await;
assert_eq!(code, 200);
let results = response["results"].as_array().unwrap();
snapshot!(json_string!(results), @r###"
[
{
"id": 4,
"name": "doc_00004"
},
{
"id": 5,
"name": "doc_00005"
}
]
"###);
// Request 4 (fourth page): offset 6, limit 2
let (response, code) = index
.fetch_documents(json!({
"offset": 6,
"limit": 2,
"sort": ["name:asc"]
}))
.await;
assert_eq!(code, 200);
let results = response["results"].as_array().unwrap();
snapshot!(json_string!(results), @r###"
[
{
"id": 6,
"name": "doc_00005"
},
{
"id": 7,
"name": "doc_00005"
}
]
"###);
}


@@ -237,7 +237,6 @@ async fn import_dump_v1_movie_with_settings() {
"sortableAttributes": [
"genres"
],
"foreignKeys": [],
"rankingRules": [
"typo",
"words",
@@ -412,7 +411,6 @@ async fn import_dump_v1_rubygems_with_settings() {
"sortableAttributes": [
"version"
],
"foreignKeys": [],
"rankingRules": [
"typo",
"words",
@@ -742,7 +740,6 @@ async fn import_dump_v2_movie_with_settings() {
"genres"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"words",
"typo",
@@ -914,7 +911,6 @@ async fn import_dump_v2_rubygems_with_settings() {
"version"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"typo",
"words",
@@ -1244,7 +1240,6 @@ async fn import_dump_v3_movie_with_settings() {
"genres"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"words",
"typo",
@@ -1416,7 +1411,6 @@ async fn import_dump_v3_rubygems_with_settings() {
"version"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"typo",
"words",
@@ -1746,7 +1740,6 @@ async fn import_dump_v4_movie_with_settings() {
"genres"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"words",
"typo",
@@ -1918,7 +1911,6 @@ async fn import_dump_v4_rubygems_with_settings() {
"version"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"typo",
"words",
@@ -2198,8 +2190,7 @@ async fn import_dump_v6_containing_experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false,
"foreignKeys": false
"vectorStoreSetting": false
}
"###);


@@ -27,8 +27,7 @@ async fn experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false,
"foreignKeys": false
"vectorStoreSetting": false
}
"###);
@@ -46,8 +45,7 @@ async fn experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false,
"foreignKeys": false
"vectorStoreSetting": false
}
"###);
@@ -65,8 +63,7 @@ async fn experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false,
"foreignKeys": false
"vectorStoreSetting": false
}
"###);
@@ -85,8 +82,7 @@ async fn experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false,
"foreignKeys": false
"vectorStoreSetting": false
}
"###);
@@ -105,8 +101,7 @@ async fn experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false,
"foreignKeys": false
"vectorStoreSetting": false
}
"###);
}
@@ -132,8 +127,7 @@ async fn experimental_feature_metrics() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false,
"foreignKeys": false
"vectorStoreSetting": false
}
"###);
@@ -180,7 +174,7 @@ async fn errors() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`, `multimodal`, `vectorStoreSetting`, `foreignKeys`",
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`, `multimodal`, `vectorStoreSetting`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"


@@ -137,60 +137,6 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
}])
});
static MANY_DOCS: Lazy<Value> = Lazy::new(|| {
json!([
{
"title": "Shazam!",
"desc": "a Captain Marvel ersatz",
"id": "1",
},
{
"title": "Captain Planet",
"desc": "He's not part of the Marvel Cinematic Universe",
"id": "2",
},
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"id": "3",
},
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"id": "4",
},
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"id": "5",
},
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"id": "6",
},
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"id": "7",
},
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"id": "8",
},
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"id": "9",
},
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"id": "10",
}])
});
#[actix_rt::test]
async fn simple_search() {
let server = Server::new_shared();
@@ -503,38 +449,6 @@ async fn simple_search_hf() {
snapshot!(response["semanticHitCount"], @"3");
}
#[actix_rt::test]
async fn issue_5976_missing_docs_hf() {
let server = Server::new_shared();
let index = index_with_documents_hf(server, &MANY_DOCS).await;
let (response, code) = index
.search_post(
json!({"q": "Wonder replacement", "hybrid": {"embedder": "default", "semanticRatio": 1.0}, "retrieveVectors": true}),
)
.await;
snapshot!(code, @"200 OK");
let are_empty: Vec<_> = response["hits"]
.as_array()
.unwrap()
.iter()
.map(|hit| hit["_vectors"]["default"]["embeddings"].as_array().unwrap().is_empty())
.collect();
snapshot!(json!(are_empty), @r###"
[
false,
false,
false,
false,
false,
false,
false,
false,
false,
false
]
"###);
}
#[actix_rt::test]
async fn distribution_shift() {
let server = Server::new_shared();


@@ -318,7 +318,6 @@ async fn secrets_are_hidden_in_settings() {
],
"filterableAttributes": [],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"words",
"typo",


@@ -43,7 +43,7 @@ async fn version_too_old() {
std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.27.0");
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.25.0");
}
#[actix_rt::test]
@@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.27.1 is higher than the Meilisearch version 1.27.0. Downgrade is not supported");
snapshot!(err, @"Database version 1.25.1 is higher than the Meilisearch version 1.25.0. Downgrade is not supported");
}
#[actix_rt::test]
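
The `version_too_old` and `version_requires_downgrade` tests above pin error messages that embed the version compiled into the binary, which is why their snapshots move whenever the crate version changes. As a standalone sketch (not the actual Meilisearch code, with a simplified message), the underlying check amounts to parsing the on-disk VERSION triple and comparing it with the binary's version tuple:

use std::cmp::Ordering;

// Hypothetical helpers for the sketch: parse "MAJOR.MINOR.PATCH" into a tuple
// and compare it against the version the binary was compiled with.
fn parse(version: &str) -> Option<(u32, u32, u32)> {
    let mut parts = version.trim().splitn(3, '.').map(|p| p.parse::<u32>().ok());
    Some((parts.next()??, parts.next()??, parts.next()??))
}

fn check(db_version: &str, binary: (u32, u32, u32)) -> Result<&'static str, String> {
    let db = parse(db_version).ok_or_else(|| format!("unreadable VERSION file: {db_version:?}"))?;
    match db.cmp(&binary) {
        // A database newer than the binary can never be opened.
        Ordering::Greater => Err(format!(
            "database version {}.{}.{} is higher than the binary version {}.{}.{}",
            db.0, db.1, db.2, binary.0, binary.1, binary.2
        )),
        // An older database is a candidate for the dumpless upgrade.
        Ordering::Less => Ok("needs upgrade"),
        Ordering::Equal => Ok("up to date"),
    }
}

fn main() {
    let binary = (1, 25, 0); // stand-in for the compiled-in version
    assert!(check("1.25.1", binary).is_err());
    assert_eq!(check("1.12.0", binary), Ok("needs upgrade"));
    assert_eq!(check("1.25.0", binary), Ok("up to date"));
}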

View File

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.27.0"
"upgradeTo": "v1.25.0"
},
"stats": {
"totalNbTasks": 1,

View File

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.27.0"
"upgradeTo": "v1.25.0"
},
"stats": {
"totalNbTasks": 1,

View File

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.27.0"
"upgradeTo": "v1.25.0"
},
"stats": {
"totalNbTasks": 1,

View File

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.27.0"
"upgradeTo": "v1.25.0"
},
"error": null,
"duration": "[duration]",

View File

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.27.0"
"upgradeTo": "v1.25.0"
},
"error": null,
"duration": "[duration]",

View File

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.27.0"
"upgradeTo": "v1.25.0"
},
"error": null,
"duration": "[duration]",

View File

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.27.0"
"upgradeTo": "v1.25.0"
},
"stats": {
"totalNbTasks": 1,

View File

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.27.0"
"upgradeTo": "v1.25.0"
},
"error": null,
"duration": "[duration]",

View File

@@ -101,6 +101,7 @@ bumpalo = "3.18.1"
bumparaw-collections = "0.1.4"
steppe = { version = "0.4", default-features = false }
thread_local = "1.1.9"
allocator-api2 = "0.3.0"
rustc-hash = "2.1.1"
enum-iterator = "2.1.0"
bbqueue = { git = "https://github.com/meilisearch/bbqueue" }

View File

@@ -87,7 +87,7 @@ impl Iterator for SortedDocumentsIterator<'_> {
};
// Otherwise don't directly iterate over children, skip them if we know we will go further
let mut to_skip = n;
let mut to_skip = n - 1;
while to_skip > 0 {
if let Err(e) = SortedDocumentsIterator::update_current(
current_child,
@@ -108,7 +108,7 @@ impl Iterator for SortedDocumentsIterator<'_> {
continue;
} else {
// The current iterator is large enough, so we can forward the call to it.
return inner.nth(to_skip);
return inner.nth(to_skip + 1);
}
}
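
The change above shifts the skip count passed to `Iterator::nth` by one in each direction. The relevant contract, shown here as a standalone sketch rather than Meilisearch code, is that `nth(k)` consumes `k + 1` elements and yields the element at offset `k`, so `nth(0)` is equivalent to `next()`:

fn main() {
    let mut it = [10, 20, 30, 40, 50].into_iter();

    // `nth(2)` skips two elements (10 and 20) and yields the third one.
    assert_eq!(it.nth(2), Some(30));

    // The element returned by `nth` is consumed as well, so iteration resumes at 40.
    assert_eq!(it.next(), Some(40));

    // `nth(0)` is just `next()`: it yields 50 without skipping anything.
    assert_eq!(it.nth(0), Some(50));

    assert_eq!(it.next(), None);
}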

View File

@@ -53,7 +53,6 @@ pub mod main_key {
pub const HIDDEN_FACETED_FIELDS_KEY: &str = "hidden-faceted-fields";
pub const FILTERABLE_FIELDS_KEY: &str = "filterable-fields";
pub const SORTABLE_FIELDS_KEY: &str = "sortable-fields";
pub const FOREIGN_KEYS_KEY: &str = "foreign-keys";
pub const FIELD_DISTRIBUTION_KEY: &str = "fields-distribution";
pub const FIELDS_IDS_MAP_KEY: &str = "fields-ids-map";
pub const FIELDIDS_WEIGHTS_MAP_KEY: &str = "fieldids-weights-map";

View File

@@ -19,7 +19,6 @@ mod external_documents_ids;
pub mod facet;
mod fields_ids_map;
mod filterable_attributes_rules;
mod foreign_key;
pub mod heed_codec;
pub mod index;
mod localized_attributes_rules;
@@ -72,7 +71,6 @@ pub use self::filterable_attributes_rules::{
FilterFeatures, FilterableAttributesFeatures, FilterableAttributesPatterns,
FilterableAttributesRule,
};
pub use self::foreign_key::ForeignKey;
pub use self::heed_codec::{
BEU16StrCodec, BEU32StrCodec, BoRoaringBitmapCodec, BoRoaringBitmapLenCodec,
CboRoaringBitmapCodec, CboRoaringBitmapLenCodec, FieldIdWordCountCodec, ObkvCodec,

View File

@@ -45,8 +45,7 @@ use crate::vector::{
VectorStoreBackend,
};
use crate::{
ChannelCongestion, FieldId, FilterableAttributesRule, ForeignKey, Index,
LocalizedAttributesRule, Result,
ChannelCongestion, FieldId, FilterableAttributesRule, Index, LocalizedAttributesRule, Result,
};
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
@@ -177,7 +176,6 @@ pub struct Settings<'a, 't, 'i> {
displayed_fields: Setting<Vec<String>>,
filterable_fields: Setting<Vec<FilterableAttributesRule>>,
sortable_fields: Setting<HashSet<String>>,
foreign_keys: Setting<Vec<ForeignKey>>,
criteria: Setting<Vec<Criterion>>,
stop_words: Setting<BTreeSet<String>>,
non_separator_tokens: Setting<BTreeSet<String>>,
@@ -219,7 +217,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
displayed_fields: Setting::NotSet,
filterable_fields: Setting::NotSet,
sortable_fields: Setting::NotSet,
foreign_keys: Setting::NotSet,
criteria: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,
@@ -281,14 +278,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
self.sortable_fields = Setting::Reset;
}
pub fn set_foreign_keys(&mut self, keys: Vec<ForeignKey>) {
self.foreign_keys = Setting::Set(keys);
}
pub fn reset_foreign_keys(&mut self) {
self.foreign_keys = Setting::Reset;
}
pub fn reset_criteria(&mut self) {
self.criteria = Setting::Reset;
}
@@ -833,19 +822,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
Ok(())
}
fn update_foreign_keys(&mut self) -> Result<()> {
match self.foreign_keys {
Setting::Set(ref keys) => {
self.index.put_foreign_keys(self.wtxn, keys)?;
}
Setting::Reset => {
self.index.delete_foreign_keys(self.wtxn)?;
}
Setting::NotSet => (),
}
Ok(())
}
fn update_criteria(&mut self) -> Result<()> {
match &self.criteria {
Setting::Set(criteria) => {
@@ -1479,7 +1455,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
self.update_sort_facet_values_by()?;
self.update_pagination_max_total_hits()?;
self.update_search_cutoff()?;
self.update_foreign_keys()?;
// could trigger re-indexing
self.update_filterable()?;
@@ -1618,7 +1593,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
displayed_fields: Setting::NotSet,
filterable_fields: Setting::NotSet,
sortable_fields: Setting::NotSet,
foreign_keys: Setting::NotSet,
criteria: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,
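
The hunk above removes one field from the `Settings` builder; the surrounding code shows the builder's tri-state pattern, where each setting is either left untouched, overwritten, or reset. A standalone sketch of that pattern with simplified names (this is not the milli API):

// Simplified stand-ins for the sketch; the real types live in milli.
#[derive(Debug, Clone, PartialEq)]
enum Setting<T> {
    Set(T),   // overwrite the stored value
    Reset,    // go back to the default
    NotSet,   // leave the stored value untouched
}

struct FakeIndex {
    sortable_fields: Option<Vec<String>>,
}

impl FakeIndex {
    fn apply(&mut self, update: Setting<Vec<String>>) {
        match update {
            Setting::Set(fields) => self.sortable_fields = Some(fields),
            Setting::Reset => self.sortable_fields = None,
            Setting::NotSet => (),
        }
    }
}

fn main() {
    let mut index = FakeIndex { sortable_fields: None };

    index.apply(Setting::Set(vec!["price".to_string()]));
    assert_eq!(index.sortable_fields, Some(vec!["price".to_string()]));

    index.apply(Setting::NotSet); // an update that does not mention the field
    assert_eq!(index.sortable_fields, Some(vec!["price".to_string()]));

    index.apply(Setting::Reset); // an explicit reset to the default
    assert_eq!(index.sortable_fields, None);
}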

View File

@@ -874,7 +874,6 @@ fn test_correct_settings_init() {
displayed_fields,
filterable_fields,
sortable_fields,
foreign_keys,
criteria,
stop_words,
non_separator_tokens,
@@ -905,7 +904,6 @@ fn test_correct_settings_init() {
assert!(matches!(displayed_fields, Setting::NotSet));
assert!(matches!(filterable_fields, Setting::NotSet));
assert!(matches!(sortable_fields, Setting::NotSet));
assert!(matches!(foreign_keys, Setting::NotSet));
assert!(matches!(criteria, Setting::NotSet));
assert!(matches!(stop_words, Setting::NotSet));
assert!(matches!(non_separator_tokens, Setting::NotSet));

View File

@@ -43,8 +43,6 @@ const UPGRADE_FUNCTIONS: &[&dyn UpgradeIndex] = &[
&ToTargetNoOp { target: (1, 23, 0) },
&ToTargetNoOp { target: (1, 24, 0) },
&ToTargetNoOp { target: (1, 25, 0) },
&ToTargetNoOp { target: (1, 26, 0) },
&ToTargetNoOp { target: (1, 27, 0) },
// This is the last upgrade function, it will be called when the index is up to date.
// any other upgrade function should be added before this one.
&ToCurrentNoOp {},
@@ -81,8 +79,6 @@ const fn start(from: (u32, u32, u32)) -> Option<usize> {
(1, 23, _) => function_index!(13),
(1, 24, _) => function_index!(14),
(1, 25, _) => function_index!(15),
(1, 26, _) => function_index!(16),
(1, 27, _) => function_index!(17),
// We deliberately don't add a placeholder with (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH) here to force manually
// considering dumpless upgrade.
(_major, _minor, _patch) => return None,
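
The two hunks above shrink the upgrade table and the `start` dispatch that goes with it. As a standalone sketch (made-up versions and indices, not the milli upgrade code), the pattern is an ordered list of upgrade steps plus a match that maps the on-disk version to the first step still left to run; every step from there to the end of the table is then applied in order:

// Made-up targets for the sketch; the real table lives in milli's upgrade module.
const UPGRADE_TARGETS: &[(u32, u32, u32)] = &[(1, 23, 0), (1, 24, 0), (1, 25, 0)];

// Maps the on-disk version to the index of the first step that still has to run;
// unknown versions return None so the dumpless upgrade can be refused.
const fn start(from: (u32, u32, u32)) -> Option<usize> {
    match from {
        (1, 22, _) => Some(0),
        (1, 23, _) => Some(1),
        (1, 24, _) => Some(2),
        // No catch-all placeholder: every new version has to be added explicitly,
        // mirroring the comment in the real dispatch.
        _ => None,
    }
}

fn main() {
    let mut version = (1, 23, 4);
    let first = start(version).expect("version supported by the dumpless upgrade");
    for target in &UPGRADE_TARGETS[first..] {
        // A real step would migrate the index; the sketch only records the new version.
        version = *target;
    }
    assert_eq!(version, (1, 25, 0));
}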

View File

@@ -112,12 +112,13 @@ impl<'doc, C: OnEmbed<'doc>, I: Input> EmbedSession<'doc, C, I> {
rendered: I,
unused_vectors_distribution: &C::ErrorMetadata,
) -> Result<()> {
if self.inputs.len() >= self.inputs.capacity() {
self.embed_chunks(unused_vectors_distribution)?;
if self.inputs.len() < self.inputs.capacity() {
self.inputs.push(rendered);
self.metadata.push(metadata);
return Ok(());
}
self.inputs.push(rendered);
self.metadata.push(metadata);
Ok(())
self.embed_chunks(unused_vectors_distribution)
}
pub fn drain(mut self, unused_vectors_distribution: &C::ErrorMetadata) -> Result<C> {
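
The last hunk reworks how `EmbedSession` decides when to push another rendered input and when to flush a chunk to the embedder. A standalone sketch of the general capacity-bounded batching idea, in one possible arrangement (not the milli implementation, and without any of the async embedder calls):

// Standalone sketch: inputs accumulate up to a fixed capacity and a flush is
// triggered once the buffer is full, so the embedder only ever receives chunks
// of at most `capacity` items. Names are made up for the example.
struct BatchingSession {
    capacity: usize,
    inputs: Vec<String>,
    flushed: Vec<Vec<String>>, // stands in for requests handed to an embedder
}

impl BatchingSession {
    fn new(capacity: usize) -> Self {
        Self { capacity, inputs: Vec::with_capacity(capacity), flushed: Vec::new() }
    }

    fn request(&mut self, rendered: String) {
        if self.inputs.len() < self.capacity {
            self.inputs.push(rendered);
            return;
        }
        // Buffer full: send the pending chunk, then start a new one with this input.
        self.flush();
        self.inputs.push(rendered);
    }

    fn flush(&mut self) {
        if !self.inputs.is_empty() {
            self.flushed.push(std::mem::take(&mut self.inputs));
        }
    }

    fn drain(mut self) -> Vec<Vec<String>> {
        self.flush();
        self.flushed
    }
}

fn main() {
    let mut session = BatchingSession::new(3);
    for i in 0..7 {
        session.request(format!("doc-{i}"));
    }
    // Seven inputs with a capacity of three end up as chunks of 3, 3 and 1.
    let chunks = session.drain();
    assert_eq!(chunks.iter().map(|chunk| chunk.len()).collect::<Vec<_>>(), vec![3, 3, 1]);
}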