mirror of https://github.com/meilisearch/meilisearch.git
Compare commits
10 Commits
v1.29.0 ... document-j
| SHA1 |
|---|
| e8fdc63e2c |
| 9e3d3bb11c |
| 26e368b116 |
| ba95ac0915 |
| 75fcbfc2fe |
| 8c19b6d55e |
| 08d0f05ece |
| 4762e9afa0 |
| 12fcab91c5 |
| 792a72a23f |
.github/workflows/bench-manual.yml (vendored, 2 changes)

@@ -18,7 +18,7 @@ jobs:
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
with:
profile: minimal
.github/workflows/bench-pr.yml (vendored, 2 changes)

@@ -66,7 +66,7 @@ jobs:
fetch-depth: 0 # fetch full history to be able to get main commit sha
ref: ${{ steps.comment-branch.outputs.head_ref }}

- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1

- name: Run benchmarks on PR ${{ github.event.issue.id }}
run: |
.github/workflows/bench-push-indexing.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ jobs:
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1

# Run benchmarks
- name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
.github/workflows/benchmarks-manual.yml (vendored, 2 changes)

@@ -18,7 +18,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
with:
profile: minimal
.github/workflows/benchmarks-pr.yml (vendored, 2 changes)

@@ -44,7 +44,7 @@ jobs:
exit 1
fi

- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
with:
profile: minimal

@@ -16,7 +16,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
with:
profile: minimal

@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
with:
profile: minimal

@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
with:
profile: minimal

@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
with:
profile: minimal
.github/workflows/flaky-tests.yml (vendored, 4 changes)

@@ -3,7 +3,7 @@ name: Look for flaky tests
on:
workflow_dispatch:
schedule:
- cron: '0 4 * * *' # Every day at 4:00AM
- cron: "0 4 * * *" # Every day at 4:00AM

jobs:
flaky:

@@ -23,7 +23,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
- name: Install cargo-flaky
run: cargo install cargo-flaky
- name: Run cargo flaky in the dumps
.github/workflows/fuzzer-indexing.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1

# Run benchmarks
- name: Run the fuzzer
.github/workflows/publish-apt-brew-pkg.yml (vendored, 2 changes)

@@ -31,7 +31,7 @@ jobs:
sudo rm -rf "/usr/share/dotnet" || true
sudo rm -rf "/usr/local/lib/android" || true
sudo rm -rf "/usr/local/share/boost" || true
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
- name: Install cargo-deb
run: cargo install cargo-deb
- uses: actions/checkout@v5
.github/workflows/publish-release-assets.yml (vendored, 2 changes)

@@ -76,7 +76,7 @@ jobs:
needs: check-version
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
- name: Build
run: cargo build --release --locked ${{ matrix.feature-flag }} ${{ matrix.extra-args }}
# No need to upload binaries for dry run (cron or workflow_dispatch)
.github/workflows/sdks-tests.yml (vendored, 12 changes)

@@ -25,14 +25,18 @@ jobs:
- uses: actions/checkout@v5
- name: Define the Docker image we need to use
id: define-image
env:
EVENT_NAME: ${{ github.event_name }}
DOCKER_IMAGE_INPUT: ${{ github.event.inputs.docker_image }}
run: |
event=${{ github.event_name }}
echo "docker-image=nightly" >> $GITHUB_OUTPUT
if [[ $event == 'workflow_dispatch' ]]; then
echo "docker-image=${{ github.event.inputs.docker_image }}" >> $GITHUB_OUTPUT
if [[ "$EVENT_NAME" == 'workflow_dispatch' ]]; then
echo "docker-image=$DOCKER_IMAGE_INPUT" >> $GITHUB_OUTPUT
fi
- name: Docker image is ${{ steps.define-image.outputs.docker-image }}
run: echo "Docker image is ${{ steps.define-image.outputs.docker-image }}"
env:
DOCKER_IMAGE: ${{ steps.define-image.outputs.docker-image }}
run: echo "Docker image is $DOCKER_IMAGE"

##########
## SDKs ##
.github/workflows/test-suite.yml (vendored, 16 changes)

@@ -34,7 +34,7 @@ jobs:
- name: check free space after
run: df -h
- name: Setup test with Rust stable
uses: dtolnay/rust-toolchain@1.89
uses: dtolnay/rust-toolchain@1.91.1
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.8.0
with:

@@ -63,7 +63,7 @@ jobs:
- uses: actions/checkout@v5
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.8.0
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
- name: Run cargo build without any default features
uses: actions-rs/cargo@v1
with:

@@ -87,7 +87,7 @@ jobs:
sudo rm -rf "/usr/share/dotnet" || true
sudo rm -rf "/usr/local/lib/android" || true
sudo rm -rf "/usr/local/share/boost" || true
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
- name: Run cargo build with almost all features
run: |
cargo build --workspace --locked --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"

@@ -145,7 +145,7 @@ jobs:
sudo rm -rf "/usr/share/dotnet" || true
sudo rm -rf "/usr/local/lib/android" || true
sudo rm -rf "/usr/local/share/boost" || true
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
- name: Run cargo tree without default features and check lindera is not present
run: |
if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then

@@ -167,7 +167,7 @@ jobs:
sudo rm -rf "/usr/share/dotnet" || true
sudo rm -rf "/usr/local/lib/android" || true
sudo rm -rf "/usr/local/share/boost" || true
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.8.0
- name: Build

@@ -187,7 +187,7 @@ jobs:
sudo rm -rf "/usr/share/dotnet" || true
sudo rm -rf "/usr/local/lib/android" || true
sudo rm -rf "/usr/local/share/boost" || true
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
with:
components: clippy
- name: Cache dependencies

@@ -209,7 +209,7 @@ jobs:
sudo rm -rf "/usr/share/dotnet" || true
sudo rm -rf "/usr/local/lib/android" || true
sudo rm -rf "/usr/local/share/boost" || true
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
with:
components: rustfmt
- name: Cache dependencies

@@ -235,7 +235,7 @@ jobs:
sudo rm -rf "/usr/share/dotnet" || true
sudo rm -rf "/usr/local/lib/android" || true
sudo rm -rf "/usr/local/share/boost" || true
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.8.0
- name: Run declarative tests

@@ -24,7 +24,7 @@ jobs:
sudo rm -rf "/usr/share/dotnet" || true
sudo rm -rf "/usr/local/lib/android" || true
sudo rm -rf "/usr/local/share/boost" || true
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.91.1
- name: Install sd
run: cargo install sd
- name: Update Cargo.toml file
@@ -317,6 +317,7 @@ pub(crate) mod test {
FilterableAttributesRule::Field(S("race")),
FilterableAttributesRule::Field(S("age")),
]),
foreign_keys: Setting::NotSet,
sortable_attributes: Setting::Set(btreeset! { S("age") }),
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,

@@ -349,6 +349,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
v5::settings::Setting::Reset => v6::Setting::Reset,
v5::settings::Setting::NotSet => v6::Setting::NotSet,
},
foreign_keys: v6::Setting::NotSet,
sortable_attributes: settings.sortable_attributes.into(),
ranking_rules: {
match settings.ranking_rules {

@@ -107,19 +107,14 @@ impl Settings<Unchecked> {
}
}

#[derive(Debug, Clone, PartialEq)]
#[derive(Default, Debug, Clone, PartialEq)]
pub enum Setting<T> {
Set(T),
Reset,
#[default]
NotSet,
}

impl<T> Default for Setting<T> {
fn default() -> Self {
Self::NotSet
}
}

impl<T> Setting<T> {
pub const fn is_not_set(&self) -> bool {
matches!(self, Self::NotSet)
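Note: this hunk, and several identical ones further down, replace a hand-written `Default` impl with the derived form, marking `NotSet` with the `#[default]` attribute available since Rust 1.62. A minimal standalone sketch of the pattern (illustrative only; the real `Setting<T>` carries many more impls):

```rust
// `#[derive(Default)]` on an enum picks the variant tagged `#[default]`,
// which must be a unit variant -- here `NotSet`, as in the hunks above.
#[derive(Default, Debug, Clone, PartialEq)]
pub enum Setting<T> {
    Set(T),
    Reset,
    #[default]
    NotSet,
}

fn main() {
    // The manual `impl<T> Default for Setting<T>` removed by the diff produced the same value.
    let s: Setting<String> = Setting::default();
    assert_eq!(s, Setting::NotSet);
    println!("{s:?}");
}
```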
@@ -161,19 +161,14 @@ pub struct Facets {
pub min_level_size: Option<NonZeroUsize>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub enum Setting<T> {
Set(T),
Reset,
#[default]
NotSet,
}

impl<T> Default for Setting<T> {
fn default() -> Self {
Self::NotSet
}
}

impl<T> Setting<T> {
pub fn map<U, F>(self, f: F) -> Setting<U>
where

@@ -1,9 +1,7 @@
use std::fmt::{self, Display, Formatter};
use std::marker::PhantomData;
use std::str::FromStr;

use serde::de::Visitor;
use serde::{Deserialize, Deserializer};
use serde::Deserialize;
use uuid::Uuid;

use super::settings::{Settings, Unchecked};

@@ -82,59 +80,3 @@ impl Display for IndexUidFormatError {
}

impl std::error::Error for IndexUidFormatError {}

/// A type that tries to match either a star (*) or
/// any other thing that implements `FromStr`.
#[derive(Debug)]
#[cfg_attr(test, derive(serde::Serialize))]
pub enum StarOr<T> {
Star,
Other(T),
}

impl<'de, T, E> Deserialize<'de> for StarOr<T>
where
T: FromStr<Err = E>,
E: Display,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
/// Serde can't differentiate between `StarOr::Star` and `StarOr::Other` without a tag.
/// Simply using `#[serde(untagged)]` + `#[serde(rename="*")]` will lead to attempting to
/// deserialize everything as a `StarOr::Other`, including "*".
/// [`#[serde(other)]`](https://serde.rs/variant-attrs.html#other) might have helped but is
/// not supported on untagged enums.
struct StarOrVisitor<T>(PhantomData<T>);

impl<T, FE> Visitor<'_> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: Display,
{
type Value = StarOr<T>;

fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
formatter.write_str("a string")
}

fn visit_str<SE>(self, v: &str) -> Result<Self::Value, SE>
where
SE: serde::de::Error,
{
match v {
"*" => Ok(StarOr::Star),
v => {
let other = FromStr::from_str(v).map_err(|e: T::Err| {
SE::custom(format!("Invalid `other` value: {}", e))
})?;
Ok(StarOr::Other(other))
}
}
}
}

deserializer.deserialize_str(StarOrVisitor(PhantomData))
}
}

@@ -192,19 +192,14 @@ pub struct Facets {
pub min_level_size: Option<NonZeroUsize>,
}

#[derive(Debug, Clone, PartialEq, Eq, Copy)]
#[derive(Default, Debug, Clone, PartialEq, Eq, Copy)]
pub enum Setting<T> {
Set(T),
Reset,
#[default]
NotSet,
}

impl<T> Default for Setting<T> {
fn default() -> Self {
Self::NotSet
}
}

impl<T> Setting<T> {
pub fn set(self) -> Option<T> {
match self {

@@ -47,20 +47,15 @@ pub struct Settings<T> {
pub _kind: PhantomData<T>,
}

#[derive(Debug, Clone, PartialEq, Eq, Copy)]
#[derive(Default, Debug, Clone, PartialEq, Eq, Copy)]
#[cfg_attr(test, derive(serde::Serialize))]
pub enum Setting<T> {
Set(T),
Reset,
#[default]
NotSet,
}

impl<T> Default for Setting<T> {
fn default() -> Self {
Self::NotSet
}
}

impl<T> Setting<T> {
pub fn set(self) -> Option<T> {
match self {

@@ -322,7 +322,7 @@ impl From<Task> for TaskView {
_ => None,
});

let duration = finished_at.zip(started_at).map(|(tf, ts)| (tf - ts));
let duration = finished_at.zip(started_at).map(|(tf, ts)| tf - ts);

Self {
uid: id,

@@ -171,6 +171,19 @@ impl RoFeatures {
.into())
}
}

pub fn check_foreign_keys_setting(&self, disabled_action: &'static str) -> Result<()> {
if self.runtime.foreign_keys {
Ok(())
} else {
Err(FeatureNotEnabledError {
disabled_action,
feature: "foreign_keys",
issue_link: "https://github.com/orgs/meilisearch/discussions/873",
}
.into())
}
}
}

impl FeatureData {
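Note: `check_foreign_keys_setting` follows the same guard shape as the existing experimental-feature checks: consult the runtime flag and return a structured "feature not enabled" error otherwise. A self-contained sketch of that shape with simplified stand-in types (the real `Result`, error, and feature structs live in the index-scheduler and are not reproduced here):

```rust
// Simplified stand-ins; the field names mirror the hunk above, the types do not.
#[derive(Debug)]
struct FeatureNotEnabledError {
    disabled_action: &'static str,
    feature: &'static str,
    issue_link: &'static str,
}

#[derive(Default, Clone, Copy)]
struct RuntimeTogglableFeatures {
    foreign_keys: bool,
}

struct RoFeatures {
    runtime: RuntimeTogglableFeatures,
}

impl RoFeatures {
    fn check_foreign_keys_setting(
        &self,
        disabled_action: &'static str,
    ) -> Result<(), FeatureNotEnabledError> {
        if self.runtime.foreign_keys {
            Ok(())
        } else {
            Err(FeatureNotEnabledError {
                disabled_action,
                feature: "foreign_keys",
                issue_link: "https://github.com/orgs/meilisearch/discussions/873",
            })
        }
    }
}

fn main() {
    let features = RoFeatures { runtime: RuntimeTogglableFeatures::default() };
    // The flag defaults to off, so gated actions fail with a descriptive error.
    let err = features
        .check_foreign_keys_setting("setting `foreignKeys` in the index settings")
        .unwrap_err();
    println!("{} blocked: enable `{}` first ({})", err.disabled_action, err.feature, err.issue_link);
}
```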
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, foreign_keys: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, foreign_keys: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,]

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, foreign_keys: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, foreign_keys: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []

@@ -327,6 +327,7 @@ InvalidSettingsFacetSearch , InvalidRequest , BAD_REQU
InvalidSettingsPrefixSearch , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFaceting , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFilterableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsForeignKeys , InvalidRequest , BAD_REQUEST ;
InvalidSettingsPagination , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSearchCutoffMs , InvalidRequest , BAD_REQUEST ;
InvalidSettingsEmbedders , InvalidRequest , BAD_REQUEST ;

@@ -22,6 +22,7 @@ pub struct RuntimeTogglableFeatures {
pub chat_completions: bool,
pub multimodal: bool,
pub vector_store_setting: bool,
pub foreign_keys: bool,
}

#[derive(Default, Debug, Clone, Copy)]

@@ -15,7 +15,10 @@ pub use milli::update::ChatSettings;
use milli::update::Setting;
use milli::vector::db::IndexEmbeddingConfig;
use milli::vector::VectorStoreBackend;
use milli::{Criterion, CriterionError, FilterableAttributesRule, Index, DEFAULT_VALUES_PER_FACET};
use milli::{
Criterion, CriterionError, FilterableAttributesRule, ForeignKey, Index,
DEFAULT_VALUES_PER_FACET,
};
use serde::{Deserialize, Serialize, Serializer};
use utoipa::ToSchema;

@@ -221,6 +224,12 @@ pub struct Settings<T> {
#[schema(value_type = Option<Vec<String>>, example = json!(["release_date"]))]
pub sortable_attributes: Setting<BTreeSet<String>>,

/// Foreign keys to use for cross-index filtering search.
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsForeignKeys>)]
#[schema(value_type = Option<Vec<ForeignKey>>, example = json!([{"foreignIndexUid": "products", "fieldName": "productId"}]))]
pub foreign_keys: Setting<Vec<ForeignKey>>,

/// List of ranking rules sorted by order of importance. The order is customizable.
/// [A list of ordered built-in ranking rules](https://www.meilisearch.com/docs/learn/relevancy/relevancy).
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
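Note: the exact definition of `milli::ForeignKey` is not part of this diff, but the `#[schema(example = ...)]` annotation above pins the JSON shape of the new `foreignKeys` setting. A hedged sketch of that shape as a serde round-trip, using a stand-in struct whose field names are inferred from the example only:

```rust
use serde::{Deserialize, Serialize};

// Illustrative stand-in mirroring the schema example; not the real `milli::ForeignKey`.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForeignKey {
    foreign_index_uid: String,
    field_name: String,
}

fn main() -> Result<(), serde_json::Error> {
    // Payload shape taken from the schema example above.
    let body = r#"[{"foreignIndexUid": "products", "fieldName": "productId"}]"#;
    let keys: Vec<ForeignKey> = serde_json::from_str(body)?;
    assert_eq!(keys[0].field_name, "productId");
    println!("{}", serde_json::to_string_pretty(&keys)?);
    Ok(())
}
```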
@@ -376,6 +385,7 @@ impl Settings<Checked> {
displayed_attributes: Setting::Reset.into(),
searchable_attributes: Setting::Reset.into(),
filterable_attributes: Setting::Reset,
foreign_keys: Setting::Reset,
sortable_attributes: Setting::Reset,
ranking_rules: Setting::Reset,
stop_words: Setting::Reset,

@@ -404,6 +414,7 @@ impl Settings<Checked> {
displayed_attributes,
searchable_attributes,
filterable_attributes,
foreign_keys,
sortable_attributes,
ranking_rules,
stop_words,

@@ -431,6 +442,7 @@ impl Settings<Checked> {
searchable_attributes,
filterable_attributes,
sortable_attributes,
foreign_keys,
ranking_rules,
stop_words,
non_separator_tokens,

@@ -482,6 +494,7 @@ impl Settings<Unchecked> {
displayed_attributes: displayed_attributes.into(),
searchable_attributes: searchable_attributes.into(),
filterable_attributes: self.filterable_attributes,
foreign_keys: self.foreign_keys,
sortable_attributes: self.sortable_attributes,
ranking_rules: self.ranking_rules,
stop_words: self.stop_words,

@@ -543,6 +556,7 @@ impl Settings<Unchecked> {
.sortable_attributes
.clone()
.or(self.sortable_attributes.clone()),
foreign_keys: other.foreign_keys.clone().or(self.foreign_keys.clone()),
ranking_rules: other.ranking_rules.clone().or(self.ranking_rules.clone()),
stop_words: other.stop_words.clone().or(self.stop_words.clone()),
non_separator_tokens: other

@@ -604,6 +618,7 @@ pub fn apply_settings_to_builder(
searchable_attributes,
filterable_attributes,
sortable_attributes,
foreign_keys,
ranking_rules,
stop_words,
non_separator_tokens,

@@ -651,6 +666,12 @@ pub fn apply_settings_to_builder(
Setting::NotSet => (),
}

match foreign_keys {
Setting::Set(ref keys) => builder.set_foreign_keys(keys.clone().into_iter().collect()),
Setting::Reset => builder.reset_foreign_keys(),
Setting::NotSet => (),
}

match ranking_rules {
Setting::Set(ref criteria) => {
builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())

@@ -868,6 +889,8 @@ pub fn settings(

let sortable_attributes = index.sortable_fields(rtxn)?.into_iter().collect();

let foreign_keys = index.foreign_keys(rtxn)?.into_iter().collect();

let criteria = index.criteria(rtxn)?;

let stop_words = index

@@ -965,6 +988,7 @@ pub fn settings(
.into(),
filterable_attributes: Setting::Set(filterable_attributes),
sortable_attributes: Setting::Set(sortable_attributes),
foreign_keys: Setting::Set(foreign_keys),
ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
stop_words: Setting::Set(stop_words),
non_separator_tokens: Setting::Set(non_separator_tokens),

@@ -1207,6 +1231,7 @@ pub(crate) mod test {
searchable_attributes: Setting::Set(vec![String::from("hello")]).into(),
filterable_attributes: Setting::NotSet,
sortable_attributes: Setting::NotSet,
foreign_keys: Setting::NotSet,
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,

@@ -1240,6 +1265,7 @@ pub(crate) mod test {
.into(),
filterable_attributes: Setting::NotSet,
sortable_attributes: Setting::NotSet,
foreign_keys: Setting::NotSet,
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,

@@ -1,7 +1,7 @@
use std::any::TypeId;
use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::{Path, PathBuf};
use std::path::Path;
use std::sync::Arc;
use std::time::{Duration, Instant};

@@ -208,6 +208,7 @@ struct Infos {
experimental_no_edition_2024_for_prefix_post_processing: bool,
experimental_no_edition_2024_for_facet_post_processing: bool,
experimental_vector_store_setting: bool,
experimental_foreign_keys: bool,
experimental_personalization: bool,
gpu_enabled: bool,
db_path: bool,

@@ -317,6 +318,7 @@ impl Infos {
chat_completions,
multimodal,
vector_store_setting,
foreign_keys,
} = features;

// We're going to override every sensible information.
@@ -343,15 +345,16 @@ impl Infos {
experimental_no_snapshot_compaction,
experimental_no_edition_2024_for_dumps,
experimental_vector_store_setting: vector_store_setting,
experimental_foreign_keys: foreign_keys,
gpu_enabled: meilisearch_types::milli::vector::is_cuda_enabled(),
db_path: db_path != PathBuf::from("./data.ms"),
db_path: db_path != Path::new("./data.ms"),
import_dump: import_dump.is_some(),
dump_dir: dump_dir != PathBuf::from("dumps/"),
dump_dir: dump_dir != Path::new("dumps/"),
ignore_missing_dump,
ignore_dump_if_db_exists,
import_snapshot: import_snapshot.is_some(),
schedule_snapshot,
snapshot_dir: snapshot_dir != PathBuf::from("snapshots/"),
snapshot_dir: snapshot_dir != Path::new("snapshots/"),
uses_s3_snapshots: s3_snapshot_options.is_some(),
ignore_missing_snapshot,
ignore_snapshot_if_db_exists,
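Note: comparing a `PathBuf` against `Path::new(...)` instead of `PathBuf::from(...)` drops a pointless allocation: `Path::new` borrows the literal, and the standard library implements `PartialEq` between `PathBuf` and `&Path`. A small sketch of the equivalence (assuming the option fields are `PathBuf`s, as in the hunk above):

```rust
use std::path::{Path, PathBuf};

fn main() {
    let db_path = PathBuf::from("./data.ms");

    // Before: builds a second PathBuf only to compare and drop it.
    let changed_old = db_path != PathBuf::from("./data.ms");

    // After: borrows the literal, no allocation, same result.
    let changed_new = db_path != Path::new("./data.ms");

    assert_eq!(changed_old, changed_new);
    println!("db_path differs from the default: {changed_new}");
}
```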
@@ -56,6 +56,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
chat_completions: Some(false),
multimodal: Some(false),
vector_store_setting: Some(false),
foreign_keys: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{

@@ -106,6 +107,8 @@ pub struct RuntimeTogglableFeatures {
pub multimodal: Option<bool>,
#[deserr(default)]
pub vector_store_setting: Option<bool>,
#[deserr(default)]
pub foreign_keys: Option<bool>,
}

impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogglableFeatures {

@@ -121,6 +124,7 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
chat_completions,
multimodal,
vector_store_setting,
foreign_keys,
} = value;

Self {

@@ -134,6 +138,7 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
chat_completions: Some(chat_completions),
multimodal: Some(multimodal),
vector_store_setting: Some(vector_store_setting),
foreign_keys: Some(foreign_keys),
}
}
}

@@ -150,6 +155,7 @@ pub struct PatchExperimentalFeatureAnalytics {
chat_completions: bool,
multimodal: bool,
vector_store_setting: bool,
foreign_keys: bool,
}

impl Aggregate for PatchExperimentalFeatureAnalytics {

@@ -169,6 +175,7 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
chat_completions: new.chat_completions,
multimodal: new.multimodal,
vector_store_setting: new.vector_store_setting,
foreign_keys: new.foreign_keys,
})
}

@@ -197,6 +204,7 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
chat_completions: Some(false),
multimodal: Some(false),
vector_store_setting: Some(false),
foreign_keys: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{

@@ -244,6 +252,7 @@ async fn patch_features(
.0
.vector_store_setting
.unwrap_or(old_features.vector_store_setting),
foreign_keys: new_features.0.foreign_keys.unwrap_or(old_features.foreign_keys),
};

// explicitly destructure for analytics rather than using the `Serialize` implementation, because

@@ -260,6 +269,7 @@ async fn patch_features(
chat_completions,
multimodal,
vector_store_setting,
foreign_keys,
} = new_features;

analytics.publish(

@@ -274,6 +284,7 @@ async fn patch_features(
chat_completions,
multimodal,
vector_store_setting,
foreign_keys,
},
&req,
);

@@ -531,6 +531,17 @@ make_setting_routes!(
camelcase_attr: "vectorStore",
analytics: VectorStoreAnalytics
},
{
route: "/foreign-keys",
update_verb: put,
value_type: Vec<meilisearch_types::milli::ForeignKey>,
err_type: meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsForeignKeys,
>,
attr: foreign_keys,
camelcase_attr: "foreignKeys",
analytics: ForeignKeysAnalytics
},
);

#[utoipa::path(

@@ -595,6 +606,7 @@ pub async fn update_all(
filterable_attributes: FilterableAttributesAnalytics::new(
new_settings.filterable_attributes.as_ref().set(),
),
foreign_keys: ForeignKeysAnalytics::new(new_settings.foreign_keys.as_ref().set()),
distinct_attribute: DistinctAttributeAnalytics::new(
new_settings.distinct_attribute.as_ref().set(),
),

@@ -688,6 +700,10 @@ pub async fn get_all(
new_settings.vector_store = Setting::NotSet;
}

if features.check_foreign_keys_setting("showing index `foreignKeys` settings").is_err() {
new_settings.foreign_keys = Setting::NotSet;
}

debug!(returns = ?new_settings, "Get all settings");
Ok(HttpResponse::Ok().json(new_settings))
}

@@ -793,5 +809,9 @@ fn validate_settings(
features.check_vector_store_setting("setting `vectorStore` in the index settings")?;
}

if let Setting::Set(_) = &settings.foreign_keys {
features.check_foreign_keys_setting("setting `foreignKeys` in the index settings")?;
}

Ok(settings.validate()?)
}

@@ -9,7 +9,7 @@ use meilisearch_types::facet_values_sort::FacetValuesSort;
use meilisearch_types::locales::{Locale, LocalizedAttributesRuleView};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::vector::VectorStoreBackend;
use meilisearch_types::milli::FilterableAttributesRule;
use meilisearch_types::milli::{FilterableAttributesRule, ForeignKey};
use meilisearch_types::settings::{
ChatSettings, FacetingSettings, PaginationSettings, PrefixSearchSettings,
ProximityPrecisionView, RankingRuleView, SettingEmbeddingSettings, TypoSettings,

@@ -25,6 +25,7 @@ pub struct SettingsAnalytics {
pub displayed_attributes: DisplayedAttributesAnalytics,
pub sortable_attributes: SortableAttributesAnalytics,
pub filterable_attributes: FilterableAttributesAnalytics,
pub foreign_keys: ForeignKeysAnalytics,
pub distinct_attribute: DistinctAttributeAnalytics,
pub proximity_precision: ProximityPrecisionAnalytics,
pub typo_tolerance: TypoToleranceAnalytics,

@@ -98,6 +99,10 @@ impl Aggregate for SettingsAnalytics {
.has_patterns
.or(self.filterable_attributes.has_patterns),
},
foreign_keys: ForeignKeysAnalytics {
set: new.foreign_keys.set | self.foreign_keys.set,
total: new.foreign_keys.total.or(self.foreign_keys.total),
},
distinct_attribute: DistinctAttributeAnalytics {
set: self.distinct_attribute.set | new.distinct_attribute.set,
},

@@ -362,6 +367,22 @@ impl FilterableAttributesAnalytics {
}
}

#[derive(Serialize, Default)]
pub struct ForeignKeysAnalytics {
pub set: bool,
pub total: Option<usize>,
}

impl ForeignKeysAnalytics {
pub fn new(settings: Option<&Vec<ForeignKey>>) -> Self {
Self { set: settings.is_some(), total: settings.as_ref().map(|s| s.len()) }
}

pub fn into_settings(self) -> SettingsAnalytics {
SettingsAnalytics { foreign_keys: self, ..Default::default() }
}
}

#[derive(Serialize, Default)]
pub struct DistinctAttributeAnalytics {
pub set: bool,
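Note: like the other runtime flags handled above, `foreignKeys` would be toggled through the `PATCH /experimental-features` route. A hedged sketch of flipping it over HTTP, written with `reqwest` purely for illustration (host, port, and key are placeholders; unset fields keep their previous value, as the handler's `unwrap_or(old_features...)` calls show):

```rust
// Illustrative Cargo.toml deps: reqwest = { version = "0.12", features = ["blocking", "json"] }, serde_json = "1"
use reqwest::blocking::Client;

fn main() -> Result<(), reqwest::Error> {
    let resp = Client::new()
        .patch("http://localhost:7700/experimental-features") // placeholder deployment
        .bearer_auth("MASTER_KEY") // placeholder key
        .json(&serde_json::json!({ "foreignKeys": true }))
        .send()?;
    println!("{}", resp.text()?);
    Ok(())
}
```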
@@ -789,11 +789,12 @@ impl TryFrom<Value> for ExternalDocumentId {
}
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserr, ToSchema, Serialize)]
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Deserr, ToSchema, Serialize)]
#[deserr(rename_all = camelCase)]
#[serde(rename_all = "camelCase")]
pub enum MatchingStrategy {
/// Remove query words from last to first
#[default]
Last,
/// All query words are mandatory
All,

@@ -801,12 +802,6 @@
Frequency,
}

impl Default for MatchingStrategy {
fn default() -> Self {
Self::Last
}
}

impl From<MatchingStrategy> for TermsMatchingStrategy {
fn from(other: MatchingStrategy) -> Self {
match other {
@@ -187,7 +187,7 @@ macro_rules! compute_forbidden_search {

#[actix_rt::test]
async fn search_authorized_simple_token() {
let tenant_tokens = vec![
let tenant_tokens = [
hashmap! {
"searchRules" => json!({"*": {}}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -239,7 +239,7 @@ async fn search_authorized_simple_token() {

#[actix_rt::test]
async fn search_authorized_filter_token() {
let tenant_tokens = vec![
let tenant_tokens = [
hashmap! {
"searchRules" => json!({"*": {"filter": "color = blue"}}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -292,7 +292,7 @@ async fn search_authorized_filter_token() {

#[actix_rt::test]
async fn filter_search_authorized_filter_token() {
let tenant_tokens = vec![
let tenant_tokens = [
hashmap! {
"searchRules" => json!({"*": {"filter": "color = blue"}}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -353,7 +353,7 @@ async fn filter_search_authorized_filter_token() {
/// Tests that those Tenant Token are incompatible with the REFUSED_KEYS defined above.
#[actix_rt::test]
async fn error_search_token_forbidden_parent_key() {
let tenant_tokens = vec![
let tenant_tokens = [
hashmap! {
"searchRules" => json!({"*": {}}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -389,7 +389,7 @@ async fn error_search_token_forbidden_parent_key() {

#[actix_rt::test]
async fn error_search_forbidden_token() {
let tenant_tokens = vec![
let tenant_tokens = [
// bad index
hashmap! {
"searchRules" => json!({"products": {}}),

@@ -680,7 +680,7 @@ async fn multi_search_authorized_simple_token() {

#[actix_rt::test]
async fn single_search_authorized_filter_token() {
let tenant_tokens = vec![
let tenant_tokens = [
hashmap! {
"searchRules" => json!({"*": {"filter": "color = blue"}}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -733,7 +733,7 @@ async fn single_search_authorized_filter_token() {

#[actix_rt::test]
async fn multi_search_authorized_filter_token() {
let both_tenant_tokens = vec![
let both_tenant_tokens = [
hashmap! {
"searchRules" => json!({"sales": {"filter": "color = blue"}, "products": {"filter": "doggos.age <= 5"}}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -842,7 +842,7 @@ async fn filter_single_search_authorized_filter_token() {

#[actix_rt::test]
async fn filter_multi_search_authorized_filter_token() {
let tenant_tokens = vec![
let tenant_tokens = [
hashmap! {
"searchRules" => json!({"sales": {"filter": "color = blue"}, "products": {"filter": "doggos.age <= 5"}}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -900,7 +900,7 @@ async fn filter_multi_search_authorized_filter_token() {
/// Tests that those Tenant Token are incompatible with the REFUSED_KEYS defined above.
#[actix_rt::test]
async fn error_single_search_token_forbidden_parent_key() {
let tenant_tokens = vec![
let tenant_tokens = [
hashmap! {
"searchRules" => json!({"*": {}}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -941,7 +941,7 @@ async fn error_single_search_token_forbidden_parent_key() {
/// Tests that those Tenant Token are incompatible with the REFUSED_KEYS defined above.
#[actix_rt::test]
async fn error_multi_search_token_forbidden_parent_key() {
let tenant_tokens = vec![
let tenant_tokens = [
hashmap! {
"searchRules" => json!({"*": {}}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
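Note: these fixtures are never resized, so a fixed-size array works where `vec![...]` previously allocated, and the iteration code is unchanged because arrays are iterable too. A tiny illustration of the swap:

```rust
fn main() {
    // Before: heap allocation for a fixture that never grows or shrinks.
    let with_vec = vec!["token-a", "token-b"];

    // After: a plain array; same iteration API, no allocation.
    let with_array = ["token-a", "token-b"];

    assert!(with_vec.iter().eq(with_array.iter()));
    for token in with_array {
        println!("checking {token}");
    }
}
```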
@@ -237,6 +237,7 @@ async fn import_dump_v1_movie_with_settings() {
"sortableAttributes": [
"genres"
],
"foreignKeys": [],
"rankingRules": [
"typo",
"words",

@@ -411,6 +412,7 @@ async fn import_dump_v1_rubygems_with_settings() {
"sortableAttributes": [
"version"
],
"foreignKeys": [],
"rankingRules": [
"typo",
"words",

@@ -740,6 +742,7 @@ async fn import_dump_v2_movie_with_settings() {
"genres"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"words",
"typo",

@@ -911,6 +914,7 @@ async fn import_dump_v2_rubygems_with_settings() {
"version"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"typo",
"words",

@@ -1240,6 +1244,7 @@ async fn import_dump_v3_movie_with_settings() {
"genres"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"words",
"typo",

@@ -1411,6 +1416,7 @@ async fn import_dump_v3_rubygems_with_settings() {
"version"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"typo",
"words",

@@ -1740,6 +1746,7 @@ async fn import_dump_v4_movie_with_settings() {
"genres"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"words",
"typo",

@@ -1911,6 +1918,7 @@ async fn import_dump_v4_rubygems_with_settings() {
"version"
],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"typo",
"words",

@@ -2190,7 +2198,8 @@ async fn import_dump_v6_containing_experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false
"vectorStoreSetting": false,
"foreignKeys": false
}
"###);

@@ -27,7 +27,8 @@ async fn experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false
"vectorStoreSetting": false,
"foreignKeys": false
}
"###);

@@ -45,7 +46,8 @@ async fn experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false
"vectorStoreSetting": false,
"foreignKeys": false
}
"###);

@@ -63,7 +65,8 @@ async fn experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false
"vectorStoreSetting": false,
"foreignKeys": false
}
"###);

@@ -82,7 +85,8 @@ async fn experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false
"vectorStoreSetting": false,
"foreignKeys": false
}
"###);

@@ -101,7 +105,8 @@ async fn experimental_features() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false
"vectorStoreSetting": false,
"foreignKeys": false
}
"###);
}

@@ -127,7 +132,8 @@ async fn experimental_feature_metrics() {
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false,
"vectorStoreSetting": false
"vectorStoreSetting": false,
"foreignKeys": false
}
"###);

@@ -174,7 +180,7 @@ async fn errors() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`, `multimodal`, `vectorStoreSetting`",
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`, `multimodal`, `vectorStoreSetting`, `foreignKeys`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"

@@ -318,6 +318,7 @@ async fn secrets_are_hidden_in_settings() {
],
"filterableAttributes": [],
"sortableAttributes": [],
"foreignKeys": [],
"rankingRules": [
"words",
"typo",
@@ -53,6 +53,7 @@ pub mod main_key {
pub const HIDDEN_FACETED_FIELDS_KEY: &str = "hidden-faceted-fields";
pub const FILTERABLE_FIELDS_KEY: &str = "filterable-fields";
pub const SORTABLE_FIELDS_KEY: &str = "sortable-fields";
pub const FOREIGN_KEYS_KEY: &str = "foreign-keys";
pub const FIELD_DISTRIBUTION_KEY: &str = "fields-distribution";
pub const FIELDS_IDS_MAP_KEY: &str = "fields-ids-map";
pub const FIELDIDS_WEIGHTS_MAP_KEY: &str = "fieldids-weights-map";

@@ -19,6 +19,7 @@ mod external_documents_ids;
pub mod facet;
mod fields_ids_map;
mod filterable_attributes_rules;
mod foreign_key;
pub mod heed_codec;
pub mod index;
mod localized_attributes_rules;

@@ -71,6 +72,7 @@ pub use self::filterable_attributes_rules::{
FilterFeatures, FilterableAttributesFeatures, FilterableAttributesPatterns,
FilterableAttributesRule,
};
pub use self::foreign_key::ForeignKey;
pub use self::heed_codec::{
BEU16StrCodec, BEU32StrCodec, BoRoaringBitmapCodec, BoRoaringBitmapLenCodec,
CboRoaringBitmapCodec, CboRoaringBitmapLenCodec, FieldIdWordCountCodec, ObkvCodec,

@@ -385,9 +385,10 @@ pub struct SearchResult {
pub query_vector: Option<Embedding>,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub enum TermsMatchingStrategy {
// remove last word first
#[default]
Last,
// all words are mandatory
All,

@@ -395,12 +396,6 @@ pub enum TermsMatchingStrategy {
Frequency,
}

impl Default for TermsMatchingStrategy {
fn default() -> Self {
Self::Last
}
}

impl From<MatchingStrategy> for TermsMatchingStrategy {
fn from(other: MatchingStrategy) -> Self {
match other {

@@ -124,7 +124,7 @@ impl GrenadParameters {
/// This should be called inside of a rayon thread pool,
/// otherwise, it will take the global number of threads.
pub fn max_memory_by_thread(&self) -> Option<usize> {
self.max_memory.map(|max_memory| (max_memory / rayon::current_num_threads()))
self.max_memory.map(|max_memory| max_memory / rayon::current_num_threads())
}
}

@@ -54,11 +54,12 @@ pub struct DocumentAdditionResult {
pub number_of_documents: u64,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
pub enum IndexDocumentsMethod {
/// Replace the previous document with the new one,
/// removing all the already known attributes.
#[default]
ReplaceDocuments,

/// Merge the previous version of the document with the new version,

@@ -66,12 +67,6 @@ pub enum IndexDocumentsMethod {
UpdateDocuments,
}

impl Default for IndexDocumentsMethod {
fn default() -> Self {
Self::ReplaceDocuments
}
}

pub struct IndexDocuments<'t, 'i, 'a, FP, FA> {
wtxn: &'t mut heed::RwTxn<'i>,
index: &'i Index,
@@ -45,13 +45,15 @@ use crate::vector::{
VectorStoreBackend,
};
use crate::{
ChannelCongestion, FieldId, FilterableAttributesRule, Index, LocalizedAttributesRule, Result,
ChannelCongestion, FieldId, FilterableAttributesRule, ForeignKey, Index,
LocalizedAttributesRule, Result,
};

#[derive(Debug, Clone, PartialEq, Eq, Copy)]
#[derive(Default, Debug, Clone, PartialEq, Eq, Copy)]
pub enum Setting<T> {
Set(T),
Reset,
#[default]
NotSet,
}

@@ -71,12 +73,6 @@
where
}
}

impl<T> Default for Setting<T> {
fn default() -> Self {
Self::NotSet
}
}

impl<T> Setting<T> {
pub fn set(self) -> Option<T> {

@@ -176,6 +172,7 @@ pub struct Settings<'a, 't, 'i> {
displayed_fields: Setting<Vec<String>>,
filterable_fields: Setting<Vec<FilterableAttributesRule>>,
sortable_fields: Setting<HashSet<String>>,
foreign_keys: Setting<Vec<ForeignKey>>,
criteria: Setting<Vec<Criterion>>,
stop_words: Setting<BTreeSet<String>>,
non_separator_tokens: Setting<BTreeSet<String>>,

@@ -217,6 +214,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
displayed_fields: Setting::NotSet,
filterable_fields: Setting::NotSet,
sortable_fields: Setting::NotSet,
foreign_keys: Setting::NotSet,
criteria: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,

@@ -278,6 +276,14 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
self.sortable_fields = Setting::Reset;
}

pub fn set_foreign_keys(&mut self, keys: Vec<ForeignKey>) {
self.foreign_keys = Setting::Set(keys);
}

pub fn reset_foreign_keys(&mut self) {
self.foreign_keys = Setting::Reset;
}

pub fn reset_criteria(&mut self) {
self.criteria = Setting::Reset;
}

@@ -822,6 +828,19 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
Ok(())
}

fn update_foreign_keys(&mut self) -> Result<()> {
match self.foreign_keys {
Setting::Set(ref keys) => {
self.index.put_foreign_keys(self.wtxn, keys)?;
}
Setting::Reset => {
self.index.delete_foreign_keys(self.wtxn)?;
}
Setting::NotSet => (),
}
Ok(())
}

fn update_criteria(&mut self) -> Result<()> {
match &self.criteria {
Setting::Set(criteria) => {

@@ -1455,6 +1474,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
self.update_sort_facet_values_by()?;
self.update_pagination_max_total_hits()?;
self.update_search_cutoff()?;
self.update_foreign_keys()?;

// could trigger re-indexing
self.update_filterable()?;

@@ -1593,6 +1613,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
displayed_fields: Setting::NotSet,
filterable_fields: Setting::NotSet,
sortable_fields: Setting::NotSet,
foreign_keys: Setting::NotSet,
criteria: Setting::NotSet,
stop_words: Setting::NotSet, // TODO (require force reindexing of searchables)
non_separator_tokens: Setting::NotSet, // TODO (require force reindexing of searchables)

@@ -867,6 +867,7 @@ fn test_correct_settings_init() {
displayed_fields,
filterable_fields,
sortable_fields,
foreign_keys,
criteria,
stop_words,
non_separator_tokens,

@@ -897,6 +898,7 @@ fn test_correct_settings_init() {
assert!(matches!(displayed_fields, Setting::NotSet));
assert!(matches!(filterable_fields, Setting::NotSet));
assert!(matches!(sortable_fields, Setting::NotSet));
assert!(matches!(foreign_keys, Setting::NotSet));
assert!(matches!(criteria, Setting::NotSet));
assert!(matches!(stop_words, Setting::NotSet));
assert!(matches!(non_separator_tokens, Setting::NotSet));
@@ -67,7 +67,7 @@ impl<F> Embeddings<F> {
///
/// If `embeddings.len() % self.dimension != 0`, then the append operation fails.
pub fn append(&mut self, mut embeddings: Vec<F>) -> Result<(), Vec<F>> {
if embeddings.len() % self.dimension != 0 {
if !embeddings.len().is_multiple_of(self.dimension) {
return Err(embeddings);
}
self.data.append(&mut embeddings);
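Note: `is_multiple_of` (stabilized for unsigned integers in recent Rust, and already relied on by this branch) states the old modulo check as intent; the two forms agree for any non-zero divisor, and the new one avoids the division-by-zero panic. A small sketch:

```rust
fn main() {
    let dimension = 4_usize;
    let embeddings = vec![0.0_f32; 10];

    // Old form: readable but indirect, and panics if `dimension` is 0.
    let misaligned_old = embeddings.len() % dimension != 0;

    // New form: same condition for a non-zero divisor, stated as intent.
    let misaligned_new = !embeddings.len().is_multiple_of(dimension);

    assert_eq!(misaligned_old, misaligned_new);
    println!("10 floats do not split evenly into vectors of {dimension}: {misaligned_new}");
}
```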
@@ -178,6 +178,7 @@ pub fn get_arch() -> anyhow::Result<&'static str> {
#[cfg(not(all(target_os = "linux", target_arch = "aarch64")))]
#[cfg(not(all(target_os = "linux", target_arch = "x86_64")))]
#[cfg(not(all(target_os = "macos", target_arch = "aarch64")))]
#[cfg(not(all(target_os = "macos", target_arch = "x86_64")))]
anyhow::bail!("unsupported platform")
}

@@ -1,3 +1,3 @@
[toolchain]
channel = "1.89.0"
channel = "1.91.1"
components = ["clippy"]