mirror of https://github.com/meilisearch/meilisearch.git
synced 2025-12-10 22:55:43 +00:00

Compare commits (8 commits)
26e368b116
ba95ac0915
75fcbfc2fe
8c19b6d55e
08d0f05ece
4762e9afa0
12fcab91c5
792a72a23f
.github/workflows/bench-manual.yml (2 changes)

@@ -18,7 +18,7 @@ jobs:
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
         with:
           profile: minimal
.github/workflows/bench-pr.yml (2 changes)

@@ -66,7 +66,7 @@ jobs:
           fetch-depth: 0 # fetch full history to be able to get main commit sha
           ref: ${{ steps.comment-branch.outputs.head_ref }}

-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1

       - name: Run benchmarks on PR ${{ github.event.issue.id }}
         run: |
.github/workflows/bench-push-indexing.yml (2 changes)

@@ -12,7 +12,7 @@ jobs:
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1

       # Run benchmarks
       - name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
.github/workflows/benchmarks-manual.yml (2 changes)

@@ -18,7 +18,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
         with:
           profile: minimal
.github/workflows/benchmarks-pr.yml (2 changes)

@@ -44,7 +44,7 @@ jobs:
           exit 1
         fi

-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
         with:
           profile: minimal
@@ -16,7 +16,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
         with:
           profile: minimal
@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
         with:
           profile: minimal

@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
         with:
           profile: minimal

@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
         with:
           profile: minimal
.github/workflows/flaky-tests.yml (4 changes)

@@ -3,7 +3,7 @@ name: Look for flaky tests
 on:
   workflow_dispatch:
   schedule:
-    - cron: '0 4 * * *' # Every day at 4:00AM
+    - cron: "0 4 * * *" # Every day at 4:00AM

 jobs:
   flaky:

@@ -23,7 +23,7 @@ jobs:
       run: |
         apt-get update && apt-get install -y curl
         apt-get install build-essential -y
-    - uses: dtolnay/rust-toolchain@1.89
+    - uses: dtolnay/rust-toolchain@1.91.1
     - name: Install cargo-flaky
       run: cargo install cargo-flaky
     - name: Run cargo flaky in the dumps
.github/workflows/fuzzer-indexing.yml (2 changes)

@@ -12,7 +12,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1

       # Run benchmarks
       - name: Run the fuzzer
.github/workflows/publish-apt-brew-pkg.yml (2 changes)

@@ -31,7 +31,7 @@ jobs:
          sudo rm -rf "/usr/share/dotnet" || true
          sudo rm -rf "/usr/local/lib/android" || true
          sudo rm -rf "/usr/local/share/boost" || true
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
      - name: Install cargo-deb
        run: cargo install cargo-deb
      - uses: actions/checkout@v5
.github/workflows/publish-release-assets.yml (2 changes)

@@ -76,7 +76,7 @@ jobs:
     needs: check-version
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
       - name: Build
         run: cargo build --release --locked ${{ matrix.feature-flag }} ${{ matrix.extra-args }}
       # No need to upload binaries for dry run (cron or workflow_dispatch)
.github/workflows/sdks-tests.yml (12 changes)

@@ -25,14 +25,18 @@ jobs:
       - uses: actions/checkout@v5
       - name: Define the Docker image we need to use
         id: define-image
+        env:
+          EVENT_NAME: ${{ github.event_name }}
+          DOCKER_IMAGE_INPUT: ${{ github.event.inputs.docker_image }}
         run: |
-          event=${{ github.event_name }}
           echo "docker-image=nightly" >> $GITHUB_OUTPUT
-          if [[ $event == 'workflow_dispatch' ]]; then
-            echo "docker-image=${{ github.event.inputs.docker_image }}" >> $GITHUB_OUTPUT
+          if [[ "$EVENT_NAME" == 'workflow_dispatch' ]]; then
+            echo "docker-image=$DOCKER_IMAGE_INPUT" >> $GITHUB_OUTPUT
           fi
       - name: Docker image is ${{ steps.define-image.outputs.docker-image }}
-        run: echo "Docker image is ${{ steps.define-image.outputs.docker-image }}"
+        env:
+          DOCKER_IMAGE: ${{ steps.define-image.outputs.docker-image }}
+        run: echo "Docker image is $DOCKER_IMAGE"

       ##########
       ## SDKs ##
.github/workflows/test-suite.yml (16 changes)

@@ -34,7 +34,7 @@ jobs:
       - name: check free space after
         run: df -h
       - name: Setup test with Rust stable
-        uses: dtolnay/rust-toolchain@1.89
+        uses: dtolnay/rust-toolchain@1.91.1
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
         with:

@@ -63,7 +63,7 @@ jobs:
       - uses: actions/checkout@v5
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
       - name: Run cargo build without any default features
         uses: actions-rs/cargo@v1
         with:

@@ -87,7 +87,7 @@ jobs:
           sudo rm -rf "/usr/share/dotnet" || true
           sudo rm -rf "/usr/local/lib/android" || true
           sudo rm -rf "/usr/local/share/boost" || true
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
       - name: Run cargo build with almost all features
         run: |
           cargo build --workspace --locked --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"

@@ -145,7 +145,7 @@ jobs:
           sudo rm -rf "/usr/share/dotnet" || true
           sudo rm -rf "/usr/local/lib/android" || true
           sudo rm -rf "/usr/local/share/boost" || true
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
       - name: Run cargo tree without default features and check lindera is not present
         run: |
           if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then

@@ -167,7 +167,7 @@ jobs:
           sudo rm -rf "/usr/share/dotnet" || true
           sudo rm -rf "/usr/local/lib/android" || true
           sudo rm -rf "/usr/local/share/boost" || true
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
       - name: Build

@@ -187,7 +187,7 @@ jobs:
           sudo rm -rf "/usr/share/dotnet" || true
           sudo rm -rf "/usr/local/lib/android" || true
           sudo rm -rf "/usr/local/share/boost" || true
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
         with:
           components: clippy
       - name: Cache dependencies

@@ -209,7 +209,7 @@ jobs:
           sudo rm -rf "/usr/share/dotnet" || true
           sudo rm -rf "/usr/local/lib/android" || true
           sudo rm -rf "/usr/local/share/boost" || true
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
         with:
           components: rustfmt
       - name: Cache dependencies

@@ -235,7 +235,7 @@ jobs:
           sudo rm -rf "/usr/share/dotnet" || true
           sudo rm -rf "/usr/local/lib/android" || true
           sudo rm -rf "/usr/local/share/boost" || true
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
       - name: Run declarative tests
@@ -24,7 +24,7 @@ jobs:
           sudo rm -rf "/usr/share/dotnet" || true
           sudo rm -rf "/usr/local/lib/android" || true
           sudo rm -rf "/usr/local/share/boost" || true
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.91.1
       - name: Install sd
         run: cargo install sd
       - name: Update Cargo.toml file
@@ -107,19 +107,14 @@ impl Settings<Unchecked> {
     }
 }

-#[derive(Debug, Clone, PartialEq)]
+#[derive(Default, Debug, Clone, PartialEq)]
 pub enum Setting<T> {
     Set(T),
     Reset,
+    #[default]
     NotSet,
 }

-impl<T> Default for Setting<T> {
-    fn default() -> Self {
-        Self::NotSet
-    }
-}
-
 impl<T> Setting<T> {
     pub const fn is_not_set(&self) -> bool {
         matches!(self, Self::NotSet)
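This hunk, and several near-identical ones below, apply the same modernization: since Rust 1.62, `Default` can be derived for an enum by tagging one unit variant with `#[default]`, so the hand-written impl can go. A minimal sketch of the equivalence (the `main` and assert are illustrative, not from the patch):

// Since Rust 1.62, `#[derive(Default)]` works on enums when exactly one
// unit variant carries `#[default]`.
#[derive(Default, Debug, Clone, PartialEq)]
pub enum Setting<T> {
    Set(T),
    Reset,
    #[default]
    NotSet,
}

// The hand-written impl the diff deletes looked like this. One subtlety:
// the derive conservatively adds a `T: Default` bound that the manual impl
// did not have, which is fine as long as no call site needs
// `Setting::<SomeNonDefaultType>::default()`.
// impl<T> Default for Setting<T> {
//     fn default() -> Self {
//         Self::NotSet
//     }
// }

fn main() {
    let s: Setting<u32> = Setting::default();
    assert_eq!(s, Setting::NotSet);
}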
@@ -161,19 +161,14 @@ pub struct Facets {
     pub min_level_size: Option<NonZeroUsize>,
 }

-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub enum Setting<T> {
     Set(T),
     Reset,
+    #[default]
     NotSet,
 }

-impl<T> Default for Setting<T> {
-    fn default() -> Self {
-        Self::NotSet
-    }
-}
-
 impl<T> Setting<T> {
     pub fn map<U, F>(self, f: F) -> Setting<U>
     where
@@ -1,9 +1,7 @@
 use std::fmt::{self, Display, Formatter};
-use std::marker::PhantomData;
 use std::str::FromStr;

-use serde::de::Visitor;
-use serde::{Deserialize, Deserializer};
+use serde::Deserialize;
 use uuid::Uuid;

 use super::settings::{Settings, Unchecked};
@@ -82,59 +80,3 @@ impl Display for IndexUidFormatError {
     }
 }

 impl std::error::Error for IndexUidFormatError {}
-
-/// A type that tries to match either a star (*) or
-/// any other thing that implements `FromStr`.
-#[derive(Debug)]
-#[cfg_attr(test, derive(serde::Serialize))]
-pub enum StarOr<T> {
-    Star,
-    Other(T),
-}
-
-impl<'de, T, E> Deserialize<'de> for StarOr<T>
-where
-    T: FromStr<Err = E>,
-    E: Display,
-{
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        /// Serde can't differentiate between `StarOr::Star` and `StarOr::Other` without a tag.
-        /// Simply using `#[serde(untagged)]` + `#[serde(rename="*")]` will lead to attempting to
-        /// deserialize everything as a `StarOr::Other`, including "*".
-        /// [`#[serde(other)]`](https://serde.rs/variant-attrs.html#other) might have helped but is
-        /// not supported on untagged enums.
-        struct StarOrVisitor<T>(PhantomData<T>);
-
-        impl<T, FE> Visitor<'_> for StarOrVisitor<T>
-        where
-            T: FromStr<Err = FE>,
-            FE: Display,
-        {
-            type Value = StarOr<T>;
-
-            fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
-                formatter.write_str("a string")
-            }
-
-            fn visit_str<SE>(self, v: &str) -> Result<Self::Value, SE>
-            where
-                SE: serde::de::Error,
-            {
-                match v {
-                    "*" => Ok(StarOr::Star),
-                    v => {
-                        let other = FromStr::from_str(v).map_err(|e: T::Err| {
-                            SE::custom(format!("Invalid `other` value: {}", e))
-                        })?;
-                        Ok(StarOr::Other(other))
-                    }
-                }
-            }
-        }
-
-        deserializer.deserialize_str(StarOrVisitor(PhantomData))
-    }
-}
@@ -192,19 +192,14 @@ pub struct Facets {
     pub min_level_size: Option<NonZeroUsize>,
 }

-#[derive(Debug, Clone, PartialEq, Eq, Copy)]
+#[derive(Default, Debug, Clone, PartialEq, Eq, Copy)]
 pub enum Setting<T> {
     Set(T),
     Reset,
+    #[default]
     NotSet,
 }

-impl<T> Default for Setting<T> {
-    fn default() -> Self {
-        Self::NotSet
-    }
-}
-
 impl<T> Setting<T> {
     pub fn set(self) -> Option<T> {
         match self {
@@ -47,20 +47,15 @@ pub struct Settings<T> {
     pub _kind: PhantomData<T>,
 }

-#[derive(Debug, Clone, PartialEq, Eq, Copy)]
+#[derive(Default, Debug, Clone, PartialEq, Eq, Copy)]
 #[cfg_attr(test, derive(serde::Serialize))]
 pub enum Setting<T> {
     Set(T),
     Reset,
+    #[default]
     NotSet,
 }

-impl<T> Default for Setting<T> {
-    fn default() -> Self {
-        Self::NotSet
-    }
-}
-
 impl<T> Setting<T> {
     pub fn set(self) -> Option<T> {
         match self {
@@ -322,7 +322,7 @@ impl From<Task> for TaskView {
             _ => None,
         });

-        let duration = finished_at.zip(started_at).map(|(tf, ts)| (tf - ts));
+        let duration = finished_at.zip(started_at).map(|(tf, ts)| tf - ts);

         Self {
             uid: id,
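The only change to `duration` is dropping a redundant pair of parentheses around the closure body, but the line is a compact pattern worth spelling out: `Option::zip` yields `Some((a, b))` only when both operands are `Some`. An illustrative sketch with integers standing in for `OffsetDateTime`:

fn main() {
    // `zip` is Some only when both timestamps are present, so `duration`
    // stays None for tasks that never started or never finished.
    let started_at: Option<i64> = Some(10);
    let finished_at: Option<i64> = Some(25);
    let duration = finished_at.zip(started_at).map(|(tf, ts)| tf - ts);
    assert_eq!(duration, Some(15));

    let unfinished: Option<i64> = None;
    assert_eq!(unfinished.zip(started_at).map(|(tf, ts)| tf - ts), None);
}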
@@ -1,7 +1,7 @@
 use std::any::TypeId;
 use std::collections::{HashMap, HashSet};
 use std::fs;
-use std::path::{Path, PathBuf};
+use std::path::Path;
 use std::sync::Arc;
 use std::time::{Duration, Instant};

@@ -344,14 +344,14 @@ impl Infos {
             experimental_no_edition_2024_for_dumps,
             experimental_vector_store_setting: vector_store_setting,
             gpu_enabled: meilisearch_types::milli::vector::is_cuda_enabled(),
-            db_path: db_path != PathBuf::from("./data.ms"),
+            db_path: db_path != Path::new("./data.ms"),
             import_dump: import_dump.is_some(),
-            dump_dir: dump_dir != PathBuf::from("dumps/"),
+            dump_dir: dump_dir != Path::new("dumps/"),
             ignore_missing_dump,
             ignore_dump_if_db_exists,
             import_snapshot: import_snapshot.is_some(),
             schedule_snapshot,
-            snapshot_dir: snapshot_dir != PathBuf::from("snapshots/"),
+            snapshot_dir: snapshot_dir != Path::new("snapshots/"),
             uses_s3_snapshots: s3_snapshot_options.is_some(),
             ignore_missing_snapshot,
             ignore_snapshot_if_db_exists,
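The `PathBuf::from(...)` to `Path::new(...)` swaps above avoid allocating an owned `PathBuf` on every comparison: `Path::new` is a free reinterpretation of the `&str`, and the standard library implements `PartialEq` between `PathBuf` and `&Path`. A small sketch of the idea (variable names illustrative):

use std::path::{Path, PathBuf};

fn main() {
    let db_path = PathBuf::from("./data.ms");

    // `PathBuf::from` allocates an owned buffer; `Path::new` just borrows
    // the literal. std provides `PartialEq<&Path>` for `PathBuf`, so the
    // comparison itself needs no allocation at all.
    let is_default_location = db_path == Path::new("./data.ms");
    assert!(is_default_location);
}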
@@ -789,11 +789,12 @@ impl TryFrom<Value> for ExternalDocumentId {
     }
 }

-#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserr, ToSchema, Serialize)]
+#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Deserr, ToSchema, Serialize)]
 #[deserr(rename_all = camelCase)]
 #[serde(rename_all = "camelCase")]
 pub enum MatchingStrategy {
     /// Remove query words from last to first
+    #[default]
     Last,
     /// All query words are mandatory
     All,

@@ -801,12 +802,6 @@ pub enum MatchingStrategy {
     Frequency,
 }

-impl Default for MatchingStrategy {
-    fn default() -> Self {
-        Self::Last
-    }
-}
-
 impl From<MatchingStrategy> for TermsMatchingStrategy {
     fn from(other: MatchingStrategy) -> Self {
         match other {
@@ -187,7 +187,7 @@ macro_rules! compute_forbidden_search {

 #[actix_rt::test]
 async fn search_authorized_simple_token() {
-    let tenant_tokens = vec![
+    let tenant_tokens = [
         hashmap! {
             "searchRules" => json!({"*": {}}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
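This hunk and the ones that follow are all the same clippy-style cleanup (`useless_vec`): the tenant tokens are only iterated, so a fixed-size array literal does the job without heap-allocating a `Vec`. A reduced sketch of the pattern (token values made up):

fn main() {
    // An array literal iterates by value just like `vec![...]`
    // (`IntoIterator` for arrays yields owned elements since Rust 2021),
    // so dropping `vec!` removes an allocation without touching the loop.
    let tenant_tokens = ["token-a", "token-b", "token-c"];
    for token in tenant_tokens {
        assert!(token.starts_with("token-"));
    }
}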
@@ -239,7 +239,7 @@ async fn search_authorized_simple_token() {

 #[actix_rt::test]
 async fn search_authorized_filter_token() {
-    let tenant_tokens = vec![
+    let tenant_tokens = [
         hashmap! {
             "searchRules" => json!({"*": {"filter": "color = blue"}}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -292,7 +292,7 @@ async fn search_authorized_filter_token() {

 #[actix_rt::test]
 async fn filter_search_authorized_filter_token() {
-    let tenant_tokens = vec![
+    let tenant_tokens = [
         hashmap! {
             "searchRules" => json!({"*": {"filter": "color = blue"}}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -353,7 +353,7 @@ async fn filter_search_authorized_filter_token() {
 /// Tests that those Tenant Token are incompatible with the REFUSED_KEYS defined above.
 #[actix_rt::test]
 async fn error_search_token_forbidden_parent_key() {
-    let tenant_tokens = vec![
+    let tenant_tokens = [
         hashmap! {
             "searchRules" => json!({"*": {}}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -389,7 +389,7 @@ async fn error_search_token_forbidden_parent_key() {

 #[actix_rt::test]
 async fn error_search_forbidden_token() {
-    let tenant_tokens = vec![
+    let tenant_tokens = [
         // bad index
         hashmap! {
             "searchRules" => json!({"products": {}}),

@@ -680,7 +680,7 @@ async fn multi_search_authorized_simple_token() {

 #[actix_rt::test]
 async fn single_search_authorized_filter_token() {
-    let tenant_tokens = vec![
+    let tenant_tokens = [
         hashmap! {
             "searchRules" => json!({"*": {"filter": "color = blue"}}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -733,7 +733,7 @@ async fn single_search_authorized_filter_token() {

 #[actix_rt::test]
 async fn multi_search_authorized_filter_token() {
-    let both_tenant_tokens = vec![
+    let both_tenant_tokens = [
         hashmap! {
             "searchRules" => json!({"sales": {"filter": "color = blue"}, "products": {"filter": "doggos.age <= 5"}}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -842,7 +842,7 @@ async fn filter_single_search_authorized_filter_token() {

 #[actix_rt::test]
 async fn filter_multi_search_authorized_filter_token() {
-    let tenant_tokens = vec![
+    let tenant_tokens = [
         hashmap! {
             "searchRules" => json!({"sales": {"filter": "color = blue"}, "products": {"filter": "doggos.age <= 5"}}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -900,7 +900,7 @@ async fn filter_multi_search_authorized_filter_token() {
 /// Tests that those Tenant Token are incompatible with the REFUSED_KEYS defined above.
 #[actix_rt::test]
 async fn error_single_search_token_forbidden_parent_key() {
-    let tenant_tokens = vec![
+    let tenant_tokens = [
         hashmap! {
             "searchRules" => json!({"*": {}}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())

@@ -941,7 +941,7 @@ async fn error_single_search_token_forbidden_parent_key() {
 /// Tests that those Tenant Token are incompatible with the REFUSED_KEYS defined above.
 #[actix_rt::test]
 async fn error_multi_search_token_forbidden_parent_key() {
-    let tenant_tokens = vec![
+    let tenant_tokens = [
         hashmap! {
             "searchRules" => json!({"*": {}}),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
@@ -385,9 +385,10 @@ pub struct SearchResult {
     pub query_vector: Option<Embedding>,
 }

-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
 pub enum TermsMatchingStrategy {
     // remove last word first
+    #[default]
     Last,
     // all words are mandatory
     All,

@@ -395,12 +396,6 @@ pub enum TermsMatchingStrategy {
     Frequency,
 }

-impl Default for TermsMatchingStrategy {
-    fn default() -> Self {
-        Self::Last
-    }
-}
-
 impl From<MatchingStrategy> for TermsMatchingStrategy {
     fn from(other: MatchingStrategy) -> Self {
         match other {
@@ -124,7 +124,7 @@ impl GrenadParameters {
     /// This should be called inside of a rayon thread pool,
     /// otherwise, it will take the global number of threads.
     pub fn max_memory_by_thread(&self) -> Option<usize> {
-        self.max_memory.map(|max_memory| (max_memory / rayon::current_num_threads()))
+        self.max_memory.map(|max_memory| max_memory / rayon::current_num_threads())
     }
 }

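Here too the change is only a pair of redundant parentheses, but the doc-comment's caveat is easy to demonstrate: `rayon::current_num_threads()` reports the size of whichever pool it runs inside, and falls back to the global pool otherwise. A hedged sketch using the public rayon API (pool size chosen arbitrarily):

fn main() {
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(2)
        .build()
        .expect("failed to build thread pool");

    // Outside any custom pool this reports the global pool's size;
    // inside `install` it reports the pool the closure runs in.
    let outside = rayon::current_num_threads();
    let inside = pool.install(rayon::current_num_threads);
    println!("global: {outside}, inside custom pool: {inside}");
    assert_eq!(inside, 2);
}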
@@ -54,11 +54,12 @@ pub struct DocumentAdditionResult {
     pub number_of_documents: u64,
 }

-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
+#[derive(Default, Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
 #[non_exhaustive]
 pub enum IndexDocumentsMethod {
     /// Replace the previous document with the new one,
     /// removing all the already known attributes.
+    #[default]
     ReplaceDocuments,

     /// Merge the previous version of the document with the new version,

@@ -66,12 +67,6 @@ pub enum IndexDocumentsMethod {
     UpdateDocuments,
 }

-impl Default for IndexDocumentsMethod {
-    fn default() -> Self {
-        Self::ReplaceDocuments
-    }
-}
-
 pub struct IndexDocuments<'t, 'i, 'a, FP, FA> {
     wtxn: &'t mut heed::RwTxn<'i>,
     index: &'i Index,
@@ -48,10 +48,11 @@ use crate::{
     ChannelCongestion, FieldId, FilterableAttributesRule, Index, LocalizedAttributesRule, Result,
 };

-#[derive(Debug, Clone, PartialEq, Eq, Copy)]
+#[derive(Default, Debug, Clone, PartialEq, Eq, Copy)]
 pub enum Setting<T> {
     Set(T),
     Reset,
+    #[default]
     NotSet,
 }

@@ -71,12 +72,6 @@ where
     }
 }

-impl<T> Default for Setting<T> {
-    fn default() -> Self {
-        Self::NotSet
-    }
-}
-
 impl<T> Setting<T> {
     pub fn set(self) -> Option<T> {
         match self {
@@ -67,7 +67,7 @@ impl<F> Embeddings<F> {
     ///
     /// If `embeddings.len() % self.dimension != 0`, then the append operation fails.
     pub fn append(&mut self, mut embeddings: Vec<F>) -> Result<(), Vec<F>> {
-        if embeddings.len() % self.dimension != 0 {
+        if !embeddings.len().is_multiple_of(self.dimension) {
             return Err(embeddings);
         }
         self.data.append(&mut embeddings);
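`is_multiple_of` (stabilized for unsigned integers in Rust 1.87) expresses the `% ... == 0` check by name and, unlike `%`, is defined for a zero divisor instead of panicking. A quick sketch mirroring the dimension check:

fn main() {
    let dimension = 3usize;
    assert!(9usize.is_multiple_of(dimension));
    assert!(!10usize.is_multiple_of(dimension));

    // Unlike `x % 0`, which panics, a zero divisor is well defined:
    // only 0 is a multiple of 0.
    assert!(0usize.is_multiple_of(0));
    assert!(!9usize.is_multiple_of(0));
}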
@@ -178,6 +178,7 @@ pub fn get_arch() -> anyhow::Result<&'static str> {
     #[cfg(not(all(target_os = "linux", target_arch = "aarch64")))]
     #[cfg(not(all(target_os = "linux", target_arch = "x86_64")))]
     #[cfg(not(all(target_os = "macos", target_arch = "aarch64")))]
+    #[cfg(not(all(target_os = "macos", target_arch = "x86_64")))]
     anyhow::bail!("unsupported platform")
 }

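The `get_arch` hunk adds one more `#[cfg(not(...))]` line for macOS on x86_64. Stacked `#[cfg]` attributes AND together, so the `bail!` is only compiled on platforms that match none of the excluded triples. A self-contained sketch of the mechanism (function and names illustrative, not the Meilisearch code):

fn arch_name() -> Option<&'static str> {
    #[cfg(all(target_os = "linux", target_arch = "x86_64"))]
    return Some("linux-amd64");
    #[cfg(all(target_os = "macos", target_arch = "aarch64"))]
    return Some("macos-arm64");

    // Stacked `#[cfg]` attributes must all hold for this statement to be
    // compiled, so the fallback only exists on platforms matched by none
    // of the arms above, exactly how the patch's extra `not(...)` line
    // keeps x86_64 macOS from reaching the `bail!`.
    #[cfg(not(all(target_os = "linux", target_arch = "x86_64")))]
    #[cfg(not(all(target_os = "macos", target_arch = "aarch64")))]
    return None;
}

fn main() {
    println!("{:?}", arch_name());
}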
rust-toolchain.toml

@@ -1,3 +1,3 @@
 [toolchain]
-channel = "1.89.0"
+channel = "1.91.1"
 components = ["clippy"]