Merge remote-tracking branch 'origin/release-v1.16.0' into document-sorting

Author: Mubelotix
Date: 2025-07-15 17:41:46 +02:00
181 changed files with 9716 additions and 2765 deletions

View File

@ -3,7 +3,7 @@ use serde::Serialize;
use time::{Duration, OffsetDateTime};
use utoipa::ToSchema;
use crate::batches::{Batch, BatchId, BatchStats};
use crate::batches::{Batch, BatchId, BatchStats, EmbedderStatsView};
use crate::task_view::DetailsView;
use crate::tasks::serialize_duration;
@ -14,7 +14,7 @@ pub struct BatchView {
pub uid: BatchId,
pub progress: Option<ProgressView>,
pub details: DetailsView,
pub stats: BatchStats,
pub stats: BatchStatsView,
#[serde(serialize_with = "serialize_duration", default)]
pub duration: Option<Duration>,
#[serde(with = "time::serde::rfc3339", default)]
@ -25,13 +25,26 @@ pub struct BatchView {
pub batch_strategy: String,
}
#[derive(Debug, Clone, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct BatchStatsView {
#[serde(flatten)]
pub stats: BatchStats,
#[serde(skip_serializing_if = "EmbedderStatsView::skip_serializing", default)]
pub embedder_requests: EmbedderStatsView,
}
impl BatchView {
pub fn from_batch(batch: &Batch) -> Self {
Self {
uid: batch.uid,
progress: batch.progress.clone(),
details: batch.details.clone(),
stats: batch.stats.clone(),
stats: BatchStatsView {
stats: batch.stats.clone(),
embedder_requests: batch.embedder_stats.clone(),
},
duration: batch.finished_at.map(|finished_at| finished_at - batch.started_at),
started_at: batch.started_at,
finished_at: batch.finished_at,
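
For context on the wrapper above, here is a minimal standalone sketch (simplified stand-in types, not the structs from this diff) of how serde's flatten plus skip_serializing_if shapes the batch payload: the wrapped stats are inlined at the top level, and the embedder counter only appears when it actually carries data.

    use serde::Serialize;

    #[derive(Serialize, Default)]
    #[serde(rename_all = "camelCase")]
    struct Stats { total_nb_tasks: u64 } // stand-in for BatchStats

    #[derive(Serialize, Default)]
    #[serde(rename_all = "camelCase")]
    struct EmbedderRequests { total: usize, failed: usize } // stand-in for EmbedderStatsView

    impl EmbedderRequests {
        fn skip_serializing(&self) -> bool { self.total == 0 && self.failed == 0 }
    }

    #[derive(Serialize)]
    #[serde(rename_all = "camelCase")]
    struct StatsView {
        #[serde(flatten)]
        stats: Stats,
        #[serde(skip_serializing_if = "EmbedderRequests::skip_serializing")]
        embedder_requests: EmbedderRequests,
    }

    fn main() {
        let quiet = StatsView {
            stats: Stats { total_nb_tasks: 3 },
            embedder_requests: EmbedderRequests::default(),
        };
        // No embedder activity: the key is omitted entirely -> {"totalNbTasks":3}
        println!("{}", serde_json::to_string(&quiet).unwrap());

        let busy = StatsView {
            stats: Stats { total_nb_tasks: 3 },
            embedder_requests: EmbedderRequests { total: 12, failed: 1 },
        };
        // {"totalNbTasks":3,"embedderRequests":{"total":12,"failed":1}}
        println!("{}", serde_json::to_string(&busy).unwrap());
    }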

View File

@ -1,6 +1,6 @@
use std::collections::BTreeMap;
use milli::progress::ProgressView;
use milli::progress::{EmbedderStats, ProgressView};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use utoipa::ToSchema;
@ -19,6 +19,8 @@ pub struct Batch {
pub progress: Option<ProgressView>,
pub details: DetailsView,
pub stats: BatchStats,
#[serde(skip_serializing_if = "EmbedderStatsView::skip_serializing", default)]
pub embedder_stats: EmbedderStatsView,
#[serde(with = "time::serde::rfc3339")]
pub started_at: OffsetDateTime,
@ -43,6 +45,7 @@ impl PartialEq for Batch {
progress,
details,
stats,
embedder_stats,
started_at,
finished_at,
enqueued_at,
@ -53,6 +56,7 @@ impl PartialEq for Batch {
&& progress.is_none() == other.progress.is_none()
&& details == &other.details
&& stats == &other.stats
&& embedder_stats == &other.embedder_stats
&& started_at == &other.started_at
&& finished_at == &other.finished_at
&& enqueued_at == &other.enqueued_at
@ -83,3 +87,30 @@ pub struct BatchStats {
#[serde(default, skip_serializing_if = "serde_json::Map::is_empty")]
pub internal_database_sizes: serde_json::Map<String, serde_json::Value>,
}
#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct EmbedderStatsView {
pub total: usize,
pub failed: usize,
#[serde(skip_serializing_if = "Option::is_none", default)]
pub last_error: Option<String>,
}
impl From<&EmbedderStats> for EmbedderStatsView {
fn from(stats: &EmbedderStats) -> Self {
let errors = stats.errors.read().unwrap_or_else(|p| p.into_inner());
Self {
total: stats.total_count.load(std::sync::atomic::Ordering::Relaxed),
failed: errors.1 as usize,
last_error: errors.0.clone(),
}
}
}
impl EmbedderStatsView {
pub fn skip_serializing(&self) -> bool {
self.total == 0 && self.failed == 0 && self.last_error.is_none()
}
}
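
The From impl above only works if milli's EmbedderStats exposes an atomic request counter and a lock around the last error. The sketch below mirrors that shape as inferred from the reads alone (field names and types are assumptions, not milli's actual definition); the point of unwrap_or_else(|p| p.into_inner()) is that a poisoned lock still yields its data instead of panicking while building the view.

    use std::sync::atomic::{AtomicUsize, Ordering};
    use std::sync::RwLock;

    // Assumed mirror of milli::progress::EmbedderStats, reconstructed from how it is read above.
    struct EmbedderStats {
        total_count: AtomicUsize,              // total embedder requests sent
        errors: RwLock<(Option<String>, u32)>, // (last error message, failed request count)
    }

    fn snapshot(stats: &EmbedderStats) -> (usize, usize, Option<String>) {
        // A thread panicking while holding the write lock poisons it; recover the data anyway.
        let errors = stats.errors.read().unwrap_or_else(|poisoned| poisoned.into_inner());
        (
            stats.total_count.load(Ordering::Relaxed),
            errors.1 as usize,
            errors.0.clone(),
        )
    }

    fn main() {
        let stats = EmbedderStats { total_count: AtomicUsize::new(7), errors: RwLock::new((None, 0)) };
        assert_eq!(snapshot(&stats), (7, 0, None));
    }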

View File

@ -302,6 +302,7 @@ InvalidFacetSearchQuery , InvalidRequest , BAD_REQU
InvalidFacetSearchName , InvalidRequest , BAD_REQUEST ;
FacetSearchDisabled , InvalidRequest , BAD_REQUEST ;
InvalidSearchVector , InvalidRequest , BAD_REQUEST ;
InvalidSearchMedia , InvalidRequest , BAD_REQUEST ;
InvalidSearchShowMatchesPosition , InvalidRequest , BAD_REQUEST ;
InvalidSearchShowRankingScore , InvalidRequest , BAD_REQUEST ;
InvalidSimilarShowRankingScore , InvalidRequest , BAD_REQUEST ;
@ -309,6 +310,7 @@ InvalidSearchShowRankingScoreDetails , InvalidRequest , BAD_REQU
InvalidSimilarShowRankingScoreDetails , InvalidRequest , BAD_REQUEST ;
InvalidSearchSort , InvalidRequest , BAD_REQUEST ;
InvalidSearchDistinct , InvalidRequest , BAD_REQUEST ;
InvalidSearchMediaAndVector , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDisplayedAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDistinctAttribute , InvalidRequest , BAD_REQUEST ;
InvalidSettingsProximityPrecision , InvalidRequest , BAD_REQUEST ;
@ -390,6 +392,13 @@ InvalidDocumentEditionContext , InvalidRequest , BAD_REQU
InvalidDocumentEditionFunctionFilter , InvalidRequest , BAD_REQUEST ;
EditDocumentsByFunctionError , InvalidRequest , BAD_REQUEST ;
InvalidSettingsIndexChat , InvalidRequest , BAD_REQUEST ;
// Export
InvalidExportUrl , InvalidRequest , BAD_REQUEST ;
InvalidExportApiKey , InvalidRequest , BAD_REQUEST ;
InvalidExportPayloadSize , InvalidRequest , BAD_REQUEST ;
InvalidExportIndexesPatterns , InvalidRequest , BAD_REQUEST ;
InvalidExportIndexFilter , InvalidRequest , BAD_REQUEST ;
InvalidExportIndexOverrideSettings , InvalidRequest , BAD_REQUEST ;
// Experimental features - Chat Completions
UnimplementedExternalFunctionCalling , InvalidRequest , NOT_IMPLEMENTED ;
UnimplementedNonStreamingChatCompletions , InvalidRequest , NOT_IMPLEMENTED ;
@ -458,6 +467,7 @@ impl ErrorCode for milli::Error {
| UserError::MissingSourceForNested { .. }
| UserError::InvalidSettingsEmbedder { .. } => Code::InvalidSettingsEmbedders,
UserError::TooManyEmbedders(_) => Code::InvalidSettingsEmbedders,
UserError::TooManyFragments(_) => Code::InvalidSettingsEmbedders,
UserError::InvalidPromptForEmbeddings(..) => Code::InvalidSettingsEmbedders,
UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound,
UserError::MultiplePrimaryKeyCandidatesFound { .. } => {

View File

@ -21,6 +21,7 @@ pub struct RuntimeTogglableFeatures {
pub get_task_documents_route: bool,
pub composite_embedders: bool,
pub chat_completions: bool,
pub multimodal: bool,
}
#[derive(Default, Debug, Clone, Copy)]

View File

@ -12,7 +12,7 @@ use crate::index_uid::{IndexUid, IndexUidFormatError};
/// An index uid pattern is composed of only ascii alphanumeric characters, - and _, between 1 and 400
/// bytes long and optionally ending with a *.
#[derive(Serialize, Deserialize, Deserr, Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Serialize, Deserialize, Deserr, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[deserr(try_from(&String) = FromStr::from_str -> IndexUidPatternFormatError)]
pub struct IndexUidPattern(String);
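
The new PartialOrd/Ord derives are what let IndexUidPattern serve as the key of the BTreeMap<IndexUidPattern, ExportIndexSettings> map introduced for the export task later in this diff, since BTreeMap keys must be totally ordered. A tiny sketch of the effect, with Pattern as a simplified stand-in:

    use std::collections::BTreeMap;

    // Deriving Ord on the newtype reuses the lexicographic order of the inner String.
    #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
    struct Pattern(String);

    fn main() {
        let mut indexes: BTreeMap<Pattern, bool> = BTreeMap::new();
        indexes.insert(Pattern("movies-*".into()), true);
        indexes.insert(Pattern("books".into()), false);
        // Keys iterate in order: "books" comes before "movies-*".
        assert_eq!(indexes.keys().next(), Some(&Pattern("books".into())));
    }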

View File

@ -144,6 +144,21 @@ impl Key {
}
}
pub fn default_read_only_admin() -> Self {
let now = OffsetDateTime::now_utc();
let uid = Uuid::new_v4();
Self {
name: Some("Default Read-Only Admin API Key".to_string()),
description: Some("Use it to read information across the whole database. Caution! Do not expose this key on a public frontend".to_string()),
uid,
actions: vec![Action::AllGet, Action::KeysGet],
indexes: vec![IndexUidPattern::all()],
expires_at: None,
created_at: now,
updated_at: now,
}
}
pub fn default_search() -> Self {
let now = OffsetDateTime::now_utc();
let uid = Uuid::new_v4();
@ -218,6 +233,9 @@ pub enum Action {
#[serde(rename = "*")]
#[deserr(rename = "*")]
All = 0,
#[serde(rename = "*.get")]
#[deserr(rename = "*.get")]
AllGet,
#[serde(rename = "search")]
#[deserr(rename = "search")]
Search,
@ -317,6 +335,9 @@ pub enum Action {
#[serde(rename = "experimental.update")]
#[deserr(rename = "experimental.update")]
ExperimentalFeaturesUpdate,
#[serde(rename = "export")]
#[deserr(rename = "export")]
Export,
#[serde(rename = "network.get")]
#[deserr(rename = "network.get")]
NetworkGet,
@ -396,6 +417,52 @@ impl Action {
}
}
/// Whether the action should be included in [Action::AllRead].
pub fn is_read(&self) -> bool {
use Action::*;
// It's using an exhaustive match to force the addition of new actions.
match self {
// Any action that expands to others must return false, as it wouldn't be able to expand recursively.
All | AllGet | DocumentsAll | IndexesAll | ChatsAll | TasksAll | SettingsAll
| StatsAll | MetricsAll | DumpsAll | SnapshotsAll | ChatsSettingsAll => false,
Search => true,
DocumentsAdd => false,
DocumentsGet => true,
DocumentsDelete => false,
Export => true,
IndexesAdd => false,
IndexesGet => true,
IndexesUpdate => false,
IndexesDelete => false,
IndexesSwap => false,
TasksCancel => false,
TasksDelete => false,
TasksGet => true,
SettingsGet => true,
SettingsUpdate => false,
StatsGet => true,
MetricsGet => true,
DumpsCreate => false,
SnapshotsCreate => false,
Version => true,
KeysAdd => false,
KeysGet => false, // Disabled in order to prevent privilege escalation
KeysUpdate => false,
KeysDelete => false,
ExperimentalFeaturesGet => true,
ExperimentalFeaturesUpdate => false,
NetworkGet => true,
NetworkUpdate => false,
ChatCompletions => false, // Disabled because it might trigger generation of new chats
ChatsGet => true,
ChatsDelete => false,
ChatsSettingsGet => true,
ChatsSettingsUpdate => false,
}
}
pub const fn repr(&self) -> u8 {
*self as u8
}
@ -405,6 +472,7 @@ pub mod actions {
use super::Action::*;
pub(crate) const ALL: u8 = All.repr();
pub const ALL_GET: u8 = AllGet.repr();
pub const SEARCH: u8 = Search.repr();
pub const DOCUMENTS_ALL: u8 = DocumentsAll.repr();
pub const DOCUMENTS_ADD: u8 = DocumentsAdd.repr();
@ -438,6 +506,8 @@ pub mod actions {
pub const EXPERIMENTAL_FEATURES_GET: u8 = ExperimentalFeaturesGet.repr();
pub const EXPERIMENTAL_FEATURES_UPDATE: u8 = ExperimentalFeaturesUpdate.repr();
pub const EXPORT: u8 = Export.repr();
pub const NETWORK_GET: u8 = NetworkGet.repr();
pub const NETWORK_UPDATE: u8 = NetworkUpdate.repr();
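
A test-style sketch, not part of the diff, that exercises the new read-only pieces if placed alongside this module; every expectation is taken from the match arms and the default_read_only_admin constructor above.

    #[cfg(test)]
    mod read_only_admin_checks {
        use super::{Action, Key};

        #[test]
        fn all_get_grants_reads_but_never_key_listing() {
            assert!(Action::Search.is_read());
            assert!(Action::DocumentsGet.is_read());
            assert!(Action::Export.is_read());
            assert!(!Action::DocumentsAdd.is_read());
            // Deliberately excluded so a *.get key cannot enumerate other keys.
            assert!(!Action::KeysGet.is_read());
            // Wildcard actions never report as read; they expand to other actions instead.
            assert!(!Action::All.is_read());
        }

        #[test]
        fn default_read_only_admin_key_shape() {
            let key = Key::default_read_only_admin();
            assert_eq!(key.actions, vec![Action::AllGet, Action::KeysGet]);
            assert!(key.expires_at.is_none());
        }
    }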

View File

@ -18,7 +18,7 @@ pub mod versioning;
pub use milli::{heed, Index};
use uuid::Uuid;
pub use versioning::VERSION_FILE_NAME;
pub use {milli, serde_cs};
pub use {byte_unit, milli, serde_cs};
pub type Document = serde_json::Map<String, serde_json::Value>;
pub type InstanceUid = Uuid;

View File

@ -9,10 +9,11 @@ use std::str::FromStr;
use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
use milli::disabled_typos_terms::DisabledTyposTerms;
use milli::index::{IndexEmbeddingConfig, PrefixSearch};
use milli::index::PrefixSearch;
use milli::proximity::ProximityPrecision;
pub use milli::update::ChatSettings;
use milli::update::Setting;
use milli::vector::db::IndexEmbeddingConfig;
use milli::{Criterion, CriterionError, FilterableAttributesRule, Index, DEFAULT_VALUES_PER_FACET};
use serde::{Deserialize, Serialize, Serializer};
use utoipa::ToSchema;
@ -500,8 +501,11 @@ impl Settings<Unchecked> {
let Setting::Set(mut configs) = self.embedders else { return Ok(self) };
for (name, config) in configs.iter_mut() {
let config_to_check = std::mem::take(config);
let checked_config =
milli::update::validate_embedding_settings(config_to_check.inner, name)?;
let checked_config = milli::update::validate_embedding_settings(
config_to_check.inner,
name,
milli::vector::settings::EmbeddingValidationContext::SettingsPartialUpdate,
)?;
*config = SettingEmbeddingSettings { inner: checked_config };
}
self.embedders = Setting::Set(configs);
@ -751,6 +755,7 @@ pub fn apply_settings_to_builder(
builder.reset_min_word_len_two_typos();
builder.reset_exact_words();
builder.reset_exact_attributes();
builder.reset_disable_on_numbers();
}
Setting::NotSet => (),
}
@ -910,6 +915,7 @@ pub fn settings(
};
let embedders: BTreeMap<_, _> = index
.embedding_configs()
.embedding_configs(rtxn)?
.into_iter()
.map(|IndexEmbeddingConfig { name, config, .. }| {
@ -968,6 +974,7 @@ pub fn settings(
if let SecretPolicy::HideSecrets = secret_policy {
settings.hide_secrets()
}
Ok(settings)
}

View File

@ -1,3 +1,6 @@
use std::collections::BTreeMap;
use byte_unit::UnitType;
use milli::Object;
use serde::{Deserialize, Serialize};
use time::{Duration, OffsetDateTime};
@ -6,7 +9,9 @@ use utoipa::ToSchema;
use crate::batches::BatchId;
use crate::error::ResponseError;
use crate::settings::{Settings, Unchecked};
use crate::tasks::{serialize_duration, Details, IndexSwap, Kind, Status, Task, TaskId};
use crate::tasks::{
serialize_duration, Details, DetailsExportIndexSettings, IndexSwap, Kind, Status, Task, TaskId,
};
#[derive(Debug, Clone, PartialEq, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
@ -118,6 +123,15 @@ pub struct DetailsView {
pub upgrade_from: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub upgrade_to: Option<String>,
// exporting
#[serde(skip_serializing_if = "Option::is_none")]
pub url: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub api_key: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub payload_size: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub indexes: Option<BTreeMap<String, DetailsExportIndexSettings>>,
}
impl DetailsView {
@ -238,6 +252,34 @@ impl DetailsView {
Some(left)
}
},
url: match (self.url.clone(), other.url.clone()) {
(None, None) => None,
(None, Some(url)) | (Some(url), None) => Some(url),
// We should never be able to batch multiple exports at the same time.
// So we return the first one we encounter but that shouldn't be an issue anyway.
(Some(left), Some(_right)) => Some(left),
},
api_key: match (self.api_key.clone(), other.api_key.clone()) {
(None, None) => None,
(None, Some(key)) | (Some(key), None) => Some(key),
// We should never be able to batch multiple exports at the same time.
// So we return the first one we encounter but that shouldn't be an issue anyway.
(Some(left), Some(_right)) => Some(left),
},
payload_size: match (self.payload_size.clone(), other.payload_size.clone()) {
(None, None) => None,
(None, Some(size)) | (Some(size), None) => Some(size),
// We should never be able to batch multiple exports at the same time.
// So we return the first one we encounter but that shouldn't be an issue anyway.
(Some(left), Some(_right)) => Some(left),
},
indexes: match (self.indexes.clone(), other.indexes.clone()) {
(None, None) => None,
(None, Some(indexes)) | (Some(indexes), None) => Some(indexes),
// We should never be able to batch multiple exports at the same time.
// So we return the first one we encounter but that shouldn't be an issue anyway.
(Some(left), Some(_right)) => Some(left),
},
// We want the earliest version
upgrade_from: match (self.upgrade_from.clone(), other.upgrade_from.clone()) {
(None, None) => None,
@ -327,6 +369,22 @@ impl From<Details> for DetailsView {
Details::IndexSwap { swaps } => {
DetailsView { swaps: Some(swaps), ..Default::default() }
}
Details::Export { url, api_key, payload_size, indexes } => DetailsView {
url: Some(url),
api_key: api_key.map(|mut api_key| {
hide_secret(&mut api_key);
api_key
}),
payload_size: payload_size
.map(|ps| ps.get_appropriate_unit(UnitType::Both).to_string()),
indexes: Some(
indexes
.into_iter()
.map(|(pattern, settings)| (pattern.to_string(), settings))
.collect(),
),
..Default::default()
},
Details::UpgradeDatabase { from, to } => DetailsView {
upgrade_from: Some(format!("v{}.{}.{}", from.0, from.1, from.2)),
upgrade_to: Some(format!("v{}.{}.{}", to.0, to.1, to.2)),
@ -335,3 +393,21 @@ impl From<Details> for DetailsView {
}
}
}
// We definitely need to factorize the code to hide the secret key
fn hide_secret(secret: &mut String) {
match secret.len() {
x if x < 10 => {
secret.replace_range(.., "XXX...");
}
x if x < 20 => {
secret.replace_range(2.., "XXXX...");
}
x if x < 30 => {
secret.replace_range(3.., "XXXXX...");
}
_x => {
secret.replace_range(5.., "XXXXXX...");
}
}
}
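
A quick illustration of what the masking above produces if dropped next to the function in this file; the sample keys are made up, but the visible prefix lengths follow the match arms exactly.

    fn main() {
        let samples = [
            "short",                                //  5 bytes
            "0123456789abcde",                      // 15 bytes
            "0123456789abcdefghijklm",              // 23 bytes
            "0123456789abcdefghijklmnopqrstuvwxyz", // 36 bytes
        ];
        for key in samples {
            let mut masked = key.to_string();
            hide_secret(&mut masked);
            println!("{key} -> {masked}");
        }
        // Expected, per the length buckets above:
        //   "short"                                -> "XXX..."         (< 10: fully masked)
        //   "0123456789abcde"                      -> "01XXXX..."      (10..20: 2 chars kept)
        //   "0123456789abcdefghijklm"              -> "012XXXXX..."    (20..30: 3 chars kept)
        //   "0123456789abcdefghijklmnopqrstuvwxyz" -> "01234XXXXXX..." (30 and up: 5 chars kept)
    }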

View File

@ -1,19 +1,22 @@
use core::fmt;
use std::collections::HashSet;
use std::collections::{BTreeMap, HashSet};
use std::fmt::{Display, Write};
use std::str::FromStr;
use byte_unit::Byte;
use enum_iterator::Sequence;
use milli::update::IndexDocumentsMethod;
use milli::Object;
use roaring::RoaringBitmap;
use serde::{Deserialize, Serialize, Serializer};
use serde_json::Value;
use time::{Duration, OffsetDateTime};
use utoipa::ToSchema;
use utoipa::{schema, ToSchema};
use uuid::Uuid;
use crate::batches::BatchId;
use crate::error::ResponseError;
use crate::index_uid_pattern::IndexUidPattern;
use crate::keys::Key;
use crate::settings::{Settings, Unchecked};
use crate::{versioning, InstanceUid};
@ -50,6 +53,7 @@ impl Task {
| SnapshotCreation
| TaskCancelation { .. }
| TaskDeletion { .. }
| Export { .. }
| UpgradeDatabase { .. }
| IndexSwap { .. } => None,
DocumentAdditionOrUpdate { index_uid, .. }
@ -86,6 +90,7 @@ impl Task {
| KindWithContent::TaskDeletion { .. }
| KindWithContent::DumpCreation { .. }
| KindWithContent::SnapshotCreation
| KindWithContent::Export { .. }
| KindWithContent::UpgradeDatabase { .. } => None,
}
}
@ -108,11 +113,11 @@ pub enum KindWithContent {
},
DocumentDeletionByFilter {
index_uid: String,
filter_expr: serde_json::Value,
filter_expr: Value,
},
DocumentEdition {
index_uid: String,
filter_expr: Option<serde_json::Value>,
filter_expr: Option<Value>,
context: Option<milli::Object>,
function: String,
},
@ -152,6 +157,12 @@ pub enum KindWithContent {
instance_uid: Option<InstanceUid>,
},
SnapshotCreation,
Export {
url: String,
api_key: Option<String>,
payload_size: Option<Byte>,
indexes: BTreeMap<IndexUidPattern, ExportIndexSettings>,
},
UpgradeDatabase {
from: (u32, u32, u32),
},
@ -163,6 +174,13 @@ pub struct IndexSwap {
pub indexes: (String, String),
}
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ExportIndexSettings {
pub filter: Option<Value>,
pub override_settings: bool,
}
impl KindWithContent {
pub fn as_kind(&self) -> Kind {
match self {
@ -180,6 +198,7 @@ impl KindWithContent {
KindWithContent::TaskDeletion { .. } => Kind::TaskDeletion,
KindWithContent::DumpCreation { .. } => Kind::DumpCreation,
KindWithContent::SnapshotCreation => Kind::SnapshotCreation,
KindWithContent::Export { .. } => Kind::Export,
KindWithContent::UpgradeDatabase { .. } => Kind::UpgradeDatabase,
}
}
@ -192,6 +211,7 @@ impl KindWithContent {
| SnapshotCreation
| TaskCancelation { .. }
| TaskDeletion { .. }
| Export { .. }
| UpgradeDatabase { .. } => vec![],
DocumentAdditionOrUpdate { index_uid, .. }
| DocumentEdition { index_uid, .. }
@ -269,6 +289,14 @@ impl KindWithContent {
}),
KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }),
KindWithContent::SnapshotCreation => None,
KindWithContent::Export { url, api_key, payload_size, indexes } => {
Some(Details::Export {
url: url.clone(),
api_key: api_key.clone(),
payload_size: *payload_size,
indexes: indexes.iter().map(|(p, s)| (p.clone(), s.clone().into())).collect(),
})
}
KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
from: (from.0, from.1, from.2),
to: (
@ -335,6 +363,14 @@ impl KindWithContent {
}),
KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }),
KindWithContent::SnapshotCreation => None,
KindWithContent::Export { url, api_key, payload_size, indexes } => {
Some(Details::Export {
url: url.clone(),
api_key: api_key.clone(),
payload_size: *payload_size,
indexes: indexes.iter().map(|(p, s)| (p.clone(), s.clone().into())).collect(),
})
}
KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
from: *from,
to: (
@ -383,6 +419,14 @@ impl From<&KindWithContent> for Option<Details> {
}),
KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }),
KindWithContent::SnapshotCreation => None,
KindWithContent::Export { url, api_key, payload_size, indexes } => {
Some(Details::Export {
url: url.clone(),
api_key: api_key.clone(),
payload_size: *payload_size,
indexes: indexes.iter().map(|(p, s)| (p.clone(), s.clone().into())).collect(),
})
}
KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
from: *from,
to: (
@ -499,6 +543,7 @@ pub enum Kind {
TaskDeletion,
DumpCreation,
SnapshotCreation,
Export,
UpgradeDatabase,
}
@ -516,6 +561,7 @@ impl Kind {
| Kind::TaskCancelation
| Kind::TaskDeletion
| Kind::DumpCreation
| Kind::Export
| Kind::UpgradeDatabase
| Kind::SnapshotCreation => false,
}
@ -536,6 +582,7 @@ impl Display for Kind {
Kind::TaskDeletion => write!(f, "taskDeletion"),
Kind::DumpCreation => write!(f, "dumpCreation"),
Kind::SnapshotCreation => write!(f, "snapshotCreation"),
Kind::Export => write!(f, "export"),
Kind::UpgradeDatabase => write!(f, "upgradeDatabase"),
}
}
@ -568,6 +615,8 @@ impl FromStr for Kind {
Ok(Kind::DumpCreation)
} else if kind.eq_ignore_ascii_case("snapshotCreation") {
Ok(Kind::SnapshotCreation)
} else if kind.eq_ignore_ascii_case("export") {
Ok(Kind::Export)
} else if kind.eq_ignore_ascii_case("upgradeDatabase") {
Ok(Kind::UpgradeDatabase)
} else {
@ -643,12 +692,33 @@ pub enum Details {
IndexSwap {
swaps: Vec<IndexSwap>,
},
Export {
url: String,
api_key: Option<String>,
payload_size: Option<Byte>,
indexes: BTreeMap<IndexUidPattern, DetailsExportIndexSettings>,
},
UpgradeDatabase {
from: (u32, u32, u32),
to: (u32, u32, u32),
},
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, ToSchema)]
#[schema(rename_all = "camelCase")]
pub struct DetailsExportIndexSettings {
#[serde(flatten)]
pub settings: ExportIndexSettings,
#[serde(skip_serializing_if = "Option::is_none")]
pub matched_documents: Option<u64>,
}
impl From<ExportIndexSettings> for DetailsExportIndexSettings {
fn from(settings: ExportIndexSettings) -> Self {
DetailsExportIndexSettings { settings, matched_documents: None }
}
}
impl Details {
pub fn to_failed(&self) -> Self {
let mut details = self.clone();
@ -667,6 +737,7 @@ impl Details {
Self::SettingsUpdate { .. }
| Self::IndexInfo { .. }
| Self::Dump { .. }
| Self::Export { .. }
| Self::UpgradeDatabase { .. }
| Self::IndexSwap { .. } => (),
}