mirror of https://github.com/meilisearch/meilisearch.git
synced 2025-09-06 04:36:32 +00:00
Merge pull request #5800 from meilisearch/tmp-release-v1.16.0
Bring back changes to main
@@ -93,7 +93,7 @@ pub struct ChatSearchParams {
     pub hybrid: Setting<HybridQuery>,
 
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(default = Setting::Set(20))]
+    #[deserr(default)]
     #[schema(value_type = Option<usize>)]
     pub limit: Setting<usize>,
 
@@ -2,7 +2,7 @@ use heed::RwTxn;
 use roaring::RoaringBitmap;
 use time::OffsetDateTime;
 
-use crate::{FieldDistribution, Index, Result};
+use crate::{database_stats::DatabaseStats, FieldDistribution, Index, Result};
 
 pub struct ClearDocuments<'t, 'i> {
     wtxn: &'t mut RwTxn<'i>,
@@ -92,6 +92,10 @@ impl<'t, 'i> ClearDocuments<'t, 'i> {
 
         documents.clear(self.wtxn)?;
 
+        // Update the stats of the documents database after clearing all documents.
+        let stats = DatabaseStats::new(self.index.documents.remap_data_type(), self.wtxn)?;
+        self.index.put_documents_stats(self.wtxn, stats)?;
+
         Ok(number_of_documents)
     }
 }
@@ -122,6 +126,9 @@ mod tests {
 
         let rtxn = index.read_txn().unwrap();
 
+        // Variables for statistics verification
+        let stats = index.documents_stats(&rtxn).unwrap().unwrap();
+
         // the value is 7 because there is `[id, name, age, country, _geo, _geo.lng, _geo.lat]`
         assert_eq!(index.fields_ids_map(&rtxn).unwrap().len(), 7);
@@ -142,5 +149,9 @@ mod tests {
         assert!(index.field_id_docid_facet_f64s.is_empty(&rtxn).unwrap());
         assert!(index.field_id_docid_facet_strings.is_empty(&rtxn).unwrap());
         assert!(index.documents.is_empty(&rtxn).unwrap());
+
+        // Verify that the statistics are correctly updated after clearing documents
+        assert_eq!(index.number_of_documents(&rtxn).unwrap(), 0);
+        assert_eq!(stats.number_of_entries(), 0);
     }
 }
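A minimal usage sketch of the updated flow, assuming the `ClearDocuments::new(wtxn, index)` constructor implied by the struct fields above (error plumbing elided):

fn clear_and_check(index: &Index) -> Result<()> {
    let mut wtxn = index.write_txn()?;
    ClearDocuments::new(&mut wtxn, index).execute()?;
    wtxn.commit()?;

    // The cleared state is now reflected both in the document count and in the
    // cached stats written by `put_documents_stats` above.
    let rtxn = index.read_txn()?;
    let stats = index.documents_stats(&rtxn)?.unwrap();
    assert_eq!(index.number_of_documents(&rtxn)?, 0);
    assert_eq!(stats.number_of_entries(), 0);
    Ok(())
}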
@@ -23,7 +23,7 @@ use crate::progress::EmbedderStats;
 use crate::prompt::Prompt;
 use crate::update::del_add::{DelAdd, KvReaderDelAdd, KvWriterDelAdd};
 use crate::update::settings::InnerIndexSettingsDiff;
-use crate::vector::db::{EmbedderInfo, EmbeddingStatus, EmbeddingStatusDelta};
+use crate::vector::db::{EmbedderInfo, EmbeddingStatusDelta};
 use crate::vector::error::{EmbedErrorKind, PossibleEmbeddingMistakes, UnusedVectorsDistribution};
 use crate::vector::extractor::{Extractor, ExtractorDiff, RequestFragmentExtractor};
 use crate::vector::parsed_vectors::{ParsedVectorsDiff, VectorState};
@@ -441,6 +441,8 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
     {
         let embedder_is_manual = matches!(*runtime.embedder, Embedder::UserProvided(_));
 
+        let (old_is_user_provided, old_must_regenerate) =
+            embedder_info.embedding_status.is_user_provided_must_regenerate(docid);
         let (old, new) = parsed_vectors.remove(embedder_name);
         let new_must_regenerate = new.must_regenerate();
         let delta = match action {
@@ -499,16 +501,19 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
 
                     let is_adding_fragments = has_fragments && !old_has_fragments;
 
-                    if is_adding_fragments {
+                    if !has_fragments {
+                        // removing fragments
+                        regenerate_prompt(obkv, &runtime.document_template, new_fields_ids_map)?
+                    } else if is_adding_fragments ||
+                        // regenerate all fragments when going from user provided to ! user provided
+                        old_is_user_provided
+                    {
                         regenerate_all_fragments(
                             runtime.fragments(),
                             &doc_alloc,
                             new_fields_ids_map,
                             obkv,
                         )
-                    } else if !has_fragments {
-                        // removing fragments
-                        regenerate_prompt(obkv, &runtime.document_template, new_fields_ids_map)?
                     } else {
                         let mut fragment_diff = Vec::new();
                         let new_fields_ids_map = new_fields_ids_map.as_fields_ids_map();
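Condensed, the reordered branches above implement the following selection; `RegenerateAction` and `select_action` are illustrative names, not part of the change:

enum RegenerateAction {
    Prompt,        // fragments were removed: regenerate the document template prompt
    AllFragments,  // fragments added, or vectors were previously user provided
    DiffFragments, // otherwise, diff fragments one by one
}

fn select_action(
    has_fragments: bool,
    is_adding_fragments: bool,
    old_is_user_provided: bool,
) -> RegenerateAction {
    if !has_fragments {
        RegenerateAction::Prompt
    } else if is_adding_fragments || old_is_user_provided {
        RegenerateAction::AllFragments
    } else {
        RegenerateAction::DiffFragments
    }
}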
@@ -600,7 +605,8 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
                 docid,
                 &delta,
                 new_must_regenerate,
-                &embedder_info.embedding_status,
+                old_is_user_provided,
+                old_must_regenerate,
             );
 
             // and we finally push the unique vectors into the writer
@@ -657,10 +663,9 @@ fn push_embedding_status_delta(
     docid: DocumentId,
     delta: &VectorStateDelta,
     new_must_regenerate: bool,
-    embedding_status: &EmbeddingStatus,
+    old_is_user_provided: bool,
+    old_must_regenerate: bool,
 ) {
-    let (old_is_user_provided, old_must_regenerate) =
-        embedding_status.is_user_provided_must_regenerate(docid);
     let new_is_user_provided = match delta {
         VectorStateDelta::NoChange => old_is_user_provided,
         VectorStateDelta::NowRemoved => {
@@ -16,6 +16,7 @@ pub struct IndexerConfig {
     pub max_positions_per_attributes: Option<u32>,
     pub skip_index_budget: bool,
     pub experimental_no_edition_2024_for_settings: bool,
+    pub experimental_no_edition_2024_for_dumps: bool,
 }
 
 impl IndexerConfig {
@@ -65,6 +66,7 @@ impl Default for IndexerConfig {
             max_positions_per_attributes: None,
             skip_index_budget: false,
             experimental_no_edition_2024_for_settings: false,
+            experimental_no_edition_2024_for_dumps: false,
         }
     }
 }
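Because `IndexerConfig` keeps a `Default` implementation (above), callers can opt into the new flag with struct-update syntax; a sketch:

// Sketch: opt a process out of the edition-2024 dump path only.
let config = IndexerConfig {
    experimental_no_edition_2024_for_dumps: true,
    ..IndexerConfig::default()
};
assert!(!config.experimental_no_edition_2024_for_settings);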
@@ -620,12 +620,35 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
     where
         'a: 'doc,
     {
         match &mut self.kind {
-            ChunkType::Fragments { fragments: _, session } => {
+            ChunkType::Fragments { fragments, session } => {
                 let doc_alloc = session.doc_alloc();
-                self.set_status(docid, old_is_user_provided, true, false, true);
-
-                if old_is_user_provided | full_reindex {
+                let reindex_all_fragments =
+                    // when the vectors were user-provided, Meilisearch cannot know if they come from a particular fragment,
+                    // and so Meilisearch needs to clear all embeddings in that case.
+                    // Fortunately, as dump export fragment vector with `regenerate` set to `false`,
+                    // this case should be rare and opt-in.
+                    old_is_user_provided ||
+                    // full-reindex case
+                    full_reindex;
+
+                if reindex_all_fragments {
                     session.on_embed_mut().clear_vectors(docid);
                     let extractors = fragments.iter().map(|fragment| {
                         RequestFragmentExtractor::new(fragment, doc_alloc).ignore_errors()
                     });
                     insert_autogenerated(
                         docid,
                         external_docid,
                         extractors,
                         document,
                         &(),
                         session,
                         unused_vectors_distribution,
                     )?;
                     return Ok(());
                 }
 
                 settings_delta.try_for_each_fragment_diff(
@@ -669,7 +692,6 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
                         Result::Ok(())
                     },
                 )?;
-                self.set_status(docid, old_is_user_provided, true, false, true);
             }
             ChunkType::DocumentTemplate { document_template, session } => {
                 let doc_alloc = session.doc_alloc();
@@ -690,12 +712,18 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
 
                 match extractor.diff_settings(document, &external_docid, old_extractor.as_ref())? {
                     ExtractorDiff::Removed => {
+                        if old_is_user_provided || full_reindex {
+                            session.on_embed_mut().clear_vectors(docid);
+                        }
                         OnEmbed::process_embedding_response(
                             session.on_embed_mut(),
                             crate::vector::session::EmbeddingResponse { metadata, embedding: None },
                         );
                     }
                     ExtractorDiff::Added(input) | ExtractorDiff::Updated(input) => {
+                        if old_is_user_provided || full_reindex {
+                            session.on_embed_mut().clear_vectors(docid);
+                        }
                         session.request_embedding(metadata, input, unused_vectors_distribution)?;
                     }
                     ExtractorDiff::Unchanged => { /* do nothing */ }
@@ -722,6 +750,13 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
     where
         'a: 'doc,
     {
+        self.set_status(
+            docid,
+            old_is_user_provided,
+            old_must_regenerate,
+            false,
+            new_must_regenerate,
+        );
         match &mut self.kind {
             ChunkType::DocumentTemplate { document_template, session } => {
                 let doc_alloc = session.doc_alloc();
@@ -731,10 +766,6 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
                     new_fields_ids_map,
                 );
 
-                if old_is_user_provided {
-                    session.on_embed_mut().clear_vectors(docid);
-                }
-
                 update_autogenerated(
                     docid,
                     external_docid,
@@ -743,6 +774,7 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
                     new_document,
                     &external_docid,
                     old_must_regenerate,
+                    old_is_user_provided,
                     session,
                     unused_vectors_distribution,
                 )?
@@ -754,7 +786,21 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
                 });
 
+                if old_is_user_provided {
+                    // when the document was `userProvided`, Meilisearch cannot know whose fragments a particular
+                    // vector was referring to.
+                    // So as a result Meilisearch will regenerate all fragments on this case.
+                    // Fortunately, since dumps for fragments set regenerate to false, this case should be rare.
+                    session.on_embed_mut().clear_vectors(docid);
+                    insert_autogenerated(
+                        docid,
+                        external_docid,
+                        extractors,
+                        new_document,
+                        &(),
+                        session,
+                        unused_vectors_distribution,
+                    )?;
+                    return Ok(());
+                }
+
                 update_autogenerated(
@@ -765,25 +811,18 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
                     new_document,
                     &(),
                     old_must_regenerate,
+                    false,
                     session,
                     unused_vectors_distribution,
                 )?
             }
         };
 
-        self.set_status(
-            docid,
-            old_is_user_provided,
-            old_must_regenerate,
-            false,
-            new_must_regenerate,
-        );
-
         Ok(())
     }
 
     #[allow(clippy::too_many_arguments)]
-    pub fn insert_autogenerated<D: Document<'a> + Debug>(
+    pub fn insert_autogenerated<'doc, D: Document<'doc> + Debug>(
         &mut self,
         docid: DocumentId,
         external_docid: &'a str,
@@ -791,7 +830,10 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
         new_fields_ids_map: &'a RefCell<crate::GlobalFieldsIdsMap>,
         unused_vectors_distribution: &UnusedVectorsDistributionBump<'a>,
         new_must_regenerate: bool,
-    ) -> Result<()> {
+    ) -> Result<()>
+    where
+        'a: 'doc,
+    {
         let (default_is_user_provided, default_must_regenerate) = (false, true);
         self.set_status(
             docid,
@@ -956,6 +998,7 @@ fn update_autogenerated<'doc, 'a: 'doc, 'b, E, OD, ND>(
     new_document: ND,
     meta: &E::DocumentMetadata,
     old_must_regenerate: bool,
+    mut must_clear_on_generation: bool,
     session: &mut EmbedSession<'a, OnEmbeddingDocumentUpdates<'a, 'b>, E::Input>,
     unused_vectors_distribution: &UnusedVectorsDistributionBump<'a>,
 ) -> Result<()>
@@ -984,6 +1027,11 @@ where
         };
 
         if must_regenerate {
+            if must_clear_on_generation {
+                must_clear_on_generation = false;
+                session.on_embed_mut().clear_vectors(docid);
+            }
+
             let metadata =
                 Metadata { docid, external_docid, extractor_id: extractor.extractor_id() };
 
@@ -1002,7 +1050,7 @@ where
     Ok(())
 }
 
-fn insert_autogenerated<'a, 'b, E, D: Document<'a> + Debug>(
+fn insert_autogenerated<'doc, 'a: 'doc, 'b, E, D: Document<'doc> + Debug>(
     docid: DocumentId,
     external_docid: &'a str,
     extractors: impl IntoIterator<Item = E>,
@@ -101,6 +101,10 @@ impl<T> Setting<T> {
         matches!(self, Self::NotSet)
     }
 
+    pub const fn is_reset(&self) -> bool {
+        matches!(self, Self::Reset)
+    }
+
     /// If `Self` is `Reset`, then map self to `Set` with the provided `val`.
     pub fn or_reset(self, val: T) -> Self {
         match self {
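For context, a self-contained model of the three-state type that `is_reset` completes (a sketch, not the milli definition):

// Minimal stand-in for milli's Setting<T>: Set carries a value, Reset asks to
// clear the stored value, NotSet leaves it untouched.
enum Setting<T> {
    Set(T),
    Reset,
    NotSet,
}

impl<T> Setting<T> {
    const fn is_not_set(&self) -> bool {
        matches!(self, Self::NotSet)
    }

    const fn is_reset(&self) -> bool {
        matches!(self, Self::Reset)
    }
}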
@@ -554,10 +558,10 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
         match self.searchable_fields {
             Setting::Set(ref fields) => {
                 // Check to see if the searchable fields changed before doing anything else
-                let old_fields = self.index.searchable_fields(self.wtxn)?;
+                let old_fields = self.index.user_defined_searchable_fields(self.wtxn)?;
                 let did_change = {
                     let new_fields = fields.iter().map(String::as_str).collect::<Vec<_>>();
-                    new_fields != old_fields
+                    old_fields.is_none_or(|old| new_fields != old)
                 };
                 if !did_change {
                     return Ok(false);
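The switch to `Option::is_none_or` (stabilized in Rust 1.82) matters when no user-defined searchable fields were stored yet: `None` must count as a change, rather than being compared against a resolved default. A small check:

// `old` is the previously stored user-defined list, if any.
let new_fields = vec!["title", "overview"];

let never_set: Option<Vec<&str>> = None;
assert!(never_set.is_none_or(|old| new_fields != old)); // nothing stored: a change

let unchanged = Some(vec!["title", "overview"]);
assert!(!unchanged.is_none_or(|old| new_fields != old)); // same list: no change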
@@ -1213,6 +1217,10 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
                 // new config
                 EitherOrBoth::Right((name, mut setting)) => {
                     tracing::debug!(embedder = name, "new embedder");
+                    // if we are asked to reset an embedder that doesn't exist, just ignore it
+                    if setting.is_reset() {
+                        continue;
+                    }
                     // apply the default source in case the source was not set so that it gets validated
                     crate::vector::settings::EmbeddingSettings::apply_default_source(&mut setting);
                     crate::vector::settings::EmbeddingSettings::apply_default_openai_model(
@@ -2,6 +2,7 @@ mod v1_12;
 mod v1_13;
 mod v1_14;
 mod v1_15;
+mod v1_16;
 use heed::RwTxn;
 use v1_12::{V1_12_3_To_V1_13_0, V1_12_To_V1_12_3};
 use v1_13::{V1_13_0_To_V1_13_1, V1_13_1_To_Latest_V1_13};
@@ -10,6 +11,7 @@ use v1_15::Latest_V1_14_To_Latest_V1_15;
 
 use crate::constants::{VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
 use crate::progress::{Progress, VariableNameStep};
+use crate::update::upgrade::v1_16::Latest_V1_15_To_V1_16_0;
 use crate::{Index, InternalError, Result};
 
 trait UpgradeIndex {
@@ -24,6 +26,59 @@ trait UpgradeIndex {
     fn target_version(&self) -> (u32, u32, u32);
 }
 
+const UPGRADE_FUNCTIONS: &[&dyn UpgradeIndex] = &[
+    &V1_12_To_V1_12_3 {},
+    &V1_12_3_To_V1_13_0 {},
+    &V1_13_0_To_V1_13_1 {},
+    &V1_13_1_To_Latest_V1_13 {},
+    &Latest_V1_13_To_Latest_V1_14 {},
+    &Latest_V1_14_To_Latest_V1_15 {},
+    &Latest_V1_15_To_V1_16_0 {},
+    // This is the last upgrade function, it will be called when the index is up to date.
+    // any other upgrade function should be added before this one.
+    &ToCurrentNoOp {},
+];
+
+/// Causes a compile-time error if the argument is not in range of `0..UPGRADE_FUNCTIONS.len()`
+macro_rules! function_index {
+    ($start:expr) => {{
+        const _CHECK_INDEX: () = {
+            if $start >= $crate::update::upgrade::UPGRADE_FUNCTIONS.len() {
+                panic!("upgrade functions out of range")
+            }
+        };
+
+        $start
+    }};
+}
+
+const fn start(from: (u32, u32, u32)) -> Option<usize> {
+    let start = match from {
+        (1, 12, 0..=2) => function_index!(0),
+        (1, 12, 3..) => function_index!(1),
+        (1, 13, 0) => function_index!(2),
+        (1, 13, _) => function_index!(4),
+        (1, 14, _) => function_index!(5),
+        // We must handle the current version in the match because in case of a failure some index may have been upgraded but not other.
+        (1, 15, _) => function_index!(6),
+        (1, 16, _) => function_index!(7),
+        // We deliberately don't add a placeholder with (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH) here to force manually
+        // considering dumpless upgrade.
+        (_major, _minor, _patch) => return None,
+    };
+
+    Some(start)
+}
+
+/// Causes a compile-time error if the latest package cannot be upgraded.
+///
+/// This serves as a reminder to consider the proper dumpless upgrade implementation when changing the package version.
+const _CHECK_PACKAGE_CAN_UPGRADE: () = {
+    if start((VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)).is_none() {
+        panic!("cannot upgrade from latest package version")
+    }
+};
+
 /// Return true if the cached stats of the index must be regenerated
 pub fn upgrade<MSP>(
     wtxn: &mut RwTxn,
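The `_CHECK_INDEX` constant turns a bad index into a build failure because evaluating a panicking `const` is a compile-time error. A stand-alone model of the same pattern (names and the two-element table are illustrative):

const FUNCS: &[fn() -> &'static str] = &[|| "first", || "second"];

macro_rules! function_index {
    ($i:expr) => {{
        // Evaluated at compile time: out-of-range indices fail the build
        // instead of panicking at runtime.
        const _CHECK_INDEX: () = {
            if $i >= FUNCS.len() {
                panic!("upgrade functions out of range")
            }
        };
        $i
    }};
}

fn main() {
    println!("{}", FUNCS[function_index!(1)]()); // `function_index!(2)` would not compile
}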
@@ -36,33 +91,12 @@ where
     MSP: Fn() -> bool + Sync,
 {
     let from = index.get_version(wtxn)?.unwrap_or(db_version);
-    let upgrade_functions: &[&dyn UpgradeIndex] = &[
-        &V1_12_To_V1_12_3 {},
-        &V1_12_3_To_V1_13_0 {},
-        &V1_13_0_To_V1_13_1 {},
-        &V1_13_1_To_Latest_V1_13 {},
-        &Latest_V1_13_To_Latest_V1_14 {},
-        &Latest_V1_14_To_Latest_V1_15 {},
-        // This is the last upgrade function, it will be called when the index is up to date.
-        // any other upgrade function should be added before this one.
-        &ToCurrentNoOp {},
-    ];
-
-    let start = match from {
-        (1, 12, 0..=2) => 0,
-        (1, 12, 3..) => 1,
-        (1, 13, 0) => 2,
-        (1, 13, _) => 4,
-        (1, 14, _) => 5,
-        // We must handle the current version in the match because in case of a failure some index may have been upgraded but not other.
-        (1, 15, _) => 6,
-        (major, minor, patch) => {
-            return Err(InternalError::CannotUpgradeToVersion(major, minor, patch).into())
-        }
-    };
+    let start =
+        start(from).ok_or_else(|| InternalError::CannotUpgradeToVersion(from.0, from.1, from.2))?;
 
     enum UpgradeVersion {}
-    let upgrade_path = &upgrade_functions[start..];
+    let upgrade_path = &UPGRADE_FUNCTIONS[start..];
 
     let mut current_version = from;
     let mut regenerate_stats = false;
@@ -1,4 +1,6 @@
 use heed::RwTxn;
+use roaring::RoaringBitmap;
+use serde::Deserialize;
 
 use super::UpgradeIndex;
 use crate::progress::Progress;
@@ -26,3 +28,14 @@ impl UpgradeIndex for Latest_V1_14_To_Latest_V1_15 {
         (1, 15, 0)
     }
 }
+
+/// Parts of v1.15 `IndexEmbeddingConfig` that are relevant for upgrade to v1.16
+///
+/// # Warning
+///
+/// This object should not be rewritten to the DB, only read to get the name and `user_provided` roaring.
+#[derive(Debug, Deserialize)]
+pub struct IndexEmbeddingConfig {
+    pub name: String,
+    pub user_provided: RoaringBitmap,
+}
crates/milli/src/update/upgrade/v1_16.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
+use heed::types::{SerdeJson, Str};
+use heed::RwTxn;
+
+use super::UpgradeIndex;
+use crate::progress::Progress;
+use crate::vector::db::{EmbedderInfo, EmbeddingStatus};
+use crate::{Index, InternalError, Result};
+
+#[allow(non_camel_case_types)]
+pub(super) struct Latest_V1_15_To_V1_16_0();
+
+impl UpgradeIndex for Latest_V1_15_To_V1_16_0 {
+    fn upgrade(
+        &self,
+        wtxn: &mut RwTxn,
+        index: &Index,
+        _original: (u32, u32, u32),
+        _progress: Progress,
+    ) -> Result<bool> {
+        let v1_15_indexing_configs = index
+            .main
+            .remap_types::<Str, SerdeJson<Vec<super::v1_15::IndexEmbeddingConfig>>>()
+            .get(wtxn, crate::index::main_key::EMBEDDING_CONFIGS)?
+            .unwrap_or_default();
+
+        let embedders = index.embedding_configs();
+        for config in v1_15_indexing_configs {
+            let embedder_id = embedders.embedder_id(wtxn, &config.name)?.ok_or(
+                InternalError::DatabaseMissingEntry {
+                    db_name: crate::index::db_name::VECTOR_EMBEDDER_CATEGORY_ID,
+                    key: None,
+                },
+            )?;
+            let info = EmbedderInfo {
+                embedder_id,
+                // v1.15 used not to make a difference between `user_provided` and `! regenerate`.
+                embedding_status: EmbeddingStatus::from_user_provided(config.user_provided),
+            };
+            embedders.put_embedder_info(wtxn, &config.name, &info)?;
+        }
+
+        Ok(false)
+    }
+
+    fn target_version(&self) -> (u32, u32, u32) {
+        (1, 16, 0)
+    }
+}
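A hedged sketch of the status mapping this upgrade performs. Only `EmbeddingStatus::from_user_provided` appears in the diff; the internals below are an assumption for illustration:

use roaring::RoaringBitmap;

// Assumed shape: v1.15 treated "user provided" and "do not regenerate" as the
// same thing, so a single bitmap can seed both notions in the v1.16 status.
struct EmbeddingStatus {
    user_provided: RoaringBitmap,
    // docids whose `regenerate` flag differs from what `user_provided` implies;
    // empty right after the upgrade because of the v1.15 invariant above.
    regenerate_differs: RoaringBitmap,
}

impl EmbeddingStatus {
    fn from_user_provided(user_provided: RoaringBitmap) -> Self {
        Self { user_provided, regenerate_differs: RoaringBitmap::new() }
    }
}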