Merge branch 'main' into change-matches-position-phrase-search
@@ -17,7 +17,7 @@ bincode = "1.3.3"
 bstr = "1.9.1"
 bytemuck = { version = "1.16.1", features = ["extern_crate_alloc"] }
 byteorder = "1.5.0"
-charabia = { version = "0.9.0", default-features = false }
+charabia = { version = "0.9.1", default-features = false }
 concat-arrays = "0.1.2"
 crossbeam-channel = "0.5.13"
 deserr = "0.6.2"
@@ -80,7 +80,7 @@ hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls",
 tiktoken-rs = "0.5.9"
 liquid = "0.26.6"
 rhai = { version = "1.19.0", features = ["serde", "no_module", "no_custom_syntax", "no_time", "sync"] }
-arroy = "0.4.0"
+arroy = { git = "https://github.com/meilisearch/arroy/", rev = "2386594dfb009ce08821a925ccc89fb8e30bf73d" }
 rand = "0.8.5"
 tracing = "0.1.40"
 ureq = { version = "2.10.0", features = ["json"] }
@@ -106,6 +106,8 @@ all-tokenizations = [
     "charabia/greek",
     "charabia/khmer",
     "charabia/vietnamese",
+    "charabia/swedish-recomposition",
+    "charabia/german-segmentation",
 ]

 # Use POSIX semaphores instead of SysV semaphores in LMDB
@@ -138,6 +140,9 @@ khmer = ["charabia/khmer"]
 # allow vietnamese specialized tokenization
 vietnamese = ["charabia/vietnamese"]

+# allow german specialized tokenization
+german = ["charabia/german-segmentation"]
+
 # force swedish character recomposition
 swedish-recomposition = ["charabia/swedish-recomposition"]

@@ -258,6 +258,10 @@ only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and undersco
     },
     #[error("`.embedders.{embedder_name}.dimensions`: `dimensions` cannot be zero")]
    InvalidSettingsDimensions { embedder_name: String },
+    #[error(
+        "`.embedders.{embedder_name}.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors."
+    )]
+    InvalidDisableBinaryQuantization { embedder_name: String },
     #[error("`.embedders.{embedder_name}.documentTemplateMaxBytes`: `documentTemplateMaxBytes` cannot be zero")]
     InvalidSettingsDocumentTemplateMaxBytes { embedder_name: String },
     #[error("`.embedders.{embedder_name}.url`: could not parse `{url}`: {inner_error}")]
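Background for the new `InvalidDisableBinaryQuantization` variant: binary quantization keeps only one bit per dimension (the sign of each component), so the original f32 vectors cannot be reconstructed once quantized; that is why the setting can be enabled but never disabled. A minimal illustrative sketch of the idea (not code from this diff):

    // Illustrative only: keep the sign bit of every dimension and discard the
    // magnitudes. There is no inverse transform, hence "cannot be reverted".
    fn binary_quantize(vector: &[f32]) -> Vec<u8> {
        let mut bits = vec![0u8; (vector.len() + 7) / 8];
        for (i, &x) in vector.iter().enumerate() {
            if x >= 0.0 {
                bits[i / 8] |= 1 << (i % 8);
            }
        }
        bits
    }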
@@ -21,7 +21,7 @@ use crate::heed_codec::{BEU16StrCodec, FstSetCodec, StrBEU16Codec, StrRefCodec};
 use crate::order_by_map::OrderByMap;
 use crate::proximity::ProximityPrecision;
 use crate::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME;
-use crate::vector::{Embedding, EmbeddingConfig};
+use crate::vector::{ArroyWrapper, Embedding, EmbeddingConfig};
 use crate::{
     default_criteria, CboRoaringBitmapCodec, Criterion, DocumentId, ExternalDocumentsIds,
     FacetDistribution, FieldDistribution, FieldId, FieldIdMapMissingEntry, FieldIdWordCountCodec,
@@ -162,7 +162,7 @@ pub struct Index {
     /// Maps an embedder name to its id in the arroy store.
     pub embedder_category_id: Database<Str, U8>,
     /// Vector store based on arroy™.
-    pub vector_arroy: arroy::Database<arroy::distances::Angular>,
+    pub vector_arroy: arroy::Database<Unspecified>,

     /// Maps the document id to the document as an obkv store.
     pub(crate) documents: Database<BEU32, ObkvCodec>,
@@ -1614,15 +1614,17 @@ impl Index {
         &'a self,
         rtxn: &'a RoTxn<'a>,
         embedder_id: u8,
-    ) -> impl Iterator<Item = Result<arroy::Reader<'a, arroy::distances::Angular>>> + 'a {
+        quantized: bool,
+    ) -> impl Iterator<Item = Result<ArroyWrapper>> + 'a {
         crate::vector::arroy_db_range_for_embedder(embedder_id).map_while(move |k| {
-            arroy::Reader::open(rtxn, k, self.vector_arroy)
-                .map(Some)
-                .or_else(|e| match e {
-                    arroy::Error::MissingMetadata(_) => Ok(None),
-                    e => Err(e.into()),
-                })
-                .transpose()
+            let reader = ArroyWrapper::new(self.vector_arroy, k, quantized);
+            // Here we don't care about the dimensions, but we want to know if we can read
+            // in the database or if its metadata are missing because there is no document with that many vectors.
+            match reader.dimensions(rtxn) {
+                Ok(_) => Some(Ok(reader)),
+                Err(arroy::Error::MissingMetadata(_)) => None,
+                Err(e) => Some(Err(e.into())),
+            }
         })
     }

@@ -1644,32 +1646,18 @@ impl Index {
         docid: DocumentId,
     ) -> Result<BTreeMap<String, Vec<Embedding>>> {
         let mut res = BTreeMap::new();
-        for row in self.embedder_category_id.iter(rtxn)? {
-            let (embedder_name, embedder_id) = row?;
-            let embedder_id = (embedder_id as u16) << 8;
-            let mut embeddings = Vec::new();
-            'vectors: for i in 0..=u8::MAX {
-                let reader = arroy::Reader::open(rtxn, embedder_id | (i as u16), self.vector_arroy)
-                    .map(Some)
-                    .or_else(|e| match e {
-                        arroy::Error::MissingMetadata(_) => Ok(None),
-                        e => Err(e),
-                    })
-                    .transpose();
-
-                let Some(reader) = reader else {
-                    break 'vectors;
-                };
-
-                let embedding = reader?.item_vector(rtxn, docid)?;
-                if let Some(embedding) = embedding {
-                    embeddings.push(embedding)
-                } else {
-                    break 'vectors;
-                }
-            }
-
-            res.insert(embedder_name.to_owned(), embeddings);
+        let embedding_configs = self.embedding_configs(rtxn)?;
+        for config in embedding_configs {
+            let embedder_id = self.embedder_category_id.get(rtxn, &config.name)?.unwrap();
+            let embeddings = self
+                .arroy_readers(rtxn, embedder_id, config.config.quantized())
+                .map_while(|reader| {
+                    reader
+                        .and_then(|r| r.item_vector(rtxn, docid).map_err(|e| e.into()))
+                        .transpose()
+                })
+                .collect::<Result<Vec<_>>>()?;
+            res.insert(config.name.to_owned(), embeddings);
         }
         Ok(res)
     }
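For context on the `arroy_db_range_for_embedder` calls above: each embedder's `u8` category id owns a contiguous range of 256 `u16` arroy index keys, which is exactly what the removed code computed by hand with `(embedder_id as u16) << 8 | i`. A sketch of that scheme (the real helper lives in `crate::vector`; this is an inference from the removed code, not a copy of it):

    // Embedder `id` owns the arroy indexes `(id << 8) | 0` through `(id << 8) | 255`.
    fn arroy_db_range_for_embedder(id: u8) -> impl Iterator<Item = u16> {
        let start = (id as u16) << 8;
        start..=(start | u8::MAX as u16)
    }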
@@ -1,4 +1,5 @@
 use std::collections::{BTreeMap, HashMap, HashSet};
+use std::fmt::Display;
 use std::ops::ControlFlow;
 use std::{fmt, mem};

@@ -37,6 +38,15 @@ pub enum OrderBy {
     Count,
 }

+impl Display for OrderBy {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            OrderBy::Lexicographic => f.write_str("alphabetically"),
+            OrderBy::Count => f.write_str("by count"),
+        }
+    }
+}
+
 pub struct FacetDistribution<'a> {
     facets: Option<HashMap<String, OrderBy>>,
     candidates: Option<RoaringBitmap>,
@@ -100,7 +110,6 @@ impl<'a> FacetDistribution<'a> {
                 let mut lexicographic_distribution = BTreeMap::new();
                 let mut key_buffer: Vec<_> = field_id.to_be_bytes().to_vec();

-                let distribution_prelength = distribution.len();
                 let db = self.index.field_id_docid_facet_f64s;
                 for docid in candidates {
                     key_buffer.truncate(mem::size_of::<FieldId>());
@@ -113,23 +122,21 @@ impl<'a> FacetDistribution<'a> {
                     for result in iter {
                         let ((_, _, value), ()) = result?;
                         *lexicographic_distribution.entry(value.to_string()).or_insert(0) += 1;
-
-                        if lexicographic_distribution.len() - distribution_prelength
-                            == self.max_values_per_facet
-                        {
-                            break;
-                        }
                     }
                 }

-                distribution.extend(lexicographic_distribution);
+                distribution.extend(
+                    lexicographic_distribution
+                        .into_iter()
+                        .take(self.max_values_per_facet.saturating_sub(distribution.len())),
+                );
             }
             FacetType::String => {
                 let mut normalized_distribution = BTreeMap::new();
                 let mut key_buffer: Vec<_> = field_id.to_be_bytes().to_vec();

                 let db = self.index.field_id_docid_facet_strings;
-                'outer: for docid in candidates {
+                for docid in candidates {
                     key_buffer.truncate(mem::size_of::<FieldId>());
                     key_buffer.extend_from_slice(&docid.to_be_bytes());
                     let iter = db
@@ -144,14 +151,14 @@ impl<'a> FacetDistribution<'a> {
                             .or_insert_with(|| (original_value, 0));
                         *count += 1;

-                        if normalized_distribution.len() == self.max_values_per_facet {
-                            break 'outer;
-                        }
+                        // we'd like to break here if we have enough facet values, but we are collecting them by increasing docid,
+                        // so higher ranked facets could be in later docids
                     }
                 }

                 let iter = normalized_distribution
                     .into_iter()
+                    .take(self.max_values_per_facet.saturating_sub(distribution.len()))
                     .map(|(_normalized, (original, count))| (original.to_string(), count));
                 distribution.extend(iter);
             }
@@ -467,7 +474,7 @@ mod tests {
             .execute()
             .unwrap();

-        milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 1}}"###);
+        milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 2}}"###);

         let map = FacetDistribution::new(&txn, &index)
             .facets(iter::once(("colour", OrderBy::Count)))
@@ -12,7 +12,7 @@ use serde_json::Value;
 use super::facet_range_search;
 use crate::error::{Error, UserError};
 use crate::heed_codec::facet::{
-    FacetGroupKey, FacetGroupKeyCodec, FacetGroupValueCodec, OrderedF64Codec,
+    FacetGroupKey, FacetGroupKeyCodec, FacetGroupValue, FacetGroupValueCodec, OrderedF64Codec,
 };
 use crate::index::db_name::FACET_ID_STRING_DOCIDS;
 use crate::{
@@ -336,6 +336,24 @@ impl<'a> Filter<'a> {

                 return Ok(docids);
             }
+            Condition::StartsWith { keyword: _, word } => {
+                let value = crate::normalize_facet(word.value());
+                let base = FacetGroupKey { field_id, level: 0, left_bound: value.as_str() };
+                let docids = strings_db
+                    .prefix_iter(rtxn, &base)?
+                    .map(|result| -> Result<RoaringBitmap> {
+                        match result {
+                            Ok((_facet_group_key, FacetGroupValue { bitmap, .. })) => Ok(bitmap),
+                            Err(_e) => Err(InternalError::from(SerializationError::Decoding {
+                                db_name: Some(FACET_ID_STRING_DOCIDS),
+                            })
+                            .into()),
+                        }
+                    })
+                    .union()?;
+
+                return Ok(docids);
+            }
         };

         let mut output = RoaringBitmap::new();
@@ -190,7 +190,7 @@ impl<'a> Search<'a> {
             return Ok(return_keyword_results(self.limit, self.offset, keyword_results));
         };
         // no embedder, no semantic search
-        let Some(SemanticSearch { vector, embedder_name, embedder }) = semantic else {
+        let Some(SemanticSearch { vector, embedder_name, embedder, quantized }) = semantic else {
             return Ok(return_keyword_results(self.limit, self.offset, keyword_results));
         };

@@ -212,7 +212,7 @@ impl<'a> Search<'a> {
         };

         search.semantic =
-            Some(SemanticSearch { vector: Some(vector_query), embedder_name, embedder });
+            Some(SemanticSearch { vector: Some(vector_query), embedder_name, embedder, quantized });

         // TODO: would be better to have two distinct functions at this point
         let vector_results = search.execute()?;
@@ -32,6 +32,7 @@ pub struct SemanticSearch {
     vector: Option<Vec<f32>>,
     embedder_name: String,
     embedder: Arc<Embedder>,
+    quantized: bool,
 }

 pub struct Search<'a> {
@@ -89,9 +90,10 @@ impl<'a> Search<'a> {
         &mut self,
         embedder_name: String,
         embedder: Arc<Embedder>,
+        quantized: bool,
         vector: Option<Vec<f32>>,
     ) -> &mut Search<'a> {
-        self.semantic = Some(SemanticSearch { embedder_name, embedder, vector });
+        self.semantic = Some(SemanticSearch { embedder_name, embedder, quantized, vector });
         self
     }

@@ -206,7 +208,7 @@ impl<'a> Search<'a> {
             degraded,
             used_negative_operator,
         } = match self.semantic.as_ref() {
-            Some(SemanticSearch { vector: Some(vector), embedder_name, embedder }) => {
+            Some(SemanticSearch { vector: Some(vector), embedder_name, embedder, quantized }) => {
                 execute_vector_search(
                     &mut ctx,
                     vector,
@@ -219,6 +221,7 @@ impl<'a> Search<'a> {
                     self.limit,
                     embedder_name,
                     embedder,
+                    *quantized,
                     self.time_budget.clone(),
                     self.ranking_score_threshold,
                 )?
@@ -312,6 +312,7 @@ fn get_ranking_rules_for_placeholder_search<'ctx>(
     Ok(ranking_rules)
 }

+#[allow(clippy::too_many_arguments)]
 fn get_ranking_rules_for_vector<'ctx>(
     ctx: &SearchContext<'ctx>,
     sort_criteria: &Option<Vec<AscDesc>>,
@@ -320,6 +321,7 @@ fn get_ranking_rules_for_vector<'ctx>(
     target: &[f32],
     embedder_name: &str,
     embedder: &Embedder,
+    quantized: bool,
 ) -> Result<Vec<BoxRankingRule<'ctx, PlaceholderQuery>>> {
     // query graph search

@@ -347,6 +349,7 @@ fn get_ranking_rules_for_vector<'ctx>(
         limit_plus_offset,
         embedder_name,
         embedder,
+        quantized,
     )?;
     ranking_rules.push(Box::new(vector_sort));
     vector = true;
@@ -576,6 +579,7 @@ pub fn execute_vector_search(
     length: usize,
     embedder_name: &str,
     embedder: &Embedder,
+    quantized: bool,
     time_budget: TimeBudget,
     ranking_score_threshold: Option<f64>,
 ) -> Result<PartialSearchResult> {
@@ -591,6 +595,7 @@ pub fn execute_vector_search(
         vector,
         embedder_name,
         embedder,
+        quantized,
     )?;

     let mut placeholder_search_logger = logger::DefaultSearchLogger;
@@ -16,6 +16,7 @@ pub struct VectorSort<Q: RankingRuleQueryTrait> {
     limit: usize,
     distribution_shift: Option<DistributionShift>,
     embedder_index: u8,
+    quantized: bool,
 }

 impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
@@ -26,6 +27,7 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
         limit: usize,
         embedder_name: &str,
         embedder: &Embedder,
+        quantized: bool,
     ) -> Result<Self> {
         let embedder_index = ctx
             .index
@@ -41,6 +43,7 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
             limit,
             distribution_shift: embedder.distribution(),
             embedder_index,
+            quantized,
         })
     }

@@ -49,16 +52,12 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
         ctx: &mut SearchContext<'_>,
         vector_candidates: &RoaringBitmap,
     ) -> Result<()> {
-        let readers: std::result::Result<Vec<_>, _> =
-            ctx.index.arroy_readers(ctx.txn, self.embedder_index).collect();
-        let readers = readers?;
-
         let target = &self.target;
         let mut results = Vec::new();

-        for reader in readers.iter() {
+        for reader in ctx.index.arroy_readers(ctx.txn, self.embedder_index, self.quantized) {
             let nns_by_vector =
-                reader.nns_by_vector(ctx.txn, target, self.limit, None, Some(vector_candidates))?;
+                reader?.nns_by_vector(ctx.txn, target, self.limit, Some(vector_candidates))?;
             results.extend(nns_by_vector.into_iter());
         }
         results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
@@ -18,9 +18,11 @@ pub struct Similar<'a> {
     embedder_name: String,
     embedder: Arc<Embedder>,
     ranking_score_threshold: Option<f64>,
+    quantized: bool,
 }

 impl<'a> Similar<'a> {
+    #[allow(clippy::too_many_arguments)]
     pub fn new(
         id: DocumentId,
         offset: usize,
@@ -29,6 +31,7 @@ impl<'a> Similar<'a> {
         rtxn: &'a heed::RoTxn<'a>,
         embedder_name: String,
         embedder: Arc<Embedder>,
+        quantized: bool,
     ) -> Self {
         Self {
             id,
@@ -40,6 +43,7 @@ impl<'a> Similar<'a> {
             embedder_name,
             embedder,
             ranking_score_threshold: None,
+            quantized,
         }
     }

@@ -67,19 +71,13 @@ impl<'a> Similar<'a> {
             .get(self.rtxn, &self.embedder_name)?
             .ok_or_else(|| crate::UserError::InvalidEmbedder(self.embedder_name.to_owned()))?;

-        let readers: std::result::Result<Vec<_>, _> =
-            self.index.arroy_readers(self.rtxn, embedder_index).collect();
-
-        let readers = readers?;
-
         let mut results = Vec::new();

-        for reader in readers.iter() {
-            let nns_by_item = reader.nns_by_item(
+        for reader in self.index.arroy_readers(self.rtxn, embedder_index, self.quantized) {
+            let nns_by_item = reader?.nns_by_item(
                 self.rtxn,
                 self.id,
                 self.limit + self.offset + 1,
-                None,
                 Some(&universe),
             )?;
             if let Some(mut nns_by_item) = nns_by_item {
@@ -20,7 +20,7 @@ use crate::update::del_add::{DelAdd, KvReaderDelAdd, KvWriterDelAdd};
 use crate::update::settings::InnerIndexSettingsDiff;
 use crate::vector::error::{EmbedErrorKind, PossibleEmbeddingMistakes, UnusedVectorsDistribution};
 use crate::vector::parsed_vectors::{ParsedVectorsDiff, VectorState, RESERVED_VECTORS_FIELD_NAME};
-use crate::vector::settings::{EmbedderAction, ReindexAction};
+use crate::vector::settings::ReindexAction;
 use crate::vector::{Embedder, Embeddings};
 use crate::{try_split_array_at, DocumentId, FieldId, Result, ThreadPoolNoAbort};

@@ -208,65 +208,65 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(

     if reindex_vectors {
         for (name, action) in settings_diff.embedding_config_updates.iter() {
-            match action {
-                EmbedderAction::WriteBackToDocuments(_) => continue, // already deleted
-                EmbedderAction::Reindex(action) => {
-                    let Some((embedder_name, (embedder, prompt))) = configs.remove_entry(name)
-                    else {
-                        tracing::error!(embedder = name, "Requested embedder config not found");
-                        continue;
-                    };
+            if let Some(action) = action.reindex() {
+                let Some((embedder_name, (embedder, prompt, _quantized))) =
+                    configs.remove_entry(name)
+                else {
+                    tracing::error!(embedder = name, "Requested embedder config not found");
+                    continue;
+                };

-                    // (docid, _index) -> KvWriterDelAdd -> Vector
-                    let manual_vectors_writer = create_writer(
-                        indexer.chunk_compression_type,
-                        indexer.chunk_compression_level,
-                        tempfile::tempfile()?,
-                    );
+                // (docid, _index) -> KvWriterDelAdd -> Vector
+                let manual_vectors_writer = create_writer(
+                    indexer.chunk_compression_type,
+                    indexer.chunk_compression_level,
+                    tempfile::tempfile()?,
+                );

-                    // (docid) -> (prompt)
-                    let prompts_writer = create_writer(
-                        indexer.chunk_compression_type,
-                        indexer.chunk_compression_level,
-                        tempfile::tempfile()?,
-                    );
+                // (docid) -> (prompt)
+                let prompts_writer = create_writer(
+                    indexer.chunk_compression_type,
+                    indexer.chunk_compression_level,
+                    tempfile::tempfile()?,
+                );

-                    // (docid) -> ()
-                    let remove_vectors_writer = create_writer(
-                        indexer.chunk_compression_type,
-                        indexer.chunk_compression_level,
-                        tempfile::tempfile()?,
-                    );
+                // (docid) -> ()
+                let remove_vectors_writer = create_writer(
+                    indexer.chunk_compression_type,
+                    indexer.chunk_compression_level,
+                    tempfile::tempfile()?,
+                );

-                    let action = match action {
-                        ReindexAction::FullReindex => ExtractionAction::SettingsFullReindex,
-                        ReindexAction::RegeneratePrompts => {
-                            let Some((_, old_prompt)) = old_configs.get(name) else {
-                                tracing::error!(embedder = name, "Old embedder config not found");
-                                continue;
-                            };
+                let action = match action {
+                    ReindexAction::FullReindex => ExtractionAction::SettingsFullReindex,
+                    ReindexAction::RegeneratePrompts => {
+                        let Some((_, old_prompt, _quantized)) = old_configs.get(name) else {
+                            tracing::error!(embedder = name, "Old embedder config not found");
+                            continue;
+                        };

-                            ExtractionAction::SettingsRegeneratePrompts { old_prompt }
-                        }
-                    };
+                        ExtractionAction::SettingsRegeneratePrompts { old_prompt }
+                    }
+                };

-                    extractors.push(EmbedderVectorExtractor {
-                        embedder_name,
-                        embedder,
-                        prompt,
-                        prompts_writer,
-                        remove_vectors_writer,
-                        manual_vectors_writer,
-                        add_to_user_provided: RoaringBitmap::new(),
-                        action,
-                    });
-                }
+                extractors.push(EmbedderVectorExtractor {
+                    embedder_name,
+                    embedder,
+                    prompt,
+                    prompts_writer,
+                    remove_vectors_writer,
+                    manual_vectors_writer,
+                    add_to_user_provided: RoaringBitmap::new(),
+                    action,
+                });
+            } else {
+                continue;
             }
         }
     } else {
         // document operation

-        for (embedder_name, (embedder, prompt)) in configs.into_iter() {
+        for (embedder_name, (embedder, prompt, _quantized)) in configs.into_iter() {
             // (docid, _index) -> KvWriterDelAdd -> Vector
             let manual_vectors_writer = create_writer(
                 indexer.chunk_compression_type,
@@ -43,7 +43,7 @@ use crate::update::index_documents::parallel::ImmutableObkvs;
 use crate::update::{
     IndexerConfig, UpdateIndexingStep, WordPrefixDocids, WordPrefixIntegerDocids, WordsPrefixesFst,
 };
-use crate::vector::EmbeddingConfigs;
+use crate::vector::{ArroyWrapper, EmbeddingConfigs};
 use crate::{CboRoaringBitmapCodec, Index, Object, Result};

 static MERGED_DATABASE_COUNT: usize = 7;
@@ -679,6 +679,24 @@ where
         let number_of_documents = self.index.number_of_documents(self.wtxn)?;
         let mut rng = rand::rngs::StdRng::seed_from_u64(42);

+        // If an embedder wasn't used in the typedchunk but must be binary quantized
+        // we should insert it in `dimension`
+        for (name, action) in settings_diff.embedding_config_updates.iter() {
+            if action.is_being_quantized && !dimension.contains_key(name.as_str()) {
+                let index = self.index.embedder_category_id.get(self.wtxn, name)?.ok_or(
+                    InternalError::DatabaseMissingEntry {
+                        db_name: "embedder_category_id",
+                        key: None,
+                    },
+                )?;
+                let first_id = crate::vector::arroy_db_range_for_embedder(index).next().unwrap();
+                let reader =
+                    ArroyWrapper::new(self.index.vector_arroy, first_id, action.was_quantized);
+                let dim = reader.dimensions(self.wtxn)?;
+                dimension.insert(name.to_string(), dim);
+            }
+        }
+
         for (embedder_name, dimension) in dimension {
             let wtxn = &mut *self.wtxn;
             let vector_arroy = self.index.vector_arroy;
@@ -686,13 +704,23 @@ where
             let embedder_index = self.index.embedder_category_id.get(wtxn, &embedder_name)?.ok_or(
                 InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None },
             )?;
+            let embedder_config = settings_diff.embedding_config_updates.get(&embedder_name);
+            let was_quantized = settings_diff
+                .old
+                .embedding_configs
+                .get(&embedder_name)
+                .map_or(false, |conf| conf.2);
+            let is_quantizing = embedder_config.map_or(false, |action| action.is_being_quantized);

             pool.install(|| {
                 for k in crate::vector::arroy_db_range_for_embedder(embedder_index) {
-                    let writer = arroy::Writer::new(vector_arroy, k, dimension);
-                    if writer.need_build(wtxn)? {
-                        writer.build(wtxn, &mut rng, None)?;
-                    } else if writer.is_empty(wtxn)? {
+                    let mut writer = ArroyWrapper::new(vector_arroy, k, was_quantized);
+                    if is_quantizing {
+                        writer.quantize(wtxn, k, dimension)?;
+                    }
+                    if writer.need_build(wtxn, dimension)? {
+                        writer.build(wtxn, &mut rng, dimension)?;
+                    } else if writer.is_empty(wtxn, dimension)? {
                         break;
                     }
                 }
@@ -2746,6 +2774,7 @@ mod tests {
                 response: Setting::NotSet,
                 distribution: Setting::NotSet,
                 headers: Setting::NotSet,
+                binary_quantized: Setting::NotSet,
             }),
         );
         settings.set_embedder_settings(embedders);
@@ -2774,7 +2803,7 @@ mod tests {
             std::sync::Arc::new(crate::vector::Embedder::new(embedder.embedder_options).unwrap());
         let res = index
             .search(&rtxn)
-            .semantic(embedder_name, embedder, Some([0.0, 1.0, 2.0].to_vec()))
+            .semantic(embedder_name, embedder, false, Some([0.0, 1.0, 2.0].to_vec()))
             .execute()
             .unwrap();
         assert_eq!(res.documents_ids.len(), 3);
@@ -28,7 +28,8 @@ use crate::update::index_documents::GrenadParameters;
 use crate::update::settings::{InnerIndexSettings, InnerIndexSettingsDiff};
 use crate::update::{AvailableDocumentsIds, UpdateIndexingStep};
 use crate::vector::parsed_vectors::{ExplicitVectors, VectorOrArrayOfVectors};
-use crate::vector::settings::{EmbedderAction, WriteBackToDocuments};
+use crate::vector::settings::WriteBackToDocuments;
+use crate::vector::ArroyWrapper;
 use crate::{
     is_faceted_by, FieldDistribution, FieldId, FieldIdMapMissingEntry, FieldsIdsMap, Index, Result,
 };
@@ -989,19 +990,17 @@ impl<'a, 'i> Transform<'a, 'i> {
             None
         };

-        let readers: Result<
-            BTreeMap<&str, (Vec<arroy::Reader<'_, arroy::distances::Angular>>, &RoaringBitmap)>,
-        > = settings_diff
+        let readers: Result<BTreeMap<&str, (Vec<ArroyWrapper>, &RoaringBitmap)>> = settings_diff
             .embedding_config_updates
             .iter()
             .filter_map(|(name, action)| {
-                if let EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
-                    embedder_id,
-                    user_provided,
-                }) = action
+                if let Some(WriteBackToDocuments { embedder_id, user_provided }) =
+                    action.write_back()
                 {
-                    let readers: Result<Vec<_>> =
-                        self.index.arroy_readers(wtxn, *embedder_id).collect();
+                    let readers: Result<Vec<_>> = self
+                        .index
+                        .arroy_readers(wtxn, *embedder_id, action.was_quantized)
+                        .collect();
                     match readers {
                         Ok(readers) => Some(Ok((name.as_str(), (readers, user_provided)))),
                         Err(error) => Some(Err(error)),
@@ -1104,23 +1103,14 @@ impl<'a, 'i> Transform<'a, 'i> {
             }
         }

-        let mut writers = Vec::new();
-
         // delete all vectors from the embedders that need removal
         for (_, (readers, _)) in readers {
             for reader in readers {
-                let dimensions = reader.dimensions();
-                let arroy_index = reader.index();
-                drop(reader);
-                let writer = arroy::Writer::new(self.index.vector_arroy, arroy_index, dimensions);
-                writers.push(writer);
+                let dimensions = reader.dimensions(wtxn)?;
+                reader.clear(wtxn, dimensions)?;
             }
         }

-        for writer in writers {
-            writer.clear(wtxn)?;
-        }
-
         let grenad_params = GrenadParameters {
             chunk_compression_type: self.indexer_settings.chunk_compression_type,
             chunk_compression_level: self.indexer_settings.chunk_compression_level,
@@ -27,6 +27,7 @@ use crate::update::index_documents::helpers::{
     as_cloneable_grenad, keep_latest_obkv, try_split_array_at,
 };
 use crate::update::settings::InnerIndexSettingsDiff;
+use crate::vector::ArroyWrapper;
 use crate::{
     lat_lng_to_xyz, CboRoaringBitmapCodec, DocumentId, FieldId, GeoPoint, Index, InternalError,
     Result, SerializationError, U8StrStrCodec,
@@ -666,9 +667,14 @@ pub(crate) fn write_typed_chunk_into_index(
             let embedder_index = index.embedder_category_id.get(wtxn, &embedder_name)?.ok_or(
                 InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None },
             )?;
+            let binary_quantized = settings_diff
+                .old
+                .embedding_configs
+                .get(&embedder_name)
+                .map_or(false, |conf| conf.2);
             // FIXME: allow customizing distance
             let writers: Vec<_> = crate::vector::arroy_db_range_for_embedder(embedder_index)
-                .map(|k| arroy::Writer::new(index.vector_arroy, k, expected_dimension))
+                .map(|k| ArroyWrapper::new(index.vector_arroy, k, binary_quantized))
                 .collect();

             // remove vectors for docids we want them removed
@@ -679,7 +685,7 @@ pub(crate) fn write_typed_chunk_into_index(

                 for writer in &writers {
                     // Uses invariant: vectors are packed in the first writers.
-                    if !writer.del_item(wtxn, docid)? {
+                    if !writer.del_item(wtxn, expected_dimension, docid)? {
                         break;
                     }
                 }
@@ -711,7 +717,7 @@ pub(crate) fn write_typed_chunk_into_index(
                     )));
                 }
                 for (embedding, writer) in embeddings.iter().zip(&writers) {
-                    writer.add_item(wtxn, docid, embedding)?;
+                    writer.add_item(wtxn, expected_dimension, docid, embedding)?;
                 }
             }

@@ -734,7 +740,7 @@ pub(crate) fn write_typed_chunk_into_index(
                         break;
                     };
                     if candidate == vector {
-                        writer.del_item(wtxn, docid)?;
+                        writer.del_item(wtxn, expected_dimension, docid)?;
                         deleted_index = Some(index);
                     }
                 }
@@ -751,8 +757,13 @@ pub(crate) fn write_typed_chunk_into_index(
                     if let Some((last_index, vector)) = last_index_with_a_vector {
                         // unwrap: computed the index from the list of writers
                         let writer = writers.get(last_index).unwrap();
-                        writer.del_item(wtxn, docid)?;
-                        writers.get(deleted_index).unwrap().add_item(wtxn, docid, &vector)?;
+                        writer.del_item(wtxn, expected_dimension, docid)?;
+                        writers.get(deleted_index).unwrap().add_item(
+                            wtxn,
+                            expected_dimension,
+                            docid,
+                            &vector,
+                        )?;
                     }
                 }
             }
@@ -762,8 +773,8 @@ pub(crate) fn write_typed_chunk_into_index(

             // overflow was detected during vector extraction.
             for writer in &writers {
-                if !writer.contains_item(wtxn, docid)? {
-                    writer.add_item(wtxn, docid, &vector)?;
+                if !writer.contains_item(wtxn, expected_dimension, docid)? {
+                    writer.add_item(wtxn, expected_dimension, docid, &vector)?;
                     break;
                 }
             }
@@ -954,7 +954,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
         let old_configs = self.index.embedding_configs(self.wtxn)?;
         let remove_all: Result<BTreeMap<String, EmbedderAction>> = old_configs
             .into_iter()
-            .map(|IndexEmbeddingConfig { name, config: _, user_provided }| -> Result<_> {
+            .map(|IndexEmbeddingConfig { name, config, user_provided }| -> Result<_> {
                 let embedder_id =
                     self.index.embedder_category_id.get(self.wtxn, &name)?.ok_or(
                         crate::InternalError::DatabaseMissingEntry {
@@ -964,10 +964,10 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
                     )?;
                 Ok((
                     name,
-                    EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
-                        embedder_id,
-                        user_provided,
-                    }),
+                    EmbedderAction::with_write_back(
+                        WriteBackToDocuments { embedder_id, user_provided },
+                        config.quantized(),
+                    ),
                 ))
             })
             .collect();
@@ -1004,7 +1004,8 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
             match joined {
                 // updated config
                 EitherOrBoth::Both((name, (old, user_provided)), (_, new)) => {
-                    let settings_diff = SettingsDiff::from_settings(old, new);
+                    let was_quantized = old.binary_quantized.set().unwrap_or_default();
+                    let settings_diff = SettingsDiff::from_settings(&name, old, new)?;
                     match settings_diff {
                         SettingsDiff::Remove => {
                             tracing::debug!(
@@ -1023,25 +1024,29 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
                             self.index.embedder_category_id.delete(self.wtxn, &name)?;
                             embedder_actions.insert(
                                 name,
-                                EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
-                                    embedder_id,
-                                    user_provided,
-                                }),
+                                EmbedderAction::with_write_back(
+                                    WriteBackToDocuments { embedder_id, user_provided },
+                                    was_quantized,
+                                ),
                             );
                         }
-                        SettingsDiff::Reindex { action, updated_settings } => {
+                        SettingsDiff::Reindex { action, updated_settings, quantize } => {
                             tracing::debug!(
                                 embedder = name,
                                 user_provided = user_provided.len(),
                                 ?action,
                                 "reindex embedder"
                             );
-                            embedder_actions.insert(name.clone(), EmbedderAction::Reindex(action));
+                            embedder_actions.insert(
+                                name.clone(),
+                                EmbedderAction::with_reindex(action, was_quantized)
+                                    .with_is_being_quantized(quantize),
+                            );
                             let new =
                                 validate_embedding_settings(Setting::Set(updated_settings), &name)?;
                             updated_configs.insert(name, (new, user_provided));
                         }
-                        SettingsDiff::UpdateWithoutReindex { updated_settings } => {
+                        SettingsDiff::UpdateWithoutReindex { updated_settings, quantize } => {
                             tracing::debug!(
                                 embedder = name,
                                 user_provided = user_provided.len(),
@@ -1049,6 +1054,12 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
                             );
                             let new =
                                 validate_embedding_settings(Setting::Set(updated_settings), &name)?;
+                            if quantize {
+                                embedder_actions.insert(
+                                    name.clone(),
+                                    EmbedderAction::default().with_is_being_quantized(true),
+                                );
+                            }
                             updated_configs.insert(name, (new, user_provided));
                         }
                     }
@@ -1067,8 +1078,10 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
                         &mut setting,
                     );
                     let setting = validate_embedding_settings(setting, &name)?;
-                    embedder_actions
-                        .insert(name.clone(), EmbedderAction::Reindex(ReindexAction::FullReindex));
+                    embedder_actions.insert(
+                        name.clone(),
+                        EmbedderAction::with_reindex(ReindexAction::FullReindex, false),
+                    );
                     updated_configs.insert(name, (setting, RoaringBitmap::new()));
                 }
             }
@@ -1082,19 +1095,14 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
         let mut find_free_index =
             move || free_indices.find(|(_, free)| **free).map(|(index, _)| index as u8);
         for (name, action) in embedder_actions.iter() {
-            match action {
-                EmbedderAction::Reindex(ReindexAction::RegeneratePrompts) => {
-                    /* cannot be a new embedder, so has to have an id already */
-                }
-                EmbedderAction::Reindex(ReindexAction::FullReindex) => {
-                    if self.index.embedder_category_id.get(self.wtxn, name)?.is_none() {
-                        let id = find_free_index()
-                            .ok_or(UserError::TooManyEmbedders(updated_configs.len()))?;
-                        tracing::debug!(embedder = name, id, "assigning free id to new embedder");
-                        self.index.embedder_category_id.put(self.wtxn, name, &id)?;
-                    }
-                }
-                EmbedderAction::WriteBackToDocuments(_) => { /* already removed */ }
+            // ignore actions that are not possible for a new embedder
+            if matches!(action.reindex(), Some(ReindexAction::FullReindex))
+                && self.index.embedder_category_id.get(self.wtxn, name)?.is_none()
+            {
+                let id =
+                    find_free_index().ok_or(UserError::TooManyEmbedders(updated_configs.len()))?;
+                tracing::debug!(embedder = name, id, "assigning free id to new embedder");
+                self.index.embedder_category_id.put(self.wtxn, name, &id)?;
             }
         }
         let updated_configs: Vec<IndexEmbeddingConfig> = updated_configs
@@ -1277,7 +1285,11 @@ impl InnerIndexSettingsDiff {

         // if the user-defined searchables changed, then we need to reindex prompts.
         if cache_user_defined_searchables {
-            for (embedder_name, (config, _)) in new_settings.embedding_configs.inner_as_ref() {
+            for (embedder_name, (config, _, _quantized)) in
+                new_settings.embedding_configs.inner_as_ref()
+            {
+                let was_quantized =
+                    old_settings.embedding_configs.get(embedder_name).map_or(false, |conf| conf.2);
                 // skip embedders that don't use document templates
                 if !config.uses_document_template() {
                     continue;
@@ -1287,16 +1299,19 @@ impl InnerIndexSettingsDiff {
                 // this always makes the code clearer by explicitly handling the cases
                 match embedding_config_updates.entry(embedder_name.clone()) {
                     std::collections::btree_map::Entry::Vacant(entry) => {
-                        entry.insert(EmbedderAction::Reindex(ReindexAction::RegeneratePrompts));
+                        entry.insert(EmbedderAction::with_reindex(
+                            ReindexAction::RegeneratePrompts,
+                            was_quantized,
+                        ));
                     }
-                    std::collections::btree_map::Entry::Occupied(entry) => match entry.get() {
-                        EmbedderAction::WriteBackToDocuments(_) => { /* we are deleting this embedder, so no point in regeneration */
-                        }
-                        EmbedderAction::Reindex(ReindexAction::FullReindex) => { /* we are already fully reindexing */
-                        }
-                        EmbedderAction::Reindex(ReindexAction::RegeneratePrompts) => { /* we are already regenerating prompts */
-                        }
-                    },
+                    std::collections::btree_map::Entry::Occupied(entry) => {
+                        let EmbedderAction {
+                            was_quantized: _,
+                            is_being_quantized: _,
+                            write_back: _, // We are deleting this embedder, so no point in regeneration
+                            reindex: _, // We are already fully reindexing
+                        } = entry.get();
+                    }
                 };
             }
         }
@@ -1546,7 +1561,7 @@ fn embedders(embedding_configs: Vec<IndexEmbeddingConfig>) -> Result<EmbeddingCo
         .map(
             |IndexEmbeddingConfig {
                  name,
-                 config: EmbeddingConfig { embedder_options, prompt },
+                 config: EmbeddingConfig { embedder_options, prompt, quantized },
                  ..
              }| {
                 let prompt = Arc::new(prompt.try_into().map_err(crate::Error::from)?);
@@ -1556,7 +1571,7 @@ fn embedders(embedding_configs: Vec<IndexEmbeddingConfig>) -> Result<EmbeddingCo
                         .map_err(crate::vector::Error::from)
                         .map_err(crate::Error::from)?,
                 );
-                Ok((name, (embedder, prompt)))
+                Ok((name, (embedder, prompt, quantized.unwrap_or_default())))
             },
         )
         .collect();
@@ -1581,6 +1596,7 @@ fn validate_prompt(
             response,
             distribution,
             headers,
+            binary_quantized: binary_quantize,
         }) => {
             let max_bytes = match document_template_max_bytes.set() {
                 Some(max_bytes) => NonZeroUsize::new(max_bytes).ok_or_else(|| {
@@ -1613,6 +1629,7 @@ fn validate_prompt(
             response,
             distribution,
             headers,
+            binary_quantized: binary_quantize,
         }))
     }
     new => Ok(new),
@@ -1638,6 +1655,7 @@ pub fn validate_embedding_settings(
         response,
         distribution,
        headers,
+        binary_quantized: binary_quantize,
     } = settings;

     if let Some(0) = dimensions.set() {
@@ -1678,6 +1696,7 @@ pub fn validate_embedding_settings(
             response,
             distribution,
             headers,
+            binary_quantized: binary_quantize,
         }));
     };
     match inferred_source {
@@ -1779,6 +1798,7 @@ pub fn validate_embedding_settings(
         response,
        distribution,
         headers,
+        binary_quantized: binary_quantize,
     }))
 }

@@ -1,8 +1,12 @@
 use std::collections::HashMap;
 use std::sync::Arc;

+use arroy::distances::{Angular, BinaryQuantizedAngular};
+use arroy::ItemId;
 use deserr::{DeserializeError, Deserr};
+use heed::{RoTxn, RwTxn, Unspecified};
 use ordered_float::OrderedFloat;
+use roaring::RoaringBitmap;
 use serde::{Deserialize, Serialize};

 use self::error::{EmbedError, NewEmbedderError};
@@ -26,6 +30,171 @@ pub type Embedding = Vec<f32>;

 pub const REQUEST_PARALLELISM: usize = 40;

+pub struct ArroyWrapper {
+    quantized: bool,
+    index: u16,
+    database: arroy::Database<Unspecified>,
+}
+
+impl ArroyWrapper {
+    pub fn new(database: arroy::Database<Unspecified>, index: u16, quantized: bool) -> Self {
+        Self { database, index, quantized }
+    }
+
+    pub fn index(&self) -> u16 {
+        self.index
+    }
+
+    pub fn dimensions(&self, rtxn: &RoTxn) -> Result<usize, arroy::Error> {
+        if self.quantized {
+            Ok(arroy::Reader::open(rtxn, self.index, self.quantized_db())?.dimensions())
+        } else {
+            Ok(arroy::Reader::open(rtxn, self.index, self.angular_db())?.dimensions())
+        }
+    }
+
+    pub fn quantize(
+        &mut self,
+        wtxn: &mut RwTxn,
+        index: u16,
+        dimension: usize,
+    ) -> Result<(), arroy::Error> {
+        if !self.quantized {
+            let writer = arroy::Writer::new(self.angular_db(), index, dimension);
+            writer.prepare_changing_distance::<BinaryQuantizedAngular>(wtxn)?;
+            self.quantized = true;
+        }
+        Ok(())
+    }
+
+    pub fn need_build(&self, rtxn: &RoTxn, dimension: usize) -> Result<bool, arroy::Error> {
+        if self.quantized {
+            arroy::Writer::new(self.quantized_db(), self.index, dimension).need_build(rtxn)
+        } else {
+            arroy::Writer::new(self.angular_db(), self.index, dimension).need_build(rtxn)
+        }
+    }
+
+    pub fn build<R: rand::Rng + rand::SeedableRng>(
+        &self,
+        wtxn: &mut RwTxn,
+        rng: &mut R,
+        dimension: usize,
+    ) -> Result<(), arroy::Error> {
+        if self.quantized {
+            arroy::Writer::new(self.quantized_db(), self.index, dimension).build(wtxn, rng, None)
+        } else {
+            arroy::Writer::new(self.angular_db(), self.index, dimension).build(wtxn, rng, None)
+        }
+    }
+
+    pub fn add_item(
+        &self,
+        wtxn: &mut RwTxn,
+        dimension: usize,
+        item_id: arroy::ItemId,
+        vector: &[f32],
+    ) -> Result<(), arroy::Error> {
+        if self.quantized {
+            arroy::Writer::new(self.quantized_db(), self.index, dimension)
+                .add_item(wtxn, item_id, vector)
+        } else {
+            arroy::Writer::new(self.angular_db(), self.index, dimension)
+                .add_item(wtxn, item_id, vector)
+        }
+    }
+
+    pub fn del_item(
+        &self,
+        wtxn: &mut RwTxn,
+        dimension: usize,
+        item_id: arroy::ItemId,
+    ) -> Result<bool, arroy::Error> {
+        if self.quantized {
+            arroy::Writer::new(self.quantized_db(), self.index, dimension).del_item(wtxn, item_id)
+        } else {
+            arroy::Writer::new(self.angular_db(), self.index, dimension).del_item(wtxn, item_id)
+        }
+    }
+
+    pub fn clear(&self, wtxn: &mut RwTxn, dimension: usize) -> Result<(), arroy::Error> {
+        if self.quantized {
+            arroy::Writer::new(self.quantized_db(), self.index, dimension).clear(wtxn)
+        } else {
+            arroy::Writer::new(self.angular_db(), self.index, dimension).clear(wtxn)
+        }
+    }
+
+    pub fn is_empty(&self, rtxn: &RoTxn, dimension: usize) -> Result<bool, arroy::Error> {
+        if self.quantized {
+            arroy::Writer::new(self.quantized_db(), self.index, dimension).is_empty(rtxn)
+        } else {
+            arroy::Writer::new(self.angular_db(), self.index, dimension).is_empty(rtxn)
+        }
+    }
+
+    pub fn contains_item(
+        &self,
+        rtxn: &RoTxn,
+        dimension: usize,
+        item: arroy::ItemId,
+    ) -> Result<bool, arroy::Error> {
+        if self.quantized {
+            arroy::Writer::new(self.quantized_db(), self.index, dimension).contains_item(rtxn, item)
+        } else {
+            arroy::Writer::new(self.angular_db(), self.index, dimension).contains_item(rtxn, item)
+        }
+    }
+
+    pub fn nns_by_item(
+        &self,
+        rtxn: &RoTxn,
+        item: ItemId,
+        limit: usize,
+        filter: Option<&RoaringBitmap>,
+    ) -> Result<Option<Vec<(ItemId, f32)>>, arroy::Error> {
+        if self.quantized {
+            arroy::Reader::open(rtxn, self.index, self.quantized_db())?
+                .nns_by_item(rtxn, item, limit, None, None, filter)
+        } else {
+            arroy::Reader::open(rtxn, self.index, self.angular_db())?
+                .nns_by_item(rtxn, item, limit, None, None, filter)
+        }
+    }
+
+    pub fn nns_by_vector(
+        &self,
+        txn: &RoTxn,
+        item: &[f32],
+        limit: usize,
+        filter: Option<&RoaringBitmap>,
+    ) -> Result<Vec<(ItemId, f32)>, arroy::Error> {
+        if self.quantized {
+            arroy::Reader::open(txn, self.index, self.quantized_db())?
+                .nns_by_vector(txn, item, limit, None, None, filter)
+        } else {
+            arroy::Reader::open(txn, self.index, self.angular_db())?
+                .nns_by_vector(txn, item, limit, None, None, filter)
+        }
+    }
+
+    pub fn item_vector(&self, rtxn: &RoTxn, docid: u32) -> Result<Option<Vec<f32>>, arroy::Error> {
+        if self.quantized {
+            arroy::Reader::open(rtxn, self.index, self.quantized_db())?.item_vector(rtxn, docid)
+        } else {
+            arroy::Reader::open(rtxn, self.index, self.angular_db())?.item_vector(rtxn, docid)
+        }
+    }
+
+    fn angular_db(&self) -> arroy::Database<Angular> {
+        self.database.remap_data_type()
+    }
+
+    fn quantized_db(&self) -> arroy::Database<BinaryQuantizedAngular> {
+        self.database.remap_data_type()
+    }
+}
+
 /// One or multiple embeddings stored consecutively in a flat vector.
 pub struct Embeddings<F> {
     data: Vec<F>,
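A usage sketch for the wrapper introduced above: callers pick quantized or not once, at construction, and every read or write then dispatches to the right arroy distance type internally. (`index`, `rtxn`, `db_index`, and `query` are placeholders for an open index, a read transaction, an arroy index key, and a query vector.)

    // Sketch: nearest-neighbor search through the wrapper, with the
    // quantization choice made once at construction time.
    let reader = ArroyWrapper::new(index.vector_arroy, db_index, /* quantized */ true);
    let neighbors = reader.nns_by_vector(&rtxn, &query, 20, None)?;
    for (item_id, distance) in neighbors {
        println!("item {item_id} at distance {distance}");
    }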
@@ -124,62 +293,48 @@ pub struct EmbeddingConfig {
     pub embedder_options: EmbedderOptions,
     /// Document template
     pub prompt: PromptData,
+    /// If this embedder is binary quantized
+    pub quantized: Option<bool>,
     // TODO: add metrics and anything needed
 }

+impl EmbeddingConfig {
+    pub fn quantized(&self) -> bool {
+        self.quantized.unwrap_or_default()
+    }
+}
+
 /// Map of embedder configurations.
 ///
 /// Each configuration is mapped to a name.
 #[derive(Clone, Default)]
-pub struct EmbeddingConfigs(HashMap<String, (Arc<Embedder>, Arc<Prompt>)>);
+pub struct EmbeddingConfigs(HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)>);

 impl EmbeddingConfigs {
     /// Create the map from its internal component.s
-    pub fn new(data: HashMap<String, (Arc<Embedder>, Arc<Prompt>)>) -> Self {
+    pub fn new(data: HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)>) -> Self {
         Self(data)
     }

     /// Get an embedder configuration and template from its name.
-    pub fn get(&self, name: &str) -> Option<(Arc<Embedder>, Arc<Prompt>)> {
+    pub fn get(&self, name: &str) -> Option<(Arc<Embedder>, Arc<Prompt>, bool)> {
         self.0.get(name).cloned()
     }

-    /// Get the default embedder configuration, if any.
-    pub fn get_default(&self) -> Option<(Arc<Embedder>, Arc<Prompt>)> {
-        self.get(self.get_default_embedder_name())
-    }
-
-    pub fn inner_as_ref(&self) -> &HashMap<String, (Arc<Embedder>, Arc<Prompt>)> {
+    pub fn inner_as_ref(&self) -> &HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)> {
         &self.0
     }

-    pub fn into_inner(self) -> HashMap<String, (Arc<Embedder>, Arc<Prompt>)> {
+    pub fn into_inner(self) -> HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)> {
         self.0
     }
-
-    /// Get the name of the default embedder configuration.
-    ///
-    /// The default embedder is determined as follows:
-    ///
-    /// - If there is only one embedder, it is always the default.
-    /// - If there are multiple embedders and one of them is called `default`, then that one is the default embedder.
-    /// - In all other cases, there is no default embedder.
-    pub fn get_default_embedder_name(&self) -> &str {
-        let mut it = self.0.keys();
-        let first_name = it.next();
-        let second_name = it.next();
-        match (first_name, second_name) {
-            (None, _) => "default",
-            (Some(first), None) => first,
-            (Some(_), Some(_)) => "default",
-        }
-    }
 }

 impl IntoIterator for EmbeddingConfigs {
-    type Item = (String, (Arc<Embedder>, Arc<Prompt>));
+    type Item = (String, (Arc<Embedder>, Arc<Prompt>, bool));

-    type IntoIter = std::collections::hash_map::IntoIter<String, (Arc<Embedder>, Arc<Prompt>)>;
+    type IntoIter =
+        std::collections::hash_map::IntoIter<String, (Arc<Embedder>, Arc<Prompt>, bool)>;

     fn into_iter(self) -> Self::IntoIter {
         self.0.into_iter()
@@ -32,6 +32,9 @@ pub struct EmbeddingSettings {
     pub dimensions: Setting<usize>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
+    pub binary_quantized: Setting<bool>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default)]
     pub document_template: Setting<String>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
@@ -85,23 +88,63 @@ pub enum ReindexAction {

 pub enum SettingsDiff {
     Remove,
-    Reindex { action: ReindexAction, updated_settings: EmbeddingSettings },
-    UpdateWithoutReindex { updated_settings: EmbeddingSettings },
+    Reindex { action: ReindexAction, updated_settings: EmbeddingSettings, quantize: bool },
+    UpdateWithoutReindex { updated_settings: EmbeddingSettings, quantize: bool },
 }

-pub enum EmbedderAction {
-    WriteBackToDocuments(WriteBackToDocuments),
-    Reindex(ReindexAction),
+#[derive(Default, Debug)]
+pub struct EmbedderAction {
+    pub was_quantized: bool,
+    pub is_being_quantized: bool,
+    pub write_back: Option<WriteBackToDocuments>,
+    pub reindex: Option<ReindexAction>,
 }

+impl EmbedderAction {
+    pub fn is_being_quantized(&self) -> bool {
+        self.is_being_quantized
+    }
+
+    pub fn write_back(&self) -> Option<&WriteBackToDocuments> {
+        self.write_back.as_ref()
+    }
+
+    pub fn reindex(&self) -> Option<&ReindexAction> {
+        self.reindex.as_ref()
+    }
+
+    pub fn with_is_being_quantized(mut self, quantize: bool) -> Self {
+        self.is_being_quantized = quantize;
+        self
+    }
+
+    pub fn with_write_back(write_back: WriteBackToDocuments, was_quantized: bool) -> Self {
+        Self {
+            was_quantized,
+            is_being_quantized: false,
+            write_back: Some(write_back),
+            reindex: None,
+        }
+    }
+
+    pub fn with_reindex(reindex: ReindexAction, was_quantized: bool) -> Self {
+        Self { was_quantized, is_being_quantized: false, write_back: None, reindex: Some(reindex) }
+    }
+}
+
+#[derive(Debug)]
 pub struct WriteBackToDocuments {
     pub embedder_id: u8,
     pub user_provided: RoaringBitmap,
 }
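The builder-style struct above replaces the old two-variant enum so that quantization state can ride along with either action. For illustration, an embedder that must be fully reindexed and quantized in the same settings update would be described as follows (a sketch composed from the methods above, not code from this diff):

    let action = EmbedderAction::with_reindex(ReindexAction::FullReindex, /* was_quantized */ false)
        .with_is_being_quantized(true);
    assert!(action.is_being_quantized());
    assert!(action.write_back().is_none());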

 impl SettingsDiff {
-    pub fn from_settings(old: EmbeddingSettings, new: Setting<EmbeddingSettings>) -> Self {
-        match new {
+    pub fn from_settings(
+        embedder_name: &str,
+        old: EmbeddingSettings,
+        new: Setting<EmbeddingSettings>,
+    ) -> Result<Self, UserError> {
+        let ret = match new {
             Setting::Set(new) => {
                 let EmbeddingSettings {
                     mut source,
@@ -116,6 +159,7 @@ impl SettingsDiff {
                     mut distribution,
                     mut headers,
                     mut document_template_max_bytes,
+                    binary_quantized: mut binary_quantize,
                 } = old;

                 let EmbeddingSettings {
@@ -131,8 +175,17 @@ impl SettingsDiff {
                     distribution: new_distribution,
                     headers: new_headers,
                     document_template_max_bytes: new_document_template_max_bytes,
+                    binary_quantized: new_binary_quantize,
                 } = new;

+                if matches!(binary_quantize, Setting::Set(true))
+                    && matches!(new_binary_quantize, Setting::Set(false))
+                {
+                    return Err(UserError::InvalidDisableBinaryQuantization {
+                        embedder_name: embedder_name.to_string(),
+                    });
+                }
+
                 let mut reindex_action = None;

                 // **Warning**: do not use short-circuiting || here, we want all these operations applied
@@ -172,6 +225,7 @@ impl SettingsDiff {
                         _ => {}
                     }
                 }
+                let binary_quantize_changed = binary_quantize.apply(new_binary_quantize);
                 if url.apply(new_url) {
                     match source {
                         // do not regenerate on an url change in OpenAI
@@ -231,16 +285,27 @@ impl SettingsDiff {
                     distribution,
                     headers,
                     document_template_max_bytes,
+                    binary_quantized: binary_quantize,
                 };

                 match reindex_action {
-                    Some(action) => Self::Reindex { action, updated_settings },
-                    None => Self::UpdateWithoutReindex { updated_settings },
+                    Some(action) => Self::Reindex {
+                        action,
+                        updated_settings,
+                        quantize: binary_quantize_changed,
+                    },
+                    None => Self::UpdateWithoutReindex {
+                        updated_settings,
+                        quantize: binary_quantize_changed,
+                    },
                 }
             }
             Setting::Reset => Self::Remove,
-            Setting::NotSet => Self::UpdateWithoutReindex { updated_settings: old },
-        }
+            Setting::NotSet => {
+                Self::UpdateWithoutReindex { updated_settings: old, quantize: false }
+            }
+        };
+        Ok(ret)
     }
 }

@@ -486,7 +551,7 @@ impl std::fmt::Display for EmbedderSource {

 impl From<EmbeddingConfig> for EmbeddingSettings {
     fn from(value: EmbeddingConfig) -> Self {
-        let EmbeddingConfig { embedder_options, prompt } = value;
+        let EmbeddingConfig { embedder_options, prompt, quantized } = value;
         let document_template_max_bytes =
             Setting::Set(prompt.max_bytes.unwrap_or(default_max_bytes()).get());
         match embedder_options {
@@ -507,6 +572,7 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
                 response: Setting::NotSet,
                 headers: Setting::NotSet,
                 distribution: Setting::some_or_not_set(distribution),
+                binary_quantized: Setting::some_or_not_set(quantized),
             },
             super::EmbedderOptions::OpenAi(super::openai::EmbedderOptions {
                 url,
@@ -527,6 +593,7 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
                 response: Setting::NotSet,
                 headers: Setting::NotSet,
                 distribution: Setting::some_or_not_set(distribution),
+                binary_quantized: Setting::some_or_not_set(quantized),
             },
             super::EmbedderOptions::Ollama(super::ollama::EmbedderOptions {
                 embedding_model,
@@ -547,6 +614,7 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
                 response: Setting::NotSet,
                 headers: Setting::NotSet,
                 distribution: Setting::some_or_not_set(distribution),
+                binary_quantized: Setting::some_or_not_set(quantized),
             },
             super::EmbedderOptions::UserProvided(super::manual::EmbedderOptions {
                 dimensions,
@@ -564,6 +632,7 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
                 response: Setting::NotSet,
                 headers: Setting::NotSet,
                 distribution: Setting::some_or_not_set(distribution),
+                binary_quantized: Setting::some_or_not_set(quantized),
             },
             super::EmbedderOptions::Rest(super::rest::EmbedderOptions {
                 api_key,
@@ -586,6 +655,7 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
                 response: Setting::Set(response),
                 distribution: Setting::some_or_not_set(distribution),
                 headers: Setting::Set(headers),
+                binary_quantized: Setting::some_or_not_set(quantized),
             },
         }
     }
@@ -607,8 +677,11 @@ impl From<EmbeddingSettings> for EmbeddingConfig {
             response,
             distribution,
             headers,
+            binary_quantized,
         } = value;

+        this.quantized = binary_quantized.set();
+
         if let Some(source) = source.set() {
             match source {
                 EmbedderSource::OpenAi => {