Make clippy happy

This commit is contained in:
Clément Renault
2025-08-12 16:20:32 +02:00
committed by Louis Dureuil
parent fb68f1241c
commit 4645813ea8
3 changed files with 46 additions and 59 deletions

View File

@@ -5,7 +5,6 @@ use std::sync::atomic::{AtomicU32, AtomicUsize, Ordering};
 use std::sync::{Arc, RwLock};
 use std::time::{Duration, Instant};
-use enum_iterator::Sequence;
 use indexmap::IndexMap;
 use itertools::Itertools;
 use serde::Serialize;

View File

@@ -485,7 +485,7 @@ where
         // If an embedder wasn't used in the typedchunk but must be binary quantized
         // we should insert it in `dimension`
-        let index_version = self.index.get_version(&self.wtxn)?.unwrap();
+        let index_version = self.index.get_version(self.wtxn)?.unwrap();
         for (name, action) in settings_diff.embedding_config_updates.iter() {
             if action.is_being_quantized && !dimension.contains_key(name.as_str()) {
                 let index = self.index.embedding_configs().embedder_id(self.wtxn, name)?.ok_or(

View File

@@ -129,14 +129,12 @@ impl VectorStore {
                 self._arroy_items_in_store(rtxn, self.arroy_angular_db(), store_id, with_items)
                     .map_err(Into::into)
             }
-        } else {
-            if self.quantized {
-                self._items_in_store(rtxn, self.quantized_db(), store_id, with_items)
-                    .map_err(Into::into)
-            } else {
-                self._items_in_store(rtxn, self.angular_db(), store_id, with_items)
-                    .map_err(Into::into)
-            }
+        } else if self.quantized {
+            self._items_in_store(rtxn, self.quantized_db(), store_id, with_items)
+                .map_err(Into::into)
+        } else {
+            self._items_in_store(rtxn, self.angular_db(), store_id, with_items)
+                .map_err(Into::into)
         }
     }
@@ -193,20 +191,18 @@ impl VectorStore {
                     .transpose()?
                     .map(|reader| reader.dimensions()))
             }
-        } else {
-            if self.quantized {
-                Ok(self
-                    .readers(rtxn, self.quantized_db())
-                    .next()
-                    .transpose()?
-                    .map(|reader| reader.dimensions()))
-            } else {
-                Ok(self
-                    .readers(rtxn, self.angular_db())
-                    .next()
-                    .transpose()?
-                    .map(|reader| reader.dimensions()))
-            }
+        } else if self.quantized {
+            Ok(self
+                .readers(rtxn, self.quantized_db())
+                .next()
+                .transpose()?
+                .map(|reader| reader.dimensions()))
+        } else {
+            Ok(self
+                .readers(rtxn, self.angular_db())
+                .next()
+                .transpose()?
+                .map(|reader| reader.dimensions()))
         }
     }
@@ -550,20 +546,18 @@ impl VectorStore {
                 }
                 writer.contains_item(rtxn, item)?
             }
-        } else {
-            if self.quantized {
-                let writer = hannoy::Writer::new(self.quantized_db(), index, dimension);
-                if writer.is_empty(rtxn)? {
-                    continue;
-                }
-                writer.contains_item(rtxn, item)?
-            } else {
-                let writer = hannoy::Writer::new(self.angular_db(), index, dimension);
-                if writer.is_empty(rtxn)? {
-                    continue;
-                }
-                writer.contains_item(rtxn, item)?
-            }
+        } else if self.quantized {
+            let writer = hannoy::Writer::new(self.quantized_db(), index, dimension);
+            if writer.is_empty(rtxn)? {
+                continue;
+            }
+            writer.contains_item(rtxn, item)?
+        } else {
+            let writer = hannoy::Writer::new(self.angular_db(), index, dimension);
+            if writer.is_empty(rtxn)? {
+                continue;
+            }
+            writer.contains_item(rtxn, item)?
         };
         if contains {
             return Ok(contains);
@@ -587,13 +581,11 @@ impl VectorStore {
                 self._arroy_nns_by_item(rtxn, self.arroy_angular_db(), item, limit, filter)
                     .map_err(Into::into)
             }
-        } else {
-            if self.quantized {
-                self._nns_by_item(rtxn, self.quantized_db(), item, limit, filter)
-                    .map_err(Into::into)
-            } else {
-                self._nns_by_item(rtxn, self.angular_db(), item, limit, filter).map_err(Into::into)
-            }
+        } else if self.quantized {
+            self._nns_by_item(rtxn, self.quantized_db(), item, limit, filter)
+                .map_err(Into::into)
+        } else {
+            self._nns_by_item(rtxn, self.angular_db(), item, limit, filter).map_err(Into::into)
         }
     }
@@ -669,14 +661,12 @@ impl VectorStore {
                 self._arroy_nns_by_vector(rtxn, self.arroy_angular_db(), vector, limit, filter)
                     .map_err(Into::into)
             }
-        } else {
-            if self.quantized {
-                self._nns_by_vector(rtxn, self.quantized_db(), vector, limit, filter)
-                    .map_err(Into::into)
-            } else {
-                self._nns_by_vector(rtxn, self.angular_db(), vector, limit, filter)
-                    .map_err(Into::into)
-            }
+        } else if self.quantized {
+            self._nns_by_vector(rtxn, self.quantized_db(), vector, limit, filter)
+                .map_err(Into::into)
+        } else {
+            self._nns_by_vector(rtxn, self.angular_db(), vector, limit, filter)
+                .map_err(Into::into)
         }
     }
@@ -754,18 +744,16 @@ impl VectorStore {
                     }
                 }
             }
-        } else {
-            if self.quantized {
-                for reader in self.readers(rtxn, self.quantized_db()) {
-                    if let Some(vec) = reader?.item_vector(rtxn, item_id)? {
-                        vectors.push(vec);
-                    }
-                }
-            } else {
-                for reader in self.readers(rtxn, self.angular_db()) {
-                    if let Some(vec) = reader?.item_vector(rtxn, item_id)? {
-                        vectors.push(vec);
-                    }
-                }
-            }
+        } else if self.quantized {
+            for reader in self.readers(rtxn, self.quantized_db()) {
+                if let Some(vec) = reader?.item_vector(rtxn, item_id)? {
+                    vectors.push(vec);
+                }
+            }
+        } else {
+            for reader in self.readers(rtxn, self.angular_db()) {
+                if let Some(vec) = reader?.item_vector(rtxn, item_id)? {
+                    vectors.push(vec);
+                }
+            }
         }