Compare commits

...

6 Commits

11 changed files with 379 additions and 476 deletions

View File

@@ -1,16 +1,14 @@
use std::fs::File;
use std::marker::PhantomData;
use std::sync::atomic::Ordering;
use crossbeam_channel::{IntoIter, Receiver, SendError, Sender};
use grenad::Merger;
use heed::types::Bytes;
use memmap2::Mmap;
use super::extract::FacetKind;
use super::extract::{FacetKind, HashMapMerger};
use super::StdResult;
use crate::index::main_key::{DOCUMENTS_IDS_KEY, WORDS_FST_KEY};
use crate::update::new::KvReaderFieldId;
use crate::update::MergeDeladdCboRoaringBitmaps;
use crate::{DocumentId, Index};
/// The capacity of the channel is currently expressed in number of messages.
@@ -180,22 +178,22 @@ impl IntoIterator for WriterReceiver {
pub struct MergerSender {
sender: Sender<WriterOperation>,
/// The number of messages we send in total in the channel.
send_count: std::cell::Cell<usize>,
send_count: std::sync::atomic::AtomicUsize,
/// The number of times we sent something in a channel that was full.
writer_contentious_count: std::cell::Cell<usize>,
writer_contentious_count: std::sync::atomic::AtomicUsize,
/// The number of times we sent something in a channel that was empty.
merger_contentious_count: std::cell::Cell<usize>,
merger_contentious_count: std::sync::atomic::AtomicUsize,
}
impl Drop for MergerSender {
fn drop(&mut self) {
eprintln!(
"Merger channel stats: {} sends, {} writer contentions ({}%), {} merger contentions ({}%)",
self.send_count.get(),
self.writer_contentious_count.get(),
(self.writer_contentious_count.get() as f32 / self.send_count.get() as f32) * 100.0,
self.merger_contentious_count.get(),
(self.merger_contentious_count.get() as f32 / self.send_count.get() as f32) * 100.0
self.send_count.load(Ordering::SeqCst),
self.writer_contentious_count.load(Ordering::SeqCst),
(self.writer_contentious_count.load(Ordering::SeqCst) as f32 / self.send_count.load(Ordering::SeqCst) as f32) * 100.0,
self.merger_contentious_count.load(Ordering::SeqCst),
(self.merger_contentious_count.load(Ordering::SeqCst) as f32 / self.send_count.load(Ordering::SeqCst) as f32) * 100.0
)
}
}
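// Note: replacing Cell<usize> with AtomicUsize makes these counters Sync, so
// the MergerSender can be shared across threads. SeqCst is the conservative
// ordering here; since the counters are only read once, in Drop, a relaxed
// ordering would most likely suffice.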
@@ -230,12 +228,12 @@ impl MergerSender {
fn send(&self, op: WriterOperation) -> StdResult<(), SendError<()>> {
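// A full channel means the LMDB writer cannot keep up and this send is about
// to block; an empty channel means the writer is starving and the merger side
// is the bottleneck. Both counters feed the stats printed in the Drop impl
// above.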
if self.sender.is_full() {
self.writer_contentious_count.set(self.writer_contentious_count.get() + 1);
self.writer_contentious_count.fetch_add(1, Ordering::SeqCst);
}
if self.sender.is_empty() {
self.merger_contentious_count.set(self.merger_contentious_count.get() + 1);
self.merger_contentious_count.fetch_add(1, Ordering::SeqCst);
}
self.send_count.set(self.send_count.get() + 1);
self.send_count.fetch_add(1, Ordering::SeqCst);
match self.sender.send(op) {
Ok(()) => Ok(()),
Err(SendError(_)) => Err(SendError(())),
@@ -279,7 +277,7 @@ pub trait DatabaseType {
}
pub trait MergerOperationType {
fn new_merger_operation(merger: Merger<File, MergeDeladdCboRoaringBitmaps>) -> MergerOperation;
fn new_merger_operation(merger: HashMapMerger) -> MergerOperation;
}
impl DatabaseType for ExactWordDocids {
@@ -287,7 +285,7 @@ impl DatabaseType for ExactWordDocids {
}
impl MergerOperationType for ExactWordDocids {
fn new_merger_operation(merger: Merger<File, MergeDeladdCboRoaringBitmaps>) -> MergerOperation {
fn new_merger_operation(merger: HashMapMerger) -> MergerOperation {
MergerOperation::ExactWordDocidsMerger(merger)
}
}
@@ -297,7 +295,7 @@ impl DatabaseType for FidWordCountDocids {
}
impl MergerOperationType for FidWordCountDocids {
fn new_merger_operation(merger: Merger<File, MergeDeladdCboRoaringBitmaps>) -> MergerOperation {
fn new_merger_operation(merger: HashMapMerger) -> MergerOperation {
MergerOperation::FidWordCountDocidsMerger(merger)
}
}
@@ -307,7 +305,7 @@ impl DatabaseType for WordDocids {
}
impl MergerOperationType for WordDocids {
fn new_merger_operation(merger: Merger<File, MergeDeladdCboRoaringBitmaps>) -> MergerOperation {
fn new_merger_operation(merger: HashMapMerger) -> MergerOperation {
MergerOperation::WordDocidsMerger(merger)
}
}
@@ -317,7 +315,7 @@ impl DatabaseType for WordFidDocids {
}
impl MergerOperationType for WordFidDocids {
fn new_merger_operation(merger: Merger<File, MergeDeladdCboRoaringBitmaps>) -> MergerOperation {
fn new_merger_operation(merger: HashMapMerger) -> MergerOperation {
MergerOperation::WordFidDocidsMerger(merger)
}
}
@@ -327,7 +325,7 @@ impl DatabaseType for WordPairProximityDocids {
}
impl MergerOperationType for WordPairProximityDocids {
fn new_merger_operation(merger: Merger<File, MergeDeladdCboRoaringBitmaps>) -> MergerOperation {
fn new_merger_operation(merger: HashMapMerger) -> MergerOperation {
MergerOperation::WordPairProximityDocidsMerger(merger)
}
}
@@ -337,13 +335,13 @@ impl DatabaseType for WordPositionDocids {
}
impl MergerOperationType for WordPositionDocids {
fn new_merger_operation(merger: Merger<File, MergeDeladdCboRoaringBitmaps>) -> MergerOperation {
fn new_merger_operation(merger: HashMapMerger) -> MergerOperation {
MergerOperation::WordPositionDocidsMerger(merger)
}
}
impl MergerOperationType for FacetDocids {
fn new_merger_operation(merger: Merger<File, MergeDeladdCboRoaringBitmaps>) -> MergerOperation {
fn new_merger_operation(merger: HashMapMerger) -> MergerOperation {
MergerOperation::FacetDocidsMerger(merger)
}
}
@@ -442,13 +440,13 @@ impl DocumentsSender<'_> {
}
pub enum MergerOperation {
ExactWordDocidsMerger(Merger<File, MergeDeladdCboRoaringBitmaps>),
FidWordCountDocidsMerger(Merger<File, MergeDeladdCboRoaringBitmaps>),
WordDocidsMerger(Merger<File, MergeDeladdCboRoaringBitmaps>),
WordFidDocidsMerger(Merger<File, MergeDeladdCboRoaringBitmaps>),
WordPairProximityDocidsMerger(Merger<File, MergeDeladdCboRoaringBitmaps>),
WordPositionDocidsMerger(Merger<File, MergeDeladdCboRoaringBitmaps>),
FacetDocidsMerger(Merger<File, MergeDeladdCboRoaringBitmaps>),
ExactWordDocidsMerger(HashMapMerger),
FidWordCountDocidsMerger(HashMapMerger),
WordDocidsMerger(HashMapMerger),
WordFidDocidsMerger(HashMapMerger),
WordPairProximityDocidsMerger(HashMapMerger),
WordPositionDocidsMerger(HashMapMerger),
FacetDocidsMerger(HashMapMerger),
DeleteDocument { docid: DocumentId },
InsertDocument { docid: DocumentId, document: Box<KvReaderFieldId> },
FinishedDocument,
@@ -474,7 +472,7 @@ impl ExtractorSender {
pub fn send_searchable<D: MergerOperationType>(
&self,
merger: Merger<File, MergeDeladdCboRoaringBitmaps>,
merger: HashMapMerger,
) -> StdResult<(), SendError<()>> {
match self.0.send(D::new_merger_operation(merger)) {
Ok(()) => Ok(()),

View File

@@ -1,183 +1,96 @@
use std::mem;
use std::num::NonZeroUsize;
use std::collections::HashMap;
use grenad::{MergeFunction, Sorter};
use roaring::RoaringBitmap;
use smallvec::SmallVec;
use super::lru::Lru;
use crate::update::del_add::{DelAdd, KvWriterDelAdd};
use crate::CboRoaringBitmapCodec;
const KEY_SIZE: usize = 12;
pub const KEY_SIZE: usize = 12;
#[derive(Debug)]
pub struct CboCachedSorter<MF> {
cache: Lru<SmallVec<[u8; KEY_SIZE]>, DelAddRoaringBitmap>,
sorter: Sorter<MF>,
deladd_buffer: Vec<u8>,
cbo_buffer: Vec<u8>,
pub struct CboCachedSorter {
cache: HashMap<SmallVec<[u8; KEY_SIZE]>, DelAddRoaringBitmap>,
total_insertions: usize,
fitted_in_key: usize,
}
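// total_insertions and fitted_in_key only feed the stats printed by
// into_sorter(): keys of at most KEY_SIZE (12) bytes are stored inline in the
// SmallVec and avoid a heap allocation.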
impl<MF> CboCachedSorter<MF> {
pub fn new(cap: NonZeroUsize, sorter: Sorter<MF>) -> Self {
CboCachedSorter {
cache: Lru::new(cap),
sorter,
deladd_buffer: Vec::new(),
cbo_buffer: Vec::new(),
total_insertions: 0,
fitted_in_key: 0,
}
impl CboCachedSorter {
pub fn new() -> Self {
CboCachedSorter { cache: HashMap::new(), total_insertions: 0, fitted_in_key: 0 }
}
}
impl<MF: MergeFunction> CboCachedSorter<MF> {
pub fn insert_del_u32(&mut self, key: &[u8], n: u32) -> grenad::Result<(), MF::Error> {
impl CboCachedSorter {
pub fn insert_del_u32(&mut self, key: &[u8], n: u32) {
match self.cache.get_mut(key) {
Some(DelAddRoaringBitmap { del, add: _ }) => {
del.get_or_insert_with(PushOptimizedBitmap::default).insert(n);
del.get_or_insert_with(RoaringBitmap::default).insert(n);
}
None => {
self.total_insertions += 1;
self.fitted_in_key += (key.len() <= KEY_SIZE) as usize;
let value = DelAddRoaringBitmap::new_del_u32(n);
if let Some((key, deladd)) = self.cache.push(key.into(), value) {
self.write_entry(key, deladd)?;
}
assert!(self.cache.insert(key.into(), value).is_none());
}
}
Ok(())
}
pub fn insert_del(
&mut self,
key: &[u8],
bitmap: RoaringBitmap,
) -> grenad::Result<(), MF::Error> {
pub fn insert_del(&mut self, key: &[u8], bitmap: RoaringBitmap) {
match self.cache.get_mut(key) {
Some(DelAddRoaringBitmap { del, add: _ }) => {
del.get_or_insert_with(PushOptimizedBitmap::default).union_with_bitmap(bitmap);
*del.get_or_insert_with(RoaringBitmap::default) |= bitmap;
}
None => {
self.total_insertions += 1;
self.fitted_in_key += (key.len() <= KEY_SIZE) as usize;
let value = DelAddRoaringBitmap::new_del(bitmap);
if let Some((key, deladd)) = self.cache.push(key.into(), value) {
self.write_entry(key, deladd)?;
}
assert!(self.cache.insert(key.into(), value).is_none());
}
}
Ok(())
}
pub fn insert_add_u32(&mut self, key: &[u8], n: u32) -> grenad::Result<(), MF::Error> {
pub fn insert_add_u32(&mut self, key: &[u8], n: u32) {
match self.cache.get_mut(key) {
Some(DelAddRoaringBitmap { del: _, add }) => {
add.get_or_insert_with(PushOptimizedBitmap::default).insert(n);
add.get_or_insert_with(RoaringBitmap::default).insert(n);
}
None => {
self.total_insertions += 1;
self.fitted_in_key += (key.len() <= KEY_SIZE) as usize;
let value = DelAddRoaringBitmap::new_add_u32(n);
if let Some((key, deladd)) = self.cache.push(key.into(), value) {
self.write_entry(key, deladd)?;
}
assert!(self.cache.insert(key.into(), value).is_none());
}
}
Ok(())
}
pub fn insert_add(
&mut self,
key: &[u8],
bitmap: RoaringBitmap,
) -> grenad::Result<(), MF::Error> {
pub fn insert_add(&mut self, key: &[u8], bitmap: RoaringBitmap) {
match self.cache.get_mut(key) {
Some(DelAddRoaringBitmap { del: _, add }) => {
add.get_or_insert_with(PushOptimizedBitmap::default).union_with_bitmap(bitmap);
*add.get_or_insert_with(RoaringBitmap::default) |= bitmap;
}
None => {
self.total_insertions += 1;
self.fitted_in_key += (key.len() <= KEY_SIZE) as usize;
let value = DelAddRoaringBitmap::new_add(bitmap);
if let Some((key, deladd)) = self.cache.push(key.into(), value) {
self.write_entry(key, deladd)?;
}
assert!(self.cache.insert(key.into(), value).is_none());
}
}
Ok(())
}
pub fn insert_del_add_u32(&mut self, key: &[u8], n: u32) -> grenad::Result<(), MF::Error> {
pub fn insert_del_add_u32(&mut self, key: &[u8], n: u32) {
match self.cache.get_mut(key) {
Some(DelAddRoaringBitmap { del, add }) => {
del.get_or_insert_with(PushOptimizedBitmap::default).insert(n);
add.get_or_insert_with(PushOptimizedBitmap::default).insert(n);
del.get_or_insert_with(RoaringBitmap::default).insert(n);
add.get_or_insert_with(RoaringBitmap::default).insert(n);
}
None => {
self.total_insertions += 1;
self.fitted_in_key += (key.len() <= KEY_SIZE) as usize;
let value = DelAddRoaringBitmap::new_del_add_u32(n);
if let Some((key, deladd)) = self.cache.push(key.into(), value) {
self.write_entry(key, deladd)?;
}
assert!(self.cache.insert(key.into(), value).is_none());
}
}
Ok(())
}
fn write_entry<A: AsRef<[u8]>>(
&mut self,
key: A,
deladd: DelAddRoaringBitmap,
) -> grenad::Result<(), MF::Error> {
/// TODO we must create a serialization trait to correctly serialize bitmaps
self.deladd_buffer.clear();
let mut value_writer = KvWriterDelAdd::new(&mut self.deladd_buffer);
match deladd {
DelAddRoaringBitmap { del: Some(del), add: None } => {
self.cbo_buffer.clear();
CboRoaringBitmapCodec::serialize_into(&del.bitmap, &mut self.cbo_buffer);
value_writer.insert(DelAdd::Deletion, &self.cbo_buffer)?;
}
DelAddRoaringBitmap { del: None, add: Some(add) } => {
self.cbo_buffer.clear();
CboRoaringBitmapCodec::serialize_into(&add.bitmap, &mut self.cbo_buffer);
value_writer.insert(DelAdd::Addition, &self.cbo_buffer)?;
}
DelAddRoaringBitmap { del: Some(del), add: Some(add) } => {
self.cbo_buffer.clear();
CboRoaringBitmapCodec::serialize_into(&del.bitmap, &mut self.cbo_buffer);
value_writer.insert(DelAdd::Deletion, &self.cbo_buffer)?;
self.cbo_buffer.clear();
CboRoaringBitmapCodec::serialize_into(&add.bitmap, &mut self.cbo_buffer);
value_writer.insert(DelAdd::Addition, &self.cbo_buffer)?;
}
DelAddRoaringBitmap { del: None, add: None } => return Ok(()),
}
let bytes = value_writer.into_inner().unwrap();
self.sorter.insert(key, bytes)
}
pub fn direct_insert(&mut self, key: &[u8], val: &[u8]) -> grenad::Result<(), MF::Error> {
self.sorter.insert(key, val)
}
pub fn into_sorter(mut self) -> grenad::Result<Sorter<MF>, MF::Error> {
let default_arc = Lru::new(NonZeroUsize::MIN);
for (key, deladd) in mem::replace(&mut self.cache, default_arc) {
self.write_entry(key, deladd)?;
}
pub fn into_sorter(self) -> HashMap<SmallVec<[u8; KEY_SIZE]>, DelAddRoaringBitmap> {
eprintln!(
"LruCache stats: {} <= {KEY_SIZE} bytes ({}%) on a total of {} insertions",
self.fitted_in_key,
@@ -185,66 +98,52 @@ impl<MF: MergeFunction> CboCachedSorter<MF> {
self.total_insertions,
);
Ok(self.sorter)
self.cache
}
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Default)]
pub struct DelAddRoaringBitmap {
pub(crate) del: Option<PushOptimizedBitmap>,
pub(crate) add: Option<PushOptimizedBitmap>,
pub(crate) del: Option<RoaringBitmap>,
pub(crate) add: Option<RoaringBitmap>,
}
impl DelAddRoaringBitmap {
fn new_del_add_u32(n: u32) -> Self {
DelAddRoaringBitmap {
del: Some(PushOptimizedBitmap::from_single(n)),
add: Some(PushOptimizedBitmap::from_single(n)),
del: Some(RoaringBitmap::from([n])),
add: Some(RoaringBitmap::from([n])),
}
}
fn new_del(bitmap: RoaringBitmap) -> Self {
DelAddRoaringBitmap { del: Some(PushOptimizedBitmap::from_bitmap(bitmap)), add: None }
DelAddRoaringBitmap { del: Some(bitmap), add: None }
}
fn new_del_u32(n: u32) -> Self {
DelAddRoaringBitmap { del: Some(PushOptimizedBitmap::from_single(n)), add: None }
DelAddRoaringBitmap { del: Some(RoaringBitmap::from([n])), add: None }
}
fn new_add(bitmap: RoaringBitmap) -> Self {
DelAddRoaringBitmap { del: None, add: Some(PushOptimizedBitmap::from_bitmap(bitmap)) }
DelAddRoaringBitmap { del: None, add: Some(bitmap) }
}
fn new_add_u32(n: u32) -> Self {
DelAddRoaringBitmap { del: None, add: Some(PushOptimizedBitmap::from_single(n)) }
}
}
#[derive(Debug, Clone, Default)]
struct PushOptimizedBitmap {
bitmap: RoaringBitmap,
}
impl PushOptimizedBitmap {
#[inline]
fn from_bitmap(bitmap: RoaringBitmap) -> PushOptimizedBitmap {
PushOptimizedBitmap { bitmap }
}
#[inline]
fn from_single(single: u32) -> PushOptimizedBitmap {
PushOptimizedBitmap { bitmap: RoaringBitmap::from([single]) }
}
#[inline]
fn insert(&mut self, n: u32) {
if !self.bitmap.push(n) {
self.bitmap.insert(n);
}
}
#[inline]
fn union_with_bitmap(&mut self, bitmap: RoaringBitmap) {
self.bitmap |= bitmap;
DelAddRoaringBitmap { del: None, add: Some(RoaringBitmap::from([n])) }
}
pub fn merge_with(&mut self, other: DelAddRoaringBitmap) {
self.del = match (self.del.take(), other.del) {
(None, None) => None,
(None, Some(other)) => Some(other),
(Some(this), None) => Some(this),
(Some(this), Some(other)) => Some(this | other),
};
self.add = match (self.add.take(), other.add) {
(None, None) => None,
(None, Some(other)) => Some(other),
(Some(this), None) => Some(this),
(Some(this), Some(other)) => Some(this | other),
};
}
}
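// A minimal sketch of the merge_with semantics, assuming two caches saw the
// same key:
//
//     let mut a = DelAddRoaringBitmap::new_add_u32(1); // del: None, add: {1}
//     let b = DelAddRoaringBitmap::new_del_add_u32(2); // del: {2},  add: {2}
//     a.merge_with(b);                                 // del: {2},  add: {1, 2}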

View File

@@ -1,8 +1,5 @@
use std::collections::HashSet;
use std::fmt::Debug;
use std::fs::File;
use grenad::{MergeFunction, Merger};
use heed::RoTxn;
use rayon::iter::{IntoParallelIterator, ParallelBridge, ParallelIterator};
use serde_json::Value;
@@ -11,9 +8,9 @@ use super::super::cache::CboCachedSorter;
use super::facet_document::extract_document_facets;
use super::FacetKind;
use crate::facet::value_encoding::f64_into_bytes;
use crate::update::new::extract::DocidsExtractor;
use crate::update::new::extract::{DocidsExtractor, HashMapMerger};
use crate::update::new::{DocumentChange, ItemsPool};
use crate::update::{create_sorter, GrenadParameters, MergeDeladdCboRoaringBitmaps};
use crate::update::GrenadParameters;
use crate::{DocumentId, FieldId, GlobalFieldsIdsMap, Index, Result, MAX_FACET_VALUE_LENGTH};
pub struct FacetedDocidsExtractor;
@@ -24,7 +21,7 @@ impl FacetedDocidsExtractor {
buffer: &mut Vec<u8>,
fields_ids_map: &mut GlobalFieldsIdsMap,
attributes_to_extract: &[&str],
cached_sorter: &mut CboCachedSorter<MergeDeladdCboRoaringBitmaps>,
cached_sorter: &mut CboCachedSorter,
document_change: DocumentChange,
) -> Result<()> {
match document_change {
@@ -94,25 +91,20 @@ impl FacetedDocidsExtractor {
}
}
fn facet_fn_with_options<MF>(
fn facet_fn_with_options(
buffer: &mut Vec<u8>,
cached_sorter: &mut CboCachedSorter<MF>,
cache_fn: impl Fn(&mut CboCachedSorter<MF>, &[u8], u32) -> grenad::Result<(), MF::Error>,
cached_sorter: &mut CboCachedSorter,
cache_fn: impl Fn(&mut CboCachedSorter, &[u8], u32),
docid: DocumentId,
fid: FieldId,
value: &Value,
) -> Result<()>
where
MF: MergeFunction,
MF::Error: Debug,
grenad::Error<MF::Error>: Into<crate::Error>,
{
) -> Result<()> {
// Exists
// key: fid
buffer.clear();
buffer.push(FacetKind::Exists as u8);
buffer.extend_from_slice(&fid.to_be_bytes());
cache_fn(cached_sorter, &*buffer, docid).map_err(Into::into)?;
cache_fn(cached_sorter, &*buffer, docid);
match value {
// Number
@@ -128,7 +120,7 @@ impl FacetedDocidsExtractor {
buffer.extend_from_slice(&ordered);
buffer.extend_from_slice(&n.to_be_bytes());
cache_fn(cached_sorter, &*buffer, docid).map_err(Into::into)
Ok(cache_fn(cached_sorter, &*buffer, docid))
} else {
Ok(())
}
@@ -142,7 +134,7 @@ impl FacetedDocidsExtractor {
buffer.extend_from_slice(&fid.to_be_bytes());
buffer.push(1); // level 0
buffer.extend_from_slice(truncated.as_bytes());
cache_fn(cached_sorter, &*buffer, docid).map_err(Into::into)
Ok(cache_fn(cached_sorter, &*buffer, docid))
}
// Null
// key: fid
@@ -150,7 +142,7 @@ impl FacetedDocidsExtractor {
buffer.clear();
buffer.push(FacetKind::Null as u8);
buffer.extend_from_slice(&fid.to_be_bytes());
cache_fn(cached_sorter, &*buffer, docid).map_err(Into::into)
Ok(cache_fn(cached_sorter, &*buffer, docid))
}
// Empty
// key: fid
@@ -158,13 +150,13 @@ impl FacetedDocidsExtractor {
buffer.clear();
buffer.push(FacetKind::Empty as u8);
buffer.extend_from_slice(&fid.to_be_bytes());
cache_fn(cached_sorter, &*buffer, docid).map_err(Into::into)
Ok(cache_fn(cached_sorter, &*buffer, docid))
}
Value::Object(o) if o.is_empty() => {
buffer.clear();
buffer.push(FacetKind::Empty as u8);
buffer.extend_from_slice(&fid.to_be_bytes());
cache_fn(cached_sorter, &*buffer, docid).map_err(Into::into)
Ok(cache_fn(cached_sorter, &*buffer, docid))
}
// Otherwise, do nothing
/// TODO: What about Value::Bool?
@@ -196,7 +188,7 @@ impl DocidsExtractor for FacetedDocidsExtractor {
fields_ids_map: &GlobalFieldsIdsMap,
indexer: GrenadParameters,
document_changes: impl IntoParallelIterator<Item = Result<DocumentChange>>,
) -> Result<Merger<File, MergeDeladdCboRoaringBitmaps>> {
) -> Result<HashMapMerger> {
let max_memory = indexer.max_memory_by_thread();
let rtxn = index.read_txn()?;
@@ -205,23 +197,7 @@ impl DocidsExtractor for FacetedDocidsExtractor {
attributes_to_extract.iter().map(|s| s.as_ref()).collect();
let context_pool = ItemsPool::new(|| {
Ok((
index.read_txn()?,
fields_ids_map.clone(),
Vec::new(),
CboCachedSorter::new(
// TODO use a better value
100.try_into().unwrap(),
create_sorter(
grenad::SortAlgorithm::Stable,
MergeDeladdCboRoaringBitmaps,
indexer.chunk_compression_type,
indexer.chunk_compression_level,
indexer.max_nb_chunks,
max_memory,
),
),
))
Ok((index.read_txn()?, fields_ids_map.clone(), Vec::new(), CboCachedSorter::new()))
});
{
@@ -243,7 +219,7 @@ impl DocidsExtractor for FacetedDocidsExtractor {
})?;
}
{
let mut builder = grenad::MergerBuilder::new(MergeDeladdCboRoaringBitmaps);
let mut builder = HashMapMerger::new();
let span =
tracing::trace_span!(target: "indexing::documents::extract", "merger_building");
let _entered = span.enter();
@@ -252,14 +228,13 @@ impl DocidsExtractor for FacetedDocidsExtractor {
.into_items()
.par_bridge()
.map(|(_rtxn, _tokenizer, _fields_ids_map, cached_sorter)| {
let sorter = cached_sorter.into_sorter()?;
sorter.into_reader_cursors()
cached_sorter.into_sorter()
})
.collect();
for reader in readers {
builder.extend(reader?);
}
Ok(builder.build())
builder.extend(readers);
Ok(builder)
}
}
}

View File

@@ -3,12 +3,15 @@ mod faceted;
mod lru;
mod searchable;
use std::fs::File;
use std::collections::HashMap;
use std::mem;
pub use faceted::*;
use grenad::Merger;
use rayon::iter::IntoParallelIterator;
use grenad::MergeFunction;
use rayon::iter::{IntoParallelIterator, ParallelIterator as _};
use rayon::slice::ParallelSliceMut as _;
pub use searchable::*;
use smallvec::SmallVec;
use super::DocumentChange;
use crate::update::{GrenadParameters, MergeDeladdCboRoaringBitmaps};
@@ -20,7 +23,85 @@ pub trait DocidsExtractor {
fields_ids_map: &GlobalFieldsIdsMap,
indexer: GrenadParameters,
document_changes: impl IntoParallelIterator<Item = Result<DocumentChange>>,
) -> Result<Merger<File, MergeDeladdCboRoaringBitmaps>>;
) -> Result<HashMapMerger>;
}
pub struct HashMapMerger {
maps: Vec<HashMap<SmallVec<[u8; cache::KEY_SIZE]>, cache::DelAddRoaringBitmap>>,
}
impl HashMapMerger {
pub fn new() -> HashMapMerger {
HashMapMerger { maps: Vec::new() }
}
pub fn extend<I>(&mut self, iter: I)
where
I: IntoIterator<
Item = HashMap<SmallVec<[u8; cache::KEY_SIZE]>, cache::DelAddRoaringBitmap>,
>,
{
self.maps.extend(iter);
}
}
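// Design note: each extractor thread contributes its whole HashMap as-is; the
// actual merge is deferred to into_iter(), which flattens every map into a
// single Vec, sorts it by key in parallel with rayon, and merges duplicate
// keys lazily while iterating.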
impl IntoIterator for HashMapMerger {
type Item = (SmallVec<[u8; 12]>, cache::DelAddRoaringBitmap);
type IntoIter = IntoIter;
fn into_iter(self) -> Self::IntoIter {
let mut entries = {
let span = tracing::trace_span!(target: "indexing::documents::merge", "into_par_iter");
let _entered = span.enter();
let entries: Vec<_> =
self.maps.into_par_iter().flat_map(|m| m.into_par_iter()).collect();
eprintln!("There are {} entries in the HashMapMerger", entries.len());
entries
};
{
let span =
tracing::trace_span!(target: "indexing::documents::merge", "par_sort_unstable_by");
let _entered = span.enter();
entries.par_sort_unstable_by(|(ka, _), (kb, _)| ka.cmp(kb));
IntoIter {
sorted_entries: entries.into_iter(),
current_key: None,
current_deladd: cache::DelAddRoaringBitmap::default(),
}
}
}
}
pub struct IntoIter {
sorted_entries: std::vec::IntoIter<(SmallVec<[u8; 12]>, cache::DelAddRoaringBitmap)>,
current_key: Option<SmallVec<[u8; 12]>>,
current_deladd: cache::DelAddRoaringBitmap,
}
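// A streaming group-by over the globally sorted entries: consecutive entries
// sharing the same key are folded together with DelAddRoaringBitmap::merge_with,
// and a (key, deladd) pair is only yielded once the key changes (or when the
// underlying iterator is exhausted).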
impl Iterator for IntoIter {
type Item = (SmallVec<[u8; 12]>, cache::DelAddRoaringBitmap);
fn next(&mut self) -> Option<Self::Item> {
loop {
match self.sorted_entries.next() {
Some((k, deladd)) => {
if self.current_key.as_deref() == Some(k.as_slice()) {
self.current_deladd.merge_with(deladd);
} else {
let previous_key = self.current_key.replace(k);
let previous_deladd = mem::replace(&mut self.current_deladd, deladd);
if let Some(previous_key) = previous_key {
return Some((previous_key, previous_deladd));
}
}
}
None => {
let current_deladd = mem::take(&mut self.current_deladd);
return self.current_key.take().map(|ck| (ck, current_deladd));
}
}
}
}
}
/// TODO move into permissive json pointer

View File

@@ -31,7 +31,7 @@ impl SearchableExtractor for FidWordCountDocidsExtractor {
index: &Index,
document_tokenizer: &DocumentTokenizer,
fields_ids_map: &mut GlobalFieldsIdsMap,
cached_sorter: &mut CboCachedSorter<MergeDeladdCboRoaringBitmaps>,
cached_sorter: &mut CboCachedSorter,
document_change: DocumentChange,
) -> Result<()> {
let mut key_buffer = Vec::new();
@@ -52,7 +52,7 @@ impl SearchableExtractor for FidWordCountDocidsExtractor {
for (fid, count) in fid_word_count.iter() {
if *count <= MAX_COUNTED_WORDS {
let key = build_key(*fid, *count as u8, &mut key_buffer);
cached_sorter.insert_del_u32(key, inner.docid())?;
cached_sorter.insert_del_u32(key, inner.docid());
}
}
}
@@ -85,11 +85,11 @@ impl SearchableExtractor for FidWordCountDocidsExtractor {
if *current_count != *new_count {
if *current_count <= MAX_COUNTED_WORDS {
let key = build_key(*fid, *current_count as u8, &mut key_buffer);
cached_sorter.insert_del_u32(key, inner.docid())?;
cached_sorter.insert_del_u32(key, inner.docid());
}
if *new_count <= MAX_COUNTED_WORDS {
let key = build_key(*fid, *new_count as u8, &mut key_buffer);
cached_sorter.insert_add_u32(key, inner.docid())?;
cached_sorter.insert_add_u32(key, inner.docid());
}
}
}
@@ -106,7 +106,7 @@ impl SearchableExtractor for FidWordCountDocidsExtractor {
for (fid, count) in fid_word_count.iter() {
if *count <= MAX_COUNTED_WORDS {
let key = build_key(*fid, *count as u8, &mut key_buffer);
cached_sorter.insert_add_u32(key, inner.docid())?;
cached_sorter.insert_add_u32(key, inner.docid());
}
}
}

View File

@@ -11,6 +11,7 @@ use super::tokenize_document::{tokenizer_builder, DocumentTokenizer};
use super::SearchableExtractor;
use crate::update::new::extract::cache::CboCachedSorter;
use crate::update::new::extract::perm_json_p::contained_in;
use crate::update::new::extract::HashMapMerger;
use crate::update::new::{DocumentChange, ItemsPool};
use crate::update::{create_sorter, GrenadParameters, MergeDeladdCboRoaringBitmaps};
use crate::{
@@ -39,14 +40,14 @@
index: &Index,
document_tokenizer: &DocumentTokenizer,
fields_ids_map: &mut GlobalFieldsIdsMap,
cached_sorter: &mut CboCachedSorter<MergeDeladdCboRoaringBitmaps>,
cached_sorter: &mut CboCachedSorter,
document_change: DocumentChange,
) -> Result<()> {
match document_change {
DocumentChange::Deletion(inner) => {
let mut token_fn = |_fname: &str, fid, pos, word: &str| {
let key = Self::build_key(fid, pos, word);
cached_sorter.insert_del_u32(&key, inner.docid()).map_err(crate::Error::from)
Ok(cached_sorter.insert_del_u32(&key, inner.docid()))
};
document_tokenizer.tokenize_document(
inner.current(rtxn, index)?.unwrap(),
@@ -57,7 +58,7 @@
DocumentChange::Update(inner) => {
let mut token_fn = |_fname: &str, fid, pos, word: &str| {
let key = Self::build_key(fid, pos, word);
cached_sorter.insert_del_u32(&key, inner.docid()).map_err(crate::Error::from)
Ok(cached_sorter.insert_del_u32(&key, inner.docid()))
};
document_tokenizer.tokenize_document(
inner.current(rtxn, index)?.unwrap(),
@@ -67,14 +68,14 @@
let mut token_fn = |_fname: &str, fid, pos, word: &str| {
let key = Self::build_key(fid, pos, word);
cached_sorter.insert_add_u32(&key, inner.docid()).map_err(crate::Error::from)
Ok(cached_sorter.insert_add_u32(&key, inner.docid()))
};
document_tokenizer.tokenize_document(inner.new(), fields_ids_map, &mut token_fn)?;
}
DocumentChange::Insertion(inner) => {
let mut token_fn = |_fname: &str, fid, pos, word: &str| {
let key = Self::build_key(fid, pos, word);
cached_sorter.insert_add_u32(&key, inner.docid()).map_err(crate::Error::from)
Ok(cached_sorter.insert_add_u32(&key, inner.docid()))
};
document_tokenizer.tokenize_document(inner.new(), fields_ids_map, &mut token_fn)?;
}
@@ -193,11 +194,11 @@ impl ProtoWordDocidsExtractor for WordPositionDocidsExtractor {
// V2
struct WordDocidsCachedSorters {
word_fid_docids: CboCachedSorter<MergeDeladdCboRoaringBitmaps>,
word_docids: CboCachedSorter<MergeDeladdCboRoaringBitmaps>,
exact_word_docids: CboCachedSorter<MergeDeladdCboRoaringBitmaps>,
word_position_docids: CboCachedSorter<MergeDeladdCboRoaringBitmaps>,
fid_word_count_docids: CboCachedSorter<MergeDeladdCboRoaringBitmaps>,
word_fid_docids: CboCachedSorter,
word_docids: CboCachedSorter,
exact_word_docids: CboCachedSorter,
word_position_docids: CboCachedSorter,
fid_word_count_docids: CboCachedSorter,
fid_word_count: HashMap<FieldId, (usize, usize)>,
current_docid: Option<DocumentId>,
}
@@ -210,61 +211,11 @@ impl WordDocidsCachedSorters {
) -> Self {
let max_memory = max_memory.map(|max_memory| max_memory / 4);
let word_fid_docids = CboCachedSorter::new(
capacity,
create_sorter(
grenad::SortAlgorithm::Stable,
MergeDeladdCboRoaringBitmaps,
indexer.chunk_compression_type,
indexer.chunk_compression_level,
indexer.max_nb_chunks,
max_memory,
),
);
let word_docids = CboCachedSorter::new(
capacity,
create_sorter(
grenad::SortAlgorithm::Stable,
MergeDeladdCboRoaringBitmaps,
indexer.chunk_compression_type,
indexer.chunk_compression_level,
indexer.max_nb_chunks,
max_memory,
),
);
let exact_word_docids = CboCachedSorter::new(
capacity,
create_sorter(
grenad::SortAlgorithm::Stable,
MergeDeladdCboRoaringBitmaps,
indexer.chunk_compression_type,
indexer.chunk_compression_level,
indexer.max_nb_chunks,
max_memory,
),
);
let word_position_docids = CboCachedSorter::new(
capacity,
create_sorter(
grenad::SortAlgorithm::Stable,
MergeDeladdCboRoaringBitmaps,
indexer.chunk_compression_type,
indexer.chunk_compression_level,
indexer.max_nb_chunks,
max_memory,
),
);
let fid_word_count_docids = CboCachedSorter::new(
capacity,
create_sorter(
grenad::SortAlgorithm::Stable,
MergeDeladdCboRoaringBitmaps,
indexer.chunk_compression_type,
indexer.chunk_compression_level,
indexer.max_nb_chunks,
max_memory,
),
);
let word_fid_docids = CboCachedSorter::new();
let word_docids = CboCachedSorter::new();
let exact_word_docids = CboCachedSorter::new();
let word_position_docids = CboCachedSorter::new();
let fid_word_count_docids = CboCachedSorter::new();
Self {
word_fid_docids,
@@ -288,22 +239,22 @@
) -> Result<()> {
let key = word.as_bytes();
if exact {
self.exact_word_docids.insert_add_u32(key, docid)?;
self.exact_word_docids.insert_add_u32(key, docid);
} else {
self.word_docids.insert_add_u32(key, docid)?;
self.word_docids.insert_add_u32(key, docid);
}
buffer.clear();
buffer.extend_from_slice(word.as_bytes());
buffer.push(0);
buffer.extend_from_slice(&position.to_be_bytes());
self.word_fid_docids.insert_add_u32(buffer, docid)?;
self.word_fid_docids.insert_add_u32(buffer, docid);
buffer.clear();
buffer.extend_from_slice(word.as_bytes());
buffer.push(0);
buffer.extend_from_slice(&field_id.to_be_bytes());
self.word_position_docids.insert_add_u32(buffer, docid)?;
self.word_position_docids.insert_add_u32(buffer, docid);
if self.current_docid.map_or(false, |id| docid != id) {
self.flush_fid_word_count(buffer)?;
@@ -329,22 +280,22 @@
) -> Result<()> {
let key = word.as_bytes();
if exact {
self.exact_word_docids.insert_del_u32(key, docid)?;
self.exact_word_docids.insert_del_u32(key, docid);
} else {
self.word_docids.insert_del_u32(key, docid)?;
self.word_docids.insert_del_u32(key, docid);
}
buffer.clear();
buffer.extend_from_slice(word.as_bytes());
buffer.push(0);
buffer.extend_from_slice(&position.to_be_bytes());
self.word_fid_docids.insert_del_u32(buffer, docid)?;
self.word_fid_docids.insert_del_u32(buffer, docid);
buffer.clear();
buffer.extend_from_slice(word.as_bytes());
buffer.push(0);
buffer.extend_from_slice(&field_id.to_be_bytes());
self.word_position_docids.insert_del_u32(buffer, docid)?;
self.word_position_docids.insert_del_u32(buffer, docid);
if self.current_docid.map_or(false, |id| docid != id) {
self.flush_fid_word_count(buffer)?;
@@ -366,15 +317,13 @@
buffer.clear();
buffer.extend_from_slice(&fid.to_be_bytes());
buffer.push(current_count as u8);
self.fid_word_count_docids
.insert_del_u32(buffer, self.current_docid.unwrap())?;
self.fid_word_count_docids.insert_del_u32(buffer, self.current_docid.unwrap());
}
if new_count <= MAX_COUNTED_WORDS {
buffer.clear();
buffer.extend_from_slice(&fid.to_be_bytes());
buffer.push(new_count as u8);
self.fid_word_count_docids
.insert_add_u32(buffer, self.current_docid.unwrap())?;
self.fid_word_count_docids.insert_add_u32(buffer, self.current_docid.unwrap());
}
}
}
@@ -384,29 +333,29 @@
}
struct WordDocidsMergerBuilders {
word_fid_docids: MergerBuilder<File, MergeDeladdCboRoaringBitmaps>,
word_docids: MergerBuilder<File, MergeDeladdCboRoaringBitmaps>,
exact_word_docids: MergerBuilder<File, MergeDeladdCboRoaringBitmaps>,
word_position_docids: MergerBuilder<File, MergeDeladdCboRoaringBitmaps>,
fid_word_count_docids: MergerBuilder<File, MergeDeladdCboRoaringBitmaps>,
word_fid_docids: HashMapMerger,
word_docids: HashMapMerger,
exact_word_docids: HashMapMerger,
word_position_docids: HashMapMerger,
fid_word_count_docids: HashMapMerger,
}
pub struct WordDocidsMergers {
pub word_fid_docids: Merger<File, MergeDeladdCboRoaringBitmaps>,
pub word_docids: Merger<File, MergeDeladdCboRoaringBitmaps>,
pub exact_word_docids: Merger<File, MergeDeladdCboRoaringBitmaps>,
pub word_position_docids: Merger<File, MergeDeladdCboRoaringBitmaps>,
pub fid_word_count_docids: Merger<File, MergeDeladdCboRoaringBitmaps>,
pub word_fid_docids: HashMapMerger,
pub word_docids: HashMapMerger,
pub exact_word_docids: HashMapMerger,
pub word_position_docids: HashMapMerger,
pub fid_word_count_docids: HashMapMerger,
}
impl WordDocidsMergerBuilders {
fn new() -> Self {
Self {
word_fid_docids: MergerBuilder::new(MergeDeladdCboRoaringBitmaps),
word_docids: MergerBuilder::new(MergeDeladdCboRoaringBitmaps),
exact_word_docids: MergerBuilder::new(MergeDeladdCboRoaringBitmaps),
word_position_docids: MergerBuilder::new(MergeDeladdCboRoaringBitmaps),
fid_word_count_docids: MergerBuilder::new(MergeDeladdCboRoaringBitmaps),
word_fid_docids: HashMapMerger::new(),
word_docids: HashMapMerger::new(),
exact_word_docids: HashMapMerger::new(),
word_position_docids: HashMapMerger::new(),
fid_word_count_docids: HashMapMerger::new(),
}
}
@@ -421,49 +370,44 @@ impl WordDocidsMergerBuilders {
current_docid: _,
} = other;
let mut word_fid_docids_readers = Ok(vec![]);
let mut word_docids_readers = Ok(vec![]);
let mut exact_word_docids_readers = Ok(vec![]);
let mut word_position_docids_readers = Ok(vec![]);
let mut fid_word_count_docids_readers = Ok(vec![]);
let mut word_fid_docids_readers = HashMap::new();
let mut word_docids_readers = HashMap::new();
let mut exact_word_docids_readers = HashMap::new();
let mut word_position_docids_readers = HashMap::new();
let mut fid_word_count_docids_readers = HashMap::new();
rayon::scope(|s| {
s.spawn(|_| {
word_fid_docids_readers =
word_fid_docids.into_sorter().and_then(|s| s.into_reader_cursors());
word_fid_docids_readers = word_fid_docids.into_sorter();
});
s.spawn(|_| {
word_docids_readers =
word_docids.into_sorter().and_then(|s| s.into_reader_cursors());
word_docids_readers = word_docids.into_sorter();
});
s.spawn(|_| {
exact_word_docids_readers =
exact_word_docids.into_sorter().and_then(|s| s.into_reader_cursors());
exact_word_docids_readers = exact_word_docids.into_sorter();
});
s.spawn(|_| {
word_position_docids_readers =
word_position_docids.into_sorter().and_then(|s| s.into_reader_cursors());
word_position_docids_readers = word_position_docids.into_sorter();
});
s.spawn(|_| {
fid_word_count_docids_readers =
fid_word_count_docids.into_sorter().and_then(|s| s.into_reader_cursors());
fid_word_count_docids_readers = fid_word_count_docids.into_sorter();
});
});
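// The five into_sorter() calls are independent, so rayon::scope runs them
// concurrently; each spawned closure writes its result into its own captured
// local variable.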
self.word_fid_docids.extend(word_fid_docids_readers?);
self.word_docids.extend(word_docids_readers?);
self.exact_word_docids.extend(exact_word_docids_readers?);
self.word_position_docids.extend(word_position_docids_readers?);
self.fid_word_count_docids.extend(fid_word_count_docids_readers?);
self.word_fid_docids.extend([word_fid_docids_readers]);
self.word_docids.extend([word_docids_readers]);
self.exact_word_docids.extend([exact_word_docids_readers]);
self.word_position_docids.extend([word_position_docids_readers]);
self.fid_word_count_docids.extend([fid_word_count_docids_readers]);
Ok(())
}
fn build(self) -> WordDocidsMergers {
WordDocidsMergers {
word_fid_docids: self.word_fid_docids.build(),
word_docids: self.word_docids.build(),
exact_word_docids: self.exact_word_docids.build(),
word_position_docids: self.word_position_docids.build(),
fid_word_count_docids: self.fid_word_count_docids.build(),
word_fid_docids: self.word_fid_docids,
word_docids: self.word_docids,
exact_word_docids: self.exact_word_docids,
word_position_docids: self.word_position_docids,
fid_word_count_docids: self.fid_word_count_docids,
}
}
}

View File

@@ -2,7 +2,6 @@ use std::collections::VecDeque;
use std::rc::Rc;
use heed::RoTxn;
use itertools::merge_join_by;
use obkv::KvReader;
use super::tokenize_document::DocumentTokenizer;
@@ -10,7 +9,6 @@ use super::SearchableExtractor;
use crate::proximity::{index_proximity, MAX_DISTANCE};
use crate::update::new::extract::cache::CboCachedSorter;
use crate::update::new::DocumentChange;
use crate::update::MergeDeladdCboRoaringBitmaps;
use crate::{FieldId, GlobalFieldsIdsMap, Index, Result};
pub struct WordPairProximityDocidsExtractor;
@@ -33,7 +31,7 @@ impl SearchableExtractor for WordPairProximityDocidsExtractor {
index: &Index,
document_tokenizer: &DocumentTokenizer,
fields_ids_map: &mut GlobalFieldsIdsMap,
cached_sorter: &mut CboCachedSorter<MergeDeladdCboRoaringBitmaps>,
cached_sorter: &mut CboCachedSorter,
document_change: DocumentChange,
) -> Result<()> {
let mut key_buffer = Vec::new();
@@ -96,14 +94,14 @@ impl SearchableExtractor for WordPairProximityDocidsExtractor {
del_word_pair_proximity.dedup_by(|(k1, _), (k2, _)| k1 == k2);
for ((w1, w2), prox) in del_word_pair_proximity.iter() {
let key = build_key(*prox, w1, w2, &mut key_buffer);
cached_sorter.insert_del_u32(key, docid)?;
cached_sorter.insert_del_u32(key, docid);
}
add_word_pair_proximity.sort_unstable();
add_word_pair_proximity.dedup_by(|(k1, _), (k2, _)| k1 == k2);
for ((w1, w2), prox) in add_word_pair_proximity.iter() {
let key = build_key(*prox, w1, w2, &mut key_buffer);
cached_sorter.insert_add_u32(key, docid)?;
cached_sorter.insert_add_u32(key, docid);
}
Ok(())
}

View File

@@ -13,9 +13,9 @@ use rayon::iter::{IntoParallelIterator, ParallelBridge, ParallelIterator};
use tokenize_document::{tokenizer_builder, DocumentTokenizer};
use super::cache::CboCachedSorter;
use super::DocidsExtractor;
use super::{DocidsExtractor, HashMapMerger};
use crate::update::new::{DocumentChange, ItemsPool};
use crate::update::{create_sorter, GrenadParameters, MergeDeladdCboRoaringBitmaps};
use crate::update::{GrenadParameters, MergeDeladdCboRoaringBitmaps};
use crate::{GlobalFieldsIdsMap, Index, Result, MAX_POSITION_PER_ATTRIBUTE};
pub trait SearchableExtractor {
@@ -24,7 +24,7 @@ pub trait SearchableExtractor {
fields_ids_map: &GlobalFieldsIdsMap,
indexer: GrenadParameters,
document_changes: impl IntoParallelIterator<Item = Result<DocumentChange>>,
) -> Result<Merger<File, MergeDeladdCboRoaringBitmaps>> {
) -> Result<HashMapMerger> {
let max_memory = indexer.max_memory_by_thread();
let rtxn = index.read_txn()?;
@@ -60,18 +60,7 @@
index.read_txn()?,
&document_tokenizer,
fields_ids_map.clone(),
CboCachedSorter::new(
// TODO use a better value
1_000_000.try_into().unwrap(),
create_sorter(
grenad::SortAlgorithm::Stable,
MergeDeladdCboRoaringBitmaps,
indexer.chunk_compression_type,
indexer.chunk_compression_level,
indexer.max_nb_chunks,
max_memory,
),
),
CboCachedSorter::new(),
))
});
@@ -93,7 +82,7 @@ pub trait SearchableExtractor {
})?;
}
{
let mut builder = grenad::MergerBuilder::new(MergeDeladdCboRoaringBitmaps);
let mut builder = HashMapMerger::new();
let span =
tracing::trace_span!(target: "indexing::documents::extract", "merger_building");
let _entered = span.enter();
@@ -102,14 +91,11 @@ pub trait SearchableExtractor {
.into_items()
.par_bridge()
.map(|(_rtxn, _tokenizer, _fields_ids_map, cached_sorter)| {
let sorter = cached_sorter.into_sorter()?;
sorter.into_reader_cursors()
cached_sorter.into_sorter()
})
.collect();
for reader in readers {
builder.extend(reader?);
}
Ok(builder.build())
builder.extend(readers);
Ok(builder)
}
}
@@ -118,7 +104,7 @@
index: &Index,
document_tokenizer: &DocumentTokenizer,
fields_ids_map: &mut GlobalFieldsIdsMap,
cached_sorter: &mut CboCachedSorter<MergeDeladdCboRoaringBitmaps>,
cached_sorter: &mut CboCachedSorter,
document_change: DocumentChange,
) -> Result<()>;
@@ -134,7 +120,7 @@ impl<T: SearchableExtractor> DocidsExtractor for T {
fields_ids_map: &GlobalFieldsIdsMap,
indexer: GrenadParameters,
document_changes: impl IntoParallelIterator<Item = Result<DocumentChange>>,
) -> Result<Merger<File, MergeDeladdCboRoaringBitmaps>> {
) -> Result<HashMapMerger> {
Self::run_extraction(index, fields_ids_map, indexer, document_changes)
}
}

View File

@@ -2,7 +2,7 @@ use std::borrow::Cow;
use std::collections::{BTreeMap, HashMap};
use std::sync::Arc;
use heed::types::{Bytes, DecodeIgnore};
use heed::types::Bytes;
use heed::RoTxn;
use memmap2::Mmap;
use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};

View File

@@ -14,7 +14,7 @@ use super::channel::*;
use super::document_change::DocumentChange;
use super::extract::*;
use super::merger::merge_grenad_entries;
use super::{StdResult, TopLevelMap};
use super::{ItemsPool, StdResult, TopLevelMap};
use crate::documents::{PrimaryKey, DEFAULT_PRIMARY_KEY};
use crate::update::new::channel::ExtractorSender;
use crate::update::GrenadParameters;
@@ -213,16 +213,18 @@
let span =
tracing::trace_span!(target: "indexing::documents", parent: &current_span, "merge");
let _entered = span.enter();
let rtxn = index.read_txn().unwrap();
let rtxn_pool = ItemsPool::new(|| index.read_txn().map_err(Into::into));
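// The single read transaction becomes a pool of them: the merger now touches
// LMDB from multiple rayon threads and each thread needs its own RoTxn.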
merge_grenad_entries(
merger_receiver,
merger_sender,
&rtxn,
&rtxn_pool,
index,
global_fields_ids_map_clone,
)
})?;
let mut entries_count = 0;
for operation in writer_receiver {
let database = operation.database(index);
match operation.entry() {
@@ -231,10 +233,15 @@
unreachable!("We tried to delete an unknown key")
}
}
EntryOperation::Write(e) => database.put(wtxn, e.key(), e.value())?,
EntryOperation::Write(e) => {
entries_count += 1;
database.put(wtxn, e.key(), e.value())?
}
}
}
eprintln!("We saw {entries_count}");
/// TODO handle the panicking threads
handle.join().unwrap()?;
handle2.join().unwrap()?;

View File

@@ -1,33 +1,34 @@
use std::fs::File;
use std::io::{self};
use bincode::ErrorKind;
use grenad::Merger;
use heed::types::Bytes;
use heed::{Database, RoTxn};
use rayon::iter::{ParallelBridge, ParallelIterator as _};
use roaring::RoaringBitmap;
use super::channel::*;
use super::extract::FacetKind;
use super::{Deletion, DocumentChange, Insertion, KvReaderDelAdd, KvReaderFieldId, Update};
use super::extract::{FacetKind, HashMapMerger};
use super::{Deletion, DocumentChange, Insertion, ItemsPool, KvReaderFieldId, Update};
use crate::update::del_add::DelAdd;
use crate::update::new::channel::MergerOperation;
use crate::update::new::word_fst_builder::WordFstBuilder;
use crate::update::MergeDeladdCboRoaringBitmaps;
use crate::{CboRoaringBitmapCodec, Error, GeoPoint, GlobalFieldsIdsMap, Index, Result};
/// TODO We must return some info/stats
#[tracing::instrument(level = "trace", skip_all, target = "indexing::documents", name = "merge")]
pub fn merge_grenad_entries(
pub fn merge_grenad_entries<'t>(
receiver: MergerReceiver,
sender: MergerSender,
rtxn: &RoTxn,
rtxn_pool: &ItemsPool<impl Fn() -> Result<RoTxn<'t>> + Send + Sync, RoTxn<'t>, Error>,
index: &Index,
mut global_fields_ids_map: GlobalFieldsIdsMap<'_>,
) -> Result<()> {
let mut buffer: Vec<u8> = Vec::new();
let mut documents_ids = index.documents_ids(rtxn)?;
let mut geo_extractor = GeoExtractor::new(rtxn, index)?;
let (mut documents_ids, mut geo_extractor) = rtxn_pool.with(|rtxn| {
let documents_ids = index.documents_ids(rtxn)?;
let geo_extractor = GeoExtractor::new(rtxn, index)?;
Ok((documents_ids, geo_extractor))
})?;
for merger_operation in receiver {
match merger_operation {
@@ -39,10 +40,10 @@ pub fn merge_grenad_entries(
merger,
/// TODO do a MergerOperation::database(&Index) -> Database<Bytes, Bytes>.
index.exact_word_docids.remap_types(),
rtxn,
rtxn_pool,
&mut buffer,
sender.docids::<ExactWordDocids>(),
|_, _key| Ok(()),
// |_, _key| Ok(()),
)?;
}
MergerOperation::FidWordCountDocidsMerger(merger) => {
@@ -51,15 +52,15 @@ pub fn merge_grenad_entries(
merge_and_send_docids(
merger,
index.field_id_word_count_docids.remap_types(),
rtxn,
rtxn_pool,
&mut buffer,
sender.docids::<FidWordCountDocids>(),
|_, _key| Ok(()),
// |_, _key| Ok(()),
)?;
}
MergerOperation::WordDocidsMerger(merger) => {
let words_fst = index.words_fst(rtxn)?;
let mut word_fst_builder = WordFstBuilder::new(&words_fst, 4)?;
// let words_fst = index.words_fst(rtxn)?;
// let mut word_fst_builder = WordFstBuilder::new(&words_fst, 4)?;
{
let span =
tracing::trace_span!(target: "indexing::documents::merge", "word_docids");
@@ -68,10 +69,10 @@ pub fn merge_grenad_entries(
merge_and_send_docids(
merger,
index.word_docids.remap_types(),
rtxn,
rtxn_pool,
&mut buffer,
sender.docids::<WordDocids>(),
|deladd, key| word_fst_builder.register_word(deladd, key),
// |deladd, key| word_fst_builder.register_word(deladd, key),
)?;
}
@@ -80,8 +81,8 @@ pub fn merge_grenad_entries(
tracing::trace_span!(target: "indexing::documents::merge", "words_fst");
let _entered = span.enter();
let (word_fst_mmap, prefix_fst_mmap) = word_fst_builder.build()?;
sender.main().write_words_fst(word_fst_mmap).unwrap();
// let (word_fst_mmap, prefix_fst_mmap) = word_fst_builder.build()?;
// sender.main().write_words_fst(word_fst_mmap).unwrap();
}
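// NOTE: building and sending the words FST is disabled here (see the
// commented-out WordFstBuilder calls above), so word docids are written
// without refreshing the FST.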
}
MergerOperation::WordFidDocidsMerger(merger) => {
@@ -91,10 +92,10 @@ pub fn merge_grenad_entries(
merge_and_send_docids(
merger,
index.word_fid_docids.remap_types(),
rtxn,
rtxn_pool,
&mut buffer,
sender.docids::<WordFidDocids>(),
|_, _key| Ok(()),
// |_, _key| Ok(()),
)?;
}
MergerOperation::WordPairProximityDocidsMerger(merger) => {
@@ -103,10 +104,10 @@ pub fn merge_grenad_entries(
merge_and_send_docids(
merger,
index.word_pair_proximity_docids.remap_types(),
rtxn,
rtxn_pool,
&mut buffer,
sender.docids::<WordPairProximityDocids>(),
|_, _key| Ok(()),
// |_, _key| Ok(()),
)?;
}
MergerOperation::WordPositionDocidsMerger(merger) => {
@@ -115,10 +116,10 @@ pub fn merge_grenad_entries(
merge_and_send_docids(
merger,
index.word_position_docids.remap_types(),
rtxn,
rtxn_pool,
&mut buffer,
sender.docids::<WordPositionDocids>(),
|_, _key| Ok(()),
// |_, _key| Ok(()),
)?;
}
MergerOperation::InsertDocument { docid, document } => {
@@ -129,15 +130,21 @@ pub fn merge_grenad_entries(
sender.documents().uncompressed(docid, &document).unwrap();
if let Some(geo_extractor) = geo_extractor.as_mut() {
let current = index.documents.remap_data_type::<Bytes>().get(rtxn, &docid)?;
let current: Option<&KvReaderFieldId> = current.map(Into::into);
let change = match current {
Some(current) => {
DocumentChange::Update(Update::create(docid, current.boxed(), document))
}
None => DocumentChange::Insertion(Insertion::create(docid, document)),
};
geo_extractor.manage_change(&mut global_fields_ids_map, &change)?;
rtxn_pool.with(|rtxn| {
let current =
index.documents.remap_data_type::<Bytes>().get(rtxn, &docid)?;
let current: Option<&KvReaderFieldId> = current.map(Into::into);
let change = match current {
Some(current) => DocumentChange::Update(Update::create(
docid,
current.boxed(),
document,
)),
None => DocumentChange::Insertion(Insertion::create(docid, document)),
};
geo_extractor.manage_change(&mut global_fields_ids_map, &change)?;
Ok(())
})?;
}
}
MergerOperation::DeleteDocument { docid } => {
@@ -150,9 +157,13 @@ pub fn merge_grenad_entries(
sender.documents().delete(docid).unwrap();
if let Some(geo_extractor) = geo_extractor.as_mut() {
let current = index.document(rtxn, docid)?;
let change = DocumentChange::Deletion(Deletion::create(docid, current.boxed()));
geo_extractor.manage_change(&mut global_fields_ids_map, &change)?;
rtxn_pool.with(|rtxn| {
let current = index.document(rtxn, docid)?;
let change =
DocumentChange::Deletion(Deletion::create(docid, current.boxed()));
geo_extractor.manage_change(&mut global_fields_ids_map, &change)?;
Ok(())
})?;
}
}
MergerOperation::FinishedDocument => {
@@ -165,7 +176,7 @@ pub fn merge_grenad_entries(
merge_and_send_facet_docids(
merger,
FacetDatabases::new(index),
rtxn,
rtxn_pool,
&mut buffer,
sender.facet_docids(),
)?;
@@ -237,65 +248,69 @@ impl GeoExtractor {
}
#[tracing::instrument(level = "trace", skip_all, target = "indexing::merge")]
fn merge_and_send_docids(
merger: Merger<File, MergeDeladdCboRoaringBitmaps>,
#[inline(never)]
fn merge_and_send_docids<'t>(
merger: HashMapMerger,
database: Database<Bytes, Bytes>,
rtxn: &RoTxn<'_>,
rtxn_pool: &ItemsPool<impl Fn() -> Result<RoTxn<'t>> + Send + Sync, RoTxn<'t>, Error>,
buffer: &mut Vec<u8>,
docids_sender: impl DocidsSender,
mut register_key: impl FnMut(DelAdd, &[u8]) -> Result<()>,
docids_sender: impl DocidsSender + Sync,
// mut register_key: impl FnMut(DelAdd, &[u8]) -> Result<()> + Send + Sync,
) -> Result<()> {
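// Entries are now drained in parallel via par_bridge(); each rayon worker
// checks a read transaction out of the ItemsPool, since a single heed RoTxn
// cannot be used from multiple threads at once.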
let mut merger_iter = merger.into_stream_merger_iter().unwrap();
while let Some((key, deladd)) = merger_iter.next().unwrap() {
let current = database.get(rtxn, key)?;
let deladd: &KvReaderDelAdd = deladd.into();
let del = deladd.get(DelAdd::Deletion);
let add = deladd.get(DelAdd::Addition);
let now = std::time::Instant::now();
merger.into_iter().par_bridge().try_for_each(|(key, deladd)| {
rtxn_pool.with(|rtxn| {
let mut buffer = Vec::new();
let current = database.get(rtxn, &key)?;
match merge_cbo_bitmaps(current, deladd.del, deladd.add)? {
Operation::Write(bitmap) => {
let value = cbo_bitmap_serialize_into_vec(&bitmap, &mut buffer);
docids_sender.write(&key, value).unwrap();
// register_key(DelAdd::Addition, &key)?;
}
Operation::Delete => {
docids_sender.delete(&key).unwrap();
// register_key(DelAdd::Deletion, &key)?;
}
Operation::Ignore => (),
}
Ok(())
})
})?;
match merge_cbo_bitmaps(current, del, add)? {
Operation::Write(bitmap) => {
let value = cbo_bitmap_serialize_into_vec(&bitmap, buffer);
docids_sender.write(key, value).unwrap();
register_key(DelAdd::Addition, key)?;
}
Operation::Delete => {
docids_sender.delete(key).unwrap();
register_key(DelAdd::Deletion, key)?;
}
Operation::Ignore => (),
}
}
eprintln!("I took to merger hashmaps {:.2?}", now.elapsed());
Ok(())
}
#[tracing::instrument(level = "trace", skip_all, target = "indexing::merge")]
fn merge_and_send_facet_docids(
merger: Merger<File, MergeDeladdCboRoaringBitmaps>,
#[inline(never)]
fn merge_and_send_facet_docids<'t>(
merger: HashMapMerger,
database: FacetDatabases,
rtxn: &RoTxn<'_>,
rtxn_pool: &ItemsPool<impl Fn() -> Result<RoTxn<'t>> + Send + Sync, RoTxn<'t>, Error>,
buffer: &mut Vec<u8>,
docids_sender: impl DocidsSender,
docids_sender: impl DocidsSender + Sync,
) -> Result<()> {
let mut merger_iter = merger.into_stream_merger_iter().unwrap();
while let Some((key, deladd)) = merger_iter.next().unwrap() {
let current = database.get(rtxn, key)?;
let deladd: &KvReaderDelAdd = deladd.into();
let del = deladd.get(DelAdd::Deletion);
let add = deladd.get(DelAdd::Addition);
match merge_cbo_bitmaps(current, del, add)? {
Operation::Write(bitmap) => {
let value = cbo_bitmap_serialize_into_vec(&bitmap, buffer);
docids_sender.write(key, value).unwrap();
let now = std::time::Instant::now();
merger.into_iter().par_bridge().try_for_each(|(key, deladd)| {
rtxn_pool.with(|rtxn| {
let mut buffer = Vec::new();
let current = database.get(rtxn, &key)?;
match merge_cbo_bitmaps(current, deladd.del, deladd.add)? {
Operation::Write(bitmap) => {
let value = cbo_bitmap_serialize_into_vec(&bitmap, &mut buffer);
docids_sender.write(&key, value).unwrap();
}
Operation::Delete => {
docids_sender.delete(&key).unwrap();
}
Operation::Ignore => (),
}
Operation::Delete => {
docids_sender.delete(key).unwrap();
}
Operation::Ignore => (),
}
}
Ok(())
})
})?;
eprintln!("I took to merger hashmaps {:.2?}", now.elapsed());
Ok(())
}
@@ -348,12 +363,12 @@ enum Operation {
/// A function that merges the DelAdd CboRoaringBitmaps with the current bitmap.
fn merge_cbo_bitmaps(
current: Option<&[u8]>,
del: Option<&[u8]>,
add: Option<&[u8]>,
del: Option<RoaringBitmap>,
add: Option<RoaringBitmap>,
) -> Result<Operation> {
let current = current.map(CboRoaringBitmapCodec::deserialize_from).transpose()?;
let del = del.map(CboRoaringBitmapCodec::deserialize_from).transpose()?;
let add = add.map(CboRoaringBitmapCodec::deserialize_from).transpose()?;
// let del = del.map(CboRoaringBitmapCodec::deserialize_from).transpose()?;
// let add = add.map(CboRoaringBitmapCodec::deserialize_from).transpose()?;
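// `del` and `add` now arrive as in-memory RoaringBitmaps straight from the
// HashMapMerger, so only `current`, read from LMDB, still needs to be
// deserialized with the CboRoaringBitmapCodec.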
match (current, del, add) {
(None, None, None) => Ok(Operation::Ignore), // but it's strange