Refactor indexing of the "facet-id-exists-docids" database

The idea is to directly create a sorted, merged collection of bitmaps
in the form of a BTreeMap<FieldId, RoaringBitmap>, instead of creating
a grenad::Reader where the keys are field ids and the values are docids.

Then we send that BTreeMap to the code that handles TypedChunks, which
inserts its contents into the database.
This commit is contained in:
Loïc Lecrenier
2022-07-19 09:57:28 +02:00
parent 1eb1e73bb3
commit aed8c69bcb
3 changed files with 92 additions and 39 deletions

View File

@ -1,16 +1,16 @@
use std::collections::HashSet;
use std::collections::{BTreeMap, HashSet};
use std::convert::TryInto;
use std::fs::File;
use std::io;
use std::mem::size_of;
use heed::zerocopy::AsBytes;
use roaring::RoaringBitmap;
use serde_json::Value;
use super::helpers::{create_sorter, keep_first, sorter_into_reader, GrenadParameters};
use crate::error::InternalError;
use crate::facet::value_encoding::f64_into_bytes;
use crate::update::index_documents::merge_cbo_roaring_bitmaps;
use crate::{DocumentId, FieldId, Result, BEU32};
/// Extracts the facet values of each faceted field of each document.
@ -22,7 +22,7 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
obkv_documents: grenad::Reader<R>,
indexer: GrenadParameters,
faceted_fields: &HashSet<FieldId>,
) -> Result<(grenad::Reader<File>, grenad::Reader<File>, grenad::Reader<File>)> {
) -> Result<(grenad::Reader<File>, grenad::Reader<File>, BTreeMap<FieldId, RoaringBitmap>)> {
let max_memory = indexer.max_memory_by_thread();
let mut fid_docid_facet_numbers_sorter = create_sorter(
@ -30,7 +30,7 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
indexer.chunk_compression_type,
indexer.chunk_compression_level,
indexer.max_nb_chunks,
max_memory.map(|m| m / 3),
max_memory.map(|m| m / 2),
);
let mut fid_docid_facet_strings_sorter = create_sorter(
@ -38,16 +38,10 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
indexer.chunk_compression_type,
indexer.chunk_compression_level,
indexer.max_nb_chunks,
max_memory.map(|m| m / 3),
max_memory.map(|m| m / 2),
);
let mut fid_docid_facet_exists_sorter = create_sorter(
merge_cbo_roaring_bitmaps,
indexer.chunk_compression_type,
indexer.chunk_compression_level,
indexer.max_nb_chunks,
max_memory.map(|m| m / 3),
);
let mut facet_exists_docids = BTreeMap::<FieldId, RoaringBitmap>::new();
let mut key_buffer = Vec::new();
let mut cursor = obkv_documents.into_cursor()?;
@ -65,7 +59,8 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
// Here, we know already that the document must be added to the “field id exists” database
let document: [u8; 4] = docid_bytes[..4].try_into().ok().unwrap();
let document = BEU32::from(document).get();
fid_docid_facet_exists_sorter.insert(&key_buffer, document.to_ne_bytes())?;
facet_exists_docids.entry(field_id).or_default().insert(document);
// For the other extraction tasks, prefix the key with the field_id and the document_id
key_buffer.extend_from_slice(&docid_bytes);
@ -99,7 +94,7 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
Ok((
sorter_into_reader(fid_docid_facet_numbers_sorter, indexer.clone())?,
sorter_into_reader(fid_docid_facet_strings_sorter, indexer.clone())?,
sorter_into_reader(fid_docid_facet_exists_sorter, indexer)?,
facet_exists_docids,
))
}

View File

@ -8,12 +8,13 @@ mod extract_word_docids;
mod extract_word_pair_proximity_docids;
mod extract_word_position_docids;
use std::collections::HashSet;
use std::collections::{BTreeMap, HashSet};
use std::fs::File;
use crossbeam_channel::Sender;
use log::debug;
use rayon::prelude::*;
use roaring::RoaringBitmap;
use self::extract_docid_word_positions::extract_docid_word_positions;
use self::extract_facet_number_docids::extract_facet_number_docids;
@ -72,12 +73,24 @@ pub(crate) fn data_from_obkv_documents(
let (
docid_word_positions_chunks,
(
docid_fid_facet_numbers_chunks,
(docid_fid_facet_strings_chunks, docid_fid_facet_exists_chunks),
),
(docid_fid_facet_numbers_chunks, (docid_fid_facet_strings_chunks, facet_exists_docids)),
) = result?;
// merge the facet_exists_docids BTreeMaps from all chunks and send the result as a typed chunk
{
let lmdb_writer_sx = lmdb_writer_sx.clone();
rayon::spawn(move || {
let mut all = BTreeMap::default();
for facet_exists_docids in facet_exists_docids {
for (field_id, docids) in facet_exists_docids {
let docids0 = all.entry(field_id).or_default();
*docids0 |= docids;
}
}
let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetExistsDocids(all)));
});
}
spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
docid_word_positions_chunks.clone(),
indexer.clone(),
@ -141,12 +154,6 @@ pub(crate) fn data_from_obkv_documents(
"field-id-facet-number-docids",
);
// spawn extraction task for field-id-facet-exists-docids
rayon::spawn(move || {
let reader = docid_fid_facet_exists_chunks.merge(merge_cbo_roaring_bitmaps, &indexer);
let _ = lmdb_writer_sx.send(reader.map(TypedChunk::FieldIdFacetExistsDocids));
});
Ok(())
}
@ -221,7 +228,7 @@ fn send_and_extract_flattened_documents_data(
grenad::Reader<CursorClonableMmap>,
(
grenad::Reader<CursorClonableMmap>,
(grenad::Reader<CursorClonableMmap>, grenad::Reader<File>),
(grenad::Reader<CursorClonableMmap>, BTreeMap<FieldId, RoaringBitmap>),
),
)> {
let flattened_documents_chunk =
@ -266,7 +273,7 @@ fn send_and_extract_flattened_documents_data(
let (
docid_fid_facet_numbers_chunk,
docid_fid_facet_strings_chunk,
docid_fid_facet_exists_chunk,
facet_exists_docids,
) = extract_fid_docid_facet_values(
flattened_documents_chunk.clone(),
indexer.clone(),
@ -291,7 +298,7 @@ fn send_and_extract_flattened_documents_data(
Ok((
docid_fid_facet_numbers_chunk,
(docid_fid_facet_strings_chunk, docid_fid_facet_exists_chunk),
(docid_fid_facet_strings_chunk, facet_exists_docids),
))
},
);