Introduce a function to retrieve the facet level range docids

This commit is contained in:
Clément Renault
2020-11-18 16:29:07 +01:00
parent 57d253aeda
commit 9ec95679e1
11 changed files with 423 additions and 164 deletions

View File

@ -0,0 +1,125 @@
use std::fs::File;
use grenad::{CompressionType, Reader, Writer, FileFuse};
use heed::types::{ByteSlice, DecodeIgnore};
use heed::{BytesEncode, Error};
use roaring::RoaringBitmap;
use crate::facet::FacetType;
use crate::heed_codec::{facet::FacetLevelValueI64Codec, CboRoaringBitmapCodec};
use crate::update::index_documents::{create_writer, writer_into_reader};
/// Removes every facet level entry of the given `field_id`, keeping only
/// the level 0 entries (the raw facet values themselves).
pub fn clear_field_levels(
    wtxn: &mut heed::RwTxn,
    db: heed::Database<ByteSlice, CboRoaringBitmapCodec>,
    field_id: u8,
) -> heed::Result<()>
{
    // Levels start at 1 here on purpose: level 0 holds the original
    // facet values and must survive the deletion.
    let start = (field_id, 1, i64::MIN, i64::MIN);
    let end = (field_id, u8::MAX, i64::MAX, i64::MAX);
    let db = db.remap_key_type::<FacetLevelValueI64Codec>();
    // We only care about success, not the number of deleted entries.
    db.delete_range(wtxn, &(start..=end)).map(|_count| ())
}
/// Computes the upper facet levels (level > 0) for the given `field_id` and
/// returns a grenad reader over the freshly written level entries, ready to be
/// transferred into LMDB.
///
/// Level 0 entries (the raw facet values) are read from `db`; the grouped
/// ranges for the upper levels are first written to a temporary grenad file
/// because LMDB forbids writing to a database while iterating over it.
pub fn compute_facet_levels(
    rtxn: &heed::RoTxn,
    db: heed::Database<ByteSlice, CboRoaringBitmapCodec>,
    compression_type: CompressionType,
    compression_level: Option<u32>,
    shrink_size: Option<u64>,
    field_id: u8,
    // NOTE(review): `facet_type` is currently unused in this body — confirm
    // whether string/float facets are meant to be handled here later.
    facet_type: FacetType,
) -> anyhow::Result<Reader<FileFuse>>
{
    // Hard-coded easing bounds: the smallest (last) level targets ~5 groups
    // and 5 levels are generated in total.
    let last_level_size = 5;
    let number_of_levels = 5;
    // Count the level 0 entries of this field; this count drives the easing
    // curve that decides each level's group count. The fold short-circuits on
    // the first iteration error thanks to `result.and(count)`.
    let first_level_size = db.prefix_iter(rtxn, &[field_id])?
        .remap_types::<DecodeIgnore, DecodeIgnore>()
        .fold(Ok(0u64), |count, result| result.and(count).map(|c| c + 1))?;

    // It is forbidden to keep a cursor and write in a database at the same time with LMDB
    // therefore we write the facet levels entries into a grenad file before transferring them.
    let mut writer = tempfile::tempfile().and_then(|file| {
        create_writer(compression_type, compression_level, file)
    })?;

    // Only iterate over the level 0 entries of this field.
    let level_0_range = (field_id, 0, i64::MIN, i64::MIN)..=(field_id, 0, i64::MAX, i64::MAX);
    // Level sizes follow an ease-out-expo curve from `first_level_size` down
    // to `last_level_size`; index 0 is skipped because level 0 already exists.
    let level_sizes_iter = levels_iterator(first_level_size, last_level_size, number_of_levels)
        .enumerate()
        .skip(1);

    // TODO we must not create levels with identical group sizes.
    for (level, size) in level_sizes_iter {
        // Number of consecutive level 0 entries each group of this level covers.
        let level_entry_sizes = (first_level_size as f64 / size as f64).ceil() as usize;
        let mut left = 0;
        let mut right = 0;
        let mut group_docids = RoaringBitmap::new();

        let db = db.remap_key_type::<FacetLevelValueI64Codec>();
        for (i, result) in db.range(rtxn, &level_0_range)?.enumerate() {
            let ((_field_id, _level, value, _right), docids) = result?;

            if i == 0 {
                left = value;
            } else if i % level_entry_sizes == 0 {
                // we found the first bound of the next group, we must store the left
                // and right bounds associated with the docids.
                write_entry(&mut writer, field_id, level as u8, left, right, &group_docids)?;

                // We save the left bound for the new group and also reset the docids.
                group_docids = RoaringBitmap::new();
                left = value;
            }

            // The right bound is always the bound we run through.
            group_docids.union_with(&docids);
            right = value;
        }

        // Flush the trailing, possibly smaller, group of this level.
        if !group_docids.is_empty() {
            write_entry(&mut writer, field_id, level as u8, left, right, &group_docids)?;
        }
    }

    writer_into_reader(writer, shrink_size)
}
fn write_entry(
writer: &mut Writer<File>,
field_id: u8,
level: u8,
left: i64,
right: i64,
ids: &RoaringBitmap,
) -> anyhow::Result<()>
{
let key = (field_id, level, left, right);
let key = FacetLevelValueI64Codec::bytes_encode(&key).ok_or(Error::Encoding)?;
let data = CboRoaringBitmapCodec::bytes_encode(&ids).ok_or(Error::Encoding)?;
writer.insert(&key, &data)?;
Ok(())
}
/// Yields `number_of_levels + 1` level sizes, easing from `first_level_size`
/// (the biggest, level 0) down to `last_level_size` (the smallest, top level)
/// along an ease-out-expo curve.
fn levels_iterator(
    first_level_size: u64, // biggest level
    last_level_size: u64, // smallest level
    number_of_levels: u64,
) -> impl Iterator<Item=u64>
{
    // Easing function definitions:
    // https://docs.rs/easer/0.2.1/easer/index.html
    // https://easings.net/#easeOutExpo
    fn ease_out_expo(t: f64, b: f64, c: f64, d: f64) -> f64 {
        if t == d {
            b + c
        } else {
            c * (-2.0_f64.powf(-10.0 * t / d) + 1.0) + b
        }
    }

    let smallest = last_level_size as f64;
    let biggest = first_level_size as f64;
    let delta = biggest - smallest;
    let steps = number_of_levels;

    // The easing curve grows from `smallest` to `biggest`; mirroring it around
    // their sum produces the decreasing sequence of level sizes we want.
    (0..=steps).map(move |step| {
        let eased = ease_out_expo(step as f64, smallest, delta, steps as f64);
        ((biggest + smallest) - eased) as u64
    })
}

View File

@ -14,6 +14,7 @@ use memmap::Mmap;
use rayon::prelude::*;
use rayon::ThreadPool;
use crate::facet::FacetType;
use crate::index::Index;
use crate::update::UpdateIndexingStep;
use self::store::{Store, Readers};
@ -22,10 +23,12 @@ use self::merge_function::{
docid_word_positions_merge, documents_merge, facet_field_value_docids_merge,
};
pub use self::transform::{Transform, TransformOutput};
pub use self::facet_level::{clear_field_levels, compute_facet_levels};
use crate::MergeFn;
use super::UpdateBuilder;
mod facet_level;
mod merge_function;
mod store;
mod transform;
@ -327,7 +330,7 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
enum DatabaseType {
Main,
WordDocids,
FacetValuesDocids,
FacetLevel0ValuesDocids,
}
let faceted_fields = self.index.faceted_fields(self.wtxn)?;
@ -427,7 +430,7 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
(DatabaseType::Main, main_readers, main_merge as MergeFn),
(DatabaseType::WordDocids, word_docids_readers, word_docids_merge),
(
DatabaseType::FacetValuesDocids,
DatabaseType::FacetLevel0ValuesDocids,
facet_field_value_docids_readers,
facet_field_value_docids_merge,
),
@ -475,6 +478,9 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
// We write the external documents ids into the main database.
self.index.put_external_documents_ids(self.wtxn, &external_documents_ids)?;
// We get the faceted fields to be able to create the facet levels.
let faceted_fields = self.index.faceted_fields(self.wtxn)?;
// We merge the new documents ids with the existing ones.
documents_ids.union_with(&new_documents_ids);
documents_ids.union_with(&replaced_documents_ids);
@ -557,7 +563,7 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
write_method,
)?;
},
DatabaseType::FacetValuesDocids => {
DatabaseType::FacetLevel0ValuesDocids => {
debug!("Writing the facet values docids into LMDB on disk...");
let db = *self.index.facet_field_id_value_docids.as_polymorph();
write_into_lmdb_database(
@ -577,6 +583,35 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
});
}
debug!("Computing and writing the facet values levels docids into LMDB on disk...");
for (field_id, facet_type) in faceted_fields {
if facet_type == FacetType::String { continue }
clear_field_levels(
self.wtxn,
self.index.facet_field_id_value_docids,
field_id,
)?;
let content = compute_facet_levels(
self.wtxn,
self.index.facet_field_id_value_docids,
chunk_compression_type,
chunk_compression_level,
chunk_fusing_shrink_size,
field_id,
facet_type,
)?;
write_into_lmdb_database(
self.wtxn,
*self.index.facet_field_id_value_docids.as_polymorph(),
content,
|_, _| anyhow::bail!("invalid facet level merging"),
WriteMethod::GetMergePut,
)?;
}
debug_assert_eq!(database_count, total_databases);
info!("Transform output indexed in {:.02?}", before_indexing.elapsed());

View File

@ -19,7 +19,7 @@ use tempfile::tempfile;
use crate::facet::FacetType;
use crate::heed_codec::{BoRoaringBitmapCodec, CboRoaringBitmapCodec};
use crate::heed_codec::facet::{FacetValueStringCodec, FacetValueF64Codec, FacetValueI64Codec};
use crate::heed_codec::facet::{FacetValueStringCodec, FacetLevelValueF64Codec, FacetLevelValueI64Codec};
use crate::tokenizer::{simple_tokenizer, only_token};
use crate::update::UpdateIndexingStep;
use crate::{json_to_string, SmallVec8, SmallVec32, SmallString32, Position, DocumentId};
@ -337,8 +337,8 @@ impl Store {
for ((field_id, value), docids) in iter {
let result = match value {
String(s) => FacetValueStringCodec::bytes_encode(&(field_id, &s)).map(Cow::into_owned),
Float(f) => FacetValueF64Codec::bytes_encode(&(field_id, *f)).map(Cow::into_owned),
Integer(i) => FacetValueI64Codec::bytes_encode(&(field_id, i)).map(Cow::into_owned),
Float(f) => FacetLevelValueF64Codec::bytes_encode(&(field_id, 0, *f, *f)).map(Cow::into_owned),
Integer(i) => FacetLevelValueI64Codec::bytes_encode(&(field_id, 0, i, i)).map(Cow::into_owned),
};
let key = result.context("could not serialize facet key")?;
let bytes = CboRoaringBitmapCodec::bytes_encode(&docids)

View File

@ -412,7 +412,8 @@ mod tests {
let rtxn = index.read_txn().unwrap();
let fields_ids = index.faceted_fields(&rtxn).unwrap();
assert_eq!(fields_ids, hashmap!{ 1 => FacetType::Integer });
let count = index.facet_field_id_value_docids.len(&rtxn).unwrap();
// Only count the field_id 0 and level 0 facet values.
let count = index.facet_field_id_value_docids.prefix_iter(&rtxn, &[1, 0]).unwrap().count();
assert_eq!(count, 3);
drop(rtxn);
@ -425,7 +426,8 @@ mod tests {
wtxn.commit().unwrap();
let rtxn = index.read_txn().unwrap();
let count = index.facet_field_id_value_docids.len(&rtxn).unwrap();
// Only count the field_id 0 and level 0 facet values.
let count = index.facet_field_id_value_docids.prefix_iter(&rtxn, &[1, 0]).unwrap().count();
assert_eq!(count, 4);
drop(rtxn);
}