Add a database containing the docids where each field exists
@@ -30,6 +30,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> {
             word_prefix_position_docids,
             facet_id_f64_docids,
             facet_id_string_docids,
+            facet_id_exists_docids,
             field_id_docid_facet_f64s,
             field_id_docid_facet_strings,
             documents,
@@ -69,6 +70,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> {
         field_id_word_count_docids.clear(self.wtxn)?;
         word_prefix_position_docids.clear(self.wtxn)?;
         facet_id_f64_docids.clear(self.wtxn)?;
+        facet_id_exists_docids.clear(self.wtxn)?;
         facet_id_string_docids.clear(self.wtxn)?;
         field_id_docid_facet_f64s.clear(self.wtxn)?;
         field_id_docid_facet_strings.clear(self.wtxn)?;
@@ -170,6 +170,7 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
             word_position_docids,
             word_prefix_position_docids,
             facet_id_f64_docids,
+            facet_id_exists_docids,
             facet_id_string_docids,
             field_id_docid_facet_f64s,
             field_id_docid_facet_strings,
@@ -424,11 +425,17 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
         }

         // We delete the documents ids that are under the facet field id values.
-        remove_docids_from_facet_field_id_number_docids(
+        remove_docids_from_facet_field_id_docids(
             self.wtxn,
             facet_id_f64_docids,
             &self.to_delete_docids,
         )?;
+        // We delete the documents ids that are under the facet field id values.
+        remove_docids_from_facet_field_id_docids(
+            self.wtxn,
+            facet_id_exists_docids,
+            &self.to_delete_docids,
+        )?;

         remove_docids_from_facet_field_id_string_docids(
             self.wtxn,
@@ -618,7 +625,7 @@ fn remove_docids_from_facet_field_id_string_docids<'a, C, D>(
     Ok(())
 }

-fn remove_docids_from_facet_field_id_number_docids<'a, C>(
+fn remove_docids_from_facet_field_id_docids<'a, C>(
     wtxn: &'a mut heed::RwTxn,
     db: &heed::Database<C, CboRoaringBitmapCodec>,
     to_remove: &RoaringBitmap,
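The hunk stops at the new, more general signature. For context, a helper of this kind in milli typically walks every entry of the database, subtracts the deleted document ids from the stored bitmap, and rewrites or drops the entry. A minimal sketch of that shape, assuming a ByteSlice key and milli's CboRoaringBitmapCodec (the commit's exact body may differ):

use heed::types::ByteSlice;
use roaring::RoaringBitmap;

use crate::heed_codec::CboRoaringBitmapCodec;

// Sketch only, modelled on milli's usual deletion pattern: subtract
// `to_remove` from every bitmap stored in `db`, deleting entries whose
// bitmap becomes empty and rewriting those that shrank.
fn remove_docids_sketch(
    wtxn: &mut heed::RwTxn,
    db: &heed::Database<ByteSlice, CboRoaringBitmapCodec>,
    to_remove: &RoaringBitmap,
) -> heed::Result<()> {
    let mut iter = db.iter_mut(wtxn)?;
    while let Some(result) = iter.next() {
        let (key, mut docids) = result?;
        let previous_len = docids.len();
        docids -= to_remove;
        if docids.is_empty() {
            // no document with this facet entry remains
            iter.del_current()?;
        } else if docids.len() != previous_len {
            let key = key.to_owned();
            iter.put_current(&key, &docids)?;
        }
    }
    Ok(())
}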
@@ -0,0 +1,42 @@
+use std::fs::File;
+use std::io;
+
+use heed::{BytesDecode, BytesEncode};
+
+use super::helpers::{
+    create_sorter, merge_cbo_roaring_bitmaps, sorter_into_reader, GrenadParameters,
+};
+use crate::heed_codec::facet::{FieldIdCodec, FieldIdDocIdCodec};
+use crate::Result;
+
+/// Extracts the documents ids where this field appears.
+///
+/// Returns a grenad reader whose key is the field id encoded
+/// with `FieldIdCodec` and the value is a document_id (u32)
+/// encoded as native-endian bytes.
+#[logging_timer::time]
+pub fn extract_facet_exists_docids<R: io::Read + io::Seek>(
+    docid_fid_facet_number: grenad::Reader<R>,
+    indexer: GrenadParameters,
+) -> Result<grenad::Reader<File>> {
+    let max_memory = indexer.max_memory_by_thread();
+
+    let mut facet_exists_docids_sorter = create_sorter(
+        merge_cbo_roaring_bitmaps,
+        indexer.chunk_compression_type,
+        indexer.chunk_compression_level,
+        indexer.max_nb_chunks,
+        max_memory,
+    );
+
+    let mut cursor = docid_fid_facet_number.into_cursor()?;
+    while let Some((key_bytes, _)) = cursor.move_on_next()? {
+        let (field_id, document_id) = FieldIdDocIdCodec::bytes_decode(key_bytes).unwrap();
+
+        let key_bytes = FieldIdCodec::bytes_encode(&field_id).unwrap();
+
+        facet_exists_docids_sorter.insert(key_bytes, document_id.to_ne_bytes())?;
+    }
+
+    sorter_into_reader(facet_exists_docids_sorter, indexer)
+}
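A note on the value format above: each sorter value is a single document id written as native-endian bytes, yet the merge function is merge_cbo_roaring_bitmaps. That works because milli's CboRoaringBitmapCodec stores small sets as a raw run of native-endian u32s, so a lone u32 already parses as a one-element bitmap. A sketch of that decoding rule (the THRESHOLD constant here is an assumption for illustration):

use roaring::RoaringBitmap;

// Sketch of CboRoaringBitmapCodec's small-set rule: below a size threshold
// the bytes are plain native-endian u32s rather than a serialized
// RoaringBitmap. The exact THRESHOLD value is an assumption.
const THRESHOLD: usize = 7;

fn cbo_decode_sketch(bytes: &[u8]) -> std::io::Result<RoaringBitmap> {
    if bytes.len() <= THRESHOLD * std::mem::size_of::<u32>() {
        // e.g. the single `document_id.to_ne_bytes()` inserted above
        let mut bitmap = RoaringBitmap::new();
        for chunk in bytes.chunks_exact(4) {
            bitmap.insert(u32::from_ne_bytes(chunk.try_into().unwrap()));
        }
        Ok(bitmap)
    } else {
        RoaringBitmap::deserialize_from(bytes)
    }
}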
@@ -20,7 +20,7 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
     obkv_documents: grenad::Reader<R>,
     indexer: GrenadParameters,
     faceted_fields: &HashSet<FieldId>,
-) -> Result<(grenad::Reader<File>, grenad::Reader<File>)> {
+) -> Result<(grenad::Reader<File>, grenad::Reader<File>, grenad::Reader<File>)> {
     let max_memory = indexer.max_memory_by_thread();

     let mut fid_docid_facet_numbers_sorter = create_sorter(
@@ -28,7 +28,7 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
         indexer.chunk_compression_type,
         indexer.chunk_compression_level,
         indexer.max_nb_chunks,
-        max_memory.map(|m| m / 2),
+        max_memory.map(|m| m / 3),
     );

     let mut fid_docid_facet_strings_sorter = create_sorter(
@@ -36,7 +36,15 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
         indexer.chunk_compression_type,
         indexer.chunk_compression_level,
         indexer.max_nb_chunks,
-        max_memory.map(|m| m / 2),
+        max_memory.map(|m| m / 3),
     );

+    let mut fid_docid_facet_exists_sorter = create_sorter(
+        keep_first,
+        indexer.chunk_compression_type,
+        indexer.chunk_compression_level,
+        indexer.max_nb_chunks,
+        max_memory.map(|m| m / 3),
+    );
+
     let mut key_buffer = Vec::new();
@@ -46,15 +54,19 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(

         for (field_id, field_bytes) in obkv.iter() {
             if faceted_fields.contains(&field_id) {
-                let value =
-                    serde_json::from_slice(field_bytes).map_err(InternalError::SerdeJson)?;
-                let (numbers, strings) = extract_facet_values(&value);
-
                 key_buffer.clear();

+                // here, we know already that the document must be added to the “field id exists” database
                 // prefix key with the field_id and the document_id
                 key_buffer.extend_from_slice(&field_id.to_be_bytes());
                 key_buffer.extend_from_slice(&docid_bytes);
+                fid_docid_facet_exists_sorter.insert(&key_buffer, ().as_bytes())?;
+
+                let value =
+                    serde_json::from_slice(field_bytes).map_err(InternalError::SerdeJson)?;
+
+                let (numbers, strings) = extract_facet_values(&value);

                 // insert facet numbers in sorter
                 for number in numbers {
@@ -79,7 +91,8 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(

     Ok((
         sorter_into_reader(fid_docid_facet_numbers_sorter, indexer.clone())?,
-        sorter_into_reader(fid_docid_facet_strings_sorter, indexer)?,
+        sorter_into_reader(fid_docid_facet_strings_sorter, indexer.clone())?,
+        sorter_into_reader(fid_docid_facet_exists_sorter, indexer)?,
     ))
 }
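Two details worth noting in the hunks above: the per-thread memory budget is now split three ways instead of two, and the new sorter merges with keep_first, which is sound because the (field_id, document_id) key alone carries all the information and the value is empty. A sketch of what a keep_first-style grenad merge function looks like (signature modelled on milli's merge helpers, so treat it as an assumption):

use std::borrow::Cow;

// When several values share the same key, keep any one of them; here the
// value is `()` serialized as zero bytes, so all duplicates are identical.
fn keep_first<'a>(_key: &[u8], values: &[Cow<'a, [u8]>]) -> std::io::Result<Cow<'a, [u8]>> {
    Ok(values[0].clone())
}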
@@ -1,4 +1,5 @@
 mod extract_docid_word_positions;
+mod extract_facet_exists_docids;
 mod extract_facet_number_docids;
 mod extract_facet_string_docids;
 mod extract_fid_docid_facet_values;
@@ -16,6 +17,7 @@ use log::debug;
 use rayon::prelude::*;

 use self::extract_docid_word_positions::extract_docid_word_positions;
+use self::extract_facet_exists_docids::extract_facet_exists_docids;
 use self::extract_facet_number_docids::extract_facet_number_docids;
 use self::extract_facet_string_docids::extract_facet_string_docids;
 use self::extract_fid_docid_facet_values::extract_fid_docid_facet_values;
@@ -53,7 +55,7 @@ pub(crate) fn data_from_obkv_documents(
         })
         .collect::<Result<()>>()?;

-    let result: Result<(Vec<_>, (Vec<_>, Vec<_>))> = flattened_obkv_chunks
+    let result: Result<(Vec<_>, (Vec<_>, (Vec<_>, Vec<_>)))> = flattened_obkv_chunks
         .par_bridge()
         .map(|flattened_obkv_chunks| {
             send_and_extract_flattened_documents_data(
@@ -72,7 +74,10 @@ pub(crate) fn data_from_obkv_documents(

     let (
         docid_word_positions_chunks,
-        (docid_fid_facet_numbers_chunks, docid_fid_facet_strings_chunks),
+        (
+            docid_fid_facet_numbers_chunks,
+            (docid_fid_facet_strings_chunks, docid_fid_facet_exists_chunks),
+        ),
     ) = result?;

     spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
@@ -137,6 +142,15 @@ pub(crate) fn data_from_obkv_documents(
         TypedChunk::FieldIdFacetNumberDocids,
         "field-id-facet-number-docids",
     );
+    spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
+        docid_fid_facet_exists_chunks.clone(),
+        indexer.clone(),
+        lmdb_writer_sx.clone(),
+        extract_facet_exists_docids,
+        merge_cbo_roaring_bitmaps,
+        TypedChunk::FieldIdFacetExistsDocids,
+        "field-id-facet-exists-docids",
+    );

     Ok(())
 }
@@ -197,6 +211,7 @@ fn send_original_documents_data(
 /// - docid_word_positions
 /// - docid_fid_facet_numbers
 /// - docid_fid_facet_strings
+/// - docid_fid_facet_exists
 fn send_and_extract_flattened_documents_data(
     flattened_documents_chunk: Result<grenad::Reader<File>>,
     indexer: GrenadParameters,
@@ -209,7 +224,10 @@ fn send_and_extract_flattened_documents_data(
     max_positions_per_attributes: Option<u32>,
 ) -> Result<(
     grenad::Reader<CursorClonableMmap>,
-    (grenad::Reader<CursorClonableMmap>, grenad::Reader<CursorClonableMmap>),
+    (
+        grenad::Reader<CursorClonableMmap>,
+        (grenad::Reader<CursorClonableMmap>, grenad::Reader<CursorClonableMmap>),
+    ),
 )> {
     let flattened_documents_chunk =
         flattened_documents_chunk.and_then(|c| unsafe { as_cloneable_grenad(&c) })?;
@@ -250,12 +268,15 @@ fn send_and_extract_flattened_documents_data(
             Ok(docid_word_positions_chunk)
         },
         || {
-            let (docid_fid_facet_numbers_chunk, docid_fid_facet_strings_chunk) =
-                extract_fid_docid_facet_values(
-                    flattened_documents_chunk.clone(),
-                    indexer.clone(),
-                    faceted_fields,
-                )?;
+            let (
+                docid_fid_facet_numbers_chunk,
+                docid_fid_facet_strings_chunk,
+                docid_fid_facet_exists_chunk,
+            ) = extract_fid_docid_facet_values(
+                flattened_documents_chunk.clone(),
+                indexer.clone(),
+                faceted_fields,
+            )?;

             // send docid_fid_facet_numbers_chunk to DB writer
             let docid_fid_facet_numbers_chunk =
@@ -273,7 +294,13 @@ fn send_and_extract_flattened_documents_data(
                 docid_fid_facet_strings_chunk.clone(),
             )));

-            Ok((docid_fid_facet_numbers_chunk, docid_fid_facet_strings_chunk))
+            let docid_fid_facet_exists_chunk =
+                unsafe { as_cloneable_grenad(&docid_fid_facet_exists_chunk)? };
+
+            Ok((
+                docid_fid_facet_numbers_chunk,
+                (docid_fid_facet_strings_chunk, docid_fid_facet_exists_chunk),
+            ))
         },
     );
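The nested tuple types above, such as (Vec<_>, (Vec<_>, (Vec<_>, Vec<_>))), are not arbitrary: rayon can only split a parallel iterator of pairs into a pair of collections, so a three-way unzip has to be expressed as nested pairs. A small self-contained illustration:

use rayon::prelude::*;

fn main() {
    // `unzip` splits pairs; nesting a pair inside a pair yields the
    // three-way split used for the number/string/exists chunk streams.
    let (ids, (doubles, triples)): (Vec<u32>, (Vec<u32>, Vec<u32>)) =
        (0u32..4).into_par_iter().map(|i| (i, (i * 2, i * 3))).unzip();
    assert_eq!(ids, vec![0, 1, 2, 3]);
    assert_eq!(doubles, vec![0, 2, 4, 6]);
    assert_eq!(triples, vec![0, 3, 6, 9]);
}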
@@ -1931,4 +1931,153 @@ mod tests {

         assert_eq!(ids.len(), map.len());
     }
+
+    #[test]
+    fn index_documents_check_exists_database_reindex() {
+        let path = tempfile::tempdir().unwrap();
+        let mut options = EnvOpenOptions::new();
+        options.map_size(10 * 1024 * 1024); // 10 MB
+        let index = Index::new(options, &path).unwrap();
+
+        let mut wtxn = index.write_txn().unwrap();
+        let content = documents!([
+            {
+                "id": 0,
+                "colour": 0,
+            },
+            {
+                "id": 1,
+                "colour": []
+            },
+            {
+                "id": 2,
+                "colour": {}
+            },
+            {
+                "id": 3,
+                "colour": null
+            },
+            {
+                "id": 4,
+                "colour": [1]
+            },
+            {
+                "id": 5
+            },
+            {
+                "id": 6,
+                "colour": {
+                    "green": 1
+                }
+            }
+        ]);
+
+        let config = IndexerConfig::default();
+        let indexing_config = IndexDocumentsConfig::default();
+        let mut builder =
+            IndexDocuments::new(&mut wtxn, &index, &config, indexing_config.clone(), |_| ())
+                .unwrap();
+        builder.add_documents(content).unwrap();
+        builder.execute().unwrap();
+
+        wtxn.commit().unwrap();
+
+        let mut wtxn = index.write_txn().unwrap();
+        let mut builder = update::Settings::new(&mut wtxn, &index, &config);
+
+        let faceted_fields = hashset!(S("colour"));
+        builder.set_filterable_fields(faceted_fields);
+        builder.execute(|_| ()).unwrap();
+        wtxn.commit().unwrap();
+
+        let rtxn = index.read_txn().unwrap();
+        let facets = index.faceted_fields(&rtxn).unwrap();
+        assert_eq!(facets, hashset!(S("colour"), S("colour.green")));
+
+        let colour_id = index.fields_ids_map(&rtxn).unwrap().id("colour").unwrap();
+        let colour_green_id = index.fields_ids_map(&rtxn).unwrap().id("colour.green").unwrap();
+
+        let bitmap_colour = index.facet_id_exists_docids.get(&rtxn, &colour_id).unwrap().unwrap();
+        assert_eq!(bitmap_colour.into_iter().collect::<Vec<_>>(), vec![0, 1, 2, 3, 4, 6]);
+
+        let bitmap_colour_green =
+            index.facet_id_exists_docids.get(&rtxn, &colour_green_id).unwrap().unwrap();
+        assert_eq!(bitmap_colour_green.into_iter().collect::<Vec<_>>(), vec![6]);
+    }
+
+    #[test]
+    fn index_documents_check_exists_database() {
+        let path = tempfile::tempdir().unwrap();
+        let mut options = EnvOpenOptions::new();
+        options.map_size(10 * 1024 * 1024); // 10 MB
+        let index = Index::new(options, &path).unwrap();
+
+        let config = IndexerConfig::default();
+
+        let mut wtxn = index.write_txn().unwrap();
+        let mut builder = update::Settings::new(&mut wtxn, &index, &config);
+
+        let faceted_fields = hashset!(S("colour"));
+        builder.set_filterable_fields(faceted_fields);
+        builder.execute(|_| ()).unwrap();
+        wtxn.commit().unwrap();
+
+        let content = documents!([
+            {
+                "id": 0,
+                "colour": 0,
+            },
+            {
+                "id": 1,
+                "colour": []
+            },
+            {
+                "id": 2,
+                "colour": {}
+            },
+            {
+                "id": 3,
+                "colour": null
+            },
+            {
+                "id": 4,
+                "colour": [1]
+            },
+            {
+                "id": 5
+            },
+            {
+                "id": 6,
+                "colour": {
+                    "green": 1
+                }
+            }
+        ]);
+
+        let indexing_config = IndexDocumentsConfig::default();
+
+        let mut wtxn = index.write_txn().unwrap();
+
+        let mut builder =
+            IndexDocuments::new(&mut wtxn, &index, &config, indexing_config.clone(), |_| ())
+                .unwrap();
+        builder.add_documents(content).unwrap();
+        builder.execute().unwrap();
+
+        wtxn.commit().unwrap();
+
+        let rtxn = index.read_txn().unwrap();
+        let facets = index.faceted_fields(&rtxn).unwrap();
+        assert_eq!(facets, hashset!(S("colour"), S("colour.green")));
+
+        let colour_id = index.fields_ids_map(&rtxn).unwrap().id("colour").unwrap();
+        let colour_green_id = index.fields_ids_map(&rtxn).unwrap().id("colour.green").unwrap();
+
+        let bitmap_colour = index.facet_id_exists_docids.get(&rtxn, &colour_id).unwrap().unwrap();
+        assert_eq!(bitmap_colour.into_iter().collect::<Vec<_>>(), vec![0, 1, 2, 3, 4, 6]);
+
+        let bitmap_colour_green =
+            index.facet_id_exists_docids.get(&rtxn, &colour_green_id).unwrap().unwrap();
+        assert_eq!(bitmap_colour_green.into_iter().collect::<Vec<_>>(), vec![6]);
+    }
 }
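Both tests read the new database directly: a field counts as existing whatever its value (0, [], {}, null, [1], or a nested object), a missing field (document 5) does not, and the nested colour.green gets its own field id. For reference, a hypothetical consumer-side helper (not part of this commit; the EXISTS filter built on top of this database landed separately):

use milli::Index;
use roaring::RoaringBitmap;

// Hypothetical helper mirroring what the tests assert: the set of
// documents in which `field` is present at all.
fn docids_where_field_exists(index: &Index, field: &str) -> heed::Result<RoaringBitmap> {
    let rtxn = index.read_txn()?;
    let fields_ids_map = index.fields_ids_map(&rtxn)?;
    let docids = match fields_ids_map.id(field) {
        Some(field_id) => index.facet_id_exists_docids.get(&rtxn, &field_id)?.unwrap_or_default(),
        None => RoaringBitmap::new(),
    };
    Ok(docids)
}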
@@ -35,6 +35,7 @@ pub(crate) enum TypedChunk {
     WordPairProximityDocids(grenad::Reader<File>),
     FieldIdFacetStringDocids(grenad::Reader<File>),
     FieldIdFacetNumberDocids(grenad::Reader<File>),
+    FieldIdFacetExistsDocids(grenad::Reader<File>),
     GeoPoints(grenad::Reader<File>),
 }
@@ -146,6 +147,18 @@ pub(crate) fn write_typed_chunk_into_index(
             )?;
             is_merged_database = true;
         }
+        TypedChunk::FieldIdFacetExistsDocids(facet_id_exists_docids_iter) => {
+            append_entries_into_database(
+                facet_id_exists_docids_iter,
+                &index.facet_id_exists_docids,
+                wtxn,
+                index_is_empty,
+                |value, _buffer| Ok(value),
+                merge_cbo_roaring_bitmaps,
+            )
+            .unwrap();
+            is_merged_database = true;
+        }
         TypedChunk::WordPairProximityDocids(word_pair_proximity_docids_iter) => {
             append_entries_into_database(
                 word_pair_proximity_docids_iter,