Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-09-14 00:36:25 +00:00)
Fix PR comments

This commit replaces panicking `.unwrap()` calls in the index extractors with descriptive `SerializationError::Decoding` errors, skips facet numbers that cannot be encoded instead of crashing, and removes the ad-hoc timing instrumentation from the word-pair proximity extractor.
@@ -58,11 +58,12 @@ pub fn extract_fid_docid_facet_values<R: io::Read>(
         // insert facet numbers in sorter
         for number in numbers {
             key_buffer.truncate(size_of::<FieldId>() + size_of::<DocumentId>());
-            let value_bytes = f64_into_bytes(number).unwrap(); // invalid float
-            key_buffer.extend_from_slice(&value_bytes);
-            key_buffer.extend_from_slice(&number.to_be_bytes());
+            if let Some(value_bytes) = f64_into_bytes(number) {
+                key_buffer.extend_from_slice(&value_bytes);
+                key_buffer.extend_from_slice(&number.to_be_bytes());

-            fid_docid_facet_numbers_sorter.insert(&key_buffer, ().as_bytes())?;
+                fid_docid_facet_numbers_sorter.insert(&key_buffer, ().as_bytes())?;
+            }
         }

         // insert normalized and original facet string in sorter
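The fix above swaps a panicking `.unwrap()` for an `if let Some(...)` guard, so facet numbers that cannot be encoded as sortable bytes (NaN, for instance) are skipped instead of crashing indexing. Below is a minimal, self-contained sketch of that pattern; this `f64_into_bytes` is a stand-in for milli's helper, not its actual implementation:

// Stand-in for milli's f64_into_bytes: returns None for floats that
// have no order-preserving byte encoding (here, only NaN).
fn f64_into_bytes(float: f64) -> Option<[u8; 8]> {
    if float.is_nan() {
        return None;
    }
    // Flip bits so that the big-endian bytes sort like the floats do.
    let bits = if float >= 0.0 { float.to_bits() ^ (1 << 63) } else { !float.to_bits() };
    Some(bits.to_be_bytes())
}

fn main() {
    let mut key_buffer: Vec<u8> = Vec::new();
    for number in [1.5_f64, f64::NAN, -2.0] {
        key_buffer.clear();
        if let Some(value_bytes) = f64_into_bytes(number) {
            key_buffer.extend_from_slice(&value_bytes);
            key_buffer.extend_from_slice(&number.to_be_bytes());
            println!("would insert a {}-byte key for {}", key_buffer.len(), number);
        }
        // NaN falls through silently instead of panicking.
    }
}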
@@ -8,6 +8,8 @@ use super::helpers::{
     create_sorter, merge_cbo_roaring_bitmaps, read_u32_ne_bytes, sorter_into_reader,
     try_split_array_at, GrenadParameters, MergeFn,
 };
+use crate::error::SerializationError;
+use crate::index::db_name::DOCID_WORD_POSITIONS;
 use crate::proximity::extract_position;
 use crate::{DocumentId, FieldId, Result};

@@ -36,7 +38,8 @@ pub fn extract_fid_word_count_docids<R: io::Read>(
     let mut current_document_id = None;

     while let Some((key, value)) = docid_word_positions.next()? {
-        let (document_id_bytes, _word_bytes) = try_split_array_at(key).unwrap();
+        let (document_id_bytes, _word_bytes) = try_split_array_at(key)
+            .ok_or_else(|| SerializationError::Decoding { db_name: Some(DOCID_WORD_POSITIONS) })?;
         let document_id = u32::from_be_bytes(document_id_bytes);

         let curr_document_id = *current_document_id.get_or_insert(document_id);
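The same pattern recurs in every extractor below: `try_split_array_at` yields an `Option`, and `ok_or_else` converts a `None` into a `SerializationError::Decoding` that `?` can propagate. A self-contained sketch, with simplified stand-ins for milli's helper and error type (assumptions, not the real definitions):

// Stand-in for milli's try_split_array_at: split off a fixed-size
// prefix, returning None when the slice is too short.
fn try_split_array_at<const N: usize>(slice: &[u8]) -> Option<([u8; N], &[u8])> {
    if slice.len() < N {
        return None;
    }
    let (head, tail) = slice.split_at(N);
    Some((head.try_into().ok()?, tail))
}

// Simplified stand-in for milli's error type.
#[derive(Debug)]
enum SerializationError {
    Decoding { db_name: Option<&'static str> },
}

const DOCID_WORD_POSITIONS: &str = "docid-word-positions";

fn decode_document_id(key: &[u8]) -> Result<u32, SerializationError> {
    // ok_or_else turns the Option into a Result, so ? reports a
    // descriptive error where .unwrap() would have panicked.
    let (document_id_bytes, _word_bytes) = try_split_array_at::<4>(key)
        .ok_or_else(|| SerializationError::Decoding { db_name: Some(DOCID_WORD_POSITIONS) })?;
    Ok(u32::from_be_bytes(document_id_bytes))
}

fn main() {
    assert_eq!(decode_document_id(&[0, 0, 0, 7, b'w']).unwrap(), 7);
    assert!(decode_document_id(&[1, 2]).is_err()); // too short: error, no panic
}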
@@ -8,6 +8,8 @@ use super::helpers::{
     create_sorter, merge_roaring_bitmaps, serialize_roaring_bitmap, sorter_into_reader,
     try_split_array_at, GrenadParameters,
 };
+use crate::error::SerializationError;
+use crate::index::db_name::DOCID_WORD_POSITIONS;
 use crate::Result;

 /// Extracts the word and the documents ids where this word appear.

@@ -31,7 +33,8 @@ pub fn extract_word_docids<R: io::Read>(

     let mut value_buffer = Vec::new();
     while let Some((key, _value)) = docid_word_positions.next()? {
-        let (document_id_bytes, word_bytes) = try_split_array_at(key).unwrap();
+        let (document_id_bytes, word_bytes) = try_split_array_at(key)
+            .ok_or_else(|| SerializationError::Decoding { db_name: Some(DOCID_WORD_POSITIONS) })?;
         let document_id = u32::from_be_bytes(document_id_bytes);

         let bitmap = RoaringBitmap::from_iter(Some(document_id));
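One detail worth a note: `RoaringBitmap::from_iter(Some(document_id))` in the context line works because an `Option` iterates over zero or one items, producing a single-document bitmap. A tiny sketch against the `roaring` crate, which milli uses:

use roaring::RoaringBitmap;

fn main() {
    let document_id: u32 = 42;
    // Option<u32> is an iterator of at most one item, so this builds
    // a bitmap containing exactly one document id.
    let bitmap = RoaringBitmap::from_iter(Some(document_id));
    assert!(bitmap.contains(42));
    assert_eq!(bitmap.len(), 1);
}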
@@ -5,7 +5,10 @@ use super::helpers::{
     create_sorter, merge_cbo_roaring_bitmaps, read_u32_ne_bytes, sorter_into_reader,
     try_split_array_at, GrenadParameters,
 };
+use crate::error::SerializationError;
+use crate::index::db_name::DOCID_WORD_POSITIONS;
 use crate::{DocumentId, Result};

 /// Extracts the word positions and the documents ids where this word appear.
 ///
 /// Returns a grenad reader with the list of extracted words at positions and

@@ -27,7 +30,8 @@ pub fn extract_word_level_position_docids<R: io::Read>(

     let mut key_buffer = Vec::new();
     while let Some((key, value)) = docid_word_positions.next()? {
-        let (document_id_bytes, word_bytes) = try_split_array_at(key).unwrap();
+        let (document_id_bytes, word_bytes) = try_split_array_at(key)
+            .ok_or_else(|| SerializationError::Decoding { db_name: Some(DOCID_WORD_POSITIONS) })?;
         let document_id = DocumentId::from_be_bytes(document_id_bytes);

         for position in read_u32_ne_bytes(value) {
@@ -1,15 +1,14 @@
 use std::cmp::Ordering;
 use std::collections::{BinaryHeap, HashMap};
 use std::fs::File;
-use std::time::{Duration, Instant};
 use std::{cmp, io, mem, str, vec};

-use log::debug;
-
 use super::helpers::{
     create_sorter, merge_cbo_roaring_bitmaps, read_u32_ne_bytes, sorter_into_reader,
     try_split_array_at, GrenadParameters, MergeFn,
 };
+use crate::error::SerializationError;
+use crate::index::db_name::DOCID_WORD_POSITIONS;
 use crate::proximity::{positions_proximity, MAX_DISTANCE};
 use crate::{DocumentId, Result};
@@ -32,16 +31,13 @@ pub fn extract_word_pair_proximity_docids<R: io::Read>(
         max_memory.map(|m| m / 2),
     );

-    let mut number_of_documents = 0;
-    let mut total_time_aggregation = Duration::default();
-    let mut total_time_grenad_insert = Duration::default();
-
     // This map is assumed to not consume a lot of memory.
     let mut document_word_positions_heap = BinaryHeap::new();
     let mut current_document_id = None;

     while let Some((key, value)) = docid_word_positions.next()? {
-        let (document_id_bytes, word_bytes) = try_split_array_at(key).unwrap();
+        let (document_id_bytes, word_bytes) = try_split_array_at(key)
+            .ok_or_else(|| SerializationError::Decoding { db_name: Some(DOCID_WORD_POSITIONS) })?;
         let document_id = u32::from_be_bytes(document_id_bytes);
         let word = str::from_utf8(word_bytes)?;

@@ -52,10 +48,7 @@ pub fn extract_word_pair_proximity_docids<R: io::Read>(
             curr_document_id,
             document_word_positions_heap,
             &mut word_pair_proximity_docids_sorter,
-            &mut total_time_aggregation,
-            &mut total_time_grenad_insert,
         )?;
-        number_of_documents += 1;
         current_document_id = Some(document_id);
     }
@@ -74,18 +67,9 @@ pub fn extract_word_pair_proximity_docids<R: io::Read>(
             document_id,
             document_word_positions_heap,
             &mut word_pair_proximity_docids_sorter,
-            &mut total_time_aggregation,
-            &mut total_time_grenad_insert,
         )?;
     }

-    debug!(
-        "Number of documents {}
-        - we took {:02?} to aggregate proximities
-        - we took {:02?} to grenad insert those proximities",
-        number_of_documents, total_time_aggregation, total_time_grenad_insert,
-    );
-
     sorter_into_reader(word_pair_proximity_docids_sorter, indexer)
 }

@@ -97,10 +81,7 @@ fn document_word_positions_into_sorter<'b>(
     document_id: DocumentId,
     mut word_positions_heap: BinaryHeap<PeekedWordPosition<vec::IntoIter<u32>>>,
     word_pair_proximity_docids_sorter: &mut grenad::Sorter<MergeFn>,
-    total_time_aggregation: &mut Duration,
-    total_time_grenad_insert: &mut Duration,
 ) -> Result<()> {
-    let before_aggregating = Instant::now();
     let mut word_pair_proximity = HashMap::new();
     let mut ordered_peeked_word_positions = Vec::new();
     while !word_positions_heap.is_empty() {

@@ -152,8 +133,6 @@ fn document_word_positions_into_sorter<'b>(
         }
     }

-    *total_time_aggregation += before_aggregating.elapsed();
-
     let mut key_buffer = Vec::new();
     for ((w1, w2), prox) in word_pair_proximity {
         key_buffer.clear();

@@ -162,9 +141,7 @@ fn document_word_positions_into_sorter<'b>(
         key_buffer.extend_from_slice(w2.as_bytes());
         key_buffer.push(prox as u8);

-        let before_grenad_insert = Instant::now();
         word_pair_proximity_docids_sorter.insert(&key_buffer, &document_id.to_ne_bytes())?;
-        *total_time_grenad_insert += before_grenad_insert.elapsed();
     }

     Ok(())
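The insertion loop that survives the cleanup builds each sorter key as word-one bytes, word-two bytes, then the proximity as a single byte (the `w1` append sits just above the hunk shown, so that line is assumed). A small sketch of the key layout:

use std::collections::HashMap;

fn main() {
    // (w1, w2) -> proximity for the pair, as in the word_pair_proximity
    // map above (the values here are made up).
    let mut word_pair_proximity: HashMap<(String, String), u32> = HashMap::new();
    word_pair_proximity.insert(("hello".into(), "world".into()), 1);
    word_pair_proximity.insert(("world".into(), "hello".into()), 2);

    let mut key_buffer: Vec<u8> = Vec::new();
    for ((w1, w2), prox) in word_pair_proximity {
        key_buffer.clear();
        key_buffer.extend_from_slice(w1.as_bytes());
        key_buffer.extend_from_slice(w2.as_bytes());
        key_buffer.push(prox as u8);
        println!("{w1} {w2} {prox} -> {} key bytes", key_buffer.len());
    }
}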
@@ -225,5 +225,6 @@ fn extract_documents_data(
             Ok((docid_fid_facet_numbers_chunk, docid_fid_facet_strings_chunk))
         },
     );

+    Ok((docid_word_positions_chunk?, docid_fid_facet_values_chunks?))
 }
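The added line returns both chunk results in one expression: each computation has already finished, and `?` fires only while the tuple is assembled, so an error from either side propagates at the very end. A minimal sketch of the `Ok((a?, b?))` pattern, with hypothetical stand-in functions:

// Hypothetical stand-ins for the two extraction pipelines.
fn extract_word_positions() -> Result<Vec<u8>, String> {
    Ok(vec![1, 2, 3])
}

fn extract_facet_values() -> Result<Vec<u8>, String> {
    Ok(vec![4, 5])
}

fn extract_documents_data() -> Result<(Vec<u8>, Vec<u8>), String> {
    // Run both computations first; ? only unwraps at return time.
    let words = extract_word_positions();
    let facets = extract_facet_values();
    Ok((words?, facets?))
}

fn main() {
    let (words, facets) = extract_documents_data().unwrap();
    assert_eq!((words.len(), facets.len()), (3, 2));
}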