mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-11-04 09:56:28 +00:00)

	Intern more values
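This commit is part of the new search-engine prototype under `milli/src/search/new`. It moves `WordDerivations` behind the same interning mechanism already used for words and phrases: query terms now carry a small, copyable `Interned<WordDerivations>` id instead of owning the whole structure, which is stored once in an `Interner<WordDerivations>` on the `SearchContext`. The hunks below only use the interner API (`Interner::insert`, `Interner::get`); `interner.rs` itself is not touched by this commit, so the following is a minimal sketch of an assumed shape for that API, given here only to make the diff easier to follow:

```rust
use std::hash::Hash;
use std::marker::PhantomData;

use fxhash::FxHashMap;

/// A cheap, copyable handle to a value owned by an `Interner<T>`.
pub struct Interned<T> {
    idx: u32,
    _phantom: PhantomData<T>,
}

impl<T> Clone for Interned<T> {
    fn clone(&self) -> Self {
        *self
    }
}
impl<T> Copy for Interned<T> {}

pub struct Interner<T> {
    stable_store: Vec<T>,
    lookup: FxHashMap<T, Interned<T>>,
}

impl<T> Default for Interner<T> {
    fn default() -> Self {
        Interner { stable_store: Vec::new(), lookup: FxHashMap::default() }
    }
}

impl<T: Clone + Eq + Hash> Interner<T> {
    /// Intern `value`, deduplicating it against previous insertions.
    pub fn insert(&mut self, value: T) -> Interned<T> {
        if let Some(interned) = self.lookup.get(&value) {
            *interned
        } else {
            self.stable_store.push(value.clone());
            let interned =
                Interned { idx: (self.stable_store.len() - 1) as u32, _phantom: PhantomData };
            self.lookup.insert(value, interned);
            interned
        }
    }

    /// Resolve an id back to a reference to the interned value.
    pub fn get(&self, interned: Interned<T>) -> &T {
        &self.stable_store[interned.idx as usize]
    }
}
```

The `Eq + Hash` bound on `insert` is why the diff below adds `PartialEq, Eq, Hash` derives to `WordDerivations` and `QueryTerm`.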
@@ -5,9 +5,8 @@ use fxhash::FxHashMap;
 use heed::types::ByteSlice;
 use heed::{BytesEncode, Database, RoTxn};
 
-use super::interner::Interned;
-use super::SearchContext;
-use crate::Result;
+use super::interner::{Interned, Interner};
+use crate::{Index, Result};
 
 /// A cache storing pointers to values in the LMDB databases.
 ///
@@ -26,7 +25,7 @@ pub struct DatabaseCache<'search> {
     pub exact_word_docids: FxHashMap<Interned<String>, Option<&'search [u8]>>,
     pub word_prefix_docids: FxHashMap<Interned<String>, Option<&'search [u8]>>,
 }
-impl<'search> SearchContext<'search> {
+impl<'search> DatabaseCache<'search> {
     fn get_value<'v, K1, KC>(
         txn: &'search RoTxn,
         cache_key: K1,
@@ -50,82 +49,92 @@ impl<'search> SearchContext<'search> {
     }
 
     /// Retrieve or insert the given value in the `word_docids` database.
-    pub fn get_word_docids(&mut self, word: Interned<String>) -> Result<Option<&'search [u8]>> {
+    pub fn get_word_docids(
+        &mut self,
+        index: &Index,
+        txn: &'search RoTxn,
+        word_interner: &Interner<String>,
+        word: Interned<String>,
+    ) -> Result<Option<&'search [u8]>> {
         Self::get_value(
-            self.txn,
+            txn,
             word,
-            self.word_interner.get(word).as_str(),
-            &mut self.db_cache.word_docids,
-            self.index.word_docids.remap_data_type::<ByteSlice>(),
+            word_interner.get(word).as_str(),
+            &mut self.word_docids,
+            index.word_docids.remap_data_type::<ByteSlice>(),
         )
     }
     /// Retrieve or insert the given value in the `word_prefix_docids` database.
     pub fn get_word_prefix_docids(
         &mut self,
+        index: &Index,
+        txn: &'search RoTxn,
+        word_interner: &Interner<String>,
         prefix: Interned<String>,
     ) -> Result<Option<&'search [u8]>> {
         Self::get_value(
-            self.txn,
+            txn,
             prefix,
-            self.word_interner.get(prefix).as_str(),
-            &mut self.db_cache.word_prefix_docids,
-            self.index.word_prefix_docids.remap_data_type::<ByteSlice>(),
+            word_interner.get(prefix).as_str(),
+            &mut self.word_prefix_docids,
+            index.word_prefix_docids.remap_data_type::<ByteSlice>(),
         )
     }
 
     pub fn get_word_pair_proximity_docids(
         &mut self,
+        index: &Index,
+        txn: &'search RoTxn,
+        word_interner: &Interner<String>,
         word1: Interned<String>,
         word2: Interned<String>,
         proximity: u8,
     ) -> Result<Option<&'search [u8]>> {
         Self::get_value(
-            self.txn,
+            txn,
             (proximity, word1, word2),
-            &(
-                proximity,
-                self.word_interner.get(word1).as_str(),
-                self.word_interner.get(word2).as_str(),
-            ),
-            &mut self.db_cache.word_pair_proximity_docids,
-            self.index.word_pair_proximity_docids.remap_data_type::<ByteSlice>(),
+            &(proximity, word_interner.get(word1).as_str(), word_interner.get(word2).as_str()),
+            &mut self.word_pair_proximity_docids,
+            index.word_pair_proximity_docids.remap_data_type::<ByteSlice>(),
        )
     }
 
     pub fn get_word_prefix_pair_proximity_docids(
         &mut self,
+        index: &Index,
+        txn: &'search RoTxn,
+        word_interner: &Interner<String>,
         word1: Interned<String>,
         prefix2: Interned<String>,
         proximity: u8,
     ) -> Result<Option<&'search [u8]>> {
         Self::get_value(
-            self.txn,
+            txn,
             (proximity, word1, prefix2),
-            &(
-                proximity,
-                self.word_interner.get(word1).as_str(),
-                self.word_interner.get(prefix2).as_str(),
-            ),
-            &mut self.db_cache.word_prefix_pair_proximity_docids,
-            self.index.word_prefix_pair_proximity_docids.remap_data_type::<ByteSlice>(),
+            &(proximity, word_interner.get(word1).as_str(), word_interner.get(prefix2).as_str()),
+            &mut self.word_prefix_pair_proximity_docids,
+            index.word_prefix_pair_proximity_docids.remap_data_type::<ByteSlice>(),
         )
     }
     pub fn get_prefix_word_pair_proximity_docids(
         &mut self,
+        index: &Index,
+        txn: &'search RoTxn,
+        word_interner: &Interner<String>,
         left_prefix: Interned<String>,
         right: Interned<String>,
         proximity: u8,
     ) -> Result<Option<&'search [u8]>> {
         Self::get_value(
-            self.txn,
+            txn,
             (proximity, left_prefix, right),
             &(
                 proximity,
-                self.word_interner.get(left_prefix).as_str(),
-                self.word_interner.get(right).as_str(),
+                word_interner.get(left_prefix).as_str(),
+                word_interner.get(right).as_str(),
             ),
-            &mut self.db_cache.prefix_word_pair_proximity_docids,
-            self.index.prefix_word_pair_proximity_docids.remap_data_type::<ByteSlice>(),
+            &mut self.prefix_word_pair_proximity_docids,
+            index.prefix_word_pair_proximity_docids.remap_data_type::<ByteSlice>(),
         )
     }
 }
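The file above (the database cache, `db_cache.rs` judging by the `use db_cache::DatabaseCache;` import later in the diff) moves every `get_*_docids` getter from `SearchContext` onto `DatabaseCache` itself. The point of threading `index`, `txn` and the word interner through as explicit parameters is borrow splitting: a call like `ctx.get_word_docids(word)` borrowed the whole context mutably for the duration of the call, while the new form only borrows `db_cache` mutably and takes everything else by shared reference. A before/after sketch of a call site, with error handling and surrounding code elided:

```rust
// Before: the method on SearchContext locks up all of `ctx`.
// let bytes = ctx.get_word_docids(word)?;

// After: destructuring splits the borrows, exactly as the later hunks do.
let SearchContext { index, txn, db_cache, word_interner, .. } = ctx;
let bytes = db_cache.get_word_docids(index, txn, word_interner, word)?;
```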
@@ -432,7 +432,10 @@ results.{random} {{
         file: &mut File,
     ) {
         match &node {
-            QueryNode::Term(LocatedQueryTerm { value, .. }) => match value {
+            QueryNode::Term(LocatedQueryTerm { value, .. }) => match ctx
+                .query_term_interner
+                .get(*value)
+            {
                 QueryTerm::Phrase { phrase } => {
                     let phrase = ctx.phrase_interner.get(*phrase);
                     let phrase_str = phrase.description(&ctx.word_interner);
@@ -593,7 +596,7 @@ shape: class"
             graph.edges_store[edge_idx as usize].as_ref().unwrap();
         let source_node = &graph.query_graph.nodes[*source_node as usize];
         let source_node_desc = match source_node {
-            QueryNode::Term(term) => match &term.value {
+            QueryNode::Term(term) => match ctx.query_term_interner.get(term.value) {
                 QueryTerm::Phrase { phrase } => {
                     let phrase = ctx.phrase_interner.get(*phrase);
                     phrase.description(&ctx.word_interner)
@@ -608,7 +611,7 @@ shape: class"
         };
         let dest_node = &graph.query_graph.nodes[*dest_node as usize];
         let dest_node_desc = match dest_node {
-            QueryNode::Term(term) => match &term.value {
+            QueryNode::Term(term) => match ctx.query_term_interner.get(term.value) {
                 QueryTerm::Phrase { phrase } => {
                     let phrase = ctx.phrase_interner.get(*phrase);
                     phrase.description(&ctx.word_interner)
@@ -5,10 +5,14 @@ mod logger;
 mod query_graph;
 mod query_term;
 mod ranking_rule_graph;
+
 mod ranking_rules;
 mod resolve_query_graph;
+// TODO: documentation + comments
 mod small_bitmap;
+// TODO: documentation + comments
 mod sort;
+// TODO: documentation + comments
 mod words;
 
 pub use logger::{DefaultSearchLogger, SearchLogger};
@@ -19,16 +23,16 @@ use charabia::Tokenize;
 use db_cache::DatabaseCache;
 use heed::RoTxn;
 use query_graph::{QueryGraph, QueryNode};
-pub use ranking_rules::{
-    apply_ranking_rules, RankingRule, RankingRuleOutput, RankingRuleOutputIter,
-    RankingRuleOutputIterWrapper, RankingRuleQueryTrait,
-};
+pub use ranking_rules::{bucket_sort, RankingRule, RankingRuleOutput, RankingRuleQueryTrait};
 use roaring::RoaringBitmap;
 
 use self::interner::Interner;
-use self::query_term::Phrase;
-use self::resolve_query_graph::{resolve_query_graph, NodeDocIdsCache};
+use self::query_term::{Phrase, WordDerivations};
+use self::resolve_query_graph::{resolve_query_graph, QueryTermDocIdsCache};
 use crate::search::new::graph_based_ranking_rule::GraphBasedRankingRule;
 use crate::search::new::query_term::located_query_terms_from_string;
 use crate::search::new::ranking_rule_graph::{ProximityGraph, TypoGraph};
+use crate::search::new::words::Words;
 use crate::{Filter, Index, Result, TermsMatchingStrategy};
 
 pub enum BitmapOrAllRef<'s> {
@@ -42,7 +46,8 @@ pub struct SearchContext<'search> {
     pub db_cache: DatabaseCache<'search>,
     pub word_interner: Interner<String>,
     pub phrase_interner: Interner<Phrase>,
-    pub node_docids_cache: NodeDocIdsCache,
+    pub derivations_interner: Interner<WordDerivations>,
+    pub query_term_docids: QueryTermDocIdsCache,
 }
 impl<'search> SearchContext<'search> {
     pub fn new(index: &'search Index, txn: &'search RoTxn<'search>) -> Self {
@@ -52,7 +57,8 @@ impl<'search> SearchContext<'search> {
             db_cache: <_>::default(),
             word_interner: <_>::default(),
             phrase_interner: <_>::default(),
-            node_docids_cache: <_>::default(),
+            derivations_interner: <_>::default(),
+            query_term_docids: <_>::default(),
         }
     }
 }
@@ -129,5 +135,368 @@ pub fn execute_search<'search>(
 
     logger.initial_universe(&universe);
 
-    apply_ranking_rules(ctx, &graph, &universe, from, length, logger)
+    let words = &mut Words::new(TermsMatchingStrategy::Last);
+    // let sort = &mut Sort::new(index, txn, "release_date".to_owned(), true)?;
+    let proximity = &mut GraphBasedRankingRule::<ProximityGraph>::new("proximity".to_owned());
+    let typo = &mut GraphBasedRankingRule::<TypoGraph>::new("typo".to_owned());
+    // TODO: ranking rules given as argument
+    let ranking_rules: Vec<&mut dyn RankingRule<'search, QueryGraph>> =
+        vec![words, typo, proximity /*sort*/];
+
+    bucket_sort(ctx, ranking_rules, &graph, &universe, from, length, logger)
 }
+
+#[cfg(test)]
+mod tests {
+    // use crate::allocator::ALLOC;
+    use std::fs::File;
+    use std::io::{BufRead, BufReader, Cursor, Seek};
+    use std::time::Instant;
+
+    use big_s::S;
+    use heed::EnvOpenOptions;
+    use maplit::hashset;
+
+    use crate::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
+    // use crate::search::new::logger::detailed::DetailedSearchLogger;
+    use crate::search::new::logger::DefaultSearchLogger;
+    use crate::search::new::{execute_search, SearchContext};
+    use crate::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings};
+    use crate::{Criterion, Index, Object, Search, TermsMatchingStrategy};
+
+    #[test]
+    fn search_wiki_new() {
+        let mut options = EnvOpenOptions::new();
+        options.map_size(100 * 1024 * 1024 * 1024); // 100 GB
+
+        let index = Index::new(options, "data_wiki").unwrap();
+        let txn = index.read_txn().unwrap();
+
+        println!("nbr docids: {}", index.documents_ids(&txn).unwrap().len());
+
+        // loop {
+        let start = Instant::now();
+
+        let mut logger = crate::search::new::logger::detailed::DetailedSearchLogger::new("log");
+        let mut ctx = SearchContext::new(&index, &txn);
+        let results = execute_search(
+            &mut ctx,
+            "zero config",
+            None,
+            0,
+            20,
+            // &mut DefaultSearchLogger,
+            &mut logger,
+        )
+        .unwrap();
+
+        logger.write_d2_description(&mut ctx);
+
+        let elapsed = start.elapsed();
+        println!("{}us", elapsed.as_micros());
+
+        let _documents = index
+            .documents(&txn, results.iter().copied())
+            .unwrap()
+            .into_iter()
+            .map(|(id, obkv)| {
+                let mut object = serde_json::Map::default();
+                for (fid, fid_name) in index.fields_ids_map(&txn).unwrap().iter() {
+                    let value = obkv.get(fid).unwrap();
+                    let value: serde_json::Value = serde_json::from_slice(value).unwrap();
+                    object.insert(fid_name.to_owned(), value);
+                }
+                (id, serde_json::to_string_pretty(&object).unwrap())
+            })
+            .collect::<Vec<_>>();
+
+        println!("{}us: {:?}", elapsed.as_micros(), results);
+        // }
+        // for (id, _document) in documents {
+        //     println!("{id}:");
+        //     // println!("{document}");
+        // }
+    }
+
+    #[test]
+    fn search_wiki_old() {
+        let mut options = EnvOpenOptions::new();
+        options.map_size(100 * 1024 * 1024 * 1024); // 100 GB
+
+        let index = Index::new(options, "data_wiki").unwrap();
+
+        let txn = index.read_txn().unwrap();
+
+        let rr = index.criteria(&txn).unwrap();
+        println!("{rr:?}");
+
+        let start = Instant::now();
+
+        let mut s = Search::new(&txn, &index);
+        s.query("which a the releases from poison by the government");
+        s.terms_matching_strategy(TermsMatchingStrategy::Last);
+        s.criterion_implementation_strategy(crate::CriterionImplementationStrategy::OnlySetBased);
+        let docs = s.execute().unwrap();
+
+        let elapsed = start.elapsed();
+
+        let documents = index
+            .documents(&txn, docs.documents_ids.iter().copied())
+            .unwrap()
+            .into_iter()
+            .map(|(id, obkv)| {
+                let mut object = serde_json::Map::default();
+                for (fid, fid_name) in index.fields_ids_map(&txn).unwrap().iter() {
+                    let value = obkv.get(fid).unwrap();
+                    let value: serde_json::Value = serde_json::from_slice(value).unwrap();
+                    object.insert(fid_name.to_owned(), value);
+                }
+                (id, serde_json::to_string_pretty(&object).unwrap())
+            })
+            .collect::<Vec<_>>();
+
+        println!("{}us: {:?}", elapsed.as_micros(), docs.documents_ids);
+        for (id, _document) in documents {
+            println!("{id}:");
+            // println!("{document}");
+        }
+    }
+    #[test]
+    fn search_movies_new() {
+        let mut options = EnvOpenOptions::new();
+        options.map_size(100 * 1024 * 1024 * 1024); // 100 GB
+
+        let index = Index::new(options, "data_movies").unwrap();
+        let txn = index.read_txn().unwrap();
+
+        // let primary_key = index.primary_key(&txn).unwrap().unwrap();
+        // let primary_key = index.fields_ids_map(&txn).unwrap().id(primary_key).unwrap();
+        // loop {
+        let start = Instant::now();
+
+        let mut logger = crate::search::new::logger::detailed::DetailedSearchLogger::new("log");
+        let mut ctx = SearchContext::new(&index, &txn);
+        let results = execute_search(
+            &mut ctx,
+            "releases from poison by the government",
+            None,
+            0,
+            20,
+            // &mut DefaultSearchLogger,
+            &mut logger,
+        )
+        .unwrap();
+
+        logger.write_d2_description(&mut ctx);
+
+        let elapsed = start.elapsed();
+
+        // let ids = index
+        //     .documents(&txn, results.iter().copied())
+        //     .unwrap()
+        //     .into_iter()
+        //     .map(|x| {
+        //         let obkv = &x.1;
+        //         let id = obkv.get(primary_key).unwrap();
+        //         let id: serde_json::Value = serde_json::from_slice(id).unwrap();
+        //         id.as_str().unwrap().to_owned()
+        //     })
+        //     .collect::<Vec<_>>();
+
+        println!("{}us: {results:?}", elapsed.as_micros());
+        // println!("external ids: {ids:?}");
+        // }
+    }
+
+    #[test]
+    fn search_movies_old() {
+        let mut options = EnvOpenOptions::new();
+        options.map_size(100 * 1024 * 1024 * 1024); // 100 GB
+
+        let index = Index::new(options, "data_movies").unwrap();
+
+        let txn = index.read_txn().unwrap();
+
+        let rr = index.criteria(&txn).unwrap();
+        println!("{rr:?}");
+
+        let primary_key = index.primary_key(&txn).unwrap().unwrap();
+        let primary_key = index.fields_ids_map(&txn).unwrap().id(primary_key).unwrap();
+
+        let start = Instant::now();
+
+        let mut s = Search::new(&txn, &index);
+        s.query("which a the releases from poison by the government");
+        s.terms_matching_strategy(TermsMatchingStrategy::Last);
+        s.criterion_implementation_strategy(crate::CriterionImplementationStrategy::OnlySetBased);
+        let docs = s.execute().unwrap();
+
+        let elapsed = start.elapsed();
+
+        let ids = index
+            .documents(&txn, docs.documents_ids.iter().copied())
+            .unwrap()
+            .into_iter()
+            .map(|x| {
+                let obkv = &x.1;
+                let id = obkv.get(primary_key).unwrap();
+                let id: serde_json::Value = serde_json::from_slice(id).unwrap();
+                id.as_str().unwrap().to_owned()
+            })
+            .collect::<Vec<_>>();
+
+        println!("{}us: {:?}", elapsed.as_micros(), docs.documents_ids);
+        println!("external ids: {ids:?}");
+    }
+
+    #[test]
+    fn _settings_movies() {
+        let mut options = EnvOpenOptions::new();
+        options.map_size(100 * 1024 * 1024 * 1024); // 100 GB
+
+        let index = Index::new(options, "data_movies").unwrap();
+        let mut wtxn = index.write_txn().unwrap();
+
+        let config = IndexerConfig::default();
+        let mut builder = Settings::new(&mut wtxn, &index, &config);
+
+        builder.set_min_word_len_one_typo(5);
+        builder.set_min_word_len_two_typos(100);
+        builder.set_sortable_fields(hashset! { S("release_date") });
+        builder.set_criteria(vec![
+            Criterion::Words,
+            Criterion::Typo,
+            Criterion::Proximity,
+            Criterion::Asc("release_date".to_owned()),
+        ]);
+
+        builder.execute(|_| (), || false).unwrap();
+        wtxn.commit().unwrap();
+    }
+
+    #[test]
+    fn _index_movies() {
+        let mut options = EnvOpenOptions::new();
+        options.map_size(100 * 1024 * 1024 * 1024); // 100 GB
+
+        let index = Index::new(options, "data_movies").unwrap();
+        let mut wtxn = index.write_txn().unwrap();
+
+        let primary_key = "id";
+        let searchable_fields = vec!["title", "overview"];
+        let filterable_fields = vec!["release_date", "genres"];
+
+        let config = IndexerConfig::default();
+        let mut builder = Settings::new(&mut wtxn, &index, &config);
+        builder.set_primary_key(primary_key.to_owned());
+        let searchable_fields = searchable_fields.iter().map(|s| s.to_string()).collect();
+        builder.set_searchable_fields(searchable_fields);
+        let filterable_fields = filterable_fields.iter().map(|s| s.to_string()).collect();
+        builder.set_filterable_fields(filterable_fields);
+
+        builder.set_min_word_len_one_typo(5);
+        builder.set_min_word_len_two_typos(100);
+        builder.set_criteria(vec![Criterion::Words, Criterion::Proximity]);
+        builder.execute(|_| (), || false).unwrap();
+
+        let config = IndexerConfig::default();
+        let indexing_config = IndexDocumentsConfig::default();
+        let builder =
+            IndexDocuments::new(&mut wtxn, &index, &config, indexing_config, |_| (), || false)
+                .unwrap();
+
+        let documents = documents_from(
+            "/Users/meilisearch/Documents/milli2/benchmarks/datasets/movies.json",
+            "json",
+        );
+        let (builder, user_error) = builder.add_documents(documents).unwrap();
+        user_error.unwrap();
+        builder.execute().unwrap();
+        wtxn.commit().unwrap();
+
+        index.prepare_for_closing().wait();
+    }
+    #[test]
+    fn _index_wiki() {
+        let mut options = EnvOpenOptions::new();
+        options.map_size(100 * 1024 * 1024 * 1024); // 100 GB
+
+        let index = Index::new(options, "data_wiki").unwrap();
+        let mut wtxn = index.write_txn().unwrap();
+
+        // let primary_key = "id";
+        let searchable_fields = vec!["body", "title", "url"];
+        // let filterable_fields = vec![];
+        let config = IndexerConfig::default();
+        let mut builder = Settings::new(&mut wtxn, &index, &config);
+        // builder.set_primary_key(primary_key.to_owned());
+        let searchable_fields = searchable_fields.iter().map(|s| s.to_string()).collect();
+        builder.set_searchable_fields(searchable_fields);
+        // let filterable_fields = filterable_fields.iter().map(|s| s.to_string()).collect();
+        // builder.set_filterable_fields(filterable_fields);
+
+        // builder.set_min_word_len_one_typo(5);
+        // builder.set_min_word_len_two_typos(100);
+        builder.set_criteria(vec![Criterion::Words, Criterion::Typo, Criterion::Proximity]);
+        builder.execute(|_| (), || false).unwrap();
+
+        let config = IndexerConfig::default();
+        let indexing_config =
+            IndexDocumentsConfig { autogenerate_docids: true, ..Default::default() };
+        let builder =
+            IndexDocuments::new(&mut wtxn, &index, &config, indexing_config, |_| (), || false)
+                .unwrap();
+
+        let documents = documents_from(
+            "/Users/meilisearch/Documents/milli2/benchmarks/datasets/smol-wiki-articles.csv",
+            "csv",
+        );
+        let (builder, user_error) = builder.add_documents(documents).unwrap();
+        user_error.unwrap();
+        builder.execute().unwrap();
+        wtxn.commit().unwrap();
+
+        index.prepare_for_closing().wait();
+    }
+
+    fn documents_from(filename: &str, filetype: &str) -> DocumentsBatchReader<impl BufRead + Seek> {
+        let reader = File::open(filename)
+            .unwrap_or_else(|_| panic!("could not find the dataset in: {}", filename));
+        let reader = BufReader::new(reader);
+        let documents = match filetype {
+            "csv" => documents_from_csv(reader).unwrap(),
+            "json" => documents_from_json(reader).unwrap(),
+            "jsonl" => documents_from_jsonl(reader).unwrap(),
+            otherwise => panic!("invalid update format {:?}", otherwise),
+        };
+        DocumentsBatchReader::from_reader(Cursor::new(documents)).unwrap()
+    }
+
+    fn documents_from_jsonl(reader: impl BufRead) -> crate::Result<Vec<u8>> {
+        let mut documents = DocumentsBatchBuilder::new(Vec::new());
+
+        for result in serde_json::Deserializer::from_reader(reader).into_iter::<Object>() {
+            let object = result.unwrap();
+            documents.append_json_object(&object)?;
+        }
+
+        documents.into_inner().map_err(Into::into)
+    }
+
+    fn documents_from_json(reader: impl BufRead) -> crate::Result<Vec<u8>> {
+        let mut documents = DocumentsBatchBuilder::new(Vec::new());
+
+        documents.append_json_array(reader)?;
+
+        documents.into_inner().map_err(Into::into)
+    }
+
+    fn documents_from_csv(reader: impl BufRead) -> crate::Result<Vec<u8>> {
+        let csv = csv::Reader::from_reader(reader);
+
+        let mut documents = DocumentsBatchBuilder::new(Vec::new());
+        documents.append_csv(csv)?;
+
+        documents.into_inner().map_err(Into::into)
+    }
+}
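In `mod.rs` above, `SearchContext` trades its `node_docids_cache: NodeDocIdsCache` for the new `derivations_interner: Interner<WordDerivations>` and a `query_term_docids: QueryTermDocIdsCache`, and `execute_search` now builds its ranking rules (words, typo, proximity) inline and hands them to `bucket_sort` instead of calling `apply_ranking_rules`. With the interner in place, reading anything out of a one-word term costs two cheap lookups; a simplified sketch using the types from this diff (lifetime parameters elided):

```rust
// Resolve the original word of a one-word query term. Both `get` calls are
// plain index lookups; the heavy strings and derivation sets live in the
// interners, stored once each.
fn original_word<'ctx>(ctx: &'ctx SearchContext, term: &LocatedQueryTerm) -> Option<&'ctx str> {
    match &term.value {
        QueryTerm::Word { derivations } => {
            let derivations = ctx.derivations_interner.get(*derivations);
            Some(ctx.word_interner.get(derivations.original).as_str())
        }
        QueryTerm::Phrase { .. } => None,
    }
}
```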
@@ -3,7 +3,7 @@ use super::small_bitmap::SmallBitmap;
 use super::SearchContext;
 use crate::Result;
 
-const QUERY_GRAPH_NODE_LENGTH_LIMIT: u16 = 64;
+pub const QUERY_GRAPH_NODE_LENGTH_LIMIT: u16 = 64;
 
 /// A node of the [`QueryGraph`].
 ///
@@ -148,7 +148,7 @@ impl QueryGraph {
             let mut new_nodes = vec![];
             let new_node_idx = graph.add_node(&prev0, QueryNode::Term(term0.clone()));
             new_nodes.push(new_node_idx);
-            if term0.is_empty() {
+            if term0.is_empty(&ctx.derivations_interner) {
                 empty_nodes.push(new_node_idx);
             }
 
@@ -159,7 +159,7 @@ impl QueryGraph {
                     if word_set.contains(ctx.word_interner.get(ngram2_str)) {
                         let ngram2 = LocatedQueryTerm {
                             value: QueryTerm::Word {
-                                derivations: WordDerivations {
+                                derivations: ctx.derivations_interner.insert(WordDerivations {
                                     original: ngram2_str,
                                     // TODO: could add a typo if it's an ngram?
                                     zero_typo: Box::new([ngram2_str]),
@@ -168,7 +168,7 @@ impl QueryGraph {
                                     use_prefix_db: false,
                                     synonyms: Box::new([]), // TODO: ngram synonyms
                                     split_words: None,      // TODO: maybe ngram split words?
-                                },
+                                }),
                             },
                             positions: ngram2_pos,
                         };
@@ -187,7 +187,7 @@ impl QueryGraph {
                     if word_set.contains(ctx.word_interner.get(ngram3_str)) {
                         let ngram3 = LocatedQueryTerm {
                             value: QueryTerm::Word {
-                                derivations: WordDerivations {
+                                derivations: ctx.derivations_interner.insert(WordDerivations {
                                     original: ngram3_str,
                                     // TODO: could add a typo if it's an ngram?
                                     zero_typo: Box::new([ngram3_str]),
@@ -197,7 +197,7 @@ impl QueryGraph {
                                     synonyms: Box::new([]), // TODO: ngram synonyms
                                     split_words: None,      // TODO: maybe ngram split words?
                                                             // would be nice for typos like su nflower
-                                },
+                                }),
                             },
                             positions: ngram3_pos,
                         };
@@ -277,9 +277,10 @@ impl QueryGraph {
         loop {
             let mut nodes_to_remove = vec![];
             for (node_idx, node) in self.nodes.iter().enumerate() {
-                if !matches!(node, QueryNode::End | QueryNode::Deleted)
-                    && (self.edges[node_idx].successors.is_empty()
-                        || self.edges[node_idx].predecessors.is_empty())
+                if (!matches!(node, QueryNode::End | QueryNode::Deleted)
+                    && self.edges[node_idx].successors.is_empty())
+                    || (!matches!(node, QueryNode::Start | QueryNode::Deleted)
+                        && self.edges[node_idx].predecessors.is_empty())
                 {
                     nodes_to_remove.push(node_idx as u16);
                 }
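Beyond the interning changes, the last hunk of this file (`query_graph.rs`) also rewrites the node-pruning condition. The old disjunction pruned any node that was not `End` or `Deleted` and lacked either successors or predecessors; taken literally, that would also condemn the `Start` node, which by definition never has predecessors. The new condition checks the two directions separately, exempting `Start` from the predecessor check and `End` from the successor check.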
@@ -29,7 +29,7 @@ impl Phrase {
 
 /// A structure storing all the different ways to match
 /// a term in the user's search query.
-#[derive(Clone)]
+#[derive(Clone, PartialEq, Eq, Hash)]
 pub struct WordDerivations {
     /// The original word
     pub original: Interned<String>,
@@ -59,12 +59,12 @@ impl WordDerivations {
     /// Return an iterator over all the single words derived from the original word.
     ///
     /// This excludes synonyms, split words, and words stored in the prefix databases.
-    pub fn all_derivations_except_prefix_db(
+    pub fn all_single_word_derivations_except_prefix_db(
         &'_ self,
     ) -> impl Iterator<Item = Interned<String>> + Clone + '_ {
         self.zero_typo.iter().chain(self.one_typo.iter()).chain(self.two_typos.iter()).copied()
     }
-    fn is_empty(&self) -> bool {
+    pub fn is_empty(&self) -> bool {
         self.zero_typo.is_empty()
             && self.one_typo.is_empty()
             && self.two_typos.is_empty()
@@ -101,10 +101,10 @@ pub fn word_derivations(
             let prefix = Str::new(word).starts_with();
             let mut stream = fst.search(prefix).into_stream();
 
-            while let Some(word) = stream.next() {
-                let word = std::str::from_utf8(word)?.to_owned();
-                let word_interned = ctx.word_interner.insert(word);
-                zero_typo.push(word_interned);
+            while let Some(derived_word) = stream.next() {
+                let derived_word = std::str::from_utf8(derived_word)?.to_owned();
+                let derived_word_interned = ctx.word_interner.insert(derived_word);
+                zero_typo.push(derived_word_interned);
             }
         } else if fst.contains(word) {
             zero_typo.push(word_interned);
@@ -113,17 +113,19 @@ pub fn word_derivations(
         let dfa = build_dfa(word, 1, is_prefix);
         let starts = StartsWith(Str::new(get_first(word)));
         let mut stream = fst.search_with_state(Intersection(starts, &dfa)).into_stream();
+        // TODO: There may be wayyy too many matches (e.g. in the thousands), how to reduce them?
 
-        while let Some((word, state)) = stream.next() {
-            let word = std::str::from_utf8(word)?;
-            let word_interned = ctx.word_interner.insert(word.to_owned());
+        while let Some((derived_word, state)) = stream.next() {
+            let derived_word = std::str::from_utf8(derived_word)?;
             let d = dfa.distance(state.1);
+            let derived_word_interned = ctx.word_interner.insert(derived_word.to_owned());
            match d.to_u8() {
                 0 => {
-                    zero_typo.push(word_interned);
+                    zero_typo.push(derived_word_interned);
                 }
                 1 => {
-                    one_typo.push(word_interned);
+                    one_typo.push(derived_word_interned);
                 }
                 _ => panic!(),
             }
@@ -136,27 +138,28 @@ pub fn word_derivations(
         let automaton = Union(first, &second);
 
         let mut stream = fst.search_with_state(automaton).into_stream();
+        // TODO: There may be wayyy too many matches (e.g. in the thousands), how to reduce them?
 
-        while let Some((found_word, state)) = stream.next() {
-            let found_word = std::str::from_utf8(found_word)?;
-            let found_word_interned = ctx.word_interner.insert(found_word.to_owned());
+        while let Some((derived_word, state)) = stream.next() {
+            let derived_word = std::str::from_utf8(derived_word)?;
+            let derived_word_interned = ctx.word_interner.insert(derived_word.to_owned());
             // in the case the typo is on the first letter, we know the number of typo
             // is two
-            if get_first(found_word) != get_first(word) {
-                two_typos.push(found_word_interned);
+            if get_first(derived_word) != get_first(word) {
+                two_typos.push(derived_word_interned);
             } else {
                 // Else, we know that it is the second dfa that matched and compute the
                 // correct distance
                 let d = second_dfa.distance((state.1).0);
                 match d.to_u8() {
                     0 => {
-                        zero_typo.push(found_word_interned);
+                        zero_typo.push(derived_word_interned);
                     }
                     1 => {
-                        one_typo.push(found_word_interned);
+                        one_typo.push(derived_word_interned);
                     }
                     2 => {
-                        two_typos.push(found_word_interned);
+                        two_typos.push(derived_word_interned);
                     }
                     _ => panic!(),
                 }
@@ -223,10 +226,11 @@ fn split_best_frequency(
     Ok(best.map(|(_, left, right)| (left.to_owned(), right.to_owned())))
 }
 
-#[derive(Clone)]
+#[derive(Clone, PartialEq, Eq, Hash)]
 pub enum QueryTerm {
     Phrase { phrase: Interned<Phrase> },
-    Word { derivations: WordDerivations },
+    // TODO: change to `Interned<WordDerivations>`?
+    Word { derivations: Interned<WordDerivations> },
 }
 
 impl QueryTerm {
@@ -234,10 +238,12 @@ impl QueryTerm {
     pub fn original_single_word<'interner>(
         &self,
         word_interner: &'interner Interner<String>,
+        derivations_interner: &'interner Interner<WordDerivations>,
     ) -> Option<&'interner str> {
         match self {
             QueryTerm::Phrase { phrase: _ } => None,
             QueryTerm::Word { derivations } => {
+                let derivations = derivations_interner.get(*derivations);
                 if derivations.is_empty() {
                     None
                 } else {
@@ -257,12 +263,12 @@ pub struct LocatedQueryTerm {
 
 impl LocatedQueryTerm {
     /// Return `true` iff the word derivations within the query term are empty
-    pub fn is_empty(&self) -> bool {
-        match &self.value {
+    pub fn is_empty(&self, interner: &Interner<WordDerivations>) -> bool {
+        match self.value {
             // TODO: phrases should be greedily computed, so that they can be excluded from
             // the query graph right from the start?
             QueryTerm::Phrase { phrase: _ } => false,
-            QueryTerm::Word { derivations, .. } => derivations.is_empty(),
+            QueryTerm::Word { derivations, .. } => interner.get(derivations).is_empty(),
         }
     }
 }
@@ -336,7 +342,9 @@ pub fn located_query_terms_from_string<'search>(
                             let word = token.lemma();
                             let derivations = word_derivations(ctx, word, nbr_typos(word), false)?;
                             let located_term = LocatedQueryTerm {
-                                value: QueryTerm::Word { derivations },
+                                value: QueryTerm::Word {
+                                    derivations: ctx.derivations_interner.insert(derivations),
+                                },
                                 positions: position..=position,
                             };
                             located_terms.push(located_term);
@@ -347,7 +355,9 @@ pub fn located_query_terms_from_string<'search>(
                     let word = token.lemma();
                     let derivations = word_derivations(ctx, word, nbr_typos(word), true)?;
                     let located_term = LocatedQueryTerm {
-                        value: QueryTerm::Word { derivations },
+                        value: QueryTerm::Word {
+                            derivations: ctx.derivations_interner.insert(derivations),
+                        },
                        positions: position..=position,
                     };
                     located_terms.push(located_term);
@@ -409,8 +419,8 @@ pub fn ngram2(
         return None;
     }
     match (
-        &x.value.original_single_word(&ctx.word_interner),
-        &y.value.original_single_word(&ctx.word_interner),
+        &x.value.original_single_word(&ctx.word_interner, &ctx.derivations_interner),
+        &y.value.original_single_word(&ctx.word_interner, &ctx.derivations_interner),
     ) {
         (Some(w1), Some(w2)) => {
             let term = (
@@ -436,9 +446,9 @@ pub fn ngram3(
         return None;
    }
     match (
-        &x.value.original_single_word(&ctx.word_interner),
-        &y.value.original_single_word(&ctx.word_interner),
-        &z.value.original_single_word(&ctx.word_interner),
+        &x.value.original_single_word(&ctx.word_interner, &ctx.derivations_interner),
+        &y.value.original_single_word(&ctx.word_interner, &ctx.derivations_interner),
+        &z.value.original_single_word(&ctx.word_interner, &ctx.derivations_interner),
     ) {
         (Some(w1), Some(w2), Some(w3)) => {
             let term = (
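The `query_term.rs` hunks are where the new derives earn their keep: `WordDerivations` and `QueryTerm` become `PartialEq + Eq + Hash` so they can serve as hash-map keys inside the interner, and `QueryTerm::Word` shrinks to a copyable `Interned<WordDerivations>`. Assuming the interner deduplicates as sketched at the top of this page, interning equal derivation sets yields the same id, so identical handles resolve to the very same stored value:

```rust
// Hypothetical usage, relying on the dedup behaviour assumed earlier.
let a = ctx.derivations_interner.insert(derivations.clone());
let b = ctx.derivations_interner.insert(derivations);
assert!(std::ptr::eq(ctx.derivations_interner.get(a), ctx.derivations_interner.get(b)));
```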
@@ -14,29 +14,33 @@ pub fn visit_from_node(
 | 
			
		||||
    from_node: &QueryNode,
 | 
			
		||||
) -> Result<Option<(WordDerivations, i8)>> {
 | 
			
		||||
    Ok(Some(match from_node {
 | 
			
		||||
        QueryNode::Term(LocatedQueryTerm { value: value1, positions: pos1 }) => match value1 {
 | 
			
		||||
            QueryTerm::Word { derivations } => (derivations.clone(), *pos1.end()),
 | 
			
		||||
            QueryTerm::Phrase { phrase: phrase1 } => {
 | 
			
		||||
                let phrase1 = ctx.phrase_interner.get(*phrase1);
 | 
			
		||||
                if let Some(original) = *phrase1.words.last().unwrap() {
 | 
			
		||||
                    (
 | 
			
		||||
                        WordDerivations {
 | 
			
		||||
                            original,
 | 
			
		||||
                            zero_typo: Box::new([original]),
 | 
			
		||||
                            one_typo: Box::new([]),
 | 
			
		||||
                            two_typos: Box::new([]),
 | 
			
		||||
                            use_prefix_db: false,
 | 
			
		||||
                            synonyms: Box::new([]),
 | 
			
		||||
                            split_words: None,
 | 
			
		||||
                        },
 | 
			
		||||
                        *pos1.end(),
 | 
			
		||||
                    )
 | 
			
		||||
                } else {
 | 
			
		||||
                    // No word pairs if the phrase does not have a regular word as its last term
 | 
			
		||||
                    return Ok(None);
 | 
			
		||||
        QueryNode::Term(LocatedQueryTerm { value: value1, positions: pos1 }) => {
 | 
			
		||||
            match value1 {
 | 
			
		||||
                QueryTerm::Word { derivations } => {
 | 
			
		||||
                    (ctx.derivations_interner.get(*derivations).clone(), *pos1.end())
 | 
			
		||||
                }
 | 
			
		||||
                QueryTerm::Phrase { phrase: phrase1 } => {
 | 
			
		||||
                    let phrase1 = ctx.phrase_interner.get(*phrase1);
 | 
			
		||||
                    if let Some(original) = *phrase1.words.last().unwrap() {
 | 
			
		||||
                        (
 | 
			
		||||
                            WordDerivations {
 | 
			
		||||
                                original,
 | 
			
		||||
                                zero_typo: Box::new([original]),
 | 
			
		||||
                                one_typo: Box::new([]),
 | 
			
		||||
                                two_typos: Box::new([]),
 | 
			
		||||
                                use_prefix_db: false,
 | 
			
		||||
                                synonyms: Box::new([]),
 | 
			
		||||
                                split_words: None,
 | 
			
		||||
                            },
 | 
			
		||||
                            *pos1.end(),
 | 
			
		||||
                        )
 | 
			
		||||
                    } else {
 | 
			
		||||
                        // No word pairs if the phrase does not have a regular word as its last term
 | 
			
		||||
                        return Ok(None);
 | 
			
		||||
                    }
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
        },
 | 
			
		||||
        }
 | 
			
		||||
        QueryNode::Start => (
 | 
			
		||||
            WordDerivations {
 | 
			
		||||
                original: ctx.word_interner.insert(String::new()),
 | 
			
		||||
@@ -58,6 +62,10 @@ pub fn visit_to_node<'search, 'from_data>(
 | 
			
		||||
    to_node: &QueryNode,
 | 
			
		||||
    from_node_data: &'from_data (WordDerivations, i8),
 | 
			
		||||
) -> Result<Vec<(u8, EdgeCondition<ProximityEdge>)>> {
 | 
			
		||||
    let SearchContext { index, txn, db_cache, word_interner, derivations_interner, .. } = ctx;
 | 
			
		||||
 | 
			
		||||
    // IMPORTANT! TODO: split words support
 | 
			
		||||
 | 
			
		||||
    let (derivations1, pos1) = from_node_data;
 | 
			
		||||
    let term2 = match &to_node {
 | 
			
		||||
        QueryNode::End => return Ok(vec![(0, EdgeCondition::Unconditional)]),
 | 
			
		||||
@@ -67,7 +75,9 @@ pub fn visit_to_node<'search, 'from_data>(
 | 
			
		||||
    let LocatedQueryTerm { value: value2, positions: pos2 } = term2;
 | 
			
		||||
 | 
			
		||||
    let (derivations2, pos2, ngram_len2) = match value2 {
 | 
			
		||||
        QueryTerm::Word { derivations } => (derivations.clone(), *pos2.start(), pos2.len()),
 | 
			
		||||
        QueryTerm::Word { derivations } => {
 | 
			
		||||
            (derivations_interner.get(*derivations).clone(), *pos2.start(), pos2.len())
 | 
			
		||||
        }
 | 
			
		||||
        QueryTerm::Phrase { phrase: phrase2 } => {
 | 
			
		||||
            let phrase2 = ctx.phrase_interner.get(*phrase2);
 | 
			
		||||
            if let Some(original) = *phrase2.words.first().unwrap() {
 | 
			
		||||
@@ -105,7 +115,8 @@ pub fn visit_to_node<'search, 'from_data>(
 | 
			
		||||
    // left term cannot be a prefix
 | 
			
		||||
    assert!(!updb1);
 | 
			
		||||
 | 
			
		||||
    let derivations1 = derivations1.all_derivations_except_prefix_db();
 | 
			
		||||
    // TODO: IMPORTANT! split words and synonyms support
 | 
			
		||||
    let derivations1 = derivations1.all_single_word_derivations_except_prefix_db();
 | 
			
		||||
    // TODO: eventually, we want to get rid of the uses from `original`
    let mut cost_proximity_word_pairs = BTreeMap::<u8, BTreeMap<u8, Vec<WordPair>>>::new();
@@ -115,8 +126,11 @@ pub fn visit_to_node<'search, 'from_data>(
                let cost = (proximity + ngram_len2 - 1) as u8;
                // TODO: if we had access to the universe here, we could already check whether
                // the bitmap corresponding to this word pair is disjoint with the universe or not
                if ctx
                if db_cache
                    .get_word_prefix_pair_proximity_docids(
                        index,
                        txn,
                        word_interner,
                        word1,
                        derivations2.original,
                        proximity as u8,
@@ -133,8 +147,11 @@ pub fn visit_to_node<'search, 'from_data>(
                            right_prefix: derivations2.original,
                        });
                }
                if ctx
                if db_cache
                    .get_prefix_word_pair_proximity_docids(
                        index,
                        txn,
                        word_interner,
                        derivations2.original,
                        word1,
                        proximity as u8 - 1,
@@ -155,14 +172,30 @@ pub fn visit_to_node<'search, 'from_data>(
        }
    }

    let derivations2 = derivations2.all_derivations_except_prefix_db();
    // TODO: add safeguard in case the cartesian product is too large?
    // TODO: important! support split words and synonyms as well
    let derivations2 = derivations2.all_single_word_derivations_except_prefix_db();
    // TODO: add safeguard in case the cartesian product is too large!
    // even if we restrict the word derivations to a maximum of 100, the size of the
    // cartesian product could reach a maximum of 10_000 derivations, which is way too much.
    // Maybe prioritise the product of zero typo derivations, then the product of zero-typo/one-typo
    // + one-typo/zero-typo, then one-typo/one-typo, then ... until an arbitrary limit has been
    // reached
    let product_derivations = derivations1.cartesian_product(derivations2);

    for (word1, word2) in product_derivations {
        for proximity in 1..=(8 - ngram_len2) {
            let cost = (proximity + ngram_len2 - 1) as u8;
            if ctx.get_word_pair_proximity_docids(word1, word2, proximity as u8)?.is_some() {
            if db_cache
                .get_word_pair_proximity_docids(
                    index,
                    txn,
                    word_interner,
                    word1,
                    word2,
                    proximity as u8,
                )?
                .is_some()
            {
                cost_proximity_word_pairs
                    .entry(cost)
                    .or_default()
@@ -171,7 +204,16 @@ pub fn visit_to_node<'search, 'from_data>(
                    .push(WordPair::Words { left: word1, right: word2 });
            }
            if proximity > 1
                && ctx.get_word_pair_proximity_docids(word2, word1, proximity as u8 - 1)?.is_some()
                && db_cache
                    .get_word_pair_proximity_docids(
                        index,
                        txn,
                        word_interner,
                        word2,
                        word1,
                        proximity as u8 - 1,
                    )?
                    .is_some()
            {
                cost_proximity_word_pairs
                    .entry(cost)

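A note on the cost formula and the prioritisation TODO above: for an n-gram of length `ngram_len2`, a pair at proximity `p` gets cost `p + ngram_len2 - 1`, so a 2-gram at proximity 1 costs 2. The limiting strategy the comment suggests could look roughly like this sketch (all names here are hypothetical, not from the diff):

```rust
/// Sketch of the TODO above: enumerate word pairs by increasing total typo
/// count (0+0, then 0+1 and 1+0, then 1+1), stopping at an arbitrary limit.
fn prioritised_pairs<'a>(
    zero_typo1: &[&'a str],
    one_typo1: &[&'a str],
    zero_typo2: &[&'a str],
    one_typo2: &[&'a str],
    limit: usize,
) -> Vec<(&'a str, &'a str)> {
    // Buckets ordered by total number of typos in the pair.
    let buckets = [
        (zero_typo1, zero_typo2),
        (zero_typo1, one_typo2),
        (one_typo1, zero_typo2),
        (one_typo1, one_typo2),
    ];
    let mut pairs = Vec::new();
    'outer: for (lefts, rights) in buckets {
        for &left in lefts {
            for &right in rights {
                if pairs.len() >= limit {
                    break 'outer;
                }
                pairs.push((left, right));
            }
        }
    }
    pairs
}
```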
@@ -9,19 +9,37 @@ pub fn compute_docids<'search>(
    edge: &ProximityEdge,
    universe: &RoaringBitmap,
) -> Result<RoaringBitmap> {
    let SearchContext { index, txn, db_cache, word_interner, .. } = ctx;
    let ProximityEdge { pairs, proximity } = edge;
    let mut pair_docids = RoaringBitmap::new();
    for pair in pairs.iter() {
        let bytes = match pair {
            WordPair::Words { left, right } => {
                ctx.get_word_pair_proximity_docids(*left, *right, *proximity)
            }
            WordPair::WordPrefix { left, right_prefix } => {
                ctx.get_word_prefix_pair_proximity_docids(*left, *right_prefix, *proximity)
            }
            WordPair::WordPrefixSwapped { left_prefix, right } => {
                ctx.get_prefix_word_pair_proximity_docids(*left_prefix, *right, *proximity)
            }
            WordPair::Words { left, right } => db_cache.get_word_pair_proximity_docids(
                index,
                txn,
                word_interner,
                *left,
                *right,
                *proximity,
            ),
            WordPair::WordPrefix { left, right_prefix } => db_cache
                .get_word_prefix_pair_proximity_docids(
                    index,
                    txn,
                    word_interner,
                    *left,
                    *right_prefix,
                    *proximity,
                ),
            WordPair::WordPrefixSwapped { left_prefix, right } => db_cache
                .get_prefix_word_pair_proximity_docids(
                    index,
                    txn,
                    word_interner,
                    *left_prefix,
                    *right,
                    *proximity,
                ),
        }?;
        // TODO: deserialize bitmap within a universe, and (maybe) using a bump allocator?
        let bitmap = universe

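The hunk is cut off right after `let bitmap = universe`; presumably each pair's bytes are decoded and the result is intersected with the universe before being unioned into `pair_docids`. A self-contained sketch of that accumulation step, assuming the bitmaps have already been deserialized (the real code decodes each one from LMDB bytes first):

```rust
use roaring::RoaringBitmap;

/// Union the docids of every word pair of an edge, restricted to `universe`.
fn union_pair_docids(universe: &RoaringBitmap, pair_bitmaps: &[RoaringBitmap]) -> RoaringBitmap {
    let mut pair_docids = RoaringBitmap::new();
    for bitmap in pair_bitmaps {
        // Restrict each pair's docids to the universe before accumulating,
        // as the TODO about deserializing within a universe hints.
        pair_docids |= universe & bitmap;
    }
    pair_docids
}
```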
@@ -1,4 +1,3 @@
use heed::BytesDecode;
use roaring::RoaringBitmap;

use super::empty_paths_cache::EmptyPathsCache;
@@ -6,15 +5,14 @@ use super::{EdgeCondition, RankingRuleGraph, RankingRuleGraphTrait};
use crate::search::new::interner::Interned;
use crate::search::new::logger::SearchLogger;
use crate::search::new::query_term::{LocatedQueryTerm, Phrase, QueryTerm, WordDerivations};
use crate::search::new::resolve_query_graph::resolve_phrase;
use crate::search::new::small_bitmap::SmallBitmap;
use crate::search::new::{QueryGraph, QueryNode, SearchContext};
use crate::{Result, RoaringBitmapCodec};
use crate::Result;

#[derive(Clone)]
pub enum TypoEdge {
    Phrase { phrase: Interned<Phrase> },
    Word { derivations: WordDerivations, nbr_typos: u8 },
    Word { derivations: Interned<WordDerivations>, nbr_typos: u8 },
}

pub enum TypoGraph {}
@@ -35,32 +33,37 @@ impl RankingRuleGraphTrait for TypoGraph {
        edge: &Self::EdgeCondition,
        universe: &RoaringBitmap,
    ) -> Result<RoaringBitmap> {
        let SearchContext {
            index,
            txn,
            db_cache,
            word_interner,
            phrase_interner,
            derivations_interner,
            query_term_docids,
        } = ctx;
        match edge {
            TypoEdge::Phrase { phrase } => resolve_phrase(ctx, *phrase),
            TypoEdge::Word { derivations, nbr_typos } => {
                let words = match nbr_typos {
                    0 => &derivations.zero_typo,
                    1 => &derivations.one_typo,
                    2 => &derivations.two_typos,
                    _ => panic!(),
                };
                let mut docids = RoaringBitmap::new();
                for word in words.iter().copied() {
                    let Some(bytes) = ctx.get_word_docids(word)? else { continue };
                    // TODO: deserialize bitmap within a universe
                    let bitmap = universe
                        & RoaringBitmapCodec::bytes_decode(bytes).ok_or(heed::Error::Decoding)?;
                    docids |= bitmap;
                }
                if *nbr_typos == 0 {
                    if let Some(bytes) = ctx.get_word_prefix_docids(derivations.original)? {
                        // TODO: deserialize bitmap within a universe
                        let bitmap = universe
                            & RoaringBitmapCodec::bytes_decode(bytes)
                                .ok_or(heed::Error::Decoding)?;
                        docids |= bitmap;
                    }
                }
            &TypoEdge::Phrase { phrase } => Ok(universe
                & query_term_docids.get_phrase_docids(
                    index,
                    txn,
                    db_cache,
                    word_interner,
                    phrase_interner,
                    phrase,
                )?),
            TypoEdge::Word { derivations, .. } => {
                let docids = universe
                    & query_term_docids.get_word_derivations_docids(
                        index,
                        txn,
                        db_cache,
                        word_interner,
                        derivations_interner,
                        phrase_interner,
                        *derivations,
                    )?;

                Ok(docids)
            }
        }
@@ -74,43 +77,71 @@ impl RankingRuleGraphTrait for TypoGraph {
    }

    fn build_step_visit_destination_node<'from_data, 'search: 'from_data>(
        _ctx: &mut SearchContext<'search>,
        ctx: &mut SearchContext<'search>,
        to_node: &QueryNode,
        _from_node_data: &'from_data Self::BuildVisitedFromNode,
    ) -> Result<Vec<(u8, EdgeCondition<Self::EdgeCondition>)>> {
        let SearchContext { derivations_interner, .. } = ctx;
        match to_node {
            QueryNode::Term(LocatedQueryTerm { value, .. }) => match value {
                &QueryTerm::Phrase { phrase } => {
            QueryNode::Term(LocatedQueryTerm { value, .. }) => match *value {
                QueryTerm::Phrase { phrase } => {
                    Ok(vec![(0, EdgeCondition::Conditional(TypoEdge::Phrase { phrase }))])
                }
                QueryTerm::Word { derivations } => {
                    let mut edges = vec![];
                    if !derivations.zero_typo.is_empty() || derivations.use_prefix_db {
                        edges.push((
                            0,
                            EdgeCondition::Conditional(TypoEdge::Word {
                                derivations: derivations.clone(),
                                nbr_typos: 0,
                            }),
                        ))
                    }
                    if !derivations.one_typo.is_empty() {
                        edges.push((
                            1,
                            EdgeCondition::Conditional(TypoEdge::Word {
                                derivations: derivations.clone(),
                                nbr_typos: 1,
                            }),
                        ))
                    }
                    if !derivations.two_typos.is_empty() {
                        edges.push((
                            2,
                            EdgeCondition::Conditional(TypoEdge::Word {
                                derivations: derivations.clone(),
                                nbr_typos: 2,
                            }),
                        ))

                    for nbr_typos in 0..=2 {
                        let derivations = derivations_interner.get(derivations).clone();
                        let new_derivations = match nbr_typos {
                            0 => {
                                // TODO: think about how split words and synonyms should be handled here
                                // TODO: what about ngrams?
                                // Maybe 2grams should have one typo by default and 3grams 2 typos by default
                                WordDerivations {
                                    original: derivations.original,
                                    synonyms: derivations.synonyms,
                                    split_words: None,
                                    zero_typo: derivations.zero_typo,
                                    one_typo: Box::new([]),
                                    two_typos: Box::new([]),
                                    use_prefix_db: derivations.use_prefix_db,
                                }
                            }
                            1 => {
                                // What about split words and synonyms here?
                                WordDerivations {
                                    original: derivations.original,
                                    synonyms: Box::new([]),
                                    split_words: derivations.split_words,
                                    zero_typo: Box::new([]),
                                    one_typo: derivations.one_typo,
                                    two_typos: Box::new([]),
                                    use_prefix_db: false, // false because all items from use_prefix_db have 0 typos
                                }
                            }
                            2 => {
                                // What about split words and synonyms here?
                                WordDerivations {
                                    original: derivations.original,
                                    synonyms: Box::new([]),
                                    split_words: None,
                                    zero_typo: Box::new([]),
                                    one_typo: Box::new([]),
                                    two_typos: derivations.two_typos,
                                    use_prefix_db: false, // false because all items from use_prefix_db have 0 typos
                                }
                            }
                            _ => panic!(),
                        };
                        if !new_derivations.is_empty() {
                            edges.push((
                                nbr_typos,
                                EdgeCondition::Conditional(TypoEdge::Word {
                                    derivations: derivations_interner.insert(new_derivations),
                                    nbr_typos,
                                }),
                            ))
                        }
                    }
                    Ok(edges)
                }

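Storing an `Interned<WordDerivations>` in `TypoEdge::Word` means the three per-typo-count variants built above are deduplicated and cheap to pass around, instead of cloning whole derivation sets into every edge. For reference, a minimal interner sketch (a hypothetical type; the real one lives in `crate::search::new::interner` and uses `Interned<T>` handles):

```rust
use std::collections::HashMap;

/// Minimal interner sketch: deduplicates values and hands out small indices,
/// so graph edges can store a `u32` instead of cloning large values around.
struct TinyInterner<T> {
    ids: HashMap<T, u32>,
    values: Vec<T>,
}

impl<T: std::hash::Hash + Eq + Clone> TinyInterner<T> {
    fn new() -> Self {
        Self { ids: HashMap::new(), values: Vec::new() }
    }
    /// Return the existing id for `value`, or assign a fresh one.
    fn insert(&mut self, value: T) -> u32 {
        if let Some(&id) = self.ids.get(&value) {
            return id;
        }
        let id = self.values.len() as u32;
        self.ids.insert(value.clone(), id);
        self.values.push(value);
        id
    }
    fn get(&self, id: u32) -> &T {
        &self.values[id as usize]
    }
}
```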
@@ -2,42 +2,23 @@ use roaring::RoaringBitmap;

use super::logger::SearchLogger;
use super::{QueryGraph, SearchContext};
use crate::search::new::graph_based_ranking_rule::GraphBasedRankingRule;
use crate::search::new::ranking_rule_graph::{ProximityGraph, TypoGraph};
use crate::search::new::words::Words;
// use crate::search::new::sort::Sort;
use crate::{Result, TermsMatchingStrategy};

pub trait RankingRuleOutputIter<'search, Query> {
    fn next_bucket(&mut self) -> Result<Option<RankingRuleOutput<Query>>>;
}

pub struct RankingRuleOutputIterWrapper<'search, Query> {
    iter: Box<dyn Iterator<Item = Result<RankingRuleOutput<Query>>> + 'search>,
}
impl<'search, Query> RankingRuleOutputIterWrapper<'search, Query> {
    pub fn new(iter: Box<dyn Iterator<Item = Result<RankingRuleOutput<Query>>> + 'search>) -> Self {
        Self { iter }
    }
}
impl<'search, Query> RankingRuleOutputIter<'search, Query>
    for RankingRuleOutputIterWrapper<'search, Query>
{
    fn next_bucket(&mut self) -> Result<Option<RankingRuleOutput<Query>>> {
        match self.iter.next() {
            Some(x) => x.map(Some),
            None => Ok(None),
        }
    }
}
use crate::Result;

/// An internal trait implemented by only [`PlaceholderQuery`] and [`QueryGraph`]
pub trait RankingRuleQueryTrait: Sized + Clone + 'static {}

/// A type describing a placeholder search
#[derive(Clone)]
pub struct PlaceholderQuery;
impl RankingRuleQueryTrait for PlaceholderQuery {}
impl RankingRuleQueryTrait for QueryGraph {}

/// A trait that must be implemented by all ranking rules.
///
/// It is generic over `'search`, the lifetime of the search context
/// (i.e. the read transaction and the cache) and over `Query`, which
/// can be either [`PlaceholderQuery`] or [`QueryGraph`].
pub trait RankingRule<'search, Query: RankingRuleQueryTrait> {
    fn id(&self) -> String;

@@ -76,6 +57,8 @@ pub trait RankingRule<'search, Query: RankingRuleQueryTrait> {
    );
}

/// Output of a ranking rule, consisting of the query to be used
/// by the child ranking rule and a set of document ids.
#[derive(Debug)]
pub struct RankingRuleOutput<Q> {
    /// The query corresponding to the current bucket for the child ranking rule
@@ -84,25 +67,16 @@ pub struct RankingRuleOutput<Q> {
    pub candidates: RoaringBitmap,
}

// TODO: can make it generic over the query type (either query graph or placeholder) fairly easily
#[allow(clippy::too_many_arguments)]
pub fn apply_ranking_rules<'search>(
pub fn bucket_sort<'search, Q: RankingRuleQueryTrait>(
    ctx: &mut SearchContext<'search>,
    // TODO: ranking rules parameter
    query_graph: &QueryGraph,
    mut ranking_rules: Vec<&mut dyn RankingRule<'search, Q>>,
    query_graph: &Q,
    universe: &RoaringBitmap,
    from: usize,
    length: usize,
    logger: &mut dyn SearchLogger<QueryGraph>,
    logger: &mut dyn SearchLogger<Q>,
) -> Result<Vec<u32>> {
    logger.initial_query(query_graph);
    let words = &mut Words::new(TermsMatchingStrategy::Last);
    // let sort = &mut Sort::new(index, txn, "release_date".to_owned(), true)?;
    let proximity = &mut GraphBasedRankingRule::<ProximityGraph>::new("proximity".to_owned());
    let typo = &mut GraphBasedRankingRule::<TypoGraph>::new("typo".to_owned());
    // TODO: ranking rules given as argument
    let mut ranking_rules: Vec<&mut dyn RankingRule<'search, QueryGraph>> =
        vec![words, typo, proximity /*sort*/];

    logger.ranking_rules(&ranking_rules);

@@ -119,6 +93,9 @@ pub fn apply_ranking_rules<'search>(

    let mut cur_ranking_rule_index = 0;

    /// Finish iterating over the current ranking rule, yielding
    /// control to the parent (or finishing the search if not possible).
    /// Update the candidates accordingly and inform the logger.
    macro_rules! back {
        () => {
            assert!(candidates[cur_ranking_rule_index].is_empty());
@@ -140,8 +117,8 @@ pub fn apply_ranking_rules<'search>(
    let mut results = vec![];
    let mut cur_offset = 0usize;

    // Add the candidates to the results. Take the `from`, `limit`, and `cur_offset`
    // into account and inform the logger.
    /// Add the candidates to the results. Take the `from`, `limit`, and `cur_offset`
    /// into account and inform the logger.
    macro_rules! maybe_add_to_results {
        ($candidates:expr) => {
            let candidates = $candidates;
@@ -193,7 +170,6 @@ pub fn apply_ranking_rules<'search>(
        }

        let Some(next_bucket) = ranking_rules[cur_ranking_rule_index].next_bucket(ctx, logger, &candidates[cur_ranking_rule_index])? else {
            // TODO: add remaining candidates automatically here?
            back!();
            continue;
        };

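The `maybe_add_to_results!` macro applies the `from`/`length` window while `cur_offset` counts every document seen so far across buckets. A plain-function sketch of that arithmetic (hypothetical names, ignoring the logger):

```rust
/// Sketch of the pagination logic: given a bucket of candidate docids and the
/// number of documents already seen (`cur_offset`), keep only the part that
/// falls inside the `from..from + length` window.
fn add_bucket_to_results(
    bucket: &[u32],
    cur_offset: &mut usize,
    from: usize,
    length: usize,
    results: &mut Vec<u32>,
) {
    let end = from + length;
    for &docid in bucket {
        if *cur_offset >= from && *cur_offset < end {
            results.push(docid);
        }
        // Every document counts toward the offset, kept or not.
        *cur_offset += 1;
    }
}
```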
@@ -1,74 +1,140 @@
#![allow(clippy::too_many_arguments)]

use std::collections::VecDeque;

use fxhash::FxHashMap;
use heed::BytesDecode;
use heed::{BytesDecode, RoTxn};
use roaring::{MultiOps, RoaringBitmap};

use super::interner::Interned;
use super::db_cache::DatabaseCache;
use super::interner::{Interned, Interner};
use super::query_graph::QUERY_GRAPH_NODE_LENGTH_LIMIT;
use super::query_term::{Phrase, QueryTerm, WordDerivations};
use super::small_bitmap::SmallBitmap;
use super::{QueryGraph, QueryNode, SearchContext};
use crate::{CboRoaringBitmapCodec, Result, RoaringBitmapCodec};
use crate::{CboRoaringBitmapCodec, Index, Result, RoaringBitmapCodec};

// TODO: manual performance metrics: access to DB, bitmap deserializations/operations, etc.
#[derive(Default)]
pub struct NodeDocIdsCache {
    pub cache: FxHashMap<u16, RoaringBitmap>,
pub struct QueryTermDocIdsCache {
    pub phrases: FxHashMap<Interned<Phrase>, RoaringBitmap>,
    pub derivations: FxHashMap<Interned<WordDerivations>, RoaringBitmap>,
}
impl<'search> SearchContext<'search> {
    fn get_node_docids<'cache>(
        &'cache mut self,
        term: &QueryTerm,
        node_idx: u16,
    ) -> Result<&'cache RoaringBitmap> {
        if self.node_docids_cache.cache.contains_key(&node_idx) {
            return Ok(&self.node_docids_cache.cache[&node_idx]);
impl QueryTermDocIdsCache {
    /// Get the document ids associated with the given phrase
    pub fn get_phrase_docids<'s, 'search>(
        &'s mut self,
        index: &Index,
        txn: &'search RoTxn,
        db_cache: &mut DatabaseCache<'search>,
        word_interner: &Interner<String>,
        phrase_interner: &Interner<Phrase>,
        phrase: Interned<Phrase>,
    ) -> Result<&'s RoaringBitmap> {
        if self.phrases.contains_key(&phrase) {
            return Ok(&self.phrases[&phrase]);
        };
        let docids = match term {
            QueryTerm::Phrase { phrase } => resolve_phrase(self, *phrase)?,
            QueryTerm::Word {
                derivations:
                    WordDerivations {
                        original,
                        zero_typo,
                        one_typo,
                        two_typos,
                        use_prefix_db,
                        synonyms,
                        split_words,
                    },
            } => {
                let mut or_docids = vec![];
                for word in zero_typo.iter().chain(one_typo.iter()).chain(two_typos.iter()).copied()
                {
                    if let Some(word_docids) = self.get_word_docids(word)? {
                        or_docids.push(word_docids);
                    }
                }
                if *use_prefix_db {
                    if let Some(prefix_docids) = self.get_word_prefix_docids(*original)? {
                        or_docids.push(prefix_docids);
                    }
                }
                let mut docids = or_docids
                    .into_iter()
                    .map(|slice| RoaringBitmapCodec::bytes_decode(slice).unwrap())
                    .collect::<Vec<_>>();
                for synonym in synonyms.iter().copied() {
                    // TODO: cache resolve_phrase?
                    docids.push(resolve_phrase(self, synonym)?);
                }
                if let Some(split_words) = split_words {
                    docids.push(resolve_phrase(self, *split_words)?);
                }

                MultiOps::union(docids)
            }
        };
        let _ = self.node_docids_cache.cache.insert(node_idx, docids);
        let docids = &self.node_docids_cache.cache[&node_idx];
        let docids = resolve_phrase(index, txn, db_cache, word_interner, phrase_interner, phrase)?;
        let _ = self.phrases.insert(phrase, docids);
        let docids = &self.phrases[&phrase];
        Ok(docids)
    }

    /// Get the document ids associated with the given word derivations
    pub fn get_word_derivations_docids<'s, 'search>(
        &'s mut self,
        index: &Index,
        txn: &'search RoTxn,
        db_cache: &mut DatabaseCache<'search>,
        word_interner: &Interner<String>,
        derivations_interner: &Interner<WordDerivations>,
        phrase_interner: &Interner<Phrase>,
        derivations: Interned<WordDerivations>,
    ) -> Result<&'s RoaringBitmap> {
        if self.derivations.contains_key(&derivations) {
            return Ok(&self.derivations[&derivations]);
        };
        let WordDerivations {
            original,
            synonyms,
            split_words,
            zero_typo,
            one_typo,
            two_typos,
            use_prefix_db,
        } = derivations_interner.get(derivations);
        let mut or_docids = vec![];
        for word in zero_typo.iter().chain(one_typo.iter()).chain(two_typos.iter()).copied() {
            if let Some(word_docids) = db_cache.get_word_docids(index, txn, word_interner, word)? {
                or_docids.push(word_docids);
            }
        }
        if *use_prefix_db {
            // TODO: this will change if we decide to change from (original, zero_typo) to:
            // (debug_original, prefix_of, zero_typo)
            if let Some(prefix_docids) =
                db_cache.get_word_prefix_docids(index, txn, word_interner, *original)?
            {
                or_docids.push(prefix_docids);
            }
        }
        let mut docids = or_docids
            .into_iter()
            .map(|slice| RoaringBitmapCodec::bytes_decode(slice).unwrap())
            .collect::<Vec<_>>();
        for synonym in synonyms.iter().copied() {
            // TODO: cache resolve_phrase?
            docids.push(resolve_phrase(
                index,
                txn,
                db_cache,
                word_interner,
                phrase_interner,
                synonym,
            )?);
        }
        if let Some(split_words) = split_words {
            docids.push(resolve_phrase(
                index,
                txn,
                db_cache,
                word_interner,
                phrase_interner,
                *split_words,
            )?);
        }

        let docids = MultiOps::union(docids);
        let _ = self.derivations.insert(derivations, docids);
        let docids = &self.derivations[&derivations];
        Ok(docids)
    }

    /// Get the document ids associated with the given query term.
    fn get_query_term_docids<'s, 'search>(
        &'s mut self,
        index: &Index,
        txn: &'search RoTxn,
        db_cache: &mut DatabaseCache<'search>,
        word_interner: &Interner<String>,
        derivations_interner: &Interner<WordDerivations>,
        phrase_interner: &Interner<Phrase>,
        term: &QueryTerm,
    ) -> Result<&'s RoaringBitmap> {
        match *term {
            QueryTerm::Phrase { phrase } => {
                self.get_phrase_docids(index, txn, db_cache, word_interner, phrase_interner, phrase)
            }
            QueryTerm::Word { derivations } => self.get_word_derivations_docids(
                index,
                txn,
                db_cache,
                word_interner,
                derivations_interner,
                phrase_interner,
                derivations,
            ),
        }
    }
}

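Both getters use the same `contains_key` / compute / insert / re-index pattern rather than the `entry` API: the computation needs the index, transaction and interners while the map must stay un-borrowed, and the early `return Ok(&self.phrases[&phrase])` keeps the borrow checker happy because that borrow is only live on the return path. A reduced sketch of the pattern with plain standard-library types:

```rust
use std::collections::HashMap;

/// Sketch of the cache-then-reborrow pattern used above: check the map first,
/// compute without holding a borrow of the map, insert, then re-index to
/// return a reference that lives as long as `&mut self` would.
fn get_or_compute<'s>(
    cache: &'s mut HashMap<u32, Vec<u32>>,
    key: u32,
    compute: impl FnOnce() -> Vec<u32>,
) -> &'s Vec<u32> {
    if cache.contains_key(&key) {
        return &cache[&key];
    }
    let value = compute(); // may freely use other state here
    let _ = cache.insert(key, value);
    &cache[&key]
}
```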
pub fn resolve_query_graph<'search>(
@@ -76,14 +142,23 @@ pub fn resolve_query_graph<'search>(
    q: &QueryGraph,
    universe: &RoaringBitmap,
) -> Result<RoaringBitmap> {
    // TODO: there is definitely a faster way to compute this big
    let SearchContext {
        index,
        txn,
        db_cache,
        word_interner,
        phrase_interner,
        derivations_interner,
        query_term_docids,
    } = ctx;
    // TODO: there is a faster way to compute this big
    // roaring bitmap expression

    let mut nodes_resolved = SmallBitmap::new(64);
    let mut nodes_resolved = SmallBitmap::new(QUERY_GRAPH_NODE_LENGTH_LIMIT);
    let mut path_nodes_docids = vec![RoaringBitmap::new(); q.nodes.len()];

    let mut next_nodes_to_visit = VecDeque::new();
    next_nodes_to_visit.push_front(q.root_node);
    next_nodes_to_visit.push_back(q.root_node);

    while let Some(node) = next_nodes_to_visit.pop_front() {
        let predecessors = &q.edges[node as usize].predecessors;
@@ -101,8 +176,15 @@ pub fn resolve_query_graph<'search>(

        let node_docids = match n {
            QueryNode::Term(located_term) => {
                let term = &located_term.value;
                let derivations_docids = ctx.get_node_docids(term, node)?;
                let derivations_docids = query_term_docids.get_query_term_docids(
                    index,
                    txn,
                    db_cache,
                    word_interner,
                    derivations_interner,
                    phrase_interner,
                    &located_term.value,
                )?;
                predecessors_docids & derivations_docids
            }
            QueryNode::Deleted => {
@@ -122,19 +204,24 @@ pub fn resolve_query_graph<'search>(
            }
        }

        // This is currently slow but could easily be implemented very efficiently
        for prec in q.edges[node as usize].predecessors.iter() {
            if q.edges[prec as usize].successors.is_subset(&nodes_resolved) {
                path_nodes_docids[prec as usize].clear();
            }
        }
    }

    panic!()
}

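`resolve_query_graph` walks the graph from the root, only processing a node once all of its predecessors are resolved, and clears a predecessor's bitmap as soon as every successor has consumed it. A graph-traversal sketch with plain adjacency lists (no Meilisearch types; the real function also intersects bitmaps along the way):

```rust
use std::collections::VecDeque;

/// Sketch of the traversal order used by resolve_query_graph: a node is
/// processed only once all of its predecessors have been processed; a node
/// popped too early is simply re-enqueued by its last resolving predecessor.
fn traversal_order(
    predecessors: &[Vec<usize>],
    successors: &[Vec<usize>],
    root: usize,
) -> Vec<usize> {
    let mut resolved = vec![false; predecessors.len()];
    let mut order = Vec::new();
    let mut queue = VecDeque::new();
    queue.push_back(root);
    while let Some(node) = queue.pop_front() {
        if resolved[node] || predecessors[node].iter().any(|&p| !resolved[p]) {
            continue; // already done, or some predecessor is still pending
        }
        resolved[node] = true;
        order.push(node);
        for &succ in &successors[node] {
            queue.push_back(succ);
        }
    }
    order
}
```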
pub fn resolve_phrase(ctx: &mut SearchContext, phrase: Interned<Phrase>) -> Result<RoaringBitmap> {
    let Phrase { words } = ctx.phrase_interner.get(phrase).clone();
pub fn resolve_phrase<'search>(
    index: &Index,
    txn: &'search RoTxn,
    db_cache: &mut DatabaseCache<'search>,
    word_interner: &Interner<String>,
    phrase_interner: &Interner<Phrase>,
    phrase: Interned<Phrase>,
) -> Result<RoaringBitmap> {
    let Phrase { words } = phrase_interner.get(phrase).clone();
    let mut candidates = RoaringBitmap::new();
    let mut first_iter = true;
    let winsize = words.len().min(3);
@@ -158,7 +245,14 @@ pub fn resolve_phrase(ctx: &mut SearchContext, phrase: Interned<Phrase>) -> Resu
                .filter_map(|(index, word)| word.as_ref().map(|word| (index, word)))
            {
                if dist == 0 {
                    match ctx.get_word_pair_proximity_docids(s1, s2, 1)? {
                    match db_cache.get_word_pair_proximity_docids(
                        index,
                        txn,
                        word_interner,
                        s1,
                        s2,
                        1,
                    )? {
                        Some(m) => bitmaps.push(CboRoaringBitmapCodec::deserialize_from(m)?),
                        // If there are no documents for this pair, there will be no
                        // results for the phrase query.
@@ -167,9 +261,14 @@ pub fn resolve_phrase(ctx: &mut SearchContext, phrase: Interned<Phrase>) -> Resu
                } else {
                    let mut bitmap = RoaringBitmap::new();
                    for dist in 0..=dist {
                        if let Some(m) =
                            ctx.get_word_pair_proximity_docids(s1, s2, dist as u8 + 1)?
                        {
                        if let Some(m) = db_cache.get_word_pair_proximity_docids(
                            index,
                            txn,
                            word_interner,
                            s1,
                            s2,
                            dist as u8 + 1,
                        )? {
                            bitmap |= CboRoaringBitmapCodec::deserialize_from(m)?;
                        }
                    }

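`resolve_phrase` slides a window of at most 3 words over the phrase; inside each window, every ordered pair of words at distance `d` must co-occur at some proximity in `1..=d + 1`. A sketch that just enumerates those pair constraints, without the database lookups (a hypothetical helper; `None` entries stand for removed stop words as in the real code, and overlapping windows may repeat a pair):

```rust
/// Enumerate the (s1, s2, max_proximity) constraints that a phrase imposes,
/// using a sliding window of at most 3 words.
fn phrase_pair_constraints(words: &[Option<&str>]) -> Vec<(String, String, u8)> {
    let mut constraints = Vec::new();
    if words.is_empty() {
        return constraints;
    }
    let winsize = words.len().min(3);
    for win in words.windows(winsize) {
        for (i, s1) in win.iter().enumerate().filter_map(|(i, w)| w.map(|w| (i, w))) {
            for (dist, s2) in
                win[i + 1..].iter().enumerate().filter_map(|(d, w)| w.map(|w| (d, w)))
            {
                // Words `dist` apart in the phrase may match at proximity 1..=dist + 1.
                constraints.push((s1.to_owned(), s2.to_owned(), dist as u8 + 1));
            }
        }
    }
    constraints
}
```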
@@ -18,13 +18,6 @@ impl SmallBitmap {
        }
        s
    }
    pub fn from_array(xs: &[u16], universe_length: u16) -> Self {
        let mut s = Self::new(universe_length);
        for x in xs {
            s.insert(*x);
        }
        s
    }
    pub fn is_empty(&self) -> bool {
        match self {
            SmallBitmap::Tiny(set) => *set == 0,
@@ -81,27 +74,6 @@ impl SmallBitmap {
        };
        *set &= !(0b1 << x);
    }
    // fn iter_single(mut set: u64, mut visit: impl FnMut(u16) -> Result<()>) -> Result<()> {
    //     while set > 0 {
    //         let idx = set.trailing_zeros() as u16;
    //         visit(idx)?;
    //         set &= set - 1;
    //     }
    //     Ok(())
    // }
    // pub fn iter(&self, mut visit: impl FnMut(u16) -> Result<()>) -> Result<()> {
    //     match self {
    //         SmallBitmap::Tiny(set) => Self::iter_single(*set, &mut visit),
    //         SmallBitmap::Small(sets) => {
    //             let mut base = 0;
    //             for set in sets.iter() {
    //                 Self::iter_single(*set, |x| visit(base + x))?;
    //                 base += 64;
    //             }
    //             Ok(())
    //         }
    //     }
    // }

    pub fn intersection(&mut self, other: &SmallBitmap) {
        self.apply_op(other, |a, b| *a &= b);

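The commented-out `iter_single` above relies on a standard bit trick: `set & (set - 1)` clears the lowest set bit, so combining it with `trailing_zeros` visits each set bit in increasing order. A tiny self-contained version for reference (assuming a plain `u64`, no error plumbing):

```rust
/// Visit the index of every set bit of `set`, lowest first.
/// `set &= set - 1` clears the lowest set bit at each step.
fn for_each_bit(mut set: u64, mut visit: impl FnMut(u16)) {
    while set > 0 {
        visit(set.trailing_zeros() as u16);
        set &= set - 1;
    }
}

// Example: for_each_bit(0b101001, |x| println!("{x}")) prints 0, 3, 5.
```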
@@ -1,10 +1,31 @@
use roaring::RoaringBitmap;

use super::logger::SearchLogger;
use super::{
    RankingRule, RankingRuleOutput, RankingRuleOutputIter, RankingRuleOutputIterWrapper,
    RankingRuleQueryTrait, SearchContext,
};
use super::{RankingRule, RankingRuleOutput, RankingRuleQueryTrait, SearchContext};

pub trait RankingRuleOutputIter<'search, Query> {
    fn next_bucket(&mut self) -> Result<Option<RankingRuleOutput<Query>>>;
}

pub struct RankingRuleOutputIterWrapper<'search, Query> {
    iter: Box<dyn Iterator<Item = Result<RankingRuleOutput<Query>>> + 'search>,
}
impl<'search, Query> RankingRuleOutputIterWrapper<'search, Query> {
    pub fn new(iter: Box<dyn Iterator<Item = Result<RankingRuleOutput<Query>>> + 'search>) -> Self {
        Self { iter }
    }
}
impl<'search, Query> RankingRuleOutputIter<'search, Query>
    for RankingRuleOutputIterWrapper<'search, Query>
{
    fn next_bucket(&mut self) -> Result<Option<RankingRuleOutput<Query>>> {
        match self.iter.next() {
            Some(x) => x.map(Some),
            None => Ok(None),
        }
    }
}

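For context, `RankingRuleOutputIterWrapper` just adapts any boxed fallible iterator to the `next_bucket` interface. A hypothetical usage inside a function returning `crate::Result<()>`, assuming the elided field of `RankingRuleOutput` is named `query`:

```rust
use roaring::RoaringBitmap;

// Hypothetical usage: wrap precomputed buckets into the trait interface.
let buckets = vec![RankingRuleOutput {
    query: PlaceholderQuery,
    candidates: RoaringBitmap::from_iter(0u32..10),
}];
let mut iter = RankingRuleOutputIterWrapper::new(Box::new(buckets.into_iter().map(Ok)));
while let Some(bucket) = iter.next_bucket()? {
    // Each bucket carries the child query and its candidate docids.
    let _ = (&bucket.query, bucket.candidates.len());
}
```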
use crate::{
    // facet::FacetType,
    heed_codec::{facet::FacetGroupKeyCodec, ByteSliceRefCodec},