rename located_query_terms_from_string -> located_query_terms_from_tokens

This commit is contained in:
Louis Dureuil
2023-05-02 18:53:01 +02:00
parent aa63091752
commit 7b8cc25625
5 changed files with 8 additions and 9 deletions

View File

@ -235,7 +235,7 @@ pub(crate) mod tests {
use charabia::{TokenKind, TokenizerBuilder};
-use super::super::super::located_query_terms_from_string;
+use super::super::super::located_query_terms_from_tokens;
use super::*;
use crate::index::tests::TempIndex;
@ -256,7 +256,7 @@ pub(crate) mod tests {
let mut ctx = SearchContext::new(&temp_index, &rtxn);
let tokenizer = TokenizerBuilder::new().build();
let tokens = tokenizer.tokenize("split this world");
-let query_terms = located_query_terms_from_string(&mut ctx, tokens, None).unwrap();
+let query_terms = located_query_terms_from_tokens(&mut ctx, tokens, None).unwrap();
let matching_words = MatchingWords::new(ctx, query_terms);
assert_eq!(

View File

@ -499,7 +499,7 @@ mod tests {
use charabia::TokenizerBuilder;
use matching_words::tests::temp_index_with_documents;
-use super::super::located_query_terms_from_string;
+use super::super::located_query_terms_from_tokens;
use super::*;
use crate::SearchContext;
@ -507,7 +507,7 @@ mod tests {
pub fn new_test(mut ctx: SearchContext, query: &'a str) -> Self {
let tokenizer = TokenizerBuilder::new().build();
let tokens = tokenizer.tokenize(query);
-let query_terms = located_query_terms_from_string(&mut ctx, tokens, None).unwrap();
+let query_terms = located_query_terms_from_tokens(&mut ctx, tokens, None).unwrap();
let matching_words = MatchingWords::new(ctx, query_terms);
Self::new(matching_words, TokenizerBuilder::new().build())
}