diff --git a/milli/src/search/new/matches/matching_words.rs b/milli/src/search/new/matches/matching_words.rs
index e9a728a01..92248345e 100644
--- a/milli/src/search/new/matches/matching_words.rs
+++ b/milli/src/search/new/matches/matching_words.rs
@@ -235,7 +235,7 @@ pub(crate) mod tests {
     use charabia::{TokenKind, TokenizerBuilder};
 
-    use super::super::super::located_query_terms_from_string;
+    use super::super::super::located_query_terms_from_tokens;
     use super::*;
     use crate::index::tests::TempIndex;
 
@@ -256,7 +256,7 @@ pub(crate) mod tests {
         let mut ctx = SearchContext::new(&temp_index, &rtxn);
         let tokenizer = TokenizerBuilder::new().build();
         let tokens = tokenizer.tokenize("split this world");
-        let query_terms = located_query_terms_from_string(&mut ctx, tokens, None).unwrap();
+        let query_terms = located_query_terms_from_tokens(&mut ctx, tokens, None).unwrap();
         let matching_words = MatchingWords::new(ctx, query_terms);
 
         assert_eq!(
diff --git a/milli/src/search/new/matches/mod.rs b/milli/src/search/new/matches/mod.rs
index 0db2c3660..1974ae431 100644
--- a/milli/src/search/new/matches/mod.rs
+++ b/milli/src/search/new/matches/mod.rs
@@ -499,7 +499,7 @@ mod tests {
     use charabia::TokenizerBuilder;
     use matching_words::tests::temp_index_with_documents;
 
-    use super::super::located_query_terms_from_string;
+    use super::super::located_query_terms_from_tokens;
     use super::*;
     use crate::SearchContext;
 
@@ -507,7 +507,7 @@ mod tests {
         pub fn new_test(mut ctx: SearchContext, query: &'a str) -> Self {
             let tokenizer = TokenizerBuilder::new().build();
             let tokens = tokenizer.tokenize(query);
-            let query_terms = located_query_terms_from_string(&mut ctx, tokens, None).unwrap();
+            let query_terms = located_query_terms_from_tokens(&mut ctx, tokens, None).unwrap();
             let matching_words = MatchingWords::new(ctx, query_terms);
             Self::new(matching_words, TokenizerBuilder::new().build())
         }
diff --git a/milli/src/search/new/mod.rs b/milli/src/search/new/mod.rs
index 2faf20a1d..0ba5613b5 100644
--- a/milli/src/search/new/mod.rs
+++ b/milli/src/search/new/mod.rs
@@ -36,7 +36,7 @@ use interner::{DedupInterner, Interner};
 pub use logger::visual::VisualSearchLogger;
 pub use logger::{DefaultSearchLogger, SearchLogger};
 use query_graph::{QueryGraph, QueryNode};
-use query_term::{located_query_terms_from_string, LocatedQueryTerm, Phrase, QueryTerm};
+use query_term::{located_query_terms_from_tokens, LocatedQueryTerm, Phrase, QueryTerm};
 use ranking_rules::{
     BoxRankingRule, PlaceholderQuery, RankingRule, RankingRuleOutput, RankingRuleQueryTrait,
 };
@@ -387,7 +387,7 @@ pub fn execute_search(
         let tokenizer = tokbuilder.build();
         let tokens = tokenizer.tokenize(query);
 
-        let query_terms = located_query_terms_from_string(ctx, tokens, words_limit)?;
+        let query_terms = located_query_terms_from_tokens(ctx, tokens, words_limit)?;
         if query_terms.is_empty() {
             // Do a placeholder search instead
             None
diff --git a/milli/src/search/new/query_term/mod.rs b/milli/src/search/new/query_term/mod.rs
index 5f1a45d83..a8e121094 100644
--- a/milli/src/search/new/query_term/mod.rs
+++ b/milli/src/search/new/query_term/mod.rs
@@ -10,7 +10,7 @@ use std::ops::RangeInclusive;
 use compute_derivations::partially_initialized_term_from_word;
 use either::Either;
 pub use ntypo_subset::NTypoTermSubset;
-pub use parse_query::{located_query_terms_from_string, make_ngram, number_of_typos_allowed};
+pub use parse_query::{located_query_terms_from_tokens, make_ngram, number_of_typos_allowed};
 pub use phrase::Phrase;
 
 use super::interner::{DedupInterner, Interned};
diff --git a/milli/src/search/new/query_term/parse_query.rs b/milli/src/search/new/query_term/parse_query.rs
index 734938551..dc317a0fb 100644
--- a/milli/src/search/new/query_term/parse_query.rs
+++ b/milli/src/search/new/query_term/parse_query.rs
@@ -5,8 +5,7 @@ use super::*;
 use crate::{Result, SearchContext, MAX_WORD_LENGTH};
 
 /// Convert the tokenised search query into a list of located query terms.
-// TODO: checking if the positions are correct for phrases, separators, ngrams
-pub fn located_query_terms_from_string(
+pub fn located_query_terms_from_tokens(
     ctx: &mut SearchContext,
     query: NormalizedTokenIter<&[u8]>,
     words_limit: Option<usize>,
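
For reference, a minimal sketch of the call pattern after the rename, assembled from the test code in this patch (temp_index_with_documents and the MatchingWords::new call are taken from the hunks above; the read_txn() setup is an assumption based on the usual TempIndex test pattern):

    use charabia::TokenizerBuilder;

    let temp_index = temp_index_with_documents();
    // Assumed: TempIndex exposes the standard read_txn() accessor.
    let rtxn = temp_index.read_txn().unwrap();
    let mut ctx = SearchContext::new(&temp_index, &rtxn);

    // Tokenize first, then build located query terms from the tokens;
    // the new name reflects that the input is a NormalizedTokenIter,
    // not a raw &str.
    let tokenizer = TokenizerBuilder::new().build();
    let tokens = tokenizer.tokenize("split this world");
    let query_terms = located_query_terms_from_tokens(&mut ctx, tokens, None).unwrap();
    let matching_words = MatchingWords::new(ctx, query_terms);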