Mirror of https://github.com/meilisearch/meilisearch.git
rename located_query_terms_from_string -> located_query_terms_from_tokens
Commit 7b8cc25625 (parent aa63091752)
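The rename is purely mechanical: every call site keeps its arguments, and only the identifier changes. The new name describes what the function actually receives, since callers run the charabia tokenizer themselves and pass the resulting `NormalizedTokenIter` in, rather than a raw query string. The call pattern, condensed from the test in the second hunk below (`temp_index` and `rtxn` are that test's fixtures):

```rust
// Tokenization happens at the call site; the parser receives tokens, not a string.
let mut ctx = SearchContext::new(&temp_index, &rtxn);
let tokenizer = TokenizerBuilder::new().build();
let tokens = tokenizer.tokenize("split this world");
// The renamed function turns the token stream into located query terms.
let query_terms = located_query_terms_from_tokens(&mut ctx, tokens, None).unwrap();
let matching_words = MatchingWords::new(ctx, query_terms);
```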
```diff
@@ -235,7 +235,7 @@ pub(crate) mod tests {
 
     use charabia::{TokenKind, TokenizerBuilder};
 
-    use super::super::super::located_query_terms_from_string;
+    use super::super::super::located_query_terms_from_tokens;
     use super::*;
     use crate::index::tests::TempIndex;
 
```
```diff
@@ -256,7 +256,7 @@ pub(crate) mod tests {
         let mut ctx = SearchContext::new(&temp_index, &rtxn);
         let tokenizer = TokenizerBuilder::new().build();
         let tokens = tokenizer.tokenize("split this world");
-        let query_terms = located_query_terms_from_string(&mut ctx, tokens, None).unwrap();
+        let query_terms = located_query_terms_from_tokens(&mut ctx, tokens, None).unwrap();
         let matching_words = MatchingWords::new(ctx, query_terms);
 
         assert_eq!(
```
```diff
@@ -499,7 +499,7 @@ mod tests {
     use charabia::TokenizerBuilder;
     use matching_words::tests::temp_index_with_documents;
 
-    use super::super::located_query_terms_from_string;
+    use super::super::located_query_terms_from_tokens;
     use super::*;
     use crate::SearchContext;
 
```
```diff
@@ -507,7 +507,7 @@ mod tests {
         pub fn new_test(mut ctx: SearchContext, query: &'a str) -> Self {
             let tokenizer = TokenizerBuilder::new().build();
             let tokens = tokenizer.tokenize(query);
-            let query_terms = located_query_terms_from_string(&mut ctx, tokens, None).unwrap();
+            let query_terms = located_query_terms_from_tokens(&mut ctx, tokens, None).unwrap();
             let matching_words = MatchingWords::new(ctx, query_terms);
             Self::new(matching_words, TokenizerBuilder::new().build())
         }
```
```diff
@@ -36,7 +36,7 @@ use interner::{DedupInterner, Interner};
 pub use logger::visual::VisualSearchLogger;
 pub use logger::{DefaultSearchLogger, SearchLogger};
 use query_graph::{QueryGraph, QueryNode};
-use query_term::{located_query_terms_from_string, LocatedQueryTerm, Phrase, QueryTerm};
+use query_term::{located_query_terms_from_tokens, LocatedQueryTerm, Phrase, QueryTerm};
 use ranking_rules::{
     BoxRankingRule, PlaceholderQuery, RankingRule, RankingRuleOutput, RankingRuleQueryTrait,
 };
```
```diff
@@ -387,7 +387,7 @@ pub fn execute_search(
         let tokenizer = tokbuilder.build();
         let tokens = tokenizer.tokenize(query);
 
-        let query_terms = located_query_terms_from_string(ctx, tokens, words_limit)?;
+        let query_terms = located_query_terms_from_tokens(ctx, tokens, words_limit)?;
         if query_terms.is_empty() {
             // Do a placeholder search instead
             None
```
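For orientation on the `execute_search` hunk above: the renamed call feeds the same fallback as before, where an empty term list (for example, a query consisting only of separators) routes the request to a placeholder search. A sketch of that branch; the non-empty arm is an assumption, since the hunk cuts off after `None`:

```rust
let query_terms = located_query_terms_from_tokens(ctx, tokens, words_limit)?;
if query_terms.is_empty() {
    // Do a placeholder search instead
    None
} else {
    // Hypothetical continuation: the located terms would feed query
    // construction before ranking (not shown in the hunk).
    Some(build_query_for(ctx, query_terms)?) // `build_query_for` is illustrative only
}
```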
```diff
@@ -10,7 +10,7 @@ use std::ops::RangeInclusive;
 use compute_derivations::partially_initialized_term_from_word;
 use either::Either;
 pub use ntypo_subset::NTypoTermSubset;
-pub use parse_query::{located_query_terms_from_string, make_ngram, number_of_typos_allowed};
+pub use parse_query::{located_query_terms_from_tokens, make_ngram, number_of_typos_allowed};
 pub use phrase::Phrase;
 
 use super::interner::{DedupInterner, Interned};
```
```diff
@@ -5,8 +5,7 @@ use super::*;
 use crate::{Result, SearchContext, MAX_WORD_LENGTH};
 
 /// Convert the tokenised search query into a list of located query terms.
-// TODO: checking if the positions are correct for phrases, separators, ngrams
-pub fn located_query_terms_from_string(
+pub fn located_query_terms_from_tokens(
     ctx: &mut SearchContext,
     query: NormalizedTokenIter<&[u8]>,
     words_limit: Option<usize>,
```
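Assembled from the final hunk, the renamed function's header reads as follows; the hunk also drops the stale positions TODO above the signature. The return type is not visible in the diff, so `Result<Vec<LocatedQueryTerm>>` is an assumption inferred from the call sites (`?` / `.unwrap()` and the later `query_terms.is_empty()` check):

```rust
/// Convert the tokenised search query into a list of located query terms.
pub fn located_query_terms_from_tokens(
    ctx: &mut SearchContext,
    query: NormalizedTokenIter<&[u8]>,
    words_limit: Option<usize>,
) -> Result<Vec<LocatedQueryTerm>> { // return type assumed, not shown in the hunk
    // Body unchanged by this commit; elided here.
    todo!()
}
```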