diff --git a/milli/src/search/new/matches/mod.rs b/milli/src/search/new/matches/mod.rs
index a84b25923..5a4f0b914 100644
--- a/milli/src/search/new/matches/mod.rs
+++ b/milli/src/search/new/matches/mod.rs
@@ -245,27 +245,6 @@ impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
         self
     }
 
-    /// Returns boundaries of the words that match the query.
-    pub fn matches(&mut self) -> Vec<MatchBounds> {
-        match &self.matches {
-            None => self.compute_matches().matches(),
-            Some((tokens, matches)) => matches
-                .iter()
-                .map(|m| MatchBounds {
-                    start: tokens[match m.position {
-                        MatchPosition::Word { token_position, .. } => token_position,
-                        MatchPosition::Phrase {
-                            token_positions: (first_token_position, _),
-                            ..
-                        } => first_token_position,
-                    }]
-                    .byte_start,
-                    length: m.match_len,
-                })
-                .collect(),
-        }
-    }
-
     fn get_match_pos(&self, m: &Match, wt: WT, fl: FL) -> usize {
         match m.position {
             MatchPosition::Word { word_position, token_position } => match wt {
@@ -287,6 +266,20 @@ impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
         }
     }
 
+    /// Returns boundaries of the words that match the query.
+    pub fn matches(&mut self) -> Vec<MatchBounds> {
+        match &self.matches {
+            None => self.compute_matches().matches(),
+            Some((tokens, matches)) => matches
+                .iter()
+                .map(|m| MatchBounds {
+                    start: tokens[self.get_match_pos(m, WT::Token, FL::First)].byte_start,
+                    length: m.match_len,
+                })
+                .collect(),
+        }
+    }
+
     /// Returns the bounds in byte index of the crop window.
     fn crop_bounds(
         &self,