2022-09-08 19:28:17 +08:00
|
|
|
pub use self::facet::{FacetDistribution, Filter, DEFAULT_VALUES_PER_FACET};
|
2022-03-16 00:28:57 +08:00
|
|
|
use self::fst_utils::{Complement, Intersection, StartsWith, Union};
|
2022-04-12 22:31:58 +08:00
|
|
|
pub use self::matches::{
|
|
|
|
FormatOptions, MatchBounds, Matcher, MatcherBuilder, MatchingWord, MatchingWords,
|
|
|
|
};
|
2023-03-23 16:35:53 +08:00
|
|
|
use crate::{
|
|
|
|
execute_search, AscDesc, DefaultSearchLogger, DocumentId, Index, Result, SearchContext,
|
|
|
|
};
|
|
|
|
use fst::automaton::Str;
|
|
|
|
use fst::{Automaton, IntoStreamer, Streamer};
|
|
|
|
use levenshtein_automata::{LevenshteinAutomatonBuilder as LevBuilder, DFA};
|
|
|
|
use once_cell::sync::Lazy;
|
|
|
|
use roaring::bitmap::RoaringBitmap;
|
|
|
|
use std::borrow::Cow;
|
|
|
|
use std::collections::hash_map::{Entry, HashMap};
|
|
|
|
use std::fmt;
|
|
|
|
use std::result::Result as StdResult;
|
|
|
|
use std::str::Utf8Error;
|
2021-06-15 17:51:32 +08:00
|
|
|
|
2020-11-20 17:54:41 +08:00
|
|
|
// Building these factories is not free.
// Shared Levenshtein automaton builders for 0, 1 and 2 typos, built once and
// reused by `build_dfa`. The second argument presumably enables counting a
// transposition as a single edit — confirm against the
// `levenshtein_automata` crate documentation.
static LEVDIST0: Lazy<LevBuilder> = Lazy::new(|| LevBuilder::new(0, true));
static LEVDIST1: Lazy<LevBuilder> = Lazy::new(|| LevBuilder::new(1, true));
static LEVDIST2: Lazy<LevBuilder> = Lazy::new(|| LevBuilder::new(2, true));
|
|
|
|
|
2022-08-31 19:03:36 +08:00
|
|
|
pub mod facet;
|
2022-03-16 00:28:57 +08:00
|
|
|
mod fst_utils;
|
2022-03-22 22:22:14 +08:00
|
|
|
mod matches;
|
2023-02-21 16:40:41 +08:00
|
|
|
pub mod new;
|
2021-06-17 00:33:33 +08:00
|
|
|
mod query_tree;
|
2020-11-20 17:54:41 +08:00
|
|
|
|
|
|
|
/// A builder configuring a single search over an [`Index`].
///
/// Construct it with [`Search::new`], tweak it through the chainable setter
/// methods, then run it with [`Search::execute`].
pub struct Search<'a> {
    /// The user query string, if any.
    query: Option<String>,
    // this should be linked to the String in the query
    /// The filter the candidate documents must satisfy.
    filter: Option<Filter<'a>>,
    /// Number of leading results to skip.
    offset: usize,
    /// Maximum number of documents to return.
    limit: usize,
    /// Optional ascending/descending sort rules.
    sort_criteria: Option<Vec<AscDesc>>,
    /// How query terms may be dropped when the query is too restrictive.
    terms_matching_strategy: TermsMatchingStrategy,
    /// Whether this query allows typos (combined with the index setting).
    authorize_typos: bool,
    /// Maximum number of words taken from the query.
    words_limit: usize,
    /// Whether to exhaustively compute the total number of hits.
    exhaustive_number_hits: bool,
    /// Which criterion implementation (iterative/set-based) to favor.
    criterion_implementation_strategy: CriterionImplementationStrategy,
    /// Read transaction used for every index lookup during the search.
    rtxn: &'a heed::RoTxn<'a>,
    /// The index being searched.
    index: &'a Index,
}
|
|
|
|
|
|
|
|
impl<'a> Search<'a> {
    /// Creates a search builder over `index`, reading through the `rtxn`
    /// read transaction.
    ///
    /// Defaults: no query, no filter, offset 0, limit 20, no sort criteria,
    /// default terms-matching strategy, typos authorized, a 10-words query
    /// limit, non-exhaustive hit counting, and the default criterion
    /// implementation strategy.
    pub fn new(rtxn: &'a heed::RoTxn, index: &'a Index) -> Search<'a> {
        Search {
            query: None,
            filter: None,
            offset: 0,
            limit: 20,
            sort_criteria: None,
            terms_matching_strategy: TermsMatchingStrategy::default(),
            authorize_typos: true,
            exhaustive_number_hits: false,
            words_limit: 10,
            criterion_implementation_strategy: CriterionImplementationStrategy::default(),
            rtxn,
            index,
        }
    }

    /// Sets the query string to search for.
    pub fn query(&mut self, query: impl Into<String>) -> &mut Search<'a> {
        self.query = Some(query.into());
        self
    }

    /// Sets the number of leading results to skip.
    pub fn offset(&mut self, offset: usize) -> &mut Search<'a> {
        self.offset = offset;
        self
    }

    /// Sets the maximum number of documents to return.
    pub fn limit(&mut self, limit: usize) -> &mut Search<'a> {
        self.limit = limit;
        self
    }

    /// Sets the ascending/descending sort rules applied to the results.
    pub fn sort_criteria(&mut self, criteria: Vec<AscDesc>) -> &mut Search<'a> {
        self.sort_criteria = Some(criteria);
        self
    }

    /// Sets how query terms may be dropped when the query is too restrictive.
    pub fn terms_matching_strategy(&mut self, value: TermsMatchingStrategy) -> &mut Search<'a> {
        self.terms_matching_strategy = value;
        self
    }

    /// Allows or forbids typos for this query; the index-level setting still
    /// applies on top of it (see `is_typo_authorized`).
    pub fn authorize_typos(&mut self, value: bool) -> &mut Search<'a> {
        self.authorize_typos = value;
        self
    }

    /// Sets the maximum number of words taken from the query.
    pub fn words_limit(&mut self, value: usize) -> &mut Search<'a> {
        self.words_limit = value;
        self
    }

    /// Sets the filter the candidate documents must satisfy.
    pub fn filter(&mut self, condition: Filter<'a>) -> &mut Search<'a> {
        self.filter = Some(condition);
        self
    }

    /// Force the search to exhaustively compute the number of candidates,
    /// this will increase the search time but allows finite pagination.
    pub fn exhaustive_number_hits(&mut self, exhaustive_number_hits: bool) -> &mut Search<'a> {
        self.exhaustive_number_hits = exhaustive_number_hits;
        self
    }

    /// Chooses between the iterative and set-based criterion implementations.
    pub fn criterion_implementation_strategy(
        &mut self,
        strategy: CriterionImplementationStrategy,
    ) -> &mut Search<'a> {
        self.criterion_implementation_strategy = strategy;
        self
    }

    /// Returns whether typos are effectively authorized: both the index
    /// setting and this query's setting must allow them.
    fn is_typo_authorized(&self) -> Result<bool> {
        let index_authorizes_typos = self.index.authorize_typos(self.rtxn)?;
        // only authorize typos if both the index and the query allow it.
        Ok(self.authorize_typos && index_authorizes_typos)
    }

    /// Runs the configured search and returns the matching documents.
    ///
    /// NOTE(review): several configured options (`sort_criteria`,
    /// `authorize_typos`, `exhaustive_number_hits`,
    /// `criterion_implementation_strategy`) are not forwarded to
    /// `execute_search` here — confirm this is intentional during the
    /// transition to the new search engine.
    pub fn execute(&self) -> Result<SearchResult> {
        let mut ctx = SearchContext::new(self.index, self.rtxn);
        execute_search(
            &mut ctx,
            &self.query,
            self.terms_matching_strategy,
            &self.filter,
            self.offset,
            self.limit,
            Some(self.words_limit),
            &mut DefaultSearchLogger,
            &mut DefaultSearchLogger,
        )
    }
}
|
|
|
|
|
|
|
|
impl fmt::Debug for Search<'_> {
|
|
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
2021-03-10 18:16:30 +08:00
|
|
|
let Search {
|
|
|
|
query,
|
2021-06-01 21:25:17 +08:00
|
|
|
filter,
|
2021-03-10 18:16:30 +08:00
|
|
|
offset,
|
|
|
|
limit,
|
2021-08-23 17:37:18 +08:00
|
|
|
sort_criteria,
|
2022-08-22 23:37:36 +08:00
|
|
|
terms_matching_strategy,
|
2021-03-10 18:16:30 +08:00
|
|
|
authorize_typos,
|
2021-04-14 01:10:58 +08:00
|
|
|
words_limit,
|
2022-07-12 23:56:50 +08:00
|
|
|
exhaustive_number_hits,
|
2022-12-12 23:54:31 +08:00
|
|
|
criterion_implementation_strategy,
|
2021-03-10 18:16:30 +08:00
|
|
|
rtxn: _,
|
|
|
|
index: _,
|
|
|
|
} = self;
|
2020-11-20 17:54:41 +08:00
|
|
|
f.debug_struct("Search")
|
2020-11-22 22:40:11 +08:00
|
|
|
.field("query", query)
|
2021-06-01 21:25:17 +08:00
|
|
|
.field("filter", filter)
|
2020-11-22 22:40:11 +08:00
|
|
|
.field("offset", offset)
|
|
|
|
.field("limit", limit)
|
2021-08-23 17:37:18 +08:00
|
|
|
.field("sort_criteria", sort_criteria)
|
2022-08-22 23:37:36 +08:00
|
|
|
.field("terms_matching_strategy", terms_matching_strategy)
|
2021-03-10 18:16:30 +08:00
|
|
|
.field("authorize_typos", authorize_typos)
|
2022-07-12 23:56:50 +08:00
|
|
|
.field("exhaustive_number_hits", exhaustive_number_hits)
|
2022-12-12 23:54:31 +08:00
|
|
|
.field("criterion_implementation_strategy", criterion_implementation_strategy)
|
2021-04-14 01:10:58 +08:00
|
|
|
.field("words_limit", words_limit)
|
2020-11-20 17:54:41 +08:00
|
|
|
.finish()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-02-07 00:50:47 +08:00
|
|
|
/// The outcome of a [`Search::execute`] call.
#[derive(Default, Debug)]
pub struct SearchResult {
    /// The query words that were matched — presumably consumed by the
    /// highlighting/matcher machinery; verify against callers.
    pub matching_words: MatchingWords,
    /// The set of all candidate documents for the query.
    pub candidates: RoaringBitmap,
    // TODO those documents ids should be associated with their criteria scores.
    /// The ranked ids of the documents to return, in order.
    pub documents_ids: Vec<DocumentId>,
}
|
2021-03-03 19:03:31 +08:00
|
|
|
|
2022-12-12 23:54:31 +08:00
|
|
|
/// Selects which implementation the ranking criteria should use.
#[derive(Debug, Default, Clone, Copy)]
pub enum CriterionImplementationStrategy {
    /// Always use the iterative implementation.
    OnlyIterative,
    /// Always use the set-based implementation.
    OnlySetBased,
    /// Let the engine pick an implementation on a case-by-case basis.
    #[default]
    Dynamic,
}
|
|
|
|
|
2022-08-18 23:36:08 +08:00
|
|
|
/// Controls which query words may be dropped when a query is too restrictive.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TermsMatchingStrategy {
    /// Remove the last word first.
    Last,
    /// All words are mandatory.
    All,
}
|
|
|
|
|
|
|
|
impl Default for TermsMatchingStrategy {
|
|
|
|
fn default() -> Self {
|
|
|
|
Self::Last
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-03-05 18:02:24 +08:00
|
|
|
/// Memoization cache for [`word_derivations`], keyed by
/// `(word, is_prefix, max_typo)` and storing the derived words with their
/// edit distance.
pub type WordDerivationsCache = HashMap<(String, bool, u8), Vec<(String, u8)>>;
|
|
|
|
|
|
|
|
/// Finds every word of `fst` within `max_typo` edits of `word`, returning
/// each match together with its actual edit distance.
///
/// When `is_prefix` is true, prefix matches are accepted as well. Results
/// are memoized in `cache` under `(word, is_prefix, max_typo)` and a slice
/// into the cache entry is returned.
///
/// Returns a `Utf8Error` if a key stored in the `fst` is not valid UTF-8.
pub fn word_derivations<'c>(
    word: &str,
    is_prefix: bool,
    max_typo: u8,
    fst: &fst::Set<Cow<[u8]>>,
    cache: &'c mut WordDerivationsCache,
) -> StdResult<&'c [(String, u8)], Utf8Error> {
    match cache.entry((word.to_string(), is_prefix, max_typo)) {
        // Already computed for this (word, is_prefix, max_typo) triple.
        Entry::Occupied(entry) => Ok(entry.into_mut()),
        Entry::Vacant(entry) => {
            let mut derived_words = Vec::new();
            if max_typo == 0 {
                // No typo allowed: either stream every word sharing the
                // prefix, or simply test exact membership.
                if is_prefix {
                    let prefix = Str::new(word).starts_with();
                    let mut stream = fst.search(prefix).into_stream();

                    while let Some(word) = stream.next() {
                        let word = std::str::from_utf8(word)?;
                        derived_words.push((word.to_string(), 0));
                    }
                } else if fst.contains(word) {
                    derived_words.push((word.to_string(), 0));
                }
            } else if max_typo == 1 {
                // One typo allowed, but never on the first letter: the
                // Levenshtein DFA is intersected with a "starts with the
                // first letter" automaton.
                let dfa = build_dfa(word, 1, is_prefix);
                let starts = StartsWith(Str::new(get_first(word)));
                let mut stream = fst.search_with_state(Intersection(starts, &dfa)).into_stream();

                while let Some((word, state)) = stream.next() {
                    let word = std::str::from_utf8(word)?;
                    let d = dfa.distance(state.1);
                    derived_words.push((word.to_string(), d.to_u8()));
                }
            } else {
                // Two typos allowed. A typo on the first letter consumes the
                // whole budget: `first` accepts words at distance <= 1 that
                // do NOT keep the first letter, while `second` gives the full
                // 2-typo budget to words that do keep it.
                let starts = StartsWith(Str::new(get_first(word)));
                let first = Intersection(build_dfa(word, 1, is_prefix), Complement(&starts));
                let second_dfa = build_dfa(word, 2, is_prefix);
                let second = Intersection(&second_dfa, &starts);
                let automaton = Union(first, &second);

                let mut stream = fst.search_with_state(automaton).into_stream();

                while let Some((found_word, state)) = stream.next() {
                    let found_word = std::str::from_utf8(found_word)?;
                    // in the case the typo is on the first letter, we know the number of typo
                    // is two
                    if get_first(found_word) != get_first(word) {
                        derived_words.push((found_word.to_string(), 2));
                    } else {
                        // Else, we know that it is the second dfa that matched and compute the
                        // correct distance
                        let d = second_dfa.distance((state.1).0);
                        derived_words.push((found_word.to_string(), d.to_u8()));
                    }
                }
            }
            // Store the freshly computed derivations and return a borrow.
            Ok(entry.insert(derived_words))
        }
    }
}
|
2021-02-25 00:44:35 +08:00
|
|
|
|
2022-01-21 01:35:11 +08:00
|
|
|
/// Returns the first character of `s`, as a sub-slice of `s`.
///
/// # Panics
///
/// Panics with "unexpected empty query" when `s` is empty.
fn get_first(s: &str) -> &str {
    let first_char = s.chars().next().expect("unexpected empty query");
    // Slice exactly the UTF-8 bytes of the leading character.
    &s[..first_char.len_utf8()]
}
|
|
|
|
|
2021-02-25 00:44:35 +08:00
|
|
|
pub fn build_dfa(word: &str, typos: u8, is_prefix: bool) -> DFA {
|
|
|
|
let lev = match typos {
|
|
|
|
0 => &LEVDIST0,
|
|
|
|
1 => &LEVDIST1,
|
|
|
|
_ => &LEVDIST2,
|
|
|
|
};
|
|
|
|
|
|
|
|
if is_prefix {
|
|
|
|
lev.build_prefix_dfa(word)
|
|
|
|
} else {
|
|
|
|
lev.build_dfa(word)
|
|
|
|
}
|
|
|
|
}
|
2022-03-31 15:54:49 +08:00
|
|
|
|
|
|
|
#[cfg(test)]
mod test {
    use super::*;
    use crate::index::tests::TempIndex;

    // End-to-end check that a kanji query only matches the Japanese document.
    #[cfg(feature = "default")]
    #[test]
    fn test_kanji_language_detection() {
        let index = TempIndex::new();

        index
            .add_documents(documents!([
                { "id": 0, "title": "The quick (\"brown\") fox can't jump 32.3 feet, right? Brr, it's 29.3°F!" },
                { "id": 1, "title": "東京のお寿司。" },
                { "id": 2, "title": "הַשּׁוּעָל הַמָּהִיר (״הַחוּם״) לֹא יָכוֹל לִקְפֹּץ 9.94 מֶטְרִים, נָכוֹן? ברר, 1.5°C- בַּחוּץ!" }
            ]))
            .unwrap();

        let txn = index.write_txn().unwrap();
        let mut search = Search::new(&txn, &index);

        search.query("東京");
        let SearchResult { documents_ids, .. } = search.execute().unwrap();

        assert_eq!(documents_ids, vec![1]);
    }

    // Typo authorization must be the conjunction of the index setting and
    // the per-query setting.
    #[test]
    fn test_is_authorized_typos() {
        let index = TempIndex::new();
        let mut txn = index.write_txn().unwrap();

        let mut search = Search::new(&txn, &index);

        // default is authorized
        assert!(search.is_typo_authorized().unwrap());

        search.authorize_typos(false);
        assert!(!search.is_typo_authorized().unwrap());

        index.put_authorize_typos(&mut txn, false).unwrap();
        txn.commit().unwrap();

        let txn = index.read_txn().unwrap();
        let mut search = Search::new(&txn, &index);

        assert!(!search.is_typo_authorized().unwrap());

        // The query cannot override an index that forbids typos.
        search.authorize_typos(true);
        assert!(!search.is_typo_authorized().unwrap());
    }

    // A single substitution within a one-typo budget is found at distance 1.
    #[test]
    fn test_one_typos_tolerance() {
        let fst = fst::Set::from_iter(["zealand"].iter()).unwrap().map_data(Cow::Owned).unwrap();
        let mut cache = HashMap::new();
        let found = word_derivations("zealend", false, 1, &fst, &mut cache).unwrap();

        assert_eq!(found, &[("zealand".to_string(), 1)]);
    }

    // A typo on the first letter is rejected with a one-typo budget.
    #[test]
    fn test_one_typos_first_letter() {
        let fst = fst::Set::from_iter(["zealand"].iter()).unwrap().map_data(Cow::Owned).unwrap();
        let mut cache = HashMap::new();
        let found = word_derivations("sealand", false, 1, &fst, &mut cache).unwrap();

        assert_eq!(found, &[]);
    }

    // Two substitutions within a two-typo budget are found at distance 2.
    #[test]
    fn test_two_typos_tolerance() {
        let fst = fst::Set::from_iter(["zealand"].iter()).unwrap().map_data(Cow::Owned).unwrap();
        let mut cache = HashMap::new();
        let found = word_derivations("zealemd", false, 2, &fst, &mut cache).unwrap();

        assert_eq!(found, &[("zealand".to_string(), 2)]);
    }

    // A typo on the first letter counts as two typos.
    #[test]
    fn test_two_typos_first_letter() {
        let fst = fst::Set::from_iter(["zealand"].iter()).unwrap().map_data(Cow::Owned).unwrap();
        let mut cache = HashMap::new();
        let found = word_derivations("sealand", false, 2, &fst, &mut cache).unwrap();

        assert_eq!(found, &[("zealand".to_string(), 2)]);
    }

    // A zero-typo prefix search finds the full word at distance 0.
    #[test]
    fn test_prefix() {
        let fst = fst::Set::from_iter(["zealand"].iter()).unwrap().map_data(Cow::Owned).unwrap();
        let mut cache = HashMap::new();
        let found = word_derivations("ze", true, 0, &fst, &mut cache).unwrap();

        assert_eq!(found, &[("zealand".to_string(), 0)]);
    }

    // A zero-typo prefix search finds nothing for a non-matching prefix.
    #[test]
    fn test_bad_prefix() {
        let fst = fst::Set::from_iter(["zealand"].iter()).unwrap().map_data(Cow::Owned).unwrap();
        let mut cache = HashMap::new();
        let found = word_derivations("se", true, 0, &fst, &mut cache).unwrap();

        assert_eq!(found, &[]);
    }

    // A prefix with one typo is found at distance 1.
    #[test]
    fn test_prefix_with_typo() {
        let fst = fst::Set::from_iter(["zealand"].iter()).unwrap().map_data(Cow::Owned).unwrap();
        let mut cache = HashMap::new();
        let found = word_derivations("zae", true, 1, &fst, &mut cache).unwrap();

        assert_eq!(found, &[("zealand".to_string(), 1)]);
    }
}
|