mirror of https://github.com/meilisearch/meilisearch.git

Fix detailed logger

parent fdd02105ac
commit 3b0737a092
@@ -8,7 +8,9 @@ use roaring::RoaringBitmap;
 use crate::search::new::interner::{Interned, MappedInterner};
 use crate::search::new::query_graph::QueryNodeData;
-use crate::search::new::query_term::{LocatedQueryTerm, QueryTerm};
+use crate::search::new::query_term::{
+    Lazy, LocatedQueryTermSubset, OneTypoTerm, QueryTerm, TwoTypoTerm, ZeroTypoTerm,
+};
 use crate::search::new::ranking_rule_graph::{
     DeadEndsCache, Edge, ProximityCondition, ProximityGraph, RankingRuleGraph,
     RankingRuleGraphTrait, TypoCondition, TypoGraph,
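The new import list tracks the refactor of query terms that the rest of this commit adapts to: a QueryTerm no longer exposes all of its derivations as flat fields, but groups them by typo count, with the one- and two-typo groups computed lazily. A rough sketch of the shapes this diff destructures — the type, variant, and field names below all appear in the patch (except Lazy::Uninit), but the field types are assumptions:

use std::collections::BTreeSet;

// Sketch only: names taken from the destructuring patterns in this diff,
// types assumed. Interned<T> and Phrase are the crate's interner handles.
enum Lazy<T> {
    Uninit, // variant name assumed; only Lazy::Init appears in the patch
    Init(T),
}

struct ZeroTypoTerm {
    phrase: Option<Interned<Phrase>>,
    zero_typo: BTreeSet<Interned<String>>, // words matching with 0 typos
    prefix_of: BTreeSet<Interned<String>>, // words the term is a prefix of
    synonyms: BTreeSet<Interned<Phrase>>,
    use_prefix_db: Option<Interned<String>>,
}

struct OneTypoTerm {
    split_words: Option<Interned<Phrase>>,
    one_typo: BTreeSet<Interned<String>>,
}

struct TwoTypoTerm {
    two_typos: BTreeSet<Interned<String>>,
}

struct QueryTerm {
    original: Interned<String>,
    is_multiple_words: bool,
    is_prefix: bool,
    max_nbr_typos: u8,           // assumed width; only its Display use is visible
    zero_typo: ZeroTypoTerm,     // always available
    one_typo: Lazy<OneTypoTerm>, // filled in on demand
    two_typo: Lazy<TwoTypoTerm>,
}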
@@ -45,16 +47,16 @@ pub enum SearchEvents {
         paths: Vec<Vec<Interned<ProximityCondition>>>,
         dead_ends_cache: DeadEndsCache<ProximityCondition>,
         universe: RoaringBitmap,
-        distances: MappedInterner<QueryNode, Vec<u16>>,
-        cost: u16,
+        costs: MappedInterner<QueryNode, Vec<u64>>,
+        cost: u64,
     },
     TypoState {
         graph: RankingRuleGraph<TypoGraph>,
         paths: Vec<Vec<Interned<TypoCondition>>>,
         dead_ends_cache: DeadEndsCache<TypoCondition>,
         universe: RoaringBitmap,
-        distances: MappedInterner<QueryNode, Vec<u16>>,
-        cost: u16,
+        costs: MappedInterner<QueryNode, Vec<u64>>,
+        cost: u64,
     },
     RankingRuleSkipBucket {
         ranking_rule_idx: usize,
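Both snapshot variants above are recorded while a graph-based ranking rule iterates its buckets, so the per-node distances field becomes costs (one list of reachable costs per query node) and every cost widens from u16 to u64. A hypothetical replay loop, to illustrate why each field is an owned clone rather than a reference — the events outlive the search state they describe; render_proximity_state is a made-up name:

fn replay(events: &[SearchEvents]) {
    for event in events {
        if let SearchEvents::ProximityState {
            graph, paths, dead_ends_cache, universe, costs, cost,
        } = event
        {
            // All fields were cloned when the event was pushed, so the
            // search itself can be long gone by the time we render.
            render_proximity_state(graph, paths, dead_ends_cache, universe, costs, *cost);
        }
    }
}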
@@ -171,15 +173,15 @@ impl SearchLogger<QueryGraph> for DetailedSearchLogger {
         paths_map: &[Vec<Interned<ProximityCondition>>],
         dead_ends_cache: &DeadEndsCache<ProximityCondition>,
         universe: &RoaringBitmap,
-        distances: &MappedInterner<QueryNode, Vec<u16>>,
-        cost: u16,
+        costs: &MappedInterner<QueryNode, Vec<u64>>,
+        cost: u64,
     ) {
         self.events.push(SearchEvents::ProximityState {
             graph: query_graph.clone(),
             paths: paths_map.to_vec(),
             dead_ends_cache: dead_ends_cache.clone(),
             universe: universe.clone(),
-            distances: distances.clone(),
+            costs: costs.clone(),
             cost,
         })
     }
@@ -190,15 +192,15 @@ impl SearchLogger<QueryGraph> for DetailedSearchLogger {
         paths_map: &[Vec<Interned<TypoCondition>>],
         dead_ends_cache: &DeadEndsCache<TypoCondition>,
         universe: &RoaringBitmap,
-        distances: &MappedInterner<QueryNode, Vec<u16>>,
-        cost: u16,
+        costs: &MappedInterner<QueryNode, Vec<u64>>,
+        cost: u64,
     ) {
         self.events.push(SearchEvents::TypoState {
             graph: query_graph.clone(),
             paths: paths_map.to_vec(),
             dead_ends_cache: dead_ends_cache.clone(),
             universe: universe.clone(),
-            distances: distances.clone(),
+            costs: costs.clone(),
             cost,
         })
     }
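The two logger hooks above (their names are truncated by the hunk context, but they push ProximityState and TypoState respectively) simply clone whatever the ranking rule hands them. A hedged sketch of a call site, assuming the SearchLogger methods follow a log_*_state naming pattern; only the argument list mirrors the signatures visible in this diff:

// Hypothetical call from inside a graph-based ranking rule; the method
// name log_proximity_state is an assumption.
logger.log_proximity_state(
    &query_graph,
    &considered_paths, // &[Vec<Interned<ProximityCondition>>]
    &dead_ends_cache,  // &DeadEndsCache<ProximityCondition>
    &bucket_universe,  // &RoaringBitmap
    &costs,            // &MappedInterner<QueryNode, Vec<u64>>
    bucket_cost,       // u64
);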
@@ -358,7 +360,7 @@ results.{cur_ranking_rule}{cur_activated_id} {{
                     paths,
                     dead_ends_cache,
                     universe,
-                    distances,
+                    costs,
                     cost,
                 } => {
                     let cur_ranking_rule = timestamp.len() - 1;
@@ -373,15 +375,15 @@ results.{cur_ranking_rule}{cur_activated_id} {{
                         graph,
                         paths,
                         dead_ends_cache,
-                        distances.clone(),
+                        costs.clone(),
                         &mut new_file,
                     );
                     writeln!(
                         &mut file,
                         "{id} {{
     link: \"{id}.d2.svg\"
     tooltip: \"cost {cost}, universe len: {}\"
 }}",
                         universe.len()
                     )
                     .unwrap();
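For context, the writeln! above emits one d2 node per ranking-rule iteration, linking to the rendered graph for that bucket. With a hypothetical id of proximity.0.2, a cost of 2, and 1337 documents left in the universe, the generated d2 text would be:

proximity.0.2 {
    link: "proximity.0.2.d2.svg"
    tooltip: "cost 2, universe len: 1337"
}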
@@ -391,7 +393,7 @@ results.{cur_ranking_rule}{cur_activated_id} {{
                     paths,
                     dead_ends_cache,
                     universe,
-                    distances,
+                    costs,
                     cost,
                 } => {
                     let cur_ranking_rule = timestamp.len() - 1;
@@ -406,7 +408,7 @@ results.{cur_ranking_rule}{cur_activated_id} {{
                         graph,
                         paths,
                         dead_ends_cache,
-                        distances.clone(),
+                        costs.clone(),
                         &mut new_file,
                     );
                     writeln!(
@@ -424,74 +426,101 @@ results.{cur_ranking_rule}{cur_activated_id} {{
         writeln!(&mut file, "}}").unwrap();
     }
 
-    fn query_node_d2_desc<R: RankingRuleGraphTrait>(
+    fn query_node_d2_desc(
         ctx: &mut SearchContext,
         node_idx: Interned<QueryNode>,
         node: &QueryNode,
-        _distances: &[u16],
+        _costs: &[u64],
         file: &mut File,
     ) {
         match &node.data {
-            QueryNodeData::Term(LocatedQueryTerm { value, .. }) => {
+            QueryNodeData::Term(LocatedQueryTermSubset {
+                term_subset,
+                positions: _,
+                term_ids: _,
+            }) => {
                 let QueryTerm {
                     original,
+                    is_multiple_words: _,
+                    is_prefix: _,
+                    max_nbr_typos,
                     zero_typo,
                     one_typo,
-                    two_typos,
-                    use_prefix_db,
-                    synonyms,
-                    split_words,
-                    prefix_of,
-                    is_prefix: _,
-                    is_ngram: _,
-                    phrase,
-                } = ctx.term_interner.get(*value);
+                    two_typo,
+                } = ctx.term_interner.get(term_subset.original);
 
                 let original = ctx.word_interner.get(*original);
                 writeln!(
                     file,
                     "{node_idx} : \"{original}\" {{
-shape: class"
+shape: class
+max_nbr_typo: {max_nbr_typos}"
                 )
                 .unwrap();
 
+                let ZeroTypoTerm { phrase, zero_typo, prefix_of, synonyms, use_prefix_db } =
+                    zero_typo;
+
                 for w in zero_typo.iter().copied() {
-                    let w = ctx.word_interner.get(w);
-                    writeln!(file, "\"{w}\" : 0").unwrap();
+                    if term_subset.zero_typo_subset.contains_word(w) {
+                        let w = ctx.word_interner.get(w);
+                        writeln!(file, "\"{w}\" : 0").unwrap();
+                    }
                 }
                 for w in prefix_of.iter().copied() {
-                    let w = ctx.word_interner.get(w);
-                    writeln!(file, "\"{w}\" : 0P").unwrap();
-                }
-                for w in one_typo.iter().copied() {
-                    let w = ctx.word_interner.get(w);
-                    writeln!(file, "\"{w}\" : 1").unwrap();
-                }
-                for w in two_typos.iter().copied() {
-                    let w = ctx.word_interner.get(w);
-                    writeln!(file, "\"{w}\" : 2").unwrap();
+                    if term_subset.zero_typo_subset.contains_word(w) {
+                        let w = ctx.word_interner.get(w);
+                        writeln!(file, "\"{w}\" : 0P").unwrap();
+                    }
                 }
 
                 if let Some(phrase) = phrase {
-                    let phrase = ctx.phrase_interner.get(*phrase);
-                    let phrase_str = phrase.description(&ctx.word_interner);
-                    writeln!(file, "\"{phrase_str}\" : phrase").unwrap();
-                }
-                if let Some(split_words) = split_words {
-                    let phrase = ctx.phrase_interner.get(*split_words);
-                    let phrase_str = phrase.description(&ctx.word_interner);
-                    writeln!(file, "\"{phrase_str}\" : split_words").unwrap();
+                    if term_subset.zero_typo_subset.contains_phrase(*phrase) {
+                        let phrase = ctx.phrase_interner.get(*phrase);
+                        let phrase_str = phrase.description(&ctx.word_interner);
+                        writeln!(file, "\"{phrase_str}\" : phrase").unwrap();
+                    }
                 }
 
                 for synonym in synonyms.iter().copied() {
-                    let phrase = ctx.phrase_interner.get(synonym);
-                    let phrase_str = phrase.description(&ctx.word_interner);
-                    writeln!(file, "\"{phrase_str}\" : synonym").unwrap();
+                    if term_subset.zero_typo_subset.contains_phrase(synonym) {
+                        let phrase = ctx.phrase_interner.get(synonym);
+                        let phrase_str = phrase.description(&ctx.word_interner);
+                        writeln!(file, "\"{phrase_str}\" : synonym").unwrap();
+                    }
                 }
                 if let Some(use_prefix_db) = use_prefix_db {
-                    let p = ctx.word_interner.get(*use_prefix_db);
-                    writeln!(file, "use prefix DB : {p}").unwrap();
+                    if term_subset.zero_typo_subset.contains_word(*use_prefix_db) {
+                        let p = ctx.word_interner.get(*use_prefix_db);
+                        writeln!(file, "use prefix DB : {p}").unwrap();
+                    }
+                }
+                if let Lazy::Init(one_typo) = one_typo {
+                    let OneTypoTerm { split_words, one_typo } = one_typo;
+
+                    for w in one_typo.iter().copied() {
+                        if term_subset.one_typo_subset.contains_word(w) {
+                            let w = ctx.word_interner.get(w);
+                            writeln!(file, "\"{w}\" : 1").unwrap();
+                        }
+                    }
+                    if let Some(split_words) = split_words {
+                        if term_subset.one_typo_subset.contains_phrase(*split_words) {
+                            let phrase = ctx.phrase_interner.get(*split_words);
+                            let phrase_str = phrase.description(&ctx.word_interner);
+                            writeln!(file, "\"{phrase_str}\" : split_words").unwrap();
+                        }
+                    }
+                }
+                if let Lazy::Init(two_typo) = two_typo {
+                    let TwoTypoTerm { two_typos } = two_typo;
+                    for w in two_typos.iter().copied() {
+                        if term_subset.two_typo_subset.contains_word(w) {
+                            let w = ctx.word_interner.get(w);
+                            writeln!(file, "\"{w}\" : 2").unwrap();
+                        }
+                    }
                 }
-                // for d in distances.iter() {
-                //     writeln!(file, "\"d_{d}\" : distance").unwrap();
-                // }
 
                 writeln!(file, "}}").unwrap();
             }
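The rewritten query_node_d2_desc no longer dumps every derivation of the term: each word or phrase is first checked against the subset filters carried by LocatedQueryTermSubset (zero_typo_subset, one_typo_subset, two_typo_subset). The patch only shows the two methods it calls; a minimal sketch of a subset type that would satisfy them, with placeholder names:

use std::collections::BTreeSet;

// Placeholder sketch: only contains_word / contains_phrase appear in the
// patch; the type name and its variants are assumptions.
enum TypoSubset {
    All, // keep every derivation
    Subset {
        words: BTreeSet<Interned<String>>,
        phrases: BTreeSet<Interned<Phrase>>,
    },
    Nothing, // filter everything out
}

impl TypoSubset {
    fn contains_word(&self, word: Interned<String>) -> bool {
        match self {
            TypoSubset::All => true,
            TypoSubset::Subset { words, .. } => words.contains(&word),
            TypoSubset::Nothing => false,
        }
    }

    fn contains_phrase(&self, phrase: Interned<Phrase>) -> bool {
        match self {
            TypoSubset::All => true,
            TypoSubset::Subset { phrases, .. } => phrases.contains(&phrase),
            TypoSubset::Nothing => false,
        }
    }
}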
@@ -514,7 +543,7 @@ shape: class"
             if matches!(node.data, QueryNodeData::Deleted) {
                 continue;
             }
-            Self::query_node_d2_desc::<TypoGraph>(ctx, node_id, node, &[], file);
+            Self::query_node_d2_desc(ctx, node_id, node, &[], file);
 
             for edge in node.successors.iter() {
                 writeln!(file, "{node_id} -> {edge};\n").unwrap();
@@ -526,7 +555,7 @@ shape: class"
         graph: &RankingRuleGraph<R>,
         paths: &[Vec<Interned<R::Condition>>],
         _dead_ends_cache: &DeadEndsCache<R::Condition>,
-        distances: MappedInterner<QueryNode, Vec<u16>>,
+        costs: MappedInterner<QueryNode, Vec<u64>>,
         file: &mut File,
     ) {
         writeln!(file, "direction: right").unwrap();
@@ -536,12 +565,12 @@ shape: class"
             if matches!(&node.data, QueryNodeData::Deleted) {
                 continue;
             }
-            let distances = &distances.get(node_idx);
-            Self::query_node_d2_desc::<R>(ctx, node_idx, node, distances, file);
+            let costs = &costs.get(node_idx);
+            Self::query_node_d2_desc(ctx, node_idx, node, costs, file);
         }
         for (_edge_id, edge) in graph.edges_store.iter() {
             let Some(edge) = edge else { continue };
-            let Edge { source_node, dest_node, condition: details, cost } = edge;
+            let Edge { source_node, dest_node, condition: details, cost, nodes_to_skip: _ } = edge;
 
             match &details {
                 None => {
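Besides the distances→costs rename, the destructuring above shows that Edge gained a nodes_to_skip field, which the logger deliberately ignores. The implied shape, with assumed field types:

// Field names from the destructuring in this diff; types are assumptions,
// except that condition must be an Option (it is matched against None).
pub struct Edge<E> {
    pub source_node: Interned<QueryNode>,
    pub dest_node: Interned<QueryNode>,
    pub cost: u64, // assumed width
    pub condition: Option<Interned<E>>,
    pub nodes_to_skip: Vec<Interned<QueryNode>>, // new field; unused by the logger
}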
@@ -561,7 +590,7 @@ shape: class"
         }
         writeln!(file, "}}").unwrap();
 
-        // writeln!(file, "Distances {{").unwrap();
+        // writeln!(file, "costs {{").unwrap();
         // Self::paths_d2_description(graph, paths, file);
         // writeln!(file, "}}").unwrap();
 