diff --git a/axiom-profiler-GUI/assets/html/style.css b/axiom-profiler-GUI/assets/html/style.css index 4c9e9d72..9da7b0f5 100644 --- a/axiom-profiler-GUI/assets/html/style.css +++ b/axiom-profiler-GUI/assets/html/style.css @@ -41,9 +41,6 @@ html { -ms-user-select: none; user-select: none; } -.margin-left-half { - margin-left: 0.5em; -} div.page .full-page { position: absolute; @@ -55,6 +52,7 @@ div.page .full-page { div.page .overlay { background-color: #fff; + z-index: 10; } div.page .overlay .close-button { position: absolute; @@ -397,8 +395,13 @@ li input.td-matcher, li input.td-formatter { padding-inline-start: 20px; } +.expand-collapse { + cursor: pointer; +} + .quant-graph { max-height: 400px; + margin: 5px; } /* Graph view */ diff --git a/axiom-profiler-GUI/src/screen/file/summary.rs b/axiom-profiler-GUI/src/screen/file/summary.rs index 0462ffc4..6570d731 100644 --- a/axiom-profiler-GUI/src/screen/file/summary.rs +++ b/axiom-profiler-GUI/src/screen/file/summary.rs @@ -47,13 +47,11 @@ pub fn Summary(props: &SummaryProps) -> Html { let cfg = use_context::>().unwrap(); let term_display = use_context::>().unwrap(); - let mut config = cfg.config.display; - config.ast_depth_limit = NonMaxU32::new(3); let inner = SummaryPropsInner { parser: props.parser.clone(), analysis: props.analysis.clone(), term_display: term_display.clone(), - config, + config: cfg.config.display, }; let graph = props.analysis.clone().map(|analysis| { @@ -66,13 +64,14 @@ pub fn Summary(props: &SummaryProps) -> Html { }); html! {
- +
- + + - +
{graph}
} } @@ -178,20 +177,18 @@ pub fn CostQuants(props: &SummaryPropsInner) -> Html { } #[function_component] -pub fn CostProofs(props: &SummaryPropsInner) -> Html { +pub fn CostLemmas(props: &SummaryPropsInner) -> Html { props.analysis.as_ref().map(|analysis| { let parser = props.parser.parser.borrow(); let analysis = analysis.borrow(); - let ctxt = props.ctxt(&parser); - let proofs = analysis.proofs.hypothesis_cost.iter().take(DISPLAY_TOP_N); - let proofs = proofs.map(|&(p, c)| { - let p = format!("{}", parser[p].result.with(&ctxt)); - let right = Html::from_html_unchecked(p.into()); + let lemmas = analysis.proofs.lemmas_cost.iter().take(DISPLAY_TOP_N); + let lemmas = lemmas.map(|&(l, c)| { let left = format_to_html(c as u64); + let right = collapsed_expanded(l, props.ctxt(&parser)); (left, right) }).collect::>(); - html! { - + html! { + } }).unwrap_or_default() } @@ -199,13 +196,11 @@ pub fn CostProofs(props: &SummaryPropsInner) -> Html { #[function_component] pub fn CostCdcl(props: &SummaryPropsInner) -> Html { let parser = props.parser.parser.borrow(); - let ctxt = props.ctxt(&parser); let cdcl = props.parser.cdcl.uncut_assigns.iter().take(DISPLAY_TOP_N); let cdcl = cdcl .map(|&(p, c)| { - let p = format!("{}", p.with(&ctxt)); - let right = Html::from_html_unchecked(p.into()); let left = format_to_html(c); + let right = collapsed_expanded(p, props.ctxt(&parser)); (left, right) }) .collect::>(); @@ -238,3 +233,48 @@ fn quants_ordered<'a, T: Ord + Copy + Debug, const REVERSE: bool>( } quants } + +fn collapsed_expanded<'a, D: Default>( + display: impl DisplayWithCtxt, D> + Copy, + mut ctxt: DisplayCtxt<'a>, +) -> Html { + let expanded = format!("{}", display.with(&ctxt)); + ctxt.config.ast_depth_limit = NonMaxU32::new(2); + let collapsed = format!("{}", display.with(&ctxt)); + if expanded == collapsed { + Html::from_html_unchecked(expanded.into()) + } else { + let expanded = Html::from_html_unchecked(expanded.into()); + let collapsed = Html::from_html_unchecked(collapsed.into()); + html! {} + } +} + +#[derive(Properties, Clone, PartialEq)] +pub struct EofProps { + pub collapsed: Html, + pub expanded: Html, +} + +#[function_component] +pub fn ExpandOnFocus(props: &EofProps) -> Html { + let focused = use_state_eq(|| false); + let f = *focused; + let focused_ref = focused.clone(); + let onblur = Callback::from(move |_| focused_ref.set(false)); + + let moved = use_mut_ref(|| false); + let moved_ref = moved.clone(); + let onmousedown = Callback::from(move |_| *moved_ref.borrow_mut() = false); + let moved_ref = moved.clone(); + let onmousemove = Callback::from(move |_| *moved_ref.borrow_mut() = true); + let (focused_ref, moved_ref) = (focused, moved); + let onmouseup = Callback::from(move |_| { + if !*moved_ref.borrow() { + focused_ref.set(!f) + } + }); + html! {
+ {if f { props.expanded.clone() } else { props.collapsed.clone() }}
} +} diff --git a/axiom-profiler-GUI/src/screen/graphviz.rs b/axiom-profiler-GUI/src/screen/graphviz.rs index b4f46695..a6252460 100644 --- a/axiom-profiler-GUI/src/screen/graphviz.rs +++ b/axiom-profiler-GUI/src/screen/graphviz.rs @@ -180,8 +180,8 @@ impl match *self { Instantiation(inst) => match &parser[parser[inst].match_].kind { MatchKind::TheorySolving { axiom_id, .. } => { - let namespace = &parser[axiom_id.namespace]; - let id = axiom_id.id.map(|id| format!("[{id}]")).unwrap_or_default(); + let (namespace, id) = axiom_id.to_string_components(&parser.strings); + let id = id.map(|id| format!("[{id}]")).unwrap_or_default(); format!("{namespace}{id}") } MatchKind::MBQI { quant, .. } diff --git a/axiom-profiler-GUI/src/screen/inst_graph/display/node_info.rs b/axiom-profiler-GUI/src/screen/inst_graph/display/node_info.rs index 2b9d0ed1..ba7f88db 100644 --- a/axiom-profiler-GUI/src/screen/inst_graph/display/node_info.rs +++ b/axiom-profiler-GUI/src/screen/inst_graph/display/node_info.rs @@ -232,7 +232,7 @@ impl<'a, 'b> NodeInfo<'a, 'b> { .node .kind() .inst() - .and_then(|i| self.ctxt.parser[i].z3_generation) + .and_then(|i| self.ctxt.parser[i].kind.z3_generation()) { extra_info.push(("z3 gen", z3gen.to_string())); } @@ -405,6 +405,7 @@ impl<'a, 'b> EdgeInfo<'a, 'b> { use VisibleEdgeKind::*; match self.kind { Direct(_, EdgeKind::Yield) => "Yield".to_string(), + Direct(_, EdgeKind::Asserted) => "Asserted".to_string(), Direct(_, EdgeKind::Blame { pattern_term }) => { format!("Blame pattern #{pattern_term}") } diff --git a/axiom-profiler-GUI/src/screen/inst_graph/mode.rs b/axiom-profiler-GUI/src/screen/inst_graph/mode.rs index 20808bf2..f9fbd331 100644 --- a/axiom-profiler-GUI/src/screen/inst_graph/mode.rs +++ b/axiom-profiler-GUI/src/screen/inst_graph/mode.rs @@ -25,7 +25,7 @@ impl GraphMode { true } (Self::Inst, _) => false, - (Self::Proof, Instantiation(..) | Proof(..)) => true, + (Self::Proof, Instantiation(..) | ENode(..) | Proof(..)) => true, (Self::Proof, _) => false, (Self::Cdcl, Cdcl(..)) => true, (Self::Cdcl, _) => false, diff --git a/axiom-profiler-GUI/src/utils/tab.rs b/axiom-profiler-GUI/src/utils/tab.rs index 021dbfcb..e7a97fe2 100644 --- a/axiom-profiler-GUI/src/utils/tab.rs +++ b/axiom-profiler-GUI/src/utils/tab.rs @@ -58,9 +58,9 @@ pub struct DetailContainerProps { #[function_component] pub fn DetailContainer(props: &DetailContainerProps) -> Html { - html! {
+ html! {
{props.children.clone()} -
} + } } // Tree diff --git a/smt-log-parser/src/analysis/dependencies.rs b/smt-log-parser/src/analysis/dependencies.rs index 1f03cf49..1a813818 100644 --- a/smt-log-parser/src/analysis/dependencies.rs +++ b/smt-log-parser/src/analysis/dependencies.rs @@ -65,7 +65,7 @@ impl QuantifierAnalysis { } let direct_dep = &mut qinfo.direct_deps[i]; - let created_by = parser[blame.enode()].created_by; + let created_by = parser[blame.enode()].blame.inst(); let created_by = created_by.and_then(|iidx| parser.get_inst(iidx).match_.kind.quant_idx()); *direct_dep.enode.entry(created_by).or_default() += 1; diff --git a/smt-log-parser/src/analysis/graph/analysis/cost.rs b/smt-log-parser/src/analysis/graph/analysis/cost.rs index 945cea21..685567d4 100644 --- a/smt-log-parser/src/analysis/graph/analysis/cost.rs +++ b/smt-log-parser/src/analysis/graph/analysis/cost.rs @@ -89,16 +89,16 @@ impl CostInitialiser for DefaultCost { } fn transfer( &mut self, - node: &Node, + child: &Node, _from_idx: RawNodeIndex, - idx: usize, - incoming: &[Self::Observed], + parent_idx: usize, + parents: &[Self::Observed], ) -> f64 { - let total = incoming.iter().sum::(); - if total == 0 { + let total = parents.iter().sum::(); + if total == 0 || child.kind().proof().is_some() { return 0.0; } - node.cost * incoming[idx] as f64 / total as f64 + child.cost * parents[parent_idx] as f64 / total as f64 } } @@ -127,17 +127,17 @@ impl CostInitialiser for ProofCost { &mut self, node: &Node, _from_idx: RawNodeIndex, - idx: usize, - incoming: &[Self::Observed], + child_idx: usize, + children: &[Self::Observed], ) -> f64 { if node.kind().proof().is_none() { return 0.0; } - let total = incoming.iter().sum::(); + let total = children.iter().sum::(); if total == 0 { return 0.0; } - node.cost * incoming[idx] as f64 / total as f64 + node.cost * children[child_idx] as f64 / total as f64 } } diff --git a/smt-log-parser/src/analysis/graph/analysis/matching_loop/explain.rs b/smt-log-parser/src/analysis/graph/analysis/matching_loop/explain.rs index e16205c7..f87834bb 100644 --- a/smt-log-parser/src/analysis/graph/analysis/matching_loop/explain.rs +++ b/smt-log-parser/src/analysis/graph/analysis/matching_loop/explain.rs @@ -334,7 +334,7 @@ impl MlExplainer { fn add_enode(&mut self, parser: &Z3Parser, enode: ENodeIdx, result_enode: NodeIndex) -> bool { let enode_data = &parser[enode]; - let created_by = enode_data.created_by; + let created_by = enode_data.blame.inst(); let created_by = created_by.and_then(|cb| self.instantiations.get(&cb)); if let Some(created_by) = created_by { self.graph @@ -375,7 +375,7 @@ impl MlExplainer { let eq_expl = &self.equalities().given[eq]; match eq_expl { &EqualityExpl::Literal { eq, .. 
} => { - let created_by = self.parser[eq].created_by; + let created_by = self.parser[eq].blame.inst(); let created_by = created_by.and_then(|iidx| self.explainer.instantiations.get(&iidx)); if let Some(created_by) = created_by { diff --git a/smt-log-parser/src/analysis/graph/analysis/matching_loop/signature.rs b/smt-log-parser/src/analysis/graph/analysis/matching_loop/signature.rs index c3b814c3..ddbaf524 100644 --- a/smt-log-parser/src/analysis/graph/analysis/matching_loop/signature.rs +++ b/smt-log-parser/src/analysis/graph/analysis/matching_loop/signature.rs @@ -52,7 +52,7 @@ impl MlSignature { let eq_len = blame.equalities().count(); let blame = blame.enode(); let eblame = &parser[blame]; - let Some(created_by) = eblame.created_by else { + let Some(created_by) = eblame.blame.inst() else { return (InstParent::Const(blame), eq_len); }; match parser[parser[created_by].match_].kind.quant_idx() { diff --git a/smt-log-parser/src/analysis/graph/raw.rs b/smt-log-parser/src/analysis/graph/raw.rs index 98b6f4a2..6f4ff49f 100644 --- a/smt-log-parser/src/analysis/graph/raw.rs +++ b/smt-log-parser/src/analysis/graph/raw.rs @@ -15,8 +15,8 @@ use petgraph::{ use crate::{ graph_idx, items::{ - CdclIdx, ENodeIdx, EqGivenIdx, EqTransIdx, EqualityExpl, GraphIdx, InstIdx, ProofIdx, - StackIdx, TransitiveExplSegmentKind, + CdclIdx, ENodeBlame, ENodeIdx, EqGivenIdx, EqTransIdx, EqualityExpl, GraphIdx, InstIdx, + ProofIdx, StackIdx, TransitiveExplSegmentKind, }, DiGraph, FxHashMap, FxHashSet, NonMaxU32, Result, Z3Parser, }; @@ -101,11 +101,8 @@ impl RawInstGraph { stats, }; - // Add instantiation blamed and yield edges + // Add instantiation blamed edges for (idx, inst) in parser.insts.insts.iter_enumerated() { - for yields in inst.yields_terms.iter() { - self_.add_edge(idx, *yields, EdgeKind::Yield); - } for (i, blame) in parser.insts.matches[inst.match_] .pattern_matches() .enumerate() @@ -125,6 +122,15 @@ impl RawInstGraph { } } + // Add enode blamed edges + for (idx, enode) in parser.egraph.enodes.iter_enumerated() { + match enode.blame { + ENodeBlame::Inst(iidx) => self_.add_edge(iidx, idx, EdgeKind::Asserted), + ENodeBlame::Proof(pidx) => self_.add_edge(pidx, idx, EdgeKind::Yield), + ENodeBlame::BoolConst | ENodeBlame::Unknown => (), + } + } + // Add given equality created edges for (idx, eq) in parser.egraph.equalities.given.iter_enumerated() { match eq { @@ -571,6 +577,8 @@ impl NodeKind { pub enum EdgeKind { /// Instantiation -> ENode Yield, + /// Proof (asserted) -> ENode + Asserted, /// ENode -> Instantiation Blame { pattern_term: u16 }, /// TransEquality -> Instantiation diff --git a/smt-log-parser/src/analysis/proofs.rs b/smt-log-parser/src/analysis/proofs.rs index ebc1474e..88c7d0fd 100644 --- a/smt-log-parser/src/analysis/proofs.rs +++ b/smt-log-parser/src/analysis/proofs.rs @@ -1,36 +1,26 @@ use std::cmp::Reverse; -use crate::{items::ProofIdx, F64Ord, FxHashMap, Z3Parser}; +use crate::{items::TermIdx, F64Ord, FxHashMap, Z3Parser}; use super::InstGraph; pub struct ProofAnalysis { - /// The cost approximation that it took to disprove the hypothesis (may have - /// been disproved in conjunction with other hypotheses). - pub hypothesis_cost: Vec<(ProofIdx, f64)>, + /// The cost approximation that it took to prove each lemma (by + /// contradiction). 
+ pub lemmas_cost: Vec<(TermIdx, f64)>, } impl ProofAnalysis { pub fn new(parser: &Z3Parser, graph: &InstGraph) -> Self { - let mut hypothesis = FxHashMap::<_, f64>::default(); - for (idx, _) in parser.proofs().iter_enumerated() { - let node = &graph.raw[idx]; - if !node.proof.proves_false() { + let mut lemmas = FxHashMap::<_, f64>::default(); + for (idx, proof) in parser.proofs().iter_enumerated() { + if !proof.kind.is_lemma() { continue; } - let cost = node.cost; - let hypotheses = graph.raw.hypotheses(parser, idx); - if hypotheses.is_empty() { - // proved unsat - continue; - } - let cost_div = cost / hypotheses.len() as f64; - for h in hypotheses { - *hypothesis.entry(h).or_default() += cost_div; - } + *lemmas.entry(proof.result).or_default() += graph.raw[idx].cost; } - let mut hypothesis_cost = hypothesis.into_iter().collect::>(); - hypothesis_cost.sort_by_key(|&(idx, cost)| (Reverse(F64Ord(cost)), idx)); - Self { hypothesis_cost } + let mut lemmas_cost = lemmas.into_iter().collect::>(); + lemmas_cost.sort_unstable_by_key(|&(lemma, cost)| (Reverse(F64Ord(cost)), lemma)); + Self { lemmas_cost } } } diff --git a/smt-log-parser/src/display_with.rs b/smt-log-parser/src/display_with.rs index dc306213..fb7a0199 100644 --- a/smt-log-parser/src/display_with.rs +++ b/smt-log-parser/src/display_with.rs @@ -486,10 +486,8 @@ impl DisplayWithCtxt, ()> for &MatchKind { quant.fmt_with(f, ctxt, data) } MatchKind::TheorySolving { axiom_id, .. } => { - write!(f, "[TheorySolving] {}#", &ctxt.parser[axiom_id.namespace])?; - if let Some(id) = axiom_id.id { - write!(f, "{id}")?; - } + write!(f, "[TheorySolving] ")?; + axiom_id.fmt_with(f, ctxt, data)?; Ok(()) } MatchKind::Axiom { axiom, .. } => { @@ -617,9 +615,13 @@ impl<'a: 'b, 'b> DisplayWithCtxt, ()> for &'a TermId { ctxt: &DisplayCtxt<'b>, _data: &mut (), ) -> fmt::Result { - let namespace = &ctxt.parser[self.namespace]; - let id = self.id.map(|id| id.to_string()).unwrap_or_default(); - write!(f, "{namespace}#{id}") + let (namespace, id) = self.to_string_components(&ctxt.parser.strings); + write!(f, "{namespace}#")?; + if let Some(id) = id { + write!(f, "{id}") + } else { + Ok(()) + } } } diff --git a/smt-log-parser/src/error.rs b/smt-log-parser/src/error.rs index 06c6f4fa..d5601efe 100644 --- a/smt-log-parser/src/error.rs +++ b/smt-log-parser/src/error.rs @@ -37,7 +37,7 @@ pub enum Error { InvalidVersion(semver::Error), // Id parsing - InvalidIdNumber(ParseIntError), + InvalidIdNumber(nonmax::ParseIntError), InvalidIdHash(String), UnknownId(TermId), @@ -85,6 +85,9 @@ pub enum Error { PopConflictMismatch, InvalidFrameInteger(ParseIntError), + // Proof + ProvedVar(TermIdx), + // CDCL NoConflict, BoolLiteral, diff --git a/smt-log-parser/src/items/enode.rs b/smt-log-parser/src/items/enode.rs index 7924a1b2..5351e295 100644 --- a/smt-log-parser/src/items/enode.rs +++ b/smt-log-parser/src/items/enode.rs @@ -3,22 +3,44 @@ use mem_dbg::{MemDbg, MemSize}; use crate::{FxHashMap, NonMaxU32}; -use super::{ENodeIdx, EqGivenIdx, EqTransIdx, InstIdx, StackIdx, TermIdx}; +use super::{ENodeIdx, EqGivenIdx, EqTransIdx, InstIdx, ProofIdx, StackIdx, TermIdx}; #[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))] #[derive(Debug)] pub struct ENode { pub frame: StackIdx, - pub created_by: Option, + pub blame: ENodeBlame, pub owner: TermIdx, - pub z3_generation: Option, + pub z3_generation: NonMaxU32, pub(crate) equalities: Vec, /// This will never contain a `TransitiveExpl::to` pointing to itself. 
It /// may contain `TransitiveExpl::given_len` of 0, but only when /// `get_simple_path` fails but `can_mismatch` is true. pub(crate) transitive: FxHashMap, - pub(crate) self_transitive: Option, +} + +#[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))] +#[cfg_attr(feature = "mem_dbg", copy_type)] +#[derive(Debug, Clone, Copy)] +pub enum ENodeBlame { + /// The `ENode` was created by an instantiation. + Inst(InstIdx), + /// The `ENode` was created by a proof step. + Proof(ProofIdx), + /// The `ENode` represents either `#1` or `#2`. + BoolConst, + /// We don't know why the `ENode` was created. + Unknown, +} + +impl ENodeBlame { + pub fn inst(self) -> Option { + match self { + Self::Inst(inst) => Some(inst), + _ => None, + } + } } #[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))] diff --git a/smt-log-parser/src/items/inst.rs b/smt-log-parser/src/items/inst.rs index 8029847f..087d0716 100644 --- a/smt-log-parser/src/items/inst.rs +++ b/smt-log-parser/src/items/inst.rs @@ -215,9 +215,8 @@ impl fmt::Display for Fingerprint { #[derive(Debug, Clone)] pub struct Instantiation { pub match_: MatchIdx, - pub fingerprint: Fingerprint, + pub kind: InstantiationKind, pub proof_id: InstProofLink, - pub z3_generation: Option, /// The enodes that were yielded by the instantiation along with the /// generalised terms for them (`MaybeSynthIdx::Parsed` if the yielded term /// doesn't contain any quantified variables) @@ -225,6 +224,34 @@ pub struct Instantiation { pub frame: StackIdx, } +#[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))] +#[cfg_attr(feature = "mem_dbg", copy_type)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Clone, Copy)] +pub enum InstantiationKind { + Axiom, + NonAxiom { + fingerprint: Fingerprint, + z3_generation: NonMaxU32, + }, +} + +impl InstantiationKind { + pub fn fingerprint(&self) -> Fingerprint { + match self { + Self::NonAxiom { fingerprint, .. } => *fingerprint, + _ => Fingerprint(0), + } + } + + pub fn z3_generation(&self) -> Option { + match self { + Self::NonAxiom { z3_generation, .. } => Some(*z3_generation), + _ => None, + } + } +} + /// A Z3 instantiation. 
#[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))] #[cfg_attr(feature = "mem_dbg", copy_type)] diff --git a/smt-log-parser/src/items/proof.rs b/smt-log-parser/src/items/proof.rs index 0a234a53..5700c92e 100644 --- a/smt-log-parser/src/items/proof.rs +++ b/smt-log-parser/src/items/proof.rs @@ -577,6 +577,10 @@ impl ProofStepKind { pub fn is_quant_inst(self) -> bool { matches!(self, ProofStepKind::PR_QUANT_INST) } + + pub fn is_lemma(self) -> bool { + matches!(self, ProofStepKind::PR_LEMMA) + } } static SEARCH_MAP: OnceLock> = OnceLock::new(); diff --git a/smt-log-parser/src/items/term.rs b/smt-log-parser/src/items/term.rs index c2752f5a..f1e4ac97 100644 --- a/smt-log-parser/src/items/term.rs +++ b/smt-log-parser/src/items/term.rs @@ -58,7 +58,7 @@ impl TermKind { #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[derive(Debug, Clone, Copy, Default, Hash, PartialEq, Eq)] pub struct TermId { - pub namespace: IString, + pub namespace: Option, pub id: Option, } impl TermId { @@ -68,21 +68,29 @@ impl TermId { pub fn parse(strings: &mut StringTable, value: &str) -> Result { let hash_idx = value.bytes().position(|b| b == b'#'); let hash_idx = hash_idx.ok_or_else(|| Error::InvalidIdHash(value.to_string()))?; - let namespace = IString(strings.get_or_intern(&value[..hash_idx])); + let namespace = (hash_idx != 0).then(|| IString(strings.get_or_intern(&value[..hash_idx]))); let id = &value[hash_idx + 1..]; let id = match id { "" => None, - id => Some(NonMaxU32::new(id.parse::().map_err(Error::InvalidIdNumber)?).unwrap()), + id => Some(id.parse::().map_err(Error::InvalidIdNumber)?), }; Ok(Self { namespace, id }) } pub fn order(&self) -> u32 { self.id.map(|id| id.get() + 1).unwrap_or_default() } + + pub fn to_string_components<'a>( + &self, + strings: &'a StringTable, + ) -> (&'a str, Option) { + let namespace = self.namespace.map(|ns| &strings[*ns]).unwrap_or_default(); + (namespace, self.id) + } pub fn to_string(&self, strings: &StringTable) -> String { - let namespace = &strings[*self.namespace]; - let id = self.id.map(|id| id.to_string()).unwrap_or_default(); - format!("{}#{}", namespace, id) + let (namespace, id) = self.to_string_components(strings); + let id = id.map(|id| id.to_string()).unwrap_or_default(); + format!("{namespace}#{id}") } } @@ -93,25 +101,27 @@ impl TermId { #[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))] #[derive(Debug)] pub struct TermIdToIdxMap { - empty_string: IString, empty_namespace: Vec>, namespace_map: FxHashMap>>, } -impl TermIdToIdxMap { - pub fn new(strings: &mut StringTable) -> Self { + +impl Default for TermIdToIdxMap { + fn default() -> Self { Self { - empty_string: IString(strings.get_or_intern_static("")), empty_namespace: Vec::new(), namespace_map: FxHashMap::default(), } } - fn get_vec_mut(&mut self, namespace: IString) -> Result<&mut Vec>> { - if self.empty_string == namespace { - // Special handling of common case for empty namespace - Ok(&mut self.empty_namespace) - } else { +} + +impl TermIdToIdxMap { + fn get_vec_mut(&mut self, namespace: Option) -> Result<&mut Vec>> { + if let Some(namespace) = namespace { self.namespace_map.try_reserve(1)?; Ok(self.namespace_map.entry(namespace).or_default()) + } else { + // Special handling of common case for empty namespace + Ok(&mut self.empty_namespace) } } pub fn register_term(&mut self, id: TermId, idx: K) -> Result<()> { @@ -128,11 +138,11 @@ impl TermIdToIdxMap { vec[id_idx].replace(idx); Ok(()) } - fn get_vec(&self, namespace: IString) -> Option<&Vec>> { - if 
self.empty_string == namespace { - Some(&self.empty_namespace) - } else { + fn get_vec(&self, namespace: Option) -> Option<&Vec>> { + if let Some(namespace) = namespace { self.namespace_map.get(&namespace) + } else { + Some(&self.empty_namespace) } } pub fn get_term(&self, id: &TermId) -> Option { diff --git a/smt-log-parser/src/mem_dbg/utils.rs b/smt-log-parser/src/mem_dbg/utils.rs index 504adf2b..f0ef74ea 100644 --- a/smt-log-parser/src/mem_dbg/utils.rs +++ b/smt-log-parser/src/mem_dbg/utils.rs @@ -1,7 +1,10 @@ #[cfg(feature = "mem_dbg")] use mem_dbg::{MemDbg, MemSize}; -use core::ops::{Deref, DerefMut}; +use core::{ + hash::{Hash, Hasher}, + ops::{Deref, DerefMut}, +}; use super::{FxHashMap, TiVec}; @@ -9,7 +12,7 @@ use super::{FxHashMap, TiVec}; #[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone)] pub enum BoxSlice { Large(Box<[T]>), Small(T), @@ -89,6 +92,43 @@ impl core::borrow::BorrowMut<[T]> for BoxSlice { } } +impl PartialEq for BoxSlice +where + [T]: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + (**self).eq(&**other) + } +} +impl Eq for BoxSlice where [T]: Eq {} + +impl PartialOrd for BoxSlice +where + [T]: PartialOrd, +{ + fn partial_cmp(&self, other: &Self) -> Option { + (**self).partial_cmp(&**other) + } +} + +impl Ord for BoxSlice +where + [T]: Ord, +{ + fn cmp(&self, other: &Self) -> core::cmp::Ordering { + (**self).cmp(&**other) + } +} + +impl Hash for BoxSlice +where + [T]: Hash, +{ + fn hash(&self, state: &mut H) { + (**self).hash(state) + } +} + // InternMap #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -98,14 +138,12 @@ struct ValueRef { } #[derive(Debug)] -pub struct InternMap, V: Eq + core::hash::Hash + ?Sized + 'static> { +pub struct InternMap, V: Eq + Hash + ?Sized + 'static> { map: FxHashMap, K>, interned: TiVec>, } -impl, V: ?Sized + Eq + core::hash::Hash + 'static> Default - for InternMap -{ +impl, V: ?Sized + Eq + Hash + 'static> Default for InternMap { fn default() -> Self { Self { map: Default::default(), @@ -114,7 +152,7 @@ impl, V: ?Sized + Eq + core::hash::Hash + 'static> Default } } -impl, V: ?Sized + Eq + core::hash::Hash + 'static> InternMap { +impl, V: ?Sized + Eq + Hash + 'static> InternMap { pub fn intern(&mut self, v: Box) -> K { // SAFETY: `v` is stored in the `interned` vector, behind a `Box` so it // will not be moved or dropped until the whole `InternMap` is dropped. 
@@ -134,7 +172,7 @@ impl, V: ?Sized + Eq + core::hash::Hash + 'static> InternM } } -impl, V: ?Sized + Eq + core::hash::Hash + 'static> Deref for InternMap { +impl, V: ?Sized + Eq + Hash + 'static> Deref for InternMap { type Target = TiVec>; fn deref(&self) -> &Self::Target { &self.interned diff --git a/smt-log-parser/src/parsers/z3/egraph.rs b/smt-log-parser/src/parsers/z3/egraph.rs index aef9cfc5..788b4bcd 100644 --- a/smt-log-parser/src/parsers/z3/egraph.rs +++ b/smt-log-parser/src/parsers/z3/egraph.rs @@ -10,19 +10,23 @@ use petgraph::{ use crate::{ items::{ - ENode, ENodeIdx, EqGivenIdx, EqGivenUse, EqTransIdx, Equality, EqualityExpl, InstIdx, - TermIdx, TransitiveExpl, TransitiveExplSegment, TransitiveExplSegmentKind, + ENode, ENodeBlame, ENodeIdx, EqGivenIdx, EqGivenUse, EqTransIdx, Equality, EqualityExpl, + InstIdx, ProofIdx, TermIdx, TransitiveExpl, TransitiveExplSegment, + TransitiveExplSegmentKind, }, BoxSlice, Error, FxHashMap, NonMaxU32, Result, TiVec, }; -use super::stack::Stack; +use super::{stack::Stack, terms::Terms}; #[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))] #[derive(Debug, Default)] pub struct EGraph { term_to_enode: FxHashMap, pub(crate) enodes: TiVec, + /// Which terms have we seen in a proof and might therefore get an enode + /// from them? + proofs: FxHashMap, pub equalities: Equalities, } @@ -54,11 +58,32 @@ impl Equalities { } impl EGraph { + pub fn get_blame( + &self, + tidx: TermIdx, + inst: Option, + terms: &Terms, + stack: &Stack, + ) -> ENodeBlame { + if terms.is_bool_const(tidx) { + return ENodeBlame::BoolConst; + } + if let Some(inst) = inst { + return ENodeBlame::Inst(inst); + } + let proof = self.proofs.get(&tidx).copied(); + let proof = proof.filter(|&p| stack.is_active_or_global(terms[p].frame)); + if let Some(proof) = proof { + return ENodeBlame::Proof(proof); + } + ENodeBlame::Unknown + } + pub fn new_enode( &mut self, - created_by: Option, + blame: ENodeBlame, term: TermIdx, - z3_generation: Option, + z3_generation: NonMaxU32, stack: &Stack, ) -> Result { // TODO: why does this happen sometimes? 
@@ -72,12 +97,11 @@ impl EGraph { self.enodes.raw.try_reserve(1)?; let enode = self.enodes.push_and_get_key(ENode { frame: stack.active_frame(), - created_by, + blame, owner: term, z3_generation, equalities: Vec::new(), transitive: FxHashMap::default(), - self_transitive: None, }); self.insert_tte(term, enode, stack)?; Ok(enode) @@ -92,6 +116,34 @@ impl EGraph { self.enodes[enode].owner } + pub(super) fn new_proof( + &mut self, + term: TermIdx, + proof: ProofIdx, + terms: &Terms, + stack: &Stack, + ) -> Result<()> { + if !terms[proof].kind.is_asserted() { + return Ok(()); + } + terms.app_walk(term, |tidx, app| { + self.proofs.try_reserve(1)?; + match self.proofs.entry(tidx) { + Entry::Occupied(mut o) => { + if stack.is_active_or_global(terms[*o.get()].frame) { + return Ok(&[]); + } else { + o.insert(proof); + } + } + Entry::Vacant(v) => { + v.insert(proof); + } + } + Ok(&app.child_ids) + }) + } + pub fn new_given_equality( &mut self, from: ENodeIdx, diff --git a/smt-log-parser/src/parsers/z3/stm2.rs b/smt-log-parser/src/parsers/z3/stm2.rs index 6648d5d7..fdaafa83 100644 --- a/smt-log-parser/src/parsers/z3/stm2.rs +++ b/smt-log-parser/src/parsers/z3/stm2.rs @@ -67,7 +67,7 @@ impl EventLog { term: &Term, strings: &StringTable, ) -> Result<()> { - if !strings[*term.id.namespace].is_empty() { + if term.id.namespace.is_some() { return Ok(()); } let Some(iname) = term.kind.app_name() else { diff --git a/smt-log-parser/src/parsers/z3/terms.rs b/smt-log-parser/src/parsers/z3/terms.rs index 5d03f0cf..70213829 100644 --- a/smt-log-parser/src/parsers/z3/terms.rs +++ b/smt-log-parser/src/parsers/z3/terms.rs @@ -1,3 +1,5 @@ +use std::collections::hash_map::Entry; + #[cfg(feature = "mem_dbg")] use mem_dbg::{MemDbg, MemSize}; use typed_index_collections::TiSlice; @@ -6,7 +8,7 @@ use crate::{ error::Either, items::{ InstProofLink, Instantiation, Meaning, ProofIdx, ProofStep, ProofStepKind, QuantIdx, Term, - TermId, TermIdToIdxMap, TermIdx, + TermId, TermIdToIdxMap, TermIdx, TermKind, }, Error, FxHashMap, Result, StringTable, TiVec, }; @@ -34,14 +36,16 @@ pub struct TermStorage + Copy, V: HasTermId> { terms: TiVec, } -impl + Copy, V: HasTermId> TermStorage { - pub(super) fn new(strings: &mut StringTable) -> Self { +impl + Copy, V: HasTermId> Default for TermStorage { + fn default() -> Self { Self { - term_id_map: TermIdToIdxMap::new(strings), + term_id_map: TermIdToIdxMap::default(), terms: TiVec::default(), } } +} +impl + Copy, V: HasTermId> TermStorage { pub(super) fn new_term(&mut self, term: V) -> Result { self.terms.raw.try_reserve(1)?; let id = term.term_id(); @@ -70,10 +74,32 @@ impl + Copy, V: HasTermId> TermStorage { pub(super) fn terms(&self) -> &TiSlice { &self.terms } + + /// Perform a top-down dfs walk of the AST rooted at `idx` calling `f` on + /// each node. If `f` returns `None` then the walk is terminated early. + /// Otherwise the walk is restricted to the children returned by `f`. 
+ pub fn ast_walk( + &self, + idx: K, + mut f: impl FnMut(K, &V) -> core::result::Result<&[K], T>, + ) -> Option + where + usize: From, + { + let mut todo = vec![idx]; + while let Some(idx) = todo.pop() { + let next = &self.terms[idx]; + match f(idx, next) { + Ok(to_walk) => todo.extend_from_slice(to_walk), + Err(t) => return Some(t), + } + } + None + } } #[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))] -#[derive(Debug)] +#[derive(Debug, Default)] pub struct Terms { pub(super) app_terms: TermStorage, pub(super) proof_terms: TermStorage, @@ -82,15 +108,6 @@ pub struct Terms { } impl Terms { - pub(super) fn new(strings: &mut StringTable) -> Self { - Self { - app_terms: TermStorage::new(strings), - proof_terms: TermStorage::new(strings), - - meanings: FxHashMap::default(), - } - } - pub(crate) fn meaning(&self, tidx: TermIdx) -> Option<&Meaning> { self.meanings.get(&tidx) } @@ -123,7 +140,6 @@ impl Terms { pub(super) fn new_meaning(&mut self, term: TermIdx, meaning: Meaning) -> Result<&Meaning> { self.meanings.try_reserve(1)?; - use std::collections::hash_map::Entry; match self.meanings.entry(term) { Entry::Occupied(old) => assert_eq!(old.get(), &meaning), Entry::Vacant(empty) => { @@ -133,6 +149,23 @@ impl Terms { Ok(&self.meanings[&term]) } + /// Perform a top-down walk of the AST rooted at `tidx` calling `f` on each + /// node of kind `App` and walking the children that are returned. `Quant` + /// and `Var` nodes are skipped. + pub(super) fn app_walk( + &self, + tidx: TermIdx, + mut f: impl FnMut(TermIdx, &Term) -> core::result::Result<&[TermIdx], T>, + ) -> core::result::Result<(), T> { + self.app_terms + .ast_walk(tidx, |tidx, term| match term.kind { + TermKind::Var(_) => Ok(&[]), + TermKind::App(_) => f(tidx, term), + TermKind::Quant(_) => Ok(&[]), + }) + .map_or(Ok(()), Err) + } + /// Heuristic to get body of instantiated quantifier. See documentation of /// [`InstProofLink::ProofsDisabled`]. pub(super) fn last_term_from_instance(&self, strings: &StringTable) -> Option { @@ -156,6 +189,18 @@ impl Terms { }) .map(|(idx, _)| idx) } + + pub fn is_true_const(&self, tidx: TermIdx) -> bool { + let id = self[tidx].id; + id.namespace.is_none() && id.id.is_some_and(|id| id.get() == 1) + } + pub fn is_false_const(&self, tidx: TermIdx) -> bool { + let id = self[tidx].id; + id.namespace.is_none() && id.id.is_some_and(|id| id.get() == 2) + } + pub fn is_bool_const(&self, tidx: TermIdx) -> bool { + self.is_true_const(tidx) || self.is_false_const(tidx) + } } impl std::ops::Index for Terms { diff --git a/smt-log-parser/src/parsers/z3/z3parser.rs b/smt-log-parser/src/parsers/z3/z3parser.rs index 8b4c3dd6..ca7a42b0 100644 --- a/smt-log-parser/src/parsers/z3/z3parser.rs +++ b/smt-log-parser/src/parsers/z3/z3parser.rs @@ -48,7 +48,7 @@ impl Default for Z3Parser { let mut strings = StringTable::with_hasher(fxhash::FxBuildHasher::default()); Self { version_info: VersionInfo::default(), - terms: Terms::new(&mut strings), + terms: Terms::default(), synth_terms: Default::default(), quantifiers: Default::default(), insts: Default::default(), @@ -89,7 +89,8 @@ impl Z3Parser { // Very rarely in v4.8.17 & v4.11.2, an `[attach-enode]` is not // emitted. Create it here. // TODO: log somewhere when this happens. 
- self.egraph.new_enode(None, idx, None, &self.stack)?; + self.egraph + .new_enode(ENodeBlame::Unknown, idx, NonMaxU32::ZERO, &self.stack)?; self.events.new_enode(); return self.egraph.get_enode(idx, &self.stack); } @@ -484,12 +485,15 @@ impl Z3LogParser for Z3Parser { } fn mk_app<'a>(&mut self, mut l: impl Iterator) -> Result<()> { - let full_id = self.parse_new_full_id(l.next())?; + let full_id = l.next().ok_or(E::UnexpectedNewline)?; + let id = TermId::parse(&mut self.strings, full_id)?; let name = l.next().ok_or(E::UnexpectedNewline)?; + debug_assert!(name != "true" || full_id == "#1"); + debug_assert!(name != "false" || full_id == "#2"); let name = self.mk_string(name)?; let child_ids = self.gobble_term_children(l)?; let term = Term { - id: full_id, + id, kind: TermKind::App(name), child_ids, }; @@ -528,6 +532,8 @@ impl Z3LogParser for Z3Parser { frame: self.stack.active_frame(), }; let proof_idx = self.terms.proof_terms.new_term(proof_step)?; + self.egraph + .new_proof(result, proof_idx, &self.terms, &self.stack)?; self.events.new_proof_step( proof_idx, &self.terms[proof_idx], @@ -580,18 +586,21 @@ impl Z3LogParser for Z3Parser { return idx.map(|_| ()); } }; - let z3_generation = Self::parse_z3_generation(&mut l)?; + let z3_gen = Self::parse_z3_generation(&mut l)?.ok_or(E::UnexpectedNewline)?; // Return if there is unexpectedly more data Self::expect_completed(l)?; debug_assert!(self[idx].kind.app_name().is_some()); let created_by = self.insts.inst_stack.last_mut(); let iidx = created_by.as_ref().map(|(i, _)| *i); - let enode = self - .egraph - .new_enode(iidx, idx, z3_generation, &self.stack)?; + let blame = self.egraph.get_blame(idx, iidx, &self.terms, &self.stack); + let enode = self.egraph.new_enode(blame, idx, z3_gen, &self.stack)?; self.events.new_enode(); - if let Some((_, yields_terms)) = created_by { + if let Some((i, yields_terms)) = created_by { + debug_assert!(!self.insts.insts[*i] + .kind + .z3_generation() + .is_some_and(|g| g != z3_gen)); // If `None` then this is a ground term not created by an instantiation. yields_terms.try_reserve(1)?; yields_terms.push(enode); @@ -821,6 +830,16 @@ impl Z3LogParser for Z3Parser { }; Self::expect_completed(proof)?; let z3_generation = Self::parse_z3_generation(&mut l)?; + let kind = if let Some(z3_generation) = z3_generation { + debug_assert!(!fingerprint.is_zero()); + InstantiationKind::NonAxiom { + fingerprint, + z3_generation, + } + } else { + debug_assert!(fingerprint.is_zero()); + InstantiationKind::Axiom + }; let match_ = self .insts @@ -828,9 +847,8 @@ impl Z3LogParser for Z3Parser { .ok_or(E::UnknownFingerprint(fingerprint))?; let inst = Instantiation { match_, - fingerprint, + kind, proof_id, - z3_generation, yields_terms: Default::default(), frame: self.stack.active_frame(), }; @@ -1039,24 +1057,21 @@ impl Z3Parser { /// Returns the size in AST nodes of the term `tidx`. Note that z3 eagerly /// reduces terms such as `1 + 1` to `2` meaning that a matching loop can be /// constant in this size metric! 
- pub fn ast_size(&self, tidx: TermIdx) -> Option { + pub fn ast_size(&self, tidx: TermIdx) -> core::result::Result { let mut size = 0; - let mut todo = vec![tidx]; - while let Some(next) = todo.pop() { + let walk = self.terms.app_terms.ast_walk(tidx, |tidx, term| { size += 1; - let term = &self[next]; match term.kind { - TermKind::Var(_) => return None, - TermKind::App(_) => { - todo.extend_from_slice(&term.child_ids); - } + TermKind::Var(_) => Err(tidx), + TermKind::App(_) => Ok(&term.child_ids), // TODO: decide if we want to return a size for quantifiers TermKind::Quant(_) => { - todo.push(*term.child_ids.last().unwrap()); + let body = term.child_ids.last().map(core::slice::from_ref); + Ok(body.unwrap_or_default()) } } - } - Some(size) + }); + walk.map_or(Ok(size), Err) } pub fn inst_ast_size(&self, iidx: InstIdx) -> u32 {
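
Note on the traversal helper added in `smt-log-parser/src/parsers/z3/terms.rs` above: `TermStorage::ast_walk` runs a top-down depth-first walk in which the callback either returns `Ok(children)` to continue into exactly those children or `Err(value)` to abort the whole walk, and `Terms::app_walk` plus the reworked `Z3Parser::ast_size` are both built on it. Below is a minimal standalone sketch of that control flow over a toy node arena; it is not part of the patch, and the names (`Node`, `walk`) are illustrative only.

    struct Node {
        children: Vec<usize>, // indices of child nodes in the arena
        is_var: bool,         // stands in for `TermKind::Var` in the real code
    }

    // Same shape as `ast_walk`: visit nodes depth-first and let the callback
    // choose which children to continue into (`Ok`) or abort the walk (`Err`).
    fn walk<T>(
        arena: &[Node],
        root: usize,
        mut f: impl FnMut(usize, &Node) -> Result<&[usize], T>,
    ) -> Option<T> {
        let mut todo = vec![root];
        while let Some(idx) = todo.pop() {
            match f(idx, &arena[idx]) {
                Ok(children) => todo.extend_from_slice(children),
                Err(stop) => return Some(stop),
            }
        }
        None
    }

    fn main() {
        // Node 0 is an application with children 1 and 2; node 2 is a variable.
        let arena = vec![
            Node { children: vec![1, 2], is_var: false },
            Node { children: vec![], is_var: false },
            Node { children: vec![], is_var: true },
        ];
        // Mirrors the reworked `ast_size`: count nodes, bail out on the first variable.
        let mut size = 0u32;
        let hit_var = walk(&arena, 0, |idx, node| {
            size += 1;
            if node.is_var { Err(idx) } else { Ok(node.children.as_slice()) }
        });
        println!("visited {size} nodes, stopped at variable: {hit_var:?}");
    }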