diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs
index 7b4d569c4d06..afe95896341b 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs
@@ -6,7 +6,7 @@
 //!   by the [`DataFlowGraph`] automatically as new instructions are pushed.
 //! - Check whether any input values have been constrained to be equal to a value of a simpler form
 //!   by a [constrain instruction][Instruction::Constrain]. If so, replace the input value with the simpler form.
-//! - Check whether the instruction [can_be_deduplicated][Instruction::can_be_deduplicated()]
+//! - Check whether the instruction [can_be_replaced][Instruction::can_be_replaced()]
 //!   by duplicate instruction earlier in the same block.
 //!
 //! These operations are done in parallel so that they can each benefit from each other
@@ -54,9 +54,6 @@ use fxhash::FxHashMap as HashMap;
 impl Ssa {
     /// Performs constant folding on each instruction.
     ///
-    /// It will not look at constraints to inform simplifications
-    /// based on the stated equivalence of two instructions.
-    ///
     /// See [`constant_folding`][self] module for more information.
     #[tracing::instrument(level = "trace", skip(self))]
     pub(crate) fn fold_constants(mut self) -> Ssa {
@@ -191,12 +188,9 @@ pub(crate) struct BrilligInfo<'a> {
     brillig_functions: &'a BTreeMap<FunctionId, Function>,
 }
 
-/// HashMap from `(Instruction, side_effects_enabled_var)` to the results of the instruction.
+/// HashMap from (Instruction, side_effects_enabled_var) to the results of the instruction.
 /// Stored as a two-level map to avoid cloning Instructions during the `.get` call.
 ///
-/// The `side_effects_enabled_var` is optional because we only use them when `Instruction::requires_acir_gen_predicate`
-/// is true _and_ the constraint information is also taken into account.
-///
 /// In addition to each result, the original BasicBlockId is stored as well. This allows us
 /// to deduplicate instructions across blocks as long as the new block dominates the original.
 type InstructionResultCache = HashMap<Instruction, HashMap<Option<ValueId>, ResultCache>>;
@@ -229,7 +223,6 @@ impl<'brillig> Context<'brillig> {
     fn fold_constants_in_block(&mut self, function: &mut Function, block: BasicBlockId) {
         let instructions = function.dfg[block].take_instructions();
 
-        // Default side effect condition variable with an enabled state.
         let mut side_effects_enabled_var =
             function.dfg.make_constant(FieldElement::one(), Type::bool());
@@ -314,10 +307,8 @@ impl<'brillig> Context<'brillig> {
     /// Fetches an [`Instruction`] by its [`InstructionId`] and fully resolves its inputs.
     fn resolve_instruction(
         instruction_id: InstructionId,
-        block: BasicBlockId,
         dfg: &DataFlowGraph,
-        dom: &mut DominatorTree,
-        constraint_simplification_mapping: Option<&HashMap<ValueId, Vec<(BasicBlockId, ValueId)>>>,
+        constraint_simplification_mapping: &HashMap<ValueId, ValueId>,
     ) -> Instruction {
         let instruction = dfg[instruction_id].clone();
 
@@ -328,30 +319,19 @@ impl<'brillig> Context<'brillig> {
         // constraints to the cache.
         fn resolve_cache(
             dfg: &DataFlowGraph,
-            dom: &mut DominatorTree,
-            cache: Option<&HashMap<ValueId, Vec<(BasicBlockId, ValueId)>>>,
+            cache: &HashMap<ValueId, ValueId>,
             value_id: ValueId,
-            block: BasicBlockId,
         ) -> ValueId {
             let resolved_id = dfg.resolve(value_id);
-            let Some(cached_values) = cache.and_then(|cache| cache.get(&resolved_id)) else {
-                return resolved_id;
-            };
-
-            for (cached_block, cached_value) in cached_values {
-                // We can only use the simplified value if it was simplified in a block that dominates the current one
-                if dom.dominates(*cached_block, block) {
-                    return resolve_cache(dfg, dom, cache, *cached_value, block);
-                }
+            match cache.get(&resolved_id) {
+                Some(cached_value) => resolve_cache(dfg, cache, *cached_value),
+                None => resolved_id,
             }
-
-            resolved_id
         }
 
         // Resolve any inputs to ensure that we're comparing like-for-like instructions.
-        instruction.map_values(|value_id| {
-            resolve_cache(dfg, dom, constraint_simplification_mapping, value_id, block)
-        })
+        instruction
+            .map_values(|value_id| resolve_cache(dfg, constraint_simplification_mapping, value_id))
     }
 
     /// Pushes a new [`Instruction`] into the [`DataFlowGraph`] which applies any optimizations
@@ -385,30 +365,44 @@ impl<'brillig> Context<'brillig> {
     }
 
     fn cache_instruction(
         &mut self,
         instruction: Instruction,
         instruction_results: Vec<ValueId>,
         dfg: &DataFlowGraph,
         side_effects_enabled_var: ValueId,
         block: BasicBlockId,
     ) {
         if self.use_constraint_info {
             // If the instruction was a constraint, then create a link between the two `ValueId`s
             // to map from the more complex to the simpler value.
             if let Instruction::Constrain(lhs, rhs, _) = instruction {
                 // These `ValueId`s should be fully resolved now.
-                if let Some((complex, simple)) = simplify(dfg, lhs, rhs) {
-                    self.get_constraint_map(side_effects_enabled_var)
-                        .entry(complex)
-                        .or_default()
-                        .push((block, simple));
+                match (&dfg[lhs], &dfg[rhs]) {
+                    // Ignore trivial constraints
+                    (Value::NumericConstant { .. }, Value::NumericConstant { .. }) => (),
+
+                    // Prefer replacing with constants where possible.
+                    (Value::NumericConstant { .. }, _) => {
+                        self.get_constraint_map(side_effects_enabled_var).insert(rhs, lhs);
+                    }
+                    (_, Value::NumericConstant { .. }) => {
+                        self.get_constraint_map(side_effects_enabled_var).insert(lhs, rhs);
+                    }
+                    // Otherwise prefer block parameters over instruction results.
+                    // This is as block parameters are more likely to be a single witness rather than a full expression.
+                    (Value::Param { .. }, Value::Instruction { .. }) => {
+                        self.get_constraint_map(side_effects_enabled_var).insert(rhs, lhs);
+                    }
+                    (Value::Instruction { .. }, Value::Param { .. }) => {
+                        self.get_constraint_map(side_effects_enabled_var).insert(lhs, rhs);
+                    }
+                    (_, _) => (),
                 }
             }
         }
 
         // If the instruction doesn't have side-effects and if it won't interact with enable_side_effects during acir_gen,
        // we cache the results so we can reuse them if the same instruction appears again later in the block.
-        // Others have side effects representing failure, which are implicit in the ACIR code and can also be deduplicated.
         if instruction.can_be_deduplicated(dfg, self.use_constraint_info) {
             let use_predicate =
                 self.use_constraint_info && instruction.requires_acir_gen_predicate(dfg);
@@ -425,8 +417,6 @@ impl<'brillig> Context<'brillig> {
         }
     }
 
-    /// Get the simplification mapping from complex to simpler instructions,
-    /// which all depend on the same side effect condition variable.
     fn get_constraint_map(
         &mut self,
         side_effects_enabled_var: ValueId,
@@ -1351,43 +1341,4 @@ mod test {
         let ssa = ssa.fold_constants_with_brillig(&brillig);
         assert_normalized_ssa_equals(ssa, expected);
     }
-
-    #[test]
-    fn deduplicates_side_effecting_intrinsics() {
-        let src = "
-        // After EnableSideEffectsIf removal:
-        acir(inline) fn main f0 {
-          b0(v0: Field, v1: Field, v2: u1):
-            v4 = call is_unconstrained() -> u1
-            v7 = call to_be_radix(v0, u32 256) -> [u8; 1]    // `a.to_be_radix(256)`;
-            inc_rc v7
-            v8 = call to_be_radix(v0, u32 256) -> [u8; 1]    // duplicate load of `a`
-            inc_rc v8
-            v9 = cast v2 as Field                            // `if c { a.to_be_radix(256) }`
-            v10 = mul v0, v9                                 // attaching `c` to `a`
-            v11 = call to_be_radix(v10, u32 256) -> [u8; 1]  // calling `to_radix(c * a)`
-            inc_rc v11
-            enable_side_effects v2                           // side effect var for `c` shifted down by removal
-            return
-        }
-        ";
-        let ssa = Ssa::from_str(src).unwrap();
-        let expected = "
-        acir(inline) fn main f0 {
-          b0(v0: Field, v1: Field, v2: u1):
-            v4 = call is_unconstrained() -> u1
-            v7 = call to_be_radix(v0, u32 256) -> [u8; 1]
-            inc_rc v7
-            inc_rc v7
-            v8 = cast v2 as Field
-            v9 = mul v0, v8
-            v10 = call to_be_radix(v9, u32 256) -> [u8; 1]
-            inc_rc v10
-            enable_side_effects v2
-            return
-        }
-        ";
-        let ssa = ssa.fold_constants_using_constraints();
-        assert_normalized_ssa_equals(ssa, expected);
-    }
-}
+}
\ No newline at end of file