diff --git a/crates/noirc_evaluator/src/ssa/opt/constant_folding.rs b/crates/noirc_evaluator/src/ssa/opt/constant_folding.rs
index 65700460b24..05c14bb23f3 100644
--- a/crates/noirc_evaluator/src/ssa/opt/constant_folding.rs
+++ b/crates/noirc_evaluator/src/ssa/opt/constant_folding.rs
@@ -8,7 +8,7 @@ use crate::ssa::{
         dfg::{DataFlowGraph, InsertInstructionResult},
         function::Function,
         instruction::{Instruction, InstructionId},
-        value::ValueId,
+        value::{Value, ValueId},
     },
     ssa_gen::Ssa,
 };
@@ -66,6 +66,7 @@ impl Context {
 
         // Cache of instructions without any side-effects along with their outputs.
         let mut cached_instruction_results: HashMap<Instruction, Vec<ValueId>> = HashMap::new();
+        let mut constrained_values: HashMap<ValueId, ValueId> = HashMap::new();
 
         for instruction_id in instructions {
             Self::fold_constants_into_instruction(
@@ -73,6 +74,7 @@
                 block,
                 instruction_id,
                 &mut cached_instruction_results,
+                &mut constrained_values,
             );
         }
         self.block_queue.extend(function.dfg[block].successors());
@@ -83,8 +85,9 @@
         block: BasicBlockId,
         id: InstructionId,
         instruction_result_cache: &mut HashMap<Instruction, Vec<ValueId>>,
+        constrained_values: &mut HashMap<ValueId, ValueId>,
     ) {
-        let instruction = Self::resolve_instruction(id, dfg);
+        let instruction = Self::resolve_instruction(id, dfg, constrained_values);
         let old_results = dfg.instruction_results(id).to_vec();
 
         // If a copy of this instruction exists earlier in the block, then reuse the previous results.
@@ -96,20 +99,46 @@
         // Otherwise, try inserting the instruction again to apply any optimizations using the newly resolved inputs.
         let new_results = Self::push_instruction(id, instruction.clone(), &old_results, block, dfg);
 
-        // If the instruction is pure then we cache the results so we can reuse them if
-        // the same instruction appears again later in the block.
-        if instruction.is_pure(dfg) {
-            instruction_result_cache.insert(instruction, new_results.clone());
-        }
+        Self::cache_instruction(
+            instruction,
+            new_results.clone(),
+            dfg,
+            instruction_result_cache,
+            constrained_values,
+        );
+
         Self::replace_result_ids(dfg, &old_results, &new_results);
     }
 
     /// Fetches an [`Instruction`] by its [`InstructionId`] and fully resolves its inputs.
-    fn resolve_instruction(instruction_id: InstructionId, dfg: &DataFlowGraph) -> Instruction {
+    fn resolve_instruction(
+        instruction_id: InstructionId,
+        dfg: &DataFlowGraph,
+        constrained_values: &mut HashMap<ValueId, ValueId>,
+    ) -> Instruction {
         let instruction = dfg[instruction_id].clone();
 
+        // Alternate between resolving `value_id` in the `dfg` and checking to see if the resolved value
+        // has been constrained to be equal to some simpler value in the current block.
+        //
+        // This allows us to reach a stable final `ValueId` for each instruction input as we add more
+        // constraints to the cache.
+        fn resolve_cache(
+            dfg_resolver: impl Fn(ValueId) -> ValueId,
+            cache: &HashMap<ValueId, ValueId>,
+            value_id: ValueId,
+        ) -> ValueId {
+            let resolved_id = dfg_resolver(value_id);
+            match cache.get(&resolved_id) {
+                Some(cached_value) => resolve_cache(dfg_resolver, cache, *cached_value),
+                None => resolved_id,
+            }
+        }
+
         // Resolve any inputs to ensure that we're comparing like-for-like instructions.
-        instruction.map_values(|value_id| dfg.resolve(value_id))
+        instruction.map_values(|value_id| {
+            resolve_cache(|value_id| dfg.resolve(value_id), constrained_values, value_id)
+        })
     }
 
     /// Pushes a new [`Instruction`] into the [`DataFlowGraph`] which applies any optimizations
@@ -142,6 +171,45 @@ impl Context {
         new_results
     }
 
+    fn cache_instruction(
+        instruction: Instruction,
+        instruction_results: Vec<ValueId>,
+        dfg: &DataFlowGraph,
+        instruction_result_cache: &mut HashMap<Instruction, Vec<ValueId>>,
+        constraint_cache: &mut HashMap<ValueId, ValueId>,
+    ) {
+        if let Instruction::Constrain(lhs, rhs) = instruction {
+            // These `ValueId`s should be fully resolved now.
+            match (&dfg[lhs], &dfg[rhs]) {
+                // Ignore trivial constraints
+                (Value::NumericConstant { .. }, Value::NumericConstant { .. }) => (),
+
+                // Prefer replacing with constants where possible.
+                (Value::NumericConstant { .. }, _) => {
+                    constraint_cache.insert(rhs, lhs);
+                }
+                (_, Value::NumericConstant { .. }) => {
+                    constraint_cache.insert(lhs, rhs);
+                }
+                // Otherwise prefer block parameters over instruction results.
+                // This is because block parameters are more likely to be a single witness rather than a full expression.
+                (Value::Param { .. }, Value::Instruction { .. }) => {
+                    constraint_cache.insert(rhs, lhs);
+                }
+                (Value::Instruction { .. }, Value::Param { .. }) => {
+                    constraint_cache.insert(lhs, rhs);
+                }
+                (_, _) => (),
+            }
+        }
+
+        // If the instruction doesn't have side-effects, cache the results so we can reuse them if
+        // the same instruction appears again later in the block.
+        if instruction.is_pure(dfg) {
+            instruction_result_cache.insert(instruction, instruction_results);
+        }
+    }
+
     /// Replaces a set of [`ValueId`]s inside the [`DataFlowGraph`] with another.
     fn replace_result_ids(
         dfg: &mut DataFlowGraph,
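For reference, the recursive lookup added in `resolve_instruction` bottoms out once neither the dfg nor the constraint cache maps the id any further; since `cache_instruction` only ever maps a value toward a constant or a block parameter, the chain is expected to terminate. Below is a minimal standalone sketch of that fixed-point behaviour, using plain `u32` ids and a `HashMap` as a stand-in for the real `DataFlowGraph` resolution (both are assumptions for illustration only, not the compiler's actual types):

```rust
use std::collections::HashMap;

// Stand-in for the real `ValueId` type so the sketch is self-contained.
type ValueId = u32;

// Same shape as the `resolve_cache` helper in the diff: resolve through the
// dfg first, then follow any constraint mapping, and repeat until stable.
fn resolve_cache(
    dfg_resolver: impl Fn(ValueId) -> ValueId,
    cache: &HashMap<ValueId, ValueId>,
    value_id: ValueId,
) -> ValueId {
    let resolved_id = dfg_resolver(value_id);
    match cache.get(&resolved_id) {
        Some(cached_value) => resolve_cache(dfg_resolver, cache, *cached_value),
        None => resolved_id,
    }
}

fn main() {
    // Hypothetical ids: suppose the dfg already resolves v3 to v2, and a
    // `constrain v2 == v1` earlier in the block cached the mapping v2 -> v1.
    let dfg_substitutions = HashMap::from([(3, 2)]);
    let constrained_values = HashMap::from([(2, 1)]);

    let dfg_resolver = |id: ValueId| *dfg_substitutions.get(&id).unwrap_or(&id);

    // v3 -> v2 (via the dfg) -> v1 (via the constraint cache) -> v1 (stable).
    assert_eq!(resolve_cache(dfg_resolver, &constrained_values, 3), 1);
}
```

The alternation matters: resolving through the dfg alone is not enough, because a constraint recorded later in the block can redirect an id that the dfg already considered fully resolved, so each cache hit is fed back through the resolver until both lookups agree.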