Skip to content

Commit

Permalink
feat: use Constrain instructions to replace values with constants
Browse files Browse the repository at this point in the history
  • Loading branch information
TomAFrench committed Sep 2, 2023
1 parent e160f84 commit 1f12948
Showing 1 changed file with 77 additions and 9 deletions.
86 changes: 77 additions & 9 deletions crates/noirc_evaluator/src/ssa/opt/constant_folding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use crate::ssa::{
dfg::{DataFlowGraph, InsertInstructionResult},
function::Function,
instruction::{Instruction, InstructionId},
value::ValueId,
value::{Value, ValueId},
},
ssa_gen::Ssa,
};
Expand Down Expand Up @@ -66,13 +66,15 @@ impl Context {

// Cache of instructions without any side-effects along with their outputs.
let mut cached_instruction_results: HashMap<Instruction, Vec<ValueId>> = HashMap::new();
let mut constrained_values: HashMap<ValueId, ValueId> = HashMap::new();

for instruction_id in instructions {
Self::fold_constants_into_instruction(
&mut function.dfg,
block,
instruction_id,
&mut cached_instruction_results,
&mut constrained_values,
);
}
self.block_queue.extend(function.dfg[block].successors());
Expand All @@ -83,8 +85,9 @@ impl Context {
block: BasicBlockId,
id: InstructionId,
instruction_result_cache: &mut HashMap<Instruction, Vec<ValueId>>,
constrained_values: &mut HashMap<ValueId, ValueId>,
) {
let instruction = Self::resolve_instruction(id, dfg);
let instruction = Self::resolve_instruction(id, dfg, constrained_values);
let old_results = dfg.instruction_results(id).to_vec();

// If a copy of this instruction exists earlier in the block, then reuse the previous results.
Expand All @@ -96,20 +99,46 @@ impl Context {
// Otherwise, try inserting the instruction again to apply any optimizations using the newly resolved inputs.
let new_results = Self::push_instruction(id, instruction.clone(), &old_results, block, dfg);

// If the instruction is pure then we cache the results so we can reuse them if
// the same instruction appears again later in the block.
if instruction.is_pure(dfg) {
instruction_result_cache.insert(instruction, new_results.clone());
}
Self::cache_instruction(
instruction,
new_results.clone(),
dfg,
instruction_result_cache,
constrained_values,
);

Self::replace_result_ids(dfg, &old_results, &new_results);
}

/// Fetches an [`Instruction`] by its [`InstructionId`] and fully resolves its inputs.
fn resolve_instruction(instruction_id: InstructionId, dfg: &DataFlowGraph) -> Instruction {
fn resolve_instruction(
instruction_id: InstructionId,
dfg: &DataFlowGraph,
constrained_values: &mut HashMap<ValueId, ValueId>,
) -> Instruction {
let instruction = dfg[instruction_id].clone();

// Alternate between resolving `value_id` in the `dfg` and checking to see if the resolved value
// has been constrained to be equal to some simpler value in the current block.
//
// This allows us to reach a stable final `ValueId` for each instruction input as we add more
// constraints to the cache.
fn resolve_cache(
dfg_resolver: impl Fn(ValueId) -> ValueId,
cache: &HashMap<ValueId, ValueId>,
value_id: ValueId,
) -> ValueId {
let resolved_id = dfg_resolver(value_id);
match cache.get(&resolved_id) {
Some(cached_value) => resolve_cache(dfg_resolver, cache, *cached_value),
None => resolved_id,
}
}

// Resolve any inputs to ensure that we're comparing like-for-like instructions.
instruction.map_values(|value_id| dfg.resolve(value_id))
instruction.map_values(|value_id| {
resolve_cache(|value_id| dfg.resolve(value_id), constrained_values, value_id)
})
}

/// Pushes a new [`Instruction`] into the [`DataFlowGraph`] which applies any optimizations
Expand Down Expand Up @@ -142,6 +171,45 @@ impl Context {
new_results
}

/// Records the information this instruction contributes to the two folding caches:
/// any value substitution implied by a `Constrain` instruction, and (for pure
/// instructions) the instruction's results so an identical later instruction in
/// the block can reuse them instead of being re-inserted.
fn cache_instruction(
    instruction: Instruction,
    instruction_results: Vec<ValueId>,
    dfg: &DataFlowGraph,
    instruction_result_cache: &mut HashMap<Instruction, Vec<ValueId>>,
    constraint_cache: &mut HashMap<ValueId, ValueId>,
) {
    if let Instruction::Constrain(lhs, rhs) = instruction {
        // `lhs` and `rhs` should already be fully resolved here.
        //
        // Work out which side (if either) should be replaced by the other,
        // as a `(replaced, replacement)` pair:
        // - constants are always the preferred replacement;
        // - otherwise block parameters are preferred over instruction results,
        //   because a block parameter is more likely to be a single witness
        //   rather than a full expression.
        let substitution = match (&dfg[lhs], &dfg[rhs]) {
            // A constraint between two constants carries no new information.
            (Value::NumericConstant { .. }, Value::NumericConstant { .. }) => None,

            (Value::NumericConstant { .. }, _) => Some((rhs, lhs)),
            (_, Value::NumericConstant { .. }) => Some((lhs, rhs)),

            (Value::Param { .. }, Value::Instruction { .. }) => Some((rhs, lhs)),
            (Value::Instruction { .. }, Value::Param { .. }) => Some((lhs, rhs)),

            (_, _) => None,
        };

        if let Some((replaced, replacement)) = substitution {
            constraint_cache.insert(replaced, replacement);
        }
    }

    // Side-effect-free instructions can safely have their results reused if the
    // exact same instruction shows up again later in this block.
    if instruction.is_pure(dfg) {
        instruction_result_cache.insert(instruction, instruction_results);
    }
}

/// Replaces a set of [`ValueId`]s inside the [`DataFlowGraph`] with another.
fn replace_result_ids(
dfg: &mut DataFlowGraph,
Expand Down

0 comments on commit 1f12948

Please sign in to comment.