Commit
TomAFrench committed Nov 28, 2024
1 parent 6039177 commit c7f1d73
Showing 1 changed file with 30 additions and 79 deletions.
@@ -6,7 +6,7 @@
//! by the [`DataFlowGraph`] automatically as new instructions are pushed.
//! - Check whether any input values have been constrained to be equal to a value of a simpler form
//! by a [constrain instruction][Instruction::Constrain]. If so, replace the input value with the simpler form.
//! - Check whether the instruction [can_be_deduplicated][Instruction::can_be_deduplicated()]
//! - Check whether the instruction [can_be_replaced][Instruction::can_be_replaced()]
//! by a duplicate instruction earlier in the same block.
//!
//! These operations are done in parallel so that they can each benefit from each other
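
For orientation, a minimal sketch of how the constraint-aware variant of this pass is driven, following the pattern of the tests at the bottom of this file (the SSA snippet and the behaviour described in its comments are illustrative assumptions, not excerpts from the test suite):

    // Illustrative only: `constrain v0 == Field 5` lets later uses of `v0` be
    // replaced by the constant, and the second `add` is deduplicated against
    // the first once both have their inputs resolved to the same values.
    let src = "
        acir(inline) fn main f0 {
          b0(v0: Field, v1: Field):
            constrain v0 == Field 5
            v3 = add v0, v1
            v4 = add v0, v1
            return v4
        }
        ";
    let ssa = Ssa::from_str(src).unwrap();
    let ssa = ssa.fold_constants_using_constraints();
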
@@ -54,9 +54,6 @@ use fxhash::FxHashMap as HashMap;
impl Ssa {
/// Performs constant folding on each instruction.
///
/// It will not look at constraints to inform simplifications
/// based on the stated equivalence of two instructions.
///
/// See [`constant_folding`][self] module for more information.
#[tracing::instrument(level = "trace", skip(self))]
pub(crate) fn fold_constants(mut self) -> Ssa {
@@ -191,12 +188,9 @@ pub(crate) struct BrilligInfo<'a> {
brillig_functions: &'a BTreeMap<FunctionId, Function>,
}

/// HashMap from `(Instruction, side_effects_enabled_var)` to the results of the instruction.
/// HashMap from (Instruction, side_effects_enabled_var) to the results of the instruction.
/// Stored as a two-level map to avoid cloning Instructions during the `.get` call.
///
/// The `side_effects_enabled_var` is optional because we only use them when `Instruction::requires_acir_gen_predicate`
/// is true _and_ the constraint information is also taken into account.
///
/// In addition to each result, the original BasicBlockId is stored as well. This allows us
/// to deduplicate instructions across blocks as long as the new block dominates the original.
type InstructionResultCache = HashMap<Instruction, HashMap<Option<ValueId>, ResultCache>>;
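
To make the two-level layout concrete, a minimal sketch of a lookup against this cache (`lookup_cached` is a hypothetical helper, not defined in this file):

    // First level is keyed by the `Instruction` itself, so `.get` only needs a
    // reference and no clone; the second level is keyed by the side-effect
    // predicate, with `None` used when the result does not depend on it.
    fn lookup_cached<'a>(
        cache: &'a InstructionResultCache,
        instruction: &Instruction,
        predicate: Option<ValueId>,
    ) -> Option<&'a ResultCache> {
        cache.get(instruction)?.get(&predicate)
    }
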
@@ -229,7 +223,6 @@ impl<'brillig> Context<'brillig> {
fn fold_constants_in_block(&mut self, function: &mut Function, block: BasicBlockId) {
let instructions = function.dfg[block].take_instructions();

// Default side effect condition variable with an enabled state.
let mut side_effects_enabled_var =
function.dfg.make_constant(FieldElement::one(), Type::bool());

@@ -314,10 +307,8 @@ impl<'brillig> Context<'brillig> {
/// Fetches an [`Instruction`] by its [`InstructionId`] and fully resolves its inputs.
fn resolve_instruction(
instruction_id: InstructionId,
block: BasicBlockId,
dfg: &DataFlowGraph,
dom: &mut DominatorTree,
constraint_simplification_mapping: Option<&HashMap<ValueId, Vec<(BasicBlockId, ValueId)>>>,
constraint_simplification_mapping: &HashMap<ValueId, ValueId>,
) -> Instruction {
let instruction = dfg[instruction_id].clone();

@@ -328,30 +319,19 @@
// constraints to the cache.
fn resolve_cache(
dfg: &DataFlowGraph,
dom: &mut DominatorTree,
cache: Option<&HashMap<ValueId, Vec<(BasicBlockId, ValueId)>>>,
cache: &HashMap<ValueId, ValueId>,
value_id: ValueId,
block: BasicBlockId,
) -> ValueId {
let resolved_id = dfg.resolve(value_id);
let Some(cached_values) = cache.and_then(|cache| cache.get(&resolved_id)) else {
return resolved_id;
};

for (cached_block, cached_value) in cached_values {
// We can only use the simplified value if it was simplified in a block that dominates the current one
if dom.dominates(*cached_block, block) {
return resolve_cache(dfg, dom, cache, *cached_value, block);
}
match cache.get(&resolved_id) {
Some(cached_value) => resolve_cache(dfg, cache, *cached_value),
None => resolved_id,
}

resolved_id
}

// Resolve any inputs to ensure that we're comparing like-for-like instructions.
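// Note that `resolve_cache` follows chains of simplifications: if the cache
// maps `v3 -> v2` and `v2 -> v1`, then `v3` resolves all the way to `v1`.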
instruction.map_values(|value_id| {
resolve_cache(dfg, dom, constraint_simplification_mapping, value_id, block)
})
instruction
.map_values(|value_id| resolve_cache(dfg, constraint_simplification_mapping, value_id))
}

/// Pushes a new [`Instruction`] into the [`DataFlowGraph`] which applies any optimizations
@@ -385,32 +365,44 @@
}

fn cache_instruction(
&mut self,
&mut self,
instruction: Instruction,
instruction_results: Vec<ValueId>,
dfg: &DataFlowGraph,
side_effects_enabled_var: ValueId,
block: BasicBlockId,
block: BasicBlockId,
) {
if self.use_constraint_info {
// If the instruction was a constraint, then create a link between the two `ValueId`s
// to map from the more complex to the simpler value.
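// For example, `constrain v0 == Field 5` records `v0 -> Field 5`, so later uses
// of `v0` under the same predicate become the constant, while
// `constrain v_result == v_param` records `v_result -> v_param`, preferring the
// likely-simpler block parameter.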
if let Instruction::Constrain(lhs, rhs, _) = instruction {
// These `ValueId`s should be fully resolved now.
if let Some((complex, simple)) = simplify(dfg, lhs, rhs) {
self.get_constraint_map(side_effects_enabled_var)
.entry(complex)
.or_default()
.push((block, simple));
match (&dfg[lhs], &dfg[rhs]) {
// Ignore trivial constraints
(Value::NumericConstant { .. }, Value::NumericConstant { .. }) => (),

// Prefer replacing with constants where possible.
(Value::NumericConstant { .. }, _) => {
self.get_constraint_map(side_effects_enabled_var).insert(rhs, lhs);
}
(_, Value::NumericConstant { .. }) => {
self.get_constraint_map(side_effects_enabled_var).insert(lhs, rhs);
}
// Otherwise prefer block parameters over instruction results.
// This is because block parameters are more likely to be a single witness rather than a full expression.
(Value::Param { .. }, Value::Instruction { .. }) => {
self.get_constraint_map(side_effects_enabled_var).insert(rhs, lhs);
}
(Value::Instruction { .. }, Value::Param { .. }) => {
self.get_constraint_map(side_effects_enabled_var).insert(lhs, rhs);
}
(_, _) => (),
}
}
}

// If the instruction doesn't have side-effects and if it won't interact with enable_side_effects during acir_gen,
// we cache the results so we can reuse them if the same instruction appears again later in the block.
// Others have side effects representing failure, which are implicit in the ACIR code and can also be deduplicated.
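// For example, two `add` instructions with identical operands in the same block
// produce identical results, so the later one can reuse the results of the first.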
if instruction.can_be_deduplicated(dfg, self.use_constraint_info) {
let use_predicate =
self.use_constraint_info && instruction.requires_acir_gen_predicate(dfg);
@@ -425,8 +417,6 @@
}
}

/// Get the simplification mapping from complex to simpler instructions,
/// which all depend on the same side effect condition variable.
fn get_constraint_map(
&mut self,
side_effects_enabled_var: ValueId,
@@ -1351,43 +1341,4 @@ mod test {
let ssa = ssa.fold_constants_with_brillig(&brillig);
assert_normalized_ssa_equals(ssa, expected);
}

#[test]
fn deduplicates_side_effecting_intrinsics() {
let src = "
// After EnableSideEffectsIf removal:
acir(inline) fn main f0 {
b0(v0: Field, v1: Field, v2: u1):
v4 = call is_unconstrained() -> u1
v7 = call to_be_radix(v0, u32 256) -> [u8; 1] // `a.to_be_radix(256)`;
inc_rc v7
v8 = call to_be_radix(v0, u32 256) -> [u8; 1] // duplicate load of `a`
inc_rc v8
v9 = cast v2 as Field // `if c { a.to_be_radix(256) }`
v10 = mul v0, v9 // attaching `c` to `a`
v11 = call to_be_radix(v10, u32 256) -> [u8; 1] // calling `to_radix(c * a)`
inc_rc v11
enable_side_effects v2 // side effect var for `c` shifted down by removal
return
}
";
let ssa = Ssa::from_str(src).unwrap();
let expected = "
acir(inline) fn main f0 {
b0(v0: Field, v1: Field, v2: u1):
v4 = call is_unconstrained() -> u1
v7 = call to_be_radix(v0, u32 256) -> [u8; 1]
inc_rc v7
inc_rc v7
v8 = cast v2 as Field
v9 = mul v0, v8
v10 = call to_be_radix(v9, u32 256) -> [u8; 1]
inc_rc v10
enable_side_effects v2
return
}
";
let ssa = ssa.fold_constants_using_constraints();
assert_normalized_ssa_equals(ssa, expected);
}
}
}
