Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Add Acir debug information #1864

Merged
merged 29 commits into from
Jul 12, 2023
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
b749264
generates acir debug information
guipublic Jul 5, 2023
7745e8c
reset location
guipublic Jul 5, 2023
c4a5912
Merge branch 'master' into gd/debug_location
guipublic Jul 6, 2023
f2d847c
merge from master
guipublic Jul 6, 2023
9e65375
Display the location in the error and use index from acvm
guipublic Jul 6, 2023
f338a72
update cargo.toml
guipublic Jul 6, 2023
3bf33a5
clean-up: restore unit tests
guipublic Jul 6, 2023
f52d26e
restore the prover.toml
guipublic Jul 6, 2023
e72fd88
Merge branch 'master' into gd/debug_location
guipublic Jul 7, 2023
20a34bf
notify unsatisfied constraint only once
guipublic Jul 7, 2023
b863806
set the location and use it when inserting an instruction
guipublic Jul 7, 2023
bf27d1a
Merge remote-tracking branch 'origin/master' into gd/debug_location
kevaundray Jul 10, 2023
0ee7a8f
update to the corresponding acvm PR
guipublic Jul 11, 2023
95992e4
Merge branch 'master' into gd/debug_location
guipublic Jul 11, 2023
1ffb258
update cargo toml
guipublic Jul 11, 2023
0ceb640
Merge branch 'master' into gd/debug_location
guipublic Jul 11, 2023
90afcae
code review: remove temp code
guipublic Jul 12, 2023
7e48183
Code review
guipublic Jul 12, 2023
8bd17d9
Code review
guipublic Jul 12, 2023
e066dea
Update crates/noirc_errors/src/debug_info.rs
kevaundray Jul 12, 2023
0d21f7e
Merge remote-tracking branch 'origin/master' into gd/debug_location
kevaundray Jul 12, 2023
d440022
fix typo
kevaundray Jul 12, 2023
2ce27f7
fix merge
kevaundray Jul 12, 2023
97b3f8d
minor nit
kevaundray Jul 12, 2023
f1587b8
revert all test changes
kevaundray Jul 12, 2023
566648d
add noirc_errors as a dependency to nargo
kevaundray Jul 12, 2023
82241b6
preprocess_program returns DebugInfo
kevaundray Jul 12, 2023
aa7a8ea
modify all methods which call preprocess_program
kevaundray Jul 12, 2023
b0c34b6
change unwrap to expect
kevaundray Jul 12, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion crates/noirc_evaluator/src/ssa_refactor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ pub fn experimental_create_circuit(
show_output: bool,
) -> Result<(Circuit, Abi), RuntimeError> {
let func_sig = program.main_function_signature.clone();
let GeneratedAcir { current_witness_index, opcodes, return_witnesses } =
let GeneratedAcir { current_witness_index, opcodes, return_witnesses, .. } =
optimize_into_acir(program, show_output, enable_logging);

let abi = gen_abi(func_sig, return_witnesses.clone());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ use acvm::{
FieldElement,
};
use iter_extended::vecmap;
use noirc_errors::Location;
use std::collections::HashMap;
use std::{borrow::Cow, hash::Hash};

Expand Down Expand Up @@ -123,6 +124,10 @@ impl AcirContext {
self.add_data(var_data)
}

/// Sets the source location to associate with subsequently generated ACIR opcodes.
/// Pass `None` when the location is unknown; opcodes emitted while it is `None`
/// carry no location entry.
pub(crate) fn set_location(&mut self, location: Option<Location>) {
    self.acir_ir.current_location = location;
}

/// True if the given AcirVar refers to a constant one value
pub(crate) fn is_constant_one(&self, var: &AcirVar) -> bool {
match self.vars[var] {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
//! `GeneratedAcir` is constructed as part of the `acir_gen` pass to accumulate all of the ACIR
//! program as it is being converted from SSA form.
use std::collections::HashMap;

use crate::brillig::brillig_gen::brillig_directive;

use super::errors::AcirGenError;
Expand All @@ -18,6 +20,7 @@ use acvm::{
FieldElement,
};
use iter_extended::{try_vecmap, vecmap};
use noirc_errors::Location;
use num_bigint::BigUint;

#[derive(Debug, Default)]
Expand All @@ -36,6 +39,13 @@ pub(crate) struct GeneratedAcir {
/// Note: This may contain repeated indices, which is necessary for later mapping into the
/// abi's return type.
pub(crate) return_witnesses: Vec<Witness>,

/// Correspondence between an opcode index (in `opcodes`) and the source code location that generated it
locations: HashMap<usize, Location>,

/// Source code location of the current instruction being processed
/// None if we do not know the location
pub(crate) current_location: Option<Location>,
}

impl GeneratedAcir {
Expand All @@ -47,6 +57,9 @@ impl GeneratedAcir {
/// Adds a new opcode into ACIR.
///
/// If a source location is currently set (`current_location`), it is recorded
/// against this opcode's index in `locations` so errors can later be mapped
/// back to the source code that generated the opcode.
fn push_opcode(&mut self, opcode: AcirOpcode) {
    self.opcodes.push(opcode);
    if let Some(location) = self.current_location {
        self.locations.insert(self.opcodes.len() - 1, location);
    }
}

/// Updates the witness index counter and returns
Expand Down
3 changes: 2 additions & 1 deletion crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -231,7 +231,7 @@ impl Context {
allow_log_ops: bool,
) {
let instruction = &dfg[instruction_id];

self.acir_context.set_location(dfg.get_location(&instruction_id));
match instruction {
Instruction::Binary(binary) => {
let result_acir_var = self
Expand Down Expand Up @@ -337,6 +337,7 @@ impl Context {
unreachable!("Expected all load instructions to be removed before acir_gen")
}
}
self.acir_context.set_location(None);
}

fn gen_brillig_for(&self, func: &Function, brillig: &Brillig) -> Vec<Opcode> {
Expand Down
18 changes: 18 additions & 0 deletions crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ use super::{

use acvm::FieldElement;
use iter_extended::vecmap;
use noirc_errors::Location;

/// The DataFlowGraph contains most of the actual data in a function including
/// its blocks, instructions, and values. This struct is largely responsible for
Expand Down Expand Up @@ -69,6 +70,8 @@ pub(crate) struct DataFlowGraph {
/// for that of another. This information is purely used for printing the SSA, and has no
/// material effect on the SSA itself.
replaced_value_ids: HashMap<ValueId, ValueId>,

locations: HashMap<InstructionId, Location>,
guipublic marked this conversation as resolved.
Show resolved Hide resolved
}

impl DataFlowGraph {
Expand Down Expand Up @@ -137,6 +140,7 @@ impl DataFlowGraph {
instruction: Instruction,
block: BasicBlockId,
ctrl_typevars: Option<Vec<Type>>,
location: Option<Location>,
) -> InsertInstructionResult {
use InsertInstructionResult::*;
match instruction.simplify(self, block) {
Expand All @@ -145,6 +149,9 @@ impl DataFlowGraph {
SimplifyResult::None => {
let id = self.make_instruction(instruction, ctrl_typevars);
self.blocks[block].insert_instruction(id);
if let Some(location) = location {
self.locations.insert(id, location);
}
InsertInstructionResult::Results(self.instruction_results(id))
}
}
Expand Down Expand Up @@ -362,6 +369,17 @@ impl DataFlowGraph {
destination.instructions_mut().append(&mut instructions);
destination.set_terminator(terminator);
}

/// Returns the source location recorded for the given instruction,
/// or `None` if no location was stored for it.
pub(crate) fn get_location(&self, id: &InstructionId) -> Option<Location> {
    self.locations.get(id).cloned()
}

/// Returns the source location of the instruction that produced the given value.
/// Values that are not instruction results (e.g. constants, parameters) have no
/// location, so `None` is returned for them.
pub(crate) fn get_value_location(&self, id: &ValueId) -> Option<Location> {
    if let Value::Instruction { instruction, .. } = &self.values[*id] {
        self.get_location(instruction)
    } else {
        None
    }
}
}

impl std::ops::Index<InstructionId> for DataFlowGraph {
Expand Down
21 changes: 15 additions & 6 deletions crates/noirc_evaluator/src/ssa_refactor/ir/function_inserter.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use std::collections::HashMap;

use iter_extended::vecmap;
use noirc_errors::Location;

use super::{
basic_block::BasicBlockId,
Expand Down Expand Up @@ -55,20 +56,24 @@ impl<'f> FunctionInserter<'f> {
self.values.insert(key, value);
}

pub(crate) fn map_instruction(&mut self, id: InstructionId) -> Instruction {
self.function.dfg[id].clone().map_values(|id| self.resolve(id))
/// Clones the instruction with the given id, remapping each of its values
/// through `resolve`, and returns it paired with the instruction's recorded
/// source location (if any).
pub(crate) fn map_instruction(&mut self, id: InstructionId) -> (Instruction, Option<Location>) {
    let mapped_instruction = self.function.dfg[id].clone().map_values(|value| self.resolve(value));
    let location = self.function.dfg.get_location(&id);
    (mapped_instruction, location)
}

pub(crate) fn push_instruction(&mut self, id: InstructionId, block: BasicBlockId) {
let instruction = self.map_instruction(id);
self.push_instruction_value(instruction, id, block);
let (instruction, location) = self.map_instruction(id);
self.push_instruction_value(instruction, id, block, location);
}

pub(crate) fn push_instruction_value(
&mut self,
instruction: Instruction,
id: InstructionId,
block: BasicBlockId,
location: Option<Location>,
) -> InsertInstructionResult {
let results = self.function.dfg.instruction_results(id);
let results = vecmap(results, |id| self.function.dfg.resolve(*id));
Expand All @@ -77,8 +82,12 @@ impl<'f> FunctionInserter<'f> {
.requires_ctrl_typevars()
.then(|| vecmap(&results, |result| self.function.dfg.type_of_value(*result)));

let new_results =
self.function.dfg.insert_instruction_and_results(instruction, block, ctrl_typevars);
let new_results = self.function.dfg.insert_instruction_and_results(
instruction,
block,
ctrl_typevars,
location,
);

Self::insert_new_instruction_results(&mut self.values, &results, &new_results);
new_results
Expand Down
17 changes: 11 additions & 6 deletions crates/noirc_evaluator/src/ssa_refactor/opt/constant_folding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,12 +71,17 @@ impl Context {
.requires_ctrl_typevars()
.then(|| vecmap(&old_results, |result| function.dfg.type_of_value(*result)));

let new_results =
match function.dfg.insert_instruction_and_results(instruction, block, ctrl_typevars) {
InsertInstructionResult::SimplifiedTo(new_result) => vec![new_result],
InsertInstructionResult::Results(new_results) => new_results.to_vec(),
InsertInstructionResult::InstructionRemoved => vec![],
};
let location = function.dfg.get_location(&id);
let new_results = match function.dfg.insert_instruction_and_results(
instruction,
block,
ctrl_typevars,
location,
) {
InsertInstructionResult::SimplifiedTo(new_result) => vec![new_result],
InsertInstructionResult::Results(new_results) => new_results.to_vec(),
InsertInstructionResult::InstructionRemoved => vec![],
};
assert_eq!(old_results.len(), new_results.len());
for (old_result, new_result) in old_results.iter().zip(new_results) {
function.dfg.set_value_from_id(*old_result, new_result);
Expand Down
84 changes: 61 additions & 23 deletions crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,7 @@ use std::{

use acvm::FieldElement;
use iter_extended::vecmap;
use noirc_errors::Location;

use crate::ssa_refactor::{
ir::{
Expand Down Expand Up @@ -263,7 +264,8 @@ impl<'f> Context<'f> {
let then_branch =
self.inline_branch(block, then_block, old_condition, then_condition, one);

let else_condition = self.insert_instruction(Instruction::Not(then_condition));
let else_condition =
self.insert_instruction(Instruction::Not(then_condition), None);
let zero = FieldElement::zero();

// Make sure the else branch sees the previous values of each store
Expand Down Expand Up @@ -317,7 +319,7 @@ impl<'f> Context<'f> {

if let Some((_, previous_condition)) = self.conditions.last() {
let and = Instruction::binary(BinaryOp::And, *previous_condition, condition);
let new_condition = self.insert_instruction(and);
let new_condition = self.insert_instruction(and, None);
self.conditions.push((end_block, new_condition));
} else {
self.conditions.push((end_block, condition));
Expand All @@ -327,9 +329,17 @@ impl<'f> Context<'f> {
/// Insert a new instruction into the function's entry block.
/// Unlike push_instruction, this function will not map any ValueIds.
/// within the given instruction, nor will it modify self.values in any way.
fn insert_instruction(&mut self, instruction: Instruction) -> ValueId {
fn insert_instruction(
&mut self,
instruction: Instruction,
location: Option<Location>,
) -> ValueId {
let block = self.inserter.function.entry_block();
self.inserter.function.dfg.insert_instruction_and_results(instruction, block, None).first()
self.inserter
.function
.dfg
.insert_instruction_and_results(instruction, block, None, location)
.first()
}

/// Inserts a new instruction into the function's entry block, using the given
Expand All @@ -342,7 +352,12 @@ impl<'f> Context<'f> {
ctrl_typevars: Option<Vec<Type>>,
) -> InsertInstructionResult {
let block = self.inserter.function.entry_block();
self.inserter.function.dfg.insert_instruction_and_results(instruction, block, ctrl_typevars)
self.inserter.function.dfg.insert_instruction_and_results(
instruction,
block,
ctrl_typevars,
None,
)
}

/// Checks the branch condition on the top of the stack and uses it to build and insert an
Expand Down Expand Up @@ -451,20 +466,38 @@ impl<'f> Context<'f> {
"Expected values merged to be of the same type but found {then_type} and {else_type}"
);

let then_location = self.inserter.function.dfg.get_value_location(&then_value);
let else_location = self.inserter.function.dfg.get_value_location(&else_value);
let merge_location = then_location.or(else_location);

// We must cast the bool conditions to the actual numeric type used by each value.
let then_condition = self.insert_instruction(Instruction::Cast(then_condition, then_type));
let else_condition = self.insert_instruction(Instruction::Cast(else_condition, else_type));
let then_condition =
self.insert_instruction(Instruction::Cast(then_condition, then_type), then_location);
let else_condition =
self.insert_instruction(Instruction::Cast(else_condition, else_type), else_location);

let mul = Instruction::binary(BinaryOp::Mul, then_condition, then_value);
let then_value =
self.inserter.function.dfg.insert_instruction_and_results(mul, block, None).first();
let then_value = self
.inserter
.function
.dfg
.insert_instruction_and_results(mul, block, None, merge_location)
.first();

let mul = Instruction::binary(BinaryOp::Mul, else_condition, else_value);
let else_value =
self.inserter.function.dfg.insert_instruction_and_results(mul, block, None).first();
let else_value = self
.inserter
.function
.dfg
.insert_instruction_and_results(mul, block, None, merge_location)
.first();

let add = Instruction::binary(BinaryOp::Add, then_value, else_value);
self.inserter.function.dfg.insert_instruction_and_results(add, block, None).first()
self.inserter
.function
.dfg
.insert_instruction_and_results(add, block, None, merge_location)
.first()
}

/// Inline one branch of a jmpif instruction.
Expand Down Expand Up @@ -649,12 +682,12 @@ impl<'f> Context<'f> {
/// with a different InstructionId from the original. The results of the given instruction
/// will also be mapped to the results of the new instruction.
fn push_instruction(&mut self, id: InstructionId) {
let instruction = self.inserter.map_instruction(id);
let instruction = self.handle_instruction_side_effects(instruction);
let (instruction, location) = self.inserter.map_instruction(id);
let instruction = self.handle_instruction_side_effects(instruction, location);
let is_allocate = matches!(instruction, Instruction::Allocate);

let entry = self.inserter.function.entry_block();
let results = self.inserter.push_instruction_value(instruction, id, entry);
let results = self.inserter.push_instruction_value(instruction, id, entry, location);

// Remember an allocate was created local to this branch so that we do not try to merge store
// values across branches for it later.
Expand All @@ -665,17 +698,22 @@ impl<'f> Context<'f> {

/// If we are currently in a branch, we need to modify constrain instructions
/// to multiply them by the branch's condition (see optimization #1 in the module comment).
fn handle_instruction_side_effects(&mut self, instruction: Instruction) -> Instruction {
fn handle_instruction_side_effects(
&mut self,
instruction: Instruction,
location: Option<Location>,
) -> Instruction {
if let Some((_, condition)) = self.conditions.last().copied() {
match instruction {
Instruction::Constrain(value) => {
let mul = self.insert_instruction(Instruction::binary(
BinaryOp::Mul,
value,
condition,
));
let eq =
self.insert_instruction(Instruction::binary(BinaryOp::Eq, mul, condition));
let mul = self.insert_instruction(
Instruction::binary(BinaryOp::Mul, value, condition),
location,
);
let eq = self.insert_instruction(
Instruction::binary(BinaryOp::Eq, mul, condition),
location,
);
Instruction::Constrain(eq)
}
Instruction::Store { address, value } => {
Expand Down
4 changes: 3 additions & 1 deletion crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs
Original file line number Diff line number Diff line change
Expand Up @@ -379,14 +379,16 @@ impl<'function> PerFunctionContext<'function> {
/// function being inlined into.
fn push_instruction(&mut self, id: InstructionId) {
let instruction = self.source_function.dfg[id].map_values(|id| self.translate_value(id));
let location = self.source_function.dfg.get_location(&id);
let results = self.source_function.dfg.instruction_results(id);
let results = vecmap(results, |id| self.source_function.dfg.resolve(*id));

let ctrl_typevars = instruction
.requires_ctrl_typevars()
.then(|| vecmap(&results, |result| self.source_function.dfg.type_of_value(*result)));

let new_results = self.context.builder.insert_instruction(instruction, ctrl_typevars);
let new_results =
self.context.builder.insert_instruction(instruction, ctrl_typevars, location);
Self::insert_new_instruction_results(&mut self.values, &results, new_results);
}

Expand Down
Loading