Skip to content

Commit

Permalink
feat: add flatten node (#64)
Browse files Browse the repository at this point in the history
Co-authored-by: Alexander Camuto <[email protected]>
  • Loading branch information
jasonmorton and alexander-camuto authored Dec 1, 2022
1 parent 7d30630 commit 7d86386
Show file tree
Hide file tree
Showing 4 changed files with 25 additions and 1 deletion.
2 changes: 1 addition & 1 deletion examples/onnx
Submodule onnx updated from eaab99 to a595f3
7 changes: 7 additions & 0 deletions src/circuit/fused.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ use std::marker::PhantomData;
pub enum FusedOp {
Identity,
Reshape(Vec<usize>),
Flatten(Vec<usize>),
Add,
Sub,
Sum,
Expand All @@ -38,6 +39,7 @@ impl fmt::Display for FusedOp {
match self {
FusedOp::Identity => write!(f, "identity"),
FusedOp::Reshape(new_dims) => write!(f, "reshape to {:?}", new_dims),
FusedOp::Flatten(new_dims) => write!(f, "flatten to {:?}", new_dims),
FusedOp::Add => write!(f, "add"),
FusedOp::Sub => write!(f, "sub"),
FusedOp::Sum => write!(f, "sum"),
Expand Down Expand Up @@ -254,6 +256,11 @@ impl<F: FieldExt + TensorType> FusedConfig<F> {
t.reshape(&new_dims);
t
}
FusedOp::Flatten(new_dims) => {
let mut t = inputs[0].clone();
t.reshape(&new_dims);
t
}
FusedOp::Add => add(&inputs),
FusedOp::Sub => sub(&inputs),
FusedOp::Mult => mult(&inputs),
Expand Down
12 changes: 12 additions & 0 deletions src/graph/node.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ impl OpKind {
pub fn new(name: &str) -> Self {
match name {
"Clip" => OpKind::ReLU(1),
"Prelu" => OpKind::ReLU(1),
"Sigmoid" => OpKind::Sigmoid(1),
"Div" => OpKind::Div(1),
"Const" => OpKind::Const,
Expand All @@ -67,6 +68,7 @@ impl OpKind {
"SumPool" => OpKind::Fused(FusedOp::SumPool((1, 1), (1, 1), (1, 1))),
"GlobalAvgPool" => OpKind::Fused(FusedOp::GlobalSumPool),
"Reshape" => OpKind::Fused(FusedOp::Reshape(Vec::new())),
"Flatten" => OpKind::Fused(FusedOp::Flatten(Vec::new())),
"BatchNorm" => OpKind::Fused(FusedOp::BatchNorm),
"Pad" => OpKind::Fused(FusedOp::Identity),
c => {
Expand Down Expand Up @@ -675,6 +677,16 @@ impl Node {
mn.out_scale = input_node.out_scale;
mn.out_dims = input_node.out_dims.clone();
}
FusedOp::Flatten(_) => {
let input_node = &inputs[0];
let new_dims: Vec<usize> =
vec![inputs[0].out_dims.iter().product::<usize>()];
mn.opkind = OpKind::Fused(FusedOp::Flatten(new_dims.clone()));
mn.output_max = input_node.output_max;
mn.in_scale = input_node.out_scale;
mn.out_scale = input_node.out_scale;
mn.out_dims = new_dims;
}
FusedOp::Reshape(_) => {
let input_node = &inputs[0];
let shape_const_node = &inputs[1];
Expand Down
5 changes: 5 additions & 0 deletions tests/integration_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,11 @@ fn test_ff_mock() {
test_onnx_mock("1l_mlp".to_string());
}

/// Mock-proves a one-layer ONNX model containing a Flatten node,
/// exercising the `FusedOp::Flatten` path added by this commit.
#[test]
fn test_flatten_mock() {
    let model = String::from("1l_flatten");
    test_onnx_mock(model);
}

#[test]
fn test_avg_mock() {
test_onnx_mock("1l_average".to_string());
Expand Down

0 comments on commit 7d86386

Please sign in to comment.