clean the codes
luotao1 committed Jul 14, 2017
1 parent e700bf6 commit f525390
Showing 4 changed files with 7 additions and 27 deletions.
7 changes: 2 additions & 5 deletions paddle/framework/op_desc.proto
@@ -51,9 +51,6 @@ message OpDesc {
 // type of this Operator, such as "add", "sub", "fc".
 required string type = 3;
 
-// the name of this Operator.
-required string name = 4;
-
 // Attributes of this Operator. e.g., scale=3.0 in cosine op.
-repeated AttrDesc attrs = 5;
-};
+repeated AttrDesc attrs = 4;
+};
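
For illustration only (not part of this commit's diff): with the name field removed, a descriptor carries just a type, inputs, outputs, and attributes. Below is a minimal C++ sketch of populating one, mirroring the updated CreateFcOpDesc() in the test file further down; the include path and the paddle::framework namespace are assumptions, not taken from this diff.

#include "paddle/framework/op_desc.pb.h"  // assumed path of the generated proto header

int main() {
  paddle::framework::OpDesc op_desc;  // namespace is an assumption based on the file paths above
  op_desc.set_type("fc");             // operator type, e.g. "add", "sub", "fc"
  op_desc.add_inputs("rnn/h_pre");    // inputs and outputs are referenced by variable name
  op_desc.add_inputs("rnn/w");
  op_desc.add_outputs("rnn/s");
  // op_desc.set_name(...) is gone along with the name field; ops are no longer keyed by a name.
  return 0;
}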
1 change: 0 additions & 1 deletion paddle/framework/recurrent_network_op.cc
@@ -69,7 +69,6 @@ void RecurrentOp::Init(const OpDesc& op_desc, AttributeMap& attrs) {
 outputs_.push_back(output);
 }
 
-name_ = op_desc.name();
 net_name_ = inputs_.at(GetAttr<int>("step_net"));
 step_scopes_name_ = outputs_.back();
 
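
A side note on the lookup kept above: the "step_net" attribute is an integer index into the op's input names, and the input found at that index names the scope variable holding the shared step net. A standalone C++ sketch of that indexing follows; the index value 2 is an assumption matching the input order used in the test further down.

#include <string>
#include <vector>

int main() {
  // Input names taken from the RecurrentOp test below: "x", "h_boot", "step_net".
  std::vector<std::string> inputs = {"x", "h_boot", "step_net"};
  int step_net_attr = 2;                            // assumed value of the "step_net" attribute
  std::string net_name = inputs.at(step_net_attr);  // plays the role of net_name_ in Init()
  (void)net_name;                                   // this name keys the step net in the father scope
  return 0;
}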
15 changes: 1 addition & 14 deletions paddle/framework/recurrent_network_op.h
@@ -92,7 +92,7 @@ class PlainNet {
 // TODO:
 // 1. No-padding computing for sequences with indifinite length in one batch.
 // 2. Hierarchical RNN for sequence with sub-sequence.
-// 3. Multi-inputs with indifinate length for RecurrentOp.
+// 3. External Memory.
 // 4. More Complex RNN architecture, such as Gated Feedback RNN.
 // Refer to: https://arxiv.org/pdf/1502.02367.pdf
 
@@ -143,11 +143,6 @@ class RecurrentOp : public OperatorBase {
  */
 void ConcatOutputs(ScopePtr scope) const;
 
-/*
- * Create a `Net` which is shared across all steps.
- */
-// void CreateStepNet(ScopePtr scope) const;
-
 /*
  * the step scopes as the father scope. The step scopes will be stored in
  * the father scope as a variable whose name is specified by
@@ -158,11 +153,6 @@ class RecurrentOp : public OperatorBase {
  */
 void CreateScopes(ScopePtr scope) const;
 
-/*
- * Create memories in each step scope.
- */
-// void CreateMemories(ScopePtr scope) const;
-
 /*
  * Link memory in previous step scope to current scope.
  */
@@ -213,9 +203,6 @@ class RecurrentOp : public OperatorBase {
 // TODO copy from OpBase's
 mutable std::vector<MemoryAttr> memory_attrs_;
 
-// this op's name, used as a unique key in father scope.
-// TODO repace it with OpBase's interface if supported.
-std::string name_;
 // name of rnn op's step net, the step net will be shared by both `Forward`
 // and `Backward`, so we store it as a variable in father's scope, with a
 // unique key specified by `net_name_`.
11 changes: 4 additions & 7 deletions paddle/framework/recurrent_network_op_test.cc
@@ -24,7 +24,7 @@ namespace framework {
 namespace fake {
 class FcOp : public OperatorBase {
 public:
-FcOp(const OpDesc& desc) : name_(desc.name()) {}
+FcOp(const OpDesc& desc) {}
 
 virtual void InferShape(ScopePtr scope) const override {
 for (const auto& output : outputs_) {
@@ -54,7 +54,7 @@ class FcOp : public OperatorBase {
 
 class AddOp : public OperatorBase {
 public:
-AddOp(const OpDesc& desc) : name_(desc.name()) {}
+AddOp(const OpDesc& desc) {}
 
 virtual void InferShape(ScopePtr scope) const override {
 for (const auto& output : outputs_) {
@@ -136,7 +136,6 @@ class RecurrentOpTest : public ::testing::Test {
 OpDesc op_desc;
 
 op_desc.set_type("rnn_op");
-op_desc.set_name("rnn");
 op_desc.add_inputs("x");
 op_desc.add_inputs("h_boot");  // initial memory
 op_desc.add_inputs("step_net");  // step net
@@ -204,22 +203,20 @@ class RecurrentOpTest : public ::testing::Test {
 OpDesc CreateFcOpDesc() {
 OpDesc op_desc;
 op_desc.set_type("fc");
-op_desc.set_name("fc");
 op_desc.add_inputs("rnn/h_pre");
 op_desc.add_inputs("rnn/w");
 op_desc.add_outputs("rnn/s");
-// s = h_pre * check
+// rnn/s = rnn/h_pre * rnn/w
 return op_desc;
 }
 
 OpDesc CreateAddOpDesc() {
 OpDesc op_desc;
 op_desc.set_type("add");
-op_desc.set_name("add");
 op_desc.add_inputs("rnn/x");
 op_desc.add_inputs("rnn/s");
 op_desc.add_outputs("rnn/h");
-// h = x + s
+// rnn/h = rnn/x + rnn/s
 return op_desc;
 }
 
