Merge pull request #1872 from qingqing01/config_helper_test
Generate protostr automatically when adding a new test for trainer_config_helpers.
luotao1 authored Apr 25, 2017
2 parents 5f92400 + 0c91dd1 commit 80277b5
Showing 4 changed files with 129 additions and 4 deletions.
file_list.sh
@@ -5,6 +5,6 @@ last_first_seq test_expand_layer test_ntm_layers test_hsigmoid
img_layers img_trans_layers util_layers simple_rnn_layers unused_layers test_cost_layers
test_rnn_group shared_fc shared_lstm shared_gru test_cost_layers_with_weight
test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops
-test_seq_concat_reshape)
+test_seq_concat_reshape test_pad)

export whole_configs=(test_split_datasource)

generate_protostr.sh
@@ -11,12 +11,18 @@ for conf in ${configs[*]}
do
    echo "Generating " $conf
    $1 -m paddle.utils.dump_config $conf.py > $protostr/$conf.protostr.unittest
+   if [ ! -f "$protostr/$conf.protostr" ]; then
+       cp $protostr/$conf.protostr.unittest $protostr/$conf.protostr
+   fi
    cat ${conf}.py |$1 test_config_parser_for_non_file_config.py > $protostr/$conf.protostr.non_file_config.unittest
done

for conf in ${whole_configs[*]}
do
    echo "Generating " $conf
    $1 -m paddle.utils.dump_config $conf.py "" --whole > $protostr/$conf.protostr.unittest
+   if [ ! -f "$protostr/$conf.protostr" ]; then
+       cp $protostr/$conf.protostr.unittest $protostr/$conf.protostr
+   fi
    cat ${conf}.py |$1 test_config_parser_for_non_file_config.py --whole > $protostr/$conf.protostr.non_file_config.unittest
done
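The two "if [ ! -f ... ]" guards added above are what make the generation automatic: the first time the script runs for a newly listed config, the freshly dumped .protostr.unittest output is copied into place as the reference .protostr, so a new test's reference file no longer has to be created by hand. On later runs the checked-in reference is left untouched and only the .unittest outputs are regenerated for comparison. The script's first argument ($1) is the Python interpreter used to dump each config.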

protostr/test_pad.protostr (new file)
@@ -0,0 +1,120 @@
type: "nn"
layers {
  name: "data"
  type: "data"
  size: 2016
  active_type: ""
  height: 48
  width: 42
}
layers {
  name: "__conv_0__"
  type: "exconv"
  size: 32256
  active_type: ""
  inputs {
    input_layer_name: "data"
    input_parameter_name: "___conv_0__.w0"
    conv_conf {
      filter_size: 3
      channels: 1
      stride: 1
      padding: 1
      groups: 1
      filter_channels: 1
      output_x: 42
      img_size: 42
      caffe_mode: true
      filter_size_y: 3
      padding_y: 1
      stride_y: 1
      output_y: 48
      img_size_y: 48
    }
  }
  bias_parameter_name: "___conv_0__.wbias"
  num_filters: 16
  shared_biases: true
  height: 48
  width: 42
}
layers {
  name: "__pool_0__"
  type: "pool"
  size: 8064
  active_type: ""
  inputs {
    input_layer_name: "__conv_0__"
    pool_conf {
      pool_type: "max-projection"
      channels: 16
      size_x: 2
      stride: 2
      output_x: 21
      img_size: 42
      padding: 0
      size_y: 2
      stride_y: 2
      output_y: 24
      img_size_y: 48
      padding_y: 0
    }
  }
  height: 24
  width: 21
}
layers {
  name: "__pad_0__"
  type: "pad"
  size: 14175
  active_type: ""
  inputs {
    input_layer_name: "__pool_0__"
    pad_conf {
      image_conf {
        channels: 16
        img_size: 21
        img_size_y: 24
      }
      pad_c: 2
      pad_c: 3
      pad_h: 1
      pad_h: 2
      pad_w: 3
      pad_w: 1
    }
  }
  height: 27
  width: 25
}
parameters {
  name: "___conv_0__.w0"
  size: 144
  initial_mean: 0.0
  initial_std: 0.471404520791
  initial_strategy: 0
  initial_smart: false
}
parameters {
  name: "___conv_0__.wbias"
  size: 16
  initial_mean: 0.0
  initial_std: 0.0
  dims: 16
  dims: 1
  initial_strategy: 0
  initial_smart: false
}
input_layer_names: "data"
output_layer_names: "__pad_0__"
sub_models {
  name: "root"
  layer_names: "data"
  layer_names: "__conv_0__"
  layer_names: "__pool_0__"
  layer_names: "__pad_0__"
  input_layer_names: "data"
  output_layer_names: "__pad_0__"
  is_recurrent_layer_group: false
}
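
As a quick sanity check (not part of the commit), every size field in the protostr above is just channels * height * width at that stage of the network. The short Python sketch below, written for this page rather than taken from the repository, reproduces each value:

# Each layer's "size" in the protostr above is channels * height * width.
def volume(channels, height, width):
    return channels * height * width

assert volume(1, 48, 42) == 2016      # data layer: one 48x42 channel
assert volume(16, 48, 42) == 32256    # conv: 3x3, stride 1, padding 1 keeps 48x42
assert volume(16, 24, 21) == 8064     # pool: 2x2, stride 2 halves height and width
# pad_c=[2, 3], pad_h=[1, 2], pad_w=[3, 1] pad before/after each axis,
# so channels 16 -> 21, height 24 -> 27, width 21 -> 25:
assert volume(16 + 2 + 3, 24 + 1 + 2, 21 + 3 + 1) == 14175  # pad layer
# The conv weight parameter holds num_filters * filter_channels * 3 * 3 values:
assert 16 * 1 * 3 * 3 == 144          # ___conv_0__.w0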

test_pad.py
@@ -2,7 +2,7 @@

settings(batch_size=1000, learning_rate=1e-5)

-data = data_layer(name='data', size=2304, height=48, width=42)
+data = data_layer(name='data', size=2016, height=48, width=42)

conv = img_conv_layer(
    input=data,
@@ -13,8 +13,7 @@
    act=LinearActivation(),
    bias_attr=True)

-pool = img_pool_layer(
-    input=conv, num_channels=8, pool_size=2, stride=2, pool_type=MaxPooling())
+pool = img_pool_layer(input=conv, pool_size=2, stride=2, pool_type=MaxPooling())

pad = pad_layer(input=pool, pad_c=[2, 3], pad_h=[1, 2], pad_w=[3, 1])

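Two things change in this config: the data layer size 2304 (48 * 48, apparently a typo) is corrected to 2016 (1 channel * 48 * 42), and num_channels=8, inconsistent with the conv layer's 16 filters, is dropped from the pool layer so the channel count is inferred automatically. For reference, here is how the whole test_pad.py plausibly reads after this commit; the conv arguments sit in the collapsed part of the diff, so they are inferred from conv_conf in the protostr above (3x3 filters, 1 input channel, 16 filters, padding 1), and the closing outputs(pad) is implied by output_layer_names. Treat this as a sketch, not the verbatim file:

from paddle.trainer_config_helpers import *

settings(batch_size=1000, learning_rate=1e-5)

# 1 channel * 48 * 42 = 2016, matching the corrected data layer size.
data = data_layer(name='data', size=2016, height=48, width=42)

# These arguments are inferred from conv_conf in the generated protostr;
# the diff collapses the lines that actually carry them.
conv = img_conv_layer(
    input=data,
    filter_size=3,
    num_channels=1,
    num_filters=16,
    padding=1,
    act=LinearActivation(),
    bias_attr=True)

pool = img_pool_layer(input=conv, pool_size=2, stride=2, pool_type=MaxPooling())

pad = pad_layer(input=pool, pad_c=[2, 3], pad_h=[1, 2], pad_w=[3, 1])

# Implied by output_layer_names: "__pad_0__" in the protostr.
outputs(pad)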