Fix backward_clip num inputs and type of clip params (apache#15688)
* Fix backward_clip num inputs and type of clip params

* Clip test

* Trigger CI

* Changes to clip docs

* Fix docstring

* Trigger CI
ptrendx authored and Ubuntu committed Aug 20, 2019
1 parent 02b6116 commit 5438902
Showing 3 changed files with 22 additions and 10 deletions.
8 changes: 4 additions & 4 deletions src/operator/tensor/matrix_op-inl.h
@@ -1481,7 +1481,7 @@ struct ClipParam : public dmlc::Parameter<ClipParam> {
struct clip {
template<typename DType>
MSHADOW_XINLINE static void Map(index_t i, DType* out, const DType* datas,
- DType a_min, DType a_max) {
+ const float a_min, const float a_max) {
DType data = datas[i];
if (data > a_max) {
out[i] = a_max;
@@ -1497,7 +1497,7 @@ struct clip {
struct clip_grad {
template<typename DType>
MSHADOW_XINLINE static void Map(index_t i, DType* out, const DType* grad, const DType* datas,
- DType a_min, DType a_max) {
+ const float a_min, const float a_max) {
DType data = datas[i];
if (data > a_max) {
out[i] = 0;
@@ -1524,7 +1524,7 @@ void Clip(const nnvm::NodeAttrs& attrs,
MSHADOW_TYPE_SWITCH(outputs[0].type_flag_, DType, {
mxnet_op::Kernel<mxnet::op::clip, xpu>::Launch(s, outputs[0].Size(), outputs[0].dptr<DType>(),
inputs[0].dptr<DType>(),
- DType(param.a_min), DType(param.a_max));
+ param.a_min, param.a_max);
});
}

@@ -1553,7 +1553,7 @@ void ClipGrad_(const nnvm::NodeAttrs& attrs,
Stream<xpu> *s = ctx.get_stream<xpu>();
MSHADOW_TYPE_SWITCH(outputs[0].type_flag_, DType, {
Kernel<clip_grad, xpu>::Launch(s, outputs[0].Size(), outputs[0].dptr<DType>(),
- inputs[0].dptr<DType>(), inputs[1].dptr<DType>(), DType(param.a_min), DType(param.a_max));
+ inputs[0].dptr<DType>(), inputs[1].dptr<DType>(), param.a_min, param.a_max);
});
}

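For readers skimming the diff, here is a minimal NumPy sketch (illustration only, not MXNet code) of the element-wise behaviour of the two kernels above: the forward kernel clamps each value into [a_min, a_max], and the gradient kernel zeroes the incoming gradient only where the input fell strictly outside that interval.

    import numpy as np

    def clip_forward(x, a_min, a_max):
        # mirrors struct clip: values above a_max become a_max, values below a_min become a_min
        out = x.copy()
        out[x > a_max] = a_max
        out[x < a_min] = a_min
        return out

    def clip_backward(grad, x, a_min, a_max):
        # mirrors struct clip_grad: gradient is zeroed where the input was outside [a_min, a_max]
        out = grad.copy()
        out[(x > a_max) | (x < a_min)] = 0
        return out

    x = np.array([-1.0, -0.6, 0.0, 0.6, 1.0], dtype=np.float32)
    print(clip_forward(x, -0.6, 0.6))                    # [-0.6 -0.6  0.   0.6  0.6]
    print(clip_backward(np.ones_like(x), x, -0.6, 0.6))  # [0. 1. 1. 1. 0.]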
8 changes: 5 additions & 3 deletions src/operator/tensor/matrix_op.cc
@@ -702,9 +702,11 @@ MXNET_ADD_SPARSE_OP_ALIAS(clip)
.describe(R"code(Clips (limits) the values in an array.
Given an interval, values outside the interval are clipped to the interval edges.
- Clipping ``x`` between `a_min` and `a_x` would be::
+ Clipping ``x`` between `a_min` and `a_max` would be::
-    clip(x, a_min, a_max) = max(min(x, a_max), a_min))
+ .. math::
+    clip(x, a_min, a_max) = \max(\min(x, a_max), a_min))
Example::
@@ -766,7 +768,7 @@ parameter values:
.add_arguments(ClipParam::__FIELDS__());
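As a quick illustrative check (not part of the commit), the formula added to the docstring above is the usual clip identity and can be verified against np.clip:

    import numpy as np

    x = np.linspace(-1, 1, 9)
    lhs = np.clip(x, -0.6, 0.6)
    rhs = np.maximum(np.minimum(x, 0.6), -0.6)  # max(min(x, a_max), a_min)
    assert np.allclose(lhs, rhs)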

NNVM_REGISTER_OP(_backward_clip)
- .set_num_inputs(1)
+ .set_num_inputs(2)
.set_num_outputs(1)
.set_attr_parser(ParamParser<ClipParam>)
.set_attr<nnvm::TIsBackward>("TIsBackward", true)
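A rough NumPy sketch (hypothetical names, for illustration) of why _backward_clip takes two inputs rather than one: as the ClipGrad_ launch above shows, the backward kernel consumes both the incoming head gradient (inputs[0]) and the original data (inputs[1]), because the gradient mask depends on where each data element fell relative to the clip bounds. Registering only one input would leave the data tensor unavailable to the kernel.

    import numpy as np

    def backward_clip(head_grad, data, a_min, a_max):
        # gradient passes through where the input was inside [a_min, a_max], else it is 0
        mask = (data >= a_min) & (data <= a_max)
        return head_grad * mask

    data = np.array([-1.0, -0.5, 0.5, 1.0], dtype=np.float32)
    print(backward_clip(np.ones_like(data), data, -0.6, 0.6))  # [0. 1. 1. 0.]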
16 changes: 13 additions & 3 deletions tests/python/unittest/test_operator.py
@@ -4174,15 +4174,25 @@ def test_special_functions_using_scipy():


@with_seed()
- @unittest.skip("Flaky test, tracked at https://github.com/apache/incubator-mxnet/issues/12901")
  def test_clip():
      data = mx.symbol.Variable('data')
      shape = (30, 30)
-     data_tmp = np.random.uniform(-1, 1, shape)
+     data_tmp = np.random.uniform(-1, 1, shape).astype('float32')
      test = mx.sym.clip(data, a_max=0.6, a_min=-0.6)
      check_symbolic_forward(test, [data_tmp], [np.clip(data_tmp, -0.6, 0.6)])
      check_symbolic_backward(test, [data_tmp], [np.ones(shape)],
-                             [np.where(data_tmp < 0.6, [1], [0]) * np.where(data_tmp > -0.6, [1], [0])])
+                             [np.where(data_tmp <= 0.6, [1], [0]) * np.where(data_tmp >= -0.6, [1], [0])])
+
+     # Test monitor on symbol using clip
+
+     def simple_callback(name, arr):
+         pass
+
+     exe = test.simple_bind(ctx=mx.current_context(), data=shape)
+     exe.set_monitor_callback(simple_callback, monitor_all=True)
+     exe.forward(is_train=True)
+     exe.backward(out_grads=mx.nd.ones(shape))
+     mx.nd.waitall()


@with_seed()
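One more illustrative note on the `<` to `<=` change in the expected gradient above (a sketch, not part of the test file): an element exactly equal to a_max is left untouched by the forward kernel, which only rewrites values strictly greater than a_max, so the reference gradient should be 1 at the boundary rather than 0.

    import numpy as np

    data_tmp = np.array([-0.6, 0.0, 0.6], dtype='float32')
    expected = np.where(data_tmp <= 0.6, [1], [0]) * np.where(data_tmp >= -0.6, [1], [0])
    print(expected)  # [1 1 1]  (gradient passes through at the exact boundaries)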
