Commit
llvm, TransferFunction/ReLU: Add implementation of derivative using base output

Both Python and compiled.
Add tests.

Signed-off-by: Jan Vesely <[email protected]>
jvesely committed Nov 14, 2022
1 parent 957bb61 commit f933b5d
Showing 2 changed files with 16 additions and 6 deletions.
@@ -1579,9 +1579,12 @@ def _gen_llvm_transfer(self, builder, index, ctx, vi, vo, params, state, *, tags
         # Maxnum for some reason needs full function prototype
         max_f = ctx.get_builtin("maxnum", [ctx.float_ty])
         var = builder.load(ptri)
-        val = builder.fsub(var, bias)
+        if "derivative_out" in tags:
+            val = builder.fdiv(var, gain)
+        else:
+            val = builder.fsub(var, bias)
 
-        if "derivative" in tags:
+        if "derivative" in tags or "derivative_out" in tags:
             predicate = builder.fcmp_ordered('>', val, val.type(0))
             val = builder.select(predicate, gain, builder.fmul(gain, leak))
         else:
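In plain Python terms, the two compiled tag branches above reduce to the scalar sketch below (relu_derivative_sketch is a hypothetical name used only for illustration; it assumes gain > 0 and leak >= 0, so that output / gain keeps the sign of input - bias):

    def relu_derivative_sketch(gain, bias, leak, input=None, output=None):
        # mirror the compiled branches: use the input when given, otherwise
        # recover the sign of (input - bias) from the base output
        val = (input - bias) if input is not None else (output / gain)
        # the fcmp/select pair then picks the slope
        return gain if val > 0 else gain * leak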
@@ -1615,10 +1618,14 @@ def derivative(self, input=None, output=None, context=None):
         leak = self._get_current_parameter_value(LEAK, context)
         bias = self._get_current_parameter_value(BIAS, context)
 
-        value = np.empty_like(input)
-        value[(input - bias) > 0] = gain
-        value[(input - bias) <= 0] = gain * leak
+        if input is not None:
+            # Use input if provided
+            variable = np.array(input) - bias
+        else:
+            # Infer input from output
+            variable = np.array(output) / gain
 
+        value = np.where(variable > 0, gain, gain * leak)
         return value
 
 # **********************************************************************************************************************
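A quick NumPy check of the new else branch (a standalone sketch, not part of the commit; it uses the same forward expression as the test helper below, with arbitrary parameter values and assuming gain > 0 and leak > 0):

    import numpy as np

    gain, bias, leak = 2.0, 0.5, 0.1
    x = np.linspace(-3.0, 3.0, 13)
    # leaky ReLU forward pass, matching the relu_helper expression in the tests
    out = np.maximum(gain * (x - bias), leak * gain * (x - bias))

    d_from_input = np.where((x - bias) > 0, gain, gain * leak)
    d_from_output = np.where((out / gain) > 0, gain, gain * leak)
    assert np.allclose(d_from_input, d_from_output)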
tests/functions/test_transfer.py (4 additions, 1 deletion)
@@ -27,6 +27,8 @@
 gaussian_helper = np.e**(-(test_var - RAND2)**2 / (2 * RAND1**2)) / np.sqrt(2 * np.pi * RAND1)
 gaussian_helper = RAND3 * gaussian_helper + RAND4
 
+relu_helper = np.maximum(RAND1 * (test_var - RAND2), RAND3 * RAND1 *(test_var - RAND2))
+
 def gaussian_distort_helper(seed):
     state = np.random.RandomState([seed])
     # compensate for construction
@@ -39,7 +41,7 @@ def gaussian_distort_helper(seed):
     pytest.param(Functions.Exponential, test_var, {kw.SCALE:RAND1, kw.RATE:RAND2}, RAND1 * np.exp(RAND2 * test_var), id="EXPONENTIAL"),
     pytest.param(Functions.Logistic, test_var, {kw.GAIN:RAND1, kw.X_0:RAND2, kw.OFFSET:RAND3, kw.SCALE:RAND4}, RAND4 / (1 + np.exp(-(RAND1 * (test_var - RAND2)) + RAND3)), id="LOGISTIC"),
     pytest.param(Functions.Tanh, test_var, {kw.GAIN:RAND1, kw.BIAS:RAND2, kw.X_0:RAND3, kw.OFFSET:RAND4}, tanh_helper, id="TANH"),
-    pytest.param(Functions.ReLU, test_var, {kw.GAIN:RAND1, kw.BIAS:RAND2, kw.LEAK:RAND3}, np.maximum(RAND1 * (test_var - RAND2), RAND3 * RAND1 *(test_var - RAND2)), id="RELU"),
+    pytest.param(Functions.ReLU, test_var, {kw.GAIN:RAND1, kw.BIAS:RAND2, kw.LEAK:RAND3}, relu_helper, id="RELU"),
     # Angle doesn't have a helper using 'test_var', hardcode the input as well
     pytest.param(Functions.Angle, [0.5488135, 0.71518937, 0.60276338, 0.54488318, 0.4236548,
                                    0.64589411, 0.43758721, 0.891773, 0.96366276, 0.38344152], {},
@@ -164,6 +166,7 @@ def test_transfer_derivative(func, variable, params, expected, benchmark, func_m
 
 
 derivative_out_test_data = [
+    (Functions.ReLU, relu_helper, {kw.GAIN:RAND1, kw.BIAS:RAND2, kw.LEAK:RAND3}, np.where((test_var - RAND2) > 0, RAND1, RAND1 * RAND3)),
     (Functions.SoftMax, softmax_helper, {kw.GAIN:RAND1, kw.OUTPUT_TYPE:kw.MAX_VAL, kw.PER_ITEM:False},
      [-0.010680386821751537, -0.011118109698906909, -0.01082040340318878, -0.010670257514724047, -0.010362498859374309,
       -0.010933660158663306, -0.010397412260182806, -0.011602329078808718, 0.09684744183944892, -0.010262384043848513]),
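A sketch of how the new ReLU row is presumably exercised, assuming the derivative_out tests call func.derivative(input=None, output=...) with the keyword names shown in the diff above; the top-level psyneulink import, the constructor keyword names (gain, bias, leak), and the numeric values standing in for RAND1..RAND3 are all assumptions for illustration:

    import numpy as np
    import psyneulink as pnl          # assumed top-level import path

    gain, bias, leak = 1.7, 0.3, 0.05                 # stand-ins for RAND1, RAND2, RAND3
    x = np.random.rand(10)                            # stand-in for test_var
    out = np.maximum(gain * (x - bias), leak * gain * (x - bias))   # relu_helper form

    f = pnl.ReLU(gain=gain, bias=bias, leak=leak)     # assumed constructor keywords
    expected = np.where((x - bias) > 0, gain, gain * leak)
    assert np.allclose(f.derivative(input=None, output=out), expected)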
