diff --git a/src/TensorFlowNET.Core/APIs/tf.nn.cs b/src/TensorFlowNET.Core/APIs/tf.nn.cs
index 397c68c7c..112c48628 100644
--- a/src/TensorFlowNET.Core/APIs/tf.nn.cs
+++ b/src/TensorFlowNET.Core/APIs/tf.nn.cs
@@ -101,6 +101,8 @@ public Tensor embedding_lookup(Tensor @params,
                 name: name);
 
         public IActivation relu() => new relu();
+
+        public IActivation swish() => new swish();
 
         public IActivation tanh() => new tanh();
 
@@ -111,6 +113,9 @@ public Tensor tanh(Tensor x, string name = null)
         public Tensor relu(Tensor features, string name = null)
             => gen_nn_ops.relu(features, name);
 
+        public Tensor relu6(Tensor features, string name = null)
+            => gen_nn_ops.relu6(features, name);
+
         public Tensor[] fused_batch_norm(Tensor x,
             Tensor scale,
             Tensor offset,
diff --git a/src/TensorFlowNET.Core/Keras/Activations/Activations.cs b/src/TensorFlowNET.Core/Keras/Activations/Activations.cs
index f0d59ed62..37264104a 100644
--- a/src/TensorFlowNET.Core/Keras/Activations/Activations.cs
+++ b/src/TensorFlowNET.Core/Keras/Activations/Activations.cs
@@ -32,6 +32,7 @@ public interface IActivationsApi
         Activation Linear { get; }
 
         Activation Relu { get; }
+        Activation Relu6 { get; }
 
         Activation Sigmoid { get; }
 
diff --git a/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs b/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
index 3fd98e7a8..57273eb08 100644
--- a/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
+++ b/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
@@ -180,6 +180,9 @@ public ILayer LayerNormalization(Axis? axis,
         public ILayer Normalization(Shape? input_shape = null, int? axis = -1, float? mean = null, float? variance = null, bool invert = false);
         public ILayer LeakyReLU(float alpha = 0.3f);
+        public ILayer ReLU6();
+
+
         public IRnnCell LSTMCell(int uints,
             string activation = "tanh",
             string recurrent_activation = "sigmoid",
diff --git a/src/TensorFlowNET.Keras/Activations.cs b/src/TensorFlowNET.Keras/Activations.cs
index ce5b4eb13..d3801902f 100644
--- a/src/TensorFlowNET.Keras/Activations.cs
+++ b/src/TensorFlowNET.Keras/Activations.cs
@@ -20,6 +20,11 @@ public class Activations: IActivationsApi
             Name = "relu",
             ActivationFunction = (features, name) => tf.Context.ExecuteOp("Relu", name, new ExecuteOpArgs(features))
         };
+        private static Activation _relu6 = new Activation()
+        {
+            Name = "relu6",
+            ActivationFunction = (features, name) => tf.Context.ExecuteOp("Relu6", name, new ExecuteOpArgs(features))
+        };
         private static Activation _sigmoid = new Activation()
         {
             Name = "sigmoid",
@@ -55,6 +60,7 @@ static Activations()
             _nameActivationMap = new Dictionary<string, Activation>();
 
             RegisterActivation(_relu);
+            RegisterActivation(_relu6);
             RegisterActivation(_linear);
             RegisterActivation(_sigmoid);
             RegisterActivation(_softmax);
@@ -65,6 +71,7 @@ static Activations()
         public Activation Linear => _linear;
 
         public Activation Relu => _relu;
+        public Activation Relu6 => _relu6;
 
         public Activation Sigmoid => _sigmoid;
 
diff --git a/src/TensorFlowNET.Keras/Layers/Activation/ReLu6.cs b/src/TensorFlowNET.Keras/Layers/Activation/ReLu6.cs
new file mode 100644
index 000000000..5af3f7677
--- /dev/null
+++ b/src/TensorFlowNET.Keras/Layers/Activation/ReLu6.cs
@@ -0,0 +1,25 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using Tensorflow.Common.Types;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+    /// <summary>
+    /// Rectified Linear Unit activation capped at 6: computes min(max(x, 0), 6) element-wise.
+    /// </summary>
+    public class ReLu6 : Layer
+    {
+        public ReLu6() : base(new LayerArgs { })
+        {
+        }
+
+        protected override Tensors Call(Tensors inputs, Tensors state = null, bool? training = null, IOptionalArgs? optional_args = null)
+        {
+            return tf.nn.relu6(inputs);
+        }
+    }
+}
diff --git a/src/TensorFlowNET.Keras/Layers/LayersApi.cs b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
index bcc19dc22..e2adb23d0 100644
--- a/src/TensorFlowNET.Keras/Layers/LayersApi.cs
+++ b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
@@ -735,6 +735,15 @@ public ILayer LeakyReLU(float alpha = 0.3f)
             });
 
+        /// <summary>
+        /// Rectified Linear Unit activation capped at 6.
+        /// Computes min(max(features, 0), 6) element-wise.
+        /// </summary>
+        /// <returns>The ReLU6 layer.</returns>
+        public ILayer ReLU6()
+            => new ReLu6();
+
+
         public IRnnCell SimpleRNNCell(
             int units,
             string activation = "tanh",
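A minimal usage sketch for reviewers, not part of the diff: it exercises the three entry points this change wires up, assuming the usual TensorFlow.NET bindings (tf and keras) and the existing Dense overload that accepts a string activation name.

    using static Tensorflow.Binding;
    using static Tensorflow.KerasApi;

    var x = tf.constant(new[] { -3f, 2f, 8f });

    // Functional op added in tf.nn.cs: clips element-wise to [0, 6].
    var y = tf.nn.relu6(x);                  // expected: [0, 2, 6]

    // Keras layer added in LayersApi.cs.
    var z = keras.layers.ReLU6().Apply(x);

    // String-registered activation from Activations.cs, resolvable by name.
    var dense = keras.layers.Dense(10, activation: "relu6");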