diff --git a/src/NNlib.jl b/src/NNlib.jl
index c6212554e..4b199df31 100644
--- a/src/NNlib.jl
+++ b/src/NNlib.jl
@@ -2,7 +2,7 @@ module NNlib
 
 using Requires, Libdl
 
-export σ, sigmoid, relu, leakyrelu, elu, swish, selu, softplus, softsign, logσ, logsigmoid,
+export σ, sigmoid, relu, leakyrelu, elu, gelu, swish, selu, softplus, softsign, logσ, logsigmoid,
   softmax, logsoftmax, maxpool, meanpool
 
 include("numeric.jl")
diff --git a/src/activation.jl b/src/activation.jl
index 9f45b25d4..5e78d9dfc 100644
--- a/src/activation.jl
+++ b/src/activation.jl
@@ -66,6 +66,14 @@ You can also specify the coefficient explicitly, e.g. `elu(x, 1)`.
 """
 elu(x, α = one(x)) = ifelse(x ≥ 0, x/1, α * (exp(x) - one(x)))
 
+"""
+    gelu(x) = 0.5x*(1 + tanh(√(2/π)*(x + 0.044715x^3)))
+
+[Gaussian Error Linear Unit](https://arxiv.org/pdf/1606.08415.pdf)
+activation function.
+"""
+gelu(x) = 0.5x*(1 + tanh(√(2/π)*(x + 0.044715x^3)))
+
 """
     swish(x) = x * σ(x)
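
For reviewers who want to try the new activation, here is a minimal, self-contained sketch. The `gelu` definition is copied from the diff so the snippet runs without the patched NNlib; the asserted values are basic limit checks (GELU is ≈ identity for large positive inputs and ≈ 0 for large negative ones), not tests taken from this PR.

```julia
# Standalone copy of the definition added in src/activation.jl, so this
# runs without the branch installed (otherwise just `using NNlib`).
gelu(x) = 0.5x*(1 + tanh(√(2/π)*(x + 0.044715x^3)))

@assert gelu(0.0) == 0.0                         # passes through the origin
@assert isapprox(gelu(10.0), 10.0; atol = 1e-6)  # ≈ x for large positive x
@assert isapprox(gelu(-10.0), 0.0; atol = 1e-6)  # ≈ 0 for large negative x

# Like the other activations, it is scalar-valued and broadcasts elementwise:
gelu.([-2.0, -1.0, 0.0, 1.0, 2.0])
```

One thing worth flagging for review: because the constants are written as `0.5` and `0.044715` (both `Float64` literals), `gelu(1f0)` promotes to `Float64`, whereas `elu` uses `one(x)` and `x/1` to preserve the input's element type.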