From 012fb557f4cdac5ab2aed9b469b9c8bfd3441c45 Mon Sep 17 00:00:00 2001
From: americast
Date: Sat, 7 Jul 2018 09:49:47 +0530
Subject: [PATCH 1/2] relu_nnpack

---
 src/activation.jl | 21 ++++++++++++++++++++-
 1 file changed, 20 insertions(+), 1 deletion(-)

diff --git a/src/activation.jl b/src/activation.jl
index d5c5092ec..cb6ff19f3 100644
--- a/src/activation.jl
+++ b/src/activation.jl
@@ -45,8 +45,27 @@ const logsigmoid = logσ
 [Rectified Linear Unit](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))
 activation function.
 """
-relu(x) = max(zero(x), x)
+relu_(x) = max(zero(x), x)
+
+function relu_(x, out)
+  ptp = ccall((:pthreadpool_create, :libnnpack), Ptr{Void}, (Csize_t,), 1)
+  input = Cfloat.(x)
+
+  ccall((:nnp_initialize,"libnnpack"),Void,(),)
+  ccall((:nnp_relu_output,"libnnpack"),Void,(Csize_t, Csize_t, Ptr{Cfloat}, Ptr{Cfloat}, Cfloat, Ptr{Void}), Csize_t(size(input, 2)), Csize_t(size(input, 1)), input, out, Cfloat(0), ptp)
+
+  return convert(typeof(x), out)
+end
+function relu(x)
+  @show length(size(x))
+  if (is_linux() || is_mac()) && length(size(x)) > 0
+    out = zeros(Cfloat, size(x))
+    return relu_(x, out)
+  else
+    return relu_(x)
+  end
+end
 
 
 """
     leakyrelu(x) = max(0.01x, x)

From 29f25ece627e76f3d50f5b7970941c5c9eedb360 Mon Sep 17 00:00:00 2001
From: americast
Date: Sun, 8 Jul 2018 12:05:12 +0530
Subject: [PATCH 2/2] Fixes

---
 src/activation.jl | 13 ++++---------
 1 file changed, 4 insertions(+), 9 deletions(-)

diff --git a/src/activation.jl b/src/activation.jl
index cb6ff19f3..d852b5db5 100644
--- a/src/activation.jl
+++ b/src/activation.jl
@@ -45,26 +45,21 @@ const logsigmoid = logσ
 [Rectified Linear Unit](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))
 activation function.
 """
-relu_(x) = max(zero(x), x)
-
-function relu_(x, out)
+function relu_(x)
   ptp = ccall((:pthreadpool_create, :libnnpack), Ptr{Void}, (Csize_t,), 1)
   input = Cfloat.(x)
-
+  out = zeros(Cfloat, size(x))
   ccall((:nnp_initialize,"libnnpack"),Void,(),)
   ccall((:nnp_relu_output,"libnnpack"),Void,(Csize_t, Csize_t, Ptr{Cfloat}, Ptr{Cfloat}, Cfloat, Ptr{Void}), Csize_t(size(input, 2)), Csize_t(size(input, 1)), input, out, Cfloat(0), ptp)
 
-  return convert(typeof(x), out)
+  return out
 end
 function relu(x)
-  @show length(size(x))
   if (is_linux() || is_mac()) && length(size(x)) > 0
-    out = zeros(Cfloat, size(x))
-    return relu_(x, out)
-  else
     return relu_(x)
   end
+  return max(zero(x), x)
 end
 
 
 """
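
Usage sketch (illustrative only, not part of the patches above): assuming the patched src/activation.jl is loaded on Julia 0.6 (where is_linux/is_mac and Void are valid names) and libnnpack can be found on the library search path, the dispatch introduced by these commits sends array inputs through the NNPACK-backed relu_ on Linux/macOS, while scalar inputs keep the original generic definition.

    # Hypothetical call sites; names relu/relu_ come from the patch above.
    x = randn(Float32, 4, 3)

    relu(x)             # array on Linux/macOS: NNPACK path via nnp_relu_output
    relu(-1.0) == 0.0   # scalar: length(size(x)) == 0, falls back to max(zero(x), x)
    relu(2.5)  == 2.5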