From 8dcc5ba76bd9b20460d3f44937f9f780b63d5909 Mon Sep 17 00:00:00 2001
From: Michael Abbott <32575566+mcabbott@users.noreply.github.com>
Date: Sat, 7 Jan 2023 21:00:28 -0500
Subject: [PATCH] un-revert the removal of the active=true method

---
 src/deprecations.jl | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/src/deprecations.jl b/src/deprecations.jl
index 27c7bc2264..a99297649b 100644
--- a/src/deprecations.jl
+++ b/src/deprecations.jl
@@ -186,17 +186,6 @@ function update!(opt::Optimise.AbstractOptimiser, ::Params, grads::Union{Tuple,
 end
 
-function dropout(rng, x, p; dims=:, active::Bool=true)
-  if active
-    NNlib.dropout(rng, x, p; dims)
-  else
-    Base.depwarn("Flux.dropout(...; active=false) is deprecated. Please branch outside the function, or call dropout(x, 0) if you must.", :dropout)
-    return x
-  end
-end
-dropout(x, p; kwargs...) = dropout(NNlib._rng_from_array(x), x, p; kwargs...)
-
-
 # v0.14 deprecations
 
 # Enable these when 0.14 is released, and delete const ClipGrad = Optimise.ClipValue etc:
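
For reference, the depwarn being deleted told callers to branch outside the function rather than pass active=false. A minimal sketch of that migration, assuming NNlib's dropout(x, p; dims) is available; the maybe_dropout helper name is hypothetical, not part of Flux or NNlib:

    using NNlib

    # Hypothetical helper: decide outside the dropout call whether it is active,
    # instead of passing `active=false` to the removed Flux.dropout method.
    function maybe_dropout(x, p; training::Bool)
        # During training, zero a fraction `p` of elements and rescale the rest;
        # otherwise return `x` unchanged.
        training ? NNlib.dropout(x, p; dims=:) : x
    end

    x = rand(Float32, 4, 3)
    y_train = maybe_dropout(x, 0.5; training=true)   # stochastic, rescaled
    y_test  = maybe_dropout(x, 0.5; training=false)  # x passed through unchanged

The other option the warning mentions, dropout(x, 0), also leaves the input unchanged, since a rate of zero drops nothing and the rescaling factor 1/(1-0) is 1.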