Skip to content

Commit

Permalink
Remove MIOpen softmax & batchnorm (#630)
Browse files Browse the repository at this point in the history
  • Loading branch information
pxl-th authored May 15, 2024
1 parent ace0a2e commit c2fe803
Show file tree
Hide file tree
Showing 3 changed files with 17 additions and 33 deletions.
5 changes: 2 additions & 3 deletions src/dnn/MIOpen.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ using CEnum
using ..AMDGPU
import AMDGPU: ROCArray, LockedObject, HandleCache, HIP, library_state
import AMDGPU: libMIOpen_path
import AMDGPU.Runtime.Mem # TODO remove?
import .HIP: hipStream_t

include("libMIOpen.jl")
Expand Down Expand Up @@ -89,8 +88,8 @@ stream() = lib_state().stream
include("descriptors.jl")
include("convolution.jl")
include("pooling.jl")
include("softmax.jl")
include("activations.jl")
include("batchnorm.jl")
# include("softmax.jl")
# include("batchnorm.jl")

end
13 changes: 7 additions & 6 deletions test/dnn/miopen.jl
Original file line number Diff line number Diff line change
Expand Up @@ -28,16 +28,17 @@ end
include("pool.jl")
end

@testset "Softmax" begin
include("softmax.jl")
end
# NOTE: https://github.com/ROCm/MIOpen/issues/2966
# @testset "Softmax" begin
# include("softmax.jl")
# end

@testset "Activations" begin
include("activations.jl")
end

@testset "Batch Normalization" begin
include("batchnorm.jl")
end
# @testset "Batch Normalization" begin
# include("batchnorm.jl")
# end

end
32 changes: 8 additions & 24 deletions test/dnn/softmax.jl
Original file line number Diff line number Diff line change
Expand Up @@ -12,38 +12,22 @@
# Regular softmax.

yd = MIOpen.softmax(xd; dims)
if T == Float16
@test !any(isnan.(Array(yd)))
else
y = NNlib.softmax(x; dims)
@test Array(yd) ≈ y atol=atol
end
y = NNlib.softmax(x; dims)
@test Array(yd) ≈ y atol=atol

dxd = MIOpen.∇softmax(dyd, yd; dims)
if T == Float16
@test !any(isnan.(Array(dxd)))
else
dx = NNlib.∇softmax_data(dy, y; dims)
@test Array(dxd) ≈ dx atol=atol
end
dx = NNlib.∇softmax_data(dy, y; dims)
@test Array(dxd) ≈ dx atol=atol

# Log softmax.

yd = MIOpen.logsoftmax(xd; dims)
if T == Float16
@test !any(isnan.(Array(yd)))
else
y = NNlib.logsoftmax(x; dims)
@test Array(yd) ≈ y atol=atol
end
y = NNlib.logsoftmax(x; dims)
@test Array(yd) ≈ y atol=atol

dxd = MIOpen.∇logsoftmax(dyd, yd; dims)
if T == Float16
@test !any(isnan.(Array(dxd)))
else
dx = NNlib.∇logsoftmax_data(dy, y; dims)
@test Array(dxd) ≈ dx atol=atol
end
dx = NNlib.∇logsoftmax_data(dy, y; dims)
@test Array(dxd) ≈ dx atol=atol
end
end
end

0 comments on commit c2fe803

Please sign in to comment.