This repository has been archived by the owner on Nov 4, 2024. It is now read-only.

Commit e8b9675

test: check for FP64 support

avik-pal committed Sep 21, 2024 (1 parent: ed85651)
Showing 13 changed files with 140 additions and 88 deletions.
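Every hunk below destructures a fourth element, fp64, from each MODES entry. That capability flag comes from the shared test setup, which this commit does not touch, so the snippet below is only a hedged sketch of how such a flag could be produced: the (name, array_type, ongpu, fp64) tuple layout matches the tests, but the backend helpers and the unconditional using line are assumptions, not code from this repository.

# Hypothetical SharedTestSetup sketch: each MODES entry is (name, array_type, ongpu, fp64),
# where fp64 records whether the backend supports Float64 (Metal, for example, does not).
using CUDA, AMDGPU, Metal  # assumed backends; the real setup loads them conditionally

const MODES = begin
    modes = []
    push!(modes, ("cpu", Array, false, true))                             # CPU always has FP64
    CUDA.functional() && push!(modes, ("cuda", CuArray, true, true))
    AMDGPU.functional() && push!(modes, ("amdgpu", ROCArray, true, true))
    Metal.functional() && push!(modes, ("metal", MtlArray, true, false))  # no FP64 on Metal
    modes
end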
4 changes: 3 additions & 1 deletion test/common_ops/activation_tests.jl
@@ -5,11 +5,13 @@
apply_act_fast(f::F, x) where {F} = sum(abs2, fast_activation!!(f, copy(x)))
apply_act_fast2(f::F, x) where {F} = sum(abs2, fast_activation(f, x))

@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "$f: $T" for f in [identity, relu, sigmoid, sigmoid_fast, softplus,
logsigmoid, gelu, swish, lisht, tanh, tanh_fast],
T in [Float16, Float32, Float64]

+!fp64 && T == Float64 && continue

x = rand(rng, T, 4, 3) |> aType

y1 = apply_act(f, x)
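For reference, the skip pattern repeated in every file of this commit can be exercised on its own: the for clause attached to @testset behaves like an ordinary loop body, so a guarded continue simply skips eltypes the active backend cannot handle. A minimal sketch using only the Test stdlib, with fp64 hard-coded purely for illustration:

using Test

fp64 = false  # pretend the active backend lacks Float64 support

@testset "eltype $T" for T in (Float16, Float32, Float64)
    !fp64 && T == Float64 && continue  # skip the unsupported eltype
    x = rand(T, 4, 3)
    @test all(isfinite, x)
end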
4 changes: 3 additions & 1 deletion test/common_ops/bias_act_tests.jl
@@ -11,13 +11,15 @@
end
(f::__Fix1)(x, b) = f.f(f.act, x, b)

@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "$act, $T, $sz" for act in [
identity, relu, sigmoid, sigmoid_fast, softplus,
logsigmoid, gelu, swish, lisht, tanh, tanh_fast],
T in [Float16, Float32, Float64],
sz in [(2, 2, 3, 4), (4, 5)]

+!fp64 && T == Float64 && continue

x = rand(rng, T, sz) |> aType
b = rand(rng, T, sz[end - 1]) |> aType

15 changes: 10 additions & 5 deletions test/common_ops/conv_tests.jl
@@ -92,44 +92,49 @@ export expand, convfilter, calc_padding, anonact, TEST_BLOCKS, run_conv_testing
end

@testitem "Fused Conv: Group 1" tags=[:conv] setup=[SharedTestSetup, ConvSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "$(Tw) x $(Tx) hasbias: $(hasbias) activation: $(activation) kernel: $(kernel) padding: $(padding) stride: $(stride) groups: $(groups)" for ((Tx, Tw), hasbias, activation, (kernel, padding, stride, groups)) in TEST_BLOCKS[1]
+!fp64 && (Tx == Float64 || Tw == Float64) && continue
run_conv_testing(generate_fixed_array, activation, kernel, stride,
padding, hasbias, groups, Tw, Tx, aType, mode, ongpu)
end
end
end

@testitem "Fused Conv: Group 2" tags=[:conv] setup=[SharedTestSetup, ConvSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "$(Tw) x $(Tx) hasbias: $(hasbias) activation: $(activation) kernel: $(kernel) padding: $(padding) stride: $(stride) groups: $(groups)" for ((Tx, Tw), hasbias, activation, (kernel, padding, stride, groups)) in TEST_BLOCKS[2]
+!fp64 && (Tx == Float64 || Tw == Float64) && continue
run_conv_testing(generate_fixed_array, activation, kernel, stride,
padding, hasbias, groups, Tw, Tx, aType, mode, ongpu)
end
end
end

@testitem "Fused Conv: Group 3" tags=[:conv] setup=[SharedTestSetup, ConvSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "$(Tw) x $(Tx) hasbias: $(hasbias) activation: $(activation) kernel: $(kernel) padding: $(padding) stride: $(stride) groups: $(groups)" for ((Tx, Tw), hasbias, activation, (kernel, padding, stride, groups)) in TEST_BLOCKS[3]
+!fp64 && (Tx == Float64 || Tw == Float64) && continue
run_conv_testing(generate_fixed_array, activation, kernel, stride,
padding, hasbias, groups, Tw, Tx, aType, mode, ongpu)
end
end
end

@testitem "Fused Conv: Group 4" tags=[:conv] setup=[SharedTestSetup, ConvSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "$(Tw) x $(Tx) hasbias: $(hasbias) activation: $(activation) kernel: $(kernel) padding: $(padding) stride: $(stride) groups: $(groups)" for ((Tx, Tw), hasbias, activation, (kernel, padding, stride, groups)) in TEST_BLOCKS[4]
+!fp64 && (Tx == Float64 || Tw == Float64) && continue
run_conv_testing(generate_fixed_array, activation, kernel, stride,
padding, hasbias, groups, Tw, Tx, aType, mode, ongpu)
end
end
end

@testitem "Fused Conv: Group 5" tags=[:conv] setup=[SharedTestSetup, ConvSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "$(Tw) x $(Tx) hasbias: $(hasbias) activation: $(activation) kernel: $(kernel) padding: $(padding) stride: $(stride) groups: $(groups)" for ((Tx, Tw), hasbias, activation, (kernel, padding, stride, groups)) in TEST_BLOCKS[5]
+!fp64 && (Tx == Float64 || Tw == Float64) && continue
run_conv_testing(generate_fixed_array, activation, kernel, stride,
padding, hasbias, groups, Tw, Tx, aType, mode, ongpu)
end
15 changes: 10 additions & 5 deletions test/common_ops/dense_tests.jl
@@ -79,40 +79,45 @@ export ALL_TEST_CONFIGS, TEST_BLOCKS, run_dense_testing
end

@testitem "Fused Dense: Group 1" tags=[:dense] setup=[SharedTestSetup, DenseSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $Tw x $Tx, size $M x $N, bias $hasbias, activation $activation" for ((Tx, Tw), M, N, hasbias, activation) in TEST_BLOCKS[1]
+!fp64 && (Tx == Float64 || Tw == Float64) && continue
run_dense_testing(Tw, Tx, M, N, hasbias, activation, aType, mode, ongpu)
end
end
end

@testitem "Fused Dense: Group 2" tags=[:dense] setup=[SharedTestSetup, DenseSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $Tw x $Tx, size $M x $N, bias $hasbias, activation $activation" for ((Tx, Tw), M, N, hasbias, activation) in TEST_BLOCKS[2]
+!fp64 && (Tx == Float64 || Tw == Float64) && continue
run_dense_testing(Tw, Tx, M, N, hasbias, activation, aType, mode, ongpu)
end
end
end

@testitem "Fused Dense: Group 3" tags=[:dense] setup=[SharedTestSetup, DenseSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $Tw x $Tx, size $M x $N, bias $hasbias, activation $activation" for ((Tx, Tw), M, N, hasbias, activation) in TEST_BLOCKS[3]
+!fp64 && (Tx == Float64 || Tw == Float64) && continue
run_dense_testing(Tw, Tx, M, N, hasbias, activation, aType, mode, ongpu)
end
end
end

@testitem "Fused Dense: Group 4" tags=[:dense] setup=[SharedTestSetup, DenseSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $Tw x $Tx, size $M x $N, bias $hasbias, activation $activation" for ((Tx, Tw), M, N, hasbias, activation) in TEST_BLOCKS[4]
+!fp64 && (Tx == Float64 || Tw == Float64) && continue
run_dense_testing(Tw, Tx, M, N, hasbias, activation, aType, mode, ongpu)
end
end
end

@testitem "Fused Dense: Group 5" tags=[:dense] setup=[SharedTestSetup, DenseSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $Tw x $Tx, size $M x $N, bias $hasbias, activation $activation" for ((Tx, Tw), M, N, hasbias, activation) in TEST_BLOCKS[5]
+!fp64 && (Tx == Float64 || Tw == Float64) && continue
run_dense_testing(Tw, Tx, M, N, hasbias, activation, aType, mode, ongpu)
end
end
12 changes: 9 additions & 3 deletions test/common_ops/dropout_tests.jl
@@ -1,11 +1,13 @@
@testitem "Dropout" tags=[:other_ops] setup=[SharedTestSetup] begin
rng = StableRNG(12345)

@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "$T, $x_shape, $dims" for T in (Float16, Float32, Float64),
x_shape in ((2, 3), (2, 2, 3), (2, 2, 3, 1), (2, 2, 1, 3, 1)),
dims in (:, 1, (1, 2))

+!fp64 && T == Float64 && continue

x = randn(rng, T, x_shape) |> aType

@test @inferred(dropout(rng, x, T(0.5), Val(true), T(2), dims)) isa Any
@@ -46,10 +48,12 @@ end

rng = StableRNG(12345)

@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "$T: $x_shape" for T in (Float16, Float32, Float64),
x_shape in ((2, 3), (2, 2, 3), (2, 2, 3, 1), (2, 2, 1, 3, 1))

+!fp64 && T == Float64 && continue

x = randn(rng, T, x_shape) |> aType
mask = rand(T, x_shape) |> aType

@@ -133,10 +137,12 @@ end

rng = StableRNG(12345)

@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "$T: $x_shape" for T in (Float16, Float32, Float64),
x_shape in ((2, 3), (2, 2, 3), (2, 2, 3, 1), (2, 2, 1, 3, 1))

+!fp64 && T == Float64 && continue

x = randn(rng, T, x_shape) |> aType

@test @inferred(alpha_dropout(rng, x, T(0.5), Val(true))) isa Any
19 changes: 13 additions & 6 deletions test/normalization/batchnorm_tests.jl
@@ -123,52 +123,59 @@ export setup_batchnorm, ALL_TEST_CONFIGS, TEST_BLOCKS, run_batchnorm_testing
end

@testitem "Batch Norm: Group 1" tags=[:batch_norm] setup=[SharedTestSetup, BatchNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $act $affine $track_stats" for (T, sz, training, affine, track_stats, act) in TEST_BLOCKS[1]
+!fp64 && T == Float64 && continue
run_batchnorm_testing(generate_fixed_array, T, sz, training,
affine, track_stats, act, aType, mode, ongpu)
end
end
end

@testitem "Batch Norm: Group 2" tags=[:batch_norm] setup=[SharedTestSetup, BatchNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $act $affine $track_stats" for (T, sz, training, affine, track_stats, act) in TEST_BLOCKS[2]
+!fp64 && T == Float64 && continue
run_batchnorm_testing(generate_fixed_array, T, sz, training,
affine, track_stats, act, aType, mode, ongpu)
end
end
end

@testitem "Batch Norm: Group 3" tags=[:batch_norm] setup=[SharedTestSetup, BatchNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $act $affine $track_stats" for (T, sz, training, affine, track_stats, act) in TEST_BLOCKS[3]
+!fp64 && T == Float64 && continue
run_batchnorm_testing(generate_fixed_array, T, sz, training,
affine, track_stats, act, aType, mode, ongpu)
end
end
end

@testitem "Batch Norm: Group 4" tags=[:batch_norm] setup=[SharedTestSetup, BatchNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $act $affine $track_stats" for (T, sz, training, affine, track_stats, act) in TEST_BLOCKS[4]
+!fp64 && T == Float64 && continue
run_batchnorm_testing(generate_fixed_array, T, sz, training,
affine, track_stats, act, aType, mode, ongpu)
end
end
end

@testitem "Batch Norm: Group 5" tags=[:batch_norm] setup=[SharedTestSetup, BatchNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $act $affine $track_stats" for (T, sz, training, affine, track_stats, act) in TEST_BLOCKS[5]
+!fp64 && T == Float64 && continue
run_batchnorm_testing(generate_fixed_array, T, sz, training,
affine, track_stats, act, aType, mode, ongpu)
end
end
end

@testitem "Batch Norm: Mixed Precision" tags=[:batch_norm] setup=[SharedTestSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
+!fp64 && continue

x = rand(Float64, 4, 4, 6, 2) |> aType
scale = rand(Float32, 6) |> aType
bias = rand(Float32, 6) |> aType
15 changes: 10 additions & 5 deletions test/normalization/groupnorm_tests.jl
@@ -93,40 +93,45 @@ export setup_groupnorm, ALL_TEST_CONFIGS, TEST_BLOCKS, run_groupnorm_testing
end

@testitem "Group Norm: Group 1" tags=[:group_norm] setup=[SharedTestSetup, GroupNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $groups, $affine, $act" for (T, sz, groups, affine, act) in TEST_BLOCKS[1]
+!fp64 && T == Float64 && continue
run_groupnorm_testing(T, sz, groups, affine, act, aType, mode, ongpu)
end
end
end

@testitem "Group Norm: Group 2" tags=[:group_norm] setup=[SharedTestSetup, GroupNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $groups, $affine, $act" for (T, sz, groups, affine, act) in TEST_BLOCKS[2]
+!fp64 && T == Float64 && continue
run_groupnorm_testing(T, sz, groups, affine, act, aType, mode, ongpu)
end
end
end

@testitem "Group Norm: Group 3" tags=[:group_norm] setup=[SharedTestSetup, GroupNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $groups, $affine, $act" for (T, sz, groups, affine, act) in TEST_BLOCKS[3]
+!fp64 && T == Float64 && continue
run_groupnorm_testing(T, sz, groups, affine, act, aType, mode, ongpu)
end
end
end

@testitem "Group Norm: Group 4" tags=[:group_norm] setup=[SharedTestSetup, GroupNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $groups, $affine, $act" for (T, sz, groups, affine, act) in TEST_BLOCKS[4]
+!fp64 && T == Float64 && continue
run_groupnorm_testing(T, sz, groups, affine, act, aType, mode, ongpu)
end
end
end

@testitem "Group Norm: Group 5" tags=[:group_norm] setup=[SharedTestSetup, GroupNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $groups, $affine, $act" for (T, sz, groups, affine, act) in TEST_BLOCKS[5]
+!fp64 && T == Float64 && continue
run_groupnorm_testing(T, sz, groups, affine, act, aType, mode, ongpu)
end
end
15 changes: 10 additions & 5 deletions test/normalization/instancenorm_tests.jl
@@ -84,8 +84,9 @@ end

@testitem "Instance Norm: Group 1" tags=[:instance_norm] setup=[
SharedTestSetup, InstanceNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $training $act" for (T, sz, training, act) in TEST_BLOCKS[1]
+!fp64 && T == Float64 && continue
run_instancenorm_testing(
generate_fixed_array, T, sz, training, act, aType, mode, ongpu)
end
@@ -94,8 +95,9 @@ end

@testitem "Instance Norm: Group 2" tags=[:instance_norm] setup=[
SharedTestSetup, InstanceNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $training $act" for (T, sz, training, act) in TEST_BLOCKS[2]
+!fp64 && T == Float64 && continue
run_instancenorm_testing(
generate_fixed_array, T, sz, training, act, aType, mode, ongpu)
end
@@ -104,8 +106,9 @@ end

@testitem "Instance Norm: Group 3" tags=[:instance_norm] setup=[
SharedTestSetup, InstanceNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $training $act" for (T, sz, training, act) in TEST_BLOCKS[3]
+!fp64 && T == Float64 && continue
run_instancenorm_testing(
generate_fixed_array, T, sz, training, act, aType, mode, ongpu)
end
@@ -114,8 +117,9 @@ end

@testitem "Instance Norm: Group 4" tags=[:instance_norm] setup=[
SharedTestSetup, InstanceNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $training $act" for (T, sz, training, act) in TEST_BLOCKS[4]
+!fp64 && T == Float64 && continue
run_instancenorm_testing(
generate_fixed_array, T, sz, training, act, aType, mode, ongpu)
end
@@ -124,8 +128,9 @@ end

@testitem "Instance Norm: Group 5" tags=[:instance_norm] setup=[
SharedTestSetup, InstanceNormSetup] begin
@testset "$mode" for (mode, aType, ongpu) in MODES
@testset "$mode" for (mode, aType, ongpu, fp64) in MODES
@testset "eltype $T, size $sz, $training $act" for (T, sz, training, act) in TEST_BLOCKS[5]
+!fp64 && T == Float64 && continue
run_instancenorm_testing(
generate_fixed_array, T, sz, training, act, aType, mode, ongpu)
end