fix: print error in CI
avik-pal committed Nov 21, 2024
1 parent efd87e8 commit a1ea977
Showing 7 changed files with 18 additions and 147 deletions.
35 changes: 1 addition & 34 deletions lib/LuxLib/test/normalization/batchnorm_tests.jl
@@ -100,7 +100,7 @@ const ALL_TEST_CONFIGS = Iterators.product(
     (identity, sigmoid_fast, anonact))
 
 const TEST_BLOCKS = collect(Iterators.partition(
-    ALL_TEST_CONFIGS, ceil(Int, length(ALL_TEST_CONFIGS) / 5)))
+    ALL_TEST_CONFIGS, ceil(Int, length(ALL_TEST_CONFIGS) / 2)))
 
 export setup_batchnorm, ALL_TEST_CONFIGS, TEST_BLOCKS, run_batchnorm_testing
 
@@ -128,39 +128,6 @@ end
     end
 end
 
-@testitem "Batch Norm: Group 3" tags=[:normalization] setup=[
-    SharedTestSetup, BatchNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $sz, $act $affine $track_stats" for (T, sz, training, affine, track_stats, act) in TEST_BLOCKS[3]
-            !fp64 && T == Float64 && continue
-            run_batchnorm_testing(generate_fixed_array, T, sz, training,
-                affine, track_stats, act, aType)
-        end
-    end
-end
-
-@testitem "Batch Norm: Group 4" tags=[:normalization] setup=[
-    SharedTestSetup, BatchNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $sz, $act $affine $track_stats" for (T, sz, training, affine, track_stats, act) in TEST_BLOCKS[4]
-            !fp64 && T == Float64 && continue
-            run_batchnorm_testing(generate_fixed_array, T, sz, training,
-                affine, track_stats, act, aType)
-        end
-    end
-end
-
-@testitem "Batch Norm: Group 5" tags=[:normalization] setup=[
-    SharedTestSetup, BatchNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $sz, $act $affine $track_stats" for (T, sz, training, affine, track_stats, act) in TEST_BLOCKS[5]
-            !fp64 && T == Float64 && continue
-            run_batchnorm_testing(generate_fixed_array, T, sz, training,
-                affine, track_stats, act, aType)
-        end
-    end
-end
-
 @testitem "Batch Norm: Mixed Precision" tags=[:normalization] setup=[SharedTestSetup] begin
     @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
         !fp64 && aType == Float64 && continue
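
The `/ 5` → `/ 2` change above (and the identical change in the three files
below) halves the number of test partitions to match the removal of Groups
3-5. A minimal sketch of the partitioning logic, with plain integers standing
in for the real config tuples:

    # 10 stand-in configs: dividing by 2 gives ceil(10 / 2) = 5 configs per
    # block, i.e. 2 blocks, where / 5 previously gave 5 blocks of 2.
    configs = 1:10
    blocks = collect(Iterators.partition(configs, ceil(Int, length(configs) / 2)))
    @assert length(blocks) == 2
    @assert first(blocks) == 1:5
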
32 changes: 1 addition & 31 deletions lib/LuxLib/test/normalization/groupnorm_tests.jl
@@ -85,7 +85,7 @@ const ALL_TEST_CONFIGS = Iterators.product([Float32, Float64],
     (identity, sigmoid_fast, anonact))
 
 const TEST_BLOCKS = collect(Iterators.partition(
-    ALL_TEST_CONFIGS, ceil(Int, length(ALL_TEST_CONFIGS) / 5)))
+    ALL_TEST_CONFIGS, ceil(Int, length(ALL_TEST_CONFIGS) / 2)))
 
 export setup_groupnorm, ALL_TEST_CONFIGS, TEST_BLOCKS, run_groupnorm_testing
 
@@ -110,33 +110,3 @@ end
         end
     end
 end
-
-@testitem "Group Norm: Group 3" tags=[:normalization] setup=[
-    SharedTestSetup, GroupNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $sz, $groups, $affine, $act" for (T, sz, groups, affine, act) in TEST_BLOCKS[3]
-            !fp64 && T == Float64 && continue
-            run_groupnorm_testing(T, sz, groups, affine, act, aType, mode, ongpu)
-        end
-    end
-end
-
-@testitem "Group Norm: Group 4" tags=[:normalization] setup=[
-    SharedTestSetup, GroupNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $sz, $groups, $affine, $act" for (T, sz, groups, affine, act) in TEST_BLOCKS[4]
-            !fp64 && T == Float64 && continue
-            run_groupnorm_testing(T, sz, groups, affine, act, aType, mode, ongpu)
-        end
-    end
-end
-
-@testitem "Group Norm: Group 5" tags=[:normalization] setup=[
-    SharedTestSetup, GroupNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $sz, $groups, $affine, $act" for (T, sz, groups, affine, act) in TEST_BLOCKS[5]
-            !fp64 && T == Float64 && continue
-            run_groupnorm_testing(T, sz, groups, affine, act, aType, mode, ongpu)
-        end
-    end
-end
32 changes: 1 addition & 31 deletions lib/LuxLib/test/normalization/instancenorm_tests.jl
@@ -71,7 +71,7 @@ const ALL_TEST_CONFIGS = Iterators.product(
     (Val(true), Val(false)), (identity, sigmoid_fast, anonact))
 
 const TEST_BLOCKS = collect(Iterators.partition(
-    ALL_TEST_CONFIGS, ceil(Int, length(ALL_TEST_CONFIGS) / 5)))
+    ALL_TEST_CONFIGS, ceil(Int, length(ALL_TEST_CONFIGS) / 2)))
 
 export setup_instancenorm, ALL_TEST_CONFIGS, TEST_BLOCKS, run_instancenorm_testing
 
@@ -96,33 +96,3 @@ end
         end
     end
 end
-
-@testitem "Instance Norm: Group 3" tags=[:normalization] setup=[
-    SharedTestSetup, InstanceNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $sz, $training $act" for (T, sz, training, act) in TEST_BLOCKS[3]
-            !fp64 && T == Float64 && continue
-            run_instancenorm_testing(generate_fixed_array, T, sz, training, act, aType)
-        end
-    end
-end
-
-@testitem "Instance Norm: Group 4" tags=[:normalization] setup=[
-    SharedTestSetup, InstanceNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $sz, $training $act" for (T, sz, training, act) in TEST_BLOCKS[4]
-            !fp64 && T == Float64 && continue
-            run_instancenorm_testing(generate_fixed_array, T, sz, training, act, aType)
-        end
-    end
-end
-
-@testitem "Instance Norm: Group 5" tags=[:normalization] setup=[
-    SharedTestSetup, InstanceNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $sz, $training $act" for (T, sz, training, act) in TEST_BLOCKS[5]
-            !fp64 && T == Float64 && continue
-            run_instancenorm_testing(generate_fixed_array, T, sz, training, act, aType)
-        end
-    end
-end
35 changes: 1 addition & 34 deletions lib/LuxLib/test/normalization/layernorm_tests.jl
@@ -78,7 +78,7 @@ for T in (Float32, Float64),
 end
 
 const TEST_BLOCKS = collect(Iterators.partition(
-    ALL_TEST_CONFIGS, ceil(Int, length(ALL_TEST_CONFIGS) / 5)))
+    ALL_TEST_CONFIGS, ceil(Int, length(ALL_TEST_CONFIGS) / 2)))
 
 export ALL_TEST_CONFIGS, TEST_BLOCKS, run_layernorm_testing
 
@@ -106,39 +106,6 @@ end
     end
 end
 
-@testitem "Layer Norm: Group 3" tags=[:normalization] setup=[
-    SharedTestSetup, LayerNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $x_shape, $act" for (T, x_shape, affine_shape, act) in TEST_BLOCKS[3]
-            !fp64 && T == Float64 && continue
-            run_layernorm_testing(
-                generate_fixed_array, aType, T, x_shape, affine_shape, act, ongpu, mode)
-        end
-    end
-end
-
-@testitem "Layer Norm: Group 4" tags=[:normalization] setup=[
-    SharedTestSetup, LayerNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $x_shape, $act" for (T, x_shape, affine_shape, act) in TEST_BLOCKS[4]
-            !fp64 && T == Float64 && continue
-            run_layernorm_testing(
-                generate_fixed_array, aType, T, x_shape, affine_shape, act, ongpu, mode)
-        end
-    end
-end
-
-@testitem "Layer Norm: Group 5" tags=[:normalization] setup=[
-    SharedTestSetup, LayerNormSetup] begin
-    @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
-        @testset "eltype $T, size $x_shape, $act" for (T, x_shape, affine_shape, act) in TEST_BLOCKS[5]
-            !fp64 && T == Float64 && continue
-            run_layernorm_testing(
-                generate_fixed_array, aType, T, x_shape, affine_shape, act, ongpu, mode)
-        end
-    end
-end
-
 @testitem "Layer Norm: Error Checks" tags=[:normalization] setup=[SharedTestSetup] begin
     @testset "$mode" for (mode, aType, ongpu, fp64) in MODES
         !fp64 && continue
2 changes: 1 addition & 1 deletion lib/LuxTestUtils/src/autodiff.jl
@@ -204,7 +204,7 @@ function test_gradients(f, args...; skip_backends=[], broken_backends=[],
             end
         catch err
             err isa InterruptException && rethrow()
-            Error(:test, local_test_expr, err, Base.current_exceptions(), source)
+            Error(:test_error, local_test_expr, err, Base.current_exceptions(), source)
         end
     end
     Test.record(get_testset(), result)
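
This one-line change is the fix named in the commit title. `Test.Error`'s
printing only renders the captured exception for result kinds it recognizes,
and `:test_error` is the kind that reports "Test threw exception" together
with the exception stack; the old `:test` kind is not handled that way, so CI
showed a failure but swallowed the actual error. A minimal, self-contained
sketch of the same record-an-error pattern (the test-set name and the quoted
expression are invented for illustration; this is not LuxTestUtils's internal
code):

    using Test

    ts = Test.DefaultTestSet("gradient checks")   # hypothetical test set
    result = try
        error("backend failure")                  # stand-in for a failing gradient check
    catch err
        err isa InterruptException && rethrow()
        # Same constructor shape as the patched line above; :test_error makes
        # the exception and backtrace part of the printed report.
        Test.Error(:test_error, :(test_gradients(f, x)), err,
            Base.current_exceptions(), LineNumberNode(@__LINE__, @__FILE__))
    end
    Test.record(ts, result)   # prints "Error During Test ... Test threw exception"
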
21 changes: 9 additions & 12 deletions test/helpers/loss_tests.jl
@@ -152,10 +152,9 @@ end
 
         @test @inferred(Zygote.gradient(celoss, ŷ, y)) isa Any
 
-        # Failure only on CI
-        __f = Base.Fix2(celoss, y)
-        @test_gradients(__f, ŷ; atol=1.0f-3,
-            rtol=1.0f-3, skip_backends=VERSION ≥ v"1.11-" ? [AutoEnzyme()] : [])
+        # XXX: Failure only on CI
+        @test_gradients(Base.Fix2(celoss, y), ŷ; atol=1.0f-3, rtol=1.0f-3)
+        # rtol=1.0f-3, skip_backends=VERSION ≥ v"1.11-" ? [AutoEnzyme()] : [])
     end
 
     @testset "Logit CrossEntropyLoss" begin
@@ -177,10 +176,9 @@ end
 
         @test @inferred(Zygote.gradient(logitceloss, logŷ, y)) isa Any
 
-        # Failure only on CI
-        __f = Base.Fix2(logitceloss, y)
-        @test_gradients(__f, logŷ; atol=1.0f-3,
-            rtol=1.0f-3, skip_backends=VERSION ≥ v"1.11-" ? [AutoEnzyme()] : [])
+        # XXX: Failure only on CI
+        @test_gradients(Base.Fix2(logitceloss, y), logŷ; atol=1.0f-3, rtol=1.0f-3)
+        # rtol=1.0f-3, skip_backends=VERSION ≥ v"1.11-" ? [AutoEnzyme()] : [])
     end
 
     logŷ, y = randn(3) |> aType, rand(3) |> aType
@@ -307,10 +305,9 @@ end
         @jet KLDivergenceLoss()(ŷ, y)
         @test @inferred(Zygote.gradient(KLDivergenceLoss(), ŷ, y)) isa Any
 
-        # Failure only on CI
-        __f = Base.Fix2(KLDivergenceLoss(), y)
-        @test_gradients(__f, ŷ; atol=1.0f-3,
-            rtol=1.0f-3, skip_backends=VERSION ≥ v"1.11-" ? [AutoEnzyme()] : [])
+        # XXX: Failure only on CI
+        @test_gradients(Base.Fix2(KLDivergenceLoss(), y), ŷ; atol=1.0f-3, rtol=1.0f-3)
+        # rtol=1.0f-3, skip_backends=VERSION ≥ v"1.11-" ? [AutoEnzyme()] : [])
     end
 
     @testset "HingeLoss" begin
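
The rewritten calls above also inline `Base.Fix2` instead of binding it to
`__f` first. `Fix2(f, y)` is simply a callable that fixes `f`'s second
argument, so `@test_gradients` differentiates only with respect to `ŷ`. A
self-contained sketch with a toy loss (not Lux's actual `CrossEntropyLoss`):

    # Toy loss standing in for celoss(ŷ, y):
    toyloss(ŷ, y) = sum(abs2, ŷ .- y)

    y = [1.0, 0.0, 1.0]
    ŷ = [0.9, 0.2, 0.8]
    __f = Base.Fix2(toyloss, y)      # __f(x) == toyloss(x, y)
    @assert __f(ŷ) == toyloss(ŷ, y)  # y is captured as a constant, not an argument
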
8 changes: 4 additions & 4 deletions test/layers/recurrent_tests.jl
@@ -43,10 +43,10 @@ end
             @test !hasproperty(ps, :hidden_state)
         end
 
-        # Failure only on CI
-        skip_backends = VERSION ≥ v"1.11-" && act === identity ? [AutoEnzyme()] : []
-        @test_gradients(loss_loop, rnncell, x, ps, st; atol=1.0f-3, rtol=1.0f-3,
-            skip_backends)
+        # XXX: Failure only on CI
+        # skip_backends = VERSION ≥ v"1.11-" && act === identity ? [AutoEnzyme()] : []
+        @test_gradients(loss_loop, rnncell, x, ps, st; atol=1.0f-3, rtol=1.0f-3)
+        # skip_backends)
     end
 end
 
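
The guards commented out in this commit all key on `VERSION ≥ v"1.11-"`. The
trailing `-` makes the literal a lower bound that sorts below every 1.11
build, so the check also matches 1.11 prereleases, not just final releases. A
quick sketch of that ordering:

    # v"1.11-" sorts below all 1.11 versions, including betas and RCs:
    @assert v"1.11.0-beta1" ≥ v"1.11-"
    @assert v"1.11.0" ≥ v"1.11-"
    @assert !(v"1.10.5" ≥ v"1.11-")
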
