From 87e09f9c51f06472f5324c3cb88cdfd773843e4a Mon Sep 17 00:00:00 2001
From: Avik Pal
Date: Sat, 21 Sep 2024 23:23:47 -0400
Subject: [PATCH] test: minor test fixes

---
 test/Project.toml              |  2 +-
 test/contrib/freeze_tests.jl   |  6 ++++--
 test/helpers/compact_tests.jl  |  9 ++-------
 test/layers/basic_tests.jl     |  4 ++--
 test/layers/normalize_tests.jl | 11 ++++++-----
 5 files changed, 15 insertions(+), 17 deletions(-)

diff --git a/test/Project.toml b/test/Project.toml
index 7663a0d3d..998901f64 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -57,7 +57,7 @@ LinearAlgebra = "1.10"
 Logging = "1.10"
 LuxCore = "1.0"
 LuxLib = "1.3"
-LuxTestUtils = "1.2.1"
+LuxTestUtils = "1.3"
 MLDataDevices = "1.1"
 MLUtils = "0.4.3"
 NNlib = "0.9.24"
diff --git a/test/contrib/freeze_tests.jl b/test/contrib/freeze_tests.jl
index aa2eafc1f..fd713a34d 100644
--- a/test/contrib/freeze_tests.jl
+++ b/test/contrib/freeze_tests.jl
@@ -35,7 +35,8 @@
         @jet m(x, ps_c, st)
 
         __f = (x, ps) -> sum(first(m(x, ps, st)))
-        @test_gradients(__f, x, ps_c; atol=1.0f-3, rtol=1.0f-3)
+        @test_gradients(__f, x, ps_c; atol=1.0f-3, rtol=1.0f-3,
+            enzyme_set_runtime_activity=true)
     end
 
     @testset "LuxDL/Lux.jl#427" begin
@@ -84,7 +85,8 @@ end
         @jet fd(x, ps, st)
 
         __f = (x, ps) -> sum(first(fd(x, ps, st)))
-        @test_gradients(__f, x, ps; atol=1.0f-3, rtol=1.0f-3)
+        @test_gradients(__f, x, ps; atol=1.0f-3, rtol=1.0f-3,
+            enzyme_set_runtime_activity=true)
 
         fd = Lux.Experimental.freeze(d, ())
         @test fd === d
diff --git a/test/helpers/compact_tests.jl b/test/helpers/compact_tests.jl
index 13e799d4b..49988960b 100644
--- a/test/helpers/compact_tests.jl
+++ b/test/helpers/compact_tests.jl
@@ -316,7 +316,7 @@
             @compact(W=randn(d_out, d_in), b=zeros(d_out), incr=1) do x
                 y = W * x
                 incr *= 10
-                return act.(y .+ b) .+ incr
+                @return act.(y .+ b) .+ incr
             end
         end
 
@@ -329,12 +329,7 @@
         @test st_new.incr == 10
         _, st_new = model(x, ps, st_new)
         @test st_new.incr == 100
-
-        # By default creates a closure so type cannot be inferred
-        inf_type = Core.Compiler._return_type(
-            model, Tuple{typeof(x), typeof(ps), typeof(st)}).parameters
-        @test inf_type[1] === Any
-        @test inf_type[2] === NamedTuple
+        @test @inferred(model(x, ps, st)) isa Any
 
         function ScaledDense2(; d_in=5, d_out=7, act=relu)
             @compact(W=randn(d_out, d_in), b=zeros(d_out), incr=1) do x
diff --git a/test/layers/basic_tests.jl b/test/layers/basic_tests.jl
index 8909b3ccd..3c4164c03 100644
--- a/test/layers/basic_tests.jl
+++ b/test/layers/basic_tests.jl
@@ -173,7 +173,7 @@ end
 @testitem "Dense StaticArrays" setup=[SharedTestSetup] tags=[:core_layers] begin
     using StaticArrays, Enzyme, ForwardDiff, ComponentArrays
 
-    if LuxTestUtils.ENZYME_TESTING_ENABLED && pkgversion(Enzyme) ≥ v"0.12.36"
+    if LuxTestUtils.ENZYME_TESTING_ENABLED
         N = 8
         d = Lux.Dense(N => N)
         ps = (;
@@ -186,7 +186,7 @@
             ps -> sum(d(x, ps, (;))[1])
         end
         grad1 = ForwardDiff.gradient(fun, ComponentVector(ps))
-        grad2 = Enzyme.gradient(Enzyme.Reverse, fun, ps)
+        grad2 = Enzyme.gradient(Enzyme.Reverse, fun, ps)[1]
         @test maximum(abs, grad1 .- ComponentVector(grad2)) < 1e-6
     end
 end
diff --git a/test/layers/normalize_tests.jl b/test/layers/normalize_tests.jl
index 110ccc9c0..b64c3f05d 100644
--- a/test/layers/normalize_tests.jl
+++ b/test/layers/normalize_tests.jl
@@ -128,7 +128,7 @@ end
        __f = let m = m, x = x, st = st
            ps -> sum(first(m(x, ps, st)))
        end
-       @test_gradients(__f, ps; atol=1.0f-3, rtol=1.0f-3)
+       @test_gradients(__f, ps; atol=1.0f-3, rtol=1.0f-3, enzyme_set_runtime_activity=true)
 
        @testset "affine: $affine" for affine in (true, false)
            m = GroupNorm(2, 2; affine)
@@ -380,11 +380,11 @@ end
        if affine
            __f = (x, ps) -> sum(first(layer(x, ps, st)))
            @test_gradients(__f, x, ps; atol=1.0f-3, rtol=1.0f-3,
-               skip_backends=[AutoFiniteDiff()])
+               skip_backends=[AutoFiniteDiff()], enzyme_set_runtime_activity=true)
        else
            __f = x -> sum(first(layer(x, ps, st)))
            @test_gradients(__f, x; atol=1.0f-3, rtol=1.0f-3,
-               skip_backends=[AutoFiniteDiff()])
+               skip_backends=[AutoFiniteDiff()], enzyme_set_runtime_activity=true)
        end
 
        for act in (sigmoid, tanh)
@@ -399,12 +399,13 @@ end
            if affine
                __f = (x, ps) -> sum(first(layer(x, ps, st)))
                @test_gradients(__f, x, ps; atol=1.0f-3,
-                   rtol=1.0f-3,
+                   rtol=1.0f-3, enzyme_set_runtime_activity=true,
                    skip_backends=[AutoFiniteDiff()])
            else
                __f = x -> sum(first(layer(x, ps, st)))
                @test_gradients(__f, x; atol=1.0f-3, rtol=1.0f-3,
-                   skip_backends=[AutoFiniteDiff()])
+                   skip_backends=[AutoFiniteDiff()],
+                   enzyme_set_runtime_activity=true)
            end
        end
 
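-- 
Notes (commentary, not part of the commit): the patterns this patch switches to are worth spelling out. `Test.@inferred` throws when the return type of a call cannot be inferred, so `@test @inferred(model(x, ps, st)) isa Any` turns an inference regression into a test failure and replaces the fragile introspection through the internal `Core.Compiler._return_type`. The `[1]` appended to `Enzyme.gradient(Enzyme.Reverse, fun, ps)` accounts for Enzyme 0.13+ returning a tuple with one gradient per argument. The `enzyme_set_runtime_activity=true` keyword goes hand in hand with the `LuxTestUtils = "1.3"` compat bump (presumably the release that added it); it makes the Enzyme backend run with runtime activity analysis enabled, which tolerates activity ambiguities such as closures mixing constant and active data, at some performance cost. Finally, `@return` is Lux's documented way to exit a `@compact` block, so that state updates like `incr *= 10` are captured; a plain `return` bypasses that bookkeeping. A minimal standalone sketch of the `@inferred` and `Enzyme.gradient` patterns follows; the `loss` closure, `W`, and `x` are illustrative, not taken from the Lux test suite:

using Test, Enzyme

# Capture the weight matrix in a `let` so the closure is concretely
# typed and the call is inferrable.
loss = let W = [1.0 2.0; 3.0 4.0]
    x -> sum(W * x)
end
x = [0.5, -1.0]

# `@inferred` throws if inference fails, failing the test; otherwise
# it returns the computed value and `isa Any` trivially holds.
@test @inferred(loss(x)) isa Any

# Reverse-mode gradient; in Enzyme 0.13+ `Enzyme.gradient` returns a
# tuple with one entry per argument, hence the trailing `[1]`.
grad = Enzyme.gradient(Enzyme.Reverse, loss, x)[1]
@test grad ≈ [4.0, 6.0]  # gradient of sum(W * x) in x = column sums of W

Writing the closure with `let` rather than referencing a global `W` is what makes the `@inferred` check pass: a captured concrete matrix is part of the closure's type, whereas a non-const global would be inferred as `Any`.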