From 8ef660663e14d06e9e33308fc864496b2f627ec8 Mon Sep 17 00:00:00 2001
From: Avik Pal
Date: Sat, 21 Sep 2024 23:15:56 -0400
Subject: [PATCH] revert: "test: enable enzyme testing for recurrent models"

This reverts commit fe967bdac463e51c70ffb299c5d85d3899a73b6d.
---
 test/layers/recurrent_tests.jl | 32 ++++++++++++++++++++------------
 1 file changed, 20 insertions(+), 12 deletions(-)

diff --git a/test/layers/recurrent_tests.jl b/test/layers/recurrent_tests.jl
index c15e48b94..e249e200f 100644
--- a/test/layers/recurrent_tests.jl
+++ b/test/layers/recurrent_tests.jl
@@ -26,7 +26,7 @@
         @test_throws ErrorException ps.train_state

         @test_gradients(loss_loop_rnncell, ps; atol=1.0f-3, rtol=1.0f-3,
-            soft_fail=[AutoFiniteDiff()])
+            soft_fail=[AutoFiniteDiff()], broken_backends=[AutoEnzyme()])
     end
 end

@@ -84,7 +84,7 @@ end
         end

         @test_gradients(loss_loop_lstmcell, ps; atol=1.0f-3, rtol=1.0f-3,
-            soft_fail=[AutoFiniteDiff()])
+            soft_fail=[AutoFiniteDiff()], broken_backends=[AutoEnzyme()])

         @test_throws ErrorException ps.train_state
         @test_throws ErrorException ps.train_memory
@@ -195,7 +195,7 @@ end
         end

         @test_gradients(loss_loop_grucell, ps; atol=1e-3, rtol=1e-3,
-            soft_fail=[AutoFiniteDiff()])
+            soft_fail=[AutoFiniteDiff()], broken_backends=[AutoEnzyme()])

         @test_throws ErrorException ps.train_state
     end
@@ -282,7 +282,7 @@ end
             end

             @test_gradients(loss_loop_rnn, ps; atol=1e-3, rtol=1e-3,
-                soft_fail=[AutoFiniteDiff()])
+                broken_backends=[AutoEnzyme()], soft_fail=[AutoFiniteDiff()])
         end
     end
 end
@@ -327,11 +327,11 @@ end

             __f = p -> sum(first(rnn(x, p, st)))
             @test_gradients(__f, ps; atol=1e-3, rtol=1e-3,
-                soft_fail=[AutoFiniteDiff()])
+                skip_backends=[AutoEnzyme()], soft_fail=[AutoFiniteDiff()])

             __f = p -> sum(Base.Fix1(sum, abs2), first(rnn_seq(x, p, st)))
             @test_gradients(__f, ps; atol=1e-3, rtol=1e-3,
-                soft_fail=[AutoFiniteDiff()])
+                skip_backends=[AutoEnzyme()], soft_fail=[AutoFiniteDiff()])
         end

         # Batched Time Series without data batches
@@ -362,11 +362,11 @@ end

             __f = p -> sum(first(rnn(x, p, st)))
             @test_gradients(__f, ps; atol=1e-3, rtol=1e-3,
-                soft_fail=[AutoFiniteDiff()])
+                skip_backends=[AutoEnzyme()], soft_fail=[AutoFiniteDiff()])

             __f = p -> sum(Base.Fix1(sum, abs2), first(rnn_seq(x, p, st)))
             @test_gradients(__f, ps; atol=1e-3, rtol=1e-3,
-                soft_fail=[AutoFiniteDiff()])
+                skip_backends=[AutoEnzyme()], soft_fail=[AutoFiniteDiff()])
         end
     end
 end
@@ -415,13 +415,17 @@ end
         @test all(x -> size(x) == (5, 2), y_[1])

         __f = p -> sum(Base.Fix1(sum, abs2), first(bi_rnn(x, p, st)))
-        @test_gradients(__f, ps; atol=1e-3, rtol=1e-3)
+        @test_gradients(__f, ps; atol=1e-3,
+            rtol=1e-3,
+            broken_backends=Sys.isapple() ? [AutoEnzyme()] : [])

         __f = p -> begin
             (y1, y2), st_ = bi_rnn_no_merge(x, p, st)
             return sum(Base.Fix1(sum, abs2), y1) + sum(Base.Fix1(sum, abs2), y2)
         end
-        @test_gradients(__f, ps; atol=1e-3, rtol=1e-3)
+        @test_gradients(__f, ps; atol=1e-3,
+            rtol=1e-3,
+            broken_backends=Sys.isapple() ? [AutoEnzyme()] : [])

         @testset "backward_cell: $_backward_cell" for _backward_cell in (
             RNNCell, LSTMCell, GRUCell)
@@ -449,13 +453,17 @@ end
         @test all(x -> size(x) == (5, 2), y_[1])

         __f = p -> sum(Base.Fix1(sum, abs2), first(bi_rnn(x, p, st)))
-        @test_gradients(__f, ps; atol=1e-3, rtol=1e-3)
+        @test_gradients(__f, ps; atol=1e-3,
+            rtol=1e-3,
+            broken_backends=Sys.isapple() ? [AutoEnzyme()] : [])

         __f = p -> begin
             (y1, y2), st_ = bi_rnn_no_merge(x, p, st)
             return sum(Base.Fix1(sum, abs2), y1) + sum(Base.Fix1(sum, abs2), y2)
         end
-        @test_gradients(__f, ps; atol=1e-3, rtol=1e-3)
+        @test_gradients(__f, ps; atol=1e-3,
+            rtol=1e-3,
+            broken_backends=Sys.isapple() ? [AutoEnzyme()] : [])
         end
     end
 end
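
Note on the keywords this revert toggles, as used by LuxTestUtils's @test_gradients:
broken_backends still runs a backend's gradient check but records the failure as
expected, skip_backends does not run the backend at all, and soft_fail downgrades
tolerance misses for that backend to soft failures. Below is a minimal sketch of the
pattern; `ps` and `loss_fn` are hypothetical stand-ins for the recurrent-cell
closures used in the tests above, not part of this patch.

    using LuxTestUtils: @test_gradients
    using ADTypes: AutoEnzyme, AutoFiniteDiff

    # Hypothetical stand-ins; the real tests close over a recurrent cell,
    # an input `x`, and the layer state `st`.
    ps = (; weight=rand(Float32, 3, 3))
    loss_fn(p) = sum(abs2, p.weight)

    # Cross-check AD backends against finite differences:
    #   broken_backends -> run Enzyme but expect its gradient test to fail
    #   skip_backends   -> (alternative) would not run Enzyme at all
    #   soft_fail       -> FiniteDiff tolerance misses do not hard-fail
    @test_gradients(loss_fn, ps; atol=1.0f-3, rtol=1.0f-3,
        broken_backends=[AutoEnzyme()], soft_fail=[AutoFiniteDiff()])

A plausible reason the stateful-RNN hunks use skip_backends instead of
broken_backends: a backend failure that crashes or hangs the test process has to be
skipped outright, while one that merely produces wrong gradients can stay marked as
broken and keep running.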