From 8ef1271740af52a94c973a943cc622b836fa2a65 Mon Sep 17 00:00:00 2001
From: Christopher Rackauckas
Date: Wed, 31 Jul 2024 19:18:54 -0400
Subject: [PATCH] Fix a bunch of depwarns

---
 test/newton_neural_ode_tests.jl | 8 ++++----
 test/second_order_ode_tests.jl  | 6 +++---
 test/spline_layer_tests.jl      | 4 ++--
 test/stiff_nested_ad_tests.jl   | 2 +-
 test/tensor_product_tests.jl    | 6 +++---
 5 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/test/newton_neural_ode_tests.jl b/test/newton_neural_ode_tests.jl
index 35ec5aab9..62e429237 100644
--- a/test/newton_neural_ode_tests.jl
+++ b/test/newton_neural_ode_tests.jl
@@ -34,10 +34,10 @@
     optprob = Optimization.OptimizationProblem(optf, psd)
     res = Optimization.solve(optprob, NewtonTrustRegion(); maxiters = 100, callback = cb)
-    @test loss_function(res.minimizer) < l1
+    @test loss_function(res.u) < l1
 
     res = Optimization.solve(optprob, OptimizationOptimJL.Optim.KrylovTrustRegion();
         maxiters = 100, callback = cb)
-    @test loss_function(res.minimizer) < l1
+    @test loss_function(res.u) < l1
 
     @info "ROCK2"
     nODE = NeuralODE(NN, tspan, ROCK2(); reltol = 1.0f-4, saveat = [tspan[end]])
@@ -55,8 +55,8 @@
     optprob = Optimization.OptimizationProblem(optfunc, psd)
     res = Optimization.solve(optprob, NewtonTrustRegion(); maxiters = 100, callback = cb)
-    @test loss_function(res.minimizer) < l1
+    @test loss_function(res.u) < l1
 
     res = Optimization.solve(optprob, OptimizationOptimJL.Optim.KrylovTrustRegion();
         maxiters = 100, callback = cb)
-    @test loss_function(res.minimizer) < l1
+    @test loss_function(res.u) < l1
 end
diff --git a/test/second_order_ode_tests.jl b/test/second_order_ode_tests.jl
index 45641dbe3..5078afa01 100644
--- a/test/second_order_ode_tests.jl
+++ b/test/second_order_ode_tests.jl
@@ -39,7 +39,7 @@
         (x, p) -> loss_n_ode(x), Optimization.AutoZygote())
     optprob = Optimization.OptimizationProblem(optfunc, p)
     res = Optimization.solve(optprob, Adam(0.01f0); callback = callback, maxiters = 100)
-    l2 = loss_n_ode(res.minimizer)
+    l2 = loss_n_ode(res.u)
     @test l2 < l1
 
     function predict(p)
@@ -59,7 +59,7 @@
        (x, p) -> loss_n_ode(x), Optimization.AutoZygote())
     optprob = Optimization.OptimizationProblem(optfunc, p)
     res = Optimization.solve(optprob, Adam(0.01f0); callback = callback, maxiters = 100)
-    l2 = loss_n_ode(res.minimizer)
+    l2 = loss_n_ode(res.u)
     @test l2 < l1
 
     function predict(p)
@@ -79,6 +79,6 @@
        (x, p) -> loss_n_ode(x), Optimization.AutoZygote())
     optprob = Optimization.OptimizationProblem(optfunc, p)
     res = Optimization.solve(optprob, Adam(0.01f0); callback = callback, maxiters = 100)
-    l2 = loss_n_ode(res.minimizer)
+    l2 = loss_n_ode(res.u)
     @test l2 < l1
 end
diff --git a/test/spline_layer_tests.jl b/test/spline_layer_tests.jl
index f8140f984..9883d3fb9 100644
--- a/test/spline_layer_tests.jl
+++ b/test/spline_layer_tests.jl
@@ -26,9 +26,9 @@
     optprob = Optimization.OptimizationProblem(optfunc, ps)
     res = Optimization.solve(optprob, Adam(0.1); callback = callback, maxiters = 100)
 
-    optprob = Optimization.OptimizationProblem(optfunc, res.minimizer)
+    optprob = Optimization.OptimizationProblem(optfunc, res.u)
     res = Optimization.solve(optprob, Adam(0.1); callback = callback, maxiters = 100)
-    opt = res.minimizer
+    opt = res.u
 
     data_validate_vals = rand(100)
     data_validate_fn = f.(data_validate_vals)
diff --git a/test/stiff_nested_ad_tests.jl b/test/stiff_nested_ad_tests.jl
index 4742936f4..697300563 100644
--- a/test/stiff_nested_ad_tests.jl
+++ b/test/stiff_nested_ad_tests.jl
@@ -39,7 +39,7 @@
         optprob = Optimization.OptimizationProblem(optfunc, ps)
         res = Optimization.solve(
             optprob, Adam(0.1); callback = callback(solver), maxiters = 100)
-        loss2 = loss_n_ode(lux_model, res.minimizer)
+        loss2 = loss_n_ode(lux_model, res.u)
         @test loss2 < loss1
     end
 end
diff --git a/test/tensor_product_tests.jl b/test/tensor_product_tests.jl
index fa96bb2d8..78bbf909f 100644
--- a/test/tensor_product_tests.jl
+++ b/test/tensor_product_tests.jl
@@ -26,12 +26,12 @@
     optprob = Optimization.OptimizationProblem(optfunc, ps)
     res = Optimization.solve(
         optprob, OptimizationOptimisers.Adam(0.1); callback = cb, maxiters = 100)
-    optprob = Optimization.OptimizationProblem(optfunc, res.minimizer)
+    optprob = Optimization.OptimizationProblem(optfunc, res.u)
     res = Optimization.solve(
         optprob, OptimizationOptimisers.Adam(0.01); callback = cb, maxiters = 100)
-    optprob = Optimization.OptimizationProblem(optfunc, res.minimizer)
+    optprob = Optimization.OptimizationProblem(optfunc, res.u)
     res = Optimization.solve(optprob, BFGS(); callback = cb, maxiters = 200)
-    opt = res.minimizer
+    opt = res.u
 
     data_validate_vals = [rand(N) for k in 1:100]
     data_validate_fn = f.(data_validate_vals)