Ignore more packages in JET tests, lower inf failure limit #2601

Merged 2 commits on Jan 31, 2024

Changes from all commits
2 changes: 1 addition & 1 deletion perf/Manifest.toml
@@ -2,7 +2,7 @@

julia_version = "1.10.0"
manifest_format = "2.0"
project_hash = "f4c48efdb4bd69597601a8d0d01e26ff9df53b02"
project_hash = "3c2a77c695622e90d7845846b4e4c54862493742"

[[deps.ADTypes]]
git-tree-sha1 = "41c37aa88889c171f1300ceac1313c06e891d245"
1 change: 1 addition & 0 deletions perf/Project.toml
@@ -5,6 +5,7 @@ AtmosphericProfilesLibrary = "86bc3604-9858-485a-bdbe-831ec50de11d"
BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
CLIMAParameters = "6eacf6c3-8458-43b9-ae03-caf5306d3d53"
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0"
ClimaAnalysis = "29b5916a-a76c-4e73-9657-3c8fd22e65e6"
ClimaAtmos = "b2c96348-7fb7-4fe0-8da9-78d88439e717"
2 changes: 1 addition & 1 deletion perf/flame.jl
@@ -50,7 +50,7 @@ allocs_limit["flame_perf_target_threaded"] = 1_276_864
allocs_limit["flame_perf_target_callbacks"] = 386_584
allocs_limit["flame_perf_gw"] = 3_268_961_856
allocs_limit["flame_perf_target_prognostic_edmfx_aquaplanet"] = 445_664
allocs_limit["flame_gpu_implicit_barowave_moist"] = 4169216
allocs_limit["flame_gpu_implicit_barowave_moist"] = 4178384
# Ideally, we would like to track all the allocations, but this becomes too
# expensive when there are too many of them. Here, we set the default sample rate to
# 1, but lower it to a smaller value when we expect the job to produce lots of
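The truncated comment above concerns allocation tracking, where recording every allocation is exact but slow. As a point of reference, here is a minimal sketch of that trade-off using the standard library's `Profile.Allocs`; the `workload` function is made up for illustration, and this is not necessarily the exact mechanism flame.jl uses:

```julia
import Profile

# Hypothetical allocation-heavy workload standing in for a real job.
workload() = [zeros(100) for _ in 1:10_000]

# sample_rate = 1 records every allocation: complete, but expensive.
Profile.Allocs.@profile sample_rate = 1.0 workload()
@show length(Profile.Allocs.fetch().allocs)

# For jobs that allocate heavily, record only a fraction of allocations.
Profile.Allocs.clear()
Profile.Allocs.@profile sample_rate = 0.01 workload()
@show length(Profile.Allocs.fetch().allocs)
```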
5 changes: 4 additions & 1 deletion perf/jet.jl
@@ -1,5 +1,6 @@
redirect_stderr(IOContext(stderr, :stacktrace_types_limited => Ref(false)))
import Random
+import HDF5, NCDatasets, CUDA
Random.seed!(1234)
import ClimaAtmos as CA

@@ -15,4 +16,6 @@ import JET

import SciMLBase
SciMLBase.step!(integrator) # Make sure no errors
-JET.@test_opt SciMLBase.step!(integrator)
+JET.@test_opt ignored_modules = (HDF5, CUDA, NCDatasets) SciMLBase.step!(
+    integrator,
+)
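For context on the new keyword: JET drops reports whose offending frame lives in one of the `ignored_modules`, so known type instability inside I/O and GPU packages no longer fails `@test_opt`. A self-contained sketch of the behavior, with a made-up `ToyIO` module standing in for HDF5/NCDatasets/CUDA:

```julia
import JET
using Test

# Hypothetical module whose internals we do not want JET to police.
module ToyIO
# A non-const, untyped global makes the push! below a runtime dispatch inside ToyIO.
buffer = Float64[]
append_value!(x) = (push!(buffer, x); nothing)
end

compute(x) = (ToyIO.append_value!(x); 2x)

# Without `ignored_modules` this would report the dispatch in ToyIO;
# with it, only instabilities outside ToyIO can fail the test.
@testset "ignored_modules sketch" begin
    JET.@test_opt ignored_modules = (ToyIO,) compute(1.0)
end
```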
7 changes: 5 additions & 2 deletions perf/jet_report_nfailures.jl
@@ -1,10 +1,13 @@
# This script assumes that `integrator` is defined.
import JET
-import HDF5
+import HDF5, NCDatasets, CUDA
# Suggested in: https://github.com/aviatesk/JET.jl/issues/455
macro n_failures(ex)
return :(
-let result = JET.@report_opt ignored_modules = (HDF5,) $(ex)
+let result =
+    JET.@report_opt ignored_modules = (HDF5, CUDA, NCDatasets) $(
+        ex
+    )
length(JET.get_reports(result.analyzer, result.result))
end
)
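The macro above turns JET's analysis into a plain failure count instead of a pass/fail assertion. A minimal standalone sketch of the same counting idea, using a made-up `Holder` struct as the source of instability:

```julia
import JET

struct Holder
    x::Any               # untyped field forces runtime dispatch below
end

double(h::Holder) = 2 * h.x

result = JET.@report_opt double(Holder(1))
n = length(JET.get_reports(result.analyzer, result.result))
@show n                  # number of optimization failures JET found (≥ 1 for this unstable call)
```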
9 changes: 6 additions & 3 deletions perf/jet_test_nfailures.jl
@@ -17,11 +17,14 @@ import JET
import SciMLBase
SciMLBase.step!(integrator) # Make sure no errors

-import HDF5
+import HDF5, NCDatasets, CUDA
# Suggested in: https://github.com/aviatesk/JET.jl/issues/455
macro n_failures(ex)
return :(
-let result = JET.@report_opt ignored_modules = (HDF5,) $(ex)
+let result =
+    JET.@report_opt ignored_modules = (HDF5, NCDatasets, CUDA) $(
+        ex
+    )
length(JET.get_reports(result.analyzer, result.result))
end
)
@@ -35,7 +38,7 @@ using Test
# inference. By increasing this counter, we acknowledge that
# we have introduced an inference failure. We hope to drive
# this number down to 0.
-n_allowed_failures = 94
+n_allowed_failures = 42
@show n
@test n ≤ n_allowed_failures
if n < n_allowed_failures
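As the comment in this file explains, the limit works as a ratchet: CI fails if the failure count rises, and the budget should be tightened as failures are fixed. A hedged sketch of that pattern (the numbers and message are illustrative; the tail of the file is cut off in this diff):

```julia
using Test

n = 40                     # e.g. the count produced by the @n_failures macro
n_allowed_failures = 42    # current budget, to be driven down to 0

@test n ≤ n_allowed_failures
if n < n_allowed_failures
    @info "Inference failures dropped below the budget; lower `n_allowed_failures` to $n."
end
```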