Skip to content

Commit

Permalink
weak scaling h_elem 84 dt 50
Browse files Browse the repository at this point in the history
  • Loading branch information
juliasloan25 committed Mar 11, 2024
1 parent 885737d commit f77dfab
Show file tree
Hide file tree
Showing 3 changed files with 58 additions and 73 deletions.
114 changes: 57 additions & 57 deletions .buildkite/gpu/pipeline.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,52 +39,52 @@ steps:

- wait

- group: "CHAP GPU strong scaling"
steps:
# - group: "CHAP GPU strong scaling"
# steps:

- label: "GPU AMIP CHAP - strong scaling - 1 GPU"
key: "gpu_amip_chap"
command:
- >
julia --threads=3 --color=yes --project=experiments/AMIP experiments/AMIP/coupler_driver.jl
--config_file $GPU_CONFIG_PATH/gpu_amip_chap.yml
artifact_paths: "gpu_amip_chap/*"
agents:
slurm_gpus_per_task: 1
slurm_cpus_per_task: 4
slurm_ntasks: 1
slurm_mem: 32G
# - label: "GPU AMIP CHAP - strong scaling - 1 GPU"
# key: "gpu_amip_chap"
# command:
# - >
# julia --threads=3 --color=yes --project=experiments/AMIP experiments/AMIP/coupler_driver.jl
# --config_file $GPU_CONFIG_PATH/gpu_amip_chap.yml
# artifact_paths: "gpu_amip_chap/*"
# agents:
# slurm_gpus_per_task: 1
# slurm_cpus_per_task: 4
# slurm_ntasks: 1
# slurm_mem: 32G

- label: "GPU AMIP CHAP - strong scaling - 2 GPUs"
key: "gpu_amip_chap_2process"
command:
- >
srun --cpu-bind=threads --cpus-per-task=4
julia --threads=3 --color=yes --project=experiments/AMIP experiments/AMIP/coupler_driver.jl
--config_file $GPU_CONFIG_PATH/gpu_amip_chap_2process.yml
artifact_paths: "gpu_amip_chap_2process/*"
agents:
slurm_gpus_per_task: 1
slurm_cpus_per_task: 4
slurm_ntasks: 2
slurm_mem: 32G
# - label: "GPU AMIP CHAP - strong scaling - 2 GPUs"
# key: "gpu_amip_chap_2process"
# command:
# - >
# srun --cpu-bind=threads --cpus-per-task=4
# julia --threads=3 --color=yes --project=experiments/AMIP experiments/AMIP/coupler_driver.jl
# --config_file $GPU_CONFIG_PATH/gpu_amip_chap_2process.yml
# artifact_paths: "gpu_amip_chap_2process/*"
# agents:
# slurm_gpus_per_task: 1
# slurm_cpus_per_task: 4
# slurm_ntasks: 2
# slurm_mem: 32G

- label: "GPU AMIP CHAP - strong scaling - 4 GPUs"
key: "gpu_amip_chap_4process"
command:
- >
srun --cpu-bind=threads --cpus-per-task=4
julia --threads=3 --color=yes --project=experiments/AMIP experiments/AMIP/coupler_driver.jl
--config_file $GPU_CONFIG_PATH/gpu_amip_chap_4process.yml
artifact_paths: "gpu_amip_chap_4process/*"
agents:
slurm_gpus_per_task: 1
slurm_cpus_per_task: 4
slurm_ntasks: 4
slurm_mem: 32G
# - label: "GPU AMIP CHAP - strong scaling - 4 GPUs"
# key: "gpu_amip_chap_4process"
# command:
# - >
# srun --cpu-bind=threads --cpus-per-task=4
# julia --threads=3 --color=yes --project=experiments/AMIP experiments/AMIP/coupler_driver.jl
# --config_file $GPU_CONFIG_PATH/gpu_amip_chap_4process.yml
# artifact_paths: "gpu_amip_chap_4process/*"
# agents:
# slurm_gpus_per_task: 1
# slurm_cpus_per_task: 4
# slurm_ntasks: 4
# slurm_mem: 32G

# - group: "CHAP GPU weak scaling"
# steps:
- group: "CHAP GPU weak scaling"
steps:

# - label: "GPU AMIP CHAP - weak scaling - 1 GPU"
# key: "gpu_amip_chap_ws"
Expand Down Expand Up @@ -115,18 +115,18 @@ steps:
# slurm_time: 8:00:00
# slurm_exclusive:

# - label: "GPU AMIP CHAP - weak scaling - 4 GPUs"
# key: "gpu_amip_chap_ws_4process"
# command:
# - >
# srun --cpu-bind=threads --cpus-per-task=4
# julia --threads=3 --color=yes --project=experiments/AMIP experiments/AMIP/coupler_driver.jl
# --config_file $GPU_CONFIG_PATH/gpu_amip_chap_ws_4process.yml
# artifact_paths: "gpu_amip_chap_ws_4process/*"
# agents:
# slurm_gpus_per_task: 1
# slurm_cpus_per_task: 4
# slurm_ntasks: 4
# slurm_mem: 32G
# slurm_time: 8:00:00
# slurm_exclusive:
- label: "GPU AMIP CHAP - weak scaling - 4 GPUs"
key: "gpu_amip_chap_ws_4process"
command:
- >
srun --cpu-bind=threads --cpus-per-task=4
julia --threads=3 --color=yes --project=experiments/AMIP experiments/AMIP/coupler_driver.jl
--config_file $GPU_CONFIG_PATH/gpu_amip_chap_ws_4process.yml
artifact_paths: "gpu_amip_chap_ws_4process/*"
agents:
slurm_gpus_per_task: 1
slurm_cpus_per_task: 4
slurm_ntasks: 4
slurm_mem: 32G
slurm_time: 8:00:00
slurm_exclusive:
2 changes: 1 addition & 1 deletion config/gpu_configs/gpu_amip_chap_ws_4process.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ dt_save_state_to_disk: "Inf"
dt_save_to_sol: "Inf"
energy_check: false
evolving_ocean: false
h_elem: 86
h_elem: 84
hourly_checkpoint: false
job_id: "gpu_amip_chap_ws_4process"
land_albedo_type: "map_static"
Expand Down
15 changes: 0 additions & 15 deletions src/Regridder.jl
Original file line number Diff line number Diff line change
Expand Up @@ -507,24 +507,9 @@ function update_surface_fractions!(cs::CoupledSimulation)
cs.surface_fractions.ice .= max.(min.(ice_d, FT(1) .- land_s), FT(0))
cs.surface_fractions.ocean .= max.(FT(1) .- (cs.surface_fractions.ice .+ land_s), FT(0))

sf_sum = cs.surface_fractions.ice .+ cs.surface_fractions.land .+ cs.surface_fractions.ocean
if abs(minimum(sf_sum) - FT(1)) > eps(FT) || abs(maximum(sf_sum) - FT(1)) > eps(FT)
@show minimum(FT(1) .- (cs.surface_fractions.ice .+ land_s))
@show maximum(FT(1) .- (cs.surface_fractions.ice .+ land_s))
# @show cs.surface_fractions.ice
# @show cs.surface_fractions.land
# @show cs.surface_fractions.ocean
# @show cs.surface_fractions.ice .+ cs.surface_fractions.land .+ cs.surface_fractions.ocean
# @show minimum(cs.surface_fractions.ice .+ cs.surface_fractions.land .+ cs.surface_fractions.ocean)
# @show maximum(cs.surface_fractions.ice .+ cs.surface_fractions.land .+ cs.surface_fractions.ocean)
end
comms_ctx = axes(land_s).grid.topology.context
ClimaComms.barrier(comms_ctx)

@show minimum(cs.surface_fractions.ice) >= FT(0)
@show minimum(cs.surface_fractions.land) >= FT(0)
@show minimum(cs.surface_fractions.ocean) >= FT(0)

@assert minimum(cs.surface_fractions.ice .+ cs.surface_fractions.land .+ cs.surface_fractions.ocean) ≈ FT(1)
@assert maximum(cs.surface_fractions.ice .+ cs.surface_fractions.land .+ cs.surface_fractions.ocean) ≈ FT(1)

Expand Down

0 comments on commit f77dfab

Please sign in to comment.