Skip to content

Commit

Permalink
Implemented node limit in scaling
Browse files Browse the repository at this point in the history
# WARNING: the FLAKEHEAVEN lint check failed for this commit.
  • Loading branch information
Thomas Baumann committed Oct 17, 2024
1 parent 8aeefd5 commit f61d328
Showing 1 changed file with 8 additions and 0 deletions.
8 changes: 8 additions & 0 deletions pySDC/projects/GPU/analysis_scripts/parallel_scaling.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ class ScalingConfig(object):
# Number of scan steps (resolutions) for strong / weak space scaling;
# None means "not set here" — concrete values are supplied by subclasses.
max_steps_space = None
max_steps_space_weak = None
# Extra options passed through to the generated sbatch submission.
# NOTE(review): class-level mutable default — shared across all instances
# unless subclasses/instances rebind it; confirm that is intended.
sbatch_options = []
# Hard cap on nodes a single scaling run may request. Effectively
# unlimited here; subclasses override with real cluster limits.
max_nodes = 9999

def __init__(self, space_time_parallel):
if space_time_parallel in ['False', False]:
Expand All @@ -44,6 +45,10 @@ def run_scaling_test(self, strong=True):
for i in range(max_steps):
res, procs = self.get_resolution_and_tasks(strong, i)

# Skip (stop the scan) once a configuration would exceed the cluster's
# node limit — np.prod(procs) is the total MPI task count requested.
# NOTE(review): floor division undercounts nodes when the task count is
# not a multiple of tasks_per_node (a partially filled node must still be
# allocated); ceil division may be intended — TODO confirm.
_nodes = np.prod(procs) // self.tasks_per_node
if _nodes > self.max_nodes:
break

sbatch_options = [
f'-n {np.prod(procs)}',
f'-p {self.partition}',
Expand Down Expand Up @@ -101,13 +106,15 @@ class CPUConfig(ScalingConfig):
# JUSUF cluster, CPU 'batch' partition, 16 MPI tasks per node.
cluster = 'jusuf'
partition = 'batch'
tasks_per_node = 16
# Node cap for scaling runs — presumably the size of the batch
# partition; verify against the cluster documentation.
max_nodes = 144


class GPUConfig(ScalingConfig):
    """Scaling-test configuration for the 'booster' GPU cluster/partition."""

    cluster = 'booster'
    partition = 'booster'
    # MPI tasks per node — presumably one task per GPU with 4 GPUs per
    # node; confirm against the machine description.
    tasks_per_node = 4
    # Run the problem on GPUs rather than CPUs.
    useGPU = True
    # Node cap for scaling runs — presumably the partition size; verify
    # against the cluster documentation.
    max_nodes = 936


class GrayScottSpaceScalingCPU(CPUConfig, ScalingConfig):
Expand All @@ -127,6 +134,7 @@ class GrayScottSpaceScalingGPU(GPUConfig, ScalingConfig):
# Number of resolutions scanned in strong / weak space scaling.
max_steps_space = 7
max_steps_space_weak = 5
# Degree of parallelism in the time direction — presumably the number of
# tasks assigned to time-parallel SDC; confirm against ScalingConfig.
tasks_time = 4
# Problem-specific node cap, tighter than the generic GPUConfig limit.
max_nodes = 64


def plot_scalings(strong, problem, kwargs): # pragma: no cover
Expand Down

0 comments on commit f61d328

Please sign in to comment.