
Commit

Merge remote-tracking branch 'thomas/GS_WP' into neuralpint
tlunet committed Dec 28, 2024
2 parents e1999ef + 09a7eb6 commit 4f6bf82
Showing 6 changed files with 44 additions and 12 deletions.
@@ -192,12 +192,7 @@ def _round_step_size(dt, fac, digits):

 def get_new_step_size(self, controller, S, **kwargs):
 """
-Enforce an upper and lower limit to the step size here.
-Be aware that this is only tested when a new step size has been determined. That means if you set an initial
-value for the step size outside of the limits, and you don't do any further step size control, that value will
-go through.
-Also, the final step is adjusted such that we reach Tend as best as possible, which might give step sizes below
-the lower limit set here.
+Round step size here
 Args:
 controller (pySDC.Controller): The controller
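The hunk above belongs to a step-size rounding convergence controller (note _round_step_size(dt, fac, digits) in the hunk header). Purely as a hypothetical sketch of what rounding a step size to a few significant digits can look like, not pySDC's actual implementation:

import math

def round_to_significant_digits(dt, digits=3):
    # hypothetical helper, NOT pySDC's _round_step_size: keep `digits` significant digits of dt
    if dt == 0:
        return dt
    exponent = math.floor(math.log10(abs(dt)))
    return round(dt, -exponent + digits - 1)

print(round_to_significant_digits(0.0123456))  # -> 0.0123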
6 changes: 4 additions & 2 deletions pySDC/implementations/problem_classes/RayleighBenard.py
@@ -131,8 +131,10 @@ def __init__(
 self.Dz = S1 @ Dz
 self.Dzz = S2 @ Dzz

-kappa = (Rayleigh * Prandtl) ** (-1 / 2.0)
-nu = (Rayleigh / Prandtl) ** (-1 / 2.0)
+# compute rescaled Rayleigh number to extract viscosity and thermal diffusivity
+Ra = Rayleigh / (abs(BCs['T_top'] - BCs['T_bottom']) * self.axes[1].L ** 3)
+kappa = (Ra * Prandtl) ** (-1 / 2.0)
+nu = (Ra / Prandtl) ** (-1 / 2.0)

 # construct operators
 L_lhs = {
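As a standalone illustration (not from the commit) of the rescaling above: dT and Lz below stand in for BCs['T_top'] - BCs['T_bottom'] and self.axes[1].L; with |dT| = 1 and Lz = 1 the new expressions reduce to the previous ones.

def diffusivities(Rayleigh, Prandtl, dT, Lz):
    # rescale the user-supplied Rayleigh number by the temperature difference and the cubed domain height
    Ra = Rayleigh / (abs(dT) * Lz**3)
    kappa = (Ra * Prandtl) ** (-1 / 2.0)  # thermal diffusivity
    nu = (Ra / Prandtl) ** (-1 / 2.0)     # kinematic viscosity
    return kappa, nu

# with |dT| = 1 and Lz = 1 this reproduces kappa = (Rayleigh * Prandtl) ** (-1 / 2.0)
print(diffusivities(Rayleigh=2e4, Prandtl=1.0, dT=1.0, Lz=1.0))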
7 changes: 6 additions & 1 deletion pySDC/projects/GPU/analysis_scripts/parallel_scaling.py
@@ -134,6 +134,7 @@ def plot_scaling_test(self, ax, quantity='time', **plotting_params): # pragma:
 timing_step = get_sorted(stats, type='timing_step')

 t_mean = np.mean([me[1] for me in timing_step])
+t_min = np.min([me[1] for me in timing_step][1:])

 if quantity == 'throughput':
 timings[np.prod(procs) / self.tasks_per_node] = experiment.res**self.ndim / t_mean
@@ -147,6 +148,8 @@ def plot_scaling_test(self, ax, quantity='time', **plotting_params): # pragma:
 timings[np.prod(procs) / self.tasks_per_node] = t_mean
 elif quantity == 'time_per_task':
 timings[np.prod(procs)] = t_mean
+elif quantity == 'min_time_per_task':
+timings[np.prod(procs)] = t_min
 else:
 raise NotImplementedError
 except (FileNotFoundError, ValueError):
@@ -167,6 +170,7 @@ def plot_scaling_test(self, ax, quantity='time', **plotting_params): # pragma:
 'throughput_per_task': 'throughput / DoF/s',
 'time': r'$t_\mathrm{step}$ / s',
 'time_per_task': r'$t_\mathrm{step}$ / s',
+'min_time_per_task': r'minimal $t_\mathrm{step}$ / s',
 'efficiency': 'efficiency / DoF/s/task',
 }
 ax.set_ylabel(labels[quantity])
@@ -358,6 +362,7 @@ def plot_scalings(problem, **kwargs): # pragma: no cover
 ('RBC', 'throughput'): {'x': [1 / 10, 64], 'y': [2e4, 2e4 * 640]},
 ('RBC', 'time'): {'x': [1 / 10, 64], 'y': [60, 60 / 640]},
 ('RBC', 'time_per_task'): {'x': [1, 640], 'y': [60, 60 / 640]},
+('RBC', 'min_time_per_task'): {'x': [1, 640], 'y': [60, 60 / 640]},
 ('RBC', 'throughput_per_task'): {'x': [1 / 1, 640], 'y': [2e4, 2e4 * 640]},
 }

@@ -373,7 +378,7 @@ def plot_scalings(problem, **kwargs): # pragma: no cover
 fig.savefig(path, bbox_inches='tight')
 print(f'Saved {path!r}', flush=True)

-for quantity in ['time', 'throughput', 'time_per_task', 'throughput_per_task'][::-1]:
+for quantity in ['time', 'throughput', 'time_per_task', 'throughput_per_task', 'min_time_per_task'][::-1]:
 fig, ax = plt.subplots(figsize=figsize_by_journal('TUHH_thesis', 1, 0.6))
 for config in configs:
 config.plot_scaling_test(ax=ax, quantity=quantity)
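For orientation, a small sketch (made-up numbers, not from the commit) of the new 'min_time_per_task' quantity added above: the mean uses every measured step, while the minimum skips the first entry, presumably to exclude one-off setup cost.

import numpy as np

# (step, seconds) pairs in the shape returned by get_sorted(stats, type='timing_step'); values invented
timing_step = [(0, 2.31), (1, 0.52), (2, 0.49), (3, 0.51)]

t_mean = np.mean([me[1] for me in timing_step])    # averages all steps, including the first
t_min = np.min([me[1] for me in timing_step][1:])  # minimum over all but the first step
print(t_mean, t_min)  # ~0.96 vs. 0.49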
32 changes: 31 additions & 1 deletion pySDC/projects/GPU/analysis_scripts/plot_RBC_matrix.py
@@ -61,6 +61,36 @@ def plot_ultraspherical():
 plt.show()


+def plot_DCT():
+fig, axs = plt.subplots(1, 3, figsize=figsize_by_journal('TUHH_thesis', 1, 0.28), sharey=True)
+
+N = 8
+color = 'black'
+
+x = np.linspace(0, 3, N)
+y = x**3 - 4 * x**2
+axs[0].plot(y, marker='o', color=color)
+
+y_m = np.append(y, y[::-1])
+axs[1].scatter(np.arange(2 * N)[::2], y_m[::2], marker='<', color=color)
+axs[1].scatter(np.arange(2 * N)[1::2], y_m[1::2], marker='>', color=color)
+axs[1].plot(np.arange(2 * N), y_m, color=color)
+
+v = y_m[::2]
+axs[2].plot(np.arange(N), v, color=color, marker='x')
+
+axs[0].set_title('original')
+axs[1].set_title('mirrored')
+axs[2].set_title('periodically reordered')
+
+for ax in axs:
+# ax.set_xlabel(r'$n$')
+ax.set_yticks([])
+fig.savefig('plots/DCT_via_FFT.pdf', bbox_inches='tight', dpi=300)


 if __name__ == '__main__':
 setup_mpl()
-plot_ultraspherical()
+plot_DCT()
+# plot_ultraspherical()
 plt.show()
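For readers of the figure: plot_DCT illustrates the standard way of computing a DCT-II through an FFT of the mirrored, periodically reordered signal (hence the file name DCT_via_FFT.pdf). A minimal standalone sketch (not part of the commit, assuming scipy is available) of the identity behind the three panels, checked against scipy.fft.dct:

import numpy as np
from scipy.fft import dct, fft

N = 8
x = np.linspace(0, 3, N)
y = x**3 - 4 * x**2                     # same test signal as in plot_DCT

v = np.append(y, y[::-1])[::2]          # mirror, then keep every other sample ('periodically reordered')
k = np.arange(N)
X = 2 * np.real(np.exp(-1j * np.pi * k / (2 * N)) * fft(v))  # twiddle the length-N FFT

assert np.allclose(X, dct(y, type=2))   # matches the unnormalized DCT-II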
2 changes: 1 addition & 1 deletion pySDC/projects/GPU/configs/RBC_configs.py
@@ -385,7 +385,7 @@ def get_controller_params(self, *args, **kwargs):
 class RayleighBenard_large(RayleighBenardRegular):
 # res_per_plume = 256
 # vertical_res = 1024
-Ra = 2e7
+Ra = 3.2e8
 relaxation_steps = 5

 def get_description(self, *args, **kwargs):
Expand Down
2 changes: 1 addition & 1 deletion pySDC/projects/Resilience/RBC.py
@@ -14,7 +14,7 @@

 import numpy as np

-PROBLEM_PARAMS = {'Rayleigh': 2e4, 'nx': 256, 'nz': 128, 'max_cached_factorizations': 30}
+PROBLEM_PARAMS = {'Rayleigh': 3.2e5, 'nx': 256, 'nz': 128, 'max_cached_factorizations': 30}


 def u_exact(self, t, u_init=None, t_init=None, recompute=False, _t0=None):
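Side note on the parameter change above and the one in RBC_configs.py: both Rayleigh numbers grow by exactly a factor of 16, which would compensate the new rescaling Ra = Rayleigh / (|dT| * Lz**3) in RayleighBenard.py if |dT| * Lz**3 == 16 (for instance dT = 2 and Lz = 2; these values are an assumption, not stated in the diff).

# quick arithmetic check (not part of the commit); the dT and Lz values are assumptions
assert 3.2e8 / 2e7 == 16    # RBC_configs.py: RayleighBenard_large.Ra
assert 3.2e5 / 2e4 == 16    # Resilience/RBC.py: PROBLEM_PARAMS['Rayleigh']
dT, Lz = 2.0, 2.0           # hypothetical temperature difference and domain height
assert abs(dT) * Lz**3 == 16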
