Skip to content
Snippets Groups Projects
Unverified Commit 251b5ed7 authored by Thomas Baumann's avatar Thomas Baumann Committed by GitHub
Browse files

Many small things... (#523)

parent 4a098adb
No related branches found
No related tags found
No related merge requests found
......@@ -42,6 +42,7 @@ def figsize_by_journal(journal, scale, ratio): # pragma: no cover
textwidths = {
'JSC_beamer': 426.79135,
'Springer_Numerical_Algorithms': 338.58778,
'Springer_proceedings': 347.12354,
'JSC_thesis': 434.26027,
'TUHH_thesis': 426.79135,
}
......@@ -50,6 +51,7 @@ def figsize_by_journal(journal, scale, ratio): # pragma: no cover
'JSC_beamer': 214.43411,
'JSC_thesis': 635.5,
'TUHH_thesis': 631.65118,
'Springer_proceedings': 549.13828,
}
assert (
journal in textwidths.keys()
......
......@@ -192,12 +192,7 @@ class StepSizeRounding(ConvergenceController):
def get_new_step_size(self, controller, S, **kwargs):
"""
Enforce an upper and lower limit to the step size here.
Be aware that this is only tested when a new step size has been determined. That means if you set an initial
value for the step size outside of the limits, and you don't do any further step size control, that value will
go through.
Also, the final step is adjusted such that we reach Tend as best as possible, which might give step sizes below
the lower limit set here.
Round step size here
Args:
controller (pySDC.Controller): The controller
......
......@@ -122,7 +122,7 @@ class LogToFile(Hooks):
)
if not os.path.isdir(self.path):
os.mkdir(self.path)
os.makedirs(self.path, exist_ok=True)
def log_to_file(self, step, level_number, condition, process_solution=None):
if level_number > 0:
......@@ -188,3 +188,15 @@ class LogToFileAfterXs(LogToFile):
if L.time + L.dt >= self.t_next_log and not step.status.restart:
super().post_step(step, level_number)
self.t_next_log = max([L.time + L.dt, self.t_next_log]) + self.time_increment
def pre_run(self, step, level_number):
    """
    Log the initial condition before the run starts.

    The solution at the first collocation node is copied to ``uend`` so the
    regular logging machinery can pick it up, and the logged record is
    augmented with the current simulation time under the key ``'t'``.

    Args:
        step (pySDC.Step.step): The current step
        level_number (int): The index of the level

    Returns:
        None
    """
    lvl = step.levels[level_number]
    lvl.uend = lvl.u[0]

    # Wrap the class' solution processing so the record also carries a time stamp.
    def _with_time(L):
        record = type(self).process_solution(L)
        return {**record, 't': L.time}

    self.log_to_file(step, level_number, type(self).logging_condition(lvl), process_solution=_with_time)
......@@ -9,12 +9,14 @@ def generate_directories():
'''
import os
for name in ['jobscripts', 'slurm-out']:
for name in ['jobscripts', 'slurm-out', 'nsys_profiles']:
path = f'{PROJECT_PATH}/etc/{name}'
os.makedirs(path, exist_ok=True)
def get_jobscript_text(
    sbatch_options, srun_options, command, cluster, name='Coffeebreak', nsys_profiling=False, OMP_NUM_THREADS=1
):
    """
    Generate the text for a jobscript

    Args:
        sbatch_options (list): List of options for sbatch
        srun_options (list): Options for the srun command
        command (str): python (!) command. Will be prefaced by `python <path>/`
        cluster (str): Name of the cluster you want to run on
        name (str): Jobname
        nsys_profiling (bool): Whether to generate an NSIGHT Systems profile
        OMP_NUM_THREADS (int): Number of OpenMP threads exported in the script

    Returns:
        str: Content of jobscript
    """
    # SBATCH header: defaults first, then caller-supplied options
    header = '#!/usr/bin/bash\n\n'
    header += f'#SBATCH -J {name}\n'
    for option in DEFAULT_SBATCH_OPTIONS + sbatch_options:
        header += f'#SBATCH {option}\n'

    # environment setup: thread count and the cluster-specific virtual environment
    environment = f'\nexport OMP_NUM_THREADS={OMP_NUM_THREADS}\n'
    environment += f'\nsource {PROJECT_PATH}/etc/venv_{cluster.lower()}/activate.sh\n'

    srun_cmd = 'srun' + ''.join(f' {option}' for option in DEFAULT_SRUN_OPTIONS + srun_options)

    if nsys_profiling:
        # one profile per rank in etc/nsys_profiles; the %q{...} placeholders are expanded by srun
        srun_cmd += (
            f' nsys profile --trace=mpi,ucx,cuda,nvtx'
            f' --output={PROJECT_PATH}/etc/nsys_profiles/{name}.%q{{SLURM_PROCID}}_%q{{SLURM_NTASKS}}'
            f' --force-overwrite true'
        )

    return header + environment + f'\n{srun_cmd} python {PROJECT_PATH}/{command}'
def write_jobscript(sbatch_options, srun_options, command, cluster, submit=True):
def write_jobscript(sbatch_options, srun_options, command, cluster, submit=True, **kwargs):
"""
Generate a jobscript.
......@@ -54,11 +64,12 @@ def write_jobscript(sbatch_options, srun_options, command, cluster, submit=True)
"""
generate_directories()
text = get_jobscript_text(sbatch_options, srun_options, command, cluster)
text = get_jobscript_text(sbatch_options, srun_options, command, cluster, **kwargs)
path = f'{PROJECT_PATH}/etc/jobscripts/{command.replace(" ", "").replace("/", "_")}-{cluster}.sh'
with open(path, 'w') as file:
file.write(text)
print(f'Written jobscript {path!r}')
if submit:
import os
......
......@@ -11,3 +11,4 @@ module load FFTW
module load mpi4py
module load FFmpeg/.6.0
module load SciPy-Stack
module load texlive
......@@ -10,4 +10,5 @@ module load Python
module load FFTW
module load mpi4py
module load FFmpeg/.6.0
module load SciPy-Stack
# module load SciPy-Stack
module load texlive
......@@ -16,4 +16,3 @@ FFTW_LIBRARY_DIR="/p/software/jusuf/stages/2024/software/FFTW/3.3.10-GCC-12.3.0/
python3 -m pip install -e /p/project1/ccstma/baumann7/qmat
python3 -m pip install -r "${ABSOLUTE_PATH}"/requirements.txt
python3 -m pip install -e /p/project1/ccstma/baumann7/pySDC/
......@@ -26,6 +26,10 @@ class efficient_sweeper:
if self.params.initial_guess == 'spread':
L.u[m] = P.dtype_u(L.u[0])
L.f[m] = P.eval_f(L.u[m], L.time + L.dt * self.coll.nodes[m - 1])
elif self.params.initial_guess == 'copy':
L.f[0] = P.eval_f(L.u[0], L.time)
L.u[m] = P.dtype_u(L.u[0])
L.f[m] = P.dtype_f(L.f[0])
# start with zero everywhere
elif self.params.initial_guess == 'zero':
L.u[m] = P.dtype_u(init=P.init, val=0.0)
......
......@@ -6,3 +6,4 @@ module load FFTW
module load Python/3.11.3
module load mpi4py
module load FFmpeg/.6.0
module load texlive
......@@ -80,6 +80,11 @@ def test_step_size_slope_limiter():
limiter.get_new_step_size(controller, S)
assert L.status.dt_new == 1
L.params.dt = 1
L.status.dt_new = 1 - 1e-1
limiter.get_new_step_size(controller, S)
assert L.status.dt_new == 1
@pytest.mark.base
def test_step_size_limiter():
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment