diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3ea088fd0a6811089514f8039c3b62a84bba11ee..23ea5074d7a13d58add2cb8e684ceff1bbbb0923 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,10 @@
 
 :arrow_left: [Back to main page](./README.md)
 
+-   April 11, 2025: Version 5.6 adds a framework for MPI-parallel I/O, developed by [\@tlunet](https://github.com/tlunet), making it easier to visualize the data obtained with pySDC on HPC machines in software such as ParaView.
+    Also, pySDC is now compatible with the finite element library [Firedrake](https://github.com/firedrakeproject/firedrake) and the geophysical fluid dynamics library
+    [Gusto](https://github.com/firedrakeproject/gusto), thanks to [\@jshipton](https://github.com/jshipton) and [\@brownbaerchen](https://github.com/brownbaerchen).
+    The former allows setting up PDEs with finite element discretizations in pySDC and then solving them in time with SDC and PFASST, while the latter allows setting up a geophysical fluid dynamics problem and then using pySDC with any SDC configuration as a time stepper in Gusto.
 -   June 24, 2024: Major summer cleanup with Version 5.5. [\@tlunet](https://github.com/tlunet) extracted all quadrature-related stuff into his new standalone code 
     [qmat](https://github.com/Parallel-in-Time/qmat), which makes pySDC much more focussed and both parts easier to maintain. 
     [\@lisawim](https://github.com/lisawim) worked a lot on the DAE sweepers (including an MPI-parallel version), while [\@brownbaerchen](https://github.com/brownbaerchen) has fun with GPUs.
diff --git a/CITATION.cff b/CITATION.cff
index b87274cd4161fb15d8558a082931d759bedd62d7..d284ec02e2ae388c201088c6015ff5054728a1ca 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -28,11 +28,15 @@ authors:
     given-names: Jakob
     orcid: https://orcid.org/0000-0001-6280-8396
     affiliation: "Jülich Supercomputing Centre, Forschungszentrum Jülich GmbH, 52425 Jülich, Germany"
+  - family-names: Shipton
+    given-names: Jemma
+    orcid: https://orcid.org/0000-0002-8635-0831
+    affiliation: "Department of Mathematics and Statistics, University of Exeter, Exeter, UK"
 
 
-version: 5.5.2
+version: 5.6
 doi: 10.5281/zenodo.594191
-date-released: 2024-09-23
+date-released: 2025-04-11
 keywords: 
   - "parallel-in-time"
   - "spectral deferred corrections"
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 9a3eb65f0d4a3a8af54f7ccc1c3ef42f91f1151a..53e913c2bcc87f4d24672296d4594b2ae3fc7dea 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -70,9 +70,9 @@ author = 'Robert Speck, Thibaut Lunet, Thomas Baumann, Lisa Wimmer, Ikrom Akramo
 # built documents.
 #
 # The short X.Y version.
-version = '5.5'
+version = '5.6'
 # The full version, including alpha/beta/rc tags.
-release = '5.5.2'
+release = '5.6'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/etc/environment-mpi4py.yml b/etc/environment-mpi4py.yml
index 16b96df6d43e03386e46df5f4b8179ea38c4fa50..dd0556be4e46021e1a160a2ba15fb13bb5ae21ad 100644
--- a/etc/environment-mpi4py.yml
+++ b/etc/environment-mpi4py.yml
@@ -14,3 +14,4 @@ dependencies:
   - pip
   - pip:
     - qmat>=0.1.8
+    - pytest-isolate-mpi
diff --git a/pySDC/core/problem.py b/pySDC/core/problem.py
index cdfabeb897c532abbd0dc0245ad89ceab082d269..55d76b18d9507ae1a63d7bec6565afcc26f6aff6 100644
--- a/pySDC/core/problem.py
+++ b/pySDC/core/problem.py
@@ -80,6 +80,18 @@ class Problem(RegisterParams):
     def get_default_sweeper_class(cls):
         raise NotImplementedError(f'No default sweeper class implemented for {cls} problem!')
 
+    def setUpFieldsIO(self):
+        """
+        Set up FieldsIO for MPI with the space decomposition of this problem
+        """
+        pass
+
+    def getOutputFile(self, fileName):
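+        """
+        Return a `FieldsIO` file object for logging the solution. Needs to be implemented in the problem classes.
+        """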
+        raise NotImplementedError(f'No output file implemented for {type(self).__name__}')
+
+    def processSolutionForOutput(self, u):
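+        """
+        Process the solution before writing it to file, e.g. transform to physical space. Returns `u` unchanged by default.
+        """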
+        return u
+
     def eval_f(self, u, t):
         """
         Abstract interface to RHS computation of the ODE
diff --git a/pySDC/helpers/fieldsIO.py b/pySDC/helpers/fieldsIO.py
index 3c1588af74a014b5aeb5022f775e1b5ef4b36939..2155896773faa393855995f8d9105ca188a537ed 100644
--- a/pySDC/helpers/fieldsIO.py
+++ b/pySDC/helpers/fieldsIO.py
@@ -203,9 +203,10 @@ class FieldsIO:
         assert not self.initialized, "FieldsIO already initialized"
 
         if not self.ALLOW_OVERWRITE:
-            assert not os.path.isfile(
-                self.fileName
-            ), f"file {self.fileName!r} already exists, use FieldsIO.ALLOW_OVERWRITE = True to allow overwriting"
+            if os.path.isfile(self.fileName):
+                raise FileExistsError(
+                    f"file {self.fileName!r} already exists, use FieldsIO.ALLOW_OVERWRITE = True to allow overwriting"
+                )
 
         with open(self.fileName, "w+b") as f:
             self.hBase.tofile(f)
diff --git a/pySDC/helpers/spectral_helper.py b/pySDC/helpers/spectral_helper.py
index d74aab8733f256272f0cb99a5ea6e1fd13fe3a34..a801503a7c53302248bb50a261d1ca6bfc8cca62 100644
--- a/pySDC/helpers/spectral_helper.py
+++ b/pySDC/helpers/spectral_helper.py
@@ -1044,7 +1044,7 @@ class SpectralHelper:
         """
         Remove a BC from the matrix. This is useful e.g. when you add a non-scalar BC and then need to selectively
         remove single BCs again, as in incompressible Navier-Stokes, for instance.
-        Forward arguments for the boundary conditions using `kwargs`. Refer to documentation of 1D bases for details.
+        Forwards arguments for the boundary conditions using `kwargs`. Refer to documentation of 1D bases for details.
 
         Args:
             component (str): Name of the component the BC should act on
@@ -1311,15 +1311,15 @@ class SpectralHelper:
         """
         Get grid in spectral space
         """
-        grids = [self.axes[i].get_wavenumbers()[self.local_slice[i]] for i in range(len(self.axes))][::-1]
-        return self.xp.meshgrid(*grids)
+        grids = [self.axes[i].get_wavenumbers()[self.local_slice[i]] for i in range(len(self.axes))]
+        return self.xp.meshgrid(*grids, indexing='ij')
 
     def get_grid(self):
         """
         Get grid in physical space
         """
-        grids = [self.axes[i].get_1dgrid()[self.local_slice[i]] for i in range(len(self.axes))][::-1]
-        return self.xp.meshgrid(*grids)
+        grids = [self.axes[i].get_1dgrid()[self.local_slice[i]] for i in range(len(self.axes))]
+        return self.xp.meshgrid(*grids, indexing='ij')
 
     def get_fft(self, axes=None, direction='object', padding=None, shape=None):
         """
@@ -1853,6 +1853,26 @@ class SpectralHelper:
         """
         return M.tocsc()[self.local_slice[axis], self.local_slice[axis]]
 
+    def expand_matrix_ND(self, matrix, aligned):
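+        """
+        Expand a 1D matrix along the axis `aligned` to the ND problem by forming Kronecker products with identity
+        matrices on the remaining axes, restricted to the local slices.
+
+        Args:
+            matrix: Sparse 1D matrix
+            aligned (int): Axis this matrix is aligned with
+
+        Returns:
+            Sparse ND matrix
+        """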
+        sp = self.sparse_lib
+        axes = np.delete(np.arange(self.ndim), aligned)
+        ndim = len(axes) + 1
+
+        if ndim == 1:
+            return matrix
+        elif ndim == 2:
+            axis = axes[0]
+            I1D = sp.eye(self.axes[axis].N)
+
+            mats = [None] * ndim
+            mats[aligned] = self.get_local_slice_of_1D_matrix(matrix, aligned)
+            mats[axis] = self.get_local_slice_of_1D_matrix(I1D, axis)
+
+            return sp.kron(*mats)
+
+        else:
+            raise NotImplementedError(f'Matrix expansion not implemented for {ndim} dimensions!')
+
     def get_filter_matrix(self, axis, **kwargs):
         """
         Get bandpass filter along `axis`. See the documentation `get_filter_matrix` in the 1D bases for what kwargs are
@@ -1878,31 +1898,10 @@ class SpectralHelper:
         Returns:
             sparse differentiation matrix
         """
-        sp = self.sparse_lib
-        ndim = self.ndim
-
-        if ndim == 1:
-            D = self.axes[0].get_differentiation_matrix(**kwargs)
-        elif ndim == 2:
-            for axis in axes:
-                axis2 = (axis + 1) % ndim
-                D1D = self.axes[axis].get_differentiation_matrix(**kwargs)
-
-                if len(axes) > 1:
-                    I1D = sp.eye(self.axes[axis2].N)
-                else:
-                    I1D = self.axes[axis2].get_Id()
-
-                mats = [None] * ndim
-                mats[axis] = self.get_local_slice_of_1D_matrix(D1D, axis)
-                mats[axis2] = self.get_local_slice_of_1D_matrix(I1D, axis2)
-
-                if axis == axes[0]:
-                    D = sp.kron(*mats)
-                else:
-                    D = D @ sp.kron(*mats)
-        else:
-            raise NotImplementedError(f'Differentiation matrix not implemented for {ndim} dimension!')
+        D = self.expand_matrix_ND(self.axes[axes[0]].get_differentiation_matrix(**kwargs), axes[0])
+        for axis in axes[1:]:
+            _D = self.axes[axis].get_differentiation_matrix(**kwargs)
+            D = D @ self.expand_matrix_ND(_D, axis)
 
         return D
 
@@ -1916,31 +1915,10 @@ class SpectralHelper:
         Returns:
             sparse integration matrix
         """
-        sp = self.sparse_lib
-        ndim = len(self.axes)
-
-        if ndim == 1:
-            S = self.axes[0].get_integration_matrix()
-        elif ndim == 2:
-            for axis in axes:
-                axis2 = (axis + 1) % ndim
-                S1D = self.axes[axis].get_integration_matrix()
-
-                if len(axes) > 1:
-                    I1D = sp.eye(self.axes[axis2].N)
-                else:
-                    I1D = self.axes[axis2].get_Id()
-
-                mats = [None] * ndim
-                mats[axis] = self.get_local_slice_of_1D_matrix(S1D, axis)
-                mats[axis2] = self.get_local_slice_of_1D_matrix(I1D, axis2)
-
-                if axis == axes[0]:
-                    S = sp.kron(*mats)
-                else:
-                    S = S @ sp.kron(*mats)
-        else:
-            raise NotImplementedError(f'Integration matrix not implemented for {ndim} dimension!')
+        S = self.expand_matrix_ND(self.axes[axes[0]].get_integration_matrix(), axes[0])
+        for axis in axes[1:]:
+            _S = self.axes[axis].get_integration_matrix()
+            S = S @ self.expand_matrix_ND(_S, axis)
 
         return S
 
@@ -1951,27 +1929,10 @@ class SpectralHelper:
         Returns:
             sparse identity matrix
         """
-        sp = self.sparse_lib
-        ndim = self.ndim
-        I = sp.eye(np.prod(self.init[0][1:]), dtype=complex)
-
-        if ndim == 1:
-            I = self.axes[0].get_Id()
-        elif ndim == 2:
-            for axis in range(ndim):
-                axis2 = (axis + 1) % ndim
-                I1D = self.axes[axis].get_Id()
-
-                I1D2 = sp.eye(self.axes[axis2].N)
-
-                mats = [None] * ndim
-                mats[axis] = self.get_local_slice_of_1D_matrix(I1D, axis)
-                mats[axis2] = self.get_local_slice_of_1D_matrix(I1D2, axis2)
-
-                I = I @ sp.kron(*mats)
-        else:
-            raise NotImplementedError(f'Identity matrix not implemented for {ndim} dimension!')
-
+        I = self.expand_matrix_ND(self.axes[0].get_Id(), 0)
+        for axis in range(1, self.ndim):
+            _I = self.axes[axis].get_Id()
+            I = I @ self.expand_matrix_ND(_I, axis)
         return I
 
     def get_Dirichlet_recombination_matrix(self, axis=-1):
@@ -1984,26 +1945,8 @@ class SpectralHelper:
         Returns:
             sparse matrix
         """
-        sp = self.sparse_lib
-        ndim = len(self.axes)
-
-        if ndim == 1:
-            C = self.axes[0].get_Dirichlet_recombination_matrix()
-        elif ndim == 2:
-            axis2 = (axis + 1) % ndim
-            C1D = self.axes[axis].get_Dirichlet_recombination_matrix()
-
-            I1D = self.axes[axis2].get_Id()
-
-            mats = [None] * ndim
-            mats[axis] = self.get_local_slice_of_1D_matrix(C1D, axis)
-            mats[axis2] = self.get_local_slice_of_1D_matrix(I1D, axis2)
-
-            C = sp.kron(*mats)
-        else:
-            raise NotImplementedError(f'Basis change matrix not implemented for {ndim} dimension!')
-
-        return C
+        C1D = self.axes[axis].get_Dirichlet_recombination_matrix()
+        return self.expand_matrix_ND(C1D, axis)
 
     def get_basis_change_matrix(self, axes=None, **kwargs):
         """
@@ -2018,30 +1961,9 @@ class SpectralHelper:
         """
         axes = tuple(-i - 1 for i in range(self.ndim)) if axes is None else axes
 
-        sp = self.sparse_lib
-        ndim = len(self.axes)
-
-        if ndim == 1:
-            C = self.axes[0].get_basis_change_matrix(**kwargs)
-        elif ndim == 2:
-            for axis in axes:
-                axis2 = (axis + 1) % ndim
-                C1D = self.axes[axis].get_basis_change_matrix(**kwargs)
-
-                if len(axes) > 1:
-                    I1D = sp.eye(self.axes[axis2].N)
-                else:
-                    I1D = self.axes[axis2].get_Id()
-
-                mats = [None] * ndim
-                mats[axis] = self.get_local_slice_of_1D_matrix(C1D, axis)
-                mats[axis2] = self.get_local_slice_of_1D_matrix(I1D, axis2)
-
-                if axis == axes[0]:
-                    C = sp.kron(*mats)
-                else:
-                    C = C @ sp.kron(*mats)
-        else:
-            raise NotImplementedError(f'Basis change matrix not implemented for {ndim} dimension!')
+        C = self.expand_matrix_ND(self.axes[axes[0]].get_basis_change_matrix(**kwargs), axes[0])
+        for axis in axes[1:]:
+            _C = self.axes[axis].get_basis_change_matrix(**kwargs)
+            C = C @ self.expand_matrix_ND(_C, axis)
 
         return C
diff --git a/pySDC/implementations/hooks/log_solution.py b/pySDC/implementations/hooks/log_solution.py
index 9cd5ba8e8d5394291be78a85e7f59cf85f1b67c4..e7e4501017aad97ac6ef6a2c17b3aac86137fb73 100644
--- a/pySDC/implementations/hooks/log_solution.py
+++ b/pySDC/implementations/hooks/log_solution.py
@@ -2,6 +2,8 @@ from pySDC.core.hooks import Hooks
 import pickle
 import os
 import numpy as np
+from pySDC.helpers.fieldsIO import FieldsIO
+from pySDC.core.errors import DataError
 
 
 class LogSolution(Hooks):
@@ -68,7 +70,7 @@ class LogSolutionAfterIteration(Hooks):
         )
 
 
-class LogToFile(Hooks):
+class LogToPickleFile(Hooks):
     r"""
     Hook for logging the solution to file after the step using pickle.
 
@@ -171,7 +173,7 @@ class LogToFile(Hooks):
             return pickle.load(file)
 
 
-class LogToFileAfterXs(LogToFile):
+class LogToPickleFileAfterXS(LogToPickleFile):
     r'''
     Log to file after certain amount of time has passed instead of after every step
     '''
@@ -200,3 +202,62 @@ class LogToFileAfterXs(LogToFile):
             }
 
         self.log_to_file(step, level_number, type(self).logging_condition(L), process_solution=process_solution)
+
+
+class LogToFile(Hooks):
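+    r"""
+    Hook for logging the solution to a single binary file using the `FieldsIO` helpers, which support MPI-parallel I/O.
+
+    Configure via the class attributes `filename`, `time_increment` (minimum simulation time between two logged
+    solutions, 0 means every step) and `allow_overwriting` before running. If the file already exists and the run
+    starts at t > 0, new solutions are appended, which allows restarting from a previously logged solution.
+    """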
+    filename = 'myRun.pySDC'
+    time_increment = 0
+    allow_overwriting = False
+
+    def __init__(self):
+        super().__init__()
+        self.outfile = None
+        self.t_next_log = 0
+        FieldsIO.ALLOW_OVERWRITE = self.allow_overwriting
+
+    def pre_run(self, step, level_number):
+        if level_number > 0:
+            return None
+        L = step.levels[level_number]
+
+        # setup outfile
+        if os.path.isfile(self.filename) and L.time > 0:
+            L.prob.setUpFieldsIO()
+            self.outfile = FieldsIO.fromFile(self.filename)
+            self.logger.info(
+                f'Set up file {self.filename!r} for writing output. This file already contains solutions up to t={self.outfile.times[-1]:.4f}.'
+            )
+        else:
+            self.outfile = L.prob.getOutputFile(self.filename)
+            self.logger.info(f'Set up file {self.filename!r} for writing output.')
+
+            # write initial conditions
+            if L.time not in self.outfile.times:
+                self.outfile.addField(time=L.time, field=L.prob.processSolutionForOutput(L.u[0]))
+                self.logger.info(f'Written initial conditions at t={L.time:.4f} to file')
+
+    def post_step(self, step, level_number):
+        if level_number > 0:
+            return None
+
+        L = step.levels[level_number]
+
+        if self.t_next_log == 0:
+            self.t_next_log = L.time + self.time_increment
+
+        if L.time + L.dt >= self.t_next_log and not step.status.restart:
+            value_exists = any(abs(me - (L.time + L.dt)) < np.finfo(float).eps * 1000 for me in self.outfile.times)
+            if value_exists and not self.allow_overwriting:
+                raise DataError(f'Already recorded data for time {L.time + L.dt} in this file!')
+            self.outfile.addField(time=L.time + L.dt, field=L.prob.processSolutionForOutput(L.uend))
+            self.logger.info(f'Written solution at t={L.time+L.dt:.4f} to file')
+            self.t_next_log = max([L.time + L.dt, self.t_next_log]) + self.time_increment
+
+    @classmethod
+    def load(cls, index):
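+        """
+        Load the solution at the given index from the file `cls.filename`.
+
+        Returns:
+            dict: Solution under key 'u' and time under key 't'
+        """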
+        data = {}
+        file = FieldsIO.fromFile(cls.filename)
+        file_entry = file.readField(idx=index)
+        data['u'] = file_entry[1]
+        data['t'] = file_entry[0]
+        return data
diff --git a/pySDC/implementations/problem_classes/Burgers.py b/pySDC/implementations/problem_classes/Burgers.py
index ef957f54d0dc33851b0688a73c1399c95ee6d96c..7ff48e16ad550f3edb7e2c9a1a451cd782246950 100644
--- a/pySDC/implementations/problem_classes/Burgers.py
+++ b/pySDC/implementations/problem_classes/Burgers.py
@@ -188,7 +188,7 @@ class Burgers2D(GenericSpectralLinear):
         components = ['u', 'v', 'ux', 'uz', 'vx', 'vz']
         super().__init__(bases=bases, components=components, spectral_space=False, **kwargs)
 
-        self.Z, self.X = self.get_grid()
+        self.X, self.Z = self.get_grid()
 
         # prepare matrices
         Dx = self.get_differentiation_matrix(axes=(0,))
diff --git a/pySDC/implementations/problem_classes/HeatEquation_Chebychev.py b/pySDC/implementations/problem_classes/HeatEquation_Chebychev.py
index 7a3423ec9792dd242d01494711b911879eecf1b2..718465180479cd9687a58743362d4161b4b4f349 100644
--- a/pySDC/implementations/problem_classes/HeatEquation_Chebychev.py
+++ b/pySDC/implementations/problem_classes/HeatEquation_Chebychev.py
@@ -264,7 +264,7 @@ class Heat2DChebychev(GenericSpectralLinear):
 
         super().__init__(bases, components, Dirichlet_recombination=False, spectral_space=False, **kwargs)
 
-        self.Y, self.X = self.get_grid()
+        self.X, self.Y = self.get_grid()
 
         I = self.get_Id()
         self.Dx = self.get_differentiation_matrix(axes=(0,))
@@ -372,7 +372,7 @@ class Heat2DUltraspherical(GenericSpectralLinear):
 
         super().__init__(bases, components, Dirichlet_recombination=False, spectral_space=False, **kwargs)
 
-        self.Y, self.X = self.get_grid()
+        self.X, self.Y = self.get_grid()
 
         self.Dxx = self.get_differentiation_matrix(axes=(0,), p=2)
         self.Dyy = self.get_differentiation_matrix(axes=(1,), p=2)
diff --git a/pySDC/implementations/problem_classes/RayleighBenard.py b/pySDC/implementations/problem_classes/RayleighBenard.py
index 69a93a3d0fd046d184dc207563d332f00e8dd122..e69a86d4962097738aa67d35cf1417203a248681 100644
--- a/pySDC/implementations/problem_classes/RayleighBenard.py
+++ b/pySDC/implementations/problem_classes/RayleighBenard.py
@@ -109,8 +109,8 @@ class RayleighBenard(GenericSpectralLinear):
         components = ['u', 'v', 'T', 'p']
         super().__init__(bases, components, comm=comm, **kwargs)
 
-        self.Z, self.X = self.get_grid()
-        self.Kz, self.Kx = self.get_wavenumbers()
+        self.X, self.Z = self.get_grid()
+        self.Kx, self.Kz = self.get_wavenumbers()
 
         # construct 2D matrices
         Dzz = self.get_differentiation_matrix(axes=(1,), p=2)
diff --git a/pySDC/implementations/problem_classes/TestEquation_0D.py b/pySDC/implementations/problem_classes/TestEquation_0D.py
index 4276e96e9771d0916c8c687eca751d320981896b..5262b165fd17268c16bfdf9bc0e380917b339385 100644
--- a/pySDC/implementations/problem_classes/TestEquation_0D.py
+++ b/pySDC/implementations/problem_classes/TestEquation_0D.py
@@ -3,6 +3,7 @@ import scipy.sparse as nsp
 
 from pySDC.core.problem import Problem, WorkCounter
 from pySDC.implementations.datatype_classes.mesh import mesh, imex_mesh
+from pySDC.helpers.fieldsIO import Scalar
 
 
 class testequation0d(Problem):
@@ -146,6 +147,15 @@ class testequation0d(Problem):
         me[:] = u_init * self.xp.exp((t - t_init) * self.lambdas)
         return me
 
+    def getOutputFile(self, fileName):
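+        """
+        Set up a `Scalar` FieldsIO output file with one variable per lambda.
+        """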
+        fOut = Scalar(np.complex128, fileName=fileName)
+        fOut.setHeader(self.lambdas.size)
+        fOut.initialize()
+        return fOut
+
+    def processSolutionForOutput(self, u):
+        return u.flatten()
+
 
 class test_equation_IMEX(Problem):
     dtype_f = imex_mesh
diff --git a/pySDC/implementations/problem_classes/generic_spectral.py b/pySDC/implementations/problem_classes/generic_spectral.py
index 4f0fa34266c8de2ad76382b875862c00a66d438f..a8c4bc260beabbe6590d990f7c78b259dc44d164 100644
--- a/pySDC/implementations/problem_classes/generic_spectral.py
+++ b/pySDC/implementations/problem_classes/generic_spectral.py
@@ -2,6 +2,7 @@ from pySDC.core.problem import Problem, WorkCounter
 from pySDC.helpers.spectral_helper import SpectralHelper
 import numpy as np
 from pySDC.core.errors import ParameterError
+from pySDC.helpers.fieldsIO import Rectilinear
 
 
 class GenericSpectralLinear(Problem):
@@ -333,6 +334,30 @@ class GenericSpectralLinear(Problem):
 
             return sol
 
+    def setUpFieldsIO(self):
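+        """
+        Set up the MPI decomposition of `Rectilinear` FieldsIO from the local slices of this problem.
+        """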
+        Rectilinear.setupMPI(
+            comm=self.comm,
+            iLoc=[me.start for me in self.local_slice],
+            nLoc=[me.stop - me.start for me in self.local_slice],
+        )
+
+    def getOutputFile(self, fileName):
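+        """
+        Set up a `Rectilinear` FieldsIO output file with the coordinates of this problem and one variable per component.
+        """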
+        self.setUpFieldsIO()
+
+        coords = [me.get_1dgrid() for me in self.spectral.axes]
+        assert np.allclose([len(me) for me in coords], self.spectral.global_shape[1:])
+
+        fOut = Rectilinear(np.float64, fileName=fileName)
+        fOut.setHeader(nVar=len(self.components), coords=coords)
+        fOut.initialize()
+        return fOut
+
+    def processSolutionForOutput(self, u):
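+        """
+        Transform the solution to physical space if the problem is solved in spectral space and return the real part.
+        """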
+        if self.spectral_space:
+            return np.array(self.itransform(u).real)
+        else:
+            return np.array(u.real)
+
 
 def compute_residual_DAE(self, stage=''):
     """
diff --git a/pySDC/projects/GPU/configs/GS_configs.py b/pySDC/projects/GPU/configs/GS_configs.py
index 5fde8848d938fd5d6f05c716f0df7fef34d4d572..8c590e879623a8d1ac511f819bea2dfce57120a2 100644
--- a/pySDC/projects/GPU/configs/GS_configs.py
+++ b/pySDC/projects/GPU/configs/GS_configs.py
@@ -36,7 +36,7 @@ class GrayScott(Config):
 
     def get_LogToFile(self, ranks=None):
         import numpy as np
-        from pySDC.implementations.hooks.log_solution import LogToFileAfterXs as LogToFile
+        from pySDC.implementations.hooks.log_solution import LogToPickleFileAfterXS as LogToFile
 
         LogToFile.path = f'{self.base_path}/data/'
         LogToFile.file_name = f'{self.get_path(ranks=ranks)}-solution'
diff --git a/pySDC/projects/GPU/configs/RBC_configs.py b/pySDC/projects/GPU/configs/RBC_configs.py
index 28cea340c6c904e8741823bef2a713677ada8d08..10076257d1103eba805fc67ce83880fbff38845f 100644
--- a/pySDC/projects/GPU/configs/RBC_configs.py
+++ b/pySDC/projects/GPU/configs/RBC_configs.py
@@ -31,7 +31,7 @@ class RayleighBenardRegular(Config):
 
     def get_LogToFile(self, ranks=None):
         import numpy as np
-        from pySDC.implementations.hooks.log_solution import LogToFileAfterXs as LogToFile
+        from pySDC.implementations.hooks.log_solution import LogToPickleFileAfterXS as LogToFile
 
         LogToFile.path = f'{self.base_path}/data/'
         LogToFile.file_name = f'{self.get_path(ranks=ranks)}-solution'
diff --git a/pySDC/projects/GPU/tests/test_configs.py b/pySDC/projects/GPU/tests/test_configs.py
index ac25d813ba4d3a38f5ab920e597ad60778bd6542..ab27a2621eb35da752540fd9d579c8e66d06d315 100644
--- a/pySDC/projects/GPU/tests/test_configs.py
+++ b/pySDC/projects/GPU/tests/test_configs.py
@@ -64,7 +64,7 @@ def test_run_experiment(restart_idx=0):
             return desc
 
         def get_LogToFile(self, ranks=None):
-            from pySDC.implementations.hooks.log_solution import LogToFileAfterXs as LogToFile
+            from pySDC.implementations.hooks.log_solution import LogToPickleFileAfterXS as LogToFile
 
             LogToFile.path = './data/'
             LogToFile.file_name = f'{self.get_path(ranks=ranks)}-solution'
diff --git a/pySDC/tests/test_helpers/test_spectral_helper.py b/pySDC/tests/test_helpers/test_spectral_helper.py
index 627632b14b98132b28e1318eaf7465b3dafdada8..3a2f9d2136a3ae86294beffe0f7e7e453a68d222 100644
--- a/pySDC/tests/test_helpers/test_spectral_helper.py
+++ b/pySDC/tests/test_helpers/test_spectral_helper.py
@@ -22,7 +22,7 @@ def test_integration_matrix2D(nx, nz, variant, axes, useMPI=False, **kwargs):
     helper.add_axis(base='cheby', N=nz)
     helper.setup_fft()
 
-    Z, X = helper.get_grid()
+    X, Z = helper.get_grid()
 
     conv = helper.get_basis_change_matrix()
     S = helper.get_integration_matrix(axes=axes)
@@ -68,7 +68,7 @@ def test_differentiation_matrix2D(nx, nz, variant, axes, bx, bz, useMPI=False, *
     helper.add_axis(base=bz, N=nz)
     helper.setup_fft()
 
-    Z, X = helper.get_grid()
+    X, Z = helper.get_grid()
     conv = helper.get_basis_change_matrix()
     D = helper.get_differentiation_matrix(axes)
 
@@ -123,7 +123,7 @@ def test_identity_matrix2D(nx, nz, variant, bx, useMPI=False, **kwargs):
     helper.add_axis(base='cheby', N=nz)
     helper.setup_fft()
 
-    Z, X = helper.get_grid()
+    X, Z = helper.get_grid()
     conv = helper.get_basis_change_matrix()
     I = helper.get_Id()
 
@@ -215,9 +215,9 @@ def _test_transform_dealias(
     u2_hat_expect = helper.u_init_forward
     u_expect = helper.u_init
     u_expect_pad = helper_pad.u_init
-    Kz, Kx = helper.get_wavenumbers()
-    Z, X = helper.get_grid()
-    Z_pad, X_pad = helper_pad.get_grid()
+    Kx, Kz = helper.get_wavenumbers()
+    X, Z = helper.get_grid()
+    X_pad, Z_pad = helper_pad.get_grid()
 
     if axis == -2:
         f = nx // 3
@@ -476,7 +476,7 @@ def test_tau_method2D(variant, nz, nx, bc_val, bc=-1, useMPI=False, plotting=Fal
     helper.add_component(['u'])
     helper.setup_fft()
 
-    Z, X = helper.get_grid()
+    X, Z = helper.get_grid()
     x = X[:, 0]
     z = Z[0, :]
     shape = helper.init[0][1:]
diff --git a/pySDC/tests/test_hooks/test_log_to_file.py b/pySDC/tests/test_hooks/test_log_to_file.py
index d786b2110c4ecc88f1c407e5e432d01aae2b004a..f5eb06fe418039560f3ccf7d55608332ee9280f1 100644
--- a/pySDC/tests/test_hooks/test_log_to_file.py
+++ b/pySDC/tests/test_hooks/test_log_to_file.py
@@ -1,12 +1,22 @@
 import pytest
 
 
-def run(hook, Tend=0):
-    from pySDC.implementations.problem_classes.TestEquation_0D import testequation0d
-    from pySDC.implementations.sweeper_classes.generic_implicit import generic_implicit
+def run(hook, Tend=0, ODE=True, t0=0):
     from pySDC.implementations.controller_classes.controller_nonMPI import controller_nonMPI
+    from pySDC.helpers.fieldsIO import FieldsIO
 
-    level_params = {'dt': 1.0e-1}
+    if ODE:
+        from pySDC.implementations.problem_classes.TestEquation_0D import testequation0d as problem_class
+        from pySDC.implementations.sweeper_classes.generic_implicit import generic_implicit as sweeper_class
+
+        problem_params = {'u0': 1.0}
+    else:
+        from pySDC.implementations.problem_classes.RayleighBenard import RayleighBenard as problem_class
+        from pySDC.implementations.sweeper_classes.imex_1st_order import imex_1st_order as sweeper_class
+
+        problem_params = {'nx': 16, 'nz': 8, 'spectral_space': False}
+
+    level_params = {'dt': 1.0e-2}
 
     sweeper_params = {
         'num_nodes': 1,
@@ -15,71 +25,146 @@ def run(hook, Tend=0):
 
     description = {
         'level_params': level_params,
-        'sweeper_class': generic_implicit,
-        'problem_class': testequation0d,
+        'sweeper_class': sweeper_class,
+        'problem_class': problem_class,
         'sweeper_params': sweeper_params,
-        'problem_params': {},
+        'problem_params': problem_params,
         'step_params': {'maxiter': 1},
     }
 
     controller_params = {
         'hook_class': hook,
-        'logger_level': 30,
+        'logger_level': 15,
     }
     controller = controller_nonMPI(1, controller_params, description)
     if Tend > 0:
         prob = controller.MS[0].levels[0].prob
         u0 = prob.u_exact(0)
+        if t0 > 0:
+            u0[:] = hook.load(-1)['u']
 
-        _, stats = controller.run(u0, 0, Tend)
+        _, stats = controller.run(u0, t0, Tend)
         return u0, stats
 
 
 @pytest.mark.base
-def test_errors():
-    from pySDC.implementations.hooks.log_solution import LogToFile
+def test_errors_pickle():
+    from pySDC.implementations.hooks.log_solution import LogToPickleFile
     import os
 
     with pytest.raises(ValueError):
-        run(LogToFile)
+        run(LogToPickleFile)
 
-    LogToFile.path = os.getcwd()
-    run(LogToFile)
+    LogToPickleFile.path = os.getcwd()
+    run(LogToPickleFile)
 
     path = f'{os.getcwd()}/tmp'
-    LogToFile.path = path
-    run(LogToFile)
+    LogToPickleFile.path = path
+    run(LogToPickleFile)
     os.path.isdir(path)
 
     with pytest.raises(ValueError):
-        LogToFile.path = __file__
-        run(LogToFile)
+        LogToPickleFile.path = __file__
+        run(LogToPickleFile)
+
+
+@pytest.mark.base
+def test_errors_FieldsIO(tmpdir):
+    from pySDC.implementations.hooks.log_solution import LogToFile as hook
+    from pySDC.core.errors import DataError
+    import os
+
+    path = f'{tmpdir}/FieldsIO_test.pySDC'
+    hook.filename = path
+
+    run_kwargs = {'hook': hook, 'Tend': 0.2, 'ODE': True}
+
+    # create file
+    run(**run_kwargs)
+
+    # test that we cannot overwrite if we don't want to
+    hook.allow_overwriting = False
+    with pytest.raises(FileExistsError):
+        run(**run_kwargs)
+
+    # test that we can overwrite if we do want to
+    hook.allow_overwriting = True
+    run(**run_kwargs)
+
+    # test that we cannot add solutions at times that already exist
+    hook.allow_overwriting = False
+    with pytest.raises(DataError):
+        run(**run_kwargs, t0=0.1)
 
 
 @pytest.mark.base
-def test_logging():
-    from pySDC.implementations.hooks.log_solution import LogToFile, LogSolution
+@pytest.mark.parametrize('use_pickle', [True, False])
+def test_logging(tmpdir, use_pickle, ODE=True):
+    from pySDC.implementations.hooks.log_solution import LogToPickleFile, LogSolution, LogToFile
     from pySDC.helpers.stats_helper import get_sorted
     import os
     import pickle
     import numpy as np
 
-    path = f'{os.getcwd()}/tmp'
-    LogToFile.path = path
-    Tend = 2
+    path = tmpdir
+    Tend = 0.2
+
+    if use_pickle:
+        logging_hook = LogToPickleFile
+        LogToPickleFile.path = path
+    else:
+        logging_hook = LogToFile
+        logging_hook.filename = f'{path}/FieldsIO_test.pySDC'
 
-    u0, stats = run([LogToFile, LogSolution], Tend=Tend)
+    u0, stats = run([logging_hook, LogSolution], Tend=Tend, ODE=ODE)
     u = [(0.0, u0)] + get_sorted(stats, type='u')
 
     u_file = []
     for i in range(len(u)):
-        data = LogToFile.load(i)
+        data = logging_hook.load(i)
         u_file += [(data['t'], data['u'])]
 
     for us, uf in zip(u, u_file):
-        assert us[0] == uf[0]
-        assert np.allclose(us[1], uf[1])
+        assert us[0] == uf[0], 'time does not match'
+        assert np.allclose(us[1], uf[1]), 'solution does not match'
+
+
+@pytest.mark.base
+def test_restart(tmpdir, ODE=True):
+    from pySDC.implementations.hooks.log_solution import LogSolution, LogToFile
+    import numpy as np
+
+    Tend = 0.2
+
+    # run the whole thing
+    logging_hook = LogToFile
+    logging_hook.filename = f'{tmpdir}/file.pySDC'
+
+    _, _ = run([logging_hook], Tend=Tend, ODE=ODE)
+
+    u_continuous = []
+    for i in range(20):
+        data = logging_hook.load(i)
+        u_continuous += [(data['t'], data['u'])]
+
+    # run again with a restart in the middle
+    logging_hook.filename = f'{tmpdir}/file2.pySDC'
+    _, _ = run(logging_hook, Tend=0.1, ODE=ODE)
+    _, _ = run(logging_hook, Tend=0.2, t0=0.1, ODE=ODE)
+
+    u_restart = []
+    for i in range(20):
+        data = logging_hook.load(i)
+        u_restart += [(data['t'], data['u'])]
+
+    assert np.allclose([me[0] for me in u_restart], [me[0] for me in u_continuous]), 'Times don\'t match'
+    for u1, u2 in zip(u_restart, u_continuous):
+        assert np.allclose(u1[1], u2[1]), 'solution does not match'
 
 
-if __name__ == '__main__':
-    test_logging()
+@pytest.mark.mpi4py
+@pytest.mark.mpi(ranks=[1, 4])
+def test_loggingMPI(tmpdir, comm, mpi_ranks):
+    # `mpi_ranks` is a pytest fixture required by pytest-isolate-mpi. Do not remove.
+    tmpdir = comm.bcast(tmpdir)
+    test_logging(tmpdir, False, False)
diff --git a/pyproject.toml b/pyproject.toml
index 5ea5109db25086878ff9e878d652b797427ffd77..37ad344377961a4eb0082f71fbfe5dcc18a24c95 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = 'pySDC'
-version = '5.5.2'
+version = '5.6'
 description = 'A Python implementation of spectral deferred correction methods and the likes'
 license = {text = "BSD-2-Clause"}
 readme = 'README.md'
@@ -16,6 +16,7 @@ authors=[
     {name='Ikrom Akramov', email='ikrom.akramov@tuhh.de'},
     {name='Giacomo Rosilho De Souza', email='giacomo.rosilhodesouza@usi.ch'},
     {name='Jakob Fritz', email='j.fritz@fz-juelich.de'},
+    {name='Jemma Shipton', email='j.shipton@exeter.ac.uk'},
     ]