diff --git a/Overlays/jurecadc_overlay/b/BullMPI-settings/BullMPI-settings-4.1-CUDA.eb b/Overlays/jurecadc_overlay/b/BullMPI-settings/BullMPI-settings-4.1-CUDA.eb
new file mode 100644
index 0000000000000000000000000000000000000000..5244b176f66f11853447fe9dfc0db14b88dfe0ff
--- /dev/null
+++ b/Overlays/jurecadc_overlay/b/BullMPI-settings/BullMPI-settings-4.1-CUDA.eb
@@ -0,0 +1,58 @@
+easyblock = 'SystemBundle'
+
+name = 'BullMPI-settings'
+version = '4.1'
+versionsuffix = 'CUDA'
+
+homepage = ''
+description = '''This module loads the default BullMPI configuration. It relies on UCX and enables the UCX CUDA
+transports.
+'''
+
+toolchain = SYSTEM
+
+source_urls = []
+
+sources = []
+
+modextravars = {
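+    # Make srun use psmgmt's PMIx plugin ('pspmix') when launching MPI jobs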
+    'SLURM_MPI_TYPE': 'pspmix',
+    'OMPI_MCA_mca_base_component_show_load_errors': '1',
+    'OMPI_MCA_mpi_param_check': '1',
+    'OMPI_MCA_mpi_show_handle_leaks': '1',
+    'OMPI_MCA_mpi_warn_on_fork': '1',
+    # Disable the uct btl for the time being, as recommended in:
+    # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx
+    # Also disable openib, since it is deprecated and superseded by the UCX support in the pml
+    'OMPI_MCA_btl': '^uct,openib',
+    'OMPI_MCA_btl_openib_allow_ib': '1',
+    'OMPI_MCA_bml_r2_show_unreach_errors': '0',
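+    # Use UCX as the point-to-point messaging layer (pml)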
+    'OMPI_MCA_pml': 'ucx',
+    'OMPI_MCA_osc': '^rdma',
+    'OMPI_MCA_opal_abort_print_stack': '1',
+    'OMPI_MCA_opal_set_max_sys_limits': '1',
+    'OMPI_MCA_opal_event_include': 'epoll',
+    'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0',
+    # OMPIO does not seem to work reliably on our system
+    'OMPI_MCA_io': 'romio321',
+}
+
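+# On load: if a UCX-settings module is already loaded, swap it for its CUDA-enabled variant; if none is
+# loaded yet, fall back to the CUDA-enabled default (RC-CUDA)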
+modluafooter = '''
+if mode() == "load" then
+    if isloaded("UCX-settings/RC") then
+        try_load("UCX-settings/RC-CUDA")
+    elseif isloaded("UCX-settings/UD") then
+        try_load("UCX-settings/UD-CUDA")
+    elseif isloaded("UCX-settings/DC") then
+        try_load("UCX-settings/DC-CUDA")
+    elseif not isloaded("UCX-settings") then
+        try_load("UCX-settings/RC-CUDA")
+    end
+end
+'''
+
+moduleclass = 'system'
diff --git a/Overlays/jurecadc_overlay/b/BullMPI-settings/BullMPI-settings-4.1-UCX.eb b/Overlays/jurecadc_overlay/b/BullMPI-settings/BullMPI-settings-4.1-UCX.eb
new file mode 100644
index 0000000000000000000000000000000000000000..4f046d796824962e51c96292f3b480882c729aba
--- /dev/null
+++ b/Overlays/jurecadc_overlay/b/BullMPI-settings/BullMPI-settings-4.1-UCX.eb
@@ -0,0 +1,40 @@
+easyblock = 'SystemBundle'
+
+name = 'BullMPI-settings'
+version = '4.1'
+versionsuffix = 'UCX'
+
+homepage = ''
+description = 'This module loads the default BullMPI configuration. It relies on UCX.'
+
+toolchain = SYSTEM
+
+source_urls = []
+
+sources = []
+
+modextravars = {
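+    # Make srun use psmgmt's PMIx plugin ('pspmix') when launching MPI jobs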
+    'SLURM_MPI_TYPE': 'pspmix',
+    'OMPI_MCA_mca_base_component_show_load_errors': '1',
+    'OMPI_MCA_mpi_param_check': '1',
+    'OMPI_MCA_mpi_show_handle_leaks': '1',
+    'OMPI_MCA_mpi_warn_on_fork': '1',
+    # Disable the uct btl for the time being, as recommended in:
+    # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx
+    # Also disable openib, since it is deprecated and superseded by the UCX support in the pml
+    'OMPI_MCA_btl': '^uct,openib',
+    'OMPI_MCA_btl_openib_allow_ib': '1',
+    'OMPI_MCA_bml_r2_show_unreach_errors': '0',
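+    # Use UCX as the point-to-point messaging layer (pml)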
+    'OMPI_MCA_pml': 'ucx',
+    'OMPI_MCA_osc': '^rdma',
+    'OMPI_MCA_opal_abort_print_stack': '1',
+    'OMPI_MCA_opal_set_max_sys_limits': '1',
+    'OMPI_MCA_opal_event_include': 'epoll',
+    'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0',
+    # OMPIO does not seem to work reliably on our system
+    'OMPI_MCA_io': 'romio321',
+}
+
+moduleclass = 'system'
diff --git a/Overlays/jurecadc_overlay/b/BullMPI/BullMPI-4.1.4-GCC-11.3.0.eb b/Overlays/jurecadc_overlay/b/BullMPI/BullMPI-4.1.4-GCC-11.3.0.eb
new file mode 100644
index 0000000000000000000000000000000000000000..8fadbbf13ce87fc0ce78a10ffda287fca76c53ac
--- /dev/null
+++ b/Overlays/jurecadc_overlay/b/BullMPI/BullMPI-4.1.4-GCC-11.3.0.eb
@@ -0,0 +1,80 @@
+easyblock = 'ConfigureMake'
+
+name = 'BullMPI'
+version = '4.1.4'
+
+homepage = 'https://www.open-mpi.org/'
+description = """BullMPI is an MPI runtime based on OpenMPI with specific optimizations"""
+
+toolchain = {'name': 'GCC', 'version': '11.3.0'}
+toolchainopts = {'pic': True}
+
+sources = [f'{name}_{version}.tar']
+checksums = ['afa4514cc203bb5e2646afea6826429f27b983979e4d8764402087e8a3bc3ed9']
+
+osdependencies = [
+    # needed for --with-verbs
+    ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'),
+]
+
+builddependencies = [
+    ('pkgconf', '1.8.0'),
+    ('Perl', '5.34.1'),
+    ('Autotools', '20220317'),
+]
+
+dependencies = [
+    ('zlib', '1.2.12'),
+    ('hwloc', '2.7.1'),
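+    # 'default' resolves to the site-provided UCX and UCC installations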
+    ('UCX', 'default'),
+    ('CUDA', '11.7', '', SYSTEM),
+    ('libevent', '2.1.12'),
+    ('PMIx', '3.2.3'),  # We rely on this version since it is the newest supported by psmgmt
+    ('UCC', 'default'),
+]
+
+start_dir = f'openmpi-{version}'
+
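+# The BullMPI tarball contains a source RPM, which in turn contains the upstream Open MPI tarball; unpack
+# both to reach the actual sources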
+unpack_options = f'&& rpm2cpio openmpi-bull-gnu-{version}-1.Bull.1.0.src.rpm | cpio -idmv && '
+unpack_options += f'tar zxvf openmpi-{version}.tar.gz'
+
+# If compilation of the op:avx component fails on a CPU without AVX-512 support, remove -march=native from
+# CFLAGS by uncommenting the two lines below. Enabling AVX-512 there is safe even on non-AVX-512 architectures:
+# it is confined to a single file, and whether that code runs is decided at runtime based on the processor ISA
+# preconfigopts = 'CFLAGS="-O2 -ftree-vectorize -fno-math-errno -fPIC" '
+# prebuildopts = preconfigopts
+
+# General OpenMPI options
+configopts = '--without-orte '
+configopts += '--without-psm2 '
+configopts += '--disable-oshmem '
+configopts += '--with-ime=/opt/ddn/ime '
+configopts += '--with-gpfs '
+configopts += '--enable-shared '
+configopts += '--with-hwloc=$EBROOTHWLOC '  # hwloc support
+configopts += '--with-ucx=$EBROOTUCX '
+configopts += '--with-verbs '
+configopts += '--with-cuda=$EBROOTCUDA '
+
+# To enable Slurm integration (site-specific)
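+# ('external' points configure at the PMIx and libevent dependencies listed above rather than the bundled
+# copies; --with-ompi-pmix-rte selects the PMIx-based runtime in place of ORTE, matching --without-orte)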
+configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte'
+
+local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"]
+sanity_check_paths = {
+    'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] +
+             ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] +
+             ["include/%s.h" % x for x in ["mpi-ext", "mpif-config",
+                                           "mpif", "mpi", "mpi_portable_platform"]],
+    'dirs': [],
+}
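+
+# A quick smoke test after installation (exact module names depend on the site's module hierarchy):
+#   module load GCC/11.3.0 BullMPI/4.1.4 BullMPI-settings/UCX
+#   ompi_info | grep -i ucx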
+
+moduleclass = 'mpi'