diff --git a/Custom_EasyBlocks/nvhpc.py b/Custom_EasyBlocks/nvhpc.py
index 002c1571593f4ecf81ec5e1cf591da8831ebfdd6..ea6bd75dce5f4e08d71cfb7cd822e596798573cc 100644
--- a/Custom_EasyBlocks/nvhpc.py
+++ b/Custom_EasyBlocks/nvhpc.py
@@ -104,15 +104,13 @@ class EB_NVHPC(PackedBinary):
 
         # EULA for NVHPC must be accepted via --accept-eula-for EasyBuild configuration option,
         # or via 'accept_eula = True' in easyconfig file
-        self.check_accepted_eula(
-            more_info='https://docs.nvidia.com/hpc-sdk/eula/index.html')
+        self.check_accepted_eula(more_info='https://docs.nvidia.com/hpc-sdk/eula/index.html')
 
         default_cuda_version = self.cfg['default_cuda_version']
         if default_cuda_version is None:
             module_cuda_version_full = get_software_version('CUDA')
             if module_cuda_version_full is not None:
-                default_cuda_version = '.'.join(
-                    module_cuda_version_full.split('.')[:2])
+                default_cuda_version = '.'.join(module_cuda_version_full.split('.')[:2])
             else:
                 error_msg = "A default CUDA version is needed for installation of NVHPC. "
                 error_msg += "It can not be determined automatically and needs to be added manually. "
@@ -122,8 +120,7 @@ class EB_NVHPC(PackedBinary):
 
         # Parse default_compute_capability from different sources (CLI has priority)
         ec_default_compute_capability = self.cfg['cuda_compute_capabilities']
-        cfg_default_compute_capability = build_option(
-            'cuda_compute_capabilities')
+        cfg_default_compute_capability = build_option('cuda_compute_capabilities')
         if cfg_default_compute_capability is not None:
             default_compute_capability = cfg_default_compute_capability
         elif ec_default_compute_capability and ec_default_compute_capability is not None:
@@ -138,16 +135,14 @@ class EB_NVHPC(PackedBinary):
         if isinstance(default_compute_capability, list):
             _before_default_compute_capability = default_compute_capability
             default_compute_capability = _before_default_compute_capability[0]
-            warning_msg = "Replaced list of compute capabilities {} ".format(
-                _before_default_compute_capability)
-            warning_msg += "with first element of list {}".format(
-                default_compute_capability)
-            print_warning(warning_msg)
+            if len(_before_default_compute_capability) > 1:
+                warning_msg = "Replaced list of compute capabilities {} ".format(_before_default_compute_capability)
+                warning_msg += "with first element of list: {}".format(default_compute_capability)
+                print_warning(warning_msg)
 
         # Remove dot-divider for CC; error out if it is not a string
         if isinstance(default_compute_capability, str):
-            default_compute_capability = default_compute_capability.replace(
-                '.', '')
+            default_compute_capability = default_compute_capability.replace('.', '')
         else:
             raise EasyBuildError("Unexpected non-string value encountered for compute capability: %s",
                                  default_compute_capability)
@@ -156,24 +151,20 @@ class EB_NVHPC(PackedBinary):
             'NVHPC_INSTALL_DIR': self.installdir,
             'NVHPC_SILENT': 'true',
             'NVHPC_DEFAULT_CUDA': str(default_cuda_version),  # 10.2, 11.0
-            # 70, 80; single value, no list!
-            'NVHPC_STDPAR_CUDACC': str(default_compute_capability),
-        }
-        cmd = "%s ./install" % ' '.join(['%s=%s' %
-                                        x for x in sorted(nvhpc_env_vars.items())])
+            'NVHPC_STDPAR_CUDACC': str(default_compute_capability),  # 70, 80; single value, no list!
+        }
+        cmd = "%s ./install" % ' '.join(['%s=%s' % x for x in sorted(nvhpc_env_vars.items())])
         run_cmd(cmd, log_all=True, simple=True)
 
         # make sure localrc uses GCC in PATH, not always the system GCC, and does not use a system g77 but gfortran
-        install_abs_subdir = os.path.join(
-            self.installdir, self.nvhpc_install_subdir)
+        install_abs_subdir = os.path.join(self.installdir, self.nvhpc_install_subdir)
         compilers_subdir = os.path.join(install_abs_subdir, "compilers")
-        makelocalrc_filename = os.path.join(
-            compilers_subdir, "bin", "makelocalrc")
+        makelocalrc_filename = os.path.join(compilers_subdir, "bin", "makelocalrc")
         for line in fileinput.input(makelocalrc_filename, inplace='1', backup='.orig'):
             line = re.sub(r"^PATH=/", r"#PATH=/", line)
             sys.stdout.write(line)
 
-        cmd = "%s -x %s -g77 /" % (makelocalrc_filename, compilers_subdir)
+        cmd = "%s -x %s -g77 gfortran" % (makelocalrc_filename, compilers_subdir)
         run_cmd(cmd, log_all=True, simple=True)
 
         # If an OS libnuma is NOT found, makelocalrc creates symbolic links to libpgnuma.so
@@ -192,16 +183,14 @@ class EB_NVHPC(PackedBinary):
                           siterc_path, SITERC_LIBRARY_PATH)
 
         # The cuda nvvp tar file has broken permissions
-        adjust_permissions(self.installdir, stat.S_IWUSR,
-                           add=True, onlydirs=True)
+        adjust_permissions(self.installdir, stat.S_IWUSR, add=True, onlydirs=True)
 
     def sanity_check_step(self):
         """Custom sanity check for NVHPC"""
         prefix = self.nvhpc_install_subdir
         compiler_names = ['nvc', 'nvc++', 'nvfortran']
 
-        files = [os.path.join(prefix, 'compilers', 'bin', x)
-                 for x in compiler_names]
+        files = [os.path.join(prefix, 'compilers', 'bin', x) for x in compiler_names]
         if LooseVersion(self.version) < LooseVersion('21.3'):
             files.append(os.path.join(prefix, 'compilers', 'bin', 'siterc'))
 
@@ -211,8 +200,7 @@ class EB_NVHPC(PackedBinary):
                      os.path.join(prefix, 'compilers', 'include'), os.path.join(prefix, 'compilers', 'man')]
         }
         custom_commands = ["%s -v" % compiler for compiler in compiler_names]
-        super(EB_NVHPC, self).sanity_check_step(
-            custom_paths=custom_paths, custom_commands=custom_commands)
+        super(EB_NVHPC, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
 
     def _nvhpc_extended_components(self, dirs, basepath, env_vars_dirs):
         """
@@ -232,8 +220,7 @@ class EB_NVHPC(PackedBinary):
         """Prefix subdirectories in NVHPC install dir considered for environment variables defined in module file."""
         dirs = super(EB_NVHPC, self).make_module_req_guess()
         for key in dirs:
-            dirs[key] = [os.path.join(
-                self.nvhpc_install_subdir, 'compilers', d) for d in dirs[key]]
+            dirs[key] = [os.path.join(self.nvhpc_install_subdir, 'compilers', d) for d in dirs[key]]
 
         # $CPATH should not be defined in module for NVHPC, it causes problems
         # cfr. https://github.com/easybuilders/easybuild-easyblocks/issues/830
@@ -251,91 +238,72 @@ class EB_NVHPC(PackedBinary):
         # NVHPC is shipped with a compiled OpenMPI installation
         # Enable it by setting according environment variables
         if self.cfg['module_nvhpc_own_mpi']:
-            self.nvhpc_mpi_basedir = os.path.join(
-                self.nvhpc_install_subdir, "comm_libs", "mpi")
+            self.nvhpc_mpi_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "mpi")
             env_vars_dirs = {
                 'PATH': 'bin',
                 'CPATH': 'include',
                 'LD_LIBRARY_PATH': 'lib'
             }
-            self._nvhpc_extended_components(
-                dirs, self.nvhpc_mpi_basedir, env_vars_dirs)
+            self._nvhpc_extended_components(dirs, self.nvhpc_mpi_basedir, env_vars_dirs)
         # Math Libraries:
         # NVHPC is shipped with math libraries (in a dedicated folder)
         # Enable them by setting according environment variables
         if self.cfg['module_add_math_libs']:
-            self.nvhpc_math_basedir = os.path.join(
-                self.nvhpc_install_subdir, "math_libs")
+            self.nvhpc_math_basedir = os.path.join(self.nvhpc_install_subdir, "math_libs")
             env_vars_dirs = {
                 'CPATH': 'include',
                 'LD_LIBRARY_PATH': 'lib64'
             }
-            self._nvhpc_extended_components(
-                dirs, self.nvhpc_math_basedir, env_vars_dirs)
+            self._nvhpc_extended_components(dirs, self.nvhpc_math_basedir, env_vars_dirs)
         # GPU Profilers:
         # NVHPC is shipped with NVIDIA's GPU profilers (Nsight Compute/Nsight Systems)
         # Enable them by setting the according environment variables
         if self.cfg['module_add_profilers']:
-            self.nvhpc_profilers_basedir = os.path.join(
-                self.nvhpc_install_subdir, "profilers")
+            self.nvhpc_profilers_basedir = os.path.join(self.nvhpc_install_subdir, "profilers")
             env_vars_dirs = {
                 'PATH': ['Nsight_Compute', 'Nsight_Systems/bin']
             }
-            self._nvhpc_extended_components(
-                dirs, self.nvhpc_profilers_basedir, env_vars_dirs)
+            self._nvhpc_extended_components(dirs, self.nvhpc_profilers_basedir, env_vars_dirs)
         # NCCL:
         # NVHPC is shipped with NCCL
         # Enable it by setting the according environment variables
         if self.cfg['module_add_nccl']:
-            self.nvhpc_nccl_basedir = os.path.join(
-                self.nvhpc_install_subdir, "comm_libs", "nccl")
+            self.nvhpc_nccl_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "nccl")
             env_vars_dirs = {
                 'CPATH': 'include',
                 'LD_LIBRARY_PATH': 'lib'
             }
-            self._nvhpc_extended_components(
-                dirs, self.nvhpc_nccl_basedir, env_vars_dirs)
+            self._nvhpc_extended_components(dirs, self.nvhpc_nccl_basedir, env_vars_dirs)
         # NVSHMEM:
         # NVHPC is shipped with NVSHMEM
         # Enable it by setting the according environment variables
         if self.cfg['module_add_nvshmem']:
-            self.nvhpc_nvshmem_basedir = os.path.join(
-                self.nvhpc_install_subdir, "comm_libs", "nvshmem")
+            self.nvhpc_nvshmem_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "nvshmem")
             env_vars_dirs = {
                 'CPATH': 'include',
                 'LD_LIBRARY_PATH': 'lib'
             }
-            self._nvhpc_extended_components(
-                dirs, self.nvhpc_nvshmem_basedir, env_vars_dirs)
+            self._nvhpc_extended_components(dirs, self.nvhpc_nvshmem_basedir, env_vars_dirs)
         # CUDA:
         # NVHPC is shipped with CUDA (possibly multiple versions)
         # Rather use this CUDA than an external CUDA (via $CUDA_HOME) by setting according environment variables
         if self.cfg['module_add_cuda']:
-            self.nvhpc_cuda_basedir = os.path.join(
-                self.nvhpc_install_subdir, "cuda")
+            self.nvhpc_cuda_basedir = os.path.join(self.nvhpc_install_subdir, "cuda")
             env_vars_dirs = {
                 'PATH': 'bin',
                 'LD_LIBRARY_PATH': 'lib64',
                 'CPATH': 'include'
             }
-            self._nvhpc_extended_components(
-                dirs, self.nvhpc_cuda_basedir, env_vars_dirs)
+            self._nvhpc_extended_components(dirs, self.nvhpc_cuda_basedir, env_vars_dirs)
         return dirs
 
     def make_module_extra(self):
         """Add environment variable for NVHPC location"""
         txt = super(EB_NVHPC, self).make_module_extra()
         txt += self.module_generator.set_environment('NVHPC', self.installdir)
-        # NVHPC 22.7+ requires the variable NVHPC_CUDA_HOME for external CUDA. CUDA_HOME has been deprecated.
         if LooseVersion(self.version) >= LooseVersion('22.7'):
-            # If CUDA is not available then avoid setting NVHPC_CUDA_HOME altogether
-            # and rely on the internal one
-            if not self.cfg['module_add_cuda']:
-                cuda = get_software_root('CUDA')
-                if cuda:
-                    txt += self.module_generator.set_environment(
-                        'NVHPC_CUDA_HOME', cuda)
-                else:
-                    raise EasyBuildError(
-                        "No external CUDA available and opting-out of internal CUDA. Don't know what to do")
+            # NVHPC 22.7+ requires the variable NVHPC_CUDA_HOME for external CUDA. CUDA_HOME has been deprecated.
+            if not self.cfg['module_add_cuda'] and get_software_root('CUDA'):
+                txt += self.module_generator.set_environment('NVHPC_CUDA_HOME', get_software_root('CUDA'))
         return txt
+