diff --git a/src/pages/dashboard.py b/src/pages/dashboard.py
index 027a8d588145b520e3e760558a3ba0b6404d8b10..8761c1ad1f015680f30ad06bc94bd6b670377a1e 100644
--- a/src/pages/dashboard.py
+++ b/src/pages/dashboard.py
@@ -23,7 +23,8 @@ from time import sleep
 from deployment_settings import (
         KEY,
         UNICORE_BASE, UFTP_BASE,
-        UNICORE_USER, UNICORE_PASSWORD
+        UNICORE_USER, UNICORE_PASSWORD,
+        EURAD_OUTPUT
         )
 from utils import transfer_results_from_HPC, get_filenames
 from mlair.time_series import plot as plot_ml_time_series
@@ -580,7 +581,11 @@ def generate_eurad_scen_output_body(language_id, context, jobnr):
         if not os.path.isfile(infile):
             filenames = transfer_results_from_HPC(jobnr, start_date, iscen, fc_length, DATA_PATH)
             os.symlink(filenames['download_filename'], infile)
-            os.symlink(filenames['base_download_filename'], basefile)
+            # the "standard" scenario (iscen == 0) is itself the base run
+            if iscen == 0:
+                os.symlink(filenames['download_filename'], basefile)
+            else:
+                os.symlink(filenames['base_download_filename'], basefile)
 
         nc_file = EURAD_netCDF(infile)
         timestep_list = nc_file.get_time_stamps()
@@ -898,7 +903,7 @@ def get_my_jobs_from_db(user_id=None, language_id=0):
         convoc_status_dict = dict(convoc_status_from_db)
 
         # if status != 'finished':
-        # use /p/project/cjicg21/schroeder5/Destine_AQ/SCRIPTS/ECFLOW/query_status.bash jobnr
+        # use /p/project/cjicg21/{UNICORE_USER}/Destine_AQ/SCRIPTS/ECFLOW/query_status.bash jobnr
         # to determine the actual status
         # ==> there should be a refresh, whenever this tab is reloaded!
 
@@ -1134,7 +1139,7 @@ def eurad_im_job_run(run_button, region, startdate, forecast_length, user_dict):
     base_url = f"{UNICORE_BASE}JURECA/rest/core"
     credentials = uc_credentials.UsernamePassword(UNICORE_USER, UNICORE_PASSWORD)
     client = uc_client.Client(credentials, base_url)
-    job_description = {'Executable': "/p/project/cjicg21/schroeder5/Destine_AQ/start_destine_demonstrator.sh",
+    job_description = {'Executable': f"/p/project/cjicg21/{UNICORE_USER}/Destine_AQ/start_destine_demonstrator.sh",
                        'Job type': "ON_LOGIN_NODE",
                        'Arguments': [jobnr, "0", startdate, str(forecast_hours)], }
     job = client.new_job(job_description)
@@ -1246,7 +1251,7 @@ def eurad_scen_job_run(run_button, region, startdate, forecast_length, emi_scen,
     base_url = f"{UNICORE_BASE}JURECA/rest/core"
     credentials = uc_credentials.UsernamePassword(UNICORE_USER, UNICORE_PASSWORD)
     client = uc_client.Client(credentials, base_url)
-    job_description = {'Executable': "/p/project/cjicg21/schroeder5/Destine_AQ/start_destine_demonstrator.sh",
+    job_description = {'Executable': f"/p/project/cjicg21/{UNICORE_USER}/Destine_AQ/start_destine_demonstrator.sh",
                        'Job type': "ON_LOGIN_NODE",
                        'Arguments': [jobnr, str(emi_scen), startdate, str(forecast_hours)], }
     job = client.new_job(job_description)
@@ -1305,6 +1310,10 @@ def mlair_plots_download(download_button, job_dict, plot_dict):
     Input("ml_downscaling_data_download", "n_clicks"),
     [State("job-info", "data"),
      State("plot-info", "data")],
+    background=True,
+    running=[
+        (Output("ml_downscaling_data_download", "disabled"), True, False),
+    ],
     prevent_initial_call=True
 )
 def ml_downscaling_output_download(download_button, job_dict, plot_dict):
@@ -1333,14 +1342,14 @@ def ml_downscaling_output_download(download_button, job_dict, plot_dict):
             base_url = f"{UNICORE_BASE}JURECA/rest/core"
             credentials = uc_credentials.UsernamePassword(UNICORE_USER, UNICORE_PASSWORD)
             client = uc_client.Client(credentials, base_url)
-            job_description = {'Executable': f"/p/project/deepacf/intelliaq/schroeder5/downscaling_maelstrom/HPC_batch_scripts/inference_destine_test_{variable}.sh",
+            job_description = {'Executable': f"/p/project/cjicg21/{UNICORE_USER}/downscaling_maelstrom/HPC_batch_scripts/inference_destine_test_{variable}.sh",
                                'Arguments': [ f"{jobnr.upper()}/CTM/", year, month, day ] }
             job = client.new_job(job_description)
             # wait for the job to finish:
             while str(job.status) != 'JobStatus.SUCCESSFUL':
                 sleep(3)
             # transfer the result file
-            base_directory = f"{UNICORE_USER}/{jobnr.upper()}/CTM/{year}{month}/{day}"
+            base_directory = f"{EURAD_OUTPUT}/{jobnr.upper()}/CTM/{year}{month}/{day}"
             transport = uc_client.Transport(credentials)
             scratch = uc_client.Storage(transport, f"{base_url}/storages/SCRATCH")
             dfile = scratch.stat(f"{base_directory}/downscaling_{variable}_{year}{month}{day}00.nc")
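
Note: the background=True / running arguments added to the ml_downscaling_data_download callback turn it into a Dash background callback, which only runs if the Dash app is constructed with a background callback manager. A minimal sketch, assuming a single-process deployment using diskcache (module layout, variable names and cache path are illustrative, not taken from this repository):

    # requires the diskcache package; multi-worker deployments would use CeleryManager instead
    import diskcache
    from dash import Dash, DiskcacheManager

    cache = diskcache.Cache("./callback_cache")   # hypothetical on-disk cache location
    manager = DiskcacheManager(cache)

    app = Dash(__name__, background_callback_manager=manager)
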
diff --git a/utils/utils.py b/utils/utils.py
index 51ac43637bfe2e9dfe0d5c6e036acaf42156baaa..9b56752b32103399328a1cedd20f9f4fc9e2a72a 100644
--- a/utils/utils.py
+++ b/utils/utils.py
@@ -6,7 +6,8 @@ from datetime import date
 
 from deployment_settings import (
         UNICORE_BASE, 
-        UNICORE_USER, UNICORE_PASSWORD
+        UNICORE_USER, UNICORE_PASSWORD,
+        EURAD_OUTPUT
         )
 
 base_url = f"{UNICORE_BASE}JURECA/rest/core"
@@ -44,7 +45,7 @@ def transfer_results_from_HPC(jobnr, model_date, iscen, fc_length, data_path):
 
     # only transfer file if results are not already there (from another jobnr);
     # in the latter case, a link to the already downloaded data is sufficient
-    base_directory = f"{UNICORE_USER}/{jobnr.upper()}/CTM/{yy}{mm}/{dd}"
+    base_directory = f"{EURAD_OUTPUT}/{jobnr.upper()}/CTM/{yy}{mm}/{dd}"
     if not os.path.isfile(str(data_path.joinpath(filenames['download_filename']))):
         transport = uc_client.Transport(credentials)
         scratch = uc_client.Storage(transport, f"{base_url}/storages/SCRATCH")
@@ -52,7 +53,7 @@ def transfer_results_from_HPC(jobnr, model_date, iscen, fc_length, data_path):
         dfile.download(str(data_path.joinpath(filenames['download_filename'])))
     # for emission scenarios: also transfer the base run (if necessary)
     if iscen != 0:
-        base_directory = f"{UNICORE_USER}/B{jobnr.upper()}/CTM/{yy}{mm}/{dd}"
+        base_directory = f"{EURAD_OUTPUT}/B{jobnr.upper()}/CTM/{yy}{mm}/{dd}"
         if not os.path.isfile(str(data_path.joinpath(filenames['base_download_filename']))):
             transport = uc_client.Transport(credentials)
             scratch = uc_client.Storage(transport, f"{base_url}/storages/SCRATCH")
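
Note: both files now import EURAD_OUTPUT from deployment_settings, so that module has to define it alongside the existing UNICORE settings. A minimal sketch with placeholder values only (the real deployment_settings is site-specific and not part of this patch):

    # deployment_settings.py -- placeholder values, for illustration only
    KEY = "change-me"                              # placeholder application secret
    UNICORE_BASE = "https://example.org/UNICORE/"  # placeholder UNICORE REST gateway
    UFTP_BASE = "https://example.org/UFTP/"        # placeholder UFTP endpoint
    UNICORE_USER = "hpcuser"                       # placeholder HPC account name
    UNICORE_PASSWORD = "secret"                    # placeholder credential
    EURAD_OUTPUT = "EURAD_output"                  # new: EURAD output directory prefix on the SCRATCH storage
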