diff --git a/src/eurad_plot.py b/src/eurad_plot.py
index f4ea0f2c842dbd1ab7a94eb41a352137e5306e4b..9cae7a70b053e7b74049bf7080387eb5145d23b6 100644
--- a/src/eurad_plot.py
+++ b/src/eurad_plot.py
@@ -1,10 +1,32 @@
 import os
-from eurad.info import generate_plot, generate_time_series_plot
+import eurad.eurad_vis as ev
+from eurad.eurad_netcdf import EURAD_netCDF
+from eurad.info import generate_time_series_plot
 # just to make it run
 from pathlib import Path
 
 
-def get_euradim_plot(jobnr, timestep, species, station):
+def get_euradim_plot(language_id, jobnr, timestep, species, station, ldiff, ltarget):
+    # regarding target_plots: value can be given by user (later)
+    #                         for now use default values
+    # https://www.umweltbundesamt.de/themen/luft/luftschadstoffe-im-ueberblick/ozon
+    # chapter "Schwellen- und Zielwerte"
+    # https://www.umweltbundesamt.de/themen/luft/luftschadstoffe-im-ueberblick/stickstoffoxide
+    # chapter "Grenzwerte"
+    # https://www.umweltbundesamt.de/themen/luft/luftschadstoffe-im-ueberblick/feinstaub
+    # chapter "Grenzwerte"
+    # the following units should be "µg/m3"
+    t_values = { "O3": 120,
+                 "NO": 200,
+                 "NO2": 200,
+                 "PM2.5": 25 }
+
+    # for the demonstrator, all plots should be done in "ug/m**3"
+    unit="ug/m**3"
+
+    lang = "en"
+    if language_id == 1:
+        lang = "de"
 
     # just to make it run
     APP_HOME = Path.cwd()
@@ -16,20 +38,37 @@ def get_euradim_plot(jobnr, timestep, species, station):
 
     if station:
         station = station.split(',')[0]  # extract station code
-    output_file = "job_{}_time_{}_species_{}_station_{}.png".format(jobnr, timestep, species, station)
-    output_path = str(ASSETS_PATH.joinpath(output_file))
         # path relative to mlworkflowinterface
-    if not os.path.isfile(output_path):
-        filename = infile
-            # TODO: path based on jobnr
-        # print("plotting map:", jobnr, timestep, species, station)
-        generate_plot(filename, output_path, species, timestep, station)
+    filename = infile
+
+    nc_file = EURAD_netCDF(infile)
+    if ltarget:
+        output_file = "job_{}_time_{}_species_{}_station_{}_target_{}.png".format(jobnr, timestep, species, station, language_id)
+        output_path = str(ASSETS_PATH.joinpath(output_file))
+        if not os.path.isfile(output_path):
+            ev.plot_area_target(nc_file, species, timestep, output_path, t_values[species], unit=unit, lang=lang)
+    elif ldiff:
+        output_file = "job_{}_time_{}_species_{}_station_{}_diff_{}.png".format(jobnr, timestep, species, station, language_id)
+        output_path = str(ASSETS_PATH.joinpath(output_file))
+        if not os.path.isfile(output_path):
+            base_file = str(DATA_PATH.joinpath(f'base_{jobnr}.nc'))
+            base_nc_file = EURAD_netCDF(base_file)
+            ev.plot_area_diff(nc_file, base_nc_file, species, timestep, output_path, unit=unit, lang=lang)
+    else:
+        output_file = "job_{}_time_{}_species_{}_station_{}_{}.png".format(jobnr, timestep, species, station, language_id)
+        output_path = str(ASSETS_PATH.joinpath(output_file))
+        if not os.path.isfile(output_path):
+            ev.plot_area(nc_file, species, timestep, output_path, unit=unit, lang=lang)
 
     return os.path.join("assets", "generated_plots", "eurad", output_file)
         # path relative to website root (src)
 
 
-def get_timeseries_plot(jobnr, station, species, timestep):
+def get_timeseries_plot(language_id, jobnr, station, species, timestep):
+
+    lang = "en"
+    if language_id == 1:
+        lang = "de"
 
     # just to make it run
     APP_HOME = Path.cwd()
@@ -41,7 +80,7 @@ def get_timeseries_plot(jobnr, station, species, timestep):
 
     if station:
         station = station.split(',')[0]  # extract station code
-    output_file = "job_{}_station_{}_species_{}_time_{}.png".format(jobnr, station, species, timestep)
+    output_file = "job_{}_station_{}_species_{}_time_{}_{}.png".format(jobnr, station, species, timestep, language_id)
     output_path = str(ASSETS_PATH.joinpath(output_file))
     # Pfad relativ zu mlworkflowinterface
 
@@ -49,6 +88,7 @@ def get_timeseries_plot(jobnr, station, species, timestep):
         # Erstellen eines Dummy-Plots mit matplotlib
         filename = infile
         # print("plotting time series:", jobnr, timestep, species, station)
+        # time series have no language package (yet)
         generate_time_series_plot(filename, output_path, station, species, timestamp=timestep, figsize=(5,3))
 
     return os.path.join("assets", "generated_plots", "eurad", output_file)
diff --git a/src/pages/dashboard.py b/src/pages/dashboard.py
index 3e15fe99b713e25ec864d639aec656e3e4579305..8684ed1535bf154236692054ae411c553248f7c5 100644
--- a/src/pages/dashboard.py
+++ b/src/pages/dashboard.py
@@ -11,6 +11,7 @@ from pathlib import Path
 import eurad.info as info
 import numpy as np
 from eurad_plot import get_euradim_plot, get_timeseries_plot
+from eurad.eurad_netcdf import EURAD_netCDF
 import os
 import zipfile
 from cryptography.fernet import Fernet
@@ -28,6 +29,7 @@ from mlair.time_series import plot as plot_ml_time_series
 import pages.dashboard_translations as guitr
 import pages.dashboard_constants as guiconst
 
+
 # the following should be done with static files!
 APP_HOME = Path.cwd()
 IMAGE_PATH = APP_HOME.joinpath("static", "images")
@@ -337,21 +339,23 @@ ml_fcast_result_modal = html.Div([
         ], id="ml_fcast_result_modal_container")
 
 def generate_eurad_im_body(language_id=0):
+    disabled_days = [ dt.datetime.strptime('2017-01-27','%Y-%m-%d') + dt.timedelta(days=i) for i in range(14) ]
+    disabled_days += [ dt.datetime.strptime('2017-02-16','%Y-%m-%d') + dt.timedelta(days=i) for i in range(523) ]
     return [
     dbc.Row([
         dbc.Col(dbc.Label(guitr.date_label[language_id]), width=3),
         dbc.Col(dcc.DatePickerSingle(id="eurad_im_start_date",
-                                     date=dt.date(2018, 7, 18),
+                                     date=guiconst.min_date_allowed,
                                      display_format=guitr.date_format[language_id],
                                      first_day_of_week=guitr.first_day_of_week[language_id],
-                                     min_date_allowed=dt.date(2018, 7, 18),
-                                     max_date_allowed=dt.date(2018, 7, 31),
-                                     initial_visible_month=dt.date(2018, 7, 1))),
+                                     min_date_allowed=guiconst.min_date_allowed,
+                                     max_date_allowed=guiconst.max_date_allowed,
+                                     disabled_days=disabled_days,
+                                     initial_visible_month=guiconst.initial_visible_month)),
         dbc.Col(dbc.Label(f"{guitr.forecast_length_label[language_id]}:")),
         dbc.Col(dcc.Dropdown(value=guitr.forecast_length_options[language_id][-1],
                              options=[{'label': guitr.forecast_length_options[language_id][i],
                                        'value': guitr.forecast_length_options[language_id][i],
-                                       'disabled': True
                                        } for i in range(len(guitr.forecast_length_options[language_id]))],
                              id="eurad_im_forecast_length"))
     ], class_name="row mt-3"),
@@ -371,12 +375,14 @@ def generate_eurad_im_body(language_id=0):
      Output("plot-info", "data", allow_duplicate=True)],
     [Input('time-step-dropdown-output', 'value'),
      Input('variable-dropdown-output', 'value'),
-     Input('station-dropdown-output', 'value')],
+     Input('station-dropdown-output', 'value'),
+     Input('eurad_im_target', 'value'),
+     Input('eurad_scen_diff', 'value')],
     [State("user-info", "data"),
      State("job-info", "data")],
     prevent_initial_call=True
 )
-def update_image(selected_time_step, selected_variable, selected_station, users_dict, jobs_dict):
+def update_image(selected_time_step, selected_variable, selected_station, ltarget, ldiff, users_dict, jobs_dict):
     plotinfo_dict = {}
     if selected_time_step and selected_variable and selected_variable != 'NoneAvailable':
       
@@ -386,8 +392,13 @@ def update_image(selected_time_step, selected_variable, selected_station, users_
         except:
             language_id = 0
 
-        image_path = get_euradim_plot(jobnr, selected_time_step, selected_variable, selected_station)
-        timeseries_image_path = get_timeseries_plot(jobnr, selected_station, selected_variable, selected_time_step)
+        job_props = get_db_job_entry(jobnr)
+        application = job_props['application']
+        lltarget = bool((application == 0) and ltarget)
+        lldiff = bool((application == 2) and ldiff)
+
+        image_path = get_euradim_plot(language_id, jobnr, selected_time_step, selected_variable, selected_station, lldiff, lltarget)
+        timeseries_image_path = get_timeseries_plot(language_id, jobnr, selected_station, selected_variable, selected_time_step)
 
         first_image = html.Img(src=image_path, className='image-fit-container')
         second_image = html.Img(src=timeseries_image_path, className='image-fit-container')
@@ -410,19 +421,23 @@ def generate_eurad_im_output_body(language_id, context, jobnr):
         job_props = get_db_job_entry(jobnr)
         start_date = job_props['start_date']
         region = job_props['region']
+        fc_length =  job_props['forecast_length']
 
         if not os.path.isfile(infile):
-            trans_filename = transfer_results_from_HPC(jobnr, start_date, 0, DATA_PATH)
-            os.symlink(trans_filename, infile)
+            filenames = transfer_results_from_HPC(jobnr, start_date, 0, fc_length, DATA_PATH)
+            os.symlink(filenames['download_filename'], infile)
 
-        timestep_list = info.get_available_time_stamps(infile)
-        timestep_strings = [np.datetime_as_string(ts, unit="m") for ts in timestep_list]
+        nc_file = EURAD_netCDF(infile)
+        timestep_list = nc_file.get_time_stamps()
+        timestep_strings = [np.datetime_as_string(ts)[:16] for ts in timestep_list['time'].values]
 
-        start_date = pd.to_datetime(timestep_list[0]).strftime(guitr.date_format2[language_id])
-        fc_length = (len(timestep_list)-1) // 24
+        start_date = pd.to_datetime(timestep_strings[0]).strftime(guitr.date_format3[language_id])
         fc_length_str = "{} {}{}".format(fc_length, guitr.day_label[language_id], guitr.day_plural_label[language_id] if fc_length > 1 else "")
 
         variables_list = info.get_available_variables(infile)
+        # for unit conversion, PRS and TEM were added
+        # filter instead of list.remove: remove() raises ValueError if PRS/TEM are absent
+        variables_list = [v for v in variables_list if v not in ('PRS', 'TEM')]
 
         stations_list = info.get_available_stations()
         stations_list = sorted(stations_list)
@@ -461,10 +476,10 @@ def generate_eurad_im_output_body(language_id, context, jobnr):
     dbc.Row([
         dbc.Col(html.Br(), width=12),
         dbc.Col([
-            dbc.Checkbox(label=f"{guitr.show_target_plot_label[language_id]}", value=0)
+            dbc.Checkbox(id="eurad_im_target", label=f"{guitr.show_target_plot_label[language_id]}", value=False)
         ], width=12),
         dbc.Col([
-            dbc.Checkbox(label=f"{guitr.show_downscaling_label[language_id]}", value=0, disabled=True)
+            dbc.Checkbox(id="eurad_im_downscaling", label=f"{guitr.show_downscaling_label[language_id]}", value=False, disabled=True)
         ], width=12)
     ], class_name="row mt-3f"),
 ]
@@ -511,25 +526,30 @@ def generate_eurad_scen_output_body(language_id, context, jobnr):
 
     if jobnr and (jobnr != 'bla'):
         infile = str(DATA_PATH.joinpath(f'{jobnr}.nc'))
+        basefile = str(DATA_PATH.joinpath(f'base_{jobnr}.nc'))
         job_props = get_db_job_entry(jobnr)
         start_date = job_props['start_date']
         iscen = job_props['emis_scen']
         ireg = job_props['region']
         region = guitr.region_text[language_id][ireg]
+        fc_length = job_props['forecast_length']
 
         if not os.path.isfile(infile):
-            trans_filename = transfer_results_from_HPC(jobnr, start_date, iscen, DATA_PATH)
-            os.symlink(trans_filename, infile)
+            filenames = transfer_results_from_HPC(jobnr, start_date, iscen, fc_length, DATA_PATH)
+            os.symlink(filenames['download_filename'], infile)
+            os.symlink(filenames['base_download_filename'], basefile)
 
-        timestep_list = info.get_available_time_stamps(infile)
-        timestep_strings = [np.datetime_as_string(ts, unit="m") for ts in timestep_list]
-        # TODO: Zeit in Stunden seit Start
+        nc_file = EURAD_netCDF(infile)
+        timestep_list = nc_file.get_time_stamps()
+        timestep_strings = [np.datetime_as_string(ts)[:16] for ts in timestep_list['time'].values]
 
-        start_date = pd.to_datetime(timestep_list[0]).strftime(guitr.date_format2[language_id])
-        fc_length = (len(timestep_list)-1) // 24
+        start_date = pd.to_datetime(timestep_strings[0]).strftime(guitr.date_format3[language_id])
         fc_length_str = "{} {}{}".format(fc_length, guitr.day_label[language_id], guitr.day_plural_label[language_id] if fc_length > 1 else "")
 
         variables_list = info.get_available_variables(infile)
+        # for unit conversion, PRS and TEM were added
+        # filter instead of list.remove: remove() raises ValueError if PRS/TEM are absent
+        variables_list = [v for v in variables_list if v not in ('PRS', 'TEM')]
 
         stations_list = info.get_available_stations()
         stations_list = sorted(stations_list)
@@ -570,28 +590,33 @@ def generate_eurad_scen_output_body(language_id, context, jobnr):
         dbc.Row([
             dbc.Col(html.Br(), width=12),
             dbc.Col([
-                dbc.Checkbox(label=f"{guitr.show_downscaling_label[language_id]}", value=0)
+                dbc.Checkbox(id="eurad_scen_diff", label=f"{guitr.show_diff_plot_label[language_id]}", value=False)
+            ], width=12),
+            dbc.Col([
+                dbc.Checkbox(id="eurad_scen_downscaling", label=f"{guitr.show_downscaling_label[language_id]}", value=False, disabled=True)
             ], style={"display": "flex"}),
         ], class_name="row mt-3f"),
     ]
 
 
 def generate_eurad_scen_body(language_id):
+    disabled_days = [ dt.datetime.strptime('2017-01-27','%Y-%m-%d') + dt.timedelta(days=i) for i in range(14) ]
+    disabled_days += [ dt.datetime.strptime('2017-02-16','%Y-%m-%d') + dt.timedelta(days=i) for i in range(523) ]
     return [
     dbc.Row([
-        dbc.Col(dbc.Label(f"{guitr.date_label[language_id]}"), width=3),
+        dbc.Col(dbc.Label(guitr.date_label[language_id]), width=3),
         dbc.Col(dcc.DatePickerSingle(id="eurad_scen_start_date",
-                                     date=dt.date(2018, 7, 18),
+                                     date=guiconst.min_date_allowed,
                                      display_format=guitr.date_format[language_id],
                                      first_day_of_week=guitr.first_day_of_week[language_id],
-                                     min_date_allowed=dt.date(2018, 7, 18),
-                                     max_date_allowed=dt.date(2018, 7, 31),
-                                     initial_visible_month=dt.date(2018, 7, 1))),
+                                     min_date_allowed=guiconst.min_date_allowed,
+                                     max_date_allowed=guiconst.max_date_allowed,
+                                     disabled_days=disabled_days,
+                                     initial_visible_month=guiconst.initial_visible_month)),
         dbc.Col(dbc.Label(f"{guitr.forecast_length_label[language_id]}:")),
         dbc.Col(dcc.Dropdown(value=guitr.forecast_length_options[language_id][-1],
                              options=[{'label': guitr.forecast_length_options[language_id][i],
                                        'value': guitr.forecast_length_options[language_id][i],
-                                       'disabled': True
                                        } for i in range(len(guitr.forecast_length_options[language_id]))],
                              id="eurad_scen_forecast_length"))
     ], class_name="row mt-3"),
@@ -831,9 +856,11 @@ def get_my_jobs_from_db(user_id=None, language_id=0):
                                  guitr.jobs_columns[language_id][2]: dt.datetime.strptime(job[2],'%Y-%m-%d %H:%M').strftime(guitr.date_format2[language_id]),
                                  guitr.jobs_columns[language_id][3]: "{} {}{}".format(job[3], guitr.day_label[language_id], guitr.day_plural_label[language_id] if job[3] > 1 else ""),
                                  guitr.jobs_columns[language_id][4]: guitr.region_text[language_id][job[4]],
-                                 guitr.jobs_columns[language_id][5]: "{}".format(guitr.species_options[language_id][job[5]] if job[5] is not None else ""),
+                                 guitr.jobs_columns[language_id][5]: "{}".format(guitr.species_options[language_id][job[5]] if job[5] is not None \
+                                                                     else ",".join(guitr.species_options[language_id])),
                                  guitr.jobs_columns[language_id][6]: "{}".format(guitr.metrics_options[language_id][job[6]] if job[6] is not None else ""),
-                                 guitr.jobs_columns[language_id][7]: "{}".format(emis_info[language_id][" Name"][job[7]] if job[7] is not None else ""),
+                                 guitr.jobs_columns[language_id][7]: "{}".format(emis_info[language_id][" Name"][job[7]] if job[7] is not None \
+                                                                     else (guitr.default_text[language_id] if job[0] == 0 else "")),
                                  guitr.jobs_columns[language_id][8]: dt.datetime.strptime(job[8],'%Y-%m-%d %H:%M').strftime(guitr.date_format2[language_id]),
                                  guitr.jobs_columns[language_id][9]: job[9]})
     return data_from_db
@@ -1022,6 +1049,7 @@ def eurad_im_job_run(run_button, region, startdate, forecast_length, user_dict):
     new_job_dict['region'] = region
     new_job_dict['start_date'] = startdate + ' 00:00'
     new_job_dict['forecast_length'] = int(forecast_length.split()[0])
+    forecast_hours = new_job_dict['forecast_length'] * 24
     # take the coding for the status from the controlled vocabulary!
     # set it to "waiting" since it will just be submitted
     new_job_dict['status'] = 2
@@ -1038,7 +1066,7 @@ def eurad_im_job_run(run_button, region, startdate, forecast_length, user_dict):
     client = uc_client.Client(credentials, base_url)
     job_description = {'Executable': "/p/project/cjicg21/schroeder5/Destine_AQ/start_destine_demonstrator.sh",
                        'Job type': "ON_LOGIN_NODE",
-                       'Arguments': [jobnr, "0", startdate, str(new_job_dict['forecast_length'])], }
+                       'Arguments': [jobnr, "0", startdate, str(forecast_hours)], }
     job = client.new_job(job_description)
 
     # let's wait while the job is still running
@@ -1070,15 +1098,6 @@ def eurad_im_job_run(run_button, region, startdate, forecast_length, user_dict):
     prevent_initial_call=True
 )
 def ml_fcast_job_run(run_button, region, startdate, forecast_length, species, metrics, user_dict):
-
-#   this needs to be activated with the right job on JURECA
-#   (at least for the demonstrator, all MLAir runs have already been accomplished)
-#   base_url = f"{UNICORE_BASE}JURECA/rest/core"
-#   credentials = uc_credentials.UsernamePassword(UNICORE_USER, UNICORE_PASSWORD)
-#   client = uc_client.Client(credentials, base_url)
-#   job_description = {'Executable': "/p/project/cjicg21/schroeder5/Destine_AQ/start_destine_demonstrator.sh", "Job type": "ON_LOGIN_NODE", 'Arguments':[], }
-#   job = client.new_job(job_description)
-
     user_id = json.loads(user_dict)["user_id"]
     language_id = json.loads(user_dict)["language_id"]
     new_job_dict = {}
@@ -1088,10 +1107,12 @@ def ml_fcast_job_run(run_button, region, startdate, forecast_length, species, me
     new_job_dict['application'] = 1
     new_job_dict['region'] = region
     new_job_dict['start_date'] = startdate + ' 00:00'
-    new_job_dict['forecast_length'] = int(forecast_length.split()[0])
+    new_job_dict['forecast_length'] = int(forecast_length.split()[0])*24
     # also take the coding from the controlled vocabulary!
     # set it to "waiting" since it will just be submitted
-    new_job_dict['status'] = 2
+    # job is not submitted --> job is already finished
+    # new_job_dict['status'] = 2
+    new_job_dict['status'] = 0
     # at the moment only ozone is available!
     new_job_dict['species'] = species
     # at the moment only dma8eu is available!
@@ -1099,16 +1120,17 @@ def ml_fcast_job_run(run_button, region, startdate, forecast_length, species, me
     # MLAir does not have any emission scenario
     new_job_dict['emis_scen'] = None
 
-    # submit job
-    base_url = f"{UNICORE_BASE}JURECA/rest/core"
-    credentials = uc_credentials.UsernamePassword(UNICORE_USER, UNICORE_PASSWORD)
-    client = uc_client.Client(credentials, base_url)
-    job_description = {'Executable': "/p/project/deepacf/intelliaq/schroeder5/ecflow_mlair/start_destine_mlair_demonstrator.sh", "Job type": "ON_LOGIN_NODE", 'Arguments':[jobnr], }
-    job = client.new_job(job_description)
+# do not submit the job
+#   # submit job
+#   base_url = f"{UNICORE_BASE}JURECA/rest/core"
+#   credentials = uc_credentials.UsernamePassword(UNICORE_USER, UNICORE_PASSWORD)
+#   client = uc_client.Client(credentials, base_url)
+#   job_description = {'Executable': "/p/project/deepacf/intelliaq/schroeder5/ecflow_mlair/start_destine_mlair_demonstrator.sh", "Job type": "ON_LOGIN_NODE", 'Arguments':[jobnr], }
+#   job = client.new_job(job_description)
 
-    # let's wait while the job is still running
-    # otherwise, the job status will not be able to be determined
-    job.poll()
+#   # let's wait while the job is still running
+#   # otherwise, the job status will not be able to be determined
+#   job.poll()
 
     # now create job in db (otherwise the status cannot be determined!)
 
@@ -1145,6 +1167,7 @@ def eurad_scen_job_run(run_button, region, startdate, forecast_length, emi_scen,
     new_job_dict['start_date'] = startdate + ' 00:00'
     new_job_dict['forecast_length'] = int(forecast_length.split()[0])
     new_job_dict['emis_scen'] = emi_scen
+    forecast_hours = new_job_dict['forecast_length'] * 24
     # take the coding for the status from the controlled vocabulary!
     # set it to "waiting" since it will just be submitted
     new_job_dict['status'] = 2
@@ -1157,7 +1180,9 @@ def eurad_scen_job_run(run_button, region, startdate, forecast_length, emi_scen,
     base_url = f"{UNICORE_BASE}JURECA/rest/core"
     credentials = uc_credentials.UsernamePassword(UNICORE_USER, UNICORE_PASSWORD)
     client = uc_client.Client(credentials, base_url)
-    job_description = {'Executable': "/p/project/cjicg21/schroeder5/Destine_AQ/start_destine_demonstrator.sh", "Job type": "ON_LOGIN_NODE", 'Arguments':[jobnr, str(emi_scen)], }
+    job_description = {'Executable': "/p/project/cjicg21/schroeder5/Destine_AQ/start_destine_demonstrator.sh",
+                       'Job type': "ON_LOGIN_NODE",
+                       'Arguments': [jobnr, str(emi_scen), startdate, str(forecast_hours)], }
     job = client.new_job(job_description)
 
     # let's wait while the job is still running
diff --git a/src/pages/dashboard_constants.py b/src/pages/dashboard_constants.py
index 64897b62782954f2059965ebe208c497a96a492b..980572637f12dff056c4c5bdc57f04e2745568b9 100644
--- a/src/pages/dashboard_constants.py
+++ b/src/pages/dashboard_constants.py
@@ -1,7 +1,7 @@
 import datetime as dt
 
-min_date_allowed=dt.date(2017, 1, 18)
-max_date_allowed=dt.date(2018, 8, 10)
+min_date_allowed=dt.date(2017, 1, 21)
+max_date_allowed=dt.date(2018, 8, 9)
 initial_visible_month=dt.date(2017, 1, 1)
 
 # Explanation of conversion from concentration to mixing ratio:
@@ -20,11 +20,9 @@ initial_visible_month=dt.date(2017, 1, 1)
 # T: temperature in K
 # R: specific gas constant (287.058 J/kg*K)
 # Molecular Mass Calculator (http://www.bmrb.wisc.edu/metabolomics/mol_mass.php)
-conversion_factor = { "NO": 0.80182,
-                      "NO2": 0.52297,
+conversion_factor = { "NO2": 0.52297,
                       "ozone": 0.50124,
                       "PM2.5": 1.0 }
-ml_names = { "NO": "NO",
-             "NO2": "NO2",
+ml_names = { "NO2": "NO2",
              "ozone": "O3",
              "PM2.5": "PM2P5" }
diff --git a/src/pages/dashboard_translations.py b/src/pages/dashboard_translations.py
index 4ef6b89b0ee6001638b61a7a8a1f9ee631a97a45..93e4883c768afcac50efbbe454df8ec4de665ad6 100644
--- a/src/pages/dashboard_translations.py
+++ b/src/pages/dashboard_translations.py
@@ -97,6 +97,7 @@ close_label = ["close", "Schließen"]
 date_label = ["date", "Datum"]
 date_format = ["M/D/Y", "D.M.Y"]
 date_format2 = ['%Y-%m-%d %H:%M', '%d.%m.%Y %H:%M']
+date_format3 = ['%Y-%m-%dT%H:%M', '%d.%m.%YT%H:%M']
 first_day_of_week = [0, 1]
 forecast_length_label = ["forecast length", "Vorhersagedauer"]
 region_label = ["region", "Region"]
@@ -108,8 +109,8 @@ state_label = ["state:", "Bundesland:"]
 species_label = ["species", "Spezies"]
 station_label = ["station", "Station"]
 output_metrics_label = ["output metrics", "Ausgabemetrik"]
-species_options = [["ozone", "NO", "NO2", "PM2.5"],
-                   ["Ozon", "Stickstoffmonoxid", "Stickstoffdioxid", "Feinstaub (PM2.5)"]]
+species_options = [["ozone", "NO2", "PM2.5"],
+                   ["Ozon", "Stickstoffdioxid", "Feinstaub (PM2.5)"]]
 metrics_options = [["dma8eu", "mean"],
                    ["dma8eu", "Mittelwert"]]
 emis_scen_label = ["emission scenario", "Emissionsszenario"]
@@ -126,7 +127,9 @@ im_download_label = ["Download Plots", "Plots herunterladen"]
 downscaling_label = ["Postprocessing with ML-Downscaling", "Postprocessing mit ML-Downscaling"]
 show_downscaling_label = ["Show results with ML downscaling", "Ergebnisse mit ML-Downscaling anzeigen"]
 show_target_plot_label = ["Show target plot", "Ergebnisse als Schwellwertplot anzeigen"]
+show_diff_plot_label = ["Show difference plot", "Ergebnisse als Differenzplot anzeigen"]
 out_option_label = ["output option", "Ausgabe-Option"]
 mlair_legend_obs_label = ["measurements", "Beobachtungen"]
 mlair_legend_fc_label = ["forecasts", "Vorhersagen"]
 mlair_conc_label = ["concentration", "Konzentration"]
+default_text = ["default", "Standard"]
diff --git a/utils/utils.py b/utils/utils.py
index d906c5d193e09581d5aec3450f2b8957766466f8..89e04c3896b13f82da4df5bf67d2dc98ae1b8dd2 100644
--- a/utils/utils.py
+++ b/utils/utils.py
@@ -12,26 +12,49 @@ from deployment_settings import (
 base_url = f"{UNICORE_BASE}JURECA/rest/core"
 
 
-def transfer_results_from_HPC(jobnr, model_date, iscen, data_path):
+def get_filenames(jobnr, yy, mm, dd, iscen, fc_length):
+    timestep_date = date(int(yy), int(mm), int(dd))
+    baseid = runid = "digitwin"
+    if iscen != 0:
+        runid = f"digitwin{iscen:03}"
 
-    # Authentification/Authorization
-    credentials = uc_credentials.UsernamePassword(UNICORE_USER, UNICORE_PASSWORD)
+    remote_filename = f"ctmout_{runid}_cutcropped_{timestep_date.strftime('%j')}_de3.nc"
+    download_filename = f"ctmout_{runid}_cutcropped_{fc_length*24}_{yy}_{timestep_date.strftime('%j')}_de3.nc"
+    base_remote_filename = f"ctmout_{baseid}_cutcropped_{timestep_date.strftime('%j')}_de3.nc"
+    base_download_filename = f"ctmout_{baseid}_cutcropped_{fc_length*24}_{yy}_{timestep_date.strftime('%j')}_de3.nc"
+
+    filenames = { "remote_filename": remote_filename,
+                  "download_filename": download_filename,
+                  "base_remote_filename": base_remote_filename,
+                  "base_download_filename": base_download_filename }
+    return filenames
+
+
+def transfer_results_from_HPC(jobnr, model_date, iscen, fc_length, data_path):
 
     yy,mm,dd=model_date.split('-')
     dd=dd.split(' ')[0]
-    timestep_date = date(int(yy), int(mm), int(dd))
-    base_directory = f"{UNICORE_USER}/{jobnr.upper()}/CTM/{yy}{mm}/{dd}"
-    if iscen == 0:
-        download_filename = f"ctmout_digitwin_cutcropped_{timestep_date.strftime('%j')}_de3.nc"
-    else:
-        download_filename = f"ctmout_digitwin{iscen:03}_cutcropped_{timestep_date.strftime('%j')}_de3.nc"
+    # get all filenames for the transfer
+    filenames = get_filenames(jobnr, yy, mm, dd, iscen, fc_length)
+
+    # Authentification/Authorization
+    credentials = uc_credentials.UsernamePassword(UNICORE_USER, UNICORE_PASSWORD)
 
     # only transfer file if results are not already there (from another jobnr);
     # in the latter case, a link to the already downloaded data is sufficient
-    if not os.path.isfile(str(data_path.joinpath(download_filename))):
+    base_directory = f"{UNICORE_USER}/{jobnr.upper()}/CTM/{yy}{mm}/{dd}"
+    if not os.path.isfile(str(data_path.joinpath(filenames['download_filename']))):
         transport = uc_client.Transport(credentials)
         scratch = uc_client.Storage(transport, f"{base_url}/storages/SCRATCH")
-        dfile = scratch.stat(f"{base_directory}/{download_filename}")
-        dfile.download(str(data_path.joinpath(download_filename)))
-
-    return download_filename
+        dfile = scratch.stat(f"{base_directory}/{filenames['remote_filename']}")
+        dfile.download(str(data_path.joinpath(filenames['download_filename'])))
+    # for emission scenarios: also transfer the base run (if necessary)
+    if iscen != 0:
+        base_directory = f"{UNICORE_USER}/B{jobnr.upper()}/CTM/{yy}{mm}/{dd}"
+        if not os.path.isfile(str(data_path.joinpath(filenames['base_download_filename']))):
+            transport = uc_client.Transport(credentials)
+            scratch = uc_client.Storage(transport, f"{base_url}/storages/SCRATCH")
+            dfile = scratch.stat(f"{base_directory}/{filenames['base_remote_filename']}")
+            dfile.download(str(data_path.joinpath(filenames['base_download_filename'])))
+
+    return filenames