diff --git a/prepare_userdb.py b/prepare_userdb.py
index b373e2df65027dbaecd76050039e8eae8d90b95f..af72c2e3a8da4a4a900c29d7105c356a5fa65d34 100644
--- a/prepare_userdb.py
+++ b/prepare_userdb.py
@@ -1,6 +1,6 @@
 import sqlite3
 
-conn = sqlite3.connect('src/destine_de370c_users.db')
+conn = sqlite3.connect('src/static/data/destine_de370c_users.db')
 print("Opened database successfully")
 
 # First, create the controlled vocabulary
@@ -56,6 +56,23 @@ conn.execute("INSERT INTO convoc_status (id,job_status) \
 conn.execute("INSERT INTO convoc_status (id,job_status) \
               VALUES (3, 'aborted')")
 
+conn.commit()
+
+# Next, create the controlled vocabulary for regions of interest
+# 0: North Rhine-Westphalia
+# 1: Berlin-Brandenburg
+conn.execute('''CREATE TABLE convoc_region
+         (id INT PRIMARY KEY NOT NULL,
+          region_name TEXT NOT NULL);''')
+
+conn.execute("INSERT INTO convoc_region (id,region_name) \
+              VALUES (0, 'North Rhine-Westphalia')")
+conn.execute("INSERT INTO convoc_region (id,region_name) \
+              VALUES (1, 'Berlin-Brandenburg')")
+
+conn.commit()
+
+
 # Second, create the dashboard tables
 conn.execute('''CREATE TABLE users
          (id INT PRIMARY KEY NOT NULL,
@@ -64,61 +81,66 @@ conn.execute('''CREATE TABLE users
           FOREIGN KEY(language) REFERENCES convoc_language(id));''')
 
 conn.execute("INSERT INTO users (id,name,language) \
-              VALUES (1, 'Sabine English', 0)")
+              VALUES (1, 'User English', 0)")
 conn.execute("INSERT INTO users (id,name,language) \
-              VALUES (2, 'Sabine Deutsch', 1)")
+              VALUES (2, 'Benutzer Deutsch', 1)")
 
 conn.commit()
 
 conn.execute('''CREATE TABLE jobs
-         (id CHAR(50) NOT NULL,
-          user_id INT NOT NULL,
-          status INT NOT NULL,
-          application INT NOT NULL,
-          FOREIGN KEY(user_id) REFERENCES user(id),
-          FOREIGN KEY(status) REFERENCES convoc_status(id),
-          FOREIGN KEY(application) REFERENCES convoc_application(id));''')
-
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('832tgjhingj1', 1, 1, 0)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('jjkl7t3li97m', 1, 0, 1)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('7zhtglaza8ah', 1, 1, 2)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('ji54Fdr0z99m', 1, 0, 0)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('7zknt6702rx5', 1, 3, 0)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('klj836kahg2l', 1, 0, 2)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('a89uj20gxybb', 1, 2, 2)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('832tgjhingj1', 2, 1, 0)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('jjkl7t3li97m', 2, 0, 1)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('7zhtglaza8ah', 2, 1, 2)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('ji54Fdr0z99m', 2, 0, 0)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('7zknt6702rx5', 2, 3, 0)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('klj836kahg2l', 2, 0, 2)")
-conn.execute("INSERT INTO jobs (id,user_id,application,status) \
-              VALUES ('a89uj20gxybb', 2, 2, 2)")
+         (id CHAR(50) NOT NULL,
+          user_id INT NOT NULL,
+          status INT NOT NULL,
+          application INT NOT NULL,
+          start_date datetime NOT NULL,
+          forecast_length INT NOT NULL,
+          region INT NOT NULL,
+          creation_date datetime NOT NULL,
+          FOREIGN KEY(user_id) REFERENCES users(id),
+          FOREIGN KEY(status) REFERENCES convoc_status(id),
+          FOREIGN KEY(application) REFERENCES convoc_application(id),
+          FOREIGN KEY(region) REFERENCES convoc_region(id));''')
+
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('832tgjhingj1', 1, 0, 1, '2018-07-18 00:00', 4, 1, '2024-01-12 16:03')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('jjkl7t3li97m', 1, 1, 0, '2017-01-25 00:00', 3, 0, '2024-01-09 10:14')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('7zhtglaza8ah', 1, 2, 1, '2018-07-18 00:00', 4, 1, '2024-01-15 08:01')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('ji54Fdr0z99m', 1, 0, 0, '2018-07-18 00:00', 1, 0, '2023-12-18 13:57')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('7zknt6702rx5', 1, 0, 3, '2017-01-25 00:00', 3, 0, '2023-12-12 11:27')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('klj836kahg2l', 1, 2, 0, '2018-07-18 00:00', 1, 0, '2023-12-21 17:59')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('a89uj20gxybb', 1, 2, 2, '2017-01-25 00:00', 4, 0, '2024-01-02 14:35')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('832tgjhingj1', 2, 0, 1, '2018-07-18 00:00', 4, 1, '2024-01-12 16:03')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('jjkl7t3li97m', 2, 1, 0, '2017-01-25 00:00', 3, 0, '2024-01-09 10:14')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('7zhtglaza8ah', 2, 2, 1, '2018-07-18 00:00', 4, 1, '2024-01-15 08:01')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('ji54Fdr0z99m', 2, 0, 0, '2018-07-18 00:00', 1, 0, '2023-12-18 13:57')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('7zknt6702rx5', 2, 0, 3, '2017-01-25 00:00', 3, 0, '2023-12-12 11:27')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('klj836kahg2l', 2, 2, 0, '2018-07-18 00:00', 1, 0, '2023-12-21 17:59')")
+conn.execute("INSERT INTO jobs (id,user_id,application,status,start_date,forecast_length, region, creation_date) \
+              VALUES ('a89uj20gxybb', 2, 2, 2, '2017-01-25 00:00', 4, 0, '2024-01-02 14:35')")
 
 conn.commit()
 
 conn.execute('''CREATE TABLE results
-         (id INT NOT NULL,
-          user_id INT NOT NULL,
-          job_id CHAR(50) NOT NULL,
-          application INT NOT NULL,
-          used_options CHAR(50) NOT NULL,
-          FOREIGN KEY(user_id) REFERENCES user(id),
-          FOREIGN KEY(job_id) REFERENCES jobs(id),
+         (id INT NOT NULL,
+          user_id INT NOT NULL,
+          job_id CHAR(50) NOT NULL,
+          application INT NOT NULL,
+          used_options CHAR(50) NOT NULL,
+          FOREIGN KEY(user_id) REFERENCES users(id),
+          FOREIGN KEY(job_id) REFERENCES jobs(id),
           FOREIGN KEY(application) REFERENCES convoc_application(id));''')
 
 conn.execute("INSERT INTO results (user_id,id,job_id,application,used_options) \
@@ -142,4 +164,4 @@ conn.commit()
 
 print("Example tables successfully created")
 
-conn.close()
\ No newline at end of file
+conn.close()
diff --git a/src/eurad_plot.py b/src/eurad_plot.py
index bdc582bcc6cc7e807df27c11c745c876b30454a4..1ec179339ac420ed2f1b7011374c4f135418c98b 100644
--- a/src/eurad_plot.py
+++ b/src/eurad_plot.py
@@ -11,8 +11,8 @@ def get_euradim_plot(jobnr, timestep, species, station):
     DATA_PATH = APP_HOME.joinpath("static", "data")
     ASSETS_PATH = APP_HOME.joinpath("assets", "generated_plots")
 
-    # This needs to come from outside!
-    infile = str(DATA_PATH.joinpath('ctmout_wrf_cut_199_h09.nc'))
+    # Output file is linked to jobnr
+    infile = str(DATA_PATH.joinpath(f'{jobnr}.nc'))
 
     if station:
         station = station.split(',')[0]  # extract station code
@@ -36,8 +36,8 @@ def get_timeseries_plot(jobnr, station, species, timestep):
     DATA_PATH = APP_HOME.joinpath("static", "data")
     ASSETS_PATH = APP_HOME.joinpath("assets", "generated_plots")
 
-    # This needs to come from outside!
-    infile = str(DATA_PATH.joinpath('ctmout_wrf_cut_199_h09.nc'))
+    # Output file is linked to jobnr
+    infile = str(DATA_PATH.joinpath(f'{jobnr}.nc'))
 
     if station:
         station = station.split(',')[0]  # extract station code
diff --git a/src/pages/dashboard.py b/src/pages/dashboard.py
index 79dcc3941c71afd4d39dc7527decc407e3b2c874..b6c578bbe9c49fc35b435a6c49389c7d6d2c91e6 100644
--- a/src/pages/dashboard.py
+++ b/src/pages/dashboard.py
@@ -12,11 +12,13 @@ import eurad.info as info
 import numpy as np
 from eurad_plot import get_euradim_plot, get_timeseries_plot
 import os
+import zipfile
 
 # the following should be done with static files!
 APP_HOME = Path.cwd()
 IMAGE_PATH = APP_HOME.joinpath("static", "images")
 DATA_PATH = APP_HOME.joinpath("static", "data")
+ASSETS_PATH = APP_HOME.joinpath("assets", "generated_plots")
 
 dash.register_page(__name__, title="DestinE Air Quality Use Case", path="/")
 
@@ -43,10 +45,10 @@ description = [["The DestinE Air Quality Use Case demonstrates interactive trigg
                 "Die Dokumentation des Anwendungsfalls finden Sie ",
                 html.A("hier", href="https://www.fz-juelich.de/de/iek/iek-8/projekte/" +
                        "destination-earth-use-case-fuer-luftqualitaet-de370c")]]
-jobs_columns = [["Jobnr", "Status", "Application"],
-                ["Jobnr", "Status", "Anwendung"]]
-results_columns = [["Result_ID", "Jobnr", "Application", "Used Options"],
-                   ["Resultats_ID", "Jobnr", "Anwendung", "Verwendete Optionen"]]
+jobs_columns = [["Application", "Status", "Start date", "Forecast Length", "Region", "Creation Date", "Jobnr"],
+                ["Anwendung", "Status", "Startdatum", "Vorhersagedauer", "Region", "Erstellungsdatum", "Jobnr"]]
+results_columns = [["Result_ID", "Jobnr", "Application", "Used Options"],
+                   ["Resultats_ID", "Jobnr", "Anwendung", "Verwendete Optionen"]]
 eurad_scenarios = ["avoid all anthropogenic emissions",
                    "home office",
                    "industry",
@@ -57,13 +59,15 @@ status_text = [["finished", "active", "waiting", "aborted"],
                ["beendet", "aktiv", "in der Warteschlange", "abgebrochen"]]
 application_text = [["Field forecasting with EURAD-IM", "Point forecasting with MLAir", "EURAD-IM emission scenarios"],
                     ["Gebietsvorhersage mit EURAD-IM", "Stationsvorhersage mit MLAir", "EURAD-IM Emissionsszenarien"]]
+region_text = [["North Rhine-Westphalia", "Berlin-Brandenburg"],
+               ["Nordrhein-Westfalen", "Berlin-Brandenburg"]]
 description_text = [["This application produces high-resolution, i.e. 1 km or less, on-demand results over a "
                      + "user-selected area for a forecast period of up to 4 days. Here, users can select the forecast "
                      + "start date (default today) and the forecast length (shorter runs require less time and "
                      + "resources), and the regional domain of the simulation. There are two EURAD-IM model "
                      + "configurations applied in the background of this application: i) the default 9 km resolution "
                      + "model covering all of Europe, and ii) region-specific 1 km configurations, initially limited "
-                     + "to Northrhine-Westphalia and Brandenburg. Users can select whether they wish to perform a "
+                     + "to North Rhine-Westphalia and Brandenburg. Users can select whether they wish to perform a "
                      + "dedicated 1 km EURAD-IM simulation, or whether the 1 km results shall be obtained with ML "
                      + "downscaling from the results of a 9 km simulation. For more information about EURAD-IM and ML "
                      + "downscaling, please refer to the description tab.",
@@ -109,9 +113,11 @@ description_text = [["This application produces high-resolution, i.e. 1 km or le
                      + "Informationen über EURAD-IM Emissionen finden sich im Beschreibungs-Tab."]]
 user_label = ["user:", "Benutzer:"]
 run_label = ["run", "Starten"]
+run2_label = ["Run", "Lauf"]
 close_label = ["close", "Schließen"]
 date_label = ["date:", "Datum:"]
 date_format = ["M/D/Y", "D.M.Y"]
+date_format2 = ['%Y-%m-%d %H:%M', '%d.%m.%Y %H:%M']
 first_day_of_week = [0, 1]
 min_date_allowed = "2017-02-20"
 max_date_allowed = "2018-08-09"
@@ -138,6 +144,7 @@ day_label = ["day", "Tag"]
 day_plural_label = ["s", "e"]
 save_label = ["Save Results", "Ergebnisse sichern"]
 download_label = ["Download Data", "Daten herunterladen"]
+im_download_label = ["Download Plots", "Plots herunterladen"]
 downscaling_label = ["Postprocessing with ML-Downscaling", "Postprocessing mit ML-Downscaling"]
 show_downscaling_label = ["Show results with ML downscaling", "Ergebnisse mit ML-Downscaling anzeigen"]
 out_option_label = ["output option", "Ausgabe-Option"]
@@ -292,7 +299,7 @@ def generate_ml_fcast_output_modal(jobnr=None, lisopen=False, language_id=0):
     return html.Div([
         dbc.Modal(
             [
-                dbc.ModalTitle(f"Run {jobnr}", style={"fontSize": 15}),
+                dbc.ModalTitle(f"{run2_label[language_id]} {jobnr}", style={"fontSize": 15}),
                 dbc.ModalHeader(dbc.ModalTitle(application_text[language_id][1])),
                 dbc.ModalBody(generate_ml_fcast_output_body(language_id)),
                 dbc.ModalFooter([dbc.Button(f"{close_label[language_id]}", id="ml_fcast_output_close",
@@ -309,7 +316,7 @@ def generate_ml_fcast_result_modal(jobnr=None, lisopen=False, language_id=0):
     return dbc.Modal(
         [
-            dbc.ModalTitle(f"Run {jobnr}", style={"fontSize": 15}),
+            dbc.ModalTitle(f"{run2_label[language_id]} {jobnr}", style={"fontSize": 15}),
             dbc.ModalHeader(dbc.ModalTitle(application_text[language_id][1])),
             dbc.ModalBody(generate_ml_fcast_output_body(language_id)),
             dbc.ModalFooter(dbc.Button(f"{close_label[language_id]}", id="ml_fcast_result_close",
@@ -344,37 +351,52 @@ def generate_eurad_im_body(language_id=0):
 
 @callback(
     [Output('image-container-output', 'children'),
-     Output('image-container-timeseries-output', 'children')],
+     Output('image-container-timeseries-output', 'children'),
+     Output("plot-info", "data")],
     [Input('time-step-dropdown-output', 'value'),
     Input('variable-dropdown-output', 'value'),
-     Input('station-dropdown-output', 'value')]
+     Input('station-dropdown-output', 'value')],
+    [State("user-info", "data"),
+     State("job-info", "data")]
 )
-def update_image(selected_time_step, selected_variable, selected_station):
+def update_image(selected_time_step, selected_variable, selected_station, users_dict, jobs_dict):
+    plotinfo_dict = {}
     if selected_time_step and selected_variable:
+
+        jobnr = json.loads(jobs_dict)["jobnr"]
+        try:
+            language_id = json.loads(users_dict)["language_id"]
+        except Exception:
+            language_id = 0
 
-        image_path = get_euradim_plot("dummyjob", selected_time_step, selected_variable, selected_station)
-        timeseries_image_path = get_timeseries_plot("dummyjob", selected_station, selected_variable, selected_time_step)
+        image_path = get_euradim_plot(jobnr, selected_time_step, selected_variable, selected_station)
+        timeseries_image_path = get_timeseries_plot(jobnr, selected_station, selected_variable, selected_time_step)
 
         first_image = html.Img(src=image_path, className='image-fit-container')
         second_image = html.Img(src=timeseries_image_path, className='image-fit-container')
+
+        plotinfo_dict["time_step"] = selected_time_step
+        plotinfo_dict["variable"] = selected_variable
+        plotinfo_dict["station"] = selected_station.split(',')[0]
+        plotinfo_json = json.dumps(plotinfo_dict)
 
-        return first_image, second_image
+        return first_image, second_image, plotinfo_json
 
-    return None, None
+    plotinfo_json = json.dumps(plotinfo_dict)
+    return None, None, plotinfo_json
 
 
 def generate_eurad_im_output_body(language_id, context):
 
-    # This needs to come from outside!
-    infile = str(DATA_PATH.joinpath('ctmout_wrf_cut_199_h09.nc'))
+    infile = str(DATA_PATH.joinpath('ctmout_wrf_cutcropped_199_h09.nc'))
 
     timestep_list = info.get_available_time_stamps(infile)
     timestep_strings = [np.datetime_as_string(ts, unit="m") for ts in timestep_list]
 
     # TODO: Zeit in Stunden seit Start
-    start_date = pd.to_datetime(timestep_list[0]).strftime("%d %B %Y")
+    start_date = pd.to_datetime(timestep_list[0]).strftime(date_format2[language_id])
     fc_length = (len(timestep_list)-1) // 24
     fc_length_str = "{} {}{}".format(fc_length, day_label[language_id],
                                      day_plural_label[language_id] if fc_length > 1 else "")
@@ -385,21 +407,27 @@ def generate_eurad_im_output_body(language_id, context):
     return [
         dbc.Row([dbc.Label(f"{start_date_label[language_id]}: {start_date}, {forecast_length_label[language_id]}: {fc_length_str}")]),
         dbc.Row([
-            dbc.Col(dbc.Label(f"{time_step_label[language_id]}"), width=3),
+            dbc.Col(dbc.Label(f"{time_step_label[language_id]}:"), width=3),
             dbc.Col(dcc.Dropdown(value=timestep_strings[0], options=timestep_strings, id='time-step-dropdown-{}'.format(context)), width=3)
         ], class_name="row mt-3"),
         dbc.Row([
             dbc.Col(dbc.Label("Variable:"), width=3),
             dbc.Col(dcc.Dropdown(value=variables_list[0], options=variables_list, id='variable-dropdown-{}'.format(context)), width=3),
-            dbc.Col(dbc.Label(f"{location_label[language_id]}:"), width=3),
-            dbc.Col(dcc.Dropdown(value=stations_list[0], options=stations_list, id='station-dropdown-{}'.format(context)), width=3)
+            dbc.Col(dbc.Label(f"{location_label[language_id]}:"), width=1),
+            dbc.Col(dcc.Dropdown(value=stations_list[0], options=stations_list, id='station-dropdown-{}'.format(context)), width=5)
         ], class_name="row mt-3"),
         dbc.Row([
            dbc.Col(html.Div(id='image-container-{}'.format(context)), width=6),
            dbc.Col([html.Div(id='image-container-timeseries-{}'.format(context)),
                     dbc.Col(dbc.Button(f"{save_label[language_id]}", class_name="fzj_style_btn"), width=6),
                     dbc.Col(html.Br()),
-                    dbc.Button(f"{download_label[language_id]}", class_name="fzj_style_btn")], width=6),
+                    html.Div([dbc.Button(f"{download_label[language_id]}", id="eurad_im_output_download",
+                                         class_name="fzj_style_btn"),
+                              dcc.Download(id="eurad_im_download_result")]),
+                    dbc.Col(html.Br()),
+                    html.Div([dbc.Button(f"{im_download_label[language_id]}", id="eurad_im_plots_download",
+                                         class_name="fzj_style_btn"),
+                              dcc.Download(id="eurad_im_download_plots")])], width=6),
         ], class_name="row mt-3"),
         dbc.Row([
             dbc.Col(html.Br(), width=12),
@@ -414,7 +442,7 @@ def generate_eurad_im_output_modal(jobnr=None, lisopen=False, language_id=0):
     return html.Div([
         dbc.Modal(
             [
-                dbc.ModalTitle(f"Run {jobnr}", style={"fontSize": 15}, id="eurad_im_output_modal_title"),
+                dbc.ModalTitle(f"{run2_label[language_id]} {jobnr}", style={"fontSize": 15}, id="eurad_im_output_modal_title"),
                 dbc.ModalHeader(dbc.ModalTitle(application_text[language_id][0])),
                 dbc.ModalBody(generate_eurad_im_output_body(language_id, "output")),
                 dbc.ModalFooter([dbc.Button(f"{close_label[language_id]}", id="eurad_im_output_close",
@@ -431,7 +459,7 @@ def generate_eurad_im_result_modal(jobnr=None, lisopen=False, language_id=0):
     return dbc.Modal(
         [
-            dbc.ModalTitle(f"Run {jobnr}", style={"fontSize": 15}),
+            dbc.ModalTitle(f"{run2_label[language_id]} {jobnr}", style={"fontSize": 15}),
             dbc.ModalHeader(dbc.ModalTitle(application_text[language_id][0])),
             dbc.ModalBody(generate_eurad_im_output_body(language_id, "result")),
             dbc.ModalFooter(dbc.Button(f"{close_label[language_id]}", id="eurad_im_result_close",
@@ -503,7 +531,7 @@ def generate_eurad_scen_output_modal(container_id="eurad_scen_output_modal_conta
     return html.Div([
         dbc.Modal(
             [
-                dbc.ModalTitle(f"Run {jobnr}", style={"fontSize": 15}),
+                dbc.ModalTitle(f"{run2_label[language_id]} {jobnr}", style={"fontSize": 15}),
                 dbc.ModalHeader(dbc.ModalTitle(application_text[language_id][2])),
                 dbc.ModalBody(generate_eurad_scen_output_body(language_id)),
                 dbc.ModalFooter([dbc.Button(f"{close_label[language_id]}", id="eurad_scen_output_close",
@@ -521,7 +549,7 @@ def generate_eurad_scen_result_modal(container_id="eurad_scen_result_modal_conta
     return html.Div([
         dbc.Modal(
             [
-                dbc.ModalTitle(f"Run {jobnr}", style={"fontSize": 15}),
+                dbc.ModalTitle(f"{run2_label[language_id]} {jobnr}", style={"fontSize": 15}),
                 dbc.ModalHeader(dbc.ModalTitle(application_text[language_id][2])),
                 dbc.ModalBody(generate_eurad_scen_output_body(language_id)),
                 dbc.ModalFooter([dbc.Button(f"{close_label[language_id]}", id="eurad_scen_result_modal_close",
@@ -613,13 +641,17 @@ def get_my_jobs_from_db(user_id=None, language_id=0):
     if user_id:
         conn = sqlite3.connect(DATA_PATH.joinpath('destine_de370c_users.db'))
         cur = conn.cursor()
-        cur.execute(f"SELECT * FROM jobs WHERE user_id={user_id}")
+        cur.execute(f"SELECT application, status, start_date, forecast_length, region, creation_date, id FROM jobs WHERE user_id={user_id}")
         data_rows_from_db = cur.fetchall()
 
         for job in data_rows_from_db:
-            data_from_db.append({jobs_columns[language_id][0]: job[0],
-                                 jobs_columns[language_id][1]: status_text[language_id][job[3]],
-                                 jobs_columns[language_id][2]: application_text[language_id][job[2]]})
+            data_from_db.append({jobs_columns[language_id][0]: application_text[language_id][job[0]],
+                                 jobs_columns[language_id][1]: status_text[language_id][job[1]],
+                                 jobs_columns[language_id][2]: dt.datetime.strptime(job[2], '%Y-%m-%d %H:%M').strftime(date_format2[language_id]),
+                                 jobs_columns[language_id][3]: "{} {}{}".format(job[3], day_label[language_id], day_plural_label[language_id] if job[3] > 1 else ""),
+                                 jobs_columns[language_id][4]: region_text[language_id][job[4]],
+                                 jobs_columns[language_id][5]: dt.datetime.strptime(job[5], '%Y-%m-%d %H:%M').strftime(date_format2[language_id]),
+                                 jobs_columns[language_id][6]: job[6]})
         conn.close()
 
     return data_from_db
@@ -739,7 +771,9 @@ layout = html.Div([
     # dcc.Store stores the jobs information
     dcc.Store(id='job-info'),
     # dcc.Store stores the results information
-    dcc.Store(id='results-info')
+    dcc.Store(id='results-info'),
+    # dcc.Store stores the plots information
+    dcc.Store(id='plot-info'),
 ])
 
 
@@ -820,6 +854,49 @@ def eurad_im_modal(open_button, close_button):
     return False
 
 
+@callback(
+    Output("eurad_im_download_result", "data"),
+    Input("eurad_im_output_download", "n_clicks"),
+    [State("job-info", "data"),
+     State("plot-info", "data")],
+    prevent_initial_call=True
+)
+def eurad_im_output_download(download_button, job_dict, plot_dict):
+    jobid = json.loads(job_dict)["jobnr"]
+    infile = str(DATA_PATH.joinpath(f'{jobid}.nc'))
+    return dcc.send_file(
+        infile
+    )
+
+
+@callback(
+    Output("eurad_im_download_plots", "data"),
+    Input("eurad_im_plots_download", "n_clicks"),
+    [State("job-info", "data"),
+     State("plot-info", "data")],
+    prevent_initial_call=True
+)
+def eurad_im_plots_download(download_button, job_dict, plot_dict):
+    jobid = json.loads(job_dict)["jobnr"]
+    station = json.loads(plot_dict)["station"]
+    species = json.loads(plot_dict)["variable"]
+    timestamp = json.loads(plot_dict)["time_step"]
+
+    # file names according to the user's selection
+    infile1 = f'job_{jobid}_station_{station}_species_{species}_time_{timestamp}.png'
+    infile2 = f'job_{jobid}_time_{timestamp}_species_{species}_station_{station}.png'
+    files = [infile1, infile2]
+    zipname = str(ASSETS_PATH.joinpath(f'job_{jobid}_time_{timestamp}_species_{species}_station_{station}.zip'))
+
+    zipf = zipfile.ZipFile(zipname, 'w', zipfile.ZIP_DEFLATED)
+    for file in files:
+        zipf.write(str(ASSETS_PATH.joinpath(file)), file)
+    zipf.close()
+    return dcc.send_file(
+        zipname
+    )
+
+
 @callback(
     Output("eurad_im_output_modal", "is_open"),
     Input("eurad_im_output_close", "n_clicks"),
@@ -914,8 +991,8 @@ def postprocess_job(rows, derived_virtual_selected_rows, users_dict):
     eurad_scen_output_modal_isopen = False
     if derived_virtual_selected_rows != []:
         status = rows[derived_virtual_selected_rows[0]][jobs_columns[language_id][1]]
-        application = rows[derived_virtual_selected_rows[0]][jobs_columns[language_id][2]]
-        jobnr = rows[derived_virtual_selected_rows[0]][jobs_columns[language_id][0]]
+        application = rows[derived_virtual_selected_rows[0]][jobs_columns[language_id][0]]
+        jobnr = rows[derived_virtual_selected_rows[0]][jobs_columns[language_id][6]]
        if status == status_text[language_id][0]:
            eurad_im_output_modal_isopen = (application == application_text[language_id][0])
            ml_fcast_output_modal_isopen = (application == application_text[language_id][1])
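
For reference, a minimal sketch of how the extended jobs table and the new convoc_region vocabulary can be inspected after prepare_userdb.py has been run from the repository root; the database path matches the one used above, but the ad-hoc script itself is illustrative only and not part of this changeset:

import sqlite3

# Illustrative check only: resolve each job's region id to its region_name,
# mirroring the columns selected in get_my_jobs_from_db().
conn = sqlite3.connect('src/static/data/destine_de370c_users.db')
cur = conn.cursor()
cur.execute("""SELECT j.id, j.user_id, j.start_date, j.forecast_length,
                      r.region_name, j.creation_date
               FROM jobs AS j
               JOIN convoc_region AS r ON r.id = j.region
               ORDER BY j.creation_date""")
for row in cur.fetchall():
    print(row)
conn.close()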