diff --git a/video_prediction_tools/HPC_scripts_prior/data_extraction_era5_template.sh b/video_prediction_tools/HPC_scripts_prior/data_extraction_era5_template.sh
deleted file mode 100644
index 518fe84998745905092c0d7a7e62e97119d8909e..0000000000000000000000000000000000000000
--- a/video_prediction_tools/HPC_scripts_prior/data_extraction_era5_template.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash -x
-## Controlling Batch-job : Need input
-#SBATCH --account=<Project name>
-#SBATCH --nodes=1
-#SBATCH --ntasks=13
-##SBATCH --ntasks-per-node=13
-#SBATCH --cpus-per-task=1
-#SBATCH --output=data_extraction_era5-out.%j
-#SBATCH --error=data_extraction_era5-err.%j
-#SBATCH --time=04:20:00
-#SBATCH --partition=batch
-#SBATCH --gres=gpu:0
-#SBATCH --mail-type=ALL
-#SBATCH --mail-user=me@somewhere.com
-
-##Load basic Python module: Need input
-#module load Python
-
-
-##Create and activate a virtual environment: Need input 
-#VENV_NAME=<my_venv>
-#python3 -m venv ../virtual_envs/${VENV_NAME}
-#source ../virtual_envs/${VENV_NAME}/bin/activate
-
-
-## Install required packages
-# set PYTHONPATH...
-BASE_DIR="$(pwd)"
-WORKING_DIR="$(dirname "${BASE_DIR}")"
-export PYTHONPATH=${WORKING_DIR}/virtual_envs/${VENV_NAME}/lib/python3.8/site-packages:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}/utils:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}/model_modules:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}/postprocess:$PYTHONPATH
-# ... install requirements
-pip install --no-cache-dir -r ../env_setup/requirements.txt
-
-
-# Declare path-variables (dest_dir will be set and configured automatically via generate_runscript.py)
-source_dir=/my/path/to/era5
-destination_dir=/my/path/to/extracted/data
-varmap_file=/my/path/to/varmapping/file
-
-years=( "2015" )
-
-# Run data extraction
-for year in "${years[@]}"; do
-  echo "Perform ERA5-data extraction for year ${year}"
-  srun python ../main_scripts/main_data_extraction.py  --source_dir ${source_dir} --target_dir ${destination_dir} \
-                                                       --year ${year} --varslist_path ${varmap_file}
-done
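
The directory setup in this template derives the repository root (WORKING_DIR) as the parent of the runscript directory and then prepends the project modules plus the virtual environment's site-packages to PYTHONPATH. A compact sketch of the same wiring, assuming VENV_NAME has been set by uncommenting the block above:

BASE_DIR="$(pwd)"                       # .../video_prediction_tools/HPC_scripts_prior
WORKING_DIR="$(dirname "${BASE_DIR}")"  # .../video_prediction_tools (repository root)
# prepend project packages and the virtual environment's site-packages
for subdir in "" /utils /model_modules /postprocess; do
  export PYTHONPATH="${WORKING_DIR}${subdir}:${PYTHONPATH}"
done
export PYTHONPATH="${WORKING_DIR}/virtual_envs/${VENV_NAME}/lib/python3.8/site-packages:${PYTHONPATH}"
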
diff --git a/video_prediction_tools/HPC_scripts_prior/meta_postprocess_era5_template.sh b/video_prediction_tools/HPC_scripts_prior/meta_postprocess_era5_template.sh
deleted file mode 100644
index 7d9dcd10cabf0b44ae75ead14711059c5c167d3c..0000000000000000000000000000000000000000
--- a/video_prediction_tools/HPC_scripts_prior/meta_postprocess_era5_template.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash -x
-## Controlling Batch-job: Need input
-#SBATCH --account=<Project name>
-#SBATCH --nodes=1
-#SBATCH --ntasks=13
-##SBATCH --ntasks-per-node=13
-#SBATCH --cpus-per-task=1
-#SBATCH --output=meta_postprocess_era5-out.%j
-#SBATCH --error=meta_postprocess_era5-err.%j
-#SBATCH --time=04:20:00
-#SBATCH --partition=batch
-#SBATCH --gres=gpu:0
-#SBATCH --mail-type=ALL
-#SBATCH --mail-user=me@somewhere.com
-
-##Load basic Python module: Need input
-#module load Python
-
-
-##Create and activate a virtual environment: Need input
-#VENV_NAME=<my_venv>
-#python3 -m venv ../virtual_envs/${VENV_NAME}
-#source ../virtual_envs/${VENV_NAME}/bin/activate
-
-## Install required packages
-# set PYTHONPATH...
-WORKING_DIR="$(pwd)"
-BASE_DIR="$(dirname "${WORKING_DIR}")"
-export PYTHONPATH=${BASE_DIR}/virtual_envs/${VENV_NAME}/lib/python3.8/site-packages:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/utils:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/model_modules:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/postprocess:$PYTHONPATH
-# ... install requirements
-pip install --no-cache-dir -r ../env_setup/requirements.txt
-
-# Name of virtual environment
-VENV_NAME=venv_hdfml
-# Name of container image (must be available in working directory)
-CONTAINER_IMG="${WORKING_DIR}/tensorflow_21.09-tf1-py3.sif"
-WRAPPER="${BASE_DIR}/env_setup/wrapper_container.sh"
-
-# sanity checks
-if [[ ! -f ${CONTAINER_IMG} ]]; then
-  echo "ERROR: Cannot find required TF1.15 container image '${CONTAINER_IMG}'."
-  exit 1
-fi
-
-if [[ ! -f ${WRAPPER} ]]; then
-  echo "ERROR: Cannot find wrapper-script '${WRAPPER}' for TF1.15 container image."
-  exit 1
-fi
-
-# Declare input parameters
-root_dir=/p/project/deepacf/deeprain/video_prediction_shared_folder/
-analysis_config=video_prediction_tools/meta_postprocess_config/meta_config.json
-metric=mse
-exp_id=test
-enable_skill_scores=True
-
-srun python ../main_scripts/main_meta_postprocess.py  --root_dir ${root_dir} --analysis_config ${analysis_config} \
-                                                       --metric ${metric} --exp_id ${exp_id} --enable_skill_scores ${enable_skill_scores}
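
In the same spirit as the container sanity checks above, the meta-postprocessing input can be validated before the job is launched. A small sketch, assuming the relative analysis_config path is meant to be resolved against root_dir (how main_meta_postprocess.py actually resolves it is not specified here):

# fail early if the meta-postprocessing configuration is missing
# (assumption: the relative path is resolved against root_dir)
if [[ ! -f "${root_dir}/${analysis_config}" ]]; then
  echo "ERROR: Cannot find analysis configuration '${root_dir}/${analysis_config}'."
  exit 1
fi
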
diff --git a/video_prediction_tools/HPC_scripts_prior/preprocess_data_era5_step1_template.sh b/video_prediction_tools/HPC_scripts_prior/preprocess_data_era5_step1_template.sh
deleted file mode 100644
index cc500654c49e4b619399aa8685d51b7299836d42..0000000000000000000000000000000000000000
--- a/video_prediction_tools/HPC_scripts_prior/preprocess_data_era5_step1_template.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash -x
-## Controlling Batch-job : Need input
-#SBATCH --account=<Project name>
-#SBATCH --nodes=1
-#SBATCH --ntasks=13
-##SBATCH --ntasks-per-node=13
-#SBATCH --cpus-per-task=1
-#SBATCH --output=Data_Preprocess_step1_era5-out.%j
-#SBATCH --error=Data_Preprocess_step1_era5-err.%j
-#SBATCH --time=04:20:00
-#SBATCH --partition=batch
-#SBATCH --gres=gpu:0
-#SBATCH --mail-type=ALL
-#SBATCH --mail-user=me@somewhere.com
-
-##Load basic Python module: Need input
-#module load Python
-
-
-##Create and activate a virtual environment : Need input
-#VENV_NAME=<my_venv>
-#python3 -m venv ../virtual_envs/${VENV_NAME}
-#source ../virtual_envs/${VENV_NAME}/bin/activate
-
-
-## Install required packages
-# set PYTHONPATH...
-BASE_DIR="$(pwd)"
-WORKING_DIR="$(dirname "${BASE_DIR}")"
-export PYTHONPATH=${WORKING_DIR}/virtual_envs/${VENV_NAME}/lib/python3.8/site-packages:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}/utils:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}/model_modules:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}/postprocess:$PYTHONPATH
-# ... install requirements
-pip install --no-cache-dir -r ../env_setup/requirements.txt
-
-
-# select years for dataset
-declare -a years=(
-                 "2017"
-                  )
-
-max_year=$(printf '%s\n' "${years[@]}" | sort -n | tail -n1)
-min_year=$(printf '%s\n' "${years[@]}" | sort -n | head -n1)
-# set some paths
-# note, that destination_dir is used during runtime to set a proper experiment directory
-exp_id=xxx                                          # experiment identifier is set by 'generate_workflow_runscripts.sh'
-source_dir=${SAVE_DIR}/extractedData
-destination_dir=${SAVE_DIR}/preprocessedData/era5-Y${min_year}to${max_year}M01to12
-script_dir=`pwd`
-
-for year in "${years[@]}";
-    do
-        echo "Year $year"
-        echo "source_dir ${source_dir}/${year}"
-        mpirun -np 2 python ../../workflow_parallel_frame_prediction/DataPreprocess/mpi_stager_v2_process_netCDF.py \
-         --source_dir ${source_dir} -scr_dir ${script_dir} -exp_dir ${exp_id} \
-         --destination_dir ${destination_dir} --years ${year} --vars T2 MSL gph500 --lat_s 74 --lat_e 202 --lon_s 550 --lon_e 710
-    done
-
-
-
-
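
The minimum and maximum year are only used to label the destination directory. Since sort(1) operates on lines, the array has to be printed one value per line; a short sketch with a multi-year selection (years chosen purely for illustration):

declare -a years=( "2015" "2016" "2017" )
# print one year per line so sort(1) can order the values numerically
max_year=$(printf '%s\n' "${years[@]}" | sort -n | tail -n1)   # -> 2017
min_year=$(printf '%s\n' "${years[@]}" | sort -n | head -n1)   # -> 2015
echo "era5-Y${min_year}to${max_year}M01to12"                   # directory label
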
diff --git a/video_prediction_tools/HPC_scripts_prior/preprocess_data_era5_step2_template.sh b/video_prediction_tools/HPC_scripts_prior/preprocess_data_era5_step2_template.sh
deleted file mode 100644
index 1afb89088c008666c974adfc1bebe96e0c68f169..0000000000000000000000000000000000000000
--- a/video_prediction_tools/HPC_scripts_prior/preprocess_data_era5_step2_template.sh
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/bin/bash -x
-## Controlling Batch-job: Need input
-#SBATCH --account=<Project name>
-#SBATCH --nodes=1
-#SBATCH --ntasks=13
-##SBATCH --ntasks-per-node=13
-#SBATCH --cpus-per-task=1
-#SBATCH --output=preprocess_data_era5_step2-out.%j
-#SBATCH --error=preprocess_data_era5_step2-err.%j
-#SBATCH --time=04:20:00
-#SBATCH --partition=batch
-#SBATCH --gres=gpu:0
-#SBATCH --mail-type=ALL
-#SBATCH --mail-user=me@somewhere.com
-
-##Load basic Python module: Need input
-#module load Python
-
-
-##Create and activate a virtual environment: Need input
-#VENV_NAME=<my_venv>
-#python3 -m venv ../virtual_envs/${VENV_NAME}
-#source ../virtual_envs/${VENV_NAME}/bin/activate
-
-## Install required packages
-# set PYTHONPATH...
-WORKING_DIR="$(pwd)"
-BASE_DIR="$(dirname "${WORKING_DIR}")"
-export PYTHONPATH=${BASE_DIR}/virtual_envs/${VENV_NAME}/lib/python3.8/site-packages:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/utils:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/model_modules:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/postprocess:$PYTHONPATH
-# ... install requirements
-pip install --no-cache-dir -r ../env_setup/requirements.txt
-
-# Name of virtual environment
-VENV_NAME=venv_hdfml
-# Name of container image (must be available in working directory)
-CONTAINER_IMG="${WORKING_DIR}/tensorflow_21.09-tf1-py3.sif"
-WRAPPER="${BASE_DIR}/env_setup/wrapper_container.sh"
-
-# sanity checks
-if [[ ! -f ${CONTAINER_IMG} ]]; then
-  echo "ERROR: Cannot find required TF1.15 container image '${CONTAINER_IMG}'."
-  exit 1
-fi
-
-if [[ ! -f ${WRAPPER} ]]; then
-  echo "ERROR: Cannot find wrapper-script '${WRAPPER}' for TF1.15 container image."
-  exit 1
-fi
-
-# clean-up modules to avoid conflicts between host and container settings
-module purge
-
-# declare directory-variables which will be modified by config_runscript.py
-source_dir=/my/path/to/pkl/files/
-destination_dir=/my/path/to/tfrecords/files
-
-sequence_length=24
-sequences_per_file=10
-# run preprocessing step 2 inside the TF1.15 container
-# (this is the step in which the TFRecord files are generated)
-export CUDA_VISIBLE_DEVICES=0
-## One node, single GPU
-srun --mpi=pspmix --cpu-bind=none \
-     singularity exec --nv "${CONTAINER_IMG}" "${WRAPPER}" ${VENV_NAME} \
-     python3 ../main_scripts/main_preprocess_data_step2.py -source_dir ${source_dir} -dest_dir ${destination_dir} \
-     -sequence_length ${sequence_length} -sequences_per_file ${sequences_per_file}
-
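
The step-2, training and postprocessing templates all share the same launch pattern: srun starts the Singularity container and the wrapper script activates the virtual environment before calling Python. As a refactoring sketch only (the helper function run_in_container is not part of the templates), the pattern could be captured once, assuming CONTAINER_IMG, WRAPPER and VENV_NAME are set as above:

run_in_container() {
  # run a Python script inside the TF1.15 container with the venv activated
  srun --mpi=pspmix --cpu-bind=none \
       singularity exec --nv "${CONTAINER_IMG}" "${WRAPPER}" "${VENV_NAME}" \
       python3 "$@"
}
# example call:
# run_in_container ../main_scripts/main_preprocess_data_step2.py -source_dir "${source_dir}" -dest_dir "${destination_dir}"
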
diff --git a/video_prediction_tools/HPC_scripts_prior/train_model_era5_template.sh b/video_prediction_tools/HPC_scripts_prior/train_model_era5_template.sh
deleted file mode 100644
index 0f7b054908d09087dc266751157959f906e33fd8..0000000000000000000000000000000000000000
--- a/video_prediction_tools/HPC_scripts_prior/train_model_era5_template.sh
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/bin/bash -x
-## Controlling Batch-job: Need input
-#SBATCH --account=<Project name>
-#SBATCH --nodes=1
-#SBATCH --ntasks=13
-##SBATCH --ntasks-per-node=13
-#SBATCH --cpus-per-task=1
-#SBATCH --output=train_model_era5-out.%j
-#SBATCH --error=train_model_era5-err.%j
-#SBATCH --time=04:20:00
-#SBATCH --partition=batch
-#SBATCH --gres=gpu:1
-#SBATCH --mail-type=ALL
-#SBATCH --mail-user=me@somewhere.com
-
-##Load basic Python module: Need input
-#module load Python
-
-
-##Create and activate a virtual environment: Need input
-#VENV_NAME=<my_venv>
-#python3 -m venv ../virtual_envs/${VENV_NAME}
-#source ../virtual_envs/${VENV_NAME}/bin/activate
-
-## Install required packages
-# set PYTHONPATH...
-WORKING_DIR="$(pwd)"
-BASE_DIR="$(dirname "${WORKING_DIR}")"
-export PYTHONPATH=${BASE_DIR}/virtual_envs/${VENV_NAME}/lib/python3.8/site-packages:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/utils:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/model_modules:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/postprocess:$PYTHONPATH
-# ... install requirements
-pip install --no-cache-dir -r ../env_setup/requirements.txt
-
-# Name of virtual environment
-VENV_NAME=venv_hdfml
-# Name of container image (must be available in working directory)
-CONTAINER_IMG="${WORKING_DIR}/tensorflow_21.09-tf1-py3.sif"
-WRAPPER="${BASE_DIR}/env_setup/wrapper_container.sh"
-
-# sanity checks
-if [[ ! -f ${CONTAINER_IMG} ]]; then
-  echo "ERROR: Cannot find required TF1.15 container image '${CONTAINER_IMG}'."
-  exit 1
-fi
-
-if [[ ! -f ${WRAPPER} ]]; then
-  echo "ERROR: Cannot find wrapper-script '${WRAPPER}' for TF1.15 container image."
-  exit 1
-fi
-
-# clean-up modules to avoid conflicts between host and container settings
-module purge
-
-
-# declare directory-variables which will be modified by generate_runscript.py
-source_dir=/my/path/to/tfrecords/files
-destination_dir=/my/model/output/path
-
-# valid identifiers for model-argument are: convLSTM, savp, mcnet and vae
-model=convLSTM
-datasplit_dict=${destination_dir}/data_split.json
-model_hparams=${destination_dir}/model_hparams.json
-
-# run training in container
-export CUDA_VISIBLE_DEVICES=0
-## One node, single GPU 
-srun --mpi=pspmix --cpu-bind=none \
-     singularity exec --nv "${CONTAINER_IMG}" "${WRAPPER}" "${VENV_NAME}" \
-     python3 "${BASE_DIR}"/main_scripts/main_train_models.py --input_dir ${source_dir} --datasplit_dict ${datasplit_dict} \
-     --dataset era5 --model ${model} --model_hparams_dict ${model_hparams} --output_dir ${destination_dir}/
-
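
Training only starts successfully if the data-split and hyper-parameter files already exist in the output directory prepared by generate_runscript.py. An optional sketch that fails early instead of at job runtime (purely illustrative, not part of the template):

# verify the configuration files before submitting the (potentially long) training run
for cfg in "${datasplit_dict}" "${model_hparams}"; do
  if [[ ! -f "${cfg}" ]]; then
    echo "ERROR: Cannot find required configuration file '${cfg}'."
    exit 1
  fi
done
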
diff --git a/video_prediction_tools/HPC_scripts_prior/visualize_postprocess_era5_template.sh b/video_prediction_tools/HPC_scripts_prior/visualize_postprocess_era5_template.sh
deleted file mode 100644
index e7f169337b5bddb47fc62116bce6b2af96991d7d..0000000000000000000000000000000000000000
--- a/video_prediction_tools/HPC_scripts_prior/visualize_postprocess_era5_template.sh
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/bin/bash -x
-## Controlling Batch-job: Need input
-#SBATCH --account=<Project name>
-#SBATCH --nodes=1
-#SBATCH --ntasks=13
-##SBATCH --ntasks-per-node=13
-#SBATCH --cpus-per-task=1
-#SBATCH --output=visualize_postprocess_era5-out.%j
-#SBATCH --error=visualize_postprocess_era5-err.%j
-#SBATCH --time=04:20:00
-#SBATCH --partition=batch
-#SBATCH --gres=gpu:1
-#SBATCH --mail-type=ALL
-#SBATCH --mail-user=me@somewhere.com
-
-##Load basic Python module: Need input
-#module load Python
-
-
-##Create and activate a virtual environment: Need input
-#VENV_NAME=<my_venv>
-#python3 -m venv ../virtual_envs/${VENV_NAME}
-#source ../virtual_envs/${VENV_NAME}/bin/activate
-
-## Install required packages
-# set PYTHONPATH...
-WORKING_DIR="$(pwd)"
-BASE_DIR="$(dirname "${WORKING_DIR}")"
-export PYTHONPATH=${BASE_DIR}/virtual_envs/${VENV_NAME}/lib/python3.8/site-packages:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/utils:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/model_modules:$PYTHONPATH
-export PYTHONPATH=${BASE_DIR}/postprocess:$PYTHONPATH
-# ... install requirements
-pip install --no-cache-dir -r ../env_setup/requirements.txt
-
-# Name of virtual environment
-VENV_NAME=venv_hdfml
-# Name of container image (must be available in working directory)
-CONTAINER_IMG="${WORKING_DIR}/tensorflow_21.09-tf1-py3.sif"
-WRAPPER="${BASE_DIR}/env_setup/wrapper_container.sh"
-
-# sanity checks
-if [[ ! -f ${CONTAINER_IMG} ]]; then
-  echo "ERROR: Cannot find required TF1.15 container image '${CONTAINER_IMG}'."
-  exit 1
-fi
-
-if [[ ! -f ${WRAPPER} ]]; then
-  echo "ERROR: Cannot find wrapper-script '${WRAPPER}' for TF1.15 container image."
-  exit 1
-fi
-
-
-# declare directory-variables which will be modified by generate_runscript.py
-# Note: source_dir is only needed for retrieving the base-directory
-source_dir=/my/source/dir/
-checkpoint_dir=/my/trained/model/dir
-results_dir=/my/results/dir
-lquick=""
-
-# run postprocessing/generation of model results including evaluation metrics
-export CUDA_VISIBLE_DEVICES=0
-## One node, single GPU
-srun --mpi=pspmix --cpu-bind=none \
-     singularity exec --nv "${CONTAINER_IMG}" "${WRAPPER}" "${VENV_NAME}" \
-     python3 ../main_scripts/main_visualize_postprocess.py --checkpoint  ${checkpoint_dir} --mode test  \
-                                                           --results_dir ${results_dir} --batch_size 4 \
-                                                           --num_stochastic_samples 1 ${lquick} \
-                                                           > postprocess_era5-out_all."${SLURM_JOB_ID}"
-
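
The lquick variable is expanded unquoted in the srun call so that an empty value simply drops out of the command line, while a non-empty value injects an additional switch for main_visualize_postprocess.py. The switch name below is purely hypothetical:

lquick=""                      # default: full postprocessing, no extra switch
# lquick="--some_quick_mode"   # hypothetical optional switch for a reduced run
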
diff --git a/video_prediction_tools/env_setup/create_env_non_HPC.sh b/video_prediction_tools/env_setup/create_env_non_HPC.sh
deleted file mode 100644
index 2adc5cf578d9ae75cb8ab192c322bb9256480fc8..0000000000000000000000000000000000000000
--- a/video_prediction_tools/env_setup/create_env_non_HPC.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env bash
-#
-# __authors__ = Bing Gong
-# __date__  = '2022_02_20'
-
-unset PYTHONPATH
-
-ENV_NAME=$1
-THIS_DIR="$(pwd)"
-WORKING_DIR="$(dirname "$THIS_DIR")"
-VENV_BASE=${WORKING_DIR}/virtual_envs
-VENV_DIR=${WORKING_DIR}/virtual_envs/${ENV_NAME}
-ACT_VENV="${VENV_DIR}/bin/activate"
-
-# check if directory to virtual environment is parsed
-if [ -z "$1" ]; then
-  echo "ERROR: Provide a name to set up the virtual environment."
-  return
-fi
-
-
-# Create the virtual environment if it does not exist yet
-if ! [[ -d "${VENV_DIR}" ]]; then
-  mkdir -p "${VENV_BASE}"
-  echo "Installing virtualenv under ${VENV_DIR}..."
-  (cd "${VENV_BASE}" && python3 -m virtualenv -p python3 "${ENV_NAME}")
-fi
-
-# Activate the virtual environment
-source "${ACT_VENV}"
-
-#Install site packages
-pip install --no-cache-dir -r requirements_non_HPC.txt
-echo "The site-packages is installed for non_HPC users"
-
-## Add modules from the project
-unset PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}/utils:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}/model_modules:$PYTHONPATH
-export PYTHONPATH=${WORKING_DIR}/postprocess:$PYTHONPATH
-
-
-# ensure that PYTHONPATH is also set whenever the virtual environment is activated later on
-echo "export PYTHONPATH=${WORKING_DIR}:\$PYTHONPATH" >> "${ACT_VENV}"
-echo "export PYTHONPATH=${WORKING_DIR}/utils:\$PYTHONPATH" >> "${ACT_VENV}"
-echo "export PYTHONPATH=${WORKING_DIR}/model_modules:\$PYTHONPATH" >> "${ACT_VENV}"
-echo "export PYTHONPATH=${WORKING_DIR}/postprocess:\$PYTHONPATH" >> "${ACT_VENV}"
-
-
-# get back to basic directory
-cd "${WORKING_DIR}" || exit
-
-
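
Usage sketch for the script above (assuming it is called from video_prediction_tools/env_setup; the environment name is arbitrary):

# create the environment and register the project paths
source create_env_non_HPC.sh my_venv
# later sessions can simply re-activate it
source ../virtual_envs/my_venv/bin/activate
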