Commit 50df54e9 authored by Bing Gong

add other scripts for running on non-HPC systems, and add requirements_non_HPC.txt

parent ac6a8c22
#!/usr/bin/env bash
#
# __authors__ = Bing Gong
# __date__ = '2022_02_20'
unset PYTHONPATH
ENV_NAME=$1
THIS_DIR="$(pwd)"
WORKING_DIR="$(dirname "$THIS_DIR")"
VENV_BASE=${WORKING_DIR}/virtual_envs
VENV_DIR=${WORKING_DIR}/virtual_envs/${ENV_NAME}
# check whether a name for the virtual environment was passed
if [ -z "$1" ]; then
    echo "ERROR: Provide a name to set up the virtual environment."
    return
fi
# Create the virtual environment if it does not exist yet
if ! [[ -d "${VENV_DIR}" ]]; then
    mkdir -p "${VENV_BASE}"
    echo "Installing virtualenv under ${VENV_DIR}..."
    python3 -m virtualenv -p python3 "${VENV_DIR}"
fi
# Activate the virtual environment (also when it already existed)
source "${VENV_DIR}/bin/activate"
# Install site packages
pip install --no-cache-dir -r requirements_non_HPC.txt
echo "The site-packages are installed for non-HPC users."
## Add modules from the project
unset PYTHONPATH
export PYTHONPATH=${WORKING_DIR}:$PYTHONPATH
export PYTHONPATH=${WORKING_DIR}/utils:$PYTHONPATH
export PYTHONPATH=${WORKING_DIR}/model_modules:$PYTHONPATH
export PYTHONPATH=${WORKING_DIR}/postprocess:$PYTHONPATH
# get back to the base directory (use return, not exit, since this script is sourced)
cd "${WORKING_DIR}" || return
matplotlib==3.3.0
mpi4py==3.0.1
pandas==0.25.3
xarray==0.16.0
basemap==1.3.0
imageio==2.15.0 # pinned explicitly here to avoid any further installation
scikit-image==0.17.2
opencv-python-headless==4.2.0.34
netcdf4==1.5.8
#metadata==0.2
normalization==0.4
utils==1.0.1
tensorflow==1.13.1
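A quick sanity check after the installation, using only packages pinned in the list above, to confirm that the activated environment picked up the intended versions:

# run inside the activated virtual environment
python3 -c "import tensorflow as tf; print(tf.__version__)"   # expected: 1.13.1
python3 -c "import xarray as xr; print(xr.__version__)"       # expected: 0.16.0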
#!/bin/bash -x
# User input: your virtual environment name
VIRT_ENV_NAME=venv_test
echo "Activating virtual environment..."
source ../virtual_envs/${VIRT_ENV_NAME}/bin/activate
# select years and variables for dataset and define target domain
years=( "2016" )
variables=( "var167" )
sw_corner=( 10 20 )
nyx=( 24 24 )
source_dir=/home/b.gong/data_era5
destination_dir=/home/b.gong/preprocessed_data
n_nodes=2
for year in "${years[@]}"; do
    echo "start preprocessing data for year ${year}"
    mpirun -n ${n_nodes} python ../main_scripts/main_preprocess_data_step1.py \
        --source_dir ${source_dir} --destination_dir ${destination_dir} --years "${year}" \
        --vars "${variables[0]}" \
        --sw_corner "${sw_corner[0]}" "${sw_corner[1]}" --nyx "${nyx[0]}" "${nyx[1]}"
done
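Note that the loop above only forwards the first entry of the variables array. If main_preprocess_data_step1.py accepts several variables in one call (an assumption, not confirmed by this commit), the mpirun call inside the loop could be extended as sketched below; "var129" is only an illustrative placeholder:

# hypothetical multi-variable call, assuming --vars takes a list of values
variables=( "var167" "var129" )
mpirun -n ${n_nodes} python ../main_scripts/main_preprocess_data_step1.py \
    --source_dir ${source_dir} --destination_dir ${destination_dir} --years "${year}" \
    --vars "${variables[@]}" \
    --sw_corner "${sw_corner[0]}" "${sw_corner[1]}" --nyx "${nyx[0]}" "${nyx[1]}"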
#!/bin/bash -x
# User input: your virtual environment name
VIRT_ENV_NAME=venv_test
echo "Activating virtual environment..."
source ../virtual_envs/${VIRT_ENV_NAME}/bin/activate
sequence_length=20
sequences_per_file=10
source_dir=/home/b.gong/preprocessed_data-24x24-990N2010E-var167/pickle
base_dir="$(dirname "$source_dir")"
destination_dir=${base_dir}/tfrecords
n_nodes=2
mpirun -n ${n_nodes} python3 ../main_scripts/main_preprocess_data_step2.py -source_dir ${source_dir} -dest_dir ${destination_dir} \
    -sequence_length ${sequence_length} -sequences_per_file ${sequences_per_file}
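A small follow-up check to confirm that step 2 actually wrote TFRecord files into the destination directory (the *.tfrecords suffix is an assumption about the output of main_preprocess_data_step2.py):

ls -lh ${destination_dir}                              # inspect the generated files
find ${destination_dir} -name "*.tfrecords" | wc -l    # count TFRecord files (suffix assumed)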
#!/bin/bash -x
# User input: your virtual environment name
VIRT_ENV_NAME=venv_test
echo "Activating virtual environment..."
source ../virtual_envs/${VIRT_ENV_NAME}/bin/activate
#the source directory contains the tfrecords
source_dir=/home/b.gong/preprocessed_data-24x24-990N2010E-var167/tfrecords
destination_dir=/home/b.gong/model/
# select the model
model=convLSTM
mkdir -p ${destination_dir}
cp ../hparams/era5/${model}/model_hparams_template.json ${destination_dir}/model_hparams.json
cp ../data_split/era5/datasplit.json ${destination_dir}/data_split.json
vim ${destination_dir}/data_split.json
vim ${destination_dir}/model_hparams.json
datasplit_dict=${destination_dir}/data_split.json
model_hparams=${destination_dir}/model_hparams.json
python3 ../main_scripts/main_train_models.py --input_dir ${source_dir} --datasplit_dict ${datasplit_dict} \
    --dataset era5 --model ${model} --model_hparams_dict ${model_hparams} --output_dir ${destination_dir}/
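The two vim calls above block the script until the editor is closed, which gets in the way of unattended runs. A possible variant keeps the interactive step optional behind an assumed EDIT_CONFIGS switch (the variable name is not part of the repository):

# would replace the two vim lines above; edit the JSON files only on demand
if [[ -n "${EDIT_CONFIGS:-}" ]]; then
    vim ${destination_dir}/data_split.json
    vim ${destination_dir}/model_hparams.json
fi

Setting EDIT_CONFIGS=1 before running the script restores the original interactive behaviour; leaving it unset uses the copied templates as they are.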