Commit e75c7ab8 authored by gong1

remove useless files and move files to new structure

parent 40ef67f1
Pipeline #45695 failed
Showing with 0 additions and 607 deletions
#!/usr/bin/env bash
# choose the model: convLSTM, vae, mcnet or savp
export model=convLSTM
export model_hparams=../hparams/era5/${model}/model_hparams.json
# create a subfolder named with the creation time and the user name; it serves as a hyperparameter-tuning folder and avoids overwriting a previously trained model when different hyperparameters are used
export hyperdir="$(date +"%Y%m%dT%H%M")_${USER}"
echo "model: ${model}"
echo "hparams: ${model_hparams}"
echo "experiment dir: ${hyperdir}"
#!/usr/bin/env bash
sed -i "s|source_dir=.*|source_dir=${SAVE_DIR}preprocessedData/|g" DataPreprocess_to_tf.sh
sed -i "s|destination_dir=.*|destination_dir=${SAVE_DIR}preprocessedData/|g" DataPreprocess_to_tf.sh
sed -i "s|source_dir=.*|source_dir=${SAVE_DIR}preprocessedData/|g" train_era5.sh
sed -i "s|destination_dir=.*|destination_dir=${SAVE_DIR}models/|g" train_era5.sh
sed -i "s|source_dir=.*|source_dir=${SAVE_DIR}preprocessedData/|g" generate_era5.sh
sed -i "s|checkpoint_dir=.*|checkpoint_dir=${SAVE_DIR}models/|g" generate_era5.sh
sed -i "s|results_dir=.*|results_dir=${SAVE_DIR}results/|g" generate_era5.sh
#!/usr/bin/env bash
# exit if any command fails
set -e
if [ "$#" -eq 2 ]; then
if [ $1 = "bair" ]; then
echo "IMAGE_SIZE argument is only applicable to kth dataset" >&2
exit 1
fi
elif [ "$#" -ne 1 ]; then
echo "Usage: $0 DATASET_NAME [IMAGE_SIZE]" >&2
exit 1
fi
if [ $1 = "bair" ]; then
TARGET_DIR=./data/bair
mkdir -p ${TARGET_DIR}
TAR_FNAME=bair_robot_pushing_dataset_v0.tar
URL=http://rail.eecs.berkeley.edu/datasets/${TAR_FNAME}
echo "Downloading '$1' dataset (this takes a while)"
#wget ${URL} -O ${TARGET_DIR}/${TAR_FNAME}  # on macOS, use curl instead of wget
curl ${URL} -o ${TARGET_DIR}/${TAR_FNAME}   # curl needs lowercase -o to write to the given path
tar -xvf ${TARGET_DIR}/${TAR_FNAME} --strip-components=1 -C ${TARGET_DIR}
rm ${TARGET_DIR}/${TAR_FNAME}
mkdir -p ${TARGET_DIR}/val
# reserve a fraction of the training set for validation
mv ${TARGET_DIR}/train/traj_256_to_511.tfrecords ${TARGET_DIR}/val/
elif [ $1 = "kth" ]; then
if [ "$#" -eq 2 ]; then
IMAGE_SIZE=$2
TARGET_DIR=./data/kth_${IMAGE_SIZE}
else
IMAGE_SIZE=64
TARGET_DIR=./data/kth
fi
echo ${TARGET_DIR} ${IMAGE_SIZE}
mkdir -p ${TARGET_DIR}
mkdir -p ${TARGET_DIR}/raw
echo "Downloading '$1' dataset (this takes a while)"
# TODO Bing: to save time, only a subset of actions is used; change back if all the data are needed
#for ACTION in walking jogging running boxing handwaving handclapping; do
# for ACTION in walking; do
# echo "Action: '$ACTION' "
# ZIP_FNAME=${ACTION}.zip
# URL=http://www.nada.kth.se/cvap/actions/${ZIP_FNAME}
# # wget ${URL} -O ${TARGET_DIR}/raw/${ZIP_FNAME}
# echo "Start downloading action '$ACTION' ULR '$URL' "
# curl ${URL} -O ${TARGET_DIR}/raw/${ZIP_FNAME}
# unzip ${TARGET_DIR}/raw/${ZIP_FNAME} -d ${TARGET_DIR}/raw/${ACTION}
# echo "Action '$ACTION' data download and unzip "
# done
FRAME_RATE=25
# mkdir -p ${TARGET_DIR}/processed
# # download files with metadata specifying the subsequences
# TAR_FNAME=kth_meta.tar.gz
# URL=http://rail.eecs.berkeley.edu/models/savp/data/${TAR_FNAME}
# echo "Downloading '${TAR_FNAME}' ULR '$URL' "
# #wget ${URL} -O ${TARGET_DIR}/processed/${TAR_FNAME}
# curl ${URL} -O ${TARGET_DIR}/processed/${TAR_FNAME}
# tar -xzvf ${TARGET_DIR}/processed/${TAR_FNAME} --strip 1 -C ${TARGET_DIR}/processed
# convert the videos into sequences of downscaled images
echo "Processing '$1' dataset"
#TODO Bing: only a subset of actions is used for testing
#for ACTION in walking jogging running boxing handwaving handclapping; do
#TODO Bing: restore the full action list above after testing
for ACTION in walking running; do
for VIDEO_FNAME in ${TARGET_DIR}/raw/${ACTION}/*.avi; do
FNAME=$(basename ${VIDEO_FNAME})
FNAME=${FNAME%_uncomp.avi}
echo "FNAME '$FNAME' "
# sometimes the directory is not created, so try until it is
while [ ! -d "${TARGET_DIR}/processed/${ACTION}/${FNAME}" ]; do
mkdir -p ${TARGET_DIR}/processed/${ACTION}/${FNAME}
done
ffmpeg -i ${VIDEO_FNAME} -r ${FRAME_RATE} -f image2 -s ${IMAGE_SIZE}x${IMAGE_SIZE} \
${TARGET_DIR}/processed/${ACTION}/${FNAME}/image-%03d_${IMAGE_SIZE}x${IMAGE_SIZE}.png
done
done
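# For a hypothetical KTH input person01_walking_d1_uncomp.avi with IMAGE_SIZE=64, FNAME
# becomes person01_walking_d1 and the ffmpeg call above writes frames such as
#   ${TARGET_DIR}/processed/walking/person01_walking_d1/image-001_64x64.png
#   ${TARGET_DIR}/processed/walking/person01_walking_d1/image-002_64x64.png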
python video_prediction/datasets/kth_dataset.py ${TARGET_DIR}/processed ${TARGET_DIR} ${IMAGE_SIZE}
rm -rf ${TARGET_DIR}/raw
rm -rf ${TARGET_DIR}/processed
else
echo "Invalid dataset name: '$1' (choose from 'bair', 'kth')" >&2
exit 1
fi
echo "Succesfully finished downloadi\
ng and preprocessing dataset '$1'"
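# A sketch of possible invocations (the script's file name is not shown in this diff;
# download_and_preprocess_dataset.sh is an assumption based on the upstream SAVP layout):
#   bash download_and_preprocess_dataset.sh kth 128   # kth at 128x128 resolution
#   bash download_and_preprocess_dataset.sh bair      # bair; IMAGE_SIZE argument not allowed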
#!/usr/bin/env bash
# exit if any command fails
set -e
#if [ "$#" -eq 2 ]; then
# if [ $1 = "bair" ]; then
# echo "IMAGE_SIZE argument is only applicable to kth dataset" >&2
# exit 1
# fi
#elif [ "$#" -ne 1 ]; then
# echo "Usage: $0 DATASET_NAME [IMAGE_SIZE]" >&2
# exit 1
#fi
#if [ $1 = "bair" ]; then
# TARGET_DIR=./data/bair
# mkdir -p ${TARGET_DIR}
# TAR_FNAME=bair_robot_pushing_dataset_v0.tar
# URL=http://rail.eecs.berkeley.edu/datasets/${TAR_FNAME}
# echo "Downloading '$1' dataset (this takes a while)"
# #wget ${URL} -O ${TARGET_DIR}/${TAR_FNAME} Bing: on MacOS system , use curl instead of wget
# curl ${URL} -O ${TARGET_DIR}/${TAR_FNAME}
# tar -xvf ${TARGET_DIR}/${TAR_FNAME} --strip-components=1 -C ${TARGET_DIR}
# rm ${TARGET_DIR}/${TAR_FNAME}
# mkdir -p ${TARGET_DIR}/val
# # reserve a fraction of the training set for validation
# mv ${TARGET_DIR}/train/traj_256_to_511.tfrecords ${TARGET_DIR}/val/
#elif [ $1 = "kth" ]; then
# if [ "$#" -eq 2 ]; then
# IMAGE_SIZE=$2
# TARGET_DIR=./data/kth_${IMAGE_SIZE}
# else
# IMAGE_SIZE=64
# fi
# echo ${TARGET_DIR} ${IMAGE_SIZE}
# mkdir -p ${TARGET_DIR}
# mkdir -p ${TARGET_DIR}/raw
# echo "Downloading '$1' dataset (this takes a while)"
# TODO Bing: for save time just use walking, need to change back if all the data are needed
#for ACTION in walking jogging running boxing handwaving handclapping; do
# for ACTION in walking; do
# echo "Action: '$ACTION' "
# ZIP_FNAME=${ACTION}.zip
# URL=http://www.nada.kth.se/cvap/actions/${ZIP_FNAME}
# # wget ${URL} -O ${TARGET_DIR}/raw/${ZIP_FNAME}
# echo "Start downloading action '$ACTION' ULR '$URL' "
# curl ${URL} -O ${TARGET_DIR}/raw/${ZIP_FNAME}
# unzip ${TARGET_DIR}/raw/${ZIP_FNAME} -d ${TARGET_DIR}/raw/${ACTION}
# echo "Action '$ACTION' data download and unzip "
# done
# FRAME_RATE=25
# mkdir -p ${TARGET_DIR}/processed
# # download files with metadata specifying the subsequences
# TAR_FNAME=kth_meta.tar.gz
# URL=http://rail.eecs.berkeley.edu/models/savp/data/${TAR_FNAME}
# echo "Downloading '${TAR_FNAME}' ULR '$URL' "
# #wget ${URL} -O ${TARGET_DIR}/processed/${TAR_FNAME}
# curl ${URL} -O ${TARGET_DIR}/processed/${TAR_FNAME}
# tar -xzvf ${TARGET_DIR}/processed/${TAR_FNAME} --strip 1 -C ${TARGET_DIR}/processed
# convert the videos into sequence of downscaled images
# echo "Processing '$1' dataset"
# #TODO Bing, just use walking for test
# #for ACTION in walking jogging running boxing handwaving handclapping; do
# #Todo Bing: remove the comments below after testing
# for ACTION in walking; do
# for VIDEO_FNAME in ${TARGET_DIR}/raw/${ACTION}/*.avi; do
# FNAME=$(basename ${VIDEO_FNAME})
# FNAME=${FNAME%_uncomp.avi}
# echo "FNAME '$FNAME' "
# # sometimes the directory is not created, so try until it is
# while [ ! -d "${TARGET_DIR}/processed/${ACTION}/${FNAME}" ]; do
# mkdir -p ${TARGET_DIR}/processed/${ACTION}/${FNAME}
# done
# ffmpeg -i ${VIDEO_FNAME} -r ${FRAME_RATE} -f image2 -s ${IMAGE_SIZE}x${IMAGE_SIZE} \
# ${TARGET_DIR}/processed/${ACTION}/${FNAME}/image-%03d_${IMAGE_SIZE}x${IMAGE_SIZE}.png
# done
# done
# python video_prediction/datasets/kth_dataset.py ${TARGET_DIR}/processed ${TARGET_DIR} ${IMAGE_SIZE}
# rm -rf ${TARGET_DIR}/raw
# rm -rf ${TARGET_DIR}/processed
while [[ $# -gt 0 ]] # while there are still arguments to parse
do
key="$1"
case $key in
-d|--data)
DATA="$2"
shift
shift
;;
-i|--input_dir)
INPUT_DIR="$2"
shift
shift
;;
-o|--output_dir)
OUTPUT_DIR="$2"
shift
shift
;;
esac
done
echo "DATA = ${DATA} "
echo "OUTPUT_DIRECTORY = ${OUTPUT_DIR}"
if [ -d "$INPUT_DIR" ]; then
echo "INPUT DIRECTORY = ${INPUT_DIR}"
else
echo "INPUT DIRECTORY '$INPUT_DIR' DOES NOT EXIST"
exit 1
fi
if [ $DATA = "era5" ]; then
mkdir -p ${OUTPUT_DIR}
python video_prediction/datasets/era5_dataset.py $INPUT_DIR ${OUTPUT_DIR}
else
echo "dataset name: '$DATA' (choose from 'era5')" >&2
exit 1
fi
echo "Succesfully finished downloading and preprocessing dataset '$DATA' "
\ No newline at end of file
#!/usr/bin/env bash
# exit if any command fails
set -e
if [ "$#" -eq 2 ]; then
if [ $1 = "bair" ]; then
echo "IMAGE_SIZE argument is only applicable to kth dataset" >&2
exit 1
fi
elif [ "$#" -ne 1 ]; then
echo "Usage: $0 DATASET_NAME [IMAGE_SIZE]" >&2
exit 1
fi
if [ $1 = "bair" ]; then
TARGET_DIR=./data/bair
mkdir -p ${TARGET_DIR}
TAR_FNAME=bair_robot_pushing_dataset_v0.tar
URL=http://rail.eecs.berkeley.edu/datasets/${TAR_FNAME}
echo "Downloading '$1' dataset (this takes a while)"
#wget ${URL} -O ${TARGET_DIR}/${TAR_FNAME}  # on macOS, use curl instead of wget
curl ${URL} -o ${TARGET_DIR}/${TAR_FNAME}   # curl needs lowercase -o to write to the given path
tar -xvf ${TARGET_DIR}/${TAR_FNAME} --strip-components=1 -C ${TARGET_DIR}
rm ${TARGET_DIR}/${TAR_FNAME}
mkdir -p ${TARGET_DIR}/val
# reserve a fraction of the training set for validation
mv ${TARGET_DIR}/train/traj_256_to_511.tfrecords ${TARGET_DIR}/val/
elif [ $1 = "kth" ]; then
if [ "$#" -eq 2 ]; then
IMAGE_SIZE=$2
TARGET_DIR=./data/kth_${IMAGE_SIZE}
else
IMAGE_SIZE=64
TARGET_DIR=./data/kth
fi
echo ${TARGET_DIR} ${IMAGE_SIZE}
mkdir -p ${TARGET_DIR}
mkdir -p ${TARGET_DIR}/raw
echo "Downloading '$1' dataset (this takes a while)"
# TODO Bing: to save time, only a subset of actions is used; change back if all the data are needed
#for ACTION in walking jogging running boxing handwaving handclapping; do
# for ACTION in walking; do
# echo "Action: '$ACTION' "
# ZIP_FNAME=${ACTION}.zip
# URL=http://www.nada.kth.se/cvap/actions/${ZIP_FNAME}
# # wget ${URL} -O ${TARGET_DIR}/raw/${ZIP_FNAME}
# echo "Start downloading action '$ACTION' ULR '$URL' "
# curl ${URL} -O ${TARGET_DIR}/raw/${ZIP_FNAME}
# unzip ${TARGET_DIR}/raw/${ZIP_FNAME} -d ${TARGET_DIR}/raw/${ACTION}
# echo "Action '$ACTION' data download and unzip "
# done
FRAME_RATE=25
# mkdir -p ${TARGET_DIR}/processed
# # download files with metadata specifying the subsequences
# TAR_FNAME=kth_meta.tar.gz
# URL=http://rail.eecs.berkeley.edu/models/savp/data/${TAR_FNAME}
# echo "Downloading '${TAR_FNAME}' ULR '$URL' "
# #wget ${URL} -O ${TARGET_DIR}/processed/${TAR_FNAME}
# curl ${URL} -O ${TARGET_DIR}/processed/${TAR_FNAME}
# tar -xzvf ${TARGET_DIR}/processed/${TAR_FNAME} --strip 1 -C ${TARGET_DIR}/processed
# convert the videos into sequences of downscaled images
echo "Processing '$1' dataset"
#TODO Bing: only 'walking' is used for testing
#for ACTION in walking jogging running boxing handwaving handclapping; do
#TODO Bing: restore the full action list above after testing
for ACTION in walking; do
for VIDEO_FNAME in ${TARGET_DIR}/raw/${ACTION}/*.avi; do
FNAME=$(basename ${VIDEO_FNAME})
FNAME=${FNAME%_uncomp.avi}
echo "FNAME '$FNAME' "
# sometimes the directory is not created, so try until it is
while [ ! -d "${TARGET_DIR}/processed/${ACTION}/${FNAME}" ]; do
mkdir -p ${TARGET_DIR}/processed/${ACTION}/${FNAME}
done
ffmpeg -i ${VIDEO_FNAME} -r ${FRAME_RATE} -f image2 -s ${IMAGE_SIZE}x${IMAGE_SIZE} \
${TARGET_DIR}/processed/${ACTION}/${FNAME}/image-%03d_${IMAGE_SIZE}x${IMAGE_SIZE}.png
done
done
python video_prediction/datasets/kth_dataset.py ${TARGET_DIR}/processed ${TARGET_DIR} ${IMAGE_SIZE}
rm -rf ${TARGET_DIR}/raw
rm -rf ${TARGET_DIR}/processed
else
echo "Invalid dataset name: '$1' (choose from 'bair', 'kth')" >&2
exit 1
fi
echo "Succesfully finished downloading and preprocessing dataset '$1'"
#!/usr/bin/env bash
set -e
#
#MODEL=savp
##train_mode: end_to_end, pre_trained
#TRAIN_MODE=end_to_end
#EXP_NAME=era5_size_64_64_3_3t_norm
MODEL=$1
TRAIN_MODE=$2
EXP_NAME=$3
RETRAIN=1 # whether to reuse the existing end-to-end model or keep training: 1 means use the existing model, 0 means continue training
DATA_ETL_DIR=/p/scratch/deepacf/${USER}/
DATA_EXTRA_DIR=${DATA_ETL_DIR}/extractedData/${EXP_NAME}
DATA_PREPROCESS_DIR=${DATA_ETL_DIR}/preprocessedData/${EXP_NAME}
DATA_PREPROCESS_TF_DIR=./data/${EXP_NAME}
RESULTS_OUTPUT_DIR=./results_test_samples/${EXP_NAME}/${TRAIN_MODE}/
if [ "$MODEL" == "savp" ]; then
method_dir=ours_savp
elif [ "$MODEL" == "gan" ]; then
method_dir=ours_gan
elif [ "$MODEL" == "vae" ]; then
method_dir=ours_vae
else
echo "Invalid model: '$MODEL' (choose from 'savp', 'gan', 'vae')" >&2
exit 1
fi
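# Pitfall avoided here: [ $MODEL==savp ] hands test the single word "gan==savp", and a
# one-argument test is true for any non-empty string, so it would always match. Demo:
#   MODEL=gan
#   [ $MODEL==savp ] && echo "always prints"          # one-word test: non-empty string
#   [ "$MODEL" == "savp" ] && echo "never prints for gan"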
if [ "$TRAIN_MODE" == pre_trained ]; then
TRAIN_OUTPUT_DIR=./pretrained_models/kth/${method_dir}
else
TRAIN_OUTPUT_DIR=./logs/${EXP_NAME}/${TRAIN_MODE}
fi
CHECKPOINT_DIR=${TRAIN_OUTPUT_DIR}/${method_dir}
echo "===========================WORKFLOW SETUP===================="
echo "Model ${MODEL}"
echo "TRAIN MODE ${TRAIN_MODE}"
echo "Method_dir ${method_dir}"
echo "DATA_ETL_DIR ${DATA_ETL_DIR}"
echo "DATA_EXTRA_DIR ${DATA_EXTRA_DIR}"
echo "DATA_PREPROCESS_DIR ${DATA_PREPROCESS_DIR}"
echo "DATA_PREPROCESS_TF_DIR ${DATA_PREPROCESS_TF_DIR}"
echo "TRAIN_OUTPUT_DIR ${TRAIN_OUTPUT_DIR}"
echo "============================================================="
##############Data Preprocessing################
#To hkl data
if [ -d "$DATA_PREPROCESS_DIR" ]; then
echo "The Preprocessed Data (.hkl ) exist"
else
python ../workflow_video_prediction/DataPreprocess/benchmark/mpi_stager_v2_process_netCDF.py \
--input_dir ${DATA_EXTRA_DIR} --destination_dir ${DATA_PREPROCESS_DIR}
fi
#Change the .hkl data to .tfrecords files
if [ -d "$DATA_PREPROCESS_TF_DIR" ]
then
echo "Step2: The Preprocessed Data (tf.records) exist"
else
echo "Step2: start, hkl. files to tf.records"
python ./video_prediction/datasets/era5_dataset_v2.py --source_dir ${DATA_PREPROCESS_DIR}/splits \
--destination_dir ${DATA_PREPROCESS_TF_DIR}
echo "Step2: finish"
fi
#########Train##########################
if [ "$TRAIN_MODE" == "pre_trained" ]; then
echo "step3: Using kth pre_trained model"
elif [ "$TRAIN_MODE" == "end_to_end" ]; then
echo "step3: End-to-end training"
if [ "$RETRAIN" == 1 ]; then
echo "Using the existing end-to-end model"
else
echo "Step3: Training Starts "
python ./scripts/train_v2.py --input_dir $DATA_PREPROCESS_TF_DIR --dataset era5 \
--model ${MODEL} --model_hparams_dict hparams/kth/${method_dir}/model_hparams.json \
--output_dir ${TRAIN_OUTPUT_DIR} --checkpoint ${CHECKPOINT_DIR}
echo "Training ends "
fi
else
echo "TRAIN_MODE is end_to_end or pre_trained"
exit 1
fi
#########Generate results#################
echo "Step4: Postprocessing start"
python ./scripts/generate_transfer_learning_finetune.py --input_dir ${DATA_PREPROCESS_TF_DIR} \
--dataset_hparams sequence_length=20 --checkpoint ${CHECKPOINT_DIR} --mode test --results_dir ${RESULTS_OUTPUT_DIR} \
--batch_size 4 --dataset era5
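# A sketch of a possible invocation (positional arguments per the MODEL/TRAIN_MODE/EXP_NAME
# assignments above; the script file name is hypothetical, not shown in this diff):
#   bash run_workflow_era5.sh savp end_to_end era5_size_64_64_3_3t_norm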
#!/usr/bin/env bash
set -e
#
#MODEL=savp
##train_mode: end_to_end, pre_trained
#TRAIN_MODE=end_to_end
#EXP_NAME=era5_size_64_64_3_3t_norm
MODEL=$1
TRAIN_MODE=$2
EXP_NAME=$3
RETRAIN=1 # whether to reuse the existing end-to-end model or keep training: 1 means use the existing model, 0 means continue training
DATA_ETL_DIR=/p/scratch/deepacf/${USER}/
DATA_EXTRA_DIR=${DATA_ETL_DIR}/extractedData/${EXP_NAME}
DATA_PREPROCESS_DIR=${DATA_ETL_DIR}/preprocessedData/${EXP_NAME}
DATA_PREPROCESS_TF_DIR=./data/${EXP_NAME}
RESULTS_OUTPUT_DIR=./results_test_samples/${EXP_NAME}/${TRAIN_MODE}/
if [ "$MODEL" == "savp" ]; then
method_dir=ours_savp
elif [ "$MODEL" == "gan" ]; then
method_dir=ours_gan
elif [ "$MODEL" == "vae" ]; then
method_dir=ours_vae
else
echo "Invalid model: '$MODEL' (choose from 'savp', 'gan', 'vae')" >&2
exit 1
fi
if [ "$TRAIN_MODE" == pre_trained ]; then
TRAIN_OUTPUT_DIR=./pretrained_models/kth/${method_dir}
else
TRAIN_OUTPUT_DIR=./logs/${EXP_NAME}/${TRAIN_MODE}
fi
CHECKPOINT_DIR=${TRAIN_OUTPUT_DIR}/${method_dir}
echo "===========================WORKFLOW SETUP===================="
echo "Model ${MODEL}"
echo "TRAIN MODE ${TRAIN_MODE}"
echo "Method_dir ${method_dir}"
echo "DATA_ETL_DIR ${DATA_ETL_DIR}"
echo "DATA_EXTRA_DIR ${DATA_EXTRA_DIR}"
echo "DATA_PREPROCESS_DIR ${DATA_PREPROCESS_DIR}"
echo "DATA_PREPROCESS_TF_DIR ${DATA_PREPROCESS_TF_DIR}"
echo "TRAIN_OUTPUT_DIR ${TRAIN_OUTPUT_DIR}"
echo "============================================================="
##############Data Preprocessing################
#To hkl data
#if [ -d "$DATA_PREPROCESS_DIR" ]; then
# echo "The Preprocessed Data (.hkl ) exist"
#else
# python ../workflow_video_prediction/DataPreprocess/benchmark/mpi_stager_v2_process_netCDF.py \
# --input_dir ${DATA_EXTRA_DIR} --destination_dir ${DATA_PREPROCESS_DIR}
#fi
####Change the .hkl data to .tfrecords files
if [ -d "$DATA_PREPROCESS_TF_DIR" ]
then
echo "Step2: The Preprocessed Data (tf.records) exist"
else
echo "Step2: start, hkl. files to tf.records"
python ./video_prediction/datasets/era5_dataset_v2.py --source_dir ${DATA_PREPROCESS_DIR}/splits \
--destination_dir ${DATA_PREPROCESS_TF_DIR}
echo "Step2: finish"
fi
#########Train##########################
if [ "$TRAIN_MODE" == "pre_trained" ]; then
echo "step3: Using kth pre_trained model"
elif [ "$TRAIN_MODE" == "end_to_end" ]; then
echo "step3: End-to-end training"
if [ "$RETRAIN" == 1 ]; then
echo "Using the existing end-to-end model"
else
echo "Training Starts "
python ./scripts/train_v2.py --input_dir $DATA_PREPROCESS_TF_DIR --dataset era5 \
--model ${MODEL} --model_hparams_dict hparams/kth/${method_dir}/model_hparams.json \
--output_dir ${TRAIN_OUTPUT_DIR} --checkpoint ${CHECKPOINT_DIR}
echo "Training ends "
fi
else
echo "TRAIN_MODE is end_to_end or pre_trained"
exit 1
fi
#########Generate results#################
echo "Step4: Postprocessing start"
python ./scripts/generate_transfer_learning_finetune.py --input_dir ${DATA_PREPROCESS_TF_DIR} \
--dataset_hparams sequence_length=20 --checkpoint ${CHECKPOINT_DIR} --mode test --results_dir ${RESULTS_OUTPUT_DIR} \
--batch_size 4 --dataset era5
#!/usr/bin/env bash
set -e
#
#MODEL=savp
##train_mode: end_to_end, pre_trained
#TRAIN_MODE=end_to_end
#EXP_NAME=era5_size_64_64_3_3t_norm
MODEL=$1
TRAIN_MODE=$2
EXP_NAME=$3
RETRAIN=1 # whether to reuse the existing end-to-end model or keep training: 1 means use the existing model, 0 means continue training
DATA_ETL_DIR=/p/scratch/deepacf/${USER}/
DATA_EXTRA_DIR=${DATA_ETL_DIR}/extractedData/${EXP_NAME}
DATA_PREPROCESS_DIR=${DATA_ETL_DIR}/preprocessedData/${EXP_NAME}
DATA_PREPROCESS_TF_DIR=./data/${EXP_NAME}
RESULTS_OUTPUT_DIR=./results_test_samples/${EXP_NAME}/${TRAIN_MODE}/
if [ "$MODEL" == "savp" ]; then
method_dir=ours_savp
elif [ "$MODEL" == "gan" ]; then
method_dir=ours_gan
elif [ "$MODEL" == "vae" ]; then
method_dir=ours_vae
else
echo "Invalid model: '$MODEL' (choose from 'savp', 'gan', 'vae')" >&2
exit 1
fi
if [ "$TRAIN_MODE" == pre_trained ]; then
TRAIN_OUTPUT_DIR=./pretrained_models/kth/${method_dir}
else
TRAIN_OUTPUT_DIR=./logs/${EXP_NAME}/${TRAIN_MODE}
fi
CHECKPOINT_DIR=${TRAIN_OUTPUT_DIR}/${method_dir}
echo "===========================WORKFLOW SETUP===================="
echo "Model ${MODEL}"
echo "TRAIN MODE ${TRAIN_MODE}"
echo "Method_dir ${method_dir}"
echo "DATA_ETL_DIR ${DATA_ETL_DIR}"
echo "DATA_EXTRA_DIR ${DATA_EXTRA_DIR}"
echo "DATA_PREPROCESS_DIR ${DATA_PREPROCESS_DIR}"
echo "DATA_PREPROCESS_TF_DIR ${DATA_PREPROCESS_TF_DIR}"
echo "TRAIN_OUTPUT_DIR ${TRAIN_OUTPUT_DIR}"
echo "============================================================="
##############Data Preprocessing################
#To hkl data
#if [ -d "$DATA_PREPROCESS_DIR" ]; then
# echo "The Preprocessed Data (.hkl ) exist"
#else
# python ../workflow_video_prediction/DataPreprocess/benchmark/mpi_stager_v2_process_netCDF.py \
# --input_dir ${DATA_EXTRA_DIR} --destination_dir ${DATA_PREPROCESS_DIR}
#fi
####Change the .hkl data to .tfrecords files
if [ -d "$DATA_PREPROCESS_TF_DIR" ]
then
echo "Step2: The Preprocessed Data (tf.records) exist"
else
echo "Step2: start, hkl. files to tf.records"
python ./video_prediction/datasets/era5_dataset_v2.py --source_dir ${DATA_PREPROCESS_DIR}/splits \
--destination_dir ${DATA_PREPROCESS_TF_DIR}
echo "Step2: finish"
fi
#########Train##########################
if [ "$TRAIN_MODE" == "pre_trained" ]; then
echo "step3: Using kth pre_trained model"
elif [ "$TRAIN_MODE" == "end_to_end" ]; then
echo "step3: End-to-end training"
if [ "$RETRAIN" == 1 ]; then
echo "Using the existing end-to-end model"
else
echo "Training Starts "
python ./scripts/train_v2.py --input_dir $DATA_PREPROCESS_TF_DIR --dataset era5 \
--model ${MODEL} --model_hparams_dict hparams/kth/${method_dir}/model_hparams.json \
--output_dir ${TRAIN_OUTPUT_DIR} --checkpoint ${CHECKPOINT_DIR}
echo "Training ends "
fi
else
echo "TRAIN_MODE is end_to_end or pre_trained"
exit 1
fi
#########Generate results#################
echo "Step4: Postprocessing start"
python ./scripts/generate_transfer_learning_finetune.py --input_dir ${DATA_PREPROCESS_TF_DIR} \
--dataset_hparams sequence_length=20 --checkpoint ${CHECKPOINT_DIR} --mode test --results_dir ${RESULTS_OUTPUT_DIR} \
--batch_size 4 --dataset era5
These are the presentation materials for the VP group.
## 2020-03-01 - 2020-03-31
https://docs.google.com/presentation/d/18EJKBJJ2LHI7uNU_l8s_Cm-aGZhw9tkoQ8BxqYZfkWk/edit#slide=id.g71f805bc32_0_80