Commit 4138319f authored by Bing Gong

fix bug for readme and bash

parent 450d90a4
@@ -13,7 +13,7 @@ This project need to work with [Workflow_parallel_frame_prediction project](http
 - Clone this repo:
 ```bash
 git clone -b master https://gitlab.version.fz-juelich.de/gong1/video_prediction_savp.git
-git clone -b master https://gitlab.version.fz-juelich.de/gong1/workflow_parallel_frame_prediction
+git clone -b master https://gitlab.version.fz-juelich.de/gong1/workflow_parallel_frame_prediction.git
 cd video_prediction_savp
 ```
 - Install TensorFlow >= 1.9 and dependencies from http://tensorflow.org/
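For orientation, here is a minimal shell sketch of the setup steps the README hunk above describes, using the corrected clone URL; the final TensorFlow version check is an assumption added for illustration and is not part of the commit:

```bash
# Clone both repositories (URLs as given in the README) and enter the SAVP repo
git clone -b master https://gitlab.version.fz-juelich.de/gong1/video_prediction_savp.git
git clone -b master https://gitlab.version.fz-juelich.de/gong1/workflow_parallel_frame_prediction.git
cd video_prediction_savp

# Assumed sanity check (not in the commit): confirm TensorFlow >= 1.9 is installed
python -c "import tensorflow as tf; print(tf.__version__)"
```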
@@ -9,6 +9,7 @@ set -e
 MODEL=$1
 TRAIN_MODE=$2
 EXP_NAME=$3
+RETRAIN=1  # whether to continue training the model or use the existing end-to-end model; 0 means continue training, 1 means use the existing one
 DATA_ETL_DIR=/p/scratch/deepacf/${USER}/
 DATA_EXTRA_DIR=${DATA_ETL_DIR}/extractedData/${EXP_NAME}
 DATA_PREPROCESS_DIR=${DATA_ETL_DIR}/preprocessedData/${EXP_NAME}
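`RETRAIN` is hard-coded at the top of the script in this hunk. As a design note, a hedged alternative sketch (not part of the commit) would read the flag from the environment instead, so the behaviour can be switched without editing the file:

```bash
# Assumed variant, not in the commit: take RETRAIN from the environment when set,
# otherwise fall back to 1 (reuse the existing end-to-end model).
RETRAIN=${RETRAIN:-1}
```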
@@ -69,11 +70,16 @@ fi
 if [ "$TRAIN_MODE" == "pre_trained" ]; then
     echo "step3: Using kth pre_trained model"
 elif [ "$TRAIN_MODE" == "end_to_end" ]; then
+    echo "step3: End-to-end training"
+    if [ "$RETRAIN" == 1 ]; then
+        echo "Using the existing end-to-end model"
+    else
     echo "Step3: Training Starts "
     python ./scripts/train_v2.py --input_dir $DATA_PREPROCESS_TF_DIR --dataset era5 \
         --model ${MODEL} --model_hparams_dict hparams/kth/${method_dir}/model_hparams.json \
-        --output_dir ${TRAIN_OUTPUT_DIR}
+        --output_dir ${TRAIN_OUTPUT_DIR} --checkpoint ${CHECKPOINT_DIR_DIR}
     echo "Training ends "
+    fi
 else
     echo "TRAIN_MODE is end_to_end or pre_trained"
     exit 1
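Stripped of diff markers, the step-3 branch that this hunk produces looks roughly like the sketch below; the variables (`TRAIN_MODE`, `RETRAIN`, `DATA_PREPROCESS_TF_DIR`, `method_dir`, `TRAIN_OUTPUT_DIR`, `CHECKPOINT_DIR_DIR`) are defined earlier in the script and are reproduced here exactly as they appear in the hunk:

```bash
if [ "$TRAIN_MODE" == "pre_trained" ]; then
    echo "step3: Using kth pre_trained model"
elif [ "$TRAIN_MODE" == "end_to_end" ]; then
    echo "step3: End-to-end training"
    if [ "$RETRAIN" == 1 ]; then
        # Reuse the existing end-to-end model; no training is run
        echo "Using the existing end-to-end model"
    else
        # Continue training from the previous checkpoint
        echo "Step3: Training Starts "
        python ./scripts/train_v2.py --input_dir $DATA_PREPROCESS_TF_DIR --dataset era5 \
            --model ${MODEL} --model_hparams_dict hparams/kth/${method_dir}/model_hparams.json \
            --output_dir ${TRAIN_OUTPUT_DIR} --checkpoint ${CHECKPOINT_DIR_DIR}
        echo "Training ends "
    fi
else
    echo "TRAIN_MODE is end_to_end or pre_trained"
    exit 1
fi
```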
@@ -9,6 +9,8 @@ set -e
 MODEL=$1
 TRAIN_MODE=$2
 EXP_NAME=$3
+RETRAIN=1  # whether to continue training the model or use the existing end-to-end model; 0 means continue training, 1 means use the existing one
+DATA_ETL_DIR=/p/scratch/deepacf/${USER}/
 DATA_ETL_DIR=/p/scratch/deepacf/${USER}/
 DATA_EXTRA_DIR=${DATA_ETL_DIR}/extractedData/${EXP_NAME}
 DATA_PREPROCESS_DIR=${DATA_ETL_DIR}/preprocessedData/${EXP_NAME}
@@ -69,11 +71,16 @@ fi
 if [ "$TRAIN_MODE" == "pre_trained" ]; then
     echo "step3: Using kth pre_trained model"
 elif [ "$TRAIN_MODE" == "end_to_end" ]; then
-    echo "Step3: Training Starts "
+    echo "step3: End-to-end training"
+    if [ "$RETRAIN" == 1 ]; then
+        echo "Using the existing end-to-end model"
+    else
+        echo "Training Starts "
     python ./scripts/train_v2.py --input_dir $DATA_PREPROCESS_TF_DIR --dataset era5 \
         --model ${MODEL} --model_hparams_dict hparams/kth/${method_dir}/model_hparams.json \
-        --output_dir ${TRAIN_OUTPUT_DIR}
+        --output_dir ${TRAIN_OUTPUT_DIR} --checkpoint ${CHECKPOINT_DIR}
     echo "Training ends "
+    fi
 else
     echo "TRAIN_MODE is end_to_end or pre_trained"
     exit 1
@@ -82,5 +89,5 @@ fi
 #########Generate results#################
 echo "Step4: Postprocessing start"
 python ./scripts/generate_transfer_learning_finetune.py --input_dir ${DATA_PREPROCESS_TF_DIR} \
-    --dataset_hparams sequence_length=20 --checkpoint ${CHECKPOINT_DIR_DIR} --mode test --results_dir ${RESULTS_OUTPUT_DIR} \
+    --dataset_hparams sequence_length=20 --checkpoint ${CHECKPOINT_DIR} --mode test --results_dir ${RESULTS_OUTPUT_DIR} \
     --batch_size 4 --dataset era5
\ No newline at end of file
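Both patched scripts are driven by the three positional arguments read at the top. A hedged invocation sketch follows; the script filename and the model name are placeholders that do not appear in this diff:

```bash
# Hypothetical invocation (script path and model name are placeholders, not from the diff):
#   $1 = MODEL, $2 = TRAIN_MODE ("pre_trained" or "end_to_end"), $3 = EXP_NAME
bash ./run_workflow_era5.sh savp end_to_end era5_experiment_1

# RETRAIN is hard-coded inside the scripts: 1 reuses the existing end-to-end model,
# 0 continues training and passes --checkpoint to train_v2.py.
```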