From 4138319f0fc81bbede0021ecfd93b0224cadeefc Mon Sep 17 00:00:00 2001
From: Bing Gong <gongbing1112@gmail.com>
Date: Tue, 17 Mar 2020 15:14:46 +0100
Subject: [PATCH] Fix README clone URL and bash workflow scripts

---
 README.md                   |  2 +-
 bash/workflow_era5.sh       | 16 +++++++++++-----
 bash/workflow_era5_macOS.sh | 19 +++++++++++++------
 3 files changed, 25 insertions(+), 12 deletions(-)

diff --git a/README.md b/README.md
index a99357c3..ecb63787 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ This project need to work with [Workflow_parallel_frame_prediction project](http
 - Clone this repo:
 ```bash
 git clone -b master https://gitlab.version.fz-juelich.de/gong1/video_prediction_savp.git
-git clone -b master https://gitlab.version.fz-juelich.de/gong1/workflow_parallel_frame_prediction
+git clone -b master https://gitlab.version.fz-juelich.de/gong1/workflow_parallel_frame_prediction.git
 cd video_prediction_savp
 ```
 - Install TensorFlow >= 1.9 and dependencies from http://tensorflow.org/
diff --git a/bash/workflow_era5.sh b/bash/workflow_era5.sh
index e343d83f..01d16bfd 100755
--- a/bash/workflow_era5.sh
+++ b/bash/workflow_era5.sh
@@ -9,6 +9,7 @@ set -e
 MODEL=$1
 TRAIN_MODE=$2
 EXP_NAME=$3
+RETRAIN=1 # whether to retrain: 0 means continue training the model, 1 means use the existing end-to-end model
 DATA_ETL_DIR=/p/scratch/deepacf/${USER}/
 DATA_EXTRA_DIR=${DATA_ETL_DIR}/extractedData/${EXP_NAME}
 DATA_PREPROCESS_DIR=${DATA_ETL_DIR}/preprocessedData/${EXP_NAME}
@@ -69,11 +70,16 @@ fi
 if [ "$TRAIN_MODE" == "pre_trained" ]; then
     echo "step3: Using kth pre_trained model"
 elif [ "$TRAIN_MODE" == "end_to_end" ]; then
-    echo "Step3: Training Starts "
-    python ./scripts/train_v2.py --input_dir $DATA_PREPROCESS_TF_DIR --dataset era5  \
-    --model ${MODEL} --model_hparams_dict hparams/kth/${method_dir}/model_hparams.json \
-    --output_dir ${TRAIN_OUTPUT_DIR}
-    echo "Training ends "
+    echo "step3: End-to-end training"
+    if [ "$RETRAIN" == 1 ]; then
+        echo "Using the existing end-to-end model"
+    else
+        echo "Training Starts "
+        python ./scripts/train_v2.py --input_dir $DATA_PREPROCESS_TF_DIR --dataset era5  \
+        --model ${MODEL} --model_hparams_dict hparams/kth/${method_dir}/model_hparams.json \
+        --output_dir ${TRAIN_OUTPUT_DIR} --checkpoint ${CHECKPOINT_DIR}
+        echo "Training ends "
+    fi
 else
     echo "TRAIN_MODE is end_to_end or pre_trained"
     exit 1
diff --git a/bash/workflow_era5_macOS.sh b/bash/workflow_era5_macOS.sh
index baa9f953..78b5101d 100755
--- a/bash/workflow_era5_macOS.sh
+++ b/bash/workflow_era5_macOS.sh
@@ -9,6 +9,7 @@ set -e
 MODEL=$1
 TRAIN_MODE=$2
 EXP_NAME=$3
+RETRAIN=1 # whether to retrain: 0 means continue training the model, 1 means use the existing end-to-end model
 DATA_ETL_DIR=/p/scratch/deepacf/${USER}/
 DATA_EXTRA_DIR=${DATA_ETL_DIR}/extractedData/${EXP_NAME}
 DATA_PREPROCESS_DIR=${DATA_ETL_DIR}/preprocessedData/${EXP_NAME}
@@ -69,11 +70,16 @@ fi
 if [ "$TRAIN_MODE" == "pre_trained" ]; then
     echo "step3: Using kth pre_trained model"
 elif [ "$TRAIN_MODE" == "end_to_end" ]; then
-    echo "Step3: Training Starts "
-    python ./scripts/train_v2.py --input_dir $DATA_PREPROCESS_TF_DIR --dataset era5  \
-    --model ${MODEL} --model_hparams_dict hparams/kth/${method_dir}/model_hparams.json \
-    --output_dir ${TRAIN_OUTPUT_DIR}
-    echo "Training ends "
+    echo "step3: End-to-end training"
+    if [ "$RETRAIN" == 1 ]; then
+        echo "Using the existing end-to-end model"
+    else
+        echo "Training Starts "
+        python ./scripts/train_v2.py --input_dir $DATA_PREPROCESS_TF_DIR --dataset era5  \
+        --model ${MODEL} --model_hparams_dict hparams/kth/${method_dir}/model_hparams.json \
+        --output_dir ${TRAIN_OUTPUT_DIR} --checkpoint ${CHECKPOINT_DIR}
+        echo "Training ends "
+    fi
 else
     echo "TRAIN_MODE is end_to_end or pre_trained"
     exit 1
@@ -82,5 +89,5 @@ fi
 #########Generate results#################
 echo "Step4: Postprocessing start"
 python ./scripts/generate_transfer_learning_finetune.py --input_dir ${DATA_PREPROCESS_TF_DIR} \
---dataset_hparams sequence_length=20 --checkpoint ${CHECKPOINT_DIR_DIR} --mode test --results_dir ${RESULTS_OUTPUT_DIR} \
+--dataset_hparams sequence_length=20 --checkpoint ${CHECKPOINT_DIR} --mode test --results_dir ${RESULTS_OUTPUT_DIR} \
 --batch_size 4 --dataset era5
\ No newline at end of file
-- 
GitLab