Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
AMBS
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Wiki
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Container registry
Model registry
Operate
Environments
Monitor
Incidents
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
esde
machine-learning
AMBS
Commits
d096cf34
Commit
d096cf34
authored
5 years ago
by
Bing Gong
Browse files
Options
Downloads
Patches
Plain Diff
add bash folder
parent
d28660d7
No related branches found
No related tags found
No related merge requests found
Changes
2
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
bash/download_and_preprocess_dataset_era5.sh
+125
-0
125 additions, 0 deletions
bash/download_and_preprocess_dataset_era5.sh
bash/workflow_era5.sh
+56
-0
56 additions, 0 deletions
bash/workflow_era5.sh
with
181 additions
and
0 deletions
bash/download_and_preprocess_dataset_era5.sh
0 → 100644
+
125
−
0
View file @
d096cf34
#!/usr/bin/env bash
# Download and preprocess the ERA5 dataset.
# Exit if any command fails; also fail a pipeline when any stage fails.
set -e -o pipefail
#if [ "$#" -eq 2 ]; then
# if [ $1 = "bair" ]; then
# echo "IMAGE_SIZE argument is only applicable to kth dataset" >&2
# exit 1
# fi
#elif [ "$#" -ne 1 ]; then
# echo "Usage: $0 DATASET_NAME [IMAGE_SIZE]" >&2
# exit 1
#fi
#if [ $1 = "bair" ]; then
# TARGET_DIR=./data/bair
# mkdir -p ${TARGET_DIR}
# TAR_FNAME=bair_robot_pushing_dataset_v0.tar
# URL=http://rail.eecs.berkeley.edu/datasets/${TAR_FNAME}
# echo "Downloading '$1' dataset (this takes a while)"
# #wget ${URL} -O ${TARGET_DIR}/${TAR_FNAME} Bing: on MacOS system , use curl instead of wget
# curl ${URL} -O ${TARGET_DIR}/${TAR_FNAME}
# tar -xvf ${TARGET_DIR}/${TAR_FNAME} --strip-components=1 -C ${TARGET_DIR}
# rm ${TARGET_DIR}/${TAR_FNAME}
# mkdir -p ${TARGET_DIR}/val
# # reserve a fraction of the training set for validation
# mv ${TARGET_DIR}/train/traj_256_to_511.tfrecords ${TARGET_DIR}/val/
#elif [ $1 = "kth" ]; then
# if [ "$#" -eq 2 ]; then
# IMAGE_SIZE=$2
# TARGET_DIR=./data/kth_${IMAGE_SIZE}
# else
# IMAGE_SIZE=64
# fi
# echo ${TARGET_DIR} ${IMAGE_SIZE}
# mkdir -p ${TARGET_DIR}
# mkdir -p ${TARGET_DIR}/raw
# echo "Downloading '$1' dataset (this takes a while)"
# TODO Bing: for save time just use walking, need to change back if all the data are needed
#for ACTION in walking jogging running boxing handwaving handclapping; do
# for ACTION in walking; do
# echo "Action: '$ACTION' "
# ZIP_FNAME=${ACTION}.zip
# URL=http://www.nada.kth.se/cvap/actions/${ZIP_FNAME}
# # wget ${URL} -O ${TARGET_DIR}/raw/${ZIP_FNAME}
# echo "Start downloading action '$ACTION' ULR '$URL' "
# curl ${URL} -O ${TARGET_DIR}/raw/${ZIP_FNAME}
# unzip ${TARGET_DIR}/raw/${ZIP_FNAME} -d ${TARGET_DIR}/raw/${ACTION}
# echo "Action '$ACTION' data download and unzip "
# done
# FRAME_RATE=25
# mkdir -p ${TARGET_DIR}/processed
# # download files with metadata specifying the subsequences
# TAR_FNAME=kth_meta.tar.gz
# URL=http://rail.eecs.berkeley.edu/models/savp/data/${TAR_FNAME}
# echo "Downloading '${TAR_FNAME}' ULR '$URL' "
# #wget ${URL} -O ${TARGET_DIR}/processed/${TAR_FNAME}
# curl ${URL} -O ${TARGET_DIR}/processed/${TAR_FNAME}
# tar -xzvf ${TARGET_DIR}/processed/${TAR_FNAME} --strip 1 -C ${TARGET_DIR}/processed
# convert the videos into sequence of downscaled images
# echo "Processing '$1' dataset"
# #TODO Bing, just use walking for test
# #for ACTION in walking jogging running boxing handwaving handclapping; do
# #Todo Bing: remove the comments below after testing
# for ACTION in walking; do
# for VIDEO_FNAME in ${TARGET_DIR}/raw/${ACTION}/*.avi; do
# FNAME=$(basename ${VIDEO_FNAME})
# FNAME=${FNAME%_uncomp.avi}
# echo "FNAME '$FNAME' "
# # sometimes the directory is not created, so try until it is
# while [ ! -d "${TARGET_DIR}/processed/${ACTION}/${FNAME}" ]; do
# mkdir -p ${TARGET_DIR}/processed/${ACTION}/${FNAME}
# done
# ffmpeg -i ${VIDEO_FNAME} -r ${FRAME_RATE} -f image2 -s ${IMAGE_SIZE}x${IMAGE_SIZE} \
# ${TARGET_DIR}/processed/${ACTION}/${FNAME}/image-%03d_${IMAGE_SIZE}x${IMAGE_SIZE}.png
# done
# done
# python video_prediction/datasets/kth_dataset.py ${TARGET_DIR}/processed ${TARGET_DIR} ${IMAGE_SIZE}
# rm -rf ${TARGET_DIR}/raw
# rm -rf ${TARGET_DIR}/processed
# Parse the command-line options into global configuration variables:
#   -d|--data        dataset name            -> DATA
#   -i|--input_dir   raw dataset directory   -> INPUT_DIR
#   -o|--output_dir  preprocessed output dir -> OUTPUT_DIR
parse_args() {
  while [[ $# -gt 0 ]]; do   # while the number of remaining arguments is greater than 0
    case "$1" in
      -d|--data)
        DATA="$2"
        shift
        shift
        ;;
      -i|--input_dir)
        INPUT_DIR="$2"
        shift
        shift
        ;;
      -o|--output_dir)
        OUTPUT_DIR="$2"
        shift
        shift
        ;;
      *)
        # The original loop never shifted an unrecognized argument, so any
        # stray argument made it spin forever; fail fast instead.
        echo "Unknown option: $1" >&2
        exit 1
        ;;
    esac
  done
}
parse_args "$@"
# Report the parsed configuration.
echo "DATA = ${DATA}"
echo "OUTPUT_DIRECTORY = ${OUTPUT_DIR}"
# Verify the input directory exists. The original used an unquoted
# [ -d $INPUT_DIR ], which is TRUE when INPUT_DIR is empty/unset
# ([ -d ] tests the non-empty string "-d"), silently accepting a
# missing directory; quoting restores the intended check.
if [ -d "${INPUT_DIR}" ]; then
    echo "INPUT DIRECTORY = ${INPUT_DIR}"
else
    echo "INPUT DIRECTORY '$INPUT_DIR' DOES NOT EXIST"
    exit 1
fi
# Dispatch on the dataset name; only 'era5' is currently supported.
# DATA is quoted so an unset value yields the error branch instead of a
# '[: =: unary operator expected' test failure.
if [ "${DATA}" = "era5" ]; then
    mkdir -p "${OUTPUT_DIR}"
    python3 video_prediction/datasets/era5_dataset.py "${INPUT_DIR}" "${OUTPUT_DIR}"
else
    echo "dataset name: '$DATA' (choose from 'era5')" >&2
    exit 1
fi
# (typo fix: was "Succesfully")
echo "Successfully finished downloading and preprocessing dataset '$DATA' "
This diff is collapsed.
Click to expand it.
bash/workflow_era5.sh
0 → 100755
+
56
−
0
View file @
d096cf34
#!/usr/bin/env bash
# ERA5 video-prediction workflow: preprocess -> train -> generate results.
# Exit immediately if any command fails.
set -e

# Model to train/evaluate; one of: savp, gan, vae.
model="savp"
# train_mode: end_to_end, pre_trained, fine_tune
train_mode="end_to_end"
# NOTE(review): 'False' is a literal string (shell has no booleans) and this
# variable is not referenced elsewhere in this script -- confirm it is needed.
is_pretrain=False
# Experiment name; used to derive the data/log/result directory paths below.
exp_name="era5_size_64_64_3_3t_norm"
# Map a model name to its method directory; prints the directory on stdout
# and exits non-zero for an unknown model (set -e then aborts the workflow
# when the command substitution below fails).
map_model_to_dir() {
  case "$1" in
    savp) echo "ours_savp" ;;
    gan)  echo "ours_gan" ;;
    vae)  echo "ours_vae" ;;
    *)
      # Bug fix: the original used '2>&1', which sent this error message to
      # stdout; '>&2' routes it to stderr as intended.
      echo "model does not exist" >&2
      exit 1
      ;;
  esac
}
method_dir="$(map_model_to_dir "$model")"
# Derived workflow paths, all relative to the current working directory.
raw_dataset_input=./splits/${exp_name}                               # raw ERA5 splits (preprocessing input)
prep_data_input=./data/${exp_name}                                   # preprocessed .tfrecords
train_output=./logs/${exp_name}                                      # training checkpoints/logs
results_output=./results_test_samples/${exp_name}/${method_dir}      # generated test samples
############## Data Preprocessing ################
# Parallel preprocessing of the raw netCDF files on HPC (MPI staging).
python ../workflow_video_prediction/DataPreprocess/benchmark/mpi_stager_v2_process_netCDF.py
# On a local machine, run a serial equivalent instead:
#python ...
# Convert the .hkl data produced above into .tfrecords.
python ./video_prediction/datasets/era5_dataset_v2.py "${raw_dataset_input}" "${prep_data_input}"
######### Train ##########################
# NOTE(review): --output_dir hard-codes 'era5_64_64_3_3t_norm' while the
# generation step reads its checkpoint from ${train_output}
# (./logs/era5_size_64_64_3_3t_norm) -- these paths look inconsistent; confirm.
python ./scripts/train_v2.py --input_dir "${prep_data_input}" --dataset era5 \
    --model savp --model_hparams_dict "hparams/kth/${method_dir}/model_hparams.json" \
    --output_dir "logs/era5_64_64_3_3t_norm/${train_mode}/${method_dir}"
# Bug fix: the original left a trailing '\' before the commented option below,
# which line-joined the comment onto the command. To fine-tune from a
# pretrained model, append to the command above:
#--checkpoint pretrained_models/kth/ours_savp
######### Generate results #################
# Run the trained model on the test split and write generated samples.
python ./scripts/generate_transfer_learning_finetune.py --input_dir "${prep_data_input}" \
    --dataset_hparams sequence_length=20 --checkpoint "${train_output}" \
    --mode test --results_dir "${results_output}" \
    --batch_size 4 --dataset era5
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment