esde / machine-learning / AMBS / Commits

Commit 334a8911
Authored 4 years ago by stadtler1

Added more detailed timing, and pkl files that save the timing information.
Parent: c3beec2c
No related branches, tags, or merge requests found.

Pipeline #45950 failed 4 years ago (stages: build, test, deploy)
Changes: 1 · Pipelines: 1
Showing 1 changed file with 32 additions and 3 deletions:

video_prediction_savp/scripts/train_dummy.py  (+32, −3)
...
@@ -193,8 +193,20 @@ def save_results_to_pkl(train_losses,val_losses, output_dir):
     with open(os.path.join(output_dir,"val_losses.pkl"),"wb") as f:
         pkl.dump(val_losses,f)

+# +++ Scarlet 20200917
+def save_timing_to_pkl(total_time, training_time, time_per_iteration, output_dir):
+    with open(os.path.join(output_dir,"timing_total_time.pkl"),"wb") as f:
+        pkl.dump(total_time,f)
+    with open(os.path.join(output_dir,"timing_training_time.pkl"),"wb") as f:
+        pkl.dump(training_time,f)
+    with open(os.path.join(output_dir,"timing_per_iteration_time.pkl"),"wb") as f:
+        pkl.dump(time_per_iteration,f)
+# --- Scarlet 20200917
+
 def main():
+    # +++ Scarlet 20200917
+    timeit_start_total_time = time.time()
+    # --- Scarlet 20200917
     parser = argparse.ArgumentParser()
     parser.add_argument("--input_dir", type=str, required=True, help="either a directory containing subdirectories "
...
@@ -273,7 +285,15 @@ def main():
     print("number of examples per epoch:", num_examples_per_epoch)
     steps_per_epoch = int(num_examples_per_epoch/batch_size)
     #the total number of steps equals the number of steps per epoch multiplied by the number of epochs
-    total_steps = steps_per_epoch * max_epochs
+    # Please comment this line back in:
+    #total_steps = steps_per_epoch * max_epochs
+    #+++++ Scarlet Booster testing ONLY!
+    total_steps = 1
+    #----- Scarlet
     global_step = tf.train.get_or_create_global_step()
     #mock total_steps only for fast debugging
     #total_steps = 10
...
@@ -292,6 +312,9 @@ def main():
         # step is relative to the start_step
         train_losses = []
         val_losses = []
+        # +++ Scarlet 20200917
+        time_per_iteration = []
+        # --- Scarlet 20200917
         run_start_time = time.time()
         for step in range(start_step, total_steps):
             #global_step = sess.run(global_step)
...
@@ -367,6 +390,7 @@ def main():
             timeit_end = time.time()
             # --- Scarlet 20200813
             print("time needed for this step", timeit_end - timeit_start, 's')
+            time_per_iteration.append(timeit_end - timeit_start)
             if step % 20 == 0:
                 # Save the pickle file and plot inside the loop, in case the training process cannot finish before the job ends.
                 save_results_to_pkl(train_losses, val_losses, args.output_dir)
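
The addition above completes a simple wall-clock timing pattern: each loop iteration is bracketed by time.time() calls and the difference is appended to time_per_iteration. A self-contained sketch of the same pattern, with a dummy workload standing in for the training step:

    import time

    time_per_iteration = []
    for step in range(5):
        timeit_start = time.time()
        sum(i * i for i in range(100000))  # dummy workload instead of the session run
        timeit_end = time.time()
        time_per_iteration.append(timeit_end - timeit_start)
        print("time needed for this step", timeit_end - timeit_start, "s")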
...
@@ -385,6 +409,11 @@ def main():
     # +++ Scarlet 20200814
     print("Total training time:", train_time/60., "min")
     # --- Scarlet 20200814
+    # +++ Scarlet 20200917
+    total_run_time = time.time() - timeit_start_total_time
+    print("Total run time:", total_run_time/60., "min")
+    save_timing_to_pkl(total_run_time, train_time, time_per_iteration, args.output_dir)
+    # --- Scarlet 20200917

 if __name__ == '__main__':
     main()
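
Since both the total run time and the pure training time are saved, the overhead outside the training loop (graph setup, data loading, final I/O) can be estimated after the fact. A sketch, assuming total_time, training_time and time_per_iteration were loaded from the pickles as shown after the first hunk:

    # assumes the three values were unpickled as in the loader sketch above
    overhead = total_time - training_time
    mean_step = sum(time_per_iteration) / max(len(time_per_iteration), 1)
    print("overhead outside the training loop: %.1f min" % (overhead / 60.0))
    print("mean time per iteration: %.3f s" % mean_step)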