WIP: add some training info
Commit 95f64c4f02 (parent b0983e4d76)
@@ -86,7 +86,7 @@ lambda_adv: 4.0 # Loss balancing coefficient.
 batch_size: 6               # Batch size.
 batch_max_steps: 25500      # Length of each audio in batch. Make sure dividable by hop_size.
 pin_memory: true            # Whether to pin memory in Pytorch DataLoader.
-num_workers: 0              # Number of workers in Pytorch DataLoader.
+num_workers: 4              # Number of workers in Pytorch DataLoader.
 remove_short_samples: true  # Whether to remove samples the length of which are less than batch_max_steps.
 allow_cache: true           # Whether to allow cache in dataset. If true, it requires cpu memory.
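The hunk above only touches DataLoader-related keys. As a rough, self-contained sketch of how such keys are typically consumed (the repo's real audio dataset and collator classes are not visible in this diff, so a dummy TensorDataset stands in):

```python
import torch
from torch.utils.data import DataLoader, TensorDataset

config = {                     # values copied from the hunk above
    "batch_size": 6,
    "batch_max_steps": 25500,
    "pin_memory": True,
    "num_workers": 4,          # raised from 0 in this commit
}

# Dummy dataset: 64 random "audio" tensors of batch_max_steps samples each.
dataset = TensorDataset(torch.randn(64, config["batch_max_steps"]))

loader = DataLoader(
    dataset,
    batch_size=config["batch_size"],
    num_workers=config["num_workers"],   # worker processes for parallel loading
    pin_memory=config["pin_memory"],     # page-locked memory for faster host-to-GPU copies
    shuffle=True,
)

for (batch,) in loader:
    print(batch.shape)   # torch.Size([6, 25500]) for full batches
    break
```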
@@ -76,6 +76,8 @@ class Trainer(object):
         else:
             max_iteration = self.stop_trigger.period

+        p = tqdm.tqdm()
+
         while True:
             self.observation = {}
             # set observation as the report target
@@ -84,12 +86,13 @@ class Trainer(object):
             # updating parameters and state
             with scope(self.observation):
                 update()
-                print(self.observation)
+            p.update()
+            print(self.observation)

             # execute extension when necessary
             for name, entry in extensions:
                 if entry.trigger(self):
                     entry.extension(self)

             if stop_trigger(self):
                 print("Training Done!")
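These two hunks add a tqdm progress bar and print the per-iteration observation after each update. A rough, self-contained sketch of that pattern, with simple stand-ins for the Trainer's own update(), extensions, and stop trigger (none of which are fully shown in this diff):

```python
import tqdm

def train(update, extensions, max_iteration):
    """Toy loop mirroring the diff: progress bar, printed observation, triggered extensions."""
    p = tqdm.tqdm(total=max_iteration)   # the diff itself calls tqdm.tqdm() with no total
    for iteration in range(max_iteration):
        observation = {}                 # report target for this iteration
        observation["loss"] = update()   # stand-in for the Trainer's update()
        p.update()                       # advance the progress bar by one step
        print(observation)

        # execute extensions when their trigger fires
        for name, entry in extensions:
            if entry["trigger"](iteration):
                entry["extension"](observation)

    p.close()
    print("Training Done!")

# Toy usage: a constant "loss" and an extension that fires every other iteration.
exts = [("logger", {"trigger": lambda it: it % 2 == 0,
                    "extension": lambda obs: None})]
train(lambda: 0.1, exts, max_iteration=4)
```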