Merge pull request #82 from iclementine/develop
fix: the condition to init DataParallel
commit ede6835bd2
@@ -46,7 +46,7 @@ class Experiment(ExperimentBase):
             n_mels=config.data.n_mels,
             kernel_size=config.model.kernel_size)
 
-        if self.parallel > 1:
+        if self.parallel:
             model = paddle.DataParallel(model)
         optimizer = paddle.optimizer.Adam(
             config.training.lr, parameters=model.parameters())
@@ -49,7 +49,7 @@ class Experiment(ExperimentBase):
             loss_type=config.model.loss_type,
             log_scale_min=config.model.log_scale_min)
 
-        if self.parallel > 1:
+        if self.parallel:
             model = paddle.DataParallel(model)
 
         lr_scheduler = paddle.optimizer.lr.StepDecay(
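Both hunks make the same change: the guard before wrapping the model in DataParallel now tests plain truthiness instead of comparing against 1. A minimal sketch of why this matters, assuming self.parallel is a boolean flag rather than a device count (the attribute's definition is not part of this diff, and the nprocs name below is hypothetical):

import paddle

class Experiment:
    def __init__(self, nprocs):
        # Hypothetical setup: `parallel` is stored as a boolean flag,
        # not as the number of devices.
        self.parallel = nprocs > 1

    def setup_model(self, model):
        # With a boolean flag, the old guard `self.parallel > 1` was always
        # False (True == 1 in Python, so True > 1 is False), and the model
        # was never wrapped. A truthiness check enables DataParallel
        # whenever the flag is set.
        if self.parallel:
            model = paddle.DataParallel(model)
        return model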