add det distill combined

LDOUBLEV 2021-07-06 07:54:39 +00:00
parent e174e9eddf
commit 202a0b5b85
1 changed file with 7 additions and 5 deletions


@@ -44,15 +44,17 @@ class CombinedLoss(nn.Layer):
     def forward(self, input, batch, **kargs):
         loss_dict = {}
+        loss_all = 0.
         for idx, loss_func in enumerate(self.loss_func):
             loss = loss_func(input, batch, **kargs)
             if isinstance(loss, paddle.Tensor):
                 loss = {"loss_{}_{}".format(str(loss), idx): loss}
             weight = self.loss_weight[idx]

-            loss = {
-                "{}_{}".format(key, idx): loss[key] * weight
-                for key in loss
-            }
+            for key in loss:
+                if key == "loss":
+                    loss_all += loss[key] * weight
+                # else:
+                #     loss[f"{key}_{idx}"] = loss[key]
             loss_dict.update(loss)
-        loss_dict["loss"] = paddle.add_n(list(loss_dict.values()))
+        loss_dict["loss"] = loss_all
         return loss_dict
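
For reference, after this change the forward pass keeps a running accumulator: each sub-loss returns a dict, only its "loss" entry is scaled by the configured weight and added to loss_all, the sub-loss dict is still merged into loss_dict for logging, and the final combined value is written back under "loss" (replacing the earlier paddle.add_n over every entry of loss_dict). The following is a minimal, self-contained sketch of that aggregation under assumed names; dice_like_loss, dml_like_loss, and combined_forward are illustrative stand-ins, not PaddleOCR's actual detection or distillation losses.

import paddle


def dice_like_loss(pred, target):
    # Stand-in for a detection-style sub-loss; returns a dict with a "loss" key.
    return {"loss": paddle.mean(paddle.abs(pred - target))}


def dml_like_loss(pred, target):
    # Stand-in for a distillation-style sub-loss; also returns a dict.
    return {"loss": paddle.mean((pred - target) ** 2)}


def combined_forward(loss_funcs, loss_weights, pred, target):
    # Mirrors the committed aggregation: weight and sum only the "loss"
    # entries into loss_all, pass the raw dicts through, then store the
    # combined value under "loss".
    loss_dict = {}
    loss_all = 0.
    for idx, loss_func in enumerate(loss_funcs):
        loss = loss_func(pred, target)
        weight = loss_weights[idx]
        for key in loss:
            if key == "loss":
                loss_all += loss[key] * weight
        loss_dict.update(loss)
    loss_dict["loss"] = loss_all
    return loss_dict


pred = paddle.to_tensor([0.2, 0.8, 0.5])
target = paddle.to_tensor([0.0, 1.0, 0.5])
out = combined_forward([dice_like_loss, dml_like_loss], [1.0, 0.5], pred, target)
print(float(out["loss"]))  # weighted sum of the two sub-losses' "loss" entries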