add det distill combined
This commit is contained in:
parent
e174e9eddf
commit
202a0b5b85
|
@ -44,15 +44,17 @@ class CombinedLoss(nn.Layer):
|
|||
|
||||
def forward(self, input, batch, **kargs):
    """Run every configured sub-loss and return a dict of weighted components.

    Args:
        input: model output, forwarded unchanged to each sub-loss.
        batch: ground-truth batch, forwarded unchanged to each sub-loss.
        **kargs: extra keyword arguments forwarded to every sub-loss.

    Returns:
        dict: every sub-loss component keyed ``"<name>_<idx>"`` (already
        scaled by that loss's configured weight), plus ``"loss"`` holding
        the sum of the weighted main components.
    """
    loss_dict = {}
    loss_all = 0.
    for idx, loss_func in enumerate(self.loss_func):
        loss = loss_func(input, batch, **kargs)
        # A bare tensor result is wrapped into a dict so all sub-losses
        # can be merged uniformly below.
        # NOTE(review): str(loss) stringifies a tensor into the key name —
        # looks unintended, but the key is part of the output contract, so
        # it is kept as-is; confirm with the original author.
        if isinstance(loss, paddle.Tensor):
            loss = {"loss_{}_{}".format(str(loss), idx): loss}
        weight = self.loss_weight[idx]
        # Apply the per-loss weight exactly ONCE here, while renaming each
        # key to "<key>_<idx>" so components of different losses cannot
        # clash in loss_dict.
        loss = {
            "{}_{}".format(key, idx): loss[key] * weight
            for key in loss
        }
        # BUG FIX: the keys were renamed above, so the old comparison
        # `key == "loss"` could never match and loss_all stayed 0 — match
        # the renamed main key instead. The weight is already folded into
        # the value, so it must NOT be multiplied in a second time.
        main_key = "loss_{}".format(idx)
        if main_key in loss:
            loss_all += loss[main_key]
        loss_dict.update(loss)
    # BUG FIX: removed the dead `loss_dict["loss"] = paddle.add_n(...)`
    # assignment that was immediately overwritten by the line below.
    loss_dict["loss"] = loss_all
    return loss_dict
|
||||
|
|
Loading…
Reference in New Issue