From cbf4987396fde2d7b19e3cd36ccfecbe1a357ea5 Mon Sep 17 00:00:00 2001
From: qazal
Date: Fri, 22 Dec 2023 22:53:16 +0200
Subject: [PATCH] fix NegativeLogLikelihoodLoss

---
 extra/onnx_ops.py                           | 4 ++--
 test/external/external_test_onnx_backend.py | 1 +
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/extra/onnx_ops.py b/extra/onnx_ops.py
index eee407b90e..2b931f52e3 100644
--- a/extra/onnx_ops.py
+++ b/extra/onnx_ops.py
@@ -359,12 +359,12 @@ def NegativeLogLikelihoodLoss(x: Tensor, target: Tensor, weight=None, ignore_ind
   x = x.reshape((N, C, -1))
   target = target.reshape((N, -1))
   if weight is not None:
-    mask = target.unsqueeze(-1) == Tensor.arange(C).repeat((N, 1, 1))
+    mask = (target.unsqueeze(-1) == Tensor.arange(C).repeat((N, 1, 1))).float()
     weight = (mask * weight).sum(axis=-1)
   if ignore_index is not None:
     cond = target == ignore_index
     weight = cond.where(0, weight) if weight is not None else cond.where(Tensor.zeros(*target.shape), 1)
-  mask = target[:, None, :] == Tensor.arange(C).reshape([1, C] + [1]*(len(x.shape) -2))
+  mask = (target[:, None, :] == Tensor.arange(C).reshape([1, C] + [1]*(len(x.shape) -2))).float()
   loss = (-mask * x).sum(axis=1) * (1 if weight is None else weight)
   if reduction == "mean": return loss.mean() if weight is None else loss.sum() / weight.sum()
   if reduction == "sum": return loss.sum()
diff --git a/test/external/external_test_onnx_backend.py b/test/external/external_test_onnx_backend.py
index 00215918cb..4dbea6b744 100644
--- a/test/external/external_test_onnx_backend.py
+++ b/test/external/external_test_onnx_backend.py
@@ -31,6 +31,7 @@ class TinygradBackend(Backend):
     net_feed_input = [x for x in input_all if x not in input_initializer]
     print("prepare", cls, device, net_feed_input)
     run_onnx = get_run_onnx(model)
+    #print(model)
     return TinygradModel(run_onnx, net_feed_input)
 
   @classmethod