Tiny backend test_ops fix part1 (#9338)

* extract name methods from https://github.com/tinygrad/tinygrad/pull/9302

* t.grad.numpy() -> t.grad.cpu().numpy()

* revert TORCH_DEBUG change

* revert dtype change in aten.sum
Anish Umale
2025-03-03 23:06:51 +05:30
committed by GitHub
parent 0d4ba7dd87
commit bafa40fe12
2 changed files with 35 additions and 2 deletions

@@ -357,7 +357,7 @@ class TestOps(unittest.TestCase):
     (tt*(tt != 0)).sum().backward()
     t = torch.tensor(tt.numpy(), requires_grad=True)
     (t*(t != 0)).sum().backward()
-    np.testing.assert_allclose(t.grad.numpy(), tt.grad.numpy(), rtol=1e-5)
+    np.testing.assert_allclose(t.grad.cpu().numpy(), tt.grad.numpy(), rtol=1e-5)
 
   def test_cmp_lt_backwards(self):
     # new grad zeroes these out
@@ -373,7 +373,7 @@ class TestOps(unittest.TestCase):
     (tt*(tt < 0)).sum().backward()
     t = torch.tensor(tt.numpy(), requires_grad=True)
     (t*(t < 0)).sum().backward()
-    np.testing.assert_allclose(t.grad.numpy(), tt.grad.numpy(), rtol=1e-5)
+    np.testing.assert_allclose(t.grad.cpu().numpy(), tt.grad.numpy(), rtol=1e-5)
 
   # TODO: fix backward of these functions
   def test_trunc(self):
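
Note on the change above (not part of the commit): torch only converts CPU tensors to numpy, so t.grad.numpy() raises once the reference torch tensor lives on a non-CPU device, which is presumably what happens when these tests run against the tiny torch backend; adding .cpu() copies the gradient to host memory first and is harmless on CPU. A minimal sketch of the failure mode, using CUDA only as a stand-in for "some non-CPU device":

import numpy as np
import torch

# Stand-in device: any non-CPU device reproduces the issue. If CUDA is not
# available the example falls back to CPU, where both call styles work.
device = "cuda" if torch.cuda.is_available() else "cpu"

t = torch.tensor([1.0, -2.0, 0.0], requires_grad=True, device=device)
(t * (t != 0)).sum().backward()   # grad is the 0/1 mask of nonzero entries

# t.grad.numpy() raises on a non-CPU tensor ("can't convert ... to numpy");
# .cpu() moves the gradient to host memory first, so this works everywhere.
np.testing.assert_allclose(t.grad.cpu().numpy(), np.array([1.0, 1.0, 0.0]), rtol=1e-5)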