* replace relu with maximum

* fix for other backends

* clean up RELU and GT0

* tests for maximum

* had to clean that up

* why reverse a maximum? (see the sketch below)
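The idea behind the change: relu(x) is just maximum(x, 0), so a dedicated RELU primitive (and the GT0 op used in its backward pass) becomes redundant once a general elementwise maximum exists. And since maximum is commutative, a reversed variant (the kind non-commutative ops like sub need) has nothing to do, which is presumably what the last question is getting at. A minimal illustrative sketch in NumPy, not the code from this commit:

import numpy as np

def relu(x):
  # relu is just an elementwise maximum against zero
  return np.maximum(x, 0)

def maximum_backward(x, y, grad_out):
  # Illustrative convention, not necessarily this commit's: the upstream
  # gradient flows to whichever input won the forward max, split evenly
  # on ties so the two pieces still sum to grad_out.
  gx = grad_out * ((x > y) + 0.5 * (x == y))
  gy = grad_out * ((y > x) + 0.5 * (x == y))
  return gx, gy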
George Hotz (committed via GitHub), 2023-02-25 01:21:16 -08:00
parent 176ad29974
commit 2c5e13a513
11 changed files with 38 additions and 31 deletions


@@ -52,6 +52,10 @@ def helper_test_op(shps, torch_fxn, tinygrad_fxn=None, atol=1e-6, rtol=1e-3, gra
 class TestOps(unittest.TestCase):
+  def test_maximum(self):
+    helper_test_op([(45,65), (45,65)], torch.maximum, Tensor.maximum)
+  def test_minimum(self):
+    helper_test_op([(45,65), (45,65)], torch.minimum, Tensor.minimum)
   def test_add(self):
     helper_test_op([(45,65), (45,65)], lambda x,y: x+y, Tensor.add)
   def test_add_simple(self):
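For context on what these tests exercise: judging only from the signature visible in the hunk header, helper_test_op builds random inputs of the given shapes, runs the torch and tinygrad functions on them, and compares the outputs within atol/rtol. A rough sketch under those assumptions (helper_test_op_sketch is a hypothetical stand-in; the real helper also checks gradients, which is omitted here):

import numpy as np
import torch
from tinygrad.tensor import Tensor

def helper_test_op_sketch(shps, torch_fxn, tinygrad_fxn=None, atol=1e-6, rtol=1e-3):
  # assumption: when tinygrad_fxn is None, the same callable works on Tensors
  tinygrad_fxn = tinygrad_fxn or torch_fxn
  data = [np.random.randn(*shp).astype(np.float32) for shp in shps]
  torch_out = torch_fxn(*[torch.tensor(d) for d in data])
  tiny_out = tinygrad_fxn(*[Tensor(d) for d in data])
  np.testing.assert_allclose(tiny_out.numpy(), torch_out.numpy(), atol=atol, rtol=rtol)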