abs as non-first class operation using relu (#171)

* abs (non-first class)

* whitespace
This commit is contained in:
Marcel Bischoff
2020-12-09 15:20:34 -05:00
committed by GitHub
parent 4c55c7208f
commit 5d46df638a
2 changed files with 5 additions and 0 deletions

View File

@@ -55,6 +55,8 @@ class TestOps(unittest.TestCase):
helper_test_op([(45,65)], lambda x: x.relu(), Tensor.relu, gpu=self.gpu)
def test_leakyrelu(self):
  # Compare Tensor.leakyrelu against torch's reference leaky_relu (slope 0.01).
  torch_fn = lambda t: torch.nn.functional.leaky_relu(t, 0.01)
  helper_test_op([(45,65)], torch_fn, Tensor.leakyrelu, gpu=self.gpu)
def test_abs(self):
  # Compare Tensor.abs against torch.abs on a random 45x65 input.
  helper_test_op([(45,65)], torch.abs, Tensor.abs, gpu=self.gpu)
def test_sigmoid(self):
  # Compare Tensor.sigmoid against torch's tensor-method sigmoid.
  helper_test_op([(45,65)], lambda t: t.sigmoid(), Tensor.sigmoid, gpu=self.gpu)
def test_dot(self):

View File

@@ -203,6 +203,9 @@ class Tensor:
def leakyrelu(self, neg_slope=0.01):
  # leaky_relu(x) = relu(x) + relu(-neg_slope * x): the second term
  # re-introduces the (scaled) negative half that relu clips away.
  scaled_neg = (-neg_slope * self).relu()
  return scaled_neg + self.relu()
def abs(self):
  # |x| = relu(x) + relu(-x): each term keeps one half of the input,
  # both halves non-negative.
  # BUG FIX: the previous body multiplied the second term by -1.0,
  # yielding relu(x) - relu(-x), which equals x itself (the identity),
  # not |x| — e.g. for x = -3: 0 + 3*(-1.0) = -3 instead of 3.
  return self.relu() + (-1.0*self).relu()
# An instantiation of the Function is the Context
class Function:
def __init__(self, *tensors):