Mirror of https://github.com/tinygrad/tinygrad.git
leaky relu as geohot suggested (#167)
@@ -53,6 +53,8 @@ class TestOps(unittest.TestCase):
     helper_test_op([(45,65)], lambda x: x.sqrt(), Tensor.sqrt, gpu=self.gpu)
   def test_relu(self):
     helper_test_op([(45,65)], lambda x: x.relu(), Tensor.relu, gpu=self.gpu)
+  def test_leakyrelu(self):
+    helper_test_op([(45,65)], lambda x: torch.nn.functional.leaky_relu(x,0.01), Tensor.leakyrelu, gpu=self.gpu)
   def test_sigmoid(self):
     helper_test_op([(45,65)], lambda x: x.sigmoid(), Tensor.sigmoid, gpu=self.gpu)
   def test_dot(self):
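The new test compares Tensor.leakyrelu against torch's reference implementation on a random (45,65) input, routed through the repo's helper_test_op (which also exercises the GPU path when gpu=self.gpu is set). A rough standalone equivalent of that check is sketched below; it is illustrative only, not the repo's helper, and it assumes torch and numpy are installed and that, as in early tinygrad, the result's numpy array is exposed via the .data attribute.

# Standalone sketch of the comparison the new test performs.
# Assumption: Tensor(...).data holds the underlying numpy array (early-tinygrad API).
import numpy as np
import torch
from tinygrad.tensor import Tensor

x = np.random.randn(45, 65).astype(np.float32)

out_tiny = Tensor(x).leakyrelu(0.01).data
out_torch = torch.nn.functional.leaky_relu(torch.tensor(x), 0.01).numpy()

# The two results should agree up to float32 rounding.
np.testing.assert_allclose(out_tiny, out_torch, atol=1e-6)
print("Tensor.leakyrelu matches torch.nn.functional.leaky_relu")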
@@ -200,6 +200,9 @@ class Tensor:
   def tanh(self):
     return 2.0 * ((2.0 * self).sigmoid()) - 1.0
 
+  def leakyrelu(self, neg_slope=0.01):
+    return self.relu() - (-neg_slope*self).relu()
+
 # An instantiation of the Function is the Context
 class Function:
   def __init__(self, *tensors):
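The method builds leaky relu out of relu alone, using the identity leakyrelu(x) = relu(x) - neg_slope * relu(-x); for neg_slope > 0 the second term equals relu(-neg_slope * x), which is the expression used in the diff. Below is a minimal numpy sketch, independent of tinygrad, that checks this identity against the piecewise definition.

# Sanity check of the identity: leakyrelu(x) = relu(x) - relu(-neg_slope * x)
# (valid because relu(-neg_slope * x) == neg_slope * relu(-x) when neg_slope > 0).
import numpy as np

def relu(x):
  return np.maximum(x, 0)

def leakyrelu_via_relu(x, neg_slope=0.01):
  return relu(x) - relu(-neg_slope * x)

x = np.random.randn(1000).astype(np.float32)
reference = np.where(x > 0, x, 0.01 * x)  # piecewise definition of leaky relu
np.testing.assert_allclose(leakyrelu_via_relu(x, 0.01), reference, atol=1e-6)
print("identity holds")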