diff --git a/docs/tensor/elementwise.md b/docs/tensor/elementwise.md
index 91eccb5651..15e42b022e 100644
--- a/docs/tensor/elementwise.md
+++ b/docs/tensor/elementwise.md
@@ -29,6 +29,7 @@ Elementwise ops operate on a per element basis. They don't change the shape of t
 ::: tinygrad.Tensor.relu
 ::: tinygrad.Tensor.sigmoid
+::: tinygrad.Tensor.hardsigmoid
 ::: tinygrad.Tensor.elu
 ::: tinygrad.Tensor.celu
 ::: tinygrad.Tensor.swish
diff --git a/test/test_ops.py b/test/test_ops.py
index 1e6303eedf..9b3036a57d 100644
--- a/test/test_ops.py
+++ b/test/test_ops.py
@@ -643,6 +643,11 @@ class TestOps(unittest.TestCase):
     helper_test_op([(45,65)], torch.sigmoid, Tensor.sigmoid, low=300, high=400)
     helper_test_op([(45,65)], torch.sigmoid, Tensor.sigmoid, low=-400, high=-300)
     helper_test_op([()], torch.sigmoid, Tensor.sigmoid)
+  def test_hardsigmoid(self):
+    helper_test_op([(45,65)], torch.nn.functional.hardsigmoid, Tensor.hardsigmoid)
+    helper_test_op([(45,65)], torch.nn.functional.hardsigmoid, Tensor.hardsigmoid, low=300, high=400)
+    helper_test_op([(45,65)], torch.nn.functional.hardsigmoid, Tensor.hardsigmoid, low=-400, high=-300)
+    helper_test_op([()], torch.nn.functional.hardsigmoid, Tensor.hardsigmoid)
   def test_softplus(self):
     helper_test_op([(45,65)], torch.nn.functional.softplus, Tensor.softplus, grad_atol=1e-6)
     helper_test_op([()], torch.nn.functional.softplus, Tensor.softplus, grad_atol=1e-6)
diff --git a/tinygrad/tensor.py b/tinygrad/tensor.py
index 57fc1a32f6..e90ad802d2 100644
--- a/tinygrad/tensor.py
+++ b/tinygrad/tensor.py
@@ -2344,6 +2344,20 @@ class Tensor(SimpleMathTrait): # pylint: disable=abstract-method
     ```
     """
     return F.Sigmoid.apply(self.cast(least_upper_float(self.dtype)))
+  def hardsigmoid(self, alpha:float=1/6, beta:float=0.5):
+    """
+    Applies the Hardsigmoid function element-wise.
+    NOTE: default `alpha` and `beta` values are taken from torch
+
+    - Described: https://paperswithcode.com/method/hard-sigmoid
+    - See: https://pytorch.org/docs/stable/generated/torch.nn.functional.hardsigmoid.html
+
+    ```python exec="true" source="above" session="tensor" result="python"
+    print(Tensor([-3., -2., -1., 0., 1., 2., 3.]).hardsigmoid().numpy())
+    ```
+    """
+    return (alpha * self + beta).relu() - (alpha * self + beta - 1).relu()
+
   def sqrt(self):
     """
     Computes the square root of the tensor element-wise.
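
The new `hardsigmoid` is expressed as a difference of two ReLUs rather than an explicit clamp: for `a = alpha*x + beta`, `relu(a) - relu(a - 1)` equals `clip(a, 0, 1)`, because the second term cancels exactly the amount by which `a` exceeds 1. Below is a minimal numpy sketch (illustrative only, not part of the patch; the function names are hypothetical) checking that identity against the piecewise form PyTorch documents for `torch.nn.functional.hardsigmoid`:

```python
import numpy as np

def hardsigmoid_two_relu(x, alpha=1/6, beta=0.5):
  # same shape as the patch: relu(a) - relu(a - 1)
  a = alpha * x + beta
  return np.maximum(a, 0) - np.maximum(a - 1, 0)

def hardsigmoid_clip(x, alpha=1/6, beta=0.5):
  # piecewise definition: 0 for x <= -3, 1 for x >= 3,
  # x/6 + 1/2 in between (with the default alpha and beta)
  return np.clip(alpha * x + beta, 0, 1)

x = np.linspace(-500, 500, 10001)  # covers the saturated ranges the test hits
assert np.allclose(hardsigmoid_two_relu(x), hardsigmoid_clip(x))
```

The extreme-range cases in `test_hardsigmoid` (`low=300, high=400` and `low=-400, high=-300`) exercise exactly those saturated regions, where the output should be constant 1 and constant 0 respectively.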