diff --git a/docs/tensor/elementwise.md b/docs/tensor/elementwise.md
index 4893658953..0bf376747e 100644
--- a/docs/tensor/elementwise.md
+++ b/docs/tensor/elementwise.md
@@ -35,6 +35,7 @@ Elementwise ops operate on a per element basis. They don't change the shape of t
 
 ::: tinygrad.Tensor.relu
 ::: tinygrad.Tensor.sigmoid
+::: tinygrad.Tensor.logsigmoid
 ::: tinygrad.Tensor.hardsigmoid
 ::: tinygrad.Tensor.elu
 ::: tinygrad.Tensor.celu
diff --git a/extra/torch_backend/backend.py b/extra/torch_backend/backend.py
index 17c8083f83..cf428eaede 100644
--- a/extra/torch_backend/backend.py
+++ b/extra/torch_backend/backend.py
@@ -376,6 +376,7 @@ decomps = [
   aten.softshrink,
   aten.hardshrink,
   aten.log_sigmoid_forward,
+  aten.log_sigmoid_backward,
   aten.isneginf,
   aten.isposinf,
   aten.nan_to_num,
diff --git a/test/test_ops.py b/test/test_ops.py
index 59062df383..2c76d7f01d 100644
--- a/test/test_ops.py
+++ b/test/test_ops.py
@@ -946,6 +946,11 @@ class TestOps(unittest.TestCase):
     self.assertAlmostEqual(sigmoid(x)[0].gradient(x)[0].item(), 0.0)
     x = Tensor([-300.0])
     self.assertAlmostEqual(sigmoid(x)[0].gradient(x)[0].item(), 0.0)
+
+  def test_logsigmoid(self):
+    helper_test_op([(45,65)], torch.nn.functional.logsigmoid, Tensor.logsigmoid)
+    helper_test_op([()], torch.nn.functional.logsigmoid, Tensor.logsigmoid)
+
   def test_hardsigmoid(self):
     helper_test_op([(45,65)], torch.nn.functional.hardsigmoid, Tensor.hardsigmoid)
     helper_test_op([()], torch.nn.functional.hardsigmoid, Tensor.hardsigmoid)
diff --git a/tinygrad/tensor.py b/tinygrad/tensor.py
index 278b3ba114..76989d3c1d 100644
--- a/tinygrad/tensor.py
+++ b/tinygrad/tensor.py
@@ -2957,6 +2957,18 @@ class Tensor(MathTrait):
     """
     return (1 + (self * (-1/math.log(2))).exp2()).reciprocal()
 
+  def logsigmoid(self) -> Tensor:
+    """
+    Applies the LogSigmoid function element-wise.
+
+    - See: https://docs.pytorch.org/docs/stable/generated/torch.nn.functional.logsigmoid.html
+
+    ```python exec="true" source="above" session="tensor" result="python"
+    print(Tensor([-3., -2., -1., 0., 1., 2., 3.]).logsigmoid().numpy())
+    ```
+    """
+    return -(-self).softplus()
+
   def hardsigmoid(self, alpha:float=1/6, beta:float=0.5) -> Tensor:
     """
     Applies the Hardsigmoid function element-wise.
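
For context, the new method relies on the identity log σ(x) = log(1 / (1 + e^(−x))) = −softplus(−x), which is exactly the `return -(-self).softplus()` line in the patch. Below is a minimal sketch, not part of the patch, that compares the new op against `torch.nn.functional.logsigmoid` over a few moderate inputs; it assumes NumPy, PyTorch, and a tinygrad checkout with this change applied.

```python
# Hypothetical spot-check of the new op against PyTorch; assumes a tinygrad
# build that already includes the logsigmoid() method added in this patch.
import numpy as np
import torch
from tinygrad import Tensor

x = np.linspace(-5, 5, 11, dtype=np.float32)

# tinygrad path: logsigmoid(x) = -softplus(-x)
out_tiny = Tensor(x).logsigmoid().numpy()

# PyTorch reference
out_torch = torch.nn.functional.logsigmoid(torch.from_numpy(x)).numpy()

np.testing.assert_allclose(out_tiny, out_torch, rtol=1e-5, atol=1e-6)
print(out_tiny)
```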