Tensor.logsigmoid (#10955)

chenyu
2025-06-24 11:16:14 -04:00
committed by GitHub
parent f15247d2d2
commit 18e264a449
4 changed files with 19 additions and 0 deletions

@@ -35,6 +35,7 @@ Elementwise ops operate on a per element basis. They don't change the shape of the tensor.
::: tinygrad.Tensor.relu
::: tinygrad.Tensor.sigmoid
::: tinygrad.Tensor.logsigmoid
::: tinygrad.Tensor.hardsigmoid
::: tinygrad.Tensor.elu
::: tinygrad.Tensor.celu

@@ -376,6 +376,7 @@ decomps = [
  aten.softshrink,
  aten.hardshrink,
  aten.log_sigmoid_forward,
  aten.log_sigmoid_backward,
  aten.isneginf,
  aten.isposinf,
  aten.nan_to_num,
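
Listing `aten.log_sigmoid_forward` and `aten.log_sigmoid_backward` in this decomps list lets the torch backend lower both aten ops into simpler ops tinygrad already implements, instead of needing dedicated kernels. As a sketch (the registry contents depend on the installed torch version, and `torch._decomp` is an internal module), you can inspect the stock decompositions torch ships for these ops:

```python
# Sketch: look up torch's own decompositions for the log_sigmoid aten ops.
import torch
from torch._decomp import get_decompositions

aten = torch.ops.aten
table = get_decompositions([aten.log_sigmoid_forward, aten.log_sigmoid_backward])
for op, fn in table.items():
  print(op, "->", f"{fn.__module__}.{fn.__name__}")
```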

@@ -946,6 +946,11 @@ class TestOps(unittest.TestCase):
    self.assertAlmostEqual(sigmoid(x)[0].gradient(x)[0].item(), 0.0)
    x = Tensor([-300.0])
    self.assertAlmostEqual(sigmoid(x)[0].gradient(x)[0].item(), 0.0)
  def test_logsigmoid(self):
    helper_test_op([(45,65)], torch.nn.functional.logsigmoid, Tensor.logsigmoid)
    helper_test_op([()], torch.nn.functional.logsigmoid, Tensor.logsigmoid)
  def test_hardsigmoid(self):
    helper_test_op([(45,65)], torch.nn.functional.hardsigmoid, Tensor.hardsigmoid)
    helper_test_op([()], torch.nn.functional.hardsigmoid, Tensor.hardsigmoid)
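
`helper_test_op` runs the op on random inputs through both torch and tinygrad and compares forward values and gradients. A minimal standalone sketch of just the forward comparison it performs (the tolerances here are illustrative, not the ones `helper_test_op` actually uses):

```python
# Sketch of the forward-value check for logsigmoid: torch vs tinygrad.
import numpy as np
import torch
from tinygrad import Tensor

x = np.random.randn(45, 65).astype(np.float32)
ref = torch.nn.functional.logsigmoid(torch.from_numpy(x)).numpy()
out = Tensor(x).logsigmoid().numpy()
np.testing.assert_allclose(out, ref, atol=1e-6, rtol=1e-3)
```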

@@ -2957,6 +2957,18 @@ class Tensor(MathTrait):
"""
return (1 + (self * (-1/math.log(2))).exp2()).reciprocal()
def logsigmoid(self) -> Tensor:
"""
Applies the LogSigmoid function element-wise.
- See: https://docs.pytorch.org/docs/stable/generated/torch.nn.functional.logsigmoid.html
```python exec="true" source="above" session="tensor" result="python"
print(Tensor([-3., -2., -1., 0., 1., 2., 3.]).logsigmoid().numpy())
```
"""
return -(-self).softplus()
def hardsigmoid(self, alpha:float=1/6, beta:float=0.5) -> Tensor:
"""
Applies the Hardsigmoid function element-wise.
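
The one-line body works because of the identity log σ(x) = −log(1 + e^(−x)) = −softplus(−x), and softplus is evaluated in a numerically stable way, so logsigmoid stays finite even for large negative inputs (the x = −300 regime the sigmoid gradient tests above probe). A small numpy sketch of the identity — not the tinygrad kernel, just why the naive form is avoided:

```python
# Naive log(sigmoid(x)) vs. the stable -softplus(-x) identity, in numpy.
import numpy as np

x = np.array([-300.0, -3.0, 0.0, 3.0], dtype=np.float32)

# naive: exp(300) overflows float32, sigmoid rounds to 0, and log(0) = -inf
with np.errstate(over="ignore", divide="ignore"):
  naive = np.log(1 / (1 + np.exp(-x)))

# stable softplus-based form: logsigmoid(x) = min(x, 0) - log1p(exp(-|x|))
stable = np.minimum(x, 0) - np.log1p(np.exp(-np.abs(x)))

print(naive)   # [-inf -3.0485873 -0.6931472 -0.04858735]
print(stable)  # [-300. -3.0485873 -0.6931472 -0.04858735]
```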