## Reduce Ops

::: tinygrad.Tensor.sum

::: tinygrad.Tensor.max

::: tinygrad.Tensor.min

::: tinygrad.Tensor.any

::: tinygrad.Tensor.all

::: tinygrad.Tensor.mean

::: tinygrad.Tensor.var

::: tinygrad.Tensor.std

::: tinygrad.Tensor.softmax

::: tinygrad.Tensor.log_softmax

::: tinygrad.Tensor.logsumexp

::: tinygrad.Tensor.argmax

::: tinygrad.Tensor.argmin
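
A quick illustrative sketch of how a few of these reductions compose; the tensor values and `axis` choices below are made up for demonstration, not taken from the generated reference:

```python
from tinygrad import Tensor

t = Tensor([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])

print(t.sum().item())             # 21.0: reduce over every axis to a scalar
print(t.max(axis=1).numpy())      # [3. 6.]: row-wise maximum
print(t.mean(axis=0).numpy())     # [2.5 3.5 4.5]: column-wise mean
print(t.softmax(axis=1).numpy())  # each row now sums to 1
print(t.argmax(axis=1).numpy())   # [2 2]: index of each row's maximum
```
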
## Processing Ops

::: tinygrad.Tensor.avg_pool2d

::: tinygrad.Tensor.max_pool2d

::: tinygrad.Tensor.conv2d

::: tinygrad.Tensor.conv_transpose2d

::: tinygrad.Tensor.dot

::: tinygrad.Tensor.matmul

::: tinygrad.Tensor.einsum

::: tinygrad.Tensor.cumsum

::: tinygrad.Tensor.triu

::: tinygrad.Tensor.tril

::: tinygrad.Tensor.interpolate
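
A rough sketch of how the convolution, pooling, and matmul ops fit together; all shapes here are invented for the example:

```python
from tinygrad import Tensor

x = Tensor.randn(1, 3, 32, 32)   # NCHW batch of one RGB image
w = Tensor.randn(8, 3, 3, 3)     # conv weight: (out_ch, in_ch, kH, kW)

y = x.conv2d(w, padding=1)       # -> (1, 8, 32, 32)
p = y.max_pool2d(kernel_size=2)  # -> (1, 8, 16, 16)

a, b = Tensor.randn(4, 5), Tensor.randn(5, 6)
c = a.matmul(b)                  # (4, 6); `a @ b` and `a.dot(b)` are equivalent
print(c.shape)
```
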
## Unary Ops (math)

::: tinygrad.Tensor.logical_not

::: tinygrad.Tensor.neg

::: tinygrad.Tensor.log

::: tinygrad.Tensor.log2

::: tinygrad.Tensor.exp

::: tinygrad.Tensor.exp2

::: tinygrad.Tensor.sqrt

::: tinygrad.Tensor.rsqrt

::: tinygrad.Tensor.sin

::: tinygrad.Tensor.cos

::: tinygrad.Tensor.tan

::: tinygrad.Tensor.trunc

::: tinygrad.Tensor.ceil

::: tinygrad.Tensor.floor

::: tinygrad.Tensor.round

::: tinygrad.Tensor.lerp

::: tinygrad.Tensor.square

::: tinygrad.Tensor.clip

::: tinygrad.Tensor.sign

::: tinygrad.Tensor.abs

::: tinygrad.Tensor.reciprocal
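
A short sketch of a few of these pointwise math ops; the input values are illustrative:

```python
from tinygrad import Tensor

t = Tensor([-2.5, -1.0, 0.5, 2.0])

print(t.abs().numpy())        # [2.5 1.  0.5 2. ]
print(t.clip(-1, 1).numpy())  # [-1. -1.  0.5 1. ]
print(t.sign().numpy())       # [-1. -1.  1.  1.]
print(Tensor([1.0, 4.0, 9.0]).sqrt().numpy())  # [1. 2. 3.]
```
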
## Unary Ops (activation)

::: tinygrad.Tensor.relu

::: tinygrad.Tensor.sigmoid

::: tinygrad.Tensor.elu

::: tinygrad.Tensor.celu

::: tinygrad.Tensor.swish

::: tinygrad.Tensor.silu

::: tinygrad.Tensor.relu6

::: tinygrad.Tensor.hardswish

::: tinygrad.Tensor.tanh

::: tinygrad.Tensor.sinh

::: tinygrad.Tensor.cosh

::: tinygrad.Tensor.atanh

::: tinygrad.Tensor.asinh

::: tinygrad.Tensor.acosh

::: tinygrad.Tensor.hardtanh

::: tinygrad.Tensor.gelu

::: tinygrad.Tensor.quick_gelu

::: tinygrad.Tensor.leakyrelu

::: tinygrad.Tensor.mish

::: tinygrad.Tensor.softplus

::: tinygrad.Tensor.softsign
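
A minimal sketch showing that each activation is just a pointwise call on a tensor; the sample values are invented:

```python
from tinygrad import Tensor

x = Tensor([-2.0, -0.5, 0.0, 0.5, 2.0])

print(x.relu().numpy())     # negatives clamp to 0
print(x.sigmoid().numpy())  # squashed into (0, 1)
print(x.tanh().numpy())     # squashed into (-1, 1)
print(x.gelu().numpy())     # smooth gating of the input
```
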
## Elementwise Ops (broadcasted)

::: tinygrad.Tensor.add

::: tinygrad.Tensor.sub

::: tinygrad.Tensor.mul

::: tinygrad.Tensor.div

::: tinygrad.Tensor.xor

::: tinygrad.Tensor.lshift

::: tinygrad.Tensor.rshift

::: tinygrad.Tensor.pow

::: tinygrad.Tensor.maximum

::: tinygrad.Tensor.minimum

::: tinygrad.Tensor.where
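
These ops broadcast in the usual NumPy style; a small sketch with invented values:

```python
from tinygrad import Tensor

a = Tensor([[1.0, 2.0], [3.0, 4.0]])
b = Tensor([10.0, 20.0])            # (2,) broadcasts across the rows of (2, 2)

print(a.add(b).numpy())             # same as a + b
print(a.maximum(b).numpy())         # elementwise max, broadcasted
print((a > 2).where(a, 0).numpy())  # keep a where the mask holds, else 0
```
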
## Neural Network Ops (functional)

::: tinygrad.Tensor.linear

::: tinygrad.Tensor.sequential

::: tinygrad.Tensor.layernorm

::: tinygrad.Tensor.batchnorm

::: tinygrad.Tensor.dropout

::: tinygrad.Tensor.one_hot

::: tinygrad.Tensor.scaled_dot_product_attention

::: tinygrad.Tensor.binary_crossentropy

::: tinygrad.Tensor.binary_crossentropy_logits

::: tinygrad.Tensor.sparse_categorical_crossentropy
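
An end-to-end sketch of a tiny classifier step using a few of these ops; the shapes, labels, and `(in_features, out_features)` weight layout are assumptions for the example:

```python
from tinygrad import Tensor

x = Tensor.randn(4, 16)   # batch of 4 examples, 16 features each
w = Tensor.randn(16, 10)  # weight laid out (in_features, out_features)
b = Tensor.zeros(10)

logits = x.linear(w, b)   # x @ w + b -> (4, 10)
labels = Tensor([3, 1, 0, 9])
loss = logits.sparse_categorical_crossentropy(labels)
print(loss.item())
```
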
## Casting Ops

::: tinygrad.Tensor.cast

::: tinygrad.Tensor.bitcast

::: tinygrad.Tensor.float

::: tinygrad.Tensor.half

::: tinygrad.Tensor.int

::: tinygrad.Tensor.bool
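
A brief sketch of dtype conversions, assuming tinygrad's top-level `dtypes` export:

```python
from tinygrad import Tensor, dtypes

t = Tensor([1.7, -0.3, 2.0])

print(t.int().numpy())             # fractional parts dropped -> int32
print(t.half().dtype)              # dtypes.half
print(t.cast(dtypes.int32).dtype)  # explicit target dtype
print(t.bool().numpy())            # nonzero values become True
```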