improvements to docs

George Hotz
2024-08-07 09:57:24 -07:00
parent 7677361d90
commit 0a8668cf30
6 changed files with 114 additions and 162 deletions


@@ -0,0 +1,72 @@
Elementwise ops operate on a per-element basis. They don't change the shape of the tensor.
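For example, a minimal sketch (assuming only that the top-level `tinygrad` package exports `Tensor`): a unary op and a broadcasted binary op both return a tensor with the same shape as their input.

```python
from tinygrad import Tensor

x = Tensor([[1.0, -2.0], [3.0, -4.0]])  # shape (2, 2)
y = x.relu() * 2 + 1                    # unary relu, then broadcasted mul/add
print(x.shape, y.shape)                 # (2, 2) (2, 2) -- shape is unchanged
print(y.tolist())                       # [[3.0, 1.0], [7.0, 1.0]]
```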
## Unary Ops (math)
::: tinygrad.Tensor.logical_not
::: tinygrad.Tensor.neg
::: tinygrad.Tensor.log
::: tinygrad.Tensor.log2
::: tinygrad.Tensor.exp
::: tinygrad.Tensor.exp2
::: tinygrad.Tensor.sqrt
::: tinygrad.Tensor.rsqrt
::: tinygrad.Tensor.sin
::: tinygrad.Tensor.cos
::: tinygrad.Tensor.tan
::: tinygrad.Tensor.trunc
::: tinygrad.Tensor.ceil
::: tinygrad.Tensor.floor
::: tinygrad.Tensor.round
::: tinygrad.Tensor.lerp
::: tinygrad.Tensor.square
::: tinygrad.Tensor.clip
::: tinygrad.Tensor.sign
::: tinygrad.Tensor.abs
::: tinygrad.Tensor.reciprocal
## Unary Ops (activation)
::: tinygrad.Tensor.relu
::: tinygrad.Tensor.sigmoid
::: tinygrad.Tensor.elu
::: tinygrad.Tensor.celu
::: tinygrad.Tensor.swish
::: tinygrad.Tensor.silu
::: tinygrad.Tensor.relu6
::: tinygrad.Tensor.hardswish
::: tinygrad.Tensor.tanh
::: tinygrad.Tensor.sinh
::: tinygrad.Tensor.cosh
::: tinygrad.Tensor.atanh
::: tinygrad.Tensor.asinh
::: tinygrad.Tensor.acosh
::: tinygrad.Tensor.hardtanh
::: tinygrad.Tensor.gelu
::: tinygrad.Tensor.quick_gelu
::: tinygrad.Tensor.leakyrelu
::: tinygrad.Tensor.mish
::: tinygrad.Tensor.softplus
::: tinygrad.Tensor.softsign
## Elementwise Ops (broadcasted)
::: tinygrad.Tensor.add
::: tinygrad.Tensor.sub
::: tinygrad.Tensor.mul
::: tinygrad.Tensor.div
::: tinygrad.Tensor.xor
::: tinygrad.Tensor.lshift
::: tinygrad.Tensor.rshift
::: tinygrad.Tensor.pow
::: tinygrad.Tensor.maximum
::: tinygrad.Tensor.minimum
::: tinygrad.Tensor.where
## Casting Ops
::: tinygrad.Tensor.cast
::: tinygrad.Tensor.bitcast
::: tinygrad.Tensor.float
::: tinygrad.Tensor.half
::: tinygrad.Tensor.int
::: tinygrad.Tensor.bool


@@ -5,32 +5,3 @@
heading_level: 2
members: false
show_source: false
## Properties
::: tinygrad.Tensor.shape
::: tinygrad.Tensor.dtype
::: tinygrad.Tensor.device
## Data Access
::: tinygrad.Tensor.data
::: tinygrad.Tensor.item
::: tinygrad.Tensor.tolist
::: tinygrad.Tensor.numpy
## tinygrad ops
::: tinygrad.Tensor.schedule_with_vars
::: tinygrad.Tensor.schedule
::: tinygrad.Tensor.realize
::: tinygrad.Tensor.replace
::: tinygrad.Tensor.assign
::: tinygrad.Tensor.detach
::: tinygrad.Tensor.to
::: tinygrad.Tensor.to_
::: tinygrad.Tensor.shard
::: tinygrad.Tensor.shard_
::: tinygrad.Tensor.contiguous
::: tinygrad.Tensor.contiguous_backward
::: tinygrad.Tensor.backward


@@ -1,4 +1,4 @@
## Reduce Ops
## Reduce
::: tinygrad.Tensor.sum
::: tinygrad.Tensor.max
@@ -14,7 +14,7 @@
::: tinygrad.Tensor.argmax
::: tinygrad.Tensor.argmin
## Processing Ops
## Processing
::: tinygrad.Tensor.avg_pool2d
::: tinygrad.Tensor.max_pool2d
@@ -28,69 +28,7 @@
::: tinygrad.Tensor.tril
::: tinygrad.Tensor.interpolate
## Unary Ops (math)
::: tinygrad.Tensor.logical_not
::: tinygrad.Tensor.neg
::: tinygrad.Tensor.log
::: tinygrad.Tensor.log2
::: tinygrad.Tensor.exp
::: tinygrad.Tensor.exp2
::: tinygrad.Tensor.sqrt
::: tinygrad.Tensor.rsqrt
::: tinygrad.Tensor.sin
::: tinygrad.Tensor.cos
::: tinygrad.Tensor.tan
::: tinygrad.Tensor.trunc
::: tinygrad.Tensor.ceil
::: tinygrad.Tensor.floor
::: tinygrad.Tensor.round
::: tinygrad.Tensor.lerp
::: tinygrad.Tensor.square
::: tinygrad.Tensor.clip
::: tinygrad.Tensor.sign
::: tinygrad.Tensor.abs
::: tinygrad.Tensor.reciprocal
## Unary Ops (activation)
::: tinygrad.Tensor.relu
::: tinygrad.Tensor.sigmoid
::: tinygrad.Tensor.elu
::: tinygrad.Tensor.celu
::: tinygrad.Tensor.swish
::: tinygrad.Tensor.silu
::: tinygrad.Tensor.relu6
::: tinygrad.Tensor.hardswish
::: tinygrad.Tensor.tanh
::: tinygrad.Tensor.sinh
::: tinygrad.Tensor.cosh
::: tinygrad.Tensor.atanh
::: tinygrad.Tensor.asinh
::: tinygrad.Tensor.acosh
::: tinygrad.Tensor.hardtanh
::: tinygrad.Tensor.gelu
::: tinygrad.Tensor.quick_gelu
::: tinygrad.Tensor.leakyrelu
::: tinygrad.Tensor.mish
::: tinygrad.Tensor.softplus
::: tinygrad.Tensor.softsign
## Elementwise Ops (broadcasted)
::: tinygrad.Tensor.add
::: tinygrad.Tensor.sub
::: tinygrad.Tensor.mul
::: tinygrad.Tensor.div
::: tinygrad.Tensor.xor
::: tinygrad.Tensor.lshift
::: tinygrad.Tensor.rshift
::: tinygrad.Tensor.pow
::: tinygrad.Tensor.maximum
::: tinygrad.Tensor.minimum
::: tinygrad.Tensor.where
## Neural Network Ops (functional)
## Neural Network (functional)
::: tinygrad.Tensor.linear
::: tinygrad.Tensor.sequential
@@ -102,21 +40,3 @@
::: tinygrad.Tensor.binary_crossentropy
::: tinygrad.Tensor.binary_crossentropy_logits
::: tinygrad.Tensor.sparse_categorical_crossentropy
## Casting Ops
::: tinygrad.Tensor.cast
::: tinygrad.Tensor.bitcast
::: tinygrad.Tensor.float
::: tinygrad.Tensor.half
::: tinygrad.Tensor.int
::: tinygrad.Tensor.bool
## Tensor Properties
::: tinygrad.Tensor.ndim
::: tinygrad.Tensor.numel
::: tinygrad.Tensor.element_size
::: tinygrad.Tensor.nbytes
::: tinygrad.Tensor.is_floating_point
::: tinygrad.Tensor.size

docs/tensor/properties.md (new file)

@@ -0,0 +1,34 @@
## Basic
::: tinygrad.Tensor.shape
::: tinygrad.Tensor.dtype
::: tinygrad.Tensor.device
::: tinygrad.Tensor.ndim
::: tinygrad.Tensor.numel
::: tinygrad.Tensor.element_size
::: tinygrad.Tensor.nbytes
::: tinygrad.Tensor.is_floating_point
::: tinygrad.Tensor.size
## Data Access
::: tinygrad.Tensor.data
::: tinygrad.Tensor.item
::: tinygrad.Tensor.tolist
::: tinygrad.Tensor.numpy
## tinygrad ops
::: tinygrad.Tensor.schedule_with_vars
::: tinygrad.Tensor.schedule
::: tinygrad.Tensor.realize
::: tinygrad.Tensor.replace
::: tinygrad.Tensor.assign
::: tinygrad.Tensor.detach
::: tinygrad.Tensor.to
::: tinygrad.Tensor.to_
::: tinygrad.Tensor.shard
::: tinygrad.Tensor.shard_
::: tinygrad.Tensor.contiguous
::: tinygrad.Tensor.contiguous_backward
::: tinygrad.Tensor.backward