update rmsprop

f0ti
2020-10-23 14:46:45 +02:00
parent c5f726ec2e
commit 0b87aaca1e
3 changed files with 12 additions and 13 deletions


@@ -39,13 +39,13 @@ print(y.grad) # dz/dy
### Neural networks?
-It turns out, a decent autograd tensor library is 90% of what you need for neural networks. Add an optimizer (SGD and Adam implemented) from tinygrad.optim, write some boilerplate minibatching code, and you have all you need.
+It turns out, a decent autograd tensor library is 90% of what you need for neural networks. Add an optimizer (SGD, RMSprop and Adam implemented) from tinygrad.optim, write some boilerplate minibatching code, and you have all you need.
### Neural network example (from test/test_mnist.py)
```python
from tinygrad.tensor import Tensor
-import tinygrad.optim as tinygrad_optim
+import tinygrad.optim as optim
from tinygrad.utils import layer_init_uniform
class TinyBobNet:
@@ -57,8 +57,7 @@ class TinyBobNet:
return x.dot(self.l1).relu().dot(self.l2).logsoftmax()
model = TinyBobNet()
-optim = tinygrad_optim.SGD([model.l1, model.l2], lr=0.001) # or
-optim = tinygrad_optim.RMSprop([model.l1, model.l2], lr=0.001)
+optim = optim.SGD([model.l1, model.l2], lr=0.001)
# ... and complete like pytorch, with (x,y) data
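For context on what this commit references: RMSprop keeps a per-parameter running average of squared gradients and divides each step by its square root, so noisy directions get a smaller effective learning rate. Below is a minimal NumPy sketch of that update rule; the `rmsprop_step` helper and the decay/eps defaults are illustrative assumptions, not the tinygrad.optim implementation.

```python
import numpy as np

def rmsprop_step(param, grad, cache, lr=0.001, decay=0.9, eps=1e-8):
    # Exponential moving average of squared gradients (per parameter).
    cache = decay * cache + (1 - decay) * grad ** 2
    # Scale the step by the root of that average before applying it.
    param = param - lr * grad / (np.sqrt(cache) + eps)
    return param, cache

# Toy usage: one update on a random 784x128 "layer".
w = np.random.uniform(-1, 1, (784, 128))
g = np.random.randn(784, 128)   # stand-in gradient
c = np.zeros_like(w)            # cache starts at zero
w, c = rmsprop_step(w, g, c)
```

In an actual training loop the cache would persist across minibatches, one per parameter tensor, which is the state an RMSprop optimizer class carries for you.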