switch beautiful_mnist to use new optimizer [pr] (#8231)

* switch beautiful_mnist to use new optimizer [pr]

* fix abstractions3 + docs

* fix OptimizerGroup with schedule_step api
This commit is contained in:
George Hotz
2024-12-13 18:27:16 -08:00
committed by GitHub
parent e0956c518c
commit e9ee39df22
7 changed files with 61 additions and 29 deletions

View File

@@ -26,10 +26,11 @@ l1n, l2n = l1.numpy(), l2.numpy()
from tinygrad.nn.optim import SGD
optim = SGD([l1, l2])
Tensor.training = True
X, Y = X_train[(samples:=Tensor.randint(128, high=X_train.shape[0]))], Y_train[samples]
optim.zero_grad()
model(X).sparse_categorical_crossentropy(Y).backward()
-optim._step() # this will step the optimizer without running realize
+optim.schedule_step() # this will step the optimizer without running realize
# *****
# 3. Create a schedule.

View File

@@ -31,4 +31,8 @@
::: tinygrad.Tensor.shard_
::: tinygrad.Tensor.contiguous
::: tinygrad.Tensor.contiguous_backward
+## Gradient
+::: tinygrad.Tensor.gradient
+::: tinygrad.Tensor.backward