add AdamW optimizer (#716)
* add AdamW optimizer
* one liner Adam optimizer
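The patch below only touches the tests; the optimizer itself lives in tinygrad.nn.optim. For orientation (this is not the actual patch), AdamW is Adam with decoupled weight decay in the sense of Loshchilov & Hutter, and the "one liner Adam optimizer" note above suggests Adam simply delegates to AdamW with the decay turned off. A minimal NumPy sketch of that relationship, with all function names and signatures assumed rather than taken from tinygrad:

# Hedged sketch only: illustrates decoupled weight decay (AdamW) vs. plain Adam.
# Names like adamw_step/adam_step are made up here; this is not the
# tinygrad.nn.optim implementation.
import numpy as np

def adamw_step(param, grad, m, v, t, lr=0.001, b1=0.9, b2=0.999, eps=1e-8, wd=0.01):
  # Standard Adam first/second moment updates with bias correction.
  m = b1 * m + (1 - b1) * grad
  v = b2 * v + (1 - b2) * grad * grad
  m_hat = m / (1 - b1 ** t)
  v_hat = v / (1 - b2 ** t)
  # Decoupled weight decay: applied to the parameter directly,
  # not folded into the gradient.
  param = param - lr * (m_hat / (np.sqrt(v_hat) + eps) + wd * param)
  return param, m, v

# "One liner Adam": Adam is AdamW with the weight decay term switched off.
def adam_step(param, grad, m, v, t, lr=0.001, b1=0.9, b2=0.999, eps=1e-8):
  return adamw_step(param, grad, m, v, t, lr, b1, b2, eps, wd=0.0)

The only difference from plain Adam is the wd * param term, which acts on the weights directly instead of being added to the gradient, so it is not rescaled by the adaptive second-moment estimate.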
@@ -2,7 +2,7 @@ import numpy as np
 import torch
 import unittest
 from tinygrad.tensor import Tensor
-from tinygrad.nn.optim import Adam, SGD, RMSprop
+from tinygrad.nn.optim import Adam, SGD, RMSprop, AdamW
 
 np.random.seed(1337)
 x_init = np.random.randn(1,4).astype(np.float32)
@@ -41,6 +41,7 @@ class TestOptim(unittest.TestCase):
   def _test_sgd(self, steps, opts, atol, rtol): self._test_optim(SGD, torch.optim.SGD, steps, opts, atol, rtol)
   def _test_rmsprop(self, steps, opts, atol, rtol): self._test_optim(RMSprop, torch.optim.RMSprop, steps, opts, atol, rtol)
   def _test_adam(self, steps, opts, atol, rtol): self._test_optim(Adam, torch.optim.Adam, steps, opts, atol, rtol)
+  def _test_adamw(self, steps, opts, atol, rtol): self._test_optim(AdamW, torch.optim.AdamW, steps, opts, atol, rtol)
 
   def test_sgd(self): self._test_sgd(1, {'lr': 0.001}, 1e-6, 0)
   def test_sgd_high_lr(self): self._test_sgd(1, {'lr': 10}, 1e-6, 1e-5)
@@ -58,12 +59,17 @@ class TestOptim(unittest.TestCase):
   def test_rmsprop_high_lr(self): self._test_rmsprop(1, {'lr': 10, 'alpha': 0.99}, 1e-5, 1e-5)
   def test_adam(self): self._test_adam(1, {'lr': 0.001}, 1e-5, 0)
   def test_adam_high_lr(self): self._test_adam(1, {'lr': 10}, 1e-5, 1e-5)
+  def test_adamw(self): self._test_adamw(1, {'lr': 0.001}, 1e-5, 0)
+  def test_adamw_high_lr(self): self._test_adamw(1, {'lr': 10}, 1e-5, 1e-5)
 
   def test_multistep_rmsprop(self): self._test_rmsprop(10, {'lr': 0.001}, 1e-5, 0)
   def test_multistep_rmsprop_high_lr(self): self._test_rmsprop(10, {'lr': 10}, 1e-5, 3e-4)
 
   def test_multistep_adam(self): self._test_adam(10, {'lr': 0.001}, 1e-5, 0)
   def test_multistep_adam_high_lr(self): self._test_adam(10, {'lr': 10}, 1e-5, 3e-4)
 
+  def test_multistep_adamw(self): self._test_adamw(10, {'lr': 0.001}, 1e-5, 0)
+  def test_multistep_adamw_high_lr(self): self._test_adamw(10, {'lr': 10}, 1e-5, 3e-4)
+
 if __name__ == '__main__':
   unittest.main()
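_test_optim itself sits earlier in this file and is not shown in the hunk. Conceptually, each test builds the same tiny computation in tinygrad and in PyTorch, runs `steps` optimizer updates with each framework, and checks that the resulting weights agree within atol/rtol. A hedged, self-contained sketch of that comparison pattern follows; the two-tensor "net", the helper names, and the use of zero_grad are assumptions for illustration, not the file's actual helper:

# Hedged sketch of the comparison pattern behind these tests; the real
# _test_optim helper may differ in detail.
import numpy as np
import torch
from tinygrad.tensor import Tensor
from tinygrad.nn.optim import AdamW

np.random.seed(1337)
x_init = np.random.randn(1, 4).astype(np.float32)
W_init = np.random.randn(4, 4).astype(np.float32)

def tinygrad_step(steps=1, lr=0.001):
  x = Tensor(x_init.copy(), requires_grad=True)
  W = Tensor(W_init.copy(), requires_grad=True)
  opt = AdamW([x, W], lr=lr)
  for _ in range(steps):
    out = x.matmul(W).relu().sum()   # assumed toy "net": matmul -> relu -> sum
    opt.zero_grad()
    out.backward()
    opt.step()
  return x.numpy(), W.numpy()

def torch_step(steps=1, lr=0.001):
  x = torch.tensor(x_init.copy(), requires_grad=True)
  W = torch.tensor(W_init.copy(), requires_grad=True)
  opt = torch.optim.AdamW([x, W], lr=lr)
  for _ in range(steps):
    out = x.matmul(W).relu().sum()
    opt.zero_grad()
    out.backward()
    opt.step()
  return x.detach().numpy(), W.detach().numpy()

# Same tolerances as test_adamw above.
for a, b in zip(tinygrad_step(), torch_step()):
  np.testing.assert_allclose(a, b, atol=1e-5, rtol=0)

Run directly, this mirrors what test_adamw asserts: one AdamW step in tinygrad should land on numerically the same weights as one step of torch.optim.AdamW under the default hyperparameters.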