make maximum split the grad like torch when equal (#738)
* make maximum split the grad
* add a test for maximum's split grad when the inputs are equal
* minor expression simplification
* compute (2-eq)/2 only once
* update the test because one more sum output child stays around
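For context, the behavior the commit describes can be sketched in plain NumPy: where the two inputs tie, each side receives half of the incoming gradient, matching torch. This is an illustrative sketch only (the name maximum_backward and the NumPy framing are assumptions, not tinygrad's actual mlops code); it also shows why the shared factor (2 - eq) / 2 only needs to be computed once, since it is 1.0 on a strict win and 0.5 on a tie for both inputs.

import numpy as np

def maximum_backward(x, y, grad_out):
  # hypothetical sketch, not tinygrad's mlops implementation
  eq = (x == y).astype(grad_out.dtype)   # 1.0 where the inputs tie, else 0.0
  split = (2.0 - eq) / 2.0               # computed once, reused for both inputs
  grad_x = grad_out * (x >= y).astype(grad_out.dtype) * split
  grad_y = grad_out * (y >= x).astype(grad_out.dtype) * split
  return grad_x, grad_y

x = np.array([1.0, 2.0, 3.0])
y = np.array([2.0, 2.0, 2.0])
gx, gy = maximum_backward(x, y, np.ones(3))
print(gx, gy)  # [0.  0.5 1. ] [1.  0.5 0. ] -- the tie splits 0.5/0.5

On a strict win the whole gradient flows to the larger input; on a tie both masks are 1.0 and the split factor halves each side, so the two gradients always sum to grad_out.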
test/external/external_test_opt.py (vendored), 2 changes
@@ -190,7 +190,7 @@ class TestOpt(unittest.TestCase):
     # TODO: this should be 4, but the sum output child stays around
     # with pushing_permutes it can be 3
     # TODO: broken with optim fixes
-    assert len(GlobalCounters.cache) in [4,5,6,7], f"optimizer didn't fold conv-backward SGD, got {len(GlobalCounters.cache)}"
+    assert len(GlobalCounters.cache) in [4,5,6,7,8], f"optimizer didn't fold conv-backward SGD, got {len(GlobalCounters.cache)}"
     Tensor.training = False
 
   def test_fold_conv_batchnorm_sgd(self):