var_vals prereq for deleting LBScheduleItem [run_process_replay] (#6511)

This commit is contained in:
qazal
2024-09-14 17:00:30 +08:00
committed by GitHub
parent 9188245677
commit 4ffb722d4e
4 changed files with 22 additions and 20 deletions

View File

@@ -17,7 +17,7 @@ def process_replay(outs:List[LazyBuffer], graph:DefaultDict[LBScheduleItem, List
if not os.path.isfile(fp):
shutil.copyfile(fetch(f"https://raw.githubusercontent.com/tinygrad/tinygrad/{ref_schedule}/tinygrad/engine/schedule.py", allow_caching=False), fp)
# create the reference graph
-  ref_graph, ref_in_degree = importlib.import_module("test.external.process_replay.master_schedule")._graph_schedule(outs, set())
+  ref_graph, ref_in_degree = importlib.import_module("test.external.process_replay.master_schedule")._graph_schedule(outs)
# compare
diff_schedule([(ref_graph, ref_in_degree), (graph, in_degree)])

View File

@@ -18,8 +18,8 @@ class TestDiffSchedule(unittest.TestCase):
X = Tensor.randn(10, 10).realize()
idxs = Tensor([0, 2]).realize()
xt = cast(LazyBuffer, X[idxs].lazydata)
-    with Context(FUSE_ARANGE=0): ref_graph, ref_in_degree = _graph_schedule([xt])
-    with Context(FUSE_ARANGE=1): compare_graph, compare_in_degree = _graph_schedule([xt])
+    with Context(FUSE_ARANGE=0): ref_graph, ref_in_degree, _ = _graph_schedule([xt])
+    with Context(FUSE_ARANGE=1): compare_graph, compare_in_degree, _ = _graph_schedule([xt])
# 1 arange LazyBuffer folds, 1 arange child's kernel changes
changed = diff_schedule([(ref_graph, ref_in_degree), (compare_graph, compare_in_degree)])
self.assertEqual(changed, 1)
@@ -30,15 +30,15 @@ class TestDiffSchedule(unittest.TestCase):
for _ in range(2):
X = Tensor.randn(10, 10).realize()
xt = cast(LazyBuffer, X[idxs].lazydata)
-      with Context(FUSE_ARANGE=0): schedules.append(_graph_schedule([xt]))
-      with Context(FUSE_ARANGE=1): schedules.append(_graph_schedule([xt]))
+      with Context(FUSE_ARANGE=0): schedules.append(_graph_schedule([xt])[:-1])
+      with Context(FUSE_ARANGE=1): schedules.append(_graph_schedule([xt])[:-1])
changed = diff_schedule(schedules)
self.assertEqual(changed, 1)
def test_no_diff(self):
a = cast(LazyBuffer, (Tensor([1])+Tensor([2])).lazydata)
-    with Context(FUSE_ARANGE=0): ref_graph, ref_in_degree = _graph_schedule([a])
-    with Context(FUSE_ARANGE=1): compare_graph, compare_in_degree = _graph_schedule([a])
+    with Context(FUSE_ARANGE=0): ref_graph, ref_in_degree, _ = _graph_schedule([a])
+    with Context(FUSE_ARANGE=1): compare_graph, compare_in_degree, _ = _graph_schedule([a])
changed = diff_schedule([(ref_graph, ref_in_degree), (compare_graph, compare_in_degree)])
self.assertEqual(changed, 0)
@@ -49,8 +49,8 @@ class TestDiffSchedule(unittest.TestCase):
c1(img).relu().mean().backward()
assert img.grad is not None and c1.weight.grad is not None
outs = [cast(LazyBuffer, img.grad.lazydata), cast(LazyBuffer, c1.weight.grad.lazydata)]
-    with Context(FUSE_CONV_BW=0): ref_graph, ref_in_degree = _graph_schedule(outs)
-    with Context(FUSE_CONV_BW=1): compare_graph, compare_in_degree = _graph_schedule(outs)
+    with Context(FUSE_CONV_BW=0): ref_graph, ref_in_degree, _ = _graph_schedule(outs)
+    with Context(FUSE_CONV_BW=1): compare_graph, compare_in_degree, _ = _graph_schedule(outs)
changed = diff_schedule([(ref_graph, ref_in_degree), (compare_graph, compare_in_degree)])
# 1 reduceop folds, its child reduceop changes
self.assertEqual(changed, 1)