tinygrad/test/test_winograd.py
Friedrich Carl Eichenroth 75676ab8e1 Profiling-helper (#2321)
* change profiler

* remove unused imports

* remove unused imports

* change lazybuffer references

* remove unused line

* remove unused import

* remove unused stuff

* add types

* typing

* typing

* typing

* trigger actions

* -1 loc

* fixup

* trigger actions

* revert lazy typing changes

* WIP profiler helper

* replace old start & stop profiler

* fixup

* linting

* Update llama.py

---------

Co-authored-by: George Hotz <72895+geohot@users.noreply.github.com>
2023-11-16 14:15:56 -08:00

38 lines · 1.1 KiB · Python

import unittest
from tinygrad.helpers import Timing, CI, Profiling
from tinygrad.tensor import Tensor
from tinygrad.ops import LoadOps
from tinygrad.codegen.linearizer import Linearizer

class TestWinograd(unittest.TestCase):
  def setUp(self):
    self.old = Tensor.wino
    Tensor.wino = 1
  def tearDown(self): Tensor.wino = self.old

  def test_speed(self):
    x = Tensor.empty(1,4,9,9)
    w = Tensor.empty(4,4,3,3)

    with Timing("running conv: "):
      out = Tensor.conv2d(x, w)

    with Timing("scheduling: "):
      sched = out.lazydata.schedule()

    for i,s in enumerate(sched):
      if s.ast.op in LoadOps: continue
      ops = s.ast.get_lazyops()
      with Timing(f"linearize {i} with {len(ops):4d} ops: "):
        l = Linearizer(s.ast)
        l.hand_coded_optimizations()
        l.linearize()

  def test_profile(self):
    x,w = Tensor.rand(1,4,9,9).realize(), Tensor.rand(4,4,3,3).realize()
    with Profiling(enabled=not CI, sort='time'):
      out = Tensor.conv2d(x,w).realize()
    out.numpy()

if __name__ == '__main__':
  unittest.main(verbosity=2)
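
A minimal standalone sketch of the Profiling helper that PR #2321 adds and test_profile exercises above. It assumes only the constructor arguments the test itself passes (enabled=... and sort='time'); the exact report format depends on the helper's implementation at this commit:

from tinygrad.helpers import Profiling
from tinygrad.tensor import Tensor

x = Tensor.rand(1,4,9,9).realize()
w = Tensor.rand(4,4,3,3).realize()
with Profiling(enabled=True, sort='time'):
  # on exit the helper is expected to print profiling stats sorted by internal time
  Tensor.conv2d(x, w).realize()

To run only one of these tests directly: python test/test_winograd.py -k test_profile (unittest's -k flag filters tests by name).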