From 88de42fb6e7ecedc2a1e6d9880da49c8db00dcdc Mon Sep 17 00:00:00 2001
From: George Hotz
Date: Sun, 5 Jun 2022 12:13:05 -0700
Subject: [PATCH] document graph mode

---
 README.md          | 9 ++++++++-
 test/test_mnist.py | 5 +++++
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 8d938a7147..739ddecc3e 100644
--- a/README.md
+++ b/README.md
@@ -119,7 +119,7 @@ Relu, Log, Exp # unary ops
 Sum, Max # reduce ops (with axis argument)
 Add, Sub, Mul, Pow # binary ops (with broadcasting)
 Reshape, Transpose, Slice # movement ops
-Matmul, Conv2D # processing ops
+Matmul, Conv2D(NCHW) # processing ops
 ```
 
 While more ops may be added, I think this base is stable.
@@ -162,6 +162,13 @@ See `examples/yolov3.py`
 
 tinygrad will always be below 1000 lines. If it isn't, we will revert commits until tinygrad becomes smaller.
 
+### Drawing Execution Graph
+```bash
+GRAPH=1 python3 test/test_mnist.py TestMNIST.test_conv_onestep
+dot -Tsvg /tmp/net.dot -o /tmp/net.svg
+open /tmp/net.svg
+```
+
 ### Running tests
 
 ```bash
diff --git a/test/test_mnist.py b/test/test_mnist.py
index 0b4f883cc0..6ab4f3001b 100644
--- a/test/test_mnist.py
+++ b/test/test_mnist.py
@@ -46,6 +46,11 @@ class TinyConvNet:
     return x.dot(self.l1).logsoftmax()
 
 class TestMNIST(unittest.TestCase):
+  def test_conv_onestep(self):
+    np.random.seed(1337)
+    model = TinyConvNet()
+    optimizer = optim.Adam(model.parameters(), lr=0.001)
+    train(model, X_train, Y_train, optimizer, steps=1)
   def test_conv(self):
     np.random.seed(1337)