move frontend dir to nn [pr] (#12470)

Author: George Hotz
Date: 2025-10-07 10:42:22 +08:00
Committed by: GitHub
parent f664bcc8bd
commit 0f25b4b289
22 changed files with 18 additions and 19 deletions
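
The change is purely mechanical: everything that lived under tinygrad.frontend now lives under tinygrad.nn, so downstream scripts only need to update their import paths. A minimal sketch of the ONNX runner under the new path (the model filename, input name, and tensor shape below are illustrative placeholders, not taken from this diff):

    import numpy as np
    from tinygrad import Tensor
    from tinygrad.nn.onnx import OnnxRunner   # was: from tinygrad.frontend.onnx import OnnxRunner

    run_onnx = OnnxRunner("model.onnx")       # hypothetical model file; the tests below pass a path the same way
    # hypothetical input name/shape; the runner is called with a dict of input Tensors
    outputs = run_onnx({"input": Tensor(np.zeros((1, 3, 224, 224), dtype=np.float32))})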

View File

@@ -1,6 +1,6 @@
import sys, time
from tinygrad import TinyJit, GlobalCounters, fetch, getenv
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
from extra.onnx_helpers import get_example_inputs, validate
def load_onnx_model(onnx_file):

View File

@@ -8,7 +8,7 @@ import numpy as np
import subprocess
import tensorflow as tf
import tf2onnx
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
from tinygrad.tensor import Tensor
from tinygrad.helpers import to_mv
from extra.export_model import export_model_clang, compile_net, jit_model

View File

@@ -10,7 +10,7 @@ from tinygrad.helpers import DEBUG, getenv
from tinygrad.engine.realize import CompiledRunner
import onnx
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
OPENPILOT_MODEL = sys.argv[1] if len(sys.argv) > 1 else "https://github.com/commaai/openpilot/raw/v0.9.7/selfdrive/modeld/models/supercombo.onnx"
OUTPUT = sys.argv[2] if len(sys.argv) > 2 else "/tmp/openpilot.pkl"

View File

@@ -1,7 +1,7 @@
import sys
from tinygrad import Tensor, fetch, GlobalCounters, dtypes
from tinygrad.uop.ops import UOp
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
from tinygrad.schedule.kernelize import get_kernelize_map
from tinygrad.schedule.rangeify import get_rangeify_map
from tinygrad.helpers import RANGEIFY

View File

@@ -27,7 +27,7 @@ class Model(nn.Module):
if __name__ == "__main__":
if getenv("TINY_BACKEND"):
-import tinygrad.frontend.torch # noqa: F401
+import tinygrad.nn.torch # noqa: F401
device = torch.device("tiny")
else:
device = torch.device({"METAL":"mps","NV":"cuda"}.get(Device.DEFAULT, "cpu"))
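
The torch backend hunks follow the same pattern; importing the module is what enables the "tiny" torch device used in these tests, and only the module path moved. A minimal sketch, assuming torch is installed (the final line is just an illustrative op, not from this diff):

    import torch
    import tinygrad.nn.torch  # noqa: F401  # was: import tinygrad.frontend.torch

    torch.set_default_device("tiny")   # as done in the tests in this diff
    x = torch.ones(2, 2) + 1           # illustrative: torch ops now run through tinygrad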

View File

@@ -2,7 +2,7 @@
import os
from ultralytics import YOLO
from pathlib import Path
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
from extra.onnx_helpers import get_example_inputs
os.chdir("/tmp")

View File

@@ -1,7 +1,7 @@
import onnx, yaml, tempfile, time, argparse, json
from pathlib import Path
from typing import Any
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
from extra.onnx_helpers import validate, get_example_inputs
from extra.huggingface_onnx.huggingface_manager import DOWNLOADS_DIR, snapshot_download_with_retry

View File

@@ -1,6 +1,6 @@
from tinygrad import Tensor
from tinygrad.tensor import _to_np_dtype
-from tinygrad.frontend.onnx import OnnxRunner, OnnxValue
+from tinygrad.nn.onnx import OnnxRunner, OnnxValue
import numpy as np
import onnxruntime as ort

View File

@@ -1,6 +1,6 @@
import unittest
import torch
-import tinygrad.frontend.torch
+import tinygrad.nn.torch
torch.set_default_device("tiny")
import numpy as np

View File

@@ -1,7 +1,7 @@
import unittest
from tinygrad.helpers import getenv
import torch
-import tinygrad.frontend.torch
+import tinygrad.nn.torch
torch.set_default_device("tiny")
import numpy as np

View File

@@ -31,7 +31,6 @@ setup(name='tinygrad',
'tinygrad.codegen.opt',
'tinygrad.codegen.late',
'tinygrad.engine',
-'tinygrad.frontend',
'tinygrad.nn',
'tinygrad.renderer',
'tinygrad.runtime',

View File

@@ -1,6 +1,6 @@
import time, sys, hashlib
from pathlib import Path
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
from tinygrad import Tensor, dtypes, TinyJit
from tinygrad.helpers import IMAGE, GlobalCounters, fetch, colored, getenv, trange
import numpy as np

View File

@@ -4,7 +4,7 @@ import torch
torch.set_num_threads(1)
import onnxruntime as ort
from onnx2torch import convert
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
from tinygrad.helpers import OSX, DEBUG, fetch, getenv
from tinygrad.dtype import _to_np_dtype
from tinygrad import Tensor, Device, dtypes

View File

@@ -6,7 +6,7 @@ import numpy as np
from tinygrad import Tensor, Device, dtypes
from tinygrad.helpers import getenv, OSX
from tinygrad.device import is_dtype_supported
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
# pip3 install tabulate
pytest_plugins = 'onnx.backend.test.report',

View File

@@ -5,7 +5,7 @@
from typing import Any
import unittest, onnx, tempfile
from tinygrad import dtypes, Tensor
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
import numpy as np
from extra.onnx_helpers import validate
from onnx.defs import ONNX_DOMAIN, AI_ONNX_PREVIEW_TRAINING_DOMAIN

View File

@@ -3,7 +3,7 @@ import numpy as np
from tinygrad import dtypes, Tensor
from tinygrad.uop.ops import Ops
from tinygrad.device import is_dtype_supported
-from tinygrad.frontend.onnx import OnnxRunner, OnnxDataType
+from tinygrad.nn.onnx import OnnxRunner, OnnxDataType
from hypothesis import given, strategies as st
# copied from test_const_folding.py

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env python
import unittest
import numpy as np
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
from tinygrad.device import Device
from tinygrad.helpers import fetch, Context

View File

@@ -8,7 +8,7 @@ from tinygrad.tensor import _to_np_dtype
from tinygrad.device import is_dtype_supported
if getenv("TINY_BACKEND"):
-import tinygrad.frontend.torch # noqa: F401 # pylint: disable=unused-import
+import tinygrad.nn.torch # noqa: F401 # pylint: disable=unused-import
torch.set_default_device("tiny")
if CI:

View File

@@ -68,7 +68,7 @@ class TestQuantizeOnnxCPU(unittest.TestCase):
import onnx # noqa: F401 # pylint: disable=unused-import
except ImportError:
raise unittest.SkipTest()
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.nn.onnx import OnnxRunner
out_file = get_quantized_model(sz)
run_onnx = OnnxRunner(out_file)
inp = Tensor(np.random.uniform(size=(sz, sz)).astype(np.float32))