onnx parser (#10435)

* onnx parser

* fix compile, lint

* onnx.load -> onnx_load

* compatible with ModelProto

* fix test external_test_onnx_ops.py

* fix tests

* fix signed int

* reduce to 261 lines

* fix TypeProto.Optional

* debug for _parse_message, add TypeProto.Sequence, cleanup

* onnx_load from Tensor

* remove BufferedReader

* 174 lines and reduce tensor copy

* cleanup

* use onnx_load in external_model_benchmark.py

* fix qcom test

* [onnx] parser supports external data

---------

Co-authored-by: b1tg <b1tg@users.noreply.github.com>
Co-authored-by: chenyu <chenyu@fastmail.com>
b1tg authored 2025-06-10 00:44:28 +08:00, committed by GitHub
parent cfa65bea05
commit 24d328e313
13 changed files with 273 additions and 50 deletions
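
The commit replaces onnx.load with an in-tree onnx_load, i.e. a hand-written parser for the ONNX protobuf format (ModelProto, TypeProto.Optional, TypeProto.Sequence, external data, loading from a Tensor). As an illustration only, not code from this PR, the sketch below shows the protobuf wire-format decoding such a parser is built on; the names decode_varint and decode_field_header are hypothetical.

# Illustrative sketch: minimal protobuf wire-format reader (not the PR's code).
def decode_varint(buf: bytes, pos: int) -> tuple[int, int]:
  # varints pack 7 payload bits per byte, least-significant group first;
  # the high bit of each byte flags whether another byte follows
  result, shift = 0, 0
  while True:
    b = buf[pos]
    pos += 1
    result |= (b & 0x7f) << shift
    if not (b & 0x80): return result, pos
    shift += 7

def decode_field_header(buf: bytes, pos: int) -> tuple[int, int, int]:
  # every protobuf field starts with a varint key: (field_number << 3) | wire_type
  key, pos = decode_varint(buf, pos)
  return key >> 3, key & 0x7, pos

# e.g. b'\x08\x07' decodes as field 1, wire type 0 (varint), followed by the value 7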


@@ -1,8 +1,7 @@
 from tinygrad import Tensor
 from tinygrad.tensor import _to_np_dtype
-from tinygrad.frontend.onnx import OnnxRunner
+from tinygrad.frontend.onnx import OnnxRunner, onnx_load
 from extra.onnx import OnnxValue
-import onnx
 import numpy as np
 import onnxruntime as ort
@@ -47,7 +46,7 @@ def get_example_inputs(graph_inputs:dict[str, OnnxValue], config={}):
   return ret
 def validate(onnx_file, inputs, rtol=1e-5, atol=1e-5):
-  run_onnx = OnnxRunner(onnx.load(onnx_file))
+  run_onnx = OnnxRunner(onnx_load(onnx_file))
   ort_options = ort.SessionOptions()
   ort_options.log_severity_level = 3
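
For callers the change is a drop-in swap of the loader. A hedged usage sketch, assuming a placeholder model path (the OnnxRunner construction matches the diff above; the Tensor input path comes from the commit message):

from tinygrad.frontend.onnx import OnnxRunner, onnx_load

# before: run_onnx = OnnxRunner(onnx.load("model.onnx"))  # required the external onnx package
run_onnx = OnnxRunner(onnx_load("model.onnx"))            # in-tree parser; "model.onnx" is a placeholder path
# per the commit message, onnx_load can also accept a Tensor holding the serialized model bytes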