move graph.py and jit.py into features (#3376)

* move graph.py into features

* move jit into features

* fix quickstart
Author: George Hotz
Date: 2024-02-12 17:34:34 +01:00
Committed by: GitHub
parent 0f6cde243d
commit 41efaa848c
41 changed files with 77 additions and 47 deletions
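The change itself is mechanical: `TinyJit` and `CacheCollector` now import from `tinygrad.features.jit` instead of `tinygrad.jit`, and per the commit title the graphing helpers in graph.py move to `tinygrad.features` the same way. A minimal sketch of the new import path with typical `TinyJit` usage (the `step` function and shapes here are illustrative, not taken from this diff):

from tinygrad import Tensor
from tinygrad.features.jit import TinyJit  # was: from tinygrad.jit import TinyJit

@TinyJit
def step(x: Tensor) -> Tensor:
  # TinyJit records the kernels launched during the first couple of calls,
  # then replays the captured schedule; the jitted function should return
  # realized tensors and be called with consistent input shapes.
  return (x * 2).sum().realize()

for _ in range(4):
  out = step(Tensor.randn(8, 8))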

View File

@@ -7,7 +7,7 @@ from tinygrad.nn.state import get_parameters
 from tinygrad.nn import optim
 from tinygrad import Tensor, GlobalCounters
 from tinygrad.helpers import getenv
-from tinygrad.jit import CacheCollector
+from tinygrad.features.jit import CacheCollector
 def tensors_allocated():
   return sum(isinstance(x, Tensor) for x in gc.get_objects())
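For context on the hunk above: `CacheCollector` is the recorder `TinyJit` uses to capture launched kernels. A hedged sketch under the new path, assuming the `start()`/`finish()` recording API of this era (not shown in this diff):

from tinygrad import Tensor
from tinygrad.features.jit import CacheCollector

CacheCollector.start()                    # begin recording kernel launches
Tensor.ones(4, 4).contiguous().realize()  # forces at least one kernel
cache = CacheCollector.finish()           # captured launches since start()
print(f"captured {len(cache)} kernel(s)")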

View File

@@ -9,7 +9,7 @@ import numpy as np
 from PIL import Image
 from tinygrad.tensor import Tensor
 from tinygrad.helpers import getenv, fetch, Timing
-from tinygrad.jit import TinyJit
+from tinygrad.features.jit import TinyJit
 from extra.models.efficientnet import EfficientNet
 np.set_printoptions(suppress=True)

View File

@@ -3,7 +3,7 @@ start = time.perf_counter()
 from pathlib import Path
 import numpy as np
 from tinygrad import Tensor, Device, dtypes, GlobalCounters
-from tinygrad.jit import TinyJit
+from tinygrad.features.jit import TinyJit
 from tinygrad.nn.state import get_parameters, load_state_dict, safe_load
 from tinygrad.helpers import getenv, Timing
 from examples.mlperf import helpers
@@ -103,7 +103,7 @@ def eval_retinanet():
   coco_eval = COCOeval(coco, iouType="bbox")
   coco_evalimgs, evaluated_imgs, ncats, narea = [], [], len(coco_eval.params.catIds), len(coco_eval.params.areaRng)
-  from tinygrad.jit import TinyJit
+  from tinygrad.features.jit import TinyJit
   mdlrun = TinyJit(lambda x: mdl(input_fixup(x)).realize())
   n, bs = 0, 8

View File

@@ -13,7 +13,7 @@ from tinygrad import Device, GlobalCounters, dtypes, Tensor
 from tinygrad.helpers import Timing, Context, getenv, fetch, colored
 from tinygrad.nn import Conv2d, Linear, GroupNorm, LayerNorm, Embedding
 from tinygrad.nn.state import torch_load, load_state_dict, get_state_dict
-from tinygrad.jit import TinyJit
+from tinygrad.features.jit import TinyJit
 class AttnBlock:
   def __init__(self, in_channels):

View File

@@ -9,7 +9,7 @@ from tinygrad import nn, dtypes
 from tinygrad.helpers import fetch
 from tinygrad.nn.state import torch_load
 from tinygrad.tensor import Tensor
-from tinygrad.jit import TinyJit
+from tinygrad.features.jit import TinyJit
 from unidecode import unidecode
 LRELU_SLOPE = 0.1

View File

@@ -6,7 +6,7 @@ import base64
 import multiprocessing
 import numpy as np
 from typing import Optional, Union, Literal, List
-from tinygrad.jit import TinyJit
+from tinygrad.features.jit import TinyJit
 from tinygrad.nn.state import torch_load, load_state_dict
 from tinygrad.helpers import getenv, DEBUG, CI, fetch
 import tinygrad.nn as nn