Mirror of https://github.com/tinygrad/tinygrad.git, synced 2026-01-06 21:53:53 -05:00
clean up unused imports in examples and update CI linting (#11024)
* clean up unused imports in examples
* enable unused import checking in examples
* lint
* ignore F541 and F841 - focus on unused imports only
* clean up
* restore tinygrad.frontend.torch for TINY_BACKEND
* tiny change
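The change enables the unused-import check (pyflakes code F401) for examples/ while continuing to ignore F541 and F841 there. For reference, a hypothetical snippet (not from the repo) showing what each of those codes flags:

import json             # F401: module imported but unused - the check this commit turns on for examples/

def demo():
  message = f"hello"    # F541: f-string without any placeholders - still ignored under examples/
  unused_value = 1      # F841: local variable assigned but never used - still ignored under examples/
  return message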
@@ -1,4 +1,4 @@
-import sys, time, pickle
+import sys, time
from tinygrad import TinyJit, GlobalCounters, fetch, getenv
from tinygrad.frontend.onnx import OnnxRunner, onnx_load
from extra.onnx_helpers import get_example_inputs, validate
@@ -4,7 +4,7 @@ sys.path.append(os.getcwd())

from io import StringIO
from contextlib import redirect_stdout
-from tinygrad import Tensor, nn, Device, dtypes
+from tinygrad import Tensor, nn
from tinygrad.helpers import Timing, colored, getenv, fetch
from extra.models.llama import Transformer, convert_from_huggingface, fix_bf16
from sentencepiece import SentencePieceProcessor
@@ -8,7 +8,7 @@ import numpy as np
from typing import Optional
from extra.lr_scheduler import OneCycleLR
from tinygrad import nn, dtypes, Tensor, Device, GlobalCounters, TinyJit, Variable
-from tinygrad.nn.state import get_state_dict, get_parameters
+from tinygrad.nn.state import get_state_dict
from tinygrad.nn import optim
from tinygrad.helpers import Context, BEAM, WINO, getenv, colored, prod
from extra.bench_log import BenchEvent, WallTimeEvent
@@ -1,11 +1,11 @@
#!/usr/bin/env python3
import os
if "NOOPT" not in os.environ: os.environ["NOOPT"] = "1"
-from tinygrad import Device, nn, Tensor, dtypes, Variable
+from tinygrad import Device, nn, Tensor, dtypes
Device.DEFAULT = "CPU"
from train_gpt2 import GPT, GPTConfig
-from tinygrad.helpers import dedup, to_function_name, flatten, getenv, GlobalCounters, ansilen, to_function_name
-from tinygrad.engine.realize import get_kernel, run_schedule
+from tinygrad.helpers import dedup, flatten, getenv, GlobalCounters, to_function_name
+from tinygrad.engine.realize import get_kernel
from tinygrad.engine.memory import memory_planner
from tinygrad.uop.ops import Ops
@@ -27,7 +27,7 @@ class Model(nn.Module):

if __name__ == "__main__":
  if getenv("TINY_BACKEND"):
-    import tinygrad.frontend.torch
+    import tinygrad.frontend.torch # noqa: F401
    device = torch.device("tiny")
  else:
    device = torch.device({"METAL":"mps","NV":"cuda"}.get(Device.DEFAULT, "cpu"))
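The hunk above is the "restore tinygrad.frontend.torch for TINY_BACKEND" item from the commit message: the import looks unused, but importing tinygrad.frontend.torch registers the "tiny" device with PyTorch as a side effect, so it is kept and annotated with # noqa: F401 rather than deleted. A minimal sketch of the pattern (the device name comes from the hunk; the tensor call is an assumed usage):

import torch
import tinygrad.frontend.torch  # noqa: F401 - imported only for its side effect of registering the "tiny" torch device

device = torch.device("tiny")        # resolves only because the import above ran
x = torch.ones(4, 4, device=device)  # assumed usage: the op dispatches to the tinygrad backend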
@@ -5,7 +5,7 @@

from tinygrad import Tensor, TinyJit, dtypes, GlobalCounters
from tinygrad.nn import Conv2d, GroupNorm
-from tinygrad.nn.state import safe_load, load_state_dict, get_state_dict
+from tinygrad.nn.state import safe_load, load_state_dict
from tinygrad.helpers import fetch, trange, colored, Timing
from extra.models.clip import Embedder, FrozenClosedClipEmbedder, FrozenOpenClipEmbedder
from extra.models.unet import UNetModel, Upsample, Downsample, timestep_embedding
@@ -7,7 +7,7 @@
from examples.beautiful_mnist import Model
from tinygrad import Tensor, nn, getenv, GlobalCounters, Variable
from tinygrad.nn.datasets import mnist
-from tinygrad.helpers import trange, DEBUG
+from tinygrad.helpers import trange

# STEPS=70 python3 examples/stunning_mnist.py
# NOTE: it's broken with STACK=1, why?
@@ -2,10 +2,9 @@
#!POPCORN gpu A100
# not a stable API, but works

-import torch, functools
+import torch
from tinygrad import Tensor, TinyJit, Device
from tinygrad.engine.realize import CompiledRunner
-from tinygrad.helpers import get_single_element, Context, OSX
+from tinygrad.helpers import Context, OSX
from tinygrad.dtype import _from_torch_dtype

@TinyJit
@@ -2,8 +2,6 @@ import sys
import random
import json
import numpy
from pathlib import Path
from PIL import Image
from tinygrad.tensor import Tensor
from tinygrad.nn.optim import SGD
from tinygrad.nn.state import safe_save, safe_load, get_state_dict, load_state_dict
@@ -5,7 +5,7 @@ from typing import Optional, Union, Literal, List

from tinygrad import Tensor, TinyJit, Variable, nn
from tinygrad.nn.state import torch_load, load_state_dict
-from tinygrad.helpers import getenv, DEBUG, fetch
+from tinygrad.helpers import getenv, fetch

import numpy as np
import librosa
@@ -4,7 +4,6 @@ from ultralytics import YOLO
from pathlib import Path
from tinygrad.frontend.onnx import OnnxRunner, onnx_load
from extra.onnx_helpers import get_example_inputs
-from tinygrad.tensor import Tensor

os.chdir("/tmp")
if not Path("yolov8n-seg.onnx").is_file():
@@ -1,7 +1,5 @@
from tinygrad.nn import Conv2d, BatchNorm2d
from tinygrad.tensor import Tensor
from tinygrad.device import is_dtype_supported
from tinygrad import dtypes
import numpy as np
from itertools import chain
from pathlib import Path
ruff.toml (10 changed lines)
@@ -36,9 +36,17 @@ line-length = 150

exclude = [
  "docs/",
-  "examples/",
  "extra/",
  "tinygrad/runtime/autogen",
  "test/external/mlperf_resnet",
  "test/external/mlperf_unet3d",
]
+
+# detect unused imports in examples
+[lint.per-file-ignores]
+"examples/**/*.py" = [
+  "W6", "E71", "E72", "E112", "E113", "E203", "E272", "E275",
+  "E303", "E304", "E501", "E702", "E703", "E731", "W191",
+  "W291", "W293", "UP039", "C416", "RET506", "RET507", "A",
+  "FURB110", "RUF018", "F541", "F841"
+]
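With "examples/" dropped from exclude and only style codes plus F541/F841 listed in per-file-ignores, the F401 unused-import check now applies to everything under examples/. As an illustration of what that check conceptually does, a minimal standard-library sketch (not how ruff is implemented; it misses string references, re-exports, and side-effect imports like the TINY_BACKEND one above):

import ast, sys

def unused_imports(path: str) -> list[str]:
  # collect names bound by import statements and names referenced anywhere in the module body
  with open(path) as f: tree = ast.parse(f.read())
  imported, used = set(), set()
  for node in ast.walk(tree):
    if isinstance(node, ast.Import):
      for a in node.names: imported.add(a.asname or a.name.split(".")[0])
    elif isinstance(node, ast.ImportFrom):
      for a in node.names: imported.add(a.asname or a.name)
    elif isinstance(node, ast.Name):
      used.add(node.id)
  return sorted(imported - used)

if __name__ == "__main__":
  for name in unused_imports(sys.argv[1]): print(f"unused import: {name}")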