Mirror of https://github.com/tinygrad/tinygrad.git, synced 2026-01-09 06:58:11 -05:00
rename to get_kernelize_map (#10465)
@@ -78,7 +78,7 @@ print("******** third, the UOp ***********")

 from tinygrad.engine.realize import run_schedule
 from tinygrad.engine.schedule import create_schedule_with_vars
-from tinygrad.engine.grouper import get_becomes_map
+from tinygrad.engine.grouper import get_kernelize_map

 # allocate some values + load in values
 a = UOp.new_buffer(DEVICE, 1, dtypes.int32)
@@ -91,7 +91,7 @@ out = a + b
 s = UOp(Ops.SINK, dtypes.void, (out,))

 # group the computation into kernels
-becomes_map = get_becomes_map(s)
+becomes_map = get_kernelize_map(s)

 # the compute maps to an assign
 assign = becomes_map[a+b]
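For context, the renamed entry point is called exactly like the old one; only the import and the name change. Below is a minimal sketch of the flow shown in the hunks above, not part of the diff itself; the DEVICE value and the omitted buffer copy-in step are assumptions.

from tinygrad import dtypes
from tinygrad.uop.ops import UOp, Ops
from tinygrad.engine.grouper import get_kernelize_map

DEVICE = "CPU"  # assumption: the surrounding example picks a device earlier in the file

# allocate two 1-element int32 buffers and express their sum, as in the hunks above
# (the full example also loads values into the buffers before realizing)
a = UOp.new_buffer(DEVICE, 1, dtypes.int32)
b = UOp.new_buffer(DEVICE, 1, dtypes.int32)
out = a + b
s = UOp(Ops.SINK, dtypes.void, (out,))

# group the computation into kernels; the returned map rewrites a+b into an assign
becomes_map = get_kernelize_map(s)
assign = becomes_map[a+b]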
@@ -2,7 +2,7 @@ import sys, onnx
 from tinygrad import Tensor, fetch, GlobalCounters
 from tinygrad.uop import UOp
 from tinygrad.frontend.onnx import OnnxRunner
-from tinygrad.engine.grouper import get_becomes_map
+from tinygrad.engine.grouper import get_kernelize_map
 from tinygrad.engine.schedule import create_schedule_with_vars
 from tinygrad.engine.realize import run_schedule

@@ -35,7 +35,7 @@ if __name__ == "__main__":
     if not in_target_path[s]:
       independent_set[s] = None
   independent = UOp.sink(*independent_set.keys())
-  kernelized = get_becomes_map(independent)
+  kernelized = get_kernelize_map(independent)
   independent = independent.substitute(kernelized)
   schedule, var_vals, becomes_map = create_schedule_with_vars(independent)
   run_schedule(schedule)
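The call sites in this file keep the pre-rename contract: kernelize the sink, rewrite it with the returned map, then schedule and run. A minimal sketch of that pattern, reusing the sink `s` from the sketch above:

from tinygrad.engine.grouper import get_kernelize_map
from tinygrad.engine.schedule import create_schedule_with_vars
from tinygrad.engine.realize import run_schedule

# the kernelize map groups the graph into kernels; substitute applies it to the sink
kernelized_sink = s.substitute(get_kernelize_map(s))
# lower the kernelized graph to a list of ScheduleItems, then execute it
schedule, var_vals, becomes_map = create_schedule_with_vars(kernelized_sink)
run_schedule(schedule)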
@@ -3,7 +3,7 @@
 import os, multiprocessing, logging, pickle, sqlite3, difflib, functools, warnings, itertools
 from typing import Callable, cast
 from tinygrad.helpers import VERSION, Context, ContextVar, colored, db_connection, getenv, tqdm
-from tinygrad.engine.grouper import get_becomes_map
+from tinygrad.engine.grouper import get_kernelize_map
 from tinygrad.codegen.kernel import Kernel, Opt
 from tinygrad.renderer import Renderer
 from tinygrad.uop.ops import UOp, Ops
@@ -34,7 +34,7 @@ class ProcessReplayWarning(Warning): pass

 def recreate_sched(big_sink:UOp) -> list[UOp]:
   UOp.unique_num = itertools.count(max([u.arg for u in big_sink.toposort() if u.op is Ops.UNIQUE], default=0)+1)
-  becomes_map = get_becomes_map(big_sink)
+  becomes_map = get_kernelize_map(big_sink)
   sched_sink = big_sink.substitute(becomes_map)
   return [u.arg.ast for u in sched_sink.toposort() if u.op is Ops.KERNEL]

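recreate_sched above also shows the inspection-only path: apply the kernelize map and walk the rewritten graph for KERNEL uops without executing anything. A small sketch of the same idea, again reusing the sink `s` from the first sketch:

from tinygrad.uop.ops import Ops
from tinygrad.engine.grouper import get_kernelize_map

# rewrite the sink with the kernelize map, then collect the AST of every KERNEL uop
kernelized = s.substitute(get_kernelize_map(s))
kernel_asts = [u.arg.ast for u in kernelized.toposort() if u.op is Ops.KERNEL]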
@@ -15,7 +15,7 @@ from tinygrad.uop.ops import PatternMatcher, UOp, Ops, GroupOp, UPat, graph_rewr
 from tinygrad.codegen.symbolic import symbolic_simple
 from tinygrad.uop.spec import type_verify, shape_spec
 from tinygrad.helpers import CI, DEBUG, FUSE_ARANGE, SPLIT_REDUCEOP, GlobalCounters, Context, getenv, all_same, temp
-from tinygrad.engine.grouper import view_left, view_right, sym, get_becomes_map, Kernel, create_ast, merge_views, create_kernels
+from tinygrad.engine.grouper import view_left, view_right, sym, get_kernelize_map, Kernel, create_ast, merge_views, create_kernels
 from tinygrad.engine.schedule import ScheduleItem, create_schedule_with_vars
 from tinygrad.engine.realize import CompiledRunner, run_schedule, lower_schedule

@@ -29,7 +29,7 @@ def check_schedule(t:Union[Tensor, List[Tensor], UOp], allowed:int, to_prerealiz
   else:
     assert isinstance(t, UOp), f"can't schedule {t}"
   sink = UOp.sink(t) if t.op is not Ops.SINK else t
-  becomes_map = get_becomes_map(sink)
+  becomes_map = get_kernelize_map(sink)
   sched, _, __ = create_schedule_with_vars(sink.substitute(becomes_map))
   # test lowering all the ScheduleItems to ExecItems
   kernel_cnt = len([si for si,ei in lower_schedule(sched.copy()) if isinstance(ei.prg, CompiledRunner) or not filter_sink])
@@ -489,7 +489,7 @@ def get_name(becomes_map:dict[UOp, UOp]) -> str:
   return f"Schedule {pluralize('Kernel', len(set(assigned_kernels)))}"

 @track_rewrites(name_fxn=get_name)
-def get_becomes_map(big_sink:UOp) -> dict[UOp, UOp]:
+def get_kernelize_map(big_sink:UOp) -> dict[UOp, UOp]:
   # multi + merge_views + simplify
   tensor_map = graph_rewrite_map(big_sink, multi_pm+replace_allreduce+do_fuse+merge_views+sym+replace_contiguous, ctx={}, name="merge_views")

@@ -14,7 +14,7 @@ from tinygrad.device import Device, Buffer
 from tinygrad.engine.realize import run_schedule
 from tinygrad.engine.memory import memory_planner
 from tinygrad.engine.schedule import ScheduleItem, create_schedule_with_vars
-from tinygrad.engine.grouper import get_becomes_map
+from tinygrad.engine.grouper import get_kernelize_map

 # *** all in scope Tensors are here. this gets relevant UOps ***

@@ -235,7 +235,7 @@ class Tensor(SimpleMathTrait):
     # verify Tensors match the spec
     if __debug__: type_verify(list(big_sink.toposort()), tensor_uop_spec)

-    becomes_map = get_becomes_map(big_sink)
+    becomes_map = get_kernelize_map(big_sink)
     _apply_map_to_tensors(becomes_map, name="Apply Kernelize Map")
     return self
