diff --git a/tinygrad/mixin/movement.py b/tinygrad/mixin/movement.py
index 68b64c7b9b..47ca3640e8 100644
--- a/tinygrad/mixin/movement.py
+++ b/tinygrad/mixin/movement.py
@@ -107,6 +107,12 @@ class MovementMixin:
     ret = self._mop(Ops.RESHAPE, arg=new_shape)
     return self if ret.shape == self.shape else ret
 
+  def pad(self, arg:tuple[tuple[sint, sint] | None, ...]) -> Self:
+    if self.ndim != len(arg):
+      raise ValueError(f"{self.ndim=} != {len(arg)=}")
+    ret = self._mop(Ops.PAD, tuple(x if x is not None else (0, 0) for x in arg))
+    return self if ret.shape == self.shape else ret
+
   def shrink(self, arg: tuple[tuple[sint, sint] | None, ...]) -> Self:
     """
     Returns a tensor that shrinks the each axis based on input arg.
diff --git a/tinygrad/uop/ops.py b/tinygrad/uop/ops.py
index c55847a08d..705c78082a 100644
--- a/tinygrad/uop/ops.py
+++ b/tinygrad/uop/ops.py
@@ -650,16 +650,6 @@ class UOp(OpMixin, metaclass=UOpMetaClass):
     if ret.shape == self.shape and same_shape_noop: return self
     return ret
 
-  # in these four, if the shape doesn't change we can return self
-  #def reshape(self, arg:tuple[sint, ...]): return self._mop(Ops.RESHAPE, arg, same_shape_noop=True)
-  #def expand(self, arg:tuple[sint, ...]): return self._mop(Ops.EXPAND, arg, same_shape_noop=True)
-  #def shrink(self, arg:tuple[tuple[sint, sint], ...]): return self._mop(Ops.SHRINK, arg, same_shape_noop=True)
-  def pad(self, arg:tuple[tuple[sint, sint], ...]): return self._mop(Ops.PAD, arg, same_shape_noop=True)
-
-  # in these two, we have custom logic to check if they are a no-op
-  #def permute(self, arg:tuple[int, ...]): return self._mop(Ops.PERMUTE, arg, same_shape_noop=False) if arg != tuple(range(len(self.shape))) else self
-  #def flip(self, arg:tuple[bool, ...]): return self._mop(Ops.FLIP, arg, same_shape_noop=False) if any(arg) and len(arg) == len(self.shape) else self
-
   # *** uop UNIQUE ***
 
   # TODO: use this in Buffer
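
For context, a small standalone sketch of how the `pad` argument added in this diff is interpreted. The helper names below (`normalize_pad_arg`, `padded_shape`) are hypothetical illustrations, not tinygrad API: each axis gets a `(before, after)` pair, `None` is shorthand for `(0, 0)` (no padding on that axis), the argument length must equal the number of dimensions, and a call that leaves the shape unchanged returns `self`.

```python
# Illustrative only: mirrors the validation and None-handling in MovementMixin.pad.

def normalize_pad_arg(shape, arg):
  # one entry per axis; None means no padding on that axis
  if len(shape) != len(arg):
    raise ValueError(f"ndim={len(shape)} != len(arg)={len(arg)}")
  return tuple(x if x is not None else (0, 0) for x in arg)

def padded_shape(shape, arg):
  # each axis grows by (before + after)
  return tuple(s + b + a for s, (b, a) in zip(shape, normalize_pad_arg(shape, arg)))

assert padded_shape((2, 3), ((1, 1), None)) == (4, 3)
assert padded_shape((2, 3), (None, None)) == (2, 3)  # shape unchanged: pad is a no-op and returns self
```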