diff --git a/tinygrad/mlops.py b/tinygrad/mlops.py
index bbfac292a4..00ad2a8531 100644
--- a/tinygrad/mlops.py
+++ b/tinygrad/mlops.py
@@ -24,7 +24,7 @@ class Log(Function):
 class Exp(Function):
   def forward(self, x):
     ret = x.unary_op(UnaryOps.EXP)
-    self.save_for_backward(ret)  # we save the output here, not the input
+    self.save_for_backward(ret)
     return ret
 
   def backward(self, grad_output):
diff --git a/tinygrad/ops.py b/tinygrad/ops.py
index aeacccfb37..8551159ea4 100644
--- a/tinygrad/ops.py
+++ b/tinygrad/ops.py
@@ -30,7 +30,7 @@ def get_lazyops(op:LazyOp) -> List[LazyOp]: return functools.reduce(operator.add
 
 # a placeholder class to extend by the exec classes
 class DeviceBuffer:
-  shape: Any  # should be Tuple[int, ...] but ndarray and torch.tensor have imcompatible types
+  shape: Any  # should be Tuple[int, ...] but ndarray and torch.tensor have incompatible types
   # extend this if you don't have an exec_ast function
   # used in CPUBuffer and TorchBuffer
 
diff --git a/tinygrad/shapetracker.py b/tinygrad/shapetracker.py
index d71d0d837b..47fe5c9175 100644
--- a/tinygrad/shapetracker.py
+++ b/tinygrad/shapetracker.py
@@ -156,4 +156,3 @@ class ShapeTracker:
     new_shape = [(s+(abs(m)-1))//abs(m) for s,m in zip(self.shape, mul)]
     offset = sum([(s-1)*z for s,z,m in zip(self.shape, self.strides, mul) if m < 0])
     self.views[-1] = View(new_shape, strides, self.offset + offset)
-
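
Note on the mlops.py hunk: the comment being dropped documented a real subtlety. Exp saves its output rather than its input because d/dx exp(x) = exp(x), so the forward result is exactly the factor backward needs, and the exponential never has to be recomputed. A minimal scalar sketch of that pattern, using a toy stand-in class (ToyExp and its plain-float methods are illustrative only, not tinygrad's LazyBuffer-based Function API):

import math

class ToyExp:
  # toy autograd-style node; names here are hypothetical, for illustration only
  def forward(self, x):
    self.ret = math.exp(x)         # save the output: it doubles as d/dx exp(x)
    return self.ret
  def backward(self, grad_output):
    return self.ret * grad_output  # chain rule, reusing the saved output

Contrast Log just above this hunk, whose derivative 1/x depends on the input, so saving the input (not the output) is the natural choice there.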
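
Note on the shapetracker.py hunk (which only drops a trailing blank line): the surrounding code implements strided and flipped views. For a per-axis stride multiplier m, the new extent is ceil(s/|m|), and each flipped axis (m < 0) shifts the base offset by (s-1)*stride so reads start from the last element. A quick NumPy check of that arithmetic (the example array and `mul` are made up; the `strides` the patch passes to View is presumably the z*m product computed just above the hunk):

import numpy as np

x = np.arange(12).reshape(3, 4)
mul = (1, -2)  # keep rows; take every other column, reversed

shape = x.shape
strides = tuple(s // x.itemsize for s in x.strides)  # byte strides -> element strides
new_shape = [(s + abs(m) - 1) // abs(m) for s, m in zip(shape, mul)]  # ceil(s/|m|)
new_strides = [z * m for z, m in zip(strides, mul)]
offset = sum((s - 1) * z for s, z, m in zip(shape, strides, mul) if m < 0)

# gather through the flat buffer using the computed view; matches basic slicing
flat = x.ravel()
out = np.array([[flat[offset + i * new_strides[0] + j * new_strides[1]]
                 for j in range(new_shape[1])]
                for i in range(new_shape[0])])
assert (out == x[::1, ::-2]).all()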