very minor
@@ -24,7 +24,7 @@ class Log(Function):
 class Exp(Function):
   def forward(self, x):
     ret = x.unary_op(UnaryOps.EXP)
-    self.save_for_backward(ret) # we save the output here, not the input
+    self.save_for_backward(ret)
     return ret

   def backward(self, grad_output):
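The changed line above only touches a comment; the autograd trick it refers to stays the same: since d/dx e^x = e^x, Exp saves its own output so backward can reuse it instead of recomputing the exponential. The backward body is cut off in this hunk, so the following is only a sketch of how it would typically look in this op style; the BinaryOps.MUL call is an assumption, not the committed code.

# Sketch only: Function, UnaryOps, BinaryOps and the unary_op/binary_op
# buffer interface are assumed from the surrounding tinygrad module.
class Exp(Function):
  def forward(self, x):
    ret = x.unary_op(UnaryOps.EXP)
    self.save_for_backward(ret)  # save the output: d/dx e^x = e^x
    return ret

  def backward(self, grad_output):
    ret, = self.saved_tensors
    # chain rule: grad_x = e^x * grad_output, reusing the saved output
    return ret.binary_op(BinaryOps.MUL, grad_output)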
@@ -30,7 +30,7 @@ def get_lazyops(op:LazyOp) -> List[LazyOp]: return functools.reduce(operator.add

 # a placeholder class to extend by the exec classes
 class DeviceBuffer:
-  shape: Any # should be Tuple[int, ...] but ndarray and torch.tensor have imcompatible types
+  shape: Any # should be Tuple[int, ...] but ndarray and torch.tensor have incompatible types

 # extend this if you don't have an exec_ast function
 # used in CPUBuffer and TorchBuffer
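For context on the comments in this hunk: DeviceBuffer is the placeholder base class the exec backends extend, and shape stays annotated as Any because numpy's ndarray and torch's tensor report incompatible shape types. Below is a hypothetical sketch of the "extend this if you don't have an exec_ast function" pattern in the CPUBuffer/TorchBuffer spirit; only DeviceBuffer and UnaryOps come from the hunks above, every other name is illustrative.

import numpy as np

# Hypothetical backend: wrap a concrete array, expose shape, and implement
# per-op methods directly instead of providing an exec_ast function.
class ToyCPUBuffer(DeviceBuffer):
  def __init__(self, data):
    self.buf = np.asarray(data, dtype=np.float32)
    self.shape = self.buf.shape  # a plain tuple here; torch would give torch.Size

  def toCPU(self):
    return self.buf

  def unary_op(self, op):
    if op == UnaryOps.EXP: return ToyCPUBuffer(np.exp(self.buf))
    raise NotImplementedError(op)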
@@ -156,4 +156,3 @@ class ShapeTracker:
     new_shape = [(s+(abs(m)-1))//abs(m) for s,m in zip(self.shape, mul)]
     offset = sum([(s-1)*z for s,z,m in zip(self.shape, self.strides, mul) if m < 0])
     self.views[-1] = View(new_shape, strides, self.offset + offset)
-
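The ShapeTracker tail above builds a strided (and possibly flipped) view without moving data: each dimension is rounded up by |m|, and for every axis with negative m the start offset jumps to that axis's last element. A small self-contained check of the arithmetic with made-up values (shape, strides and mul below are illustrative, not from the commit):

# Hypothetical numbers: a (4, 6) row-major buffer, keep axis 0,
# take every 2nd element of axis 1 in reverse (mul = (1, -2)).
shape, strides, mul = (4, 6), (6, 1), (1, -2)

new_shape = [(s + (abs(m) - 1)) // abs(m) for s, m in zip(shape, mul)]
offset = sum((s - 1) * z for s, z, m in zip(shape, strides, mul) if m < 0)

assert new_shape == [4, 3]  # ceil(4/1), ceil(6/2)
assert offset == 5          # start reading the flipped axis at element 5 of each row
# the View then pairs this offset with strides scaled by mul (here (6, -2)),
# so axis 1 is walked backwards two elements at a time.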