Make the input tensors contiguous before converting them to NumPy arrays.

This commit is contained in:
Prashant Kumar
2023-06-26 11:39:40 +00:00
parent 27a08735db
commit 74a7202173

View File

@@ -136,7 +136,7 @@ class SharkBackend:
self.shark_module = shark_module
def __call__(self, *inputs):
-        np_inputs = [x.detach().cpu().numpy() for x in inputs]
+        np_inputs = [x.contiguous().detach().cpu().numpy() for x in inputs]
np_outs = self.shark_module("forward", np_inputs)
if self.was_unwrapped:
np_outs = [