From 9ae3e9daf3537d2b5f77e52f6d8f62b8ddb3f655 Mon Sep 17 00:00:00 2001
From: George Hotz
Date: Thu, 29 Oct 2020 08:13:05 -0700
Subject: [PATCH] shape has to be a kwarg now, idk why this didn't break before

---
 test/test_mnist.py | 2 +-
 tinygrad/ops.py    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/test/test_mnist.py b/test/test_mnist.py
index f73a58f56..6b3c1ddb8 100644
--- a/test/test_mnist.py
+++ b/test/test_mnist.py
@@ -40,7 +40,7 @@ class TinyConvNet:
     x.data = x.data.reshape((-1, 1, 28, 28)) # hacks
     x = x.conv2d(self.c1).relu().max_pool2d()
     x = x.conv2d(self.c2).relu().max_pool2d()
-    x = x.reshape(Tensor(np.array((x.shape[0], -1))))
+    x = x.reshape(shape=[x.shape[0], -1])
     return x.dot(self.l1).logsoftmax()
 
 def train(model, optim, steps, BS=128):
diff --git a/tinygrad/ops.py b/tinygrad/ops.py
index 8201f635e..d2591de49 100644
--- a/tinygrad/ops.py
+++ b/tinygrad/ops.py
@@ -127,7 +127,7 @@ class Reshape(Function):
   @staticmethod
   def backward(ctx, grad_output):
     in_shape, = ctx.saved_tensors
-    return grad_output.reshape(in_shape), None
+    return grad_output.reshape(in_shape)
 register('reshape', Reshape)
 
 class LogSoftmax(Function):
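
Note (not part of the patch): a minimal usage sketch of the call-site change this patch implies, assuming the tinygrad Tensor API at this commit (Tensor wrapping a numpy array, and the registered reshape op from ops.py above); the input shape is a hypothetical example.

# sketch: reshape now takes its target shape as the `shape` keyword argument,
# not a Tensor operand as before.
import numpy as np
from tinygrad.tensor import Tensor

x = Tensor(np.random.randn(4, 7, 7).astype(np.float32))  # hypothetical input

# old style (removed by this patch):
#   x.reshape(Tensor(np.array((x.shape[0], -1))))
# new style (required after this patch):
flat = x.reshape(shape=[x.shape[0], -1])
print(flat.shape)  # expected: (4, 49)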