mirror of https://github.com/tinygrad/tinygrad.git
remove np from beautiful_cifar (#10988)
* remove np from beautiful_cifar
* remove np from cifar
* rename variable and rename tensor.arrange to just tensor.randperm

Co-authored-by: chenyu <chenyu@fastmail.com>
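In practice this swaps host-side NumPy shuffling for a permutation drawn by tinygrad's own RNG. A minimal sketch of the reproducibility the script keeps relying on (assuming, as the diff suggests, that Tensor.manual_seed also seeds Tensor.randperm; the size 10 is made up):

from tinygrad import Tensor

Tensor.manual_seed(1337)
a = Tensor.randperm(10).tolist()  # random permutation of 0..9
Tensor.manual_seed(1337)
b = Tensor.randperm(10).tolist()  # reseeding reproduces the same permutation
assert a == b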
@@ -2,7 +2,6 @@ import time
 start_tm = time.perf_counter()
 import math
 from typing import Tuple, cast
-import numpy as np
 from tinygrad import Tensor, nn, GlobalCounters, TinyJit, dtypes, Device
 from tinygrad.helpers import partition, trange, getenv, Context
 from extra.lr_scheduler import OneCycleLR
@@ -150,13 +149,12 @@ if __name__ == "__main__":
       acc.append((out.argmax(-1) == Y).sum() / eval_batchsize)
     return Tensor.stack(*loss).mean() / (batchsize*loss_batchsize_scaler), Tensor.stack(*acc).mean()
 
-  np.random.seed(1337)
   Tensor.manual_seed(1337)
+  num_train_samples = X_train.shape[0]
 
   for epoch in range(math.ceil(hyp['misc']['train_epochs'])):
-    # TODO: move to tinygrad
     gst = time.perf_counter()
-    idxs = np.arange(X_train.shape[0])
-    np.random.shuffle(idxs)
-    tidxs = Tensor(idxs, dtype='int')[:num_steps_per_epoch*batchsize].reshape(num_steps_per_epoch, batchsize)  # NOTE: long doesn't fold
+    tidxs = Tensor.randperm(num_train_samples, dtype='int')[:num_steps_per_epoch*batchsize].reshape(num_steps_per_epoch, batchsize)
     train_loss:float = 0
     for epoch_step in (t:=trange(num_steps_per_epoch)):
       st = time.perf_counter()
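Standalone, the new per-epoch shuffle reduces to the following sketch (the sizes are made up for illustration; in the script, batchsize and num_steps_per_epoch come from the hyperparameters, and each row of tidxs then indexes X_train/Y_train):

from tinygrad import Tensor

num_train_samples, batchsize, num_steps_per_epoch = 50000, 512, 97
# one permutation of all sample indices, truncated to a whole number of
# steps and reshaped so row i holds the batch indices for step i
tidxs = Tensor.randperm(num_train_samples, dtype='int')[:num_steps_per_epoch*batchsize].reshape(num_steps_per_epoch, batchsize)
print(tidxs.shape)  # (97, 512); e.g. X_train[tidxs[0]] would gather the first batch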