mirror of https://github.com/osmarks/nanogpt-experiments.git synced 2024-11-10 20:09:58 +00:00

slight tweak compressing LOC

Andrej Karpathy 2023-02-04 15:57:29 +00:00
parent 53d56b82f1
commit b3c17c6c6a


@@ -233,13 +233,10 @@ X, Y = get_batch('train') # fetch the very first batch
 t0 = time.time()
 while True:
 
-    # determine the learning rate for this iteration
-    if decay_lr:
-        lr = get_lr(iter_num)
-        for param_group in optimizer.param_groups:
-            param_group['lr'] = lr
-    else:
-        lr = learning_rate
+    # determine and set the learning rate for this iteration
+    lr = get_lr(iter_num) if decay_lr else learning_rate
+    for param_group in optimizer.param_groups:
+        param_group['lr'] = lr
 
     # evaluate the loss on train/val sets and write checkpoints
     if iter_num % eval_interval == 0 and master_process:
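
The new form picks the learning rate with a single conditional expression and then always writes it into the optimizer's param groups; in the old form the param-group loop ran only inside the decay_lr branch. Below is a minimal, self-contained sketch of how the compressed pattern fits together with a warmup-plus-cosine get_lr schedule of the kind nanoGPT uses. The hyperparameter names and values (learning_rate, min_lr, warmup_iters, lr_decay_iters) are assumptions based on the repository's train.py and are not part of this diff.

import math
import torch

# assumed hyperparameters; names follow nanoGPT's train.py, values are illustrative
learning_rate  = 6e-4     # max LR
min_lr         = 6e-5     # LR floor once decay finishes
warmup_iters   = 2000
lr_decay_iters = 600000
decay_lr       = True

def get_lr(it):
    # 1) linear warmup for warmup_iters steps
    if it < warmup_iters:
        return learning_rate * it / warmup_iters
    # 2) past lr_decay_iters, hold the minimum LR
    if it > lr_decay_iters:
        return min_lr
    # 3) in between: cosine decay from learning_rate down to min_lr
    decay_ratio = (it - warmup_iters) / (lr_decay_iters - warmup_iters)
    coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio))  # goes 1 -> 0
    return min_lr + coeff * (learning_rate - min_lr)

# minimal stand-ins for the training loop's model and optimizer
model = torch.nn.Linear(8, 8)
optimizer = torch.optim.AdamW(model.parameters(), lr=learning_rate)

for iter_num in (0, 1000, 300000, 700000):
    # the compressed form from the new code: choose the LR in one expression,
    # then unconditionally push it into every optimizer param group
    lr = get_lr(iter_num) if decay_lr else learning_rate
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    print(iter_num, f"{lr:.2e}")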