# --- one SGD training step (body of an outer loop over step counter ``k``) ---
# NOTE(review): relies on ``model``, ``loss`` and ``k`` from the enclosing
# scope — presumably a micrograd-style loop where ``loss()`` runs the forward
# pass over the dataset; confirm against the surrounding file.

# forward pass: compute total loss and current accuracy
total_loss, acc = loss()

# backward pass: clear stale gradients first (they accumulate), then backprop
model.zero_grad()
total_loss.backward()

# parameter update: plain SGD with a linearly decaying learning rate
# (1.0 at step 0 down to 0.1 at step 100)
learning_rate = 1.0 - 0.9 * k / 100
for p in model.parameters():
    p.data -= learning_rate * p.grad

# progress report — NOTE(review): ``k % 1 == 0`` is always true, so this
# prints every step; raise the modulus (e.g. ``k % 10 == 0``) to print less
# often. Kept as-is to preserve existing behavior.
if k % 1 == 0:
    print(f"step {k} loss {total_loss.data}, accuracy {acc*100}%")