torch.optim is a package implementing various optimization algorithms.
import torch.optim as optim

# Constructing an optimizer: pass an iterable of parameters to optimize,
# plus optimizer-specific options such as learning rate or momentum.
optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9)
optimizer = optim.Adam([var1, var2], lr=0.0001)
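For these snippets (and the per-parameter example below) to run, model must be an nn.Module and var1/var2 tensors with requires_grad=True. A minimal sketch; ToyNet and its layer sizes are illustrative assumptions, not part of the original:

import torch
import torch.nn as nn

class ToyNet(nn.Module):  # hypothetical model, sized arbitrarily
    def __init__(self):
        super().__init__()
        self.base = nn.Linear(10, 20)       # the 'base' submodule used below
        self.classifier = nn.Linear(20, 2)  # the 'classifier' submodule used below

    def forward(self, x):
        return self.classifier(torch.relu(self.base(x)))

model = ToyNet()
var1 = torch.randn(3, requires_grad=True)  # stand-in tensors for the Adam example
var2 = torch.randn(3, requires_grad=True)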
# Specifying per-parameter options: pass an iterable of dicts instead of
# an iterable of parameters. Each dict defines a parameter group and may
# override the defaults given as keyword arguments.
optim.SGD([
    {'params': model.base.parameters()},                    # uses the default lr=1e-2
    {'params': model.classifier.parameters(), 'lr': 1e-3},  # overrides lr for this group
], lr=1e-2, momentum=0.9)
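Each group shows up as a dict in optimizer.param_groups, which makes it easy to verify (or later adjust) the effective settings. A quick sketch, assuming the ToyNet model above:

optimizer = optim.SGD([
    {'params': model.base.parameters()},
    {'params': model.classifier.parameters(), 'lr': 1e-3},
], lr=1e-2, momentum=0.9)

for group in optimizer.param_groups:
    print(group['lr'], group['momentum'])  # prints 0.01 0.9, then 0.001 0.9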
# A typical training loop: compute the loss, backpropagate, then step.
for epoch in range(epochs):
    optimizer.zero_grad()              # reset gradients (they accumulate by default)
    outputs = model(inputs)            # forward pass
    loss = criterion(outputs, labels)  # compute the loss
    loss.backward()                    # backward pass: compute gradients
    optimizer.step()                   # update the parameters
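Putting it together, a self-contained sketch of the loop; the synthetic data, model dimensions, and hyperparameters are illustrative assumptions:

import torch
import torch.nn as nn
import torch.optim as optim

inputs = torch.randn(64, 10)          # synthetic batch: 64 samples, 10 features
labels = torch.randint(0, 2, (64,))   # synthetic binary class labels

model = nn.Sequential(nn.Linear(10, 20), nn.ReLU(), nn.Linear(20, 2))
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9)

for epoch in range(5):
    optimizer.zero_grad()
    outputs = model(inputs)
    loss = criterion(outputs, labels)
    loss.backward()
    optimizer.step()
    print(f"epoch {epoch}: loss {loss.item():.4f}")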