
Commit

smart_optimizer() revert to weight with decay (ultralytics#9817)
If a parameter does not fall into any other category, it now falls back to the weight-with-decay group g[0] (see the sketch below the diff).

Signed-off-by: Glenn Jocher <[email protected]>

glenn-jocher authored Oct 16, 2022
1 parent df80e7c commit e42c89d
Showing 1 changed file with 7 additions and 6 deletions.
utils/torch_utils.py: 13 changes (7 additions & 6 deletions)
@@ -319,12 +319,13 @@ def smart_optimizer(model, name='Adam', lr=0.001, momentum=0.9, decay=1e-5):
     g = [], [], []  # optimizer parameter groups
     bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)  # normalization layers, i.e. BatchNorm2d()
     for v in model.modules():
-        if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias (no decay)
-            g[2].append(v.bias)
-        if isinstance(v, bn):  # weight (no decay)
-            g[1].append(v.weight)
-        elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter):  # weight (with decay)
-            g[0].append(v.weight)
+        for p_name, p in v.named_parameters(recurse=0):
+            if p_name == 'bias':  # bias (no decay)
+                g[2].append(p)
+            elif p_name == 'weight' and isinstance(v, bn):  # weight (no decay)
+                g[1].append(p)
+            else:
+                g[0].append(p)  # weight (with decay)
 
     if name == 'Adam':
         optimizer = torch.optim.Adam(g[2], lr=lr, betas=(momentum, 0.999))  # adjust beta1 to momentum
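The remainder of smart_optimizer() is not shown in this hunk. For context, a minimal sketch of how the three groups are typically attached to the optimizer, reusing the names `optimizer`, `g`, and `decay` from the hunk above; this is an assumption about the surrounding code, not part of this diff:

```python
# Hedged sketch, not part of this diff: attach the remaining groups to the
# optimizer created above. Only g[0] receives weight decay.
optimizer.add_param_group({'params': g[0], 'weight_decay': decay})  # weights: decay applied
optimizer.add_param_group({'params': g[1], 'weight_decay': 0.0})    # norm-layer weights: no decay
# g[2] (biases) was passed to the constructor and keeps the default weight_decay of 0.0
```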

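To see what the new per-parameter loop changes, here is a small self-contained sketch (the `Demo` module and the standalone grouping loop are illustrative assumptions, not code from the repository). A parameter whose name is neither 'bias' nor a norm-layer 'weight', such as a custom nn.Parameter, previously matched none of the hasattr checks and was left out of the optimizer; it now defaults to the weight-with-decay group g[0]:

```python
# Illustrative sketch: hypothetical Demo module; the grouping loop mirrors the new code above.
import torch
import torch.nn as nn

class Demo(nn.Module):
    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(3, 8, 3)             # conv.weight -> g[0] (decay), conv.bias -> g[2]
        self.bn = nn.BatchNorm2d(8)                # bn.weight -> g[1] (no decay), bn.bias -> g[2]
        self.scale = nn.Parameter(torch.ones(8))   # custom name: previously skipped, now -> g[0]

g = [], [], []  # 0) weights with decay, 1) weights no decay, 2) biases no decay
bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)  # normalization layer classes
for v in Demo().modules():
    for p_name, p in v.named_parameters(recurse=0):
        if p_name == 'bias':
            g[2].append(p)
        elif p_name == 'weight' and isinstance(v, bn):
            g[1].append(p)
        else:
            g[0].append(p)  # any remaining parameter defaults to the decay group

print(len(g[0]), len(g[1]), len(g[2]))  # -> 2 1 2
```

Under the old hasattr-based checks, `scale` matched no branch and was never added to any group, so it was not optimized at all; the new loop covers every direct parameter of every module.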
