Commit

fix forward compatibility as AdamW became a subclass of Adam
haifeng-jin committed Feb 5, 2025
1 parent ea59e40 commit 6b20d80
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions tests/tests_pytorch/callbacks/test_lr_monitor.py
@@ -548,10 +548,10 @@ def finetune_function(self, pl_module, epoch: int, optimizer):
         """Called when the epoch begins."""
         if epoch == 1 and isinstance(optimizer, torch.optim.SGD):
             self.unfreeze_and_add_param_group(pl_module.backbone[0], optimizer, lr=0.1)
-        if epoch == 2 and isinstance(optimizer, torch.optim.Adam):
+        if epoch == 2 and type(optimizer) == torch.optim.Adam:
             self.unfreeze_and_add_param_group(pl_module.layer, optimizer, lr=0.1)

-        if epoch == 3 and isinstance(optimizer, torch.optim.Adam):
+        if epoch == 3 and type(optimizer) == torch.optim.Adam:
             assert len(optimizer.param_groups) == 2
             self.unfreeze_and_add_param_group(pl_module.backbone[1], optimizer, lr=0.1)
             assert len(optimizer.param_groups) == 3
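
As the commit message notes, AdamW became a subclass of Adam in newer PyTorch releases, so an isinstance check against torch.optim.Adam now also matches AdamW optimizers; the exact type comparison keeps each branch specific to plain Adam. A minimal sketch of the difference, assuming a PyTorch version in which AdamW subclasses Adam (the toy parameter list is only for illustration):

import torch
from torch import nn, optim

# A toy parameter list so both optimizers can be constructed.
params = [nn.Parameter(torch.zeros(1))]
adam = optim.Adam(params, lr=0.1)
adamw = optim.AdamW(params, lr=0.1)

# Where AdamW subclasses Adam, isinstance matches both optimizers,
# so the old check would also fire for an AdamW instance.
assert isinstance(adam, optim.Adam)
assert isinstance(adamw, optim.Adam)  # holds only on releases with the subclass relationship

# An exact type comparison singles out plain Adam, which is what the test intends.
assert type(adam) == optim.Adam
assert type(adamw) != optim.Adam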
