I want to apply a custom learning rate scheduler. #8042
-
I want to apply a custom learning rate scheduler like the one below.

```python
import torch
from typing import Optional

from omegaconf import DictConfig
from torch.optim import Optimizer


class WarmupLRScheduler(torch.optim.lr_scheduler._LRScheduler):
    """
    Linearly warm up the learning rate from `init_lr` to `peak_lr` over `warmup_steps`.

    Args:
        optimizer (Optimizer): wrapped optimizer.
        configs (DictConfig): configuration set.
    """
    def __init__(
        self,
        optimizer: Optimizer,
        configs: DictConfig,
    ) -> None:
        if configs.lr_scheduler.warmup_steps != 0:
            warmup_rate = configs.lr_scheduler.peak_lr - configs.lr_scheduler.init_lr
            self.warmup_rate = warmup_rate / configs.lr_scheduler.warmup_steps
        else:
            self.warmup_rate = 0
        self.update_steps = 1
        self.init_lr = configs.lr_scheduler.init_lr
        self.lr = configs.lr_scheduler.init_lr
        self.warmup_steps = configs.lr_scheduler.warmup_steps
        # Set all attributes before calling the parent constructor:
        # `_LRScheduler.__init__` invokes `self.step()` once, and its second
        # positional argument is `last_epoch`, not a learning rate.
        super(WarmupLRScheduler, self).__init__(optimizer)

    def set_lr(self, optimizer: Optimizer, lr: float) -> None:
        # Write the new learning rate into every parameter group.
        for param_group in optimizer.param_groups:
            param_group["lr"] = lr

    def step(self, val_loss: Optional[torch.FloatTensor] = None):
        if self.update_steps < self.warmup_steps:
            lr = self.init_lr + self.warmup_rate * self.update_steps
            self.set_lr(self.optimizer, lr)
            self.lr = lr
        self.update_steps += 1
        return self.lr
```
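For reference, the warmup ramp can be checked by stepping the scheduler by hand outside Lightning. Below is a minimal sketch; the dummy model, optimizer, and config values (`init_lr`, `peak_lr`, `warmup_steps`) are made up purely for illustration.

```python
import torch
from omegaconf import OmegaConf

# Hypothetical config values, chosen only to make the linear ramp visible.
configs = OmegaConf.create(
    {"lr_scheduler": {"init_lr": 1e-7, "peak_lr": 1e-4, "warmup_steps": 4000}}
)

model = torch.nn.Linear(10, 10)                    # dummy model
optimizer = torch.optim.Adam(model.parameters(), lr=configs.lr_scheduler.init_lr)
scheduler = WarmupLRScheduler(optimizer, configs)  # class defined above

for step in range(5):
    optimizer.step()
    lr = scheduler.step()
    # lr should grow by (peak_lr - init_lr) / warmup_steps on each call
    print(step, lr)
```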
r"""
Choose what optimizers and learning-rate schedulers to use in your optimization.
Returns:
- **Dictionary** - The first item has multiple optimizers, and the second has multiple LR schedulers
(or multiple ``lr_dict``).
"""
SUPPORTED_OPTIMIZERS = {
"adam": Adam,
"adamp": AdamP,
"radam": RAdam,
"adagrad": Adagrad,
"adadelta": Adadelta,
"adamax": Adamax,
"adamw": AdamW,
"sgd": SGD,
"asgd": ASGD,
"novograd": Novograd,
}
assert self.configs.model.optimizer in SUPPORTED_OPTIMIZERS.keys(), \
f"Unsupported Optimizer: {self.configs.model.optimizer}\n" \
f"Supported Optimizers: {SUPPORTED_OPTIMIZERS.keys()}"
self.optimizer = SUPPORTED_OPTIMIZERS[self.configs.model.optimizer](
self.parameters(),
lr=self.configs.lr_scheduler.lr,
)
scheduler = SCHEDULER_REGISTRY[self.configs.lr_scheduler.scheduler_name](self.optimizer, self.configs)
if self.configs.lr_scheduler.scheduler_name == "reduce_lr_on_plateau":
lr_scheduler = {
'scheduler': scheduler,
'monitor': 'val_loss',
'interval': 'epoch',
}
elif self.configs.lr_scheduler.scheduler_name == "warmup_reduce_lr_on_plateau":
lr_scheduler = {
'scheduler': scheduler,
'monitor': 'val_loss',
'interval': 'step',
}
else:
lr_scheduler = {
'scheduler': scheduler,
'interval': 'step',
}
return {
'optimizer': self.optimizer,
'lr_scheduler': lr_scheduler
} If you fine some weird, please let me know. Thank you. |
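P.S. `SCHEDULER_REGISTRY` is just a mapping from the `scheduler_name` strings in my config to scheduler classes that accept `(optimizer, configs)`. A simplified, illustrative sketch (the real mapping and the other wrapper classes live elsewhere in my code base):

```python
# Illustrative only: the real registry has more entries, and the commented-out
# names are hypothetical custom wrappers, not torch built-ins.
SCHEDULER_REGISTRY = {
    "warmup": WarmupLRScheduler,
    # "reduce_lr_on_plateau": ReduceLROnPlateauScheduler,
    # "warmup_reduce_lr_on_plateau": WarmupReduceLROnPlateauScheduler,
}
```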
-
Hi @sooftware, can you share the error or unexpected behaviour you are getting with this configuration? Thanks 😃
-
Dear @sooftware,
-
Dear @sooftware,
Any updates? We fixed some bugs related to monitoring.
Best,
T.C
-
Hey @sooftware, awesome work! By the way, do you know about Lightning Flash: https://github.com/PyTorchLightning/lightning-flash? I believe there could be some cool synergies between your framework and Flash. Best,