From 09bd3ef68577cddc1fd7b4beb0dc66e746c48635 Mon Sep 17 00:00:00 2001 From: Kent Gauen Date: Fri, 11 Sep 2020 08:13:05 -0400 Subject: [PATCH] Update scheduler.py I've added a warning so that get_lr is only expected to be called within "step". This is standard for PyTorch schedulers, so it seems sensible to include it. --- warmup_scheduler/scheduler.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/warmup_scheduler/scheduler.py b/warmup_scheduler/scheduler.py index 2ebdc70..6b2a5d3 100644 --- a/warmup_scheduler/scheduler.py +++ b/warmup_scheduler/scheduler.py @@ -1,5 +1,6 @@ from torch.optim.lr_scheduler import _LRScheduler from torch.optim.lr_scheduler import ReduceLROnPlateau +import warnings class GradualWarmupScheduler(_LRScheduler): @@ -23,6 +24,9 @@ def __init__(self, optimizer, multiplier, total_epoch, after_scheduler=None): super(GradualWarmupScheduler, self).__init__(optimizer) def get_lr(self): + if not self._get_lr_called_within_step: + warnings.warn("To get the last learning rate computed by the scheduler, " + "please use `get_last_lr()`.") if self.last_epoch > self.total_epoch: if self.after_scheduler: if not self.finished: