Mirror of https://github.com/k2-fsa/icefall.git, synced 2025-08-08 09:32:20 +00:00
Avoid using lr from checkpoint. (#1781)
parent 37a1420603
commit 05f756390c
@@ -787,7 +787,9 @@ class LRScheduler(object):
         is not the optimizer.
         """
         return {
-            "base_lrs": self.base_lrs,
+            # The user might try to override the base_lr, so don't include
+            # it in the state; previously it was included.
+            # "base_lrs": self.base_lrs,
             "epoch": self.epoch,
             "batch": self.batch,
         }
@@ -799,7 +801,12 @@ class LRScheduler(object):
             state_dict (dict): scheduler state. Should be an object returned
                 from a call to :meth:`state_dict`.
         """
+        # The handling of base_lrs here is a work-around for a previous
+        # problem where base_lrs were written with the state dict.
+        base_lrs = self.base_lrs
         self.__dict__.update(state_dict)
+        self.base_lrs = base_lrs
+
 
     def get_last_lr(self) -> List[float]:
         """Return last computed learning rate by current scheduler. Will be a list of float."""
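For context, the net effect of the two hunks can be illustrated with a minimal, self-contained sketch. This is not the actual icefall LRScheduler (which carries more state); the class body and the learning-rate values below are hypothetical. It shows the two behaviors the commit introduces: state_dict() no longer serializes base_lrs, and load_state_dict() preserves the scheduler's current base_lrs even when restoring an older checkpoint that still contains them, so a learning rate the user overrides at resume time is kept.

    # Minimal sketch of the post-commit behavior; hypothetical, not the icefall source.
    from typing import List


    class LRScheduler(object):
        def __init__(self, base_lrs: List[float]):
            self.base_lrs = base_lrs
            self.epoch = 0
            self.batch = 0

        def state_dict(self) -> dict:
            # base_lrs is intentionally omitted so a checkpoint cannot
            # override a learning rate the user set for the current run.
            return {"epoch": self.epoch, "batch": self.batch}

        def load_state_dict(self, state_dict: dict) -> None:
            # Save and restore base_lrs around the update, in case the
            # checkpoint was written by an older version that included it.
            base_lrs = self.base_lrs
            self.__dict__.update(state_dict)
            self.base_lrs = base_lrs


    # Usage: the user overrides the LR (0.045 here is an arbitrary example),
    # then resumes from an old checkpoint that still contains "base_lrs";
    # the override survives while the schedule position is restored.
    scheduler = LRScheduler(base_lrs=[0.045])
    old_checkpoint = {"base_lrs": [0.015], "epoch": 3, "batch": 1200}
    scheduler.load_state_dict(old_checkpoint)
    assert scheduler.base_lrs == [0.045]
    assert scheduler.epoch == 3 and scheduler.batch == 1200

The underlying design choice appears to be that the base learning rate is configuration rather than training state: the checkpoint restores the schedule position (epoch, batch), while base_lrs always comes from the current run.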