from local

dohe0342 2023-05-24 12:01:39 +09:00
parent 3a01cdff8b
commit 64a4a7d6d7
3 changed files with 7 additions and 0 deletions


@@ -101,6 +101,7 @@ from torch.nn.parallel import DistributedDataParallel as DDP
from torch.utils.tensorboard import SummaryWriter
from zipformer import Zipformer
from data2vec_encoder import FairSeqData2VecEncoder
from data2vec_audio import LoRAModule
from icefall import diagnostics
from icefall.checkpoint import remove_checkpoints
@@ -127,6 +128,12 @@ import wandb
#from icefall.checkpoint import save_checkpoint as save_checkpoint_impl
LRSchedulerType = Union[torch.optim.lr_scheduler._LRScheduler, optim.LRScheduler]
class LoRAHook():
def __init__(self, module):
self.hook = module.register_forward_hook(self.hook_fn)
def hook_fn(self, module, input, output):
def set_batch_count(model: Union[nn.Module, DDP], batch_count: float) -> None:
if isinstance(model, DDP):
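
The second hunk adds a LoRAHook class whose hook_fn body is cut off by the diff rendering. Below is a minimal self-contained sketch of such a forward hook, assuming hook_fn simply caches the wrapped module's output for later LoRA injection; the cached-output attribute and the remove helper are illustrative additions, not part of the commit.

import torch
import torch.nn as nn

class LoRAHook:
    def __init__(self, module: nn.Module):
        self.output = None
        # register_forward_hook invokes hook_fn after each forward pass
        self.hook = module.register_forward_hook(self.hook_fn)

    def hook_fn(self, module, input, output):
        # Assumed body: cache the latest output of the hooked module
        self.output = output

    def remove(self):
        # Illustrative helper: detach the hook once it is no longer needed
        self.hook.remove()

# Usage sketch: attach the hook to a linear layer and run one forward pass
layer = nn.Linear(4, 4)
hook = LoRAHook(layer)
_ = layer(torch.randn(2, 4))
print(hook.output.shape)  # torch.Size([2, 4])
hook.remove()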