Mirror of https://github.com/k2-fsa/icefall.git (synced 2025-12-11 06:55:27 +00:00)

commit 64a4a7d6d7 (parent 3a01cdff8b): from local
Two binary files changed (contents not shown).
@@ -101,6 +101,7 @@ from torch.nn.parallel import DistributedDataParallel as DDP

from torch.utils.tensorboard import SummaryWriter

from zipformer import Zipformer
from data2vec_encoder import FairSeqData2VecEncoder
from data2vec_audio import LoRAModule

from icefall import diagnostics
from icefall.checkpoint import remove_checkpoints

@@ -127,6 +128,12 @@ import wandb

#from icefall.checkpoint import save_checkpoint as save_checkpoint_impl
LRSchedulerType = Union[torch.optim.lr_scheduler._LRScheduler, optim.LRScheduler]


class LoRAHook():
    def __init__(self, module):
        self.hook = module.register_forward_hook(self.hook_fn)

    def hook_fn(self, module, input, output):


def set_batch_count(model: Union[nn.Module, DDP], batch_count: float) -> None:
    if isinstance(model, DDP):
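For context on the new class: torch.nn.Module.register_forward_hook registers a callback that fires after every forward pass of the module it is attached to, and the hunk above is cut off before the body of hook_fn. The sketch below shows one plausible use of such a hook, capturing the wrapped module's most recent output; the stored-output behaviour and the demo lines are assumptions for illustration, not the committed implementation.

import torch
import torch.nn as nn


class LoRAHook:
    """Sketch only: capture a module's output on every forward pass.

    hook_fn's body is not visible in this diff; storing the output is an
    assumed behaviour for illustration.
    """

    def __init__(self, module: nn.Module):
        self.output = None
        # register_forward_hook calls hook_fn after each forward() of `module`.
        self.hook = module.register_forward_hook(self.hook_fn)

    def hook_fn(self, module, input, output):
        # Assumption: keep a reference to the most recent output.
        self.output = output

    def remove(self):
        # Detach the hook once it is no longer needed.
        self.hook.remove()


# Usage: attach the hook to any submodule (e.g. a LoRA adapter) and read back
# its output after a forward pass.
layer = nn.Linear(4, 4)
hook = LoRAHook(layer)
_ = layer(torch.randn(2, 4))
print(hook.output.shape)  # torch.Size([2, 4])
hook.remove()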
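set_batch_count is likewise truncated right after the DDP check. A common pattern in icefall training scripts is to unwrap the DDP container and push the batch count into every submodule that tracks one; the completion below follows that pattern and is a hedged sketch, not the exact code from this commit.

from typing import Union

import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP


def set_batch_count(model: Union[nn.Module, DDP], batch_count: float) -> None:
    # Sketch: the body after the isinstance check is assumed, since the diff
    # is cut off at that point.
    if isinstance(model, DDP):
        # DDP wraps the real model; operate on the underlying nn.Module.
        model = model.module
    for module in model.modules():
        if hasattr(module, "batch_count"):
            module.batch_count = batch_count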