diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
index 64e534410..253daf143 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
@@ -148,7 +148,7 @@ class LoRAHook():
     def save_checkpoint(self, i, iter_, save_dir):
         if isinstance(self.lora, DDP):
             lora = self.lora.module
-        torch.save(self.lora.state_dict(), f"{save_dir}/lora_{iter_}_{i}.pt")
+        torch.save(lora.state_dict(), f"{save_dir}/lora_{iter_}_{i}.pt")
 
 
 def set_batch_count(model: Union[nn.Module, DDP], batch_count: float) -> None:
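
Note that the patched save_checkpoint fixes the DDP case (the old code saved the
state dict of the DDP wrapper, so every key carried a "module." prefix) but still
assumes self.lora is DDP-wrapped: in a single-process run the isinstance branch is
skipped, `lora` is never bound, and torch.save raises a NameError. A minimal sketch
of a variant that unwraps conditionally is shown below; the __init__ here is
hypothetical scaffolding for a self-contained example, and only save_checkpoint
mirrors the hunk above.

import torch
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP


class LoRAHook:
    def __init__(self, lora: nn.Module):
        # Hypothetical constructor: self.lora may or may not be DDP-wrapped,
        # depending on whether training runs distributed.
        self.lora = lora

    def save_checkpoint(self, i, iter_, save_dir):
        # Unwrap the DDP container so checkpoint keys carry no "module."
        # prefix; fall back to the module itself in the single-process case.
        lora = self.lora.module if isinstance(self.lora, DDP) else self.lora
        torch.save(lora.state_dict(), f"{save_dir}/lora_{iter_}_{i}.pt")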