From ff62d9d54f3017b6e1ef1da07135e7203e3a4521 Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Mon, 9 Jan 2023 20:23:52 +0900
Subject: [PATCH] from local

---
 .../incremental_transf/.identity_train.py.swp | Bin 65536 -> 65536 bytes
 .../ASR/incremental_transf/.model.py.swp      | Bin 24576 -> 24576 bytes
 .../ASR/incremental_transf/identity_train.py  |   7 ++++++-
 3 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp b/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp
index 3449aace706fed396e086cddd85ce0d6592fffa8..34b612f8f753787595c04610656ae64b8042a1a1 100644
Binary files a/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp and b/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp differ
diff --git a/egs/librispeech/ASR/incremental_transf/.model.py.swp b/egs/librispeech/ASR/incremental_transf/.model.py.swp
index 18b72fcd8f7b0179b5172928db2da0965ce6daef..6d743592b631e81bc009c69c0ffdef6b5b8f2d9e 100644
GIT binary patch
delta 33
ncmZoTz}RqrQ7p+I%+puFQqO<^2m}}yPW{-OJd=B)*sFK|q^t^Z

delta 33
ncmZoTz}RqrQ7p+I%+puFQqO<^2m}}y{(Rk?yp3(6*sFK|ry2^m

diff --git a/egs/librispeech/ASR/incremental_transf/identity_train.py b/egs/librispeech/ASR/incremental_transf/identity_train.py
index 8b722562f..66e59465c 100755
--- a/egs/librispeech/ASR/incremental_transf/identity_train.py
+++ b/egs/librispeech/ASR/incremental_transf/identity_train.py
@@ -982,6 +982,11 @@ def run(rank, world_size, args):
     transducer_model.load_state_dict(pre_trained_model, strict=True)
     model = get_interformer_model(transducer_model.encoder, params)
 
+    for n, p in model.named_parameters():
+        if 'pt_encoder' in n:
+            p.requires_grad = False
+        else:
+            print(n)
     '''
     for n, p in model.named_parameters():
         if 'layer' not in n:
@@ -1016,7 +1021,7 @@ def run(rank, world_size, args):
     if world_size > 1:
         logging.info("Using DDP")
         model = DDP(model, device_ids=[rank])
-    
+
     optimizer = Eve(model.parameters(), lr=params.initial_lr)
     scheduler = Eden(optimizer, params.lr_batches, params.lr_epochs)
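
Note on the first identity_train.py hunk: the new loop freezes every parameter
whose name contains 'pt_encoder' (the pretrained encoder handed to
get_interformer_model) and prints the names of the parameters that remain
trainable, so the log records exactly what will be fine-tuned. Below is a
minimal, self-contained sketch of the same pattern; TinyInterformer is a
hypothetical stand-in for the real model and is not part of this patch:

    import torch
    import torch.nn as nn

    class TinyInterformer(nn.Module):
        """Hypothetical stand-in for the model built by get_interformer_model."""
        def __init__(self):
            super().__init__()
            self.pt_encoder = nn.Linear(8, 8)  # pretrained encoder: frozen below
            self.adapter = nn.Linear(8, 8)     # newly added part: stays trainable

    model = TinyInterformer()

    # Same pattern as the hunk above: freeze by name, log what stays trainable.
    for n, p in model.named_parameters():
        if 'pt_encoder' in n:
            p.requires_grad = False
        else:
            print(n)

    # Frozen parameters receive no gradient after backward().
    loss = model.adapter(model.pt_encoder(torch.randn(2, 8))).sum()
    loss.backward()
    assert model.pt_encoder.weight.grad is None
    assert model.adapter.weight.grad is not None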
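
Note on the second hunk: it only strips trailing whitespace from the blank line
before the optimizer setup. As the surrounding context shows, Eve and Eden are
still constructed from model.parameters() after the DDP wrap, so the frozen
pt_encoder parameters are handed to the optimizer as well. That is normally
harmless, since PyTorch-style optimizers skip parameters whose .grad stays
None, but filtering to trainable parameters makes the intent explicit. A
sketch of that variant, with torch.optim.AdamW and an arbitrary learning rate
standing in for icefall's Eve (an assumption, not part of this patch):

    import torch
    import torch.nn as nn

    model = nn.Sequential(nn.Linear(8, 8), nn.Linear(8, 8))
    for p in model[0].parameters():  # stand-in for the frozen pt_encoder
        p.requires_grad = False

    # Equivalent in effect to Eve(model.parameters(), ...) in the patch, but
    # only the parameters that will actually receive gradients are passed in.
    trainable = [p for p in model.parameters() if p.requires_grad]
    optimizer = torch.optim.AdamW(trainable, lr=3e-4)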