From a29a9f19dbbcad1ba718e639d912ce1532207e6b Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Tue, 10 Jan 2023 01:16:57 +0900
Subject: [PATCH] from local

---
 .../ASR/incremental_transf/.train.py.swp | Bin 61440 -> 61440 bytes
 .../ASR/incremental_transf/train.py      |   2 +-
 2 files changed, 1 insertion(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/incremental_transf/.train.py.swp b/egs/librispeech/ASR/incremental_transf/.train.py.swp
index 06a4c8a51cf255ee9c0da5e665d997c424cccb13..02b0c4faaa22db266dd83d100052924f25038e68 100644
GIT binary patch
delta 102
zcmZp8z})bFSv1KY%+puFQqO<^2m}}yEN%BB_iPk>KcDq9BLhS9X3hoCjH-Ps3=CmF
wEDgj5nHd=N0P%7lo&?0@KpX?a(VIILUSwos*c`dIhH3=APa
ED6N>nHd;%0r65Go&dz9KpX|ck()agUSym+X|eF;{KXlJOxlK%-z`<09Jh>ZGt<(4
FUH~b`9{&IU

diff --git a/egs/librispeech/ASR/incremental_transf/train.py b/egs/librispeech/ASR/incremental_transf/train.py
index fde89a961..02d399655 100755
--- a/egs/librispeech/ASR/incremental_transf/train.py
+++ b/egs/librispeech/ASR/incremental_transf/train.py
@@ -993,7 +993,7 @@ def run(rank, world_size, args):
                 except: print(f'2: pre-trained model has no parameter named {old_name}.')
             else:
                 layer_name_splited[0] = 'inter_encoder'
-                layer_name_splited[3] = str(int(layer_name_splited[3])//2+1)
+                layer_name_splited[3] = str(int(layer_name_splited[3])//2)
                 old_name = '.'.join(layer_name_splited)
                 try: p.data = pre_trained_model2[old_name]
                 except: print(f'3: pre-trained model has no parameter named {old_name}.')
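
Context for the one-line change above: a parameter of the current model at layer index i is now initialised from layer i//2 of the pre-trained 'inter_encoder' checkpoint instead of i//2+1, so layer 0 maps to checkpoint layer 0 rather than 1. The sketch below illustrates that remapping loop as it can be read from the diff context; the function name, the exact parameter-name layout, and the explicit key check are assumptions and not the actual train.py code (which wraps the same lookup in a bare try/except).

    # Hypothetical sketch of the renaming step touched by this patch.
    # 'pre_trained_model2' is assumed to be a state_dict whose keys start
    # with 'inter_encoder'; names are assumed to look like
    # '<prefix>.encoder.layers.<i>.<param>'.
    def init_from_inter_encoder(model, pre_trained_model2):
        for name, p in model.named_parameters():
            layer_name_splited = name.split('.')
            # skip parameters whose name does not carry a layer index
            if len(layer_name_splited) < 4 or not layer_name_splited[3].isdigit():
                continue
            layer_name_splited[0] = 'inter_encoder'
            # patched rule: model layer i <- checkpoint layer i//2 (was i//2 + 1)
            layer_name_splited[3] = str(int(layer_name_splited[3]) // 2)
            old_name = '.'.join(layer_name_splited)
            if old_name in pre_trained_model2:
                p.data = pre_trained_model2[old_name]
            else:
                print(f'pre-trained model has no parameter named {old_name}.')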