from local

dohe0342 2022-12-10 13:36:01 +09:00
parent 0291ce4eff
commit a6b159f56f
2 changed files with 30 additions and 24 deletions


@@ -105,17 +105,23 @@ def set_batch_count(model: Union[nn.Module, DDP], batch_count: float) -> None:
 def add_rep_arguments(parser: argparse.ArgumentParser):
     parser.add_argument(
-        "--decode-interval",
+        "--wandb",
+        type=bool,
+        default=False,
+        help="Use wandb for MLOps",
+    )
+    parser.add_argument(
+        "--accum-grads",
         type=int,
-        default=200,
-        help="decode interval",
+        default=1,
+        help="accum-grad num.",
     )
     parser.add_argument(
-        "--encoder-dim",
-        type=int,
-        default=768,
-        help="encoder embedding dimension",
+        "--multi-optim",
+        type=bool,
+        default=False,
+        help="use sperate optimizer (enc / dec)",
     )
     parser.add_argument(
@@ -132,41 +138,41 @@ def add_rep_arguments(parser: argparse.ArgumentParser):
help="The initial learning rate. This value should not need to be changed.",
)
parser.add_argument(
"--multi-optim",
type=bool,
default=False,
help="use sperate optimizer (enc / dec)",
)
parser.add_argument(
"--accum-grads",
type=int,
default=1,
help="accum-grad num.",
)
parser.add_argument(
"--encoder-type",
type=str,
default='d2v',
help="Type of encoder (e.g. conformer, w2v, d2v...",
)
parser.add_argument(
"--additional-block",
type=bool,
default=False,
"--encoder-dim",
type=int,
default=768,
help="encoder embedding dimension",
)
parser.add_argument(
"--freeze-finetune-updates",
type=int,
default=0
)
parser.add_argument(
"--wandb",
"--additional-block",
type=bool,
default=False,
help="Use wandb for MLOps",
)
parser.add_argument(
"--decode-interval",
type=int,
default=200,
help="decode interval",
)
def add_model_arguments(parser: argparse.ArgumentParser):
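
A note on the `type=bool` options in this diff (`--wandb`, `--multi-optim`, `--additional-block`): `argparse` does not parse booleans the way `type=int` parses integers. `bool("False")` is `True`, so any non-empty value passed on the command line enables the flag regardless of what the user typed. The snippet below is a minimal sketch, not code from this repository; the `--wandb` option mirrors the diff, while `add_bool_args_store_true` is a hypothetical alternative shown only to illustrate the usual `action="store_true"` workaround.

```python
import argparse


def add_bool_args_as_committed(parser: argparse.ArgumentParser) -> None:
    # As in the diff: type=bool silently misparses string input.
    parser.add_argument("--wandb", type=bool, default=False, help="Use wandb for MLOps")


def add_bool_args_store_true(parser: argparse.ArgumentParser) -> None:
    # Common alternative: the flag's presence means True, its absence means False.
    parser.add_argument("--wandb", action="store_true", help="Use wandb for MLOps")


if __name__ == "__main__":
    p1 = argparse.ArgumentParser()
    add_bool_args_as_committed(p1)
    # bool("False") is True, so this prints True, which is usually not intended.
    print(p1.parse_args(["--wandb", "False"]).wandb)

    p2 = argparse.ArgumentParser()
    add_bool_args_store_true(p2)
    print(p2.parse_args([]).wandb)            # False
    print(p2.parse_args(["--wandb"]).wandb)   # True
```

If existing launch scripts must keep passing an explicit value such as `--wandb False`, another common option is a small string-to-bool converter supplied via `type=`; either way the behaviour becomes explicit rather than depending on Python's truthiness of non-empty strings.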