Undo changes to util summary writer

Signed-off-by: Xinyuan Li <xli257@b17.clsp.jhu.edu>
This commit is contained in:
Xinyuan Li 2024-01-23 23:23:22 -05:00
parent 8dc1ca194d
commit 7047a579b8

View File

@@ -38,7 +38,7 @@ import sentencepiece as spm
 import torch
 import torch.distributed as dist
 import torch.nn as nn
-# from torch.utils.tensorboard import SummaryWriter
+from torch.utils.tensorboard import SummaryWriter
 from icefall.checkpoint import average_checkpoints
@@ -1125,22 +1125,22 @@ class MetricsTracker(collections.defaultdict):
         for k, v in zip(keys, s.cpu().tolist()):
             self[k] = v
-    # def write_summary(
-    #     self,
-    #     tb_writer: SummaryWriter,
-    #     prefix: str,
-    #     batch_idx: int,
-    # ) -> None:
-    #     """Add logging information to a TensorBoard writer.
-    #     Args:
-    #         tb_writer: a TensorBoard writer
-    #         prefix: a prefix for the name of the loss, e.g. "train/valid_",
-    #             or "train/current_"
-    #         batch_idx: The current batch index, used as the x-axis of the plot.
-    #     """
-    #     for k, v in self.norm_items():
-    #         tb_writer.add_scalar(prefix + k, v, batch_idx)
+    def write_summary(
+        self,
+        tb_writer: SummaryWriter,
+        prefix: str,
+        batch_idx: int,
+    ) -> None:
+        """Add logging information to a TensorBoard writer.
+        Args:
+            tb_writer: a TensorBoard writer
+            prefix: a prefix for the name of the loss, e.g. "train/valid_",
+                or "train/current_"
+            batch_idx: The current batch index, used as the x-axis of the plot.
+        """
+        for k, v in self.norm_items():
+            tb_writer.add_scalar(prefix + k, v, batch_idx)
 def concat(ragged: k2.RaggedTensor, value: int, direction: str) -> k2.RaggedTensor: