Bug fix in caching_eval, may make no difference.

Daniel Povey 2022-12-15 23:32:29 +08:00
parent f5d4fb092d
commit 6caaa4e9c6


@@ -245,7 +245,6 @@ class CachingEvalFunction(torch.autograd.Function):
         # Caution: this assumes you are not going to use any random numbers from torch (for any purpose
         # that matters in the forward pass), e.g. there should be no dropout.
         ctx.random_state = random.getstate()
-        ctx.save_for_backward(x)
         # we are inside torch.no_grad() here, so the following won't create the computation graph.
         y = m(x)
         ctx.save_for_backward(x, y)
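
For context, the change removes a redundant ctx.save_for_backward(x): a later call to ctx.save_for_backward(x, y) overrides whatever was saved earlier, which is why the fix "may make no difference". Below is a minimal, hypothetical sketch of how such a caching-eval function could look end to end; only the forward-pass lines above come from the diff, while the backward pass, argument order, and usage example are assumptions rather than the repository's actual code.

import random
import torch


class CachingEvalFunction(torch.autograd.Function):
    # Hypothetical sketch (not the repository's actual implementation): run
    # module m on x once in forward() without building a graph, cache the
    # result, and re-run m under enable_grad() in backward() with the Python
    # RNG state restored so both runs see the same randomness.

    @staticmethod
    def forward(ctx, m, x):
        ctx.m = m
        # Caution: this assumes no torch-level randomness (e.g. dropout)
        # matters in the forward pass; only Python's `random` state is saved.
        ctx.random_state = random.getstate()
        # forward() of an autograd.Function runs with grad disabled, so this
        # call does not create a computation graph.
        y = m(x)
        ctx.save_for_backward(x, y)
        return y

    @staticmethod
    def backward(ctx, y_grad):
        x, y = ctx.saved_tensors  # y is cached but not needed in this sketch
        # Replay the forward pass with the same Python RNG state, this time
        # building the graph so we can backprop through it.
        random.setstate(ctx.random_state)
        with torch.enable_grad():
            x2 = x.detach().requires_grad_(True)
            y2 = ctx.m(x2)
            (x_grad,) = torch.autograd.grad(y2, x2, grad_outputs=y_grad)
        # Only the gradient w.r.t. x is returned; gradients for m's own
        # parameters are not handled in this minimal sketch.
        return None, x_grad


# Usage (hypothetical): behaves like m(x) but recomputes m in backward.
m = torch.nn.Tanh()
x = torch.randn(2, 4, requires_grad=True)
y = CachingEvalFunction.apply(m, x)
y.sum().backward()

The point of saving and restoring random.getstate() is that any Python-level randomness used by m is replayed identically when the module is re-evaluated in backward; torch-level randomness such as dropout is explicitly not covered, per the comment in the diff.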