mirror of https://github.com/k2-fsa/icefall.git, synced 2025-12-11 06:55:27 +00:00
Bug fix in caching_eval, may make no difference.
parent f5d4fb092d
commit 6caaa4e9c6
@@ -245,7 +245,6 @@ class CachingEvalFunction(torch.autograd.Function):
         # Caution: this assumes you are not going to use any random numbers from torch (for any purpose
         # that matters in the forward pass), e.g. there should be no dropout.
         ctx.random_state = random.getstate()
-        ctx.save_for_backward(x)
         # we are inside torch.no_grad() here, so the following won't create the computation graph.
         y = m(x)
         ctx.save_for_backward(x, y)
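The hunk removes an early ctx.save_for_backward(x) that was immediately superseded by the later ctx.save_for_backward(x, y); since only the most recent save_for_backward call is kept, behaviour should be unchanged, which matches the "may make no difference" note in the commit message. Below is a minimal, hypothetical sketch of the kind of caching autograd.Function this hunk sits in. It is not the actual icefall implementation: the forward/backward signatures, the module argument m, and the gradient recomputation in backward are assumptions made for illustration.

import random

import torch


class CachingEvalFunction(torch.autograd.Function):
    """Hypothetical sketch: cache a module evaluation in forward,
    recompute it with grad enabled in backward."""

    @staticmethod
    def forward(ctx, m, x):
        ctx.m = m
        # Caution: assumes no torch random numbers matter in the forward pass
        # (e.g. no dropout); only Python's `random` state is saved here.
        ctx.random_state = random.getstate()
        with torch.no_grad():
            # No computation graph is built for this evaluation.
            y = m(x)
        # Only the last save_for_backward call is kept, so an earlier
        # save_for_backward(x) would have been redundant.
        ctx.save_for_backward(x, y)
        return y

    @staticmethod
    def backward(ctx, y_grad):
        x, y = ctx.saved_tensors
        # Restore the RNG state so the recomputation matches the cached forward.
        random.setstate(ctx.random_state)
        with torch.enable_grad():
            x_detached = x.detach().requires_grad_(True)
            y_recomputed = ctx.m(x_detached)
            (x_grad,) = torch.autograd.grad(
                y_recomputed, x_detached, grad_outputs=y_grad
            )
        # No gradient for the module argument; gradient w.r.t. x.
        return None, x_grad

Usage would be something like y = CachingEvalFunction.apply(module, x): the forward result is cached without building a graph, and gradients are obtained by re-running the module inside backward under the restored random state.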