diff --git a/icefall/shared/make_kn_lm.py b/icefall/shared/make_kn_lm.py index a52e8da71..8d0170f8b 100755 --- a/icefall/shared/make_kn_lm.py +++ b/icefall/shared/make_kn_lm.py @@ -165,9 +165,9 @@ class NgramCounts: n1 += stat[1] n2 += stat[2] assert n1 + 2 * n2 > 0 - self.d.append(max(0.001, n1 * 1.0) / (n1 + 2 * n2)) # We are doing this max(0.001, xxx) to avoid zero discounting constant D, - # which could happen if the number of symbols is small and all w in the vocab - # has been seen after certain h. This can cause division by zero in computing BOW. + self.d.append(max(0.001, n1 * 1.0) / (n1 + 2 * n2)) # We are doing this max(0.001, xxx) to avoid a zero discounting constant D due to n1=0, + # which could happen if the number of symbols is small. + # Otherwise, a zero discounting constant can cause division by zero in computing BOW. def cal_f(self): # f(a_z) is a probability distribution of word sequence a_z.