self.links.append(link)

# Add recombined tokens. Each entry records a token that was dropped during
# recombination, the history of the token that was kept in its place, and the
# dropped token's NNLM log probability.
oov_words = set()
for token, new_history, nn_lm_logprob in reversed(recomb_tokens):
    assert new_history[-1] == token.history[-1]
    word_id = token.history[-1]
    if isinstance(word_id, int):
        word = vocabulary.id_to_word[word_id]
    else:
        # A non-integer word ID is the word string itself, meaning the word
        # is out of vocabulary.
        word = word_id
        oov_words.add(word)
    # Find the incoming link that corresponds to the token that was kept
    # during recombination.
    try:
        recomb_from_node = follow_word_ids(new_history[:-1], False)
    except NodeNotFoundError:
        continue
    # Our new lattice doesn't contain null links, so word_to_link maps
    # never skip nodes.
    if word not in recomb_from_node.word_to_link:
        continue
    recomb_link = recomb_from_node.word_to_link[word]
    # Add a link from the previous word in the token history to the node
    # that was kept during recombination. The difference in LM log
    # probability can be computed from the token (path) NNLM log
    # probabilities.
    from_node = follow_word_ids(token.history[:-1])
    lm_logprob_diff = token.nn_lm_logprob - nn_lm_logprob
    new_link = self.Link(from_node, recomb_link.end_node, word,
                         recomb_link.ac_logprob,
                         recomb_link.lm_logprob + lm_logprob_diff,
                         recomb_link.transitions)
    from_node.out_links.append(new_link)
    # Tokens never contain null words, so we can be sure that
    # word_to_link maps in our new lattice never skip nodes.
    assert word is not None
    from_node.word_to_link[word] = new_link
    self.links.append(new_link)

if oov_words:
    # Lazy %-formatting so the join only happens when DEBUG is enabled.
    logging.debug("Out-of-vocabulary words in lattice: %s",
                  ", ".join(oov_words))

# Assign the final node the next free ID and register it.
final_node.id = len(self.nodes)
self.nodes.append(final_node)
def _add_word_maps(self, nodes):