def compute_sentences(self):
    if self.is_computed(SENTENCES):
        self.__computed.remove(SENTENCES)
    tok = self.__sentence_tokenizer
    # Tokenize the whole document at once; spans are already full-text offsets.
    spans = tok.span_tokenize(self.text)
    dicts = []
    for start, end in spans:
        dicts.append({"start": start, "end": end})
    self[SENTENCES] = dicts
    self.__computed.add(SENTENCES)
After Change
def compute_sentences(self):
    if self.is_computed(SENTENCES):
        self.__computed.remove(SENTENCES)
    tok = self.__sentence_tokenizer
    dicts = []
    for paragraph in self[PARAGRAPHS]:
        para_start, para_end = paragraph[START], paragraph[END]
        # Tokenize each paragraph on its own; spans come back relative to para_text.
        para_text = self.text[para_start:para_end]
        spans = tok.span_tokenize(para_text)
        for start, end in spans:
            # Shift the paragraph-relative spans back into full-text offsets.
            dicts.append({"start": start + para_start, "end": end + para_start})
    self[SENTENCES] = dicts
    self.__computed.add(SENTENCES)
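
The key detail in the updated version is the offset arithmetic: span_tokenize returns spans relative to the paragraph slice it was given, so para_start must be added back to place each sentence in full-document coordinates. Below is a minimal, standalone sketch of that idea, assuming an NLTK-style PunktSentenceTokenizer (the original does not name the tokenizer class) and hard-coded, hypothetical paragraph boundaries standing in for self[PARAGRAPHS].

from nltk.tokenize.punkt import PunktSentenceTokenizer

text = "First sentence. Second sentence.\n\nThird sentence in a new paragraph."
paragraphs = [(0, 32), (34, 68)]  # hypothetical (start, end) paragraph offsets

tok = PunktSentenceTokenizer()
sentences = []
for para_start, para_end in paragraphs:
    para_text = text[para_start:para_end]
    for start, end in tok.span_tokenize(para_text):
        # span_tokenize offsets are relative to para_text; shift them back
        # into the coordinate system of the full document.
        sentences.append({"start": start + para_start, "end": end + para_start})

for s in sentences:
    print(text[s["start"]:s["end"]])
# First sentence.
# Second sentence.
# Third sentence in a new paragraph.

Tokenizing paragraph by paragraph, rather than the whole text at once, presumably keeps the sentence tokenizer from producing sentences that straddle a paragraph boundary; the added para_start offset is what keeps the stored spans compatible with the rest of the document's full-text indexing.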