if len(tokens) > self.opt["text_size"]:
    tokens = tokens[:self.opt["text_size"]]
for tok in tokens:
    embeddings.append(self.fasttext_model.infer(tok))
if len(tokens) < self.opt["text_size"]:
    pads = [np.zeros(self.opt["embedding_size"])
            for _ in range(self.opt["text_size"] - len(tokens))]
After Change
if len(tokens) > self.opt["text_size"]:
    tokens = tokens[:self.opt["text_size"]]
embeddings = self.fasttext_model.infer(" ".join(tokens))
if len(tokens) < self.opt["text_size"]:
    pads = [np.zeros(self.opt["embedding_size"])
            for _ in range(self.opt["text_size"] - len(tokens))]
    embeddings = pads + embeddings
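
For context, a minimal, self-contained sketch of the truncate-then-pad logic from the snippet after the change. The fastText model and the self.opt values are replaced with hypothetical stand-ins (infer_stub, TEXT_SIZE, EMBEDDING_SIZE); the stub assumes infer returns one vector per whitespace-separated token, which the original excerpt does not confirm.

import numpy as np

TEXT_SIZE = 5        # stand-in for self.opt["text_size"]
EMBEDDING_SIZE = 4   # stand-in for self.opt["embedding_size"]

def infer_stub(text):
    # Hypothetical stand-in for self.fasttext_model.infer(" ".join(tokens)):
    # returns one embedding vector per whitespace token.
    return [np.random.rand(EMBEDDING_SIZE) for _ in text.split()]

def embed(tokens):
    # Truncate overly long inputs to the fixed text size.
    if len(tokens) > TEXT_SIZE:
        tokens = tokens[:TEXT_SIZE]
    embeddings = infer_stub(" ".join(tokens))
    # Left-pad short inputs with zero vectors up to the fixed text size.
    if len(tokens) < TEXT_SIZE:
        pads = [np.zeros(EMBEDDING_SIZE)
                for _ in range(TEXT_SIZE - len(tokens))]
        embeddings = pads + embeddings
    return np.array(embeddings)  # shape: (TEXT_SIZE, EMBEDDING_SIZE)

print(embed(["hello", "world"]).shape)  # (5, 4)

Padding with zero vectors keeps every output at a fixed (text_size, embedding_size) shape, so downstream layers can batch texts of different lengths.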