"`min_freq` must be 1"
)
if not config.vocab_from_train_data:  # Reset token counter.
tensorizer.vocab_builder._counter = collections.Counter()
pretrained_vocab = pretrained_embedding.embed_vocab
if config.vocab_size:
pretrained_vocab = pretrained_vocab[: config.vocab_size]
tensorizer.vocab_builder.add_all(pretrained_vocab)
After Change
if config.pretrained_embeddings_path and not init_from_saved_state:
if not any(
vocab_file.filepath == config.pretrained_embeddings_path
for vocab_file in tensorizer.vocab_config.vocab_files
):
raise ValueError(
f"Tensorizer's vocab files should include pretrained "