if self.max_len > tensor.size(0):
pad = torch.ones((self.max_len - tensor.size(0),
tensor.size(1))) * self.fill_value
pad = pad.type_as(tensor)
tensor = torch.cat((tensor, pad), dim=0)
elif self.max_len < tensor.size(0):
tensor = tensor[:self.max_len, :]
return tensor
# --- After change: pad/truncate generalized to configurable length/channel dims ---
padding_size = [self.max_len - tensor.size(self.len_dim) if i == self.len_dim
else tensor.size(self.ch_dim)
for i in range(2)]
pad = torch.empty(padding_size, dtype=tensor.dtype).fill_(self.fill_value)
tensor = torch.cat((tensor, pad), dim=self.len_dim)
elif self.max_len < tensor.size(self.len_dim):