# Use GPU if available, otherwise fall back to CPU.
# (Fixed: the original used a C-style "//" comment, which is a syntax
# error in Python — "#" is the comment character.)
import torch

# NOTE(review): these lines reference `self`, so they appear to be the
# interior of a method (e.g. an __init__) whose header is outside this
# view — confirm against the enclosing class.
self._device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Move the model's parameters/buffers onto the selected device in place.
self._model.to(self._device)
def predict(self, x, logits=False, batch_size=128):
After Change
# Use GPU if available, otherwise fall back to CPU.
# (Fixed: the original used a C-style "//" comment, which is a syntax
# error in Python — "#" is the comment character.)
import torch

# NOTE(review): these lines reference `self`, so they appear to be the
# interior of a method (e.g. an __init__) whose header is outside this
# view — confirm against the enclosing class.
self._device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Move the model's parameters/buffers onto the selected device in place.
self._model.to(self._device)
def predict(self, x, logits=False, batch_size=128):