yield "" // placeholder to fill batch up
def _inference(self, Xs, mode=None):
    self._data = list(self.input_pipeline._format_for_inference(Xs))
    self._closed = False
    n = len(self._data)
    if not getattr(self, "estimator", None):
        # Build the estimator, input function, and prediction generator once
        # and reuse them on subsequent calls.
        self.estimator = self.get_estimator()
        self._input_fn = lambda: self.input_pipeline._dataset_without_targets(
            self._data_generator, train=None
        ).batch(self.config.batch_size)
        self._predictions = self.estimator.predict(input_fn=self._input_fn)
    predictions = [None] * n
    for i in range(n):
        y = next(self._predictions)
        y = y[mode] if mode else y
        predictions[i] = y
    return predictions
After Change
length = len(Xs) if not callable(Xs) else None
# Wrap the prediction generator in a tqdm progress bar; predict_keys restricts
# each returned dict to the requested key when a mode is given.
predictions = tqdm.tqdm(
    estimator.predict(
        input_fn=input_fn, predict_keys=mode
    ),
    total=length,
    desc="Inference"
)
return [pred[mode] if mode else pred for pred in predictions]
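For context, a minimal self-contained sketch of the progress-bar pattern used in the changed code: tqdm wraps the prediction generator so progress is reported as results are consumed. The names fake_predict, Xs, and mode below are illustrative stand-ins, not part of the library.
import tqdm

def fake_predict(n):
    # Stand-in for estimator.predict(...): yields one prediction dict per example.
    for i in range(n):
        yield {"logits": [0.0, 1.0], "classes": i % 2}

Xs = list(range(100))
mode = "classes"
length = len(Xs) if not callable(Xs) else None  # total unknown for generator inputs
wrapped = tqdm.tqdm(fake_predict(len(Xs)), total=length, desc="Inference")
results = [pred[mode] if mode else pred for pred in wrapped]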
def fit(self, *args, **kwargs):
    """An alias for finetune."""
    return self.finetune(*args, **kwargs)
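As a usage illustration (hypothetical class and data, not the library's own), the alias means sklearn-style fit() calls simply delegate to finetune():
class ToyModel:
    def finetune(self, X, Y=None):
        return "finetuned on {} examples".format(len(X))

    def fit(self, *args, **kwargs):
        """An alias for finetune."""
        return self.finetune(*args, **kwargs)

print(ToyModel().fit(["a", "b", "c"]))  # finetuned on 3 examples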