        # if not (np.sum(ret[1].numpy()) == 0):
        yield ret

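# Build a batched iterator over the validation dataset.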
valid_loader = batch_iter(
    iter(valid_dataset), batch_size=args.valid_batchsize)
fps = [0]*args.num_classes
if not os.path.exists(outdir):
    os.mkdir(outdir)
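# Open one detection output file per class.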
for i in range(args.num_classes):
    buf = "%s/%s%s.txt" % (prefix, outfile, names[i])
    fps[i] = open(buf, "w")
lineId = -1
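# Asynchronously prefetch batches on the worker pool so data loading
# overlaps with inference; each iteration schedules the next batch
# before consuming the current one.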
future_data = pool.apply_async(
    utils.raise_info_thread(next), (valid_loader, None))
for batch_idx in itertools.count():
    curr_data = future_data
    future_data = pool.apply_async(
        utils.raise_info_thread(next), (valid_loader, None))
    ret = curr_data.get()
    if ret is None:
After Change
    fps[i] = open(buf, "w")
lineId = 0
total_samples = len(valid_files)
total_batches = (total_samples+args.valid_batchsize -
                 1)//args.valid_batchsize
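# Iterate over the whole validation set one batch at a time; the ceil
# division above accounts for a final partial batch.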
for each_batch in range(0, total_batches):
    ret = valid_dataset.next()
    data, target = ret
    yolo_x_nnabla.d = data
    yolo_features_nnabla.forward(clear_buffer=True)
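    # Decode the raw network output into candidate bounding boxes for the batch.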
    batch_boxes = utils.get_region_boxes(
        yolo_features_nnabla.d, args.conf_thresh, args.num_classes,
        args.anchors, args.num_anchors, 0, 1)
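    # Walk through each image in the batch and look up its source file name and size.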
    for i in range(yolo_features_nnabla.d.shape[0]):
        if lineId >= total_samples:
            print("Reached End of total_samples")
            break
        fileId = os.path.basename(valid_files[lineId]).split(".")[0]
        width, height = utils.get_image_size(valid_files[lineId])
        print(valid_files[lineId])
        lineId += 1