    for line in target.tolist()]
writer.add_summary(mgd, global_step=step)
for index, line in enumerate(zip(source, sourcetxt, target, targettxt)):
    print("{}:{} source:{} txt:{}".format((step-1)*32+index, step, line[0], line[1]))
    print("{}:{} target:{} txt:{}".format((step-1)*32+index, step, line[2], line[3]))
for var in tf.trainable_variables():
    print("name:{}\tshape:{}\ttype:{}".format(var.name, var.shape, var.dtype))
if step % 1000 == 0:
After Change
    for line in source.tolist()]
targettxt = [" ".join([text_database.target_vocab._id_to_token_map_py[i] for i in line]) \
    for line in target.tolist()]
predicttxt = [" ".join([text_database.target_vocab._id_to_token_map_py[i] for i in line]) \
    for line in predict.tolist()]
writer.add_summary(mgd, global_step=step)
# for index, line in enumerate(zip(source, sourcetxt, target, targettxt, predict, predicttxt)):
#     print("{}:{} source:{} txt:{}".format((step-1)*32+index, step, line[0], line[1]))
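For reference, below is a minimal, runnable sketch of the id-to-text decoding pattern used by sourcetxt, targettxt and predicttxt above. A plain dict stands in for text_database.target_vocab._id_to_token_map_py, and the names id_to_token and decode_batch are illustrative only, not taken from the codebase.

# Sketch of the decoding pattern: map batches of token ids to readable strings.
def decode_batch(id_batch, id_to_token):
    """Map a batch of token-id sequences to whitespace-joined strings."""
    return [" ".join(id_to_token[i] for i in line) for line in id_batch]

if __name__ == "__main__":
    # Toy vocabulary; a real run would use the vocab's _id_to_token_map_py.
    id_to_token = {0: "<PAD>", 1: "hello", 2: "world", 3: "<EOS>"}

    # target.tolist() / predict.tolist() in the snippet yield nested lists like these.
    target_ids = [[1, 2, 3], [2, 1, 0]]
    predict_ids = [[1, 2, 3], [2, 2, 3]]

    targettxt = decode_batch(target_ids, id_to_token)
    predicttxt = decode_batch(predict_ids, id_to_token)

    # Mirrors the (commented-out) per-example print loop: examples are indexed
    # across steps with a batch size of 32, as in (step - 1) * 32 + index.
    step, batch_size = 1, 32
    for index, (tgt, prd) in enumerate(zip(targettxt, predicttxt)):
        print("{}:{} target:{} predict:{}".format(
            (step - 1) * batch_size + index, step, tgt, prd))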