bd0245981d11faa2aad750fc32821205689da874,tensorflowTUT/tf17_dropout/full_code.py,,,#,39

Before Change


# Train with gradient descent on the cross-entropy loss (pre-TF-1.0 API).
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)

sess = tf.Session()
merged = tf.merge_all_summaries()
# summary writer goes in here
train_writer = tf.train.SummaryWriter("logs/train", sess.graph)
test_writer = tf.train.SummaryWriter("logs/test", sess.graph)

# tf.initialize_all_variables() no longer valid from
# 2017-03-02 if using tensorflow >= 0.12
if int((tf.__version__).split(".")[1]) < 12 and int((tf.__version__).split(".")[0]) < 1:
    init = tf.initialize_all_variables()
else:
    init = tf.global_variables_initializer()
sess.run(init)

for i in range(500):
    # here to determine the keeping probability (dropout keeps 50% of units while training)
    sess.run(train_step, feed_dict={xs: X_train, ys: y_train, keep_prob: 0.5})
    if i % 50 == 0:
        # record loss every 50 steps; keep_prob=1 disables dropout for evaluation
        train_result = sess.run(merged, feed_dict={xs: X_train, ys: y_train, keep_prob: 1})
        test_result = sess.run(merged, feed_dict={xs: X_test, ys: y_test, keep_prob: 1})
        train_writer.add_summary(train_result, i)
        test_writer.add_summary(test_result, i)

After Change


# the loss between prediction and real data
cross_entropy = tf.reduce_mean(-tf.reduce_sum(ys * tf.log(prediction),
                                              reduction_indices=[1]))  # loss
tf.summary.scalar("loss", cross_entropy)
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)

sess = tf.Session()
merged = tf.summary.merge_all()
# summary writer goes in here
train_writer = tf.summary.FileWriter("logs/train", sess.graph)
test_writer = tf.summary.FileWriter("logs/test", sess.graph)

# tf.initialize_all_variables() no longer valid from
# 2017-03-02 if using tensorflow >= 0.12
if int((tf.__version__).split(".")[1]) < 12 and int((tf.__version__).split(".")[0]) < 1:
    init = tf.initialize_all_variables()
else:
    init = tf.global_variables_initializer()
sess.run(init)

for i in range(500):
    # here to determine the keeping probability (dropout keeps 50% of units while training)
    sess.run(train_step, feed_dict={xs: X_train, ys: y_train, keep_prob: 0.5})
    if i % 50 == 0:
        # record loss every 50 steps; keep_prob=1 disables dropout for evaluation
        train_result = sess.run(merged, feed_dict={xs: X_train, ys: y_train, keep_prob: 1})
        test_result = sess.run(merged, feed_dict={xs: X_test, ys: y_test, keep_prob: 1})
        train_writer.add_summary(train_result, i)
        test_writer.add_summary(test_result, i)
Italian Trulli
In pattern: SUPERPATTERN

Frequency: 5

Non-data size: 6

Instances


Project Name: MorvanZhou/tutorials
Commit Name: bd0245981d11faa2aad750fc32821205689da874
Time: 2017-03-18
Author: morvanzhou@hotmail.com
File Name: tensorflowTUT/tf17_dropout/full_code.py
Class Name:
Method Name:


Project Name: wenwei202/iss-rnns
Commit Name: 14ee0e46ce23ab031fda1ba7f2aa3975d98486ee
Time: 2017-05-09
Author: seominjoon@gmail.com
File Name: basic/model.py
Class Name: Model
Method Name: __init__


Project Name: jakeret/tf_unet
Commit Name: 6f1a876bf8614a7aa0f1a7043d2f15f0b3064d5f
Time: 2016-12-27
Author: jakeret@phys.ethz.ch
File Name: tf_unet/unet.py
Class Name: Trainer
Method Name: _initialize


Project Name: wenwei202/iss-rnns
Commit Name: 14ee0e46ce23ab031fda1ba7f2aa3975d98486ee
Time: 2017-05-09
Author: seominjoon@gmail.com
File Name: basic_cnn/model.py
Class Name: Model
Method Name: __init__


Project Name: MorvanZhou/tutorials
Commit Name: bd0245981d11faa2aad750fc32821205689da874
Time: 2017-03-18
Author: morvanzhou@hotmail.com
File Name: tensorflowTUT/tf17_dropout/full_code.py
Class Name:
Method Name:


Project Name: dpressel/mead-baseline
Commit Name: 20aa03a2285ab4a18a6c63d6d42ca472ff699567
Time: 2017-01-23
Author: dpressel@gmail.com
File Name: seq2seq/tf/train.py
Class Name: Trainer
Method Name: __init__