544625545afe2e9c2b358e356c11bb8be53ceb51,keras/optimizers.py,Adagrad,__init__,#Adagrad#Any#Any#Any#,320
Before Change
self.learning_rate = K.variable(learning_rate, name="learning_rate")
self.decay = K.variable(decay, name="decay")
self.iterations = K.variable(0, dtype="int64", name="iterations")
if epsilon is None:
    epsilon = K.epsilon()
self.epsilon = epsilon
self.initial_decay = decay
@interfaces.legacy_get_updates_support
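For context, the Before Change body reads learning_rate, decay, and epsilon arguments that are bound outside the snippet; a plausible enclosing signature, inferred from those names rather than copied verbatim from the commit, is:

def __init__(self, learning_rate=0.01, epsilon=None, decay=0., **kwargs):
    super(Adagrad, self).__init__(**kwargs)
    # ... attribute and variable setup as shown in the Before Change body above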
After Change
def __init__(self, learning_rate=0.01, momentum=0.,
             nesterov=False, **kwargs):
    learning_rate = kwargs.pop("lr", learning_rate)
    self.initial_decay = kwargs.pop("decay", 0.0)
    super(SGD, self).__init__(**kwargs)
    with K.name_scope(self.__class__.__name__):
        self.iterations = K.variable(0, dtype="int64", name="iterations")
        self.learning_rate = K.variable(learning_rate, name="learning_rate")
        self.momentum = K.variable(momentum, name="momentum")
        self.decay = K.variable(self.initial_decay, name="decay")
    self.nesterov = nesterov

@interfaces.legacy_get_updates_support
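The After Change snippet shows the SGD-shaped instance of this superpattern, while the record header and the instance list below name Adagrad and RMSprop as other occurrences. A minimal sketch of the same pattern applied to an Adagrad-style __init__ (assuming keras 2.x backend APIs; the class name AdagradLike is hypothetical and this is not the verbatim commit) would be:

from keras import backend as K
from keras.optimizers import Optimizer

class AdagradLike(Optimizer):
    def __init__(self, learning_rate=0.01, **kwargs):
        # Pop auxiliary/legacy kwargs before delegating to the base Optimizer.
        self.initial_decay = kwargs.pop("decay", 0.0)
        self.epsilon = kwargs.pop("epsilon", K.epsilon())
        learning_rate = kwargs.pop("lr", learning_rate)  # accept the legacy "lr" alias
        super(AdagradLike, self).__init__(**kwargs)
        with K.name_scope(self.__class__.__name__):
            self.learning_rate = K.variable(learning_rate, name="learning_rate")
            self.decay = K.variable(self.initial_decay, name="decay")
            self.iterations = K.variable(0, dtype="int64", name="iterations")

With this shape, AdagradLike(lr=0.01) and AdagradLike(learning_rate=0.01) configure the same optimizer, since the legacy keyword is remapped to learning_rate before any backend variables are created.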
In pattern: SUPERPATTERN
Frequency: 3
Non-data size: 9
Instances
Project Name: keras-team/keras
Commit Name: 544625545afe2e9c2b358e356c11bb8be53ceb51
Time: 2019-08-25
Author: tanzheny@google.com
File Name: keras/optimizers.py
Class Name: Adagrad
Method Name: __init__
Project Name: keras-team/keras
Commit Name: 08f6bdeb5652550f36210f64aefda3d0d41e2d79
Time: 2019-05-28
Author: tanzheny@google.com
File Name: keras/optimizers.py
Class Name: Adagrad
Method Name: __init__
Project Name: keras-team/keras
Commit Name: 08f6bdeb5652550f36210f64aefda3d0d41e2d79
Time: 2019-05-28
Author: tanzheny@google.com
File Name: keras/optimizers.py
Class Name: RMSprop
Method Name: __init__