self.lr = shared_scalar(lr)

def get_updates(self, params, constraints, grads):
    accumulators = [shared_zeros(p.get_value().shape) for p in params]
    self.updates = []
    for p, g, a, c in zip(params, grads, accumulators, constraints):
        new_a = a + g ** 2  # update accumulator
        self.updates.append((a, new_a))
        new_p = p - self.lr * g / T.sqrt(new_a + self.epsilon)
        self.updates.append((p, c(new_p)))  # apply constraints
    return self.updates
def get_config(self):
    return {"name": self.__class__.__name__,
            "lr": float(self.lr.get_value()),
            "epsilon": self.epsilon}
After Change
def get_updates(self, params, constraints, grads):
    # accumulators = [shared_zeros(theano.shared(p).get_value().shape) for p in params]
    accumulators = [np.zeros_like(p) for p in params]
    # self.updates = []
    new_weights = []
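The diff is truncated at this point. By analogy with the Theano loop above, the NumPy version would plausibly continue along these lines (a sketch under that assumption; the loop body and the returned new_weights list are inferred, not the actual committed code):

for p, g, a, c in zip(params, grads, accumulators, constraints):
    new_a = a + g ** 2  # update accumulator (assumed, mirrors the Theano version)
    new_p = p - self.lr * g / np.sqrt(new_a + self.epsilon)
    new_weights.append(c(new_p))  # apply constraints, collect updated weights
return new_weights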