self._var_form_base, self._operator,
self._optimizer)
# pick maximum gradients and choose that excitation
max_grad = max(cur_grads, key=lambda item: np.abs(item[0]))
if prev_max != () and prev_max[1] == max_grad[1]:
cur_grads_red = [g for g in cur_grads if g[1] != prev_max[1]]
max_grad = max(cur_grads_red, key=lambda item: np.abs(item[0]))
if prev_prev_max != () and prev_prev_max[1] == max_grad[1]:
# After Change
# -> this results in any number of repeating numbers being detected
threshold_satisfied = False
prev_op_indices = []
theta = []
iteration = 0
while not threshold_satisfied:
iteration += 1
logger.info("--- Iteration #%s ---", str(iteration))
# compute gradients
cur_grads = self._compute_gradients(self._excitation_pool, theta, self._delta,
self._var_form_base, self._operator,
self._optimizer)
# pick maximum gradients and choose that excitation
max_grad_index, max_grad = max(enumerate(cur_grads),
key=lambda item: np.abs(item[1][0]))
prev_op_indices.append(max_grad_index)
# check indices of picked gradients for cycles
if cycle_regex.search(" ".join(map(str, prev_op_indices))) is not None:
logger.info("Alternating sequence found. Finishing.")
logger.info("Final maximum gradient: %s", str(np.abs(max_grad[0])))