fca0d19f28eb612862376edc73e0d1ed4c397e9f,pyglmnet/pyglmnet.py,GLM,fit,#GLM#Any#Any#,664

Before Change



        # Initialize loss accumulators
        if callable(self.callback):
            self.loss_trace = list()

        # Iterative updates
        for t in range(0, self.max_iter):
            if self.solver == "batch-gradient":
                grad = _grad_L2loss(self.distr,
                                    alpha, self.Tau,
                                    reg_lambda, X, y, self.eta,
                                    beta)
                if t > 1:
                    if np.linalg.norm(grad) / np.linalg.norm(beta) < tol / lr:
                        msg = ("\tConverged in {0:d} iterations".format(t))
                        logger.info(msg)
                        break
                beta = beta - self.learning_rate * grad

            elif self.solver == "cdfast":
                beta_old = deepcopy(beta)
                beta, z = \
                    self._cdfast(X, y, z, ActiveSet, beta, reg_lambda)
                if t > 1:
                    if ((np.linalg.norm(beta - beta_old) /
                         np.linalg.norm(beta_old) < tol / lr)):
                        msg = ("\tConverged in {0:d} iterations".format(t))
                        logger.info(msg)
                        break
            # Apply proximal operator
            beta[1:] = self._prox(beta[1:], reg_lambda * alpha)

            # Update active set
            if self.solver == "cdfast":
                ActiveSet[beta == 0] = 0
                ActiveSet[0] = 1.

            # Compute and save loss if callbacks are requested
            if callable(self.callback):
                self.loss_trace.append(self.callback(self.distr, alpha,
                                                     self.Tau, reg_lambda,
                                                     X, y, self.eta,
                                                     self.group, beta))

        # Update the estimated variables
        self.beta0_ = beta[0]
        self.beta_ = beta[1:]
        self.ynull_ = np.mean(y)
        return self

After Change



            # Compute and save loss if callbacks are requested
            if callable(self.callback):
                self.callback(beta)

        # Update the estimated variables
        self.beta0_ = beta[0]
        self.beta_ = beta[1:]
In pattern: SUPERPATTERN

Frequency: 3

Non-data size: 5

Instances


Project Name: glm-tools/pyglmnet
Commit Name: fca0d19f28eb612862376edc73e0d1ed4c397e9f
Time: 2018-09-07
Author: mainakjas@gmail.com
File Name: pyglmnet/pyglmnet.py
Class Name: GLM
Method Name: fit


Project Name: glm-tools/pyglmnet
Commit Name: b6af7a54569d20d53080bc09687732c8325b00a8
Time: 2018-09-08
Author: mainakjas@gmail.com
File Name: pyglmnet/pyglmnet.py
Class Name: GLM
Method Name: fit


Project Name: glm-tools/pyglmnet
Commit Name: fca0d19f28eb612862376edc73e0d1ed4c397e9f
Time: 2018-09-07
Author: mainakjas@gmail.com
File Name: pyglmnet/pyglmnet.py
Class Name: GLM
Method Name: fit


Project Name: scipy/scipy
Commit Name: a50ca1edd57935e1006cdb146d6c5bcf231c859f
Time: 2020-03-17
Author: andyfaff@gmail.com
File Name: scipy/optimize/_differentialevolution.py
Class Name: DifferentialEvolutionSolver
Method Name: solve