775122950d145382146e9120308432a9faf9a9b8,fairseq/optim/adamax.py,Adamax,step,#Adamax#Any#,95

Before Change


                if grad.is_sparse:
                    raise RuntimeError("Adamax does not support sparse gradients")

                p_data_fp32 = p.data.float()

                state = self.state[p]

                # State initialization
                if len(state) == 0:
                    state["step"] = 0
                    state["exp_avg"] = torch.zeros_like(p_data_fp32)
                    state["exp_inf"] = torch.zeros_like(p_data_fp32)
                else:
                    state["exp_avg"] = state["exp_avg"].type_as(p_data_fp32)

After Change


                    raise RuntimeError("Adamax does not support sparse gradients")

                p_data_fp32 = p.data
                if p.data.dtype in {torch.float16, torch.bfloat16}:
                    p_data_fp32 = p_data_fp32.float()

                state = self.state[p]

                # State initialization
                if len(state) == 0:
                    state["step"] = 0
                    state["exp_avg"] = torch.zeros_like(p_data_fp32)
                    state["exp_inf"] = torch.zeros_like(p_data_fp32)
                else:
                    state["exp_avg"] = state["exp_avg"].to(p_data_fp32)
In pattern: SUPERPATTERN

Frequency: 3

Non-data size: 12

Instances


Project Name: pytorch/fairseq
Commit Name: 775122950d145382146e9120308432a9faf9a9b8
Time: 2020-05-18
Author: myleott@fb.com
File Name: fairseq/optim/adamax.py
Class Name: Adamax
Method Name: step


Project Name: pytorch/fairseq
Commit Name: 775122950d145382146e9120308432a9faf9a9b8
Time: 2020-05-18
Author: myleott@fb.com
File Name: fairseq/optim/adafactor.py
Class Name: Adafactor
Method Name: step


Project Name: pytorch/fairseq
Commit Name: 775122950d145382146e9120308432a9faf9a9b8
Time: 2020-05-18
Author: myleott@fb.com
File Name: fairseq/optim/adam.py
Class Name: Adam
Method Name: step