cde27bc0dad15b0bef4b0568f6ac1778920447e5,models/TransformerModel.py,TransformerModel,_prepare_feature,#TransformerModel#Any#Any#Any#,339
Before Change
att_feats = pack_wrapper(self.att_embed, att_feats, att_masks)

if att_masks is None:
    att_masks = att_feats.new_ones(att_feats.shape[:2], dtype=torch.long)
att_masks = att_masks.unsqueeze(-2)

if seq is not None:
    # crop the last one
    seq = seq[:, :-1]
    seq_mask = (seq.data > 0)
    seq_mask[:, 0] += 1
    seq_mask = seq_mask.unsqueeze(-2)
    seq_mask = seq_mask & subsequent_mask(seq.size(-1)).to(seq_mask)
else:
    seq_mask = None

return att_feats, seq, att_masks, seq_mask

def _forward(self, fc_feats, att_feats, seq, att_masks=None):
    att_feats, seq, att_masks, seq_mask = self._prepare_feature(att_feats, att_masks, seq)
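For reference, subsequent_mask is the standard causal-mask helper from the Annotated Transformer that models/TransformerModel.py is modeled on; a minimal PyTorch-only sketch (the repository's own version may differ in dtype or construction):

import torch

def subsequent_mask(size):
    # (1, size, size) lower-triangular mask: position i may only attend to positions <= i.
    attn_shape = (1, size, size)
    return torch.tril(torch.ones(attn_shape, dtype=torch.uint8)) == 1

In the snippet above it is AND-ed with seq_mask, so each target token attends only to non-padding tokens at or before its own position.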
After Change
att_feats, seq, att_masks, seq_mask = self._prepare_feature_forward(att_feats, att_masks)
memory = self.model.encode(att_feats, att_masks)
return fc_feats[..., :1], att_feats[..., :1], memory, att_masks

def _prepare_feature_forward(self, att_feats, att_masks=None, seq=None):
    att_feats, att_masks = self.clip_att(att_feats, att_masks)
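The refactor moves mask and feature preparation into _prepare_feature_forward, so _prepare_feature can additionally run the encoder once and hand back the cached memory for every decoding step. A hedged sketch of that decode-time use, assuming the Annotated-Transformer-style EncoderDecoder (model.model with decode() and a log-softmax generator) used elsewhere in this file; the helper name greedy_decode_step is illustrative, not taken from the commit:

import torch

def greedy_decode_step(model, memory, att_masks, ys):
    # One greedy step: reuse the encoder output `memory` cached by _prepare_feature
    # instead of re-encoding att_feats, and mask the decoder causally over `ys`.
    out = model.model.decode(memory, att_masks, ys,
                             subsequent_mask(ys.size(1)).to(memory.device))
    logprobs = model.model.generator(out[:, -1])       # assumed log-softmax over the vocabulary
    next_word = logprobs.argmax(dim=-1, keepdim=True)
    return torch.cat([ys, next_word], dim=1)

Because memory and att_masks come straight from _prepare_feature, the encoder runs exactly once per image regardless of how many tokens are sampled.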
In pattern: SUPERPATTERN
Frequency: 3
Non-data size: 10
Instances
Project Name: ruotianluo/self-critical.pytorch
Commit Name: cde27bc0dad15b0bef4b0568f6ac1778920447e5
Time: 2019-04-07
Author: rluo@ttic.edu
File Name: models/TransformerModel.py
Class Name: TransformerModel
Method Name: _prepare_feature

Project Name: ruotianluo/self-critical.pytorch
Commit Name: cde27bc0dad15b0bef4b0568f6ac1778920447e5
Time: 2019-04-07
Author: rluo@ttic.edu
File Name: models/TransformerModel.py
Class Name: TransformerModel
Method Name: _prepare_feature

Project Name: rusty1s/pytorch_geometric
Commit Name: 17aafdea24122bbb777f71a8ea7c2505e3fee84c
Time: 2019-03-15
Author: matthias.fey@tu-dortmund.de
File Name: torch_geometric/nn/models/autoencoder.py
Class Name: GAE
Method Name: reconstruction_loss