f25bc176d0365234ebb051d5069edff24ad2de4d,python/dgl/nn/mxnet/conv/sgconv.py,SGConv,forward,#SGConv#Any#Any#,55

Before Change


        If ``cache`` is set to True, ``feat`` and ``graph`` should not change during
        training, or you will get wrong results.
        
        graph = graph.local_var()
        if self._cached_h is not None:
            feat = self._cached_h
        else:
            # compute normalization
            degs = nd.clip(graph.in_degrees().astype(feat.dtype), 1, float("inf"))
            norm = nd.power(degs, -0.5).expand_dims(1)
            norm = norm.as_in_context(feat.context)
            # compute (D^-1/2 A D^-1/2)^k X
            for _ in range(self._k):
                feat = feat * norm
                graph.ndata["h"] = feat
                graph.update_all(fn.copy_u("h", "m"),
                                 fn.sum("m", "h"))
                feat = graph.ndata.pop("h")
                feat = feat * norm

            if self.norm is not None:
                feat = self.norm(feat)

            # cache feature
            if self._cached:
                self._cached_h = feat
        return self.fc(feat)

After Change


        If ``cache`` is set to True, ``feat`` and ``graph`` should not change during
        training, or you will get wrong results.
        
        with graph.local_scope():
            if self._cached_h is not None:
                feat = self._cached_h
            else:
                # compute normalization
                degs = nd.clip(graph.in_degrees().astype(feat.dtype), 1, float("inf"))
                norm = nd.power(degs, -0.5).expand_dims(1)
                norm = norm.as_in_context(feat.context)
                # compute (D^-1/2 A D^-1/2)^k X
                for _ in range(self._k):
                    feat = feat * norm
                    graph.ndata["h"] = feat
                    graph.update_all(fn.copy_u("h", "m"),
                                     fn.sum("m", "h"))
                    feat = graph.ndata.pop("h")
                    feat = feat * norm

                if self.norm is not None:
                    feat = self.norm(feat)

                # cache feature
                if self._cached:
                    self._cached_h = feat
            return self.fc(feat)
Italian Trulli
In pattern: SUPERPATTERN

Frequency: 3

Non-data size: 8

Instances


Project Name: dmlc/dgl
Commit Name: f25bc176d0365234ebb051d5069edff24ad2de4d
Time: 2020-05-01
Author: wmjlyjemaine@gmail.com
File Name: python/dgl/nn/mxnet/conv/sgconv.py
Class Name: SGConv
Method Name: forward


Project Name: dmlc/dgl
Commit Name: f25bc176d0365234ebb051d5069edff24ad2de4d
Time: 2020-05-01
Author: wmjlyjemaine@gmail.com
File Name: python/dgl/nn/tensorflow/conv/sgconv.py
Class Name: SGConv
Method Name: call


Project Name: dmlc/dgl
Commit Name: f25bc176d0365234ebb051d5069edff24ad2de4d
Time: 2020-05-01
Author: wmjlyjemaine@gmail.com
File Name: python/dgl/nn/pytorch/conv/sgconv.py
Class Name: SGConv
Method Name: forward