0ad33d606682537466f3430fc6d6ac7d47460f1a,beginner_source/examples_autograd/two_layer_net_autograd.py,,,#,24

Before Change


# Create random Tensors for weights, and wrap them in Variables.
# Setting requires_grad=True indicates that we want to compute gradients with
# respect to these Variables during the backward pass.
w1 = Variable(torch.randn(D_in, H).type(dtype), requires_grad=True)
w2 = Variable(torch.randn(H, D_out).type(dtype), requires_grad=True)

learning_rate = 1e-6
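
For context, a minimal sketch of how the pre-0.4, Variable-based training loop continued from this point. It assumes x and y were wrapped in Variables earlier in the old script and follows the old autograd idiom of updating weights through .data and zeroing .grad.data; this continuation is not part of the diff hunk above.

for t in range(500):
    # Forward pass: autograd records operations on Variables to build the graph.
    y_pred = x.mm(w1).clamp(min=0).mm(w2)
    loss = (y_pred - y).pow(2).sum()
    print(t, loss.data[0])  # in the old API the scalar lived inside loss.data

    # Backward pass populates w1.grad and w2.grad.
    loss.backward()

    # Update weights in place via .data so the step is not tracked by autograd,
    # then zero the gradients for the next iteration.
    w1.data -= learning_rate * w1.grad.data
    w2.data -= learning_rate * w2.grad.data
    w1.grad.data.zero_()
    w2.grad.data.zero_()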

After Change


# Create random Tensors to hold input and outputs.
# Setting requires_grad=False indicates that we do not need to compute gradients
# with respect to these Tensors during the backward pass.
x = torch.randn(N, D_in, device=device, dtype=dtype)
y = torch.randn(N, D_out, device=device, dtype=dtype)

# Create random Tensors for weights.
# Setting requires_grad=True indicates that we want to compute gradients with
# respect to these Tensors during the backward pass.
w1 = torch.randn(D_in, H, device=device, dtype=dtype, requires_grad=True)
w2 = torch.randn(H, D_out, device=device, dtype=dtype, requires_grad=True)

learning_rate = 1e-6
for t in range(500):
    # Forward pass: compute predicted y using operations on Tensors; these
    # are exactly the same operations we used to compute the forward pass using
    # Tensors, but we do not need to keep references to intermediate values since
    # we are not implementing the backward pass by hand.
    y_pred = x.mm(w1).clamp(min=0).mm(w2)

    # Compute and print loss using operations on Tensors.
    # Now loss is a Tensor of shape (1,)
    # loss.item() gets the scalar value held in the loss.
    loss = (y_pred - y).pow(2).sum()
    print(t, loss.item())

    # Use autograd to compute the backward pass. This call will compute the
    # gradient of loss with respect to all Tensors with requires_grad=True.
    loss.backward()
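
The diff hunk stops at the backward pass. For completeness, a sketch of how the 0.4-style loop typically continues, with the in-place weight update wrapped in torch.no_grad() so that autograd does not track the update itself; this continuation follows the standard tutorial pattern and is assumed rather than shown in the diff.

    # Manually update weights using gradient descent. torch.no_grad() keeps the
    # update (and the gradient zeroing) out of the autograd graph.
    with torch.no_grad():
        w1 -= learning_rate * w1.grad
        w2 -= learning_rate * w2.grad

        # Zero the gradients before the next iteration.
        w1.grad.zero_()
        w2.grad.zero_()
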
In pattern: SUPERPATTERN

Frequency: 3

Non-data size: 5

Instances


Project Name: pytorch/tutorials
Commit Name: 0ad33d606682537466f3430fc6d6ac7d47460f1a
Time: 2018-04-24
Author: soumith@gmail.com
File Name: beginner_source/examples_autograd/two_layer_net_autograd.py
Class Name:
Method Name:


Project Name: pytorch/tutorials
Commit Name: 0ad33d606682537466f3430fc6d6ac7d47460f1a
Time: 2018-04-24
Author: soumith@gmail.com
File Name: beginner_source/examples_autograd/two_layer_net_custom_function.py
Class Name:
Method Name:


Project Name: pytorch/tutorials
Commit Name: 0ad33d606682537466f3430fc6d6ac7d47460f1a
Time: 2018-04-24
Author: soumith@gmail.com
File Name: beginner_source/examples_tensor/two_layer_net_tensor.py
Class Name:
Method Name: