
Commit 776e959

Merge branch 'main' into 2.3-RC-TEST
2 parents: 2d59c4e + 8e65226

1 file changed (+27 −10)

beginner_source/blitz/neural_networks_tutorial.py

Lines changed: 27 additions & 10 deletions
@@ -55,16 +55,33 @@ def __init__(self):
         self.fc2 = nn.Linear(120, 84)
         self.fc3 = nn.Linear(84, 10)
 
-    def forward(self, x):
-        # Max pooling over a (2, 2) window
-        x = F.max_pool2d(F.relu(self.conv1(x)), (2, 2))
-        # If the size is a square, you can specify with a single number
-        x = F.max_pool2d(F.relu(self.conv2(x)), 2)
-        x = torch.flatten(x, 1) # flatten all dimensions except the batch dimension
-        x = F.relu(self.fc1(x))
-        x = F.relu(self.fc2(x))
-        x = self.fc3(x)
-        return x
+    def forward(self, input):
+        # Convolution layer C1: 1 input image channel, 6 output channels,
+        # 5x5 square convolution, it uses RELU activation function, and
+        # outputs a Tensor with size (N, 6, 28, 28), where N is the size of the batch
+        c1 = F.relu(self.conv1(input))
+        # Subsampling layer S2: 2x2 grid, purely functional,
+        # this layer does not have any parameter, and outputs a (N, 6, 14, 14) Tensor
+        s2 = F.max_pool2d(c1, (2, 2))
+        # Convolution layer C3: 6 input channels, 16 output channels,
+        # 5x5 square convolution, it uses RELU activation function, and
+        # outputs a (N, 16, 10, 10) Tensor
+        c3 = F.relu(self.conv2(s2))
+        # Subsampling layer S4: 2x2 grid, purely functional,
+        # this layer does not have any parameter, and outputs a (N, 16, 5, 5) Tensor
+        s4 = F.max_pool2d(c3, 2)
+        # Flatten operation: purely functional, outputs a (N, 400) Tensor
+        s4 = torch.flatten(s4, 1)
+        # Fully connected layer F5: (N, 400) Tensor input,
+        # and outputs a (N, 120) Tensor, it uses RELU activation function
+        f5 = F.relu(self.fc1(s4))
+        # Fully connected layer F6: (N, 120) Tensor input,
+        # and outputs a (N, 84) Tensor, it uses RELU activation function
+        f6 = F.relu(self.fc2(f5))
+        # Gaussian layer OUTPUT: (N, 84) Tensor input, and
+        # outputs a (N, 10) Tensor
+        output = self.fc3(f6)
+        return output
 
 
 net = Net()
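
The shape comments in the new forward rely on layer definitions earlier in __init__ that this hunk does not show (only fc2 and fc3 are visible). As a quick sanity check of the annotated (N, C, H, W) sizes, here is a minimal sketch assuming conv1 = nn.Conv2d(1, 6, 5), conv2 = nn.Conv2d(6, 16, 5), and a single 32x32 grayscale input; these layer definitions are inferred from the shape comments, not taken from this diff.

import torch
import torch.nn as nn
import torch.nn.functional as F

# Assumed layer definitions (not visible in this hunk), inferred from the
# shape comments: C1 maps 1 -> 6 channels, C3 maps 6 -> 16 channels, 5x5 kernels.
conv1 = nn.Conv2d(1, 6, 5)
conv2 = nn.Conv2d(6, 16, 5)

x = torch.randn(1, 1, 32, 32)      # N=1 batch of one 32x32 grayscale image

c1 = F.relu(conv1(x))
print(c1.shape)                    # torch.Size([1, 6, 28, 28])

s2 = F.max_pool2d(c1, (2, 2))
print(s2.shape)                    # torch.Size([1, 6, 14, 14])

c3 = F.relu(conv2(s2))
print(c3.shape)                    # torch.Size([1, 16, 10, 10])

s4 = F.max_pool2d(c3, 2)
print(s4.shape)                    # torch.Size([1, 16, 5, 5])

flat = torch.flatten(s4, 1)
print(flat.shape)                  # torch.Size([1, 400]), i.e. 16 * 5 * 5

Each unpadded 5x5 convolution trims 4 pixels from each spatial dimension (32 -> 28, 14 -> 10) and each 2x2 max pool halves it, which is where the 400-element input to fc1 comes from.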
