torch_terminal.py
#%%
import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
"""
input:[batch_size,in_channel,height,width]
kernel:[out_channel,in_channel,kh,kw]
"""
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        # 1 input image channel, 6 output channels, 5x5 square convolution kernel
        self.conv1 = nn.Conv2d(1, 6, 5, padding=(2, 2))
        # padding 2 keeps the 32x32 size; the 2x2 max pool in forward() halves it to 16x16
        self.conv2 = nn.Conv2d(6, 16, 5, padding=(2, 2))
        # padding 2 keeps the 16x16 size; the 2x2 max pool in forward() halves it to 8x8
        # an affine operation: y = Wx + b
        self.fc1 = nn.Linear(16 * 8 * 8, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        # Max pooling over a (2, 2) window
        x = F.max_pool2d(F.relu(self.conv1(x)), (2, 2))
        print("after conv1 size is {}".format(x.size()))
        # If the pooling window is square you can specify a single number
        x = F.max_pool2d(F.relu(self.conv2(x)), 2)
        print("after conv2 size is {}".format(x.size()))
        x = x.view(-1, self.num_flat_features(x))
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        return x

    def num_flat_features(self, x):
        size = x.size()[1:]  # all dimensions except the batch dimension
        num_features = 1
        for s in size:
            num_features *= s
        return num_features
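
# Note: for the 32x32 input created below, the activation reaching num_flat_features()
# has size [1, 16, 8, 8], so it returns 16 * 8 * 8 = 1024, matching fc1's in_features.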

net = Net()
print(net)
print(type(net.parameters()))
print("hello world")
input = Variable(torch.Tensor(np.random.randint(1, 10, size=(1, 1, 32, 32))))
net(input)  # calling the module invokes forward() (and any registered hooks)
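
# Sanity-check sketch: with the 32x32 input above, each padded 5x5 conv keeps the
# spatial size (32 + 2*2 - 5 + 1 = 32) and each 2x2 max pool halves it, so fc1 sees
# 16 * 8 * 8 features and the output should be a batch of 10 class scores.
out = net(input)
print(out.size())  # expected: torch.Size([1, 10])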