Reputation: 435
I have the CNN architecture and forward pass function below. I want to freeze all layers except the last one. How can I do that? I have already trained this model on one dataset, so I have all the trained weights.
import torch.nn as nn
import torch.nn.functional as F

class ConvNet(nn.Module):
    def __init__(self):
        super(ConvNet, self).__init__()
        self.conv1 = nn.Conv2d(in_channels=2, out_channels=4, kernel_size=4)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(4, 8, 4)
        self.fc1 = nn.Linear(8 * 6 * 6, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 1)

    def forward(self, x):
        # input: n, 2, H, W
        x = self.pool(F.relu(self.conv1(x)))  # -> n, 4, H', W'
        x = self.pool(F.relu(self.conv2(x)))  # -> n, 8, 6, 6
        x = x.flatten(1)                      # -> n, 288 (flatten from dim 1 to keep the batch dimension)
        x = F.relu(self.fc1(x))               # -> n, 120
        x = F.relu(self.fc2(x))               # -> n, 84
        # x = nn.LeakyReLU(0.1)(self.fc3(x))
        x = self.fc3(x)                       # -> n, 1
        return x
Upvotes: 0
Views: 93
Reputation: 611
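You can freeze the layers by iterating over named_parameters() and setting requires_grad = False on every parameter that does not belong to fc3: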
net = ConvNet()
for n, p in net.named_parameters():
    # freeze everything except the final layer
    if 'fc3' not in n:
        p.requires_grad = False
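Since you already have trained weights, a minimal end-to-end sketch might look like this (the checkpoint filename and learning rate are placeholders, not from your post). Handing the optimizer only the still-trainable parameters is optional, since frozen parameters never receive gradients anyway, but it makes the intent explicit:

import torch

net = ConvNet()
net.load_state_dict(torch.load('convnet_weights.pt'))  # placeholder path to your saved weights

# Freeze everything except the final layer
for name, p in net.named_parameters():
    if 'fc3' not in name:
        p.requires_grad = False

# Give the optimizer only the parameters that still require gradients
optimizer = torch.optim.SGD(
    (p for p in net.parameters() if p.requires_grad), lr=0.001
)

# Sanity check: only fc3.weight and fc3.bias should print True
for name, p in net.named_parameters():
    print(name, p.requires_grad)

If you are adapting the model to a new task rather than continuing on the same one, you may also want to re-initialize the unfrozen layer before training, e.g. net.fc3 = nn.Linear(84, 1).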
Upvotes: 1