x = self.layer5(x)  # [64 13]
# print("layer5", x.shape)
x = x.view(x.size(0), -1)
# print("view", x.shape)
x = self.fc(x)
return x
# layer5 is a convolutional block, defined as:
self.layer5 = nn.Sequential(
    nn.Conv1d(64, 64, kernel_size=3),
    nn.BatchNorm1d(64),
    nn.ReLU(inplace=True),
    nn.MaxPool1d(kernel_size=2, stride=2)
    # nn.AdaptiveMaxPool1d(4)
)
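To see where the 576 in the error discussed below comes from, here is a shape walk-through of layer5 on its own. The input length 20 is a hypothetical value chosen so the flattened size matches the 576 reported later; substitute the real length of the feature map entering layer5 in your model.

import torch
import torch.nn as nn

# Hypothetical probe of layer5 with input (batch=2, channels=64, length=20);
# the length 20 is an assumption for illustration only.
layer5 = nn.Sequential(
    nn.Conv1d(64, 64, kernel_size=3),      # length: 20 -> 20 - 3 + 1 = 18
    nn.BatchNorm1d(64),
    nn.ReLU(inplace=True),
    nn.MaxPool1d(kernel_size=2, stride=2)  # length: 18 -> 18 // 2 = 9
)
x = torch.randn(2, 64, 20)
y = layer5(x)
print(y.shape)                      # torch.Size([2, 64, 9])
print(y.view(y.size(0), -1).shape)  # torch.Size([2, 576]), i.e. 64 * 9 = 576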
# fc is the fully connected classification head:
self.fc = nn.Sequential(
    # the flattened pooling output was assumed to be (None, 192),
    # but it is actually (None, 576)
    nn.Linear(192, 100),
    nn.ReLU(inplace=True),
    nn.Linear(100, out_channel)
)
Running the code above raises an error: after layer5(x) and the view, x is flattened to 576 features per sample, while nn.Linear(192, 100) expects 192 input features and produces 100 outputs. Since the actual input size is 576, the first layer must be changed to nn.Linear(576, 100):
self.fc = nn.Sequential(
    # the flattened pooling output has shape (None, 576)
    nn.Linear(576, 100),
    nn.ReLU(inplace=True),
    nn.Linear(100, out_channel)
)
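Alternatively, the in_features can be inferred automatically so this kind of mismatch cannot recur. A minimal sketch using nn.LazyLinear (available in PyTorch 1.8+); out_channel is the class count from the original code:

self.fc = nn.Sequential(
    # LazyLinear infers in_features (576 here) from the first forward pass,
    # so the flattened size never has to be hard-coded.
    nn.LazyLinear(100),
    nn.ReLU(inplace=True),
    nn.Linear(100, out_channel)
)

The trade-off is that the lazy layer's weights are only materialized after one batch has passed through the model, so run a dummy forward pass before inspecting or loading its parameters.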