import torch
import torch.nn as nn

class Lenet5(nn.Module):
    def __init__(self):
        super().__init__()
        # Convolutional layers
        self.conv_layers = nn.Sequential(
            nn.Conv2d(1, 32, kernel_size=3, padding=1),    # 1 input channel, 32 output channels, 3x3 kernel, padding 1
            nn.ReLU(),                                     # ReLU activation
            nn.MaxPool2d(kernel_size=2),                   # 2x2 max pooling
            nn.Dropout(p=0.2),                             # dropout with probability 0.2
            nn.Conv2d(32, 64, kernel_size=3, padding=1),   # 32 -> 64 channels, 3x3 kernel, padding 1
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2),
            nn.Dropout(p=0.2),
            nn.Conv2d(64, 128, kernel_size=3, padding=1),  # 64 -> 128 channels, 3x3 kernel, padding 1
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2),
            nn.Dropout(p=0.2),
            nn.Conv2d(128, 128, kernel_size=3, padding=1), # 128 -> 128 channels, 3x3 kernel, padding 1
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2)
        )
        # Fully connected layers
        self.fc_layers = nn.Sequential(
            nn.Flatten(),                 # flatten each sample to a 1-D tensor
            nn.Linear(128 * 9 * 9, 512),  # 128*9*9 inputs (implies 144x144 input images), 512 outputs
            nn.ReLU(),
            nn.Dropout(p=0.5),
            nn.Linear(512, 3),            # 512 inputs, 3 outputs (one per class)
            nn.Softmax(dim=1)             # softmax over the class dimension for multi-class output
        )

    def forward(self, x):
        x = self.conv_layers(x)  # forward pass through the convolutional layers
        x = self.fc_layers(x)    # forward pass through the fully connected layers
        return x
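
A quick smoke test (a minimal sketch, not part of the original listing) helps confirm the flatten size: the 128 * 9 * 9 in the first Linear layer implies single-channel 144x144 inputs, since the four 2x2 max-pool stages reduce 144 -> 72 -> 36 -> 18 -> 9. The batch size of 4 below is an arbitrary choice for illustration.

import torch

model = Lenet5()
dummy = torch.randn(4, 1, 144, 144)  # assumed input shape: batch of 4 single-channel 144x144 images
out = model(dummy)
print(out.shape)       # torch.Size([4, 3])
print(out.sum(dim=1))  # each row sums to 1 because of the final Softmax

One caveat on the final nn.Softmax(dim=1): if this model is trained with nn.CrossEntropyLoss, that layer should be dropped so the network emits raw logits, because CrossEntropyLoss applies log-softmax internally and stacking the two tends to slow or destabilize training. With the Softmax kept in place, nn.NLLLoss applied to torch.log(out) would be the matching loss instead.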