lenet5.py

import torch
import torch.nn as nn


class Lenet5(nn.Module):
    def __init__(self):
        super().__init__()
        # Convolutional layers
        self.conv_layers = nn.Sequential(
            nn.Conv2d(1, 32, kernel_size=3, padding=1),    # 1 input channel, 32 output channels, 3x3 kernel, padding 1
            nn.ReLU(),                                     # ReLU activation
            nn.MaxPool2d(kernel_size=2),                   # 2x2 max pooling
            nn.Dropout(p=0.2),                             # dropout with probability 0.2
            nn.Conv2d(32, 64, kernel_size=3, padding=1),   # 32 input channels, 64 output channels, 3x3 kernel, padding 1
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2),
            nn.Dropout(p=0.2),
            nn.Conv2d(64, 128, kernel_size=3, padding=1),  # 64 input channels, 128 output channels, 3x3 kernel, padding 1
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2),
            nn.Dropout(p=0.2),
            nn.Conv2d(128, 128, kernel_size=3, padding=1), # 128 input channels, 128 output channels, 3x3 kernel, padding 1
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2)
        )
        # Fully connected layers
        self.fc_layers = nn.Sequential(
            nn.Flatten(),                 # flatten each sample to a 1-D tensor
            nn.Linear(128 * 9 * 9, 512),  # input size 128*9*9, output size 512
            nn.ReLU(),
            nn.Dropout(p=0.5),
            nn.Linear(512, 3),            # input size 512, output size 3 (number of classes)
            nn.Softmax(dim=1)             # softmax over the 3 classes; omit this layer if training with nn.CrossEntropyLoss, which applies log-softmax internally
        )
    def forward(self, x):
        x = self.conv_layers(x)  # convolutional forward pass
        x = self.fc_layers(x)    # fully connected forward pass
        return x
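
# Quick shape sanity check (a sketch, not part of the original file). The
# 128 * 9 * 9 flatten size implies a 1 x 150 x 150 grayscale input: the padded
# 3x3 convolutions preserve spatial size, and the four 2x2 max-pools map
# 150 -> 75 -> 37 -> 18 -> 9. The input size and the batch size of 4 below
# are illustrative assumptions.
if __name__ == "__main__":
    model = Lenet5()
    dummy = torch.randn(4, 1, 150, 150)  # assumed input shape: (N, 1, 150, 150)
    out = model(dummy)
    print(out.shape)  # expected: torch.Size([4, 3])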