1 Star 0 Fork 1

ink/Pytorch_AlexNet

加入 Gitee
与超过 1200万 开发者一起发现、参与优秀开源项目,私有仓库也完全免费 :)
免费加入
文件
克隆/下载
model.py 1.81 KB
一键复制 编辑 原始数据 按行查看 历史
ink 提交于 2020-08-10 08:07 . mophy 001 update
from torch import nn
from lrn_module import LRN
class AlexNet(nn.Module):
    """AlexNet (Krizhevsky et al., 2012) with the paper's LRN layers.

    Five convolutional stages followed by a three-layer fully connected
    classifier.  The classifier's first Linear expects 6*6*256 features,
    which the conv stack produces for 227x227 RGB input (layer1's 11x11/4
    conv has no padding, so 224x224 would NOT yield 6x6 here) -- assumed
    input size, confirm against the training pipeline.

    Args:
        num_classes: size of the final logits layer (default 1000, ImageNet).
    """

    def __init__(self, num_classes=1000):
        super().__init__()
        # Stage 1: 11x11/4 conv -> ReLU -> 3x3/2 max-pool -> local response norm.
        self.layer1 = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=96, kernel_size=11, stride=4),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            LRN(local_size=5, alpha=1e-4, beta=0.75, ACROSS_CHANNELS=True),
        )
        # Stage 2: grouped 5x5 conv (groups=2 mirrors the original two-GPU split).
        self.layer2 = nn.Sequential(
            nn.Conv2d(in_channels=96, out_channels=256, kernel_size=5, groups=2, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            LRN(local_size=5, alpha=1e-4, beta=0.75, ACROSS_CHANNELS=True),
        )
        # Stages 3-5: plain 3x3 convs, no pooling or normalization between them.
        self.layer3 = nn.Sequential(
            nn.Conv2d(in_channels=256, out_channels=384, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
        )
        self.layer4 = nn.Sequential(
            nn.Conv2d(in_channels=384, out_channels=384, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
        )
        self.layer5 = nn.Sequential(
            nn.Conv2d(in_channels=384, out_channels=256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
        )
        # Classifier head: two dropout-regularized hidden layers, then logits.
        self.layer6 = nn.Sequential(
            nn.Linear(in_features=6 * 6 * 256, out_features=4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
        )
        self.layer7 = nn.Sequential(
            nn.Linear(in_features=4096, out_features=4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
        )
        self.layer8 = nn.Linear(in_features=4096, out_features=num_classes)

    def forward(self, x):
        """Return (N, num_classes) logits for a batch of images x."""
        x = self.layer5(self.layer4(self.layer3(self.layer2(self.layer1(x)))))
        # BUGFIX: flatten per sample instead of view(-1, 6*6*256).  The old
        # form silently re-interpreted the batch dimension (wrong batch size,
        # scrambled logits) whenever the spatial map was not exactly 6x6;
        # keeping the batch axis makes any size mismatch fail loudly in layer6.
        x = x.view(x.size(0), -1)
        x = self.layer8(self.layer7(self.layer6(x)))
        return x
马建仓 AI 助手
尝试更多
代码解读
代码找茬
代码优化
1
https://gitee.com/inkCode/Pytorch_AlexNet.git
git@gitee.com:inkCode/Pytorch_AlexNet.git
inkCode
Pytorch_AlexNet
Pytorch_AlexNet
master

搜索帮助