一路向前/learn

This repository has not declared an open-source license file (LICENSE); before using it, check the project description and the upstream dependencies of its code.
10nn_seq.py 1.65 KB
LY committed on 2024-09-21 18:46 · Network construction, loss function, optimizer
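The script below builds a small CNN for 32×32 RGB input (CIFAR-10-sized images) in two equivalent ways: Method 1 wraps all layers in a single nn.Sequential (left commented out), while Method 2 declares each layer as a named attribute and calls them one by one in forward. A dummy batch is then pushed through the network and the computation graph is written to TensorBoard.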
import torch
from torch import nn
from torch.nn import Conv2d, MaxPool2d, Flatten, Linear, Sequential
from torch.utils.tensorboard import SummaryWriter


class TD(nn.Module):
    def __init__(self):
        super(TD, self).__init__()
        # # Method 1: use Sequential to put all the layers into one model
        # self.model = Sequential(
        #     Conv2d(3, 32, 5, padding=2),
        #     MaxPool2d(2),
        #     Conv2d(32, 32, 5, padding=2),
        #     MaxPool2d(2),
        #     Conv2d(32, 64, 5, padding=2),
        #     MaxPool2d(2),
        #     Flatten(),
        #     Linear(1024, 64),
        #     Linear(64, 10),
        # )
        # Method 2: define each layer yourself and call them one by one in forward
        self.conv1 = Conv2d(3, 32, 5, padding=2)
        self.maxpool1 = MaxPool2d(2)
        self.conv2 = Conv2d(32, 32, 5, padding=2)
        self.maxpool2 = MaxPool2d(2)
        self.conv3 = Conv2d(32, 64, 5, padding=2)
        self.maxpool3 = MaxPool2d(2)
        self.flatten = Flatten()
        self.linear1 = Linear(1024, 64)
        self.linear2 = Linear(64, 10)

    def forward(self, x):
        # # Method 1: just call the Sequential model directly
        # x = self.model(x)
        # Method 2: every layer has to be called explicitly
        x = self.conv1(x)
        x = self.maxpool1(x)
        x = self.conv2(x)
        x = self.maxpool2(x)
        x = self.conv3(x)
        x = self.maxpool3(x)
        x = self.flatten(x)
        x = self.linear1(x)
        x = self.linear2(x)
        return x


td = TD()
input = torch.ones((64, 3, 32, 32))  # dummy batch of 64 images, 3 channels, 32x32
output = td(input)
print(output.shape)  # expected: torch.Size([64, 10])

writer = SummaryWriter("logs_seq")
writer.add_graph(td, input)  # log the network graph for TensorBoard
writer.close()
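The commit message also mentions a loss function and an optimizer, which this file does not yet include. Continuing from the script above, the sketch below shows one typical way to attach them; the choice of nn.CrossEntropyLoss, torch.optim.SGD, the learning rate, and the dummy labels are illustrative assumptions, not taken from the repository.

# Sketch only: attach a loss function and optimizer to the TD network defined above.
loss_fn = nn.CrossEntropyLoss()                        # classification loss over the 10 output classes
optimizer = torch.optim.SGD(td.parameters(), lr=0.01)  # lr=0.01 is an assumed value

imgs = torch.ones((64, 3, 32, 32))                     # dummy batch, same shape as above
targets = torch.zeros(64, dtype=torch.long)            # hypothetical class labels in [0, 10)

outputs = td(imgs)
loss = loss_fn(outputs, targets)                       # compare predictions with labels
optimizer.zero_grad()                                  # clear gradients from the previous step
loss.backward()                                        # backpropagate
optimizer.step()                                       # update the network weights

In a real training run these steps (forward, loss, backward, step) would sit inside a loop over a DataLoader, one iteration per batch.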