一个有序的容器,神经网络模块将按照在传入构造器的顺序依次被添加到计算图中执行,同时以神经网络模块为元素的有序字典也可以作为传入参数。
# nn.Sequential is an ordered container: sub-modules run in the order they are
# passed to the constructor. An OrderedDict of modules is also accepted, which
# additionally gives each sub-module an explicit name.
from collections import OrderedDict  # fix: was used below but never imported

import torch.nn as nn

# Example of using Sequential (positional: sub-modules are auto-named "0", "1", ...)
model = nn.Sequential(
    nn.Conv2d(1, 20, 5),
    nn.ReLU(inplace=True),
    nn.Conv2d(20, 64, 5),
    nn.ReLU()
)

# Example of using Sequential with OrderedDict (explicitly named sub-modules)
model = nn.Sequential(OrderedDict([
    ('conv1', nn.Conv2d(1, 20, 5)),
    ('relu1', nn.ReLU(inplace=True)),
    ('conv2', nn.Conv2d(20, 64, 5)),
    ('relu2', nn.ReLU())
]))
放到一个网络中,通过Sequential将网络层和激活函数结合起来,输出激活后的网络节点。
import torch.nn as nn
class Net(nn.Module):
    """A small multilayer perceptron: two ReLU-activated hidden layers
    followed by a linear output layer.

    Args:
        in_dim: size of each input sample.
        n_hidden_1: width of the first hidden layer.
        n_hidden_2: width of the second hidden layer.
        out_dim: size of each output sample.
    """

    def __init__(self, in_dim, n_hidden_1, n_hidden_2, out_dim):
        super().__init__()
        # Bundle layers and activations into one Sequential; the final
        # Linear has no activation so the network emits raw scores.
        self.layer = nn.Sequential(
            nn.Linear(in_dim, n_hidden_1),
            nn.ReLU(True),
            nn.Linear(n_hidden_1, n_hidden_2),
            nn.ReLU(True),
            nn.Linear(n_hidden_2, out_dim),
        )

    def forward(self, x):
        # Delegate straight to the Sequential pipeline.
        return self.layer(x)
if __name__ == '__main__':
    # Smoke test: build a minimal network (every dimension = 1) and print
    # its module structure.
    in_dim, n_hidden_1, n_hidden_2, out_dim = 1, 1, 1, 1
    model = Net(in_dim, n_hidden_1, n_hidden_2, out_dim)
    print(model)
输出:
Net(
(layer): Sequential(
(0): Linear(in_features=1, out_features=1, bias=True)
(1): ReLU(inplace=True)
(2): Linear(in_features=1, out_features=1, bias=True)
(3): ReLU(inplace=True)
(4): Linear(in_features=1, out_features=1, bias=True)
)
)
感谢链接:
https://blog.csdn.net/dss_dssssd/article/details/82980222