gpt4 book ai didi

python - 如何在循环中为 pytorch 神经网络中的层创建变量名称

转载 作者:行者123 更新时间:2023-12-03 23:21:02 24 4
gpt4 key购买 nike

我正在 PyTorch 中实现一个简单的前馈神经网络。但是我想知道是否有更好的方法来向网络添加数量灵活的层?也许可以在循环中为它们命名,但我听说那是不可能的?

目前我正在这样做

import torch
import torch.nn as nn
import torch.nn.functional as F

class Net(nn.Module):
    """Feed-forward network with a configurable number of hidden layers.

    Args:
        input_dim: size of each input sample's feature vector.
        output_dim: number of output units (classes).
        hidden_dim: sequence of hidden-layer widths; one ``nn.Linear`` is
            created per entry, so any depth works (the original hand-written
            ``fc1``..``fc8`` chain capped the depth at 8 layers).
    """

    def __init__(self, input_dim, output_dim, hidden_dim):
        super(Net, self).__init__()
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.hidden_dim = hidden_dim
        self.layer_dim = len(hidden_dim)
        # nn.ModuleList (not a plain Python list) registers every layer's
        # parameters with the module, so the optimizer can see them.
        self.hidden_layers = nn.ModuleList()
        current_dim = self.input_dim
        for hdim in hidden_dim:
            self.hidden_layers.append(nn.Linear(current_dim, hdim))
            current_dim = hdim
        # Final projection from the last hidden width to the output size.
        self.fcn = nn.Linear(current_dim, self.output_dim)

    def forward(self, x):
        # ReLU after each hidden layer.
        for layer in self.hidden_layers:
            x = F.relu(layer(x))
        # Pass dim explicitly: implicit-dim softmax is deprecated and can
        # pick the wrong axis (e.g. for 3-D inputs). dim=-1 normalizes over
        # the class dimension.
        x = F.softmax(self.fcn(x), dim=-1)
        return x

最佳答案

您可以将图层放在 ModuleList 容器中:

import torch
import torch.nn as nn
import torch.nn.functional as F

class Net(nn.Module):
    """Feed-forward network whose depth is driven by ``len(hidden_dim)``.

    Args:
        input_dim: size of each input sample's feature vector.
        output_dim: number of output units (classes).
        hidden_dim: iterable of hidden-layer widths, one Linear per entry.
    """

    def __init__(self, input_dim, output_dim, hidden_dim):
        super(Net, self).__init__()
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.hidden_dim = hidden_dim
        current_dim = input_dim
        # nn.ModuleList registers each layer's parameters with the module;
        # a plain Python list would hide them from the optimizer.
        self.layers = nn.ModuleList()
        for hdim in hidden_dim:
            self.layers.append(nn.Linear(current_dim, hdim))
            current_dim = hdim
        self.layers.append(nn.Linear(current_dim, output_dim))

    def forward(self, x):
        # ReLU on every layer except the last.
        for layer in self.layers[:-1]:
            x = F.relu(layer(x))
        # Pass dim explicitly: implicit-dim softmax is deprecated and can
        # pick the wrong axis for 3-D inputs; -1 is the class dimension.
        out = F.softmax(self.layers[-1](x), dim=-1)
        return out

对层使用 pytorch Containers 非常重要,而不仅仅是简单的 Python 列表。请参阅 this answer 以了解原因。

关于python - 如何在循环中为 pytorch 神经网络中的层创建变量名称,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/58097924/

24 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com