为什么这两个代码段不等价: 第 1 部分:创建 2 层模型。
class FNNModule(nn.Module):
def __init__(self, input_dim, output_dim, hidden_dim1, hidden_dim2, non_linear_function):
super().__init__()
self.hidden1 = nn.Linear(input_dim, hidden_dim1)
self.hidden2 = nn.Linear(hidden_dim1, hidden_dim2)
self.non_linear_function = non_linear_function()
self.final_linear = nn.Linear(hidden_dim2, output_dim)
def forward(self, x):
out = self.hidden1(x)
out = self.non_linear_function(out)
out = self.hidden2(x)
out = self.non_linear_function(out)
out = self.final_linear(out)
return out
第二部分:创建相同的模型但更改代码,其中hidden_layers是变量:
class FNNModuleVar(nn.Module):
def __init__(self, input_dim, output_dim, hidden_dim_array = [], non_linear_function_array=[]):
super().__init__()
self.linear_functions = []
self.non_linear_functions = [x() for x in non_linear_function_array]
self.hidden_layers = len(hidden_dim_array)
for l in range(self.hidden_layers):
self.linear_functions.append(nn.Linear(input_dim, hidden_dim_array[l]))
input_dim = hidden_dim_array[l]
self.final_linear = nn.Linear(input_dim, output_dim)
def forward(self, x):
out = x
for i in range(self.hidden_layers):
out = self.linear_functions[i](out)
out = self.non_linear_functions[i](out)
out = self.final_linear(x)
return out
modelVar = FNNModuleVar(input_dim, output_dim, [100, 50], [nn.Tanh, nn.Tanh])
model = FNNModule(input_dim, output_dim, 100, 50, nn.Tanh)
当我尝试迭代 modelVar.parameters()
和 model.parameters()
时,我发现我有非常不同的模型。
我在 modelVar
中做错了什么?
最佳答案
这些模块的调用方式与您期望的方式相同,只是它们对模块不可见。为了使它们可见,您可以将它们包装在 nn.ModuleList
中,如下所示:
class FNNModuleVar(nn.Module):
def __init__(self, input_dim, output_dim, hidden_dim_array = [], non_linear_function_array=[]):
super().__init__()
self.linear_functions = []
self.non_linear_functions = [x() for x in non_linear_function_array]
self.hidden_layers = len(hidden_dim_array)
for l in range(self.hidden_layers):
self.linear_functions.append(nn.Linear(input_dim, hidden_dim_array[l]))
input_dim = hidden_dim_array[l]
self.linear_functions = nn.ModuleList(self.linear_functions)
self.final_linear = nn.Linear(input_dim, output_dim)
def forward(self, x):
out = x
for i in range(self.hidden_layers):
out = self.linear_functions[i](out)
out = self.non_linear_functions[i](out)
out = self.final_linear(out)
return out
现在打印模型将产生:
FNNModule(
(hidden1): Linear(in_features=50, out_features=100, bias=True)
(hidden2): Linear(in_features=100, out_features=50, bias=True)
(non_linear_function): Tanh()
(final_linear): Linear(in_features=50, out_features=100, bias=True)
)
FNNModuleVar(
(linear_functions): ModuleList(
(0): Linear(in_features=50, out_features=100, bias=True)
(1): Linear(in_features=100, out_features=50, bias=True)
)
(final_linear): Linear(in_features=50, out_features=100, bias=True)
)
更多详细信息:https://pytorch.org/docs/stable/nn.html#torch.nn.ModuleList
关于pytorch - 在 Pytorch 中创建具有动态数量隐藏层的前馈神经网络模型,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/58585892/