pytorch 出现错误: expected a Variable argument, but got function

你好,我在运行代码时遇到了这个奇怪的错误。
class Residual(nn.Module):
    """Residual MLP block: returns ``X + linear2(f(linear1(f(X))))``.

    Each ``f`` is (optional BatchNorm) -> LeakyReLU -> Dropout.

    Args:
        dropout: dropout probability applied after each activation.
        shape: ``[in_features, hidden_features]``; the block's input and
            output feature dimension is ``shape[0]``.
        negative_slope: negative-input slope for LeakyReLU.
        BNflag: if True, apply BatchNorm1d before each activation.
    """

    def __init__(self, dropout, shape, negative_slope, BNflag=False):
        super(Residual, self).__init__()
        self.BNflag = BNflag
        self.linear1 = nn.Linear(shape[0], shape[1])
        self.linear2 = nn.Linear(shape[1], shape[0])
        # Pass the rate straight to nn.Dropout instead of first storing the
        # float in `self.dropout` and then overwriting it with the module.
        self.dropout = nn.Dropout(dropout)
        # BUG FIX: the original reused one BatchNorm1d(shape[0]) for both
        # normalizations, but after linear1 the feature dim is shape[1], so
        # the second BN call crashed whenever shape[0] != shape[1].
        self.batch_normlization = nn.BatchNorm1d(shape[0])
        self.batch_normlization2 = nn.BatchNorm1d(shape[1])
        self.leakyRelu = nn.LeakyReLU(negative_slope=negative_slope, inplace=False)

    def forward(self, X):
        x = X
        if self.BNflag:
            x = self.batch_normlization(x)
        x = self.leakyRelu(x)
        x = self.dropout(x)
        x = self.linear1(x)
        if self.BNflag:
            # Normalize over the hidden dimension produced by linear1.
            x = self.batch_normlization2(x)
        x = self.leakyRelu(x)
        x = self.dropout(x)
        x = self.linear2(x)
        # Residual (skip) connection back to the block input.
        return torch.add(x, X)

res = Residual(0.5,[100,200],0.2,False)
已邀请:

要回复问题,请先登录或注册。