# Demo: softmax is applied along a chosen dimension; the values along that dimension sum to 1.
class Fc(nn.Module):
    """A single fully-connected layer followed by a softmax.

    Maps the last dimension from ``in_channel`` to ``out_channel`` and
    normalizes it into a probability distribution (sums to 1 along that
    dimension).
    """

    def __init__(self, in_channel, out_channel):
        super(Fc, self).__init__()
        self.in_channel = in_channel
        self.out_channel = out_channel
        self.fc = nn.Linear(self.in_channel, self.out_channel)

    def forward(self, x):
        """Apply the linear layer, then softmax over the channel dimension.

        Args:
            x: tensor whose last dimension has size ``in_channel``
               (e.g. shape (T, N, C)).

        Returns:
            Tensor of the same leading shape with last dimension
            ``out_channel``; values along the last dimension sum to 1.
        """
        y = self.fc(x)
        # dim=-1 generalizes the original dim=2: identical for the
        # documented 3-D (T, N, C) input, but also correct for any rank.
        y = F.softmax(y, dim=-1)
        return y
if __name__ == "__main__":
    T, N, C = 1, 4, 64
    x = torch.randn((T, N, C))
    model = Fc(C, 60)
    y = model(x)
    # Values along the softmax dimension form a probability distribution,
    # so each (t, n) slice sums to 1. Use tensor.sum() instead of a
    # manual Python accumulation loop.
    total = y[0, 0, :].sum()
    print(total)
    # The documented transcript includes the output shape; the original
    # code never printed it — add the missing statement.
    print(y.shape)
Output:
tensor(1.0000, grad_fn=<AddBackward0>)
torch.Size([1, 4, 60])