nll_loss



'''
torch.nn class          torch.nn.functional (F) equivalent
CrossEntropyLoss        cross_entropy
LogSoftmax              log_softmax
NLLLoss                 nll_loss

cross_entropy(input, target) == nll_loss(log_softmax(input, dim=1), target);
the nn.Module versions are checked against the functional calls further below.
'''


import torch
import torch.nn.functional as F

input = torch.randn(3, 4)        # logits: 3 samples, 4 classes
label = torch.tensor([0, 2, 3])  # target class per sample; [0, 2, 3] matches the printed output below
print("input", input)

# softmax along dim=1: exp(x) divided by the row sum, so each row sums to 1
softmax = F.softmax(input, dim=1)
print("softmax", softmax)

# log_softmax = log(softmax(x)), computed in one numerically stable step
log_softmax = F.log_softmax(input, dim=1)
print("log_softmax", log_softmax)


# nll_loss picks log_softmax[i, label[i]] for each row i, negates it,
# and averages over the batch (reduction='mean' by default)
loss_nll = F.nll_loss(log_softmax, label)
print("loss_nll", loss_nll)

# cross_entropy fuses log_softmax + nll_loss into a single call
loss_cross = F.cross_entropy(input, label)
print("loss_cross", loss_cross)

'''
Output of the run above:

input tensor([[ 0.0363,  0.1419,  2.1639,  1.5429],
        [-0.6606, -0.0991, -0.6160, -2.4418],
        [-1.1279, -1.5333, -0.2142,  0.8669]])
softmax tensor([[0.0666, 0.0740, 0.5590, 0.3004],
        [0.2521, 0.4419, 0.2636, 0.0425],
        [0.0869, 0.0579, 0.2166, 0.6386]])
log_softmax tensor([[-2.7092, -2.6036, -0.5816, -1.2026],
        [-1.3781, -0.8166, -1.3335, -3.1592],
        [-2.4433, -2.8488, -1.5296, -0.4485]])
loss_nll tensor(1.4971)
loss_cross tensor(1.4971)


Worked calculation (all operations along dim=1, i.e. per row):

input
 0.0363     0.1419     2.1639     1.5429
-0.6606    -0.0991    -0.6160    -2.4418
-1.1279    -1.5333    -0.2142     0.8669

exp                                            row sum
 1.0370     1.1525     8.7050     4.6781       15.57258663
 0.5165     0.9057     0.5401     0.0870        2.049298083
 0.3237     0.2158     0.8072     2.3795        3.726244435

softmax = exp / row sum
 0.0666     0.0740     0.5590     0.3004
 0.2521     0.4419     0.2636     0.0425
 0.0869     0.0579     0.2166     0.6386

log_softmax = ln(softmax)
-2.7092    -2.6036    -0.5816    -1.2026
-1.3781    -0.8166    -1.3335    -3.1593
-2.4433    -2.8487    -1.5296    -0.4485

Step by step for the first entry:
 1.0370 = exp(0.0363)
 0.0666 = 1.0370 / 15.57258663
-2.7092 = ln(0.0666)

nll_loss takes the log_softmax entry at each target index (labels [0, 2, 3]),
negates, and averages:
 1.497070103 = -(-2.7092 - 1.3335 - 0.4485) / 3

'''
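
# The hand calculation above, reproduced with plain Python (a sketch that
# starts from the printed input values, so tiny rounding differences are
# expected versus the exact tensor).
import math

rows = [[ 0.0363,  0.1419,  2.1639,  1.5429],
        [-0.6606, -0.0991, -0.6160, -2.4418],
        [-1.1279, -1.5333, -0.2142,  0.8669]]
targets = [0, 2, 3]

total = 0.0
for row, t in zip(rows, targets):
    row_sum = sum(math.exp(x) for x in row)          # e.g. 15.5726 for row 0
    log_prob = math.log(math.exp(row[t]) / row_sum)  # log_softmax[i, t]
    total += -log_prob                               # negative log-likelihood
print("manual loss", total / len(rows))              # ~1.4971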

  

