Python implementation
'''
Input to the layer (scores): (z1, z2, z3)
Softmax output:              (y1, y2, y3)
Label (one-hot):             (t1, t2, t3)
Backpropagated gradient:     (y1 - t1, y2 - t2, y3 - t3)
'''
import numpy as np


def softmax(x):
    # Numerically stable softmax: subtracting the per-row max avoids overflow in exp.
    x = x - np.max(x, axis=-1, keepdims=True)
    exp_x = np.exp(x)
    return exp_x / np.sum(exp_x, axis=-1, keepdims=True)


class SoftmaxWithEntropyLoss(object):
    def __init__(self):
        self.loss = None
        self.y = None  # softmax output
        self.t = None  # teacher labels

    def forward(self, x, t):
        self.t = t
        self.y = softmax(x)
        self.loss = cross_entropy_error(self.y, self.t)
        return self.loss

    def backward(self, dout=1):
        batch_size = self.t.shape[0]
        if self.t.size == self.y.size:
            # One-hot labels: the gradient is simply (y - t), averaged over the batch.
            dx = (self.y - self.t) / batch_size
        else:
            # Integer labels: subtract 1 at the position of the correct class.
            dx = self.y.copy()
            dx[np.arange(batch_size), self.t] -= 1
            dx = dx / batch_size
        return dx


def cross_entropy_error(y, t):
    if y.ndim == 1:
        t = t.reshape(1, t.size)
        y = y.reshape(1, y.size)
    # If t is one-hot, convert it to integer class indices.
    if t.size == y.size:
        t = t.argmax(axis=1)
    batch_size = y.shape[0]
    # The 1e-7 term guards against log(0).
    return -np.sum(np.log(y[np.arange(batch_size), t] + 1e-7)) / batch_size
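As a quick sanity check, here is a minimal usage sketch with made-up scores and one-hot labels (the values are illustrative, not from the original text); the gradient returned by backward should equal (y - t) divided by the batch size, as the docstring states.

import numpy as np

# Hypothetical mini-batch: 2 samples, 3 classes, one-hot labels.
x = np.array([[2.0, 1.0, 0.1],
              [0.5, 2.5, 0.3]])
t = np.array([[1, 0, 0],
              [0, 1, 0]])

layer = SoftmaxWithEntropyLoss()
loss = layer.forward(x, t)
dx = layer.backward()

print(loss)                                 # scalar cross-entropy loss
print(dx)                                   # gradient with respect to the scores
print(np.allclose(dx, (layer.y - t) / 2))   # True: dx == (y - t) / batch_size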
References
https://blog.csdn.net/qian99/article/details/78046329
《深度學習入門:基於Python的理論與實現》