1. Obtaining eigenvalues and eigenvectors with QR decomposition
Factor the matrix A as A = QR, where Q is an orthogonal matrix (orthonormal columns) and R is an upper triangular matrix. Iterating A_{k+1} = R_k Q_k, with A_k = Q_k R_k, keeps every A_k similar to A (since A_{k+1} = Q_k^T A_k Q_k); as k increases, A_k converges to an upper triangular matrix whose diagonal entries are the eigenvalues of A.
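As a quick sketch of the idea (using NumPy's built-in np.linalg.qr and a small symmetric matrix chosen only for illustration, not taken from the original text), the iteration can be written as:

import numpy as np

A = np.array([[2.0, 1.0], [1.0, 3.0]])   # assumed example matrix
Ak = A.copy()
for _ in range(50):
    Q, R = np.linalg.qr(Ak)   # A_k = Q_k R_k
    Ak = R @ Q                # A_{k+1} = R_k Q_k = Q_k^T A_k Q_k, similar to A_k
print(np.diag(Ak))            # approaches the eigenvalues of A
print(np.linalg.eigvals(A))   # NumPy's eigenvalues for comparison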
2. Singular Value Decomposition (SVD)
An m×n matrix A can be factored as A = UΣV*, where U is an m×m unitary matrix, Σ is a positive semi-definite m×n diagonal matrix, and V* (the conjugate transpose of V) is an n×n unitary matrix.
Multiplying A by its transpose gives a square matrix AᵀA whose eigenvectors are the right singular vectors v; from each v, the singular value σ = ||Av|| and the left singular vector u = Av/σ follow directly.
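For instance (a minimal sketch using np.linalg.eigh on AᵀA; the example matrix is an assumption for illustration only):

import numpy as np

A = np.array([[3.0, 1.0], [1.0, 3.0], [0.0, 2.0]])   # assumed example matrix
w, V = np.linalg.eigh(A.T @ A)       # columns of V: eigenvectors of A^T A = right singular vectors
for i in range(V.shape[1]):
    v = V[:, i]
    sigma = np.linalg.norm(A @ v)    # singular value: sigma = ||A v||
    u = (A @ v) / sigma              # left singular vector
    print(sigma, u)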
#coding:utf8
import numpy as np
np.set_printoptions(precision=4, suppress=True)

def householder_reflection(A):
    """QR decomposition via Householder reflections."""
    (r, c) = np.shape(A)
    Q = np.identity(r)
    R = np.copy(A)
    for cnt in range(r - 1):
        x = R[cnt:, cnt]
        e = np.zeros_like(x)
        e[0] = np.linalg.norm(x)
        u = x - e
        v = u / np.linalg.norm(u)
        Q_cnt = np.identity(r)
        Q_cnt[cnt:, cnt:] -= 2.0 * np.outer(v, v)
        R = np.dot(Q_cnt, R)   # R = H(n-1)*...*H(2)*H(1)*A
        Q = np.dot(Q, Q_cnt)   # Q = H(1)*H(2)*...*H(n-1); each H is symmetric and self-inverse
    return (Q, R)

def eig(A, epsilon=1e-10):
    '''Compute eigenvalues and eigenvectors by QR iteration.'''
    (q, r_) = householder_reflection(A)
    h = np.identity(A.shape[0])
    for i in range(50):
        B = np.dot(r_, q)               # A_{k+1} = R_k * Q_k, similar to A
        h = h.dot(q)                    # accumulate the Q's -> eigenvector matrix
        (q, r) = householder_reflection(B)
        if abs(r.trace() - r_.trace()) < epsilon:
            print("Converged in {} iterations!".format(i))
            break
        r_ = r
    return r, h

def svd(A):
    '''Singular value decomposition.'''
    n, m = A.shape
    svd_ = []
    k = min(n, m)
    v_ = eig(np.dot(A.T, A))[1]         # or np.linalg.eig(np.dot(A.T, A))[1]
    for i in range(k):
        v = v_.T[i]                     # i-th eigenvector of A^T*A = right singular vector
        u_ = np.dot(A, v)
        s = np.linalg.norm(u_)          # singular value sigma = ||A*v||
        u = u_ / s                      # left singular vector
        svd_.append((s, u, v))
    ss, us, vs = [np.array(x) for x in zip(*svd_)]
    return us.T, ss, vs

if __name__ == "__main__":
    mat = np.array([
        [2, 5, 3],
        [1, 2, 1],
        [4, 1, 1],
        [3, 5, 2],
        [5, 3, 1],
        [4, 5, 5],
        [2, 4, 2],
        [2, 2, 5],
    ], dtype='float64')
    u, s, v = svd(mat)
    print(u)
    print(s)
    print(v)
    print(np.dot(np.dot(u, np.diag(s)), v))   # reconstructs mat
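As a sanity check (a sketch assuming the svd function and mat from the listing above are already defined), the result can be compared against NumPy's built-in SVD; the singular values here are not sorted and the vectors may differ in sign:

u, s, v = svd(mat)
u_np, s_np, v_np = np.linalg.svd(mat, full_matrices=False)
print(np.sort(s)[::-1])   # our singular values, sorted descending
print(s_np)               # NumPy's singular values (sorted descending by convention)
print(np.allclose(np.dot(np.dot(u, np.diag(s)), v), mat))   # reconstruction matches mat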