from numpy import *
import random


def selectJrand(i, m):
    '''
    Randomly pick an index j in [0, m) that is not equal to i.
    '''
    j = i  # we want to select any j not equal to i
    while (j == i):
        j = int(random.uniform(0, m))  # keep drawing a random index in [0, m) until it differs from i
    return j  # return the chosen random index

def clipAlpha(aj, H, L):
    '''
    Clip function: keep aj within its bounds, no larger than H and no smaller than L.
    '''
    if aj > H:
        aj = H
    if L > aj:
        aj = L
    return aj

def smoSimple(dataMatIn, classLabels, C, toler, maxIter):
    '''
    Simplified SMO.
    Arguments: input data, class labels, the constant C, the error tolerance, and the maximum number of iterations.
    '''
    dataMatrix = mat(dataMatIn)              # convert the input data to a NumPy matrix
    labelMat = mat(classLabels).transpose()  # convert the labels to a column vector so each row lines up with a row of the data matrix
    m, n = shape(dataMatrix)                 # number of samples, number of features
    b = 0                                    # initialize the bias term b
    alphas = mat(zeros((m, 1)))              # initialize the alpha vector to all zeros, one entry per sample
    iter = 0                                 # count of consecutive passes without change; maxIter is the upper limit
    while (iter < maxIter):      # stop once the data set has been scanned maxIter times in a row without any change
        alphaPairsChanged = 0    # flag recording whether any alpha pair was optimized during this pass
        for i in range(m):       # loop over every sample i
            fXi = float(multiply(alphas, labelMat).T * (dataMatrix * dataMatrix[i, :].T)) + b  # predicted value for sample i
            Ei = fXi - float(labelMat[i])  # error on sample i; used to check whether the example violates the KKT conditions
            # check whether alpha_i can still be optimized (KKT conditions violated beyond the tolerance)
            if ((labelMat[i] * Ei < -toler) and (alphas[i] < C)) or ((labelMat[i] * Ei > toler) and (alphas[i] > 0)):
                j = selectJrand(i, m)  # randomly pick a second sample j
                fXj = float(multiply(alphas, labelMat).T * (dataMatrix * dataMatrix[j, :].T)) + b  # predicted value for sample j
                Ej = fXj - float(labelMat[j])  # error on sample j
                alphaIold = alphas[i].copy()   # copy so the old values get their own memory
                alphaJold = alphas[j].copy()
                # compute the bounds L and H that keep alpha_j inside the box constraint
                if (labelMat[i] != labelMat[j]):
                    L = max(0, alphas[j] - alphas[i])
                    H = min(C, C + alphas[j] - alphas[i])
                else:
                    L = max(0, alphas[j] + alphas[i] - C)
                    H = min(C, alphas[j] + alphas[i])
                if L == H: print("L==H"); continue
                # eta = 2*K_ij - K_ii - K_jj; it must be negative for the update below to be a maximum
                eta = 2.0 * dataMatrix[i, :] * dataMatrix[j, :].T - dataMatrix[i, :] * dataMatrix[i, :].T - dataMatrix[j, :] * dataMatrix[j, :].T
                if eta >= 0: print("eta>=0"); continue
                alphas[j] -= labelMat[j] * (Ei - Ej) / eta
                alphas[j] = clipAlpha(alphas[j], H, L)  # clip alpha_j so its new value stays within [L, H]
                # skip if the change in alpha_j is negligible
                if (abs(alphas[j] - alphaJold) < 0.00001): print("j not moving enough"); continue
                # update alpha_i by the same amount as alpha_j, but in the opposite direction
                alphas[i] += labelMat[j] * labelMat[i] * (alphaJold - alphas[j])
                # recompute the bias term b for each of the two updated alphas
                b1 = b - Ei - labelMat[i] * (alphas[i] - alphaIold) * dataMatrix[i, :] * dataMatrix[i, :].T - labelMat[j] * (alphas[j] - alphaJold) * dataMatrix[i, :] * dataMatrix[j, :].T
                b2 = b - Ej - labelMat[i] * (alphas[i] - alphaIold) * dataMatrix[i, :] * dataMatrix[j, :].T - labelMat[j] * (alphas[j] - alphaJold) * dataMatrix[j, :] * dataMatrix[j, :].T
                if (0 < alphas[i]) and (C > alphas[i]): b = b1
                elif (0 < alphas[j]) and (C > alphas[j]): b = b2
                else: b = (b1 + b2) / 2.0
                # record that an alpha pair has changed
                alphaPairsChanged += 1
                print("iter: %d i:%d, pairs changed %d" % (iter, i, alphaPairsChanged))
        # if nothing was updated during this pass, count it; otherwise reset the counter and keep optimizing
        if (alphaPairsChanged == 0): iter += 1
        else: iter = 0
        print("iteration number: %d" % iter)
    # only after maxIter consecutive passes with no update are the optimized alphas and b returned
    return b, alphas
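
'''
Minimal usage sketch (an assumption added for illustration, not part of the original code): build a
tiny linearly separable 2-D data set, run smoSimple on it, and recover the weight vector from the
alphas via w = sum_i alpha_i * y_i * x_i. The names demoData and demoLabels are hypothetical, and
C, toler, and maxIter are arbitrary example settings.
'''
if __name__ == '__main__':
    random.seed(0)
    # two small Gaussian blobs: class -1 around (0, 0) and class +1 around (4, 4)
    demoData = [[random.gauss(0, 0.5), random.gauss(0, 0.5)] for _ in range(10)] + \
               [[random.gauss(4, 0.5), random.gauss(4, 0.5)] for _ in range(10)]
    demoLabels = [-1.0] * 10 + [1.0] * 10
    b, alphas = smoSimple(demoData, demoLabels, C=0.6, toler=0.001, maxIter=40)
    # w = sum_i alpha_i * y_i * x_i; only the support vectors (alpha_i > 0) contribute
    X = mat(demoData); y = mat(demoLabels).transpose()
    w = X.T * multiply(alphas, y)
    print("b =", b)
    print("w =", w.T)
    # sanity check: the sign of x*w + b for the first sample should match its label (-1)
    print("score for sample 0: %f, label: %d" % (float(X[0, :] * w + b), demoLabels[0]))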