I only recently started learning machine learning, and even something as basic as linear regression has been quite a struggle.
I have just gotten to the gradient descent algorithm. Plenty of experts have already written far better breakdowns of the algorithm itself, so I will simply post the code I wrote based on my own understanding.
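Just to keep things straight in my own head, this is the cost function and the batch update rule that the code below is meant to implement, for the hypothesis $h(x) = w_0 + w_1 x$ (standard notation, nothing specific to my code):

$$
J(w_0, w_1) = \frac{1}{2m}\sum_{i=1}^{m}\bigl(w_0 + w_1 x_i - y_i\bigr)^2
$$

$$
w_0 := w_0 - \frac{\alpha}{m}\sum_{i=1}^{m}\bigl(w_0 + w_1 x_i - y_i\bigr),\qquad
w_1 := w_1 - \frac{\alpha}{m}\sum_{i=1}^{m}\bigl(w_0 + w_1 x_i - y_i\bigr)\,x_i
$$

The stochastic and mini-batch versions use the same gradient, just computed over a single sample or a small window of samples instead of over all $m$ of them.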
The data and the imported libraries that are needed:
```python
import matplotlib.pyplot as plt
from sklearn import linear_model
import numpy as np

# Training data: 15 (x, y) pairs, plotted as black squares.
x = [[400], [450], [484], [500], [510], [525], [540], [549], [558], [590], [610], [640], [680], [750], [900]]
y = [[80], [89], [92], [102], [121], [160], [180], [189], [199], [203], [247], [250], [259], [289], [356]]
plt.plot(x, y, 'ks')
```

Next come the three gradient-descent variants (batch, stochastic, and mini-batch), followed by a comparison against sklearn's least-squares fit:

```python
def pi_tidy(x, y):  # batch gradient descent
    alpha = 0.000002
    m = len(x)
    w0 = 0
    w1 = 0
    count = 0
    error2 = 0
    while 1:
        count = count + 1
        w = [0, 0]
        for i in range(m):  # accumulate the gradient over all m samples
            w[0] += w0 + w1 * x[i][0] - y[i][0]
            w[1] += (w0 + w1 * x[i][0] - y[i][0]) * x[i][0]
        w0 = w0 - alpha / m * w[0]
        w1 = w1 - alpha / m * w[1]
        error1 = 0
        for i in range(m):  # mean squared error after the update
            error1 += 1 / (2 * m) * ((w0 + w1 * x[i][0] - y[i][0]) ** 2)
        if abs(error2 - error1) < 0.01:  # converged
            break
        if count > 50:  # iteration cap
            break
        error2 = error1
        print('w0:%f,w1:%f,error:%f' % (w0, w1, error1))
    print('------------------------------------')
    X = [400, 900]
    Y = [w0 + w1 * 400, w0 + w1 * 900]
    plt.plot(X, Y, 'b-', label='pi_tidu')


def rand_tidu(x, y):  # stochastic gradient descent
    alpha = 0.000001
    m = len(x)
    count = 0
    error2 = 0
    w0 = 0
    w1 = 0
    while 1:
        finish = 0
        w = [0, 0]
        for i in range(m):  # update the weights after every single sample
            count += 1
            w[0] = w0 + w1 * x[i][0] - y[i][0]
            w[1] = (w0 + w1 * x[i][0] - y[i][0]) * x[i][0]
            w0 = w0 - alpha * w[0]
            w1 = w1 - alpha * w[1]
            error1 = abs(w0 + w1 * x[i][0] - y[i][0])
            if abs(error2 - error1) < 1:  # convergence tolerance
                finish = 1
                break
            if count > 15:  # iteration cap
                finish = 1
                break
            error2 = error1
            print('w0:%f,w1:%f,error:%f' % (w0, w1, error1))
        if finish == 1:
            break
    print('------------------------------------')
    X = [400, 900]
    Y = [w0 + w1 * 400, w0 + w1 * 900]
    plt.plot(X, Y, 'r-', label='rand_tidu')


def smallpi_tidu(x, y):  # mini-batch gradient descent
    count = 0
    alpha = 0.000001
    m = len(x)
    error2 = 0
    w0 = 0
    w1 = 0
    tend = 2  # mini-batch size
    while 1:
        finish = 0
        for i in range(0, m - tend):  # slide a window of `tend` samples over the data
            w = [0, 0]
            count += 1
            for j in range(i, i + tend):  # accumulate the gradient over the mini-batch
                w[0] += w0 + w1 * x[j][0] - y[j][0]
                w[1] += (w0 + w1 * x[j][0] - y[j][0]) * x[j][0]
            w0 = w0 - alpha / tend * w[0]
            w1 = w1 - alpha / tend * w[1]
            error1 = 0
            for j in range(i, i + tend):  # mean squared error on the mini-batch
                error1 += 1 / (2 * tend) * ((w0 + w1 * x[j][0] - y[j][0]) ** 2)
            if abs(error2 - error1) < 1:  # converged
                finish = 1
                break
            if count > m - tend:  # iteration cap
                finish = 1
                break
            error2 = error1
            print('w0:%f,w1:%f,error:%f' % (w0, w1, error1))
        if finish == 1:
            break
    print('------------------------------------')
    X = [400, 900]
    Y = [w0 + w1 * 400, w0 + w1 * 900]
    plt.plot(X, Y, 'g-', label='small_tidu')


pi_tidy(x, y)
rand_tidu(x, y)
smallpi_tidu(x, y)

# For comparison: sklearn's linear model (ordinary least squares).
clf = linear_model.LinearRegression()
clf.fit(x, y)
x2 = [[400], [750], [900]]
y2 = clf.predict(x2)
plt.plot(x2, y2, 'y-', label='min_*2')
plt.legend()
plt.show()
```
Screenshot of the output results:
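A side note: numpy is imported in the code above but never actually used. For anyone curious, the batch update can also be written in a few vectorized lines. This is only a rough sketch of the same rule (same learning rate and 50-iteration cap as pi_tidy, but without the early-stopping check), and the function name pi_tidu_np is just something I made up for illustration:

```python
import numpy as np

def pi_tidu_np(x, y, alpha=0.000002, max_iter=50):
    """Vectorized batch gradient descent for h(x) = w0 + w1*x (illustrative sketch)."""
    xs = np.array(x, dtype=float).ravel()    # shape (m,)
    ys = np.array(y, dtype=float).ravel()    # shape (m,)
    m = len(xs)
    w0, w1 = 0.0, 0.0
    for _ in range(max_iter):
        err = w0 + w1 * xs - ys              # residuals for all samples at once
        w0 -= alpha / m * err.sum()          # gradient step for the intercept
        w1 -= alpha / m * (err * xs).sum()   # gradient step for the slope
    return w0, w1

# Example usage with the x, y lists defined above:
# w0, w1 = pi_tidu_np(x, y)
# print('w0:%f, w1:%f' % (w0, w1))
```

It follows the same update as the loop version, just without the per-sample Python loop.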