TensorFlow (3): Implementing Linear Regression with an L2 Loss Function in TensorFlow
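
This post fits a one-variable linear model y = A*x + b to the iris data (petal width predicts sepal length) by minimizing the L2 loss with mini-batch gradient descent. For a batch of N points the loss being minimized is

$$\mathrm{loss}=\frac{1}{N}\sum_{i=1}^{N}\bigl(y_i-(A x_i+b)\bigr)^2$$

which is exactly what tf.reduce_mean(tf.square(y_data-model_out)) computes in the script below.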


import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
sess=tf.Session()
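# Note: this script uses the TensorFlow 1.x graph/session API. On TensorFlow 2.x
# (an assumption about your environment), the same code runs if the import above
# is replaced with:
#   import tensorflow.compat.v1 as tf
#   tf.disable_v2_behavior()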
# Load the iris dataset
iris=datasets.load_iris()
# x: petal width (column 3), y: sepal length (column 0)
x_vals=np.array([x[3] for x in iris.data])
y_vals=np.array([x[0] for x in iris.data])

learning_rate=0.05
batch_size=25

# Placeholders for batches of x (petal width) and y (sepal length)
x_data=tf.placeholder(shape=[None,1],dtype=tf.float32)
y_data=tf.placeholder(shape=[None,1],dtype=tf.float32)

# Model parameters: slope A and intercept b, initialized from a standard normal
A=tf.Variable(tf.random_normal(shape=[1,1]))
b=tf.Variable(tf.random_normal(shape=[1,1]))

# Linear model y = Ax + b: matmul(x_data, A) has shape (None, 1); adding b keeps shape (None, 1)
model_out=tf.add(tf.matmul(x_data,A),b)
# Declare the L2 (squared-error) loss
loss=tf.reduce_mean(tf.square(y_data-model_out))
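# For comparison (a sketch, not part of the original script): the corresponding
# L1 loss would average absolute rather than squared errors:
#   loss_l1=tf.reduce_mean(tf.abs(y_data-model_out))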

# Initialize variables
init=tf.global_variables_initializer()
sess.run(init)

# Gradient descent optimizer
my_opt=tf.train.GradientDescentOptimizer(learning_rate)
train_step=my_opt.minimize(loss)
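# GradientDescentOptimizer implements plain SGD: each train_step moves A and b
# against the gradient of the batch loss, scaled by learning_rate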

# Training loop: sample a random batch each iteration
loss_rec=[]
for i in range(100):
    rand_index=np.random.choice(len(x_vals),size=batch_size)
    # Reshape the sampled 1-D slices into (batch_size, 1) columns to match the placeholders
    rand_x=np.transpose([x_vals[rand_index]])
    rand_y=np.transpose([y_vals[rand_index]])

    # Run one training step, then evaluate the loss on the same batch
    sess.run(train_step,feed_dict={x_data:rand_x,y_data:rand_y})
    temp_loss =sess.run(loss,feed_dict={x_data:rand_x,y_data:rand_y})

    # Record the loss
    loss_rec.append(temp_loss)
    # Print progress every 25 steps
    if (i+1)%25==0:
        print('Step: %d A=%s b=%s'%(i+1,str(sess.run(A)),str(sess.run(b))))
        print('Loss: %s'%str(temp_loss))
# Extract the fitted coefficients
[slope]=sess.run(A)
print(slope)
[intercept]=sess.run(b)
# Compute the fitted line over all x values
best_fit=[]
for i in x_vals:
    best_fit.append(slope*i+intercept)
# Plot the data points and the fitted line
plt.plot(x_vals,y_vals,'o',label='Data')
plt.plot(x_vals,best_fit,'r-',label='Best fit line',linewidth=3)
plt.legend(loc='upper left')

plt.xlabel('Petal Width')
plt.ylabel('Sepal Length')
plt.show()
# Plot the L2 loss over the training iterations
plt.plot(loss_rec,'k-',label='Loss')
plt.title('L2 Loss per Generation')
plt.xlabel('Generation')
plt.ylabel('L2 Loss')
plt.show()
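
As a quick sanity check (a sketch, not part of the original post), the SGD estimates can be compared against NumPy's closed-form least-squares fit; the two should agree closely after enough iterations:

# Compare the SGD solution with the closed-form least-squares fit
ls_slope,ls_intercept=np.polyfit(x_vals,y_vals,1)
print('SGD fit:       slope=%.3f intercept=%.3f'%(float(slope),float(intercept)))
print('Least squares: slope=%.3f intercept=%.3f'%(ls_slope,ls_intercept))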

 

