案例1
# Example 1: fit y = sin(x) with a small fully-connected network.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
import numpy as np
import matplotlib.pyplot as plt

# --- construct model: 1 input -> 100 relu -> 50 relu -> 1 tanh ---
# tanh on the output matches sin's [-1, 1] range.
models = Sequential()
models.add(Dense(100, kernel_initializer='uniform', activation='relu', input_dim=1))
models.add(Dense(50, activation='relu'))
models.add(Dense(1, activation='tanh'))
# BUG FIX: the original constructed an Adam optimizer and then compiled with
# 'rmsprop', leaving it unused; it is now actually passed to compile().
# The 'accuracy' metric was dropped — it is meaningless for MSE regression.
adam_optimizer = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, decay=0.00001)
models.compile(optimizer=adam_optimizer, loss='mse')

# --- training data: noisy sine sampled on [-2*pi, 2*pi] ---
dataX = np.linspace(-2 * np.pi, 2 * np.pi, 1000)
dataX = np.reshape(dataX, [len(dataX), 1])
noise = np.random.rand(len(dataX), 1) * 0.1
dataY = np.sin(dataX) + noise

models.fit(dataX, dataY, epochs=100, batch_size=10, shuffle=True, verbose=1)
predictY = models.predict(dataX, batch_size=1)
score = models.evaluate(dataX, dataY, batch_size=10)
print(score)

# --- plot true data (blue line) vs predictions (red dots) ---
fig, ax = plt.subplots()
ax.plot(dataX, dataY, 'b-')
ax.plot(dataX, predictY, 'r.')
ax.set(xlabel="x", ylabel="y=f(x)", title="y = sin(x),red:predict data,blue:true data")
ax.grid(True)

plt.show()
案例2:
# Example 2: fit y = sin(x) on [1, 20] with min-max-scaled inputs and targets.
import numpy as np

import random
from sklearn.preprocessing import MinMaxScaler
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.optimizers import Adam, SGD

X = np.linspace(1, 20, 1000)
X = X[:, np.newaxis]                       # shape (1000, 1) as Keras expects
y = np.sin(X) + np.random.normal(0, 0.08, (1000, 1))
# BUG FIX: use a separate scaler per array — the original re-fit a single
# scaler on X after y, silently discarding y's scaling parameters.
y_scaler = MinMaxScaler((0, 1))
x_scaler = MinMaxScaler((0, 1))
y_train = y_scaler.fit_transform(y)
x_train = x_scaler.fit_transform(X)

model1 = Sequential()
model1.add(Dense(1000, input_dim=1))
model1.add(Activation('relu'))
model1.add(Dense(1))
model1.add(Activation('sigmoid'))          # sigmoid matches y scaled into (0, 1)
adam = Adam(lr=0.001)
# BUG FIX: decay was written as 12-5 (which evaluates to 7); 1e-5 was intended.
sgd = SGD(lr=0.1, decay=1e-5, momentum=0.9)
model1.compile(optimizer=adam, loss='mse')
print('-------------training--------------')
# nb_epoch is the deprecated Keras 1 spelling; epochs is the current keyword.
model1.fit(x_train, y_train, batch_size=12, epochs=500, shuffle=True)
Y_train_pred = model1.predict(x_train)
plt.scatter(x_train, y_train)
plt.plot(x_train, Y_train_pred, 'r-')
plt.show()
案例3
# Example 3: adding the activation via the Dense layer itself (method 2):
#   model.add(Dense(units=10, input_dim=1, activation='...'))
# BUG FIX: the original was missing the Sequential / numpy / pyplot imports and
# never defined x_data / y_data; data generation below mirrors Example 4.
import numpy as np
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.optimizers import SGD
from keras.layers import Dense, Activation

np.random.seed(0)
x_data = np.linspace(-0.5, 0.5, 200)
noise = np.random.normal(0, 0.02, x_data.shape)
y_data = np.square(x_data) + noise

# Build a Sequential model: 1 input neuron -> 10 hidden (relu) -> 1 output (relu).
# units is the output dimension, input_dim the input dimension.
model = Sequential()
model.add(Dense(units=10, input_dim=1, activation='relu'))  # non-linear activation
model.add(Dense(units=1, activation='relu'))  # input_dim defaults to previous layer's 10

# Optimizer with a custom learning rate.
defsgd = SGD(lr=0.3)

# optimizer selects the optimizer, loss sets the objective function.
model.compile(optimizer=defsgd, loss='mse')

# Train one batch at a time, printing the cost every 500 steps.
for step in range(3001):
    cost = model.train_on_batch(x_data, y_data)
    if step % 500 == 0:
        print('cost:', cost)

# Print the weights and bias of the first (and only hidden) layer.
W, b = model.layers[0].get_weights()
print('W:', W, 'b:', b)

# Feed x_data through the network to get predictions y_pred.
y_pred = model.predict(x_data)

plt.scatter(x_data, y_data)

plt.plot(x_data, y_pred, 'r-', lw=3)
plt.show()
案例4:
# Example 4: adding the activation as a separate layer (method 1):
#   model.add(Activation('...'))
# BUG FIX: the original was missing the Sequential and pyplot imports.
import numpy as np
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.optimizers import SGD
from keras.layers import Dense, Activation

np.random.seed(0)
x_data = np.linspace(-0.5, 0.5, 200)
noise = np.random.normal(0, 0.02, x_data.shape)
y_data = np.square(x_data) + noise

# Build a Sequential model: 1 input -> 10 hidden -> 1 output, tanh after each Dense.
# units is the output dimension, input_dim the input dimension.
model = Sequential()
model.add(Dense(units=10, input_dim=1))
model.add(Activation('tanh'))   # non-linear activation
model.add(Dense(units=1))       # input_dim defaults to previous layer's 10
model.add(Activation('tanh'))

# Optimizer with a custom learning rate.
defsgd = SGD(lr=0.3)

# optimizer selects the optimizer, loss sets the objective function.
model.compile(optimizer=defsgd, loss='mse')

# Train one batch at a time, printing the cost every 500 steps.
for step in range(3001):
    cost = model.train_on_batch(x_data, y_data)
    if step % 500 == 0:
        print('cost:', cost)

# Print the weights and bias of the first layer.
W, b = model.layers[0].get_weights()
print('W:', W, 'b:', b)

# Feed x_data through the network to get predictions y_pred.
y_pred = model.predict(x_data)

plt.scatter(x_data, y_data)

plt.plot(x_data, y_pred, 'r-', lw=3)
plt.show()
案例5
# Example 5: approximate a noisy sine curve with two sigmoid hidden layers.
import numpy as np
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam

np.random.seed(0)
points = 500
X = np.linspace(-3, 3, points)
y = np.sin(X) + np.random.uniform(-0.5, 0.5, points)

# Architecture: 1 input -> 50 (sigmoid) -> 30 (sigmoid) -> 1 linear output.
model = Sequential([
    Dense(50, activation='sigmoid', input_dim=1),
    Dense(30, activation='sigmoid'),
    Dense(1),
])
adam = Adam(lr=0.01)
model.compile(loss='mse', optimizer=adam)
model.fit(X, y, epochs=50)

# Overlay predictions (red dots) on the noisy samples.
predictions = model.predict(X)
plt.scatter(X, y)
plt.plot(X, predictions, 'ro')
plt.show()
案例6:
%matplotlib inline import matplotlib.pyplot as plt import numpy as np x = list(np.arange(0,4,0.1)) #給3次多項式添加噪音 y = list(map(lambda val: val**3*3 + np.random.random()*20 , x) ) plt.scatter(x, y) #指明用3次多項式匹配 w = np.polyfit (x, y, 3) fn = np.poly1d(w) #打印適配出來的參數和函數 print(w) print(fn) plt.plot(x, fn(x))
案例7
1 %matplotlib inline
2 import matplotlib.pyplot as plt 3 from keras.datasets import mnist 4 from keras.models import Sequential 5 from keras.layers.core import Dense, Activation 6 from keras.layers.advanced_activations import LeakyReLU, PReLU 7 from keras.optimizers import SGD 8 9 x = list(np.arange(0,4,0.1)) 10 #給3次多項式添加噪音 11 y = list(map(lambda val: val**3*3 + np.random.random()*20 , x) ) 12 13 model = Sequential() 14 #神經元個數越多,效果會越好,收斂越快,太少的話難以收斂到所需曲線 15 model.add(Dense(100, input_shape=(1,))) 16 17 #Relu,得到的是一條橫線 18 #Tanh,稍稍好於Relu,但是擬合的不夠 19 #sigmoid, 只要神經元個數足夠(50+),訓練1000輪以上,就能達到比較好的效果 20 model.add(Activation('sigmoid')) 21 #model.add(LeakyReLU(alpha=0.01)) 22 #model.add(Dense(3)) 23 24 model.add(Dense(1)) 25 model.compile(optimizer="sgd", loss="mse") 26 model.fit(x, y, epochs=2000, verbose=0) 27 28 print(type(fn(3))) 29 print(fn(1)) 30 print(fn(3)) 31 32 plt.scatter(x, y) 33 plt.plot(x, model.predict(x))