Learning Keras: First Example - Training on the MNIST Dataset


import numpy as np
import gzip
import struct
import keras as ks
import logging
from keras.layers import Dense, Activation, Flatten, Convolution2D
from keras.utils import np_utils


def read_data(label_url, image_url):
    # Parse the gzipped MNIST IDX files: the label file first, then the matching images.
    with gzip.open(label_url) as flbl:
        magic, num = struct.unpack(">II", flbl.read(8))
        label = np.frombuffer(flbl.read(), dtype=np.int8)
    with gzip.open(image_url, 'rb') as fimg:
        magic, num, rows, cols = struct.unpack(">IIII", fimg.read(16))
        image = np.frombuffer(fimg.read(), dtype=np.uint8).reshape(len(label), rows, cols)
    return (label, image)


(train_lbl, train_img) = read_data('mnist/train-labels-idx1-ubyte.gz','mnist/train-images-idx3-ubyte.gz')
(val_lbl, val_img) = read_data('mnist/t10k-labels-idx1-ubyte.gz','mnist/t10k-images-idx3-ubyte.gz')


def to4d(img):
    # Despite the name, this flattens each 28x28 image into a 784-vector and scales pixels to [0, 1].
    return img.reshape(img.shape[0], 784).astype(np.float32) / 255


train_img = to4d(train_img)
val_img = to4d(val_img)
# important: Keras needs one-hot (categorical) labels for categorical_crossentropy
train_LBL = np_utils.to_categorical(train_lbl, nb_classes=10)
val_LBL = np_utils.to_categorical(val_lbl, nb_classes=10)

# A simple fully connected network: 784 -> 128 -> 64 -> 10
model = ks.models.Sequential()
model.add(Dense(128, input_dim=784))
model.add(Activation('relu'))
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dense(10))
model.add(Activation('softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adadelta', metrics=['accuracy'])
model.fit(x=train_img, y=train_LBL, batch_size=100, nb_epoch=10, verbose=1,
          validation_data=(val_img, val_LBL))

The source code is shown above ↑

After downloading the MNIST dataset, save the files in a folder named mnist. Copy the source code above into main.py and it should run; main.py needs to sit outside the mnist folder. The most important part is the two lines below the #important comment: Keras apparently requires the labels to be converted to categorical (one-hot) form before they can be used.
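To make that last point concrete, here is a minimal sketch (plain NumPy with made-up labels, not part of the original script) of the one-hot encoding that np_utils.to_categorical performs: each integer label becomes a length-10 vector with a single 1 at the label's index, which is the format categorical_crossentropy expects.

import numpy as np

labels = np.array([3, 0, 7])   # hypothetical raw digit labels
one_hot = np.eye(10)[labels]   # same shape and values as to_categorical(labels, 10)
print(one_hot[0])              # [0. 0. 0. 1. 0. 0. 0. 0. 0. 0.] -- the 1 marks class 3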

 

