import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets, naive_bayes
from sklearn.model_selection import train_test_split

# Load the digits dataset bundled with scikit-learn
def load_data():
    '''
    Load a dataset for a classification problem. Here we use the digits dataset
    bundled with scikit-learn.
    '''
    digits = datasets.load_digits()
    return train_test_split(digits.data, digits.target, test_size=0.25,
                            random_state=0, stratify=digits.target)

# Multinomial naive Bayes classifier: the MultinomialNB model
def test_MultinomialNB(*data):
    X_train, X_test, y_train, y_test = data
    cls = naive_bayes.MultinomialNB()
    cls.fit(X_train, y_train)
    print('Training Score: %.2f' % cls.score(X_train, y_train))
    print('Testing Score: %.2f' % cls.score(X_test, y_test))

# Produce the train/test split for the classification problem
X_train, X_test, y_train, y_test = load_data()
# Call test_MultinomialNB
test_MultinomialNB(X_train, X_test, y_train, y_test)
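# Optional follow-up (not part of the original walkthrough): a minimal sketch of how
# one might inspect where the default MultinomialNB goes wrong on the test set,
# assuming the X_train/X_test/y_train/y_test produced by load_data() above.
# The helper name inspect_MultinomialNB_predictions is purely illustrative.
from sklearn.metrics import confusion_matrix

def inspect_MultinomialNB_predictions(*data):
    X_train, X_test, y_train, y_test = data
    cls = naive_bayes.MultinomialNB()  # default alpha=1.0 (Laplace smoothing)
    cls.fit(X_train, y_train)
    y_pred = cls.predict(X_test)
    # Rows are the true digits, columns are the predicted digits.
    print(confusion_matrix(y_test, y_pred))

# inspect_MultinomialNB_predictions(X_train, X_test, y_train, y_test)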
def test_MultinomialNB_alpha(*data):
    '''
    Test how the alpha parameter affects the prediction performance of MultinomialNB.
    '''
    X_train, X_test, y_train, y_test = data
    alphas = np.logspace(-2, 5, num=200)
    train_scores = []
    test_scores = []
    for alpha in alphas:
        cls = naive_bayes.MultinomialNB(alpha=alpha)
        cls.fit(X_train, y_train)
        train_scores.append(cls.score(X_train, y_train))
        test_scores.append(cls.score(X_test, y_test))
    ## Plot the scores against alpha
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(alphas, train_scores, label="Training Score")
    ax.plot(alphas, test_scores, label="Testing Score")
    ax.set_xlabel(r"$\alpha$")
    ax.set_ylabel("score")
    ax.set_ylim(0, 1.0)
    ax.set_title("MultinomialNB")
    ax.set_xscale("log")
    ax.legend(loc="best")  # without this call the curve labels are never shown
    plt.show()

# Call test_MultinomialNB_alpha
test_MultinomialNB_alpha(X_train, X_test, y_train, y_test)
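# Optional follow-up (an assumption on our part, not from the original text): instead of
# reading a good alpha off the train/test curves by eye, one could pick it by
# cross-validation on the training set, e.g. with GridSearchCV. A minimal sketch,
# reusing a coarser version of the same logarithmic alpha grid; the helper name
# search_MultinomialNB_alpha is illustrative only.
from sklearn.model_selection import GridSearchCV

def search_MultinomialNB_alpha(X_train, y_train):
    param_grid = {"alpha": np.logspace(-2, 5, num=20)}  # coarser grid to keep the search cheap
    search = GridSearchCV(naive_bayes.MultinomialNB(), param_grid, cv=5)
    search.fit(X_train, y_train)
    print("Best alpha: %.4f, CV score: %.2f"
          % (search.best_params_["alpha"], search.best_score_))
    return search.best_estimator_

# search_MultinomialNB_alpha(X_train, y_train)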