# 吳裕雄 python 機器學習——支持向量機線性分類LinearSVC模型
# (Wu Yuxiong — Python machine learning: linear classification with the LinearSVC support-vector model)


import numpy as np
import matplotlib.pyplot as plt

from sklearn import datasets, linear_model,svm
from sklearn.model_selection import train_test_split

def load_data_classfication():
    """Load the iris dataset and split it for a classification task.

    Returns:
        A ``(X_train, X_test, y_train, y_test)`` tuple.  One quarter of the
        samples form the test split, stratified by class label so every
        class keeps its original proportion; the seed is fixed for
        reproducibility.
    """
    # scikit-learn's bundled iris dataset: 150 samples, 4 features, 3 classes.
    iris = datasets.load_iris()
    features, labels = iris.data, iris.target
    # Stratified 75%/25% train/test split with a deterministic shuffle.
    return train_test_split(
        features, labels, test_size=0.25, random_state=0, stratify=labels
    )

# Linear support-vector classification with the LinearSVC model
def test_LinearSVC(*data):
    """Fit a default LinearSVC and report its parameters and test accuracy.

    ``data`` unpacks to ``(X_train, X_test, y_train, y_test)``.
    """
    X_train, X_test, y_train, y_test = data
    model = svm.LinearSVC()
    model.fit(X_train, y_train)
    # Show the learned hyperplane(s) and the mean accuracy on the test split.
    print('Coefficients:%s, intercept %s' % (model.coef_, model.intercept_))
    print('Score: %.2f' % model.score(X_test, y_test))
    
# Build the classification dataset once; the splits are reused by every
# experiment below.
X_train, X_test, y_train, y_test = load_data_classfication()
# Exercise the plain (default-parameter) LinearSVC.
test_LinearSVC(X_train, X_test, y_train, y_test)

def test_LinearSVC_loss(*data):
    """Compare LinearSVC performance under each supported loss function.

    ``data`` unpacks to ``(X_train, X_test, y_train, y_test)``.  For both
    ``'hinge'`` and ``'squared_hinge'`` a fresh model is fitted and its
    coefficients and test accuracy are printed.
    """
    X_train, X_test, y_train, y_test = data
    for loss in ('hinge', 'squared_hinge'):
        model = svm.LinearSVC(loss=loss)
        model.fit(X_train, y_train)
        print("Loss:%s" % loss)
        print('Coefficients:%s, intercept %s' % (model.coef_, model.intercept_))
        print('Score: %.2f' % model.score(X_test, y_test))
        
# Evaluate how the choice of loss function affects LinearSVC.
test_LinearSVC_loss(X_train, X_test, y_train, y_test)

def test_LinearSVC_L12(*data):
    """Compare LinearSVC performance under L1 versus L2 regularization.

    ``data`` unpacks to ``(X_train, X_test, y_train, y_test)``.  The dual
    formulation is disabled because scikit-learn only supports the L1
    penalty with ``dual=False``.
    """
    X_train, X_test, y_train, y_test = data
    for penalty in ('l1', 'l2'):
        model = svm.LinearSVC(penalty=penalty, dual=False)
        model.fit(X_train, y_train)
        print("penalty:%s" % penalty)
        print('Coefficients:%s, intercept %s' % (model.coef_, model.intercept_))
        print('Score: %.2f' % model.score(X_test, y_test))
        
# Evaluate how the regularization form (L1 vs L2) affects LinearSVC.
test_LinearSVC_L12(X_train, X_test, y_train, y_test)

def test_LinearSVC_C(*data):
    """Plot LinearSVC train/test accuracy as the penalty parameter C varies.

    ``data`` unpacks to ``(X_train, X_test, y_train, y_test)``.  C is swept
    log-uniformly over [1e-2, 1e1] (np.logspace default of 50 points); both
    score curves are drawn against a log-scaled x axis and shown with
    ``plt.show()``.
    """
    X_train, X_test, y_train, y_test = data
    Cs = np.logspace(-2, 1)  # 50 values from 0.01 to 10
    train_scores = []
    test_scores = []
    for C in Cs:
        cls = svm.LinearSVC(C=C)
        cls.fit(X_train, y_train)
        train_scores.append(cls.score(X_train, y_train))
        test_scores.append(cls.score(X_test, y_test))

    ## Plot both accuracy curves against C.
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    # Bug fix: legend label previously read "Traing score".
    ax.plot(Cs, train_scores, label="Training score")
    ax.plot(Cs, test_scores, label="Testing score")
    ax.set_xlabel(r"C")
    ax.set_ylabel(r"score")
    ax.set_xscale('log')
    ax.set_title("LinearSVC")
    ax.legend(loc='best')
    plt.show()
    
# Sweep the penalty parameter C and plot the resulting score curves.
test_LinearSVC_C(X_train, X_test, y_train, y_test)

 


# 免責聲明! (Disclaimer)
#
# 本站轉載的文章為個人學習借鑒使用,本站對版權不負任何法律責任。如果侵犯了您的隱私權益,請聯系本站郵箱yoyou2525@163.com刪除。
# (Articles reposted on this site are for personal study and reference; the
# site assumes no legal liability for copyright. If your rights are
# infringed, contact yoyou2525@163.com for removal.)
#
# 粵ICP備18138465號   © 2018-2025 CODEPRJ.COM