回歸預測之XGBoost——運行+調優


import xgboost as xgb
from xgboost import plot_importance
from xgboost.sklearn import XGBClassifier,XGBRegressor
# The names below (np, plt, metrics) were used but never imported in the
# original snippet — added here so the script actually runs.
import numpy as np
import matplotlib.pyplot as plt
from sklearn import metrics

# Fit an XGBoost regressor with hand-picked hyperparameters.
xgb_reg = xgb.XGBRegressor(
    n_estimators=750,      # number of boosting rounds
    max_depth=9,           # maximum tree depth
    colsample_bytree=0.5,  # fraction of columns sampled per tree
    reg_alpha=0.05,        # L1 regularization strength
    subsample=0.6,         # fraction of rows sampled per tree
)
# NOTE(review): assumes X_train/y_train/X_test/y_test are defined earlier
# in the full article — confirm when assembling the script.
xgb_reg.fit(X_train, y_train)
y_pred = xgb_reg.predict(X_test)
print(y_pred)

# Goodness of fit: the coefficient of determination (R^2) on the test set.
# In a script a bare expression is discarded, so print it explicitly.
print("R2:", xgb_reg.score(X_test, y_test))
# Mean squared error on the test set.
print("MSE:", metrics.mean_squared_error(y_test, y_pred))
# Per-feature importances learned by the fitted model.
print(xgb_reg.feature_importances_)
# (The original also called xgb_reg.set_params() with no arguments — a
# no-op that changes nothing; removed.)

# Plot true vs. predicted values over the test samples.
plt.figure(figsize=(15, 10))
# x-axis: index of each test sample
t = np.arange(len(X_test))
# true values in red
plt.plot(t, y_test, 'r', linewidth=2, label='真實值')
# predicted values in green
plt.plot(t, y_pred, 'g', linewidth=2, label='預測值')
plt.legend()
plt.show()

(Figure: plot of the true values (red) vs. the predicted values (green) over the test samples.)

粗調

# Coarse grid search over the main XGBoost hyperparameters.
# GridSearchCV was used but never imported in the original snippet.
from sklearn.model_selection import GridSearchCV

# Parameter grid: every combination is evaluated (2*3*2*3*2 = 72 fits x 3 folds).
xgb_params = {
    'max_depth': [6, 9],
    'subsample': [0.6, 0.9, 1],
    'colsample_bytree': [0.5, 0.6],
    'reg_alpha': [0, 0.05, 0.1],
    'n_estimators': [750, 1000],
}
# 3-fold cross-validation, all CPU cores; refit=True retrains the best
# combination on the full training set so the searcher can predict directly.
xgb_gridsearch = GridSearchCV(xgb_reg, xgb_params, cv=3, n_jobs=-1,
                              verbose=10, refit=True)
# NOTE(review): assumes xgb_reg, X_train/y_train/X_test/y_test and the
# sklearn `metrics` module are in scope from earlier in the article.
xgb_gridsearch.fit(X_train, y_train)
y_pred = xgb_gridsearch.predict(X_test)
print(y_pred)
# R^2 of the refit best estimator on the test set.
print("r2:", xgb_gridsearch.score(X_test, y_test))
# MSE computed with scikit-learn.
print("MSE:", metrics.mean_squared_error(y_test, y_pred))
# Best hyperparameter combination found by the search.
print(xgb_gridsearch.best_params_)

將得到的參數,代入模型,重跑。


免責聲明!

本站轉載的文章為個人學習借鑒使用,本站對版權不負任何法律責任。如果侵犯了您的隱私權益,請聯系本站郵箱yoyou2525@163.com刪除。



 
粵ICP備18138465號   © 2018-2025 CODEPRJ.COM