Python Implementation of CART (Gini Index)

Runtime Environment

  • Python 3
  • treePlotter module (needed for plotting; optional if you skip the plot)
  • matplotlib (required if the module above is used)

Computation Process

st=>start: Start
e=>end
op1=>operation: Read the data
op2=>operation: Format the data
cond=>condition: Tree construction finished?
su=>subroutine: Recursively build the tree
op3=>operation: Choose the feature with the smallest Gini index as the decision node
op4=>operation: Test the decisions
op5=>operation: Partition into subtrees under the decision node

st->op1->op2->cond
cond(no)->su->op5->op3->su
cond(yes)->op4->e
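
The splitting criterion implemented below is the Gini index of a split: for a feature A with values v, Gini(D, A) = Σ_v (|D_v| / |D|) · (1 − Σ_k p_k²), and the feature with the smallest value is chosen. A minimal stand-alone sketch of that computation (the helper name giniOfSplit is only for illustration and does not appear in the code below):

def giniOfSplit(rows, featureIndex):
	"""Gini index of splitting rows on the feature at featureIndex.
	Each row ends with its class label, as in the dataset below."""
	total = len(rows)
	gini = 0.0
	for value in set(row[featureIndex] for row in rows):
		# rows taking this value of the feature
		subset = [row for row in rows if row[featureIndex] == value]
		labels = [row[-1] for row in subset]
		# Gini impurity of the subset: 1 - sum of squared class probabilities
		impurity = 1.0 - sum((labels.count(c) / len(subset)) ** 2 for c in set(labels))
		# weight by the fraction of rows that fall into this subset
		gini += len(subset) / total * impurity
	return gini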

Sample Input

/* Dataset.txt */
Training set:

    outlook     temperature    humidity     windy        class
    ---------------------------------------------------------
    sunny       hot            high         false         N
    sunny       hot            high         true          N
    overcast    hot            high         false         Y
    rain        mild           high         false         Y
    rain        cool           normal       false         Y
    rain        cool           normal       true          N
    overcast    cool           normal       true          Y

Test set:
    outlook    temperature    humidity    windy 
    ---------------------------------------------------------
    sunny       mild           high         false          
    sunny       cool           normal       false         
    rain        mild           normal       false        
    sunny       mild           normal       true          
    overcast    mild           high         true          
    overcast    hot            normal       false         
    rain        mild           high         true         

Code Implementation

# -*- coding: utf-8 -*-
__author__ = 'Wsine'

from math import log
import operator
import treePlotter

def calcShannonEnt(dataSet):
	"""
	輸入:數據集
	輸出:數據集的香農熵
	描述:計算給定數據集的香農熵
	"""
	numEntries = len(dataSet)
	labelCounts = {}
	for featVec in dataSet:
		currentLabel = featVec[-1]
		if currentLabel not in labelCounts.keys():
			labelCounts[currentLabel] = 0
		labelCounts[currentLabel] += 1
	shannonEnt = 0.0
	for key in labelCounts:
		prob = float(labelCounts[key])/numEntries
		shannonEnt -= prob * log(prob, 2)
	return shannonEnt
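
# Note: calcShannonEnt is kept for reference only; the Gini-based splitting
# below never calls it. A quick sanity check on the training set (4 'Y' vs 3 'N'):
#   calcShannonEnt(createDataSet()[0])  ->  roughly 0.985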

def splitDataSet(dataSet, axis, value):
	"""
	輸入:數據集,選擇維度,選擇值
	輸出:划分數據集
	描述:按照給定特征划分數據集;去除選擇維度中等於選擇值的項
	"""
	retDataSet = []
	for featVec in dataSet:
		if featVec[axis] == value:
			reduceFeatVec = featVec[:axis]
			reduceFeatVec.extend(featVec[axis+1:])
			retDataSet.append(reduceFeatVec)
	return retDataSet
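
# Example (values follow the encoding in createDataSet below):
#   dataSet, labels = createDataSet()
#   splitDataSet(dataSet, 0, 0)  ->  [[0, 0, 0, 'N'], [0, 0, 1, 'N']]
# i.e. the two 'sunny' rows are kept and the outlook column is dropped.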

def chooseBestFeatureToSplit(dataSet):
	"""
	輸入:數據集
	輸出:最好的划分維度
	描述:選擇最好的數據集划分維度
	"""
	numFeatures = len(dataSet[0]) - 1
	bestGini = 999999.0
	bestFeature = -1
	for i in range(numFeatures):
		featList = [example[i] for example in dataSet]
		uniqueVals = set(featList)
		gini = 0.0
		for value in uniqueVals:
			subDataSet = splitDataSet(dataSet, i, value)
			prob = len(subDataSet)/float(len(dataSet))
			# fraction of rows labeled 'N' in this subset (splitDataSet on the
			# last column simply filters the rows by class label here)
			subProb = len(splitDataSet(subDataSet, -1, 'N')) / float(len(subDataSet))
			gini += prob * (1.0 - pow(subProb, 2) - pow(1 - subProb, 2))
		if (gini < bestGini):
			bestGini = gini
			bestFeature = i
	return bestFeature
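
# Usage sketch: on the training set below the smallest Gini index is obtained
# for feature 0 ('outlook'), which is why it becomes the root of the tree in
# the sample output:
#   dataSet, labels = createDataSet()
#   chooseBestFeatureToSplit(dataSet)  ->  0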

def majorityCnt(classList):
	"""
	輸入:分類類別列表
	輸出:子節點的分類
	描述:數據集已經處理了所有屬性,但是類標簽依然不是唯一的,
		  采用多數判決的方法決定該子節點的分類
	"""
	classCount = {}
	for vote in classList:
		if vote not in classCount.keys():
			classCount[vote] = 0
		classCount[vote] += 1
	sortedClassCount = sorted(classCount.items(), key=operator.itemgetter(1), reverse=True)
	return sortedClassCount[0][0]

def createTree(dataSet, labels):
	"""
	輸入:數據集,特征標簽
	輸出:決策樹
	描述:遞歸構建決策樹,利用上述的函數
	"""
	classList = [example[-1] for example in dataSet]
	if classList.count(classList[0]) == len(classList):
		# all samples share the same class: stop splitting
		return classList[0]
	if len(dataSet[0]) == 1:
		# all features have been used: return the majority class
		return majorityCnt(classList)
	bestFeat = chooseBestFeatureToSplit(dataSet)
	bestFeatLabel = labels[bestFeat]
	myTree = {bestFeatLabel:{}}
	del(labels[bestFeat])
	# collect all values of the chosen feature at this node
	featValues = [example[bestFeat] for example in dataSet]
	uniqueVals = set(featValues)
	for value in uniqueVals:
		subLabels = labels[:]
		myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet, bestFeat, value), subLabels)
	return myTree

def classify(inputTree, featLabels, testVec):
	"""
	輸入:決策樹,分類標簽,測試數據
	輸出:決策結果
	描述:跑決策樹
	"""
	firstStr = list(inputTree.keys())[0]
	secondDict = inputTree[firstStr]
	featIndex = featLabels.index(firstStr)
	classLabel = 'N'
	for key in secondDict.keys():
		if testVec[featIndex] == key:
			if type(secondDict[key]).__name__ == 'dict':
				classLabel = classify(secondDict[key], featLabels, testVec)
			else:
				classLabel = secondDict[key]
	return classLabel
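
# Example: the last test vector [2, 1, 0, 1] (rain, mild, high, windy=true)
# follows outlook=2 -> windy=1 in the tree and is classified as 'N',
# matching the last entry of the sample output:
#   classify(desicionTree, labels, [2, 1, 0, 1])  ->  'N'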

def classifyAll(inputTree, featLabels, testDataSet):
	"""
	輸入:決策樹,分類標簽,測試數據集
	輸出:決策結果
	描述:跑決策樹
	"""
	classLabelAll = []
	for testVec in testDataSet:
		classLabelAll.append(classify(inputTree, featLabels, testVec))
	return classLabelAll

def storeTree(inputTree, filename):
	"""
	輸入:決策樹,保存文件路徑
	輸出:
	描述:保存決策樹到文件
	"""
	import pickle
	fw = open(filename, 'wb')
	pickle.dump(inputTree, fw)
	fw.close()

def grabTree(filename):
	"""
	輸入:文件路徑名
	輸出:決策樹
	描述:從文件讀取決策樹
	"""
	import pickle
	fr = open(filename, 'rb')
	return pickle.load(fr)
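
# Round-trip sketch (the file name is just an example, as in main() below):
#   storeTree(desicionTree, 'classifierStorage.txt')
#   desicionTree = grabTree('classifierStorage.txt')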

def createDataSet():
	"""
	outlook->  0: sunny | 1: overcast | 2: rain
	temperature-> 0: hot | 1: mild | 2: cool
	humidity-> 0: high | 1: normal
	windy-> 0: false | 1: true 
	"""
	dataSet = [[0, 0, 0, 0, 'N'], 
			   [0, 0, 0, 1, 'N'], 
			   [1, 0, 0, 0, 'Y'], 
			   [2, 1, 0, 0, 'Y'], 
			   [2, 2, 1, 0, 'Y'], 
			   [2, 2, 1, 1, 'N'], 
			   [1, 2, 1, 1, 'Y']]
	labels = ['outlook', 'temperature', 'humidity', 'windy']
	return dataSet, labels

def createTestSet():
	"""
	outlook->  0: sunny | 1: overcast | 2: rain
	temperature-> 0: hot | 1: mild | 2: cool
	humidity-> 0: high | 1: normal
	windy-> 0: false | 1: true 
	"""
	testSet = [[0, 1, 0, 0], 
			   [0, 2, 1, 0], 
			   [2, 1, 1, 0], 
			   [0, 1, 1, 1], 
			   [1, 1, 0, 1], 
			   [1, 0, 1, 0], 
			   [2, 1, 0, 1]]
	return testSet

def main():
	dataSet, labels = createDataSet()
	labels_tmp = labels[:] # copy, because createTree mutates the label list
	desicionTree = createTree(dataSet, labels_tmp)
	#storeTree(desicionTree, 'classifierStorage.txt')
	#desicionTree = grabTree('classifierStorage.txt')
	print('desicionTree:\n', desicionTree)
	treePlotter.createPlot(desicionTree)
	testSet = createTestSet()
	print('classifyResult:\n', classifyAll(desicionTree, labels, testSet))

if __name__ == '__main__':
	main()
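
If matplotlib (and hence treePlotter) is not set up, the tree can still be built and evaluated without plotting. A minimal sketch, assuming the functions above are kept in one file and the import treePlotter line plus the treePlotter.createPlot(...) call are commented out:

dataSet, labels = createDataSet()
tree = createTree(dataSet, labels[:])  # pass a copy; createTree mutates the label list
print(tree)
print(classifyAll(tree, labels, createTestSet()))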

Sample Output

desicionTree:
 {'outlook': {0: 'N', 1: 'Y', 2: {'windy': {0: 'Y', 1: 'N'}}}}
classifyResult:
 ['N', 'N', 'Y', 'N', 'Y', 'Y', 'N']

Recursively built tree (figure: the plot produced by treePlotter.createPlot)

Additional Files

treePlotter.py

matplotlib must be set up before this module can be used.

import matplotlib.pyplot as plt

decisionNode = dict(boxstyle="sawtooth", fc="0.8")
leafNode = dict(boxstyle="round4", fc="0.8")
arrow_args = dict(arrowstyle="<-")

def plotNode(nodeTxt, centerPt, parentPt, nodeType):
	createPlot.ax1.annotate(nodeTxt, xy=parentPt, xycoords='axes fraction', \
							xytext=centerPt, textcoords='axes fraction', \
							va="center", ha="center", bbox=nodeType, arrowprops=arrow_args)

def getNumLeafs(myTree):
	numLeafs = 0
	firstStr = list(myTree.keys())[0]
	secondDict = myTree[firstStr]
	for key in secondDict.keys():
		if type(secondDict[key]).__name__ == 'dict':
			numLeafs += getNumLeafs(secondDict[key])
		else:
			numLeafs += 1
	return numLeafs

def getTreeDepth(myTree):
	maxDepth = 0
	firstStr = list(myTree.keys())[0]
	secondDict = myTree[firstStr]
	for key in secondDict.keys():
		if type(secondDict[key]).__name__ == 'dict':
			thisDepth = getTreeDepth(secondDict[key]) + 1
		else:
			thisDepth = 1
		if thisDepth > maxDepth:
			maxDepth = thisDepth
	return maxDepth

def plotMidText(cntrPt, parentPt, txtString):
	xMid = (parentPt[0] - cntrPt[0]) / 2.0 + cntrPt[0]
	yMid = (parentPt[1] - cntrPt[1]) / 2.0 + cntrPt[1]
	createPlot.ax1.text(xMid, yMid, txtString)

def plotTree(myTree, parentPt, nodeTxt):
	numLeafs = getNumLeafs(myTree)
	depth = getTreeDepth(myTree)
	firstStr = list(myTree.keys())[0]
	cntrPt = (plotTree.xOff + (1.0 + float(numLeafs)) / 2.0 / plotTree.totalw, plotTree.yOff)
	plotMidText(cntrPt, parentPt, nodeTxt)
	plotNode(firstStr, cntrPt, parentPt, decisionNode)
	secondDict = myTree[firstStr]
	plotTree.yOff = plotTree.yOff - 1.0 / plotTree.totalD
	for key in secondDict.keys():
		if type(secondDict[key]).__name__ == 'dict':
			plotTree(secondDict[key], cntrPt, str(key))
		else:
			plotTree.xOff = plotTree.xOff + 1.0 / plotTree.totalw
			plotNode(secondDict[key], (plotTree.xOff, plotTree.yOff), cntrPt, leafNode)
			plotMidText((plotTree.xOff, plotTree.yOff), cntrPt, str(key))
	plotTree.yOff = plotTree.yOff + 1.0 / plotTree.totalD

def createPlot(inTree):
	fig = plt.figure(1, facecolor='white')
	fig.clf()
	axprops = dict(xticks=[], yticks=[])
	createPlot.ax1 = plt.subplot(111, frameon=False, **axprops)
	plotTree.totalw = float(getNumLeafs(inTree))
	plotTree.totalD = float(getTreeDepth(inTree))
	plotTree.xOff = -0.5 / plotTree.totalw
	plotTree.yOff = 1.0
	plotTree(inTree, (0.5, 1.0), '')
	plt.show()

