import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier  # decision tree classifier
from sklearn import datasets  # built-in datasets
iris = datasets.load_iris()  # iris samples with four features
X = iris.data[:, [0, 2]]  # keep two features: sepal length and petal length
Y = iris.target
x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size=0.4)
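# Note: train_test_split shuffles at random; passing random_state (and stratify=Y) would make
# this 60/40 split reproducible and class-balanced.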
# tree depths to compare
Max_Depth = [2, 6, 10, 15, 20]
# test accuracy for each depth
Predict_list = []
# train one model per depth
for i in Max_Depth:
    # criterion (split quality measure): 'gini' uses the Gini impurity, 'entropy' uses information gain
    # max_depth: maximum depth of the tree; deeper trees fit more complex boundaries but can overfit
    DTC_model = DecisionTreeClassifier(criterion='entropy', max_depth=i)
    DTC_model.fit(x_train, y_train)
    Predict = DTC_model.score(x_test, y_test)
    Predict_list.append(Predict)
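# Optional sanity check (a minimal sketch using the variables above): print each depth/accuracy pair.
for depth, acc in zip(Max_Depth, Predict_list):
    print(f"max_depth={depth}: test accuracy={acc:.3f}")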
# bar chart of accuracy vs. depth
plt.rcParams['font.sans-serif'] = ['SimHei']  # CJK-capable font (only needed for non-ASCII labels)
plt.rcParams['axes.unicode_minus'] = False
plt.title("Test accuracy at different tree depths")
plt.xlabel("max_depth")
plt.ylabel("Accuracy")
plt.bar(Max_Depth, Predict_list, color=['r', 'b', 'y', 'c', 'pink'], width=2, tick_label=Max_Depth)
for i in range(len(Predict_list)):
    plt.text(Max_Depth[i], Predict_list[i], format(Predict_list[i], '.2f'))
plt.show()
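# Optional (a minimal sketch): report the depth with the highest test accuracy on this split.
best = int(np.argmax(Predict_list))
print(f"best depth on this split: {Max_Depth[best]} (accuracy {Predict_list[best]:.3f})")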
"======================================================================================"
# decision regions of the tree on the training data
x_min, x_max = x_train[:, 0].min() - 1, x_train[:, 0].max() + 1  # range of feature 1
y_min, y_max = x_train[:, 1].min() - 1, x_train[:, 1].max() + 1  # range of feature 2
t1 = np.linspace(x_min,x_max,50)
t2 = np.linspace(y_min,y_max,50)
# use a depth-2 model for the decision-boundary plot
clf = DecisionTreeClassifier(criterion='entropy',max_depth=2)
clf.fit(x_train, y_train)
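# Optional: dump the learned rules as text (a small sketch; export_text needs scikit-learn >= 0.21,
# and the feature names assume columns 0 and 2 of the iris data, as selected above).
from sklearn.tree import export_text
print(export_text(clf, feature_names=['sepal length (cm)', 'petal length (cm)']))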
xx, yy = np.meshgrid(t1, t2)  # evaluation grid over the feature plane
grid_points = np.stack((xx.flat, yy.flat), axis=1)  # flatten the grid into an (N, 2) array
y_predict = clf.predict(grid_points)
# colour maps for the scatter points and the background regions
cm_pt = mpl.colors.ListedColormap(['k','y','w'])
cm_bg = mpl.colors.ListedColormap(['w','pink','c'])
plt.xlim(x_min,x_max)
plt.ylim(y_min,y_max)
plt.pcolormesh(xx, yy, y_predict.reshape(xx.shape), cmap=cm_bg)  # shade the predicted regions
plt.scatter(x_train[:,0],x_train[:,1],cmap=cm_pt,c=y_train,marker='o')
plt.xlabel('x_train[0]')
plt.ylabel('x_train[1]')
plt.grid(True)
plt.show()
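# Optional extra (a hedged sketch, not part of the original script): draw the fitted depth-2 tree
# with sklearn's plot_tree (available since scikit-learn 0.21); class names come from the iris bunch.
from sklearn.tree import plot_tree
plt.figure(figsize=(8, 5))
plot_tree(clf, feature_names=['sepal length (cm)', 'petal length (cm)'],
          class_names=list(iris.target_names), filled=True)
plt.show()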