目录
一、算法理论知识
朴素贝叶斯(Naive Bayes)
朴素贝叶斯模型是一组非常简单快速的分类算法,通常适用于维度非常高的数据集。因为运行速度快,而且可调参数少,因此非常适合为分类问题提供快速粗糙的基本方案。
朴素贝叶斯分类器建立在贝叶斯分类方法的基础上,其数学基础是贝叶斯定理(Bayes' theorem)——一个描述统计量条件概率关系的公式。在贝叶斯分类中,我们希望确定一个具有某些特征的样本属于某类标签 L 的概率,通常记为 P(L | 特征)。
二、算法代码
################ 加微信获取源码
三、仿真实验结果及其分析
预测标签和预测结果有很多不吻合之处,准确率只有 0.5667(约 56.67%),预测结果十分不理想。
------------分析原因---------------
鸢尾花数据集一共只有150个样本,训练集相对来说是远远不够的,数据太少导致拟合效果不足(欠拟合)。下面按倍数扩增鸢尾花数据集,使其按相同特征生成足够大的数据集。我这里从1倍逐步扩大到10倍(即样本数从150个逐步增加到1500个),然后画出它的准确率曲线。只需对源代码增加一个 for 循环即可:
准确率变化图像如上图右侧所示。可以发现,随着数据集增大,模型预测准确率不断提高;当数据集扩增到6倍左右时,准确率达到100%。对于机器学习模型而言,只有在大量样本的训练条件下才能作出很好的预测。
C:\Programs\Python\Python310\python.exe E:/works/PythonCharmPre/ClassificationAnalysis/naivebayes.py
-- 0 --X--
[[5.1 3.5 1.4 0.2]
[4.9 3. 1.4 0.2]
[4.7 3.2 1.3 0.2]
[4.6 3.1 1.5 0.2]
[5. 3.6 1.4 0.2]
[5.4 3.9 1.7 0.4]
[4.6 3.4 1.4 0.3]
[5. 3.4 1.5 0.2]
[4.4 2.9 1.4 0.2]
[4.9 3.1 1.5 0.1]
[5.4 3.7 1.5 0.2]
[4.8 3.4 1.6 0.2]
[4.8 3. 1.4 0.1]
[4.3 3. 1.1 0.1]
[5.8 4. 1.2 0.2]
[5.7 4.4 1.5 0.4]
[5.4 3.9 1.3 0.4]
[5.1 3.5 1.4 0.3]
[5.7 3.8 1.7 0.3]
[5.1 3.8 1.5 0.3]
[5.4 3.4 1.7 0.2]
[5.1 3.7 1.5 0.4]
[4.6 3.6 1. 0.2]
[5.1 3.3 1.7 0.5]
[4.8 3.4 1.9 0.2]
[5. 3. 1.6 0.2]
[5. 3.4 1.6 0.4]
[5.2 3.5 1.5 0.2]
[5.2 3.4 1.4 0.2]
[4.7 3.2 1.6 0.2]
[4.8 3.1 1.6 0.2]
[5.4 3.4 1.5 0.4]
[5.2 4.1 1.5 0.1]
[5.5 4.2 1.4 0.2]
[4.9 3.1 1.5 0.2]
[5. 3.2 1.2 0.2]
[5.5 3.5 1.3 0.2]
[4.9 3.6 1.4 0.1]
[4.4 3. 1.3 0.2]
[5.1 3.4 1.5 0.2]
[5. 3.5 1.3 0.3]
[4.5 2.3 1.3 0.3]
[4.4 3.2 1.3 0.2]
[5. 3.5 1.6 0.6]
[5.1 3.8 1.9 0.4]
[4.8 3. 1.4 0.3]
[5.1 3.8 1.6 0.2]
[4.6 3.2 1.4 0.2]
[5.3 3.7 1.5 0.2]
[5. 3.3 1.4 0.2]
[7. 3.2 4.7 1.4]
[6.4 3.2 4.5 1.5]
[6.9 3.1 4.9 1.5]
[5.5 2.3 4. 1.3]
[6.5 2.8 4.6 1.5]
[5.7 2.8 4.5 1.3]
[6.3 3.3 4.7 1.6]
[4.9 2.4 3.3 1. ]
[6.6 2.9 4.6 1.3]
[5.2 2.7 3.9 1.4]
[5. 2. 3.5 1. ]
[5.9 3. 4.2 1.5]
[6. 2.2 4. 1. ]
[6.1 2.9 4.7 1.4]
[5.6 2.9 3.6 1.3]
[6.7 3.1 4.4 1.4]
[5.6 3. 4.5 1.5]
[5.8 2.7 4.1 1. ]
[6.2 2.2 4.5 1.5]
[5.6 2.5 3.9 1.1]
[5.9 3.2 4.8 1.8]
[6.1 2.8 4. 1.3]
[6.3 2.5 4.9 1.5]
[6.1 2.8 4.7 1.2]
[6.4 2.9 4.3 1.3]
[6.6 3. 4.4 1.4]
[6.8 2.8 4.8 1.4]
[6.7 3. 5. 1.7]
[6. 2.9 4.5 1.5]
[5.7 2.6 3.5 1. ]
[5.5 2.4 3.8 1.1]
[5.5 2.4 3.7 1. ]
[5.8 2.7 3.9 1.2]
[6. 2.7 5.1 1.6]
[5.4 3. 4.5 1.5]
[6. 3.4 4.5 1.6]
[6.7 3.1 4.7 1.5]
[6.3 2.3 4.4 1.3]
[5.6 3. 4.1 1.3]
[5.5 2.5 4. 1.3]
[5.5 2.6 4.4 1.2]
[6.1 3. 4.6 1.4]
[5.8 2.6 4. 1.2]
[5. 2.3 3.3 1. ]
[5.6 2.7 4.2 1.3]
[5.7 3. 4.2 1.2]
[5.7 2.9 4.2 1.3]
[6.2 2.9 4.3 1.3]
[5.1 2.5 3. 1.1]
[5.7 2.8 4.1 1.3]
[6.3 3.3 6. 2.5]
[5.8 2.7 5.1 1.9]
[7.1 3. 5.9 2.1]
[6.3 2.9 5.6 1.8]
[6.5 3. 5.8 2.2]
[7.6 3. 6.6 2.1]
[4.9 2.5 4.5 1.7]
[7.3 2.9 6.3 1.8]
[6.7 2.5 5.8 1.8]
[7.2 3.6 6.1 2.5]
[6.5 3.2 5.1 2. ]
[6.4 2.7 5.3 1.9]
[6.8 3. 5.5 2.1]
[5.7 2.5 5. 2. ]
[5.8 2.8 5.1 2.4]
[6.4 3.2 5.3 2.3]
[6.5 3. 5.5 1.8]
[7.7 3.8 6.7 2.2]
[7.7 2.6 6.9 2.3]
[6. 2.2 5. 1.5]
[6.9 3.2 5.7 2.3]
[5.6 2.8 4.9 2. ]
[7.7 2.8 6.7 2. ]
[6.3 2.7 4.9 1.8]
[6.7 3.3 5.7 2.1]
[7.2 3.2 6. 1.8]
[6.2 2.8 4.8 1.8]
[6.1 3. 4.9 1.8]
[6.4 2.8 5.6 2.1]
[7.2 3. 5.8 1.6]
[7.4 2.8 6.1 1.9]
[7.9 3.8 6.4 2. ]
[6.4 2.8 5.6 2.2]
[6.3 2.8 5.1 1.5]
[6.1 2.6 5.6 1.4]
[7.7 3. 6.1 2.3]
[6.3 3.4 5.6 2.4]
[6.4 3.1 5.5 1.8]
[6. 3. 4.8 1.8]
[6.9 3.1 5.4 2.1]
[6.7 3.1 5.6 2.4]
[6.9 3.1 5.1 2.3]
[5.8 2.7 5.1 1.9]
[6.8 3.2 5.9 2.3]
[6.7 3.3 5.7 2.5]
[6.7 3. 5.2 2.3]
[6.3 2.5 5. 1.9]
[6.5 3. 5.2 2. ]
[6.2 3.4 5.4 2.3]
[5.9 3. 5.1 1.8]]
-- 0 --Y--
[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2
2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
2 2]
--预测集标签--
[2 1 0 2 0 2 0 1 1 1 2 1 1 1 1 0 1 1 0 0 2 1 0 0 2 0 0 1 1 0]
--预测结果--
[2 2 0 2 0 2 0 2 2 2 2 2 2 2 2 0 2 2 0 0 2 2 0 0 2 0 0 2 2 0]
-- 1 --X--
[[10.2 7. 2.8 0.4]
[ 9.8 6. 2.8 0.4]
[ 9.4 6.4 2.6 0.4]
[ 9.2 6.2 3. 0.4]
[10. 7.2 2.8 0.4]
[10.8 7.8 3.4 0.8]
[ 9.2 6.8 2.8 0.6]
[10. 6.8 3. 0.4]
[ 8.8 5.8 2.8 0.4]
[ 9.8 6.2 3. 0.2]
[10.8 7.4 3. 0.4]
[ 9.6 6.8 3.2 0.4]
[ 9.6 6. 2.8 0.2]
[ 8.6 6. 2.2 0.2]
[11.6 8. 2.4 0.4]
[11.4 8.8 3. 0.8]
[10.8 7.8 2.6 0.8]
[10.2 7. 2.8 0.6]
[11.4 7.6 3.4 0.6]
[10.2 7.6 3. 0.6]
[10.8 6.8 3.4 0.4]
[10.2 7.4 3. 0.8]
[ 9.2 7.2 2. 0.4]
[10.2 6.6 3.4 1. ]
[ 9.6 6.8 3.8 0.4]
[10. 6. 3.2 0.4]
[10. 6.8 3.2 0.8]
[10.4 7. 3. 0.4]
[10.4 6.8 2.8 0.4]
[ 9.4 6.4 3.2 0.4]
[ 9.6 6.2 3.2 0.4]
[10.8 6.8 3. 0.8]
[10.4 8.2 3. 0.2]
[11. 8.4 2.8 0.4]
[ 9.8 6.2 3. 0.4]
[10. 6.4 2.4 0.4]
[11. 7. 2.6 0.4]
[ 9.8 7.2 2.8 0.2]
[ 8.8 6. 2.6 0.4]
[10.2 6.8 3. 0.4]
[10. 7. 2.6 0.6]
[ 9. 4.6 2.6 0.6]
[ 8.8 6.4 2.6 0.4]
[10. 7. 3.2 1.2]
[10.2 7.6 3.8 0.8]
[ 9.6 6. 2.8 0.6]
[10.2 7.6 3.2 0.4]
[ 9.2 6.4 2.8 0.4]
[10.6 7.4 3. 0.4]
[10. 6.6 2.8 0.4]
[14. 6.4 9.4 2.8]
[12.8 6.4 9. 3. ]
[13.8 6.2 9.8 3. ]
[11. 4.6 8. 2.6]
[13. 5.6 9.2 3. ]
[11.4 5.6 9. 2.6]
[12.6 6.6 9.4 3.2]
[ 9.8 4.8 6.6 2. ]
[13.2 5.8 9.2 2.6]
[10.4 5.4 7.8 2.8]
[10. 4. 7. 2. ]
[11.8 6. 8.4 3. ]
[12. 4.4 8. 2. ]
[12.2 5.8 9.4 2.8]
[11.2 5.8 7.2 2.6]
[13.4 6.2 8.8 2.8]
[11.2 6. 9. 3. ]
[11.6 5.4 8.2 2. ]
[12.4 4.4 9. 3. ]
[11.2 5. 7.8 2.2]
[11.8 6.4 9.6 3.6]
[12.2 5.6 8. 2.6]
[12.6 5. 9.8 3. ]
[12.2 5.6 9.4 2.4]
[12.8 5.8 8.6 2.6]
[13.2 6. 8.8 2.8]
[13.6 5.6 9.6 2.8]
[13.4 6. 10. 3.4]
[12. 5.8 9. 3. ]
[11.4 5.2 7. 2. ]
[11. 4.8 7.6 2.2]
[11. 4.8 7.4 2. ]
[11.6 5.4 7.8 2.4]
[12. 5.4 10.2 3.2]
[10.8 6. 9. 3. ]
[12. 6.8 9. 3.2]
[13.4 6.2 9.4 3. ]
[12.6 4.6 8.8 2.6]
[11.2 6. 8.2 2.6]
[11. 5. 8. 2.6]
[11. 5.2 8.8 2.4]
[12.2 6.