# KNN
The k-NN algorithm classifies a sample by measuring the distances between its feature values and those of labeled samples.
Advantages: simple and intuitive, high accuracy, insensitive to outliers.
Disadvantages: high computational cost and high space complexity.
Steps for classifying data with Python:
1. Import the data with Python
2. Parse the data from a text file
3. Test the classifier
Classifier code:
import operator
from numpy import tile

def classify0(inX, dataSet, labels, k):               # classifier
    dataSetSize = dataSet.shape[0]                     # numpy shape[0] gives the number of rows in dataSet
    diffMat = tile(inX, (dataSetSize,1)) - dataSet     # repeat inX once column-wise and dataSetSize times row-wise, then subtract dataSet
    sqDiffMat = diffMat**2                             # square the element-wise feature differences
    sqDistances = sqDiffMat.sum(axis=1)                # sum(axis=1) adds across each row; sum(axis=0) would add down each column
    distances = sqDistances**0.5                       # take the square root to get Euclidean distances
    sortedDistIndicies = distances.argsort()           # indices that sort distances from smallest to largest
    classCount = {}                                    # dictionary counting votes per class
    for i in range(k):                                 # tally the labels of the k nearest neighbors
        voteIlabel = labels[sortedDistIndicies[i]]
        classCount[voteIlabel] = classCount.get(voteIlabel, 0) + 1
    sortedClassCount = sorted(classCount.items(),
                              key=operator.itemgetter(1), reverse=True)
    return sortedClassCount[0][0]                      # return the class with the most votes
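As a quick sanity check, a minimal usage sketch of classify0 on a tiny hand-made dataset (the points and labels here are illustrative assumptions, not part of the original listing):

from numpy import array

group = array([[1.0, 1.1], [1.0, 1.0], [0.0, 0.0], [0.0, 0.1]])   # four training points, two features each
labels = ['A', 'A', 'B', 'B']
print(classify0([0.0, 0.2], group, labels, 3))   # prints 'B': two of the three nearest neighbors are labeled 'B'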
Distance metrics (a short sketch follows this list):
1. Euclidean distance
2. Manhattan distance
3. Chebyshev distance
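A minimal sketch of these three metrics with NumPy (the vectors a and b are hypothetical examples; classify0 above uses the Euclidean distance):

import numpy as np

a = np.array([1.0, 2.0, 3.0])              # hypothetical sample
b = np.array([4.0, 0.0, 3.0])              # hypothetical sample

euclidean = np.sqrt(np.sum((a - b)**2))    # square root of the summed squared differences
manhattan = np.sum(np.abs(a - b))          # sum of the absolute differences
chebyshev = np.max(np.abs(a - b))          # largest absolute difference over all features

print(euclidean, manhattan, chebyshev)     # 3.605..., 5.0, 3.0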
Detailed code for using the classifier:
kNN.py:
from numpy import *                              # module import (the NumPy scientific computing package)
import operator                                  # module import (the operator module)
def createDataSet():
    group = array([[1.0, 1.1], [1.0, 1.0], [0, 0], [0, 0.1]])
    labels = ['A', 'A', 'B', 'B']
    return group, labels
def file2matrix(filename):                       # read data from a text file
    fr = open(filename)
    arrayOLines = fr.readlines()
    numberOfLines = len(arrayOLines)
    returnMat = zeros((numberOfLines, 3))        # feature matrix: one row per sample, three features
    classLabelVector = []
    index = 0
    for line in arrayOLines:
        line = line.strip()                      # strip the trailing newline
        listFromLine = line.split('\t')          # split the line into a list of fields
        returnMat[index, :] = listFromLine[0:3]
        classLabelVector.append(int(listFromLine[-1]))
        index += 1
    return returnMat, classLabelVector
def autoNorm(dataSet):                           # normalize features so all columns carry equal weight
    minVals = dataSet.min(0)                     # column-wise minimum
    maxVals = dataSet.max(0)                     # column-wise maximum
    ranges = maxVals - minVals
    normDataSet = zeros(shape(dataSet))
    m = dataSet.shape[0]
    normDataSet = dataSet - tile(minVals, (m, 1))
    normDataSet = normDataSet / tile(ranges, (m, 1))   # newValue = (oldValue - min) / (max - min)
    return normDataSet, ranges, minVals
def datingClassTest():                           # classifier test code: compute the error rate
    hoRatio = 0.10                               # hold out 10% of the data for testing
    datingDataMat, datingLabels = file2matrix('datingTestSet.txt')
    normMat, ranges, minVals = autoNorm(datingDataMat)
    m = normMat.shape[0]
    numTestVecs = int(m * hoRatio)
    errorCount = 0.0
    for i in range(numTestVecs):
        classifierResult = classify0(normMat[i, :], normMat[numTestVecs:m, :],
                                     datingLabels[numTestVecs:m], 3)
        print("the classifier came back with: %d, the real answer is: %d"
              % (classifierResult, datingLabels[i]))
        if classifierResult != datingLabels[i]:
            errorCount += 1.0
    print("the total error rate is: %f" % (errorCount / float(numTestVecs)))
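A minimal usage sketch, assuming the functions above are saved as kNN.py and that datingTestSet.txt (tab-separated, three numeric features plus an integer label per line) is in the working directory:

import kNN

datingDataMat, datingLabels = kNN.file2matrix('datingTestSet.txt')
normMat, ranges, minVals = kNN.autoNorm(datingDataMat)
print(normMat[0], ranges, minVals)     # after autoNorm every feature lies in [0, 1]

kNN.datingClassTest()                  # prints each prediction and the overall error rate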