# K-Nearest Neighbors algorithm
#
# Originally published 2015-07-07 19:03:41
from numpy import *
import operator
import matplotlib
import matplotlib.pyplot as plt
from os import listdir

def createDataSet():
    """Return the toy training set: four 2-D points and their class labels."""
    samples = array([
        [1.0, 1.1],
        [1.0, 1.0],
        [0.0, 0.0],
        [0.0, 0.1],
    ])
    sampleLabels = ['A', 'A', 'B', 'B']
    return samples, sampleLabels

# Build the toy dataset at import time (used by interactive examples).
group, labels = createDataSet()
  
def classify0(inX, dataSet, labels, k):
    """Classify inX by majority vote among its k nearest training samples.

    Args:
        inX: 1-D feature vector (array-like) to classify.
        dataSet: 2-D numpy array, one training sample per row.
        labels: sequence of class labels aligned with dataSet's rows.
        k: number of nearest neighbours that vote.

    Returns:
        The label receiving the most votes among the k nearest neighbours
        (Euclidean distance).
    """
    dataSetSize = dataSet.shape[0]
    # Euclidean distance from inX to every training row.
    diffMat = tile(inX, (dataSetSize, 1)) - dataSet
    sqDiffMat = diffMat ** 2
    sqlDistances = sqDiffMat.sum(axis=1)
    distances = sqlDistances ** 0.5
    sortedDistIndicies = distances.argsort()
    # Tally votes of the k closest samples.
    classCount = {}
    for i in range(k):
        voteLabel = labels[sortedDistIndicies[i]]
        classCount[voteLabel] = classCount.get(voteLabel, 0) + 1
    # dict.iteritems() was removed in Python 3; items() works in both 2 and 3.
    sortedClassCount = sorted(classCount.items(),
                              key=operator.itemgetter(1), reverse=True)
    return sortedClassCount[0][0]

def file2matrix(filename):
    """Parse a tab-separated data file into a feature matrix and label list.

    Each line is expected to hold three numeric feature columns followed by
    a label column.

    Args:
        filename: path to the tab-delimited text file.

    Returns:
        (returnMat, classLabelVector): an (n, 3) float array of features and
        a list of n label strings (the last column, kept as read).
    """
    # Read once inside a context manager: the original opened the file twice
    # (once just to count lines) and never closed either handle.
    with open(filename) as fr:
        lines = fr.readlines()
    returnMat = zeros((len(lines), 3))
    classLabelVector = []
    for index, line in enumerate(lines):
        listFromLine = line.strip().split('\t')
        returnMat[index, :] = listFromLine[0:3]
        classLabelVector.append(listFromLine[-1])
    return returnMat, classLabelVector
    
# Loaded at import time; requires 'datingTestSet.txt' in the working
# directory and raises IOError/FileNotFoundError when it is missing.
datingDataMat, datingLabels = file2matrix('datingTestSet.txt')

def autoNorm(dataSet):
    """Min-max normalize each column of dataSet to the range [0, 1].

    Args:
        dataSet: 2-D numpy array, one sample per row.

    Returns:
        (normDataSet, ranges, minVals): the normalized array and the
        per-column range and minimum (needed to normalize new queries).

    NOTE(review): a column with zero range still divides by zero here,
    exactly as the original did — confirm the data has no constant columns.
    """
    minVals = dataSet.min(0)
    maxVals = dataSet.max(0)
    ranges = maxVals - minVals
    # Broadcasting replaces the original tile() copies (and a dead zeros()
    # allocation that was immediately overwritten); results are identical.
    normDataSet = (dataSet - minVals) / ranges
    return normDataSet, ranges, minVals
    

def datingClassTest():
    """Hold-out evaluation of classify0 on the dating data set.

    Holds out the first 10% of rows as queries against the remaining 90%,
    printing each prediction and the overall error rate. Reads
    'datingTestSet.txt' from the working directory.
    """
    hoRatio = 0.10  # fraction of rows held out for testing
    datingDataMat, datingLabels = file2matrix('datingTestSet.txt')
    normMat, ranges, minVals = autoNorm(datingDataMat)
    m = normMat.shape[0]
    numTestVecs = int(m * hoRatio)
    errCount = 0.0
    for i in range(numTestVecs):
        # Python 2 print statements are syntax errors in Python 3;
        # print() calls below work under both versions.
        classifierResult = classify0(normMat[i, :], normMat[numTestVecs:m, :],
                                     datingLabels[numTestVecs:m], 4)
        print('the classifier came back with: %s, the real answer is: %s'
              % (classifierResult, datingLabels[i]))
        if classifierResult != datingLabels[i]:
            errCount += 1.0
    print("the total error rate is: %f" % (errCount / float(numTestVecs)))
    

def classifyPerson():
    """Interactively classify how much the user would like a person.

    Prompts for three feature values, normalizes them with the training
    data's statistics, and prints the predicted preference level. Reads
    'datingTestSet.txt' from the working directory.
    """
    resultList = ['not at all', 'in small doses', 'in large doses']
    # raw_input() no longer exists in Python 3; also fix the prompt typo
    # ("filter" -> "flier").
    percentTats = float(input("percent of time spent playing video games?"))
    ffMiles = float(input("frequent flier miles earned per year?"))
    iceCream = float(input("liters of ice cream consumed per year?"))

    datingDataMat, datingLabels = file2matrix('datingTestSet.txt')
    normMat, ranges, minVals = autoNorm(datingDataMat)
    inArr = array([percentTats, ffMiles, iceCream])
    classifierResult = classify0((inArr - minVals) / ranges, normMat,
                                 datingLabels, 3)
    # file2matrix returns labels as strings; the original subtracted 1 from
    # a string, which raises TypeError. Convert to int before indexing.
    print("You will probably like this person: ",
          resultList[int(classifierResult) - 1])
    
def img2vector(filename):
    """Read a 32x32 text-digit image file into a (1, 1024) row vector.

    Args:
        filename: path to a file with 32 lines of 32 '0'/'1' characters.

    Returns:
        A (1, 1024) numpy array with one float per pixel, row-major.
    """
    returnVect = zeros((1, 1024))
    # Context manager closes the file; the original leaked the handle.
    with open(filename) as fr:
        for i in range(32):
            lineStr = fr.readline()
            for j in range(32):
                returnVect[0, 32 * i + j] = int(lineStr[j])
    return returnVect
    
def handwritingClassTest():
    """Train-and-test kNN digit recognition over two image directories.

    Trains on every file in 'trainingDigits/' and evaluates on every file in
    'testDigits/'. File names are expected as '<class>_<sample>.txt'; prints
    each prediction plus the final error count and rate.
    """
    hwLabels = []
    trainingFileList = listdir('trainingDigits')
    m = len(trainingFileList)
    trainingMat = zeros((m, 1024))
    for i in range(m):
        fileNameStr = trainingFileList[i]
        # '<class>_<sample>.txt' -> integer class label
        fileStr = fileNameStr.split('.')[0]
        classNumStr = int(fileStr.split('_')[0])
        hwLabels.append(classNumStr)
        trainingMat[i, :] = img2vector('trainingDigits/%s' % fileNameStr)

    testFileList = listdir('testDigits')
    errCount = 0.0
    mTest = len(testFileList)
    for i in range(mTest):
        fileNameStr = testFileList[i]
        fileStr = fileNameStr.split('.')[0]
        classNumStr = fileStr.split('_')[0]
        vectorUnderTest = img2vector('testDigits/%s' % fileNameStr)
        classifierResult = classify0(vectorUnderTest, trainingMat, hwLabels, 3)
        # Python 2 print statements are syntax errors in Python 3;
        # print() calls below work under both versions.
        print("the classifier came back with: %s, the real answer is: %s"
              % (classifierResult, classNumStr))
        if int(classifierResult) != int(classNumStr):
            errCount += 1.0

    print("\n the total number of error is: %d" % (errCount))
    print("\n the total error rate is: %f" % (errCount / float(mTest)))

# --- Scraped web-page residue (related-article links and report form from
# --- the original blog host); preserved below as comments so the module
# --- remains valid Python.
#
# K-NEAREST NEIGHBORS SEARCH
#
#   - 2011-04-27 22:53 / 2KB / download
#
# 1. K-nearest neighbors algorithm (KNN: k-Nearest Neighbors)
#
# 1. K-nearest neighbors algorithm (KNN): basic idea — "birds of a feather
# flock together". There exists a sample data set (the training set) in which
# every sample carries a label; new, unlabeled data is classified by ...
#
# [Machine learning series] k-nearest neighbors (K-nearest neighbors)
#
# C++ and machine-learning algorithms: describing ML algorithms in the
# simplest possible language.
#
# k-Nearest Neighbor algorithm
#
# Neighbor algorithm / KNN decision process: in the figure, which class
# should the green circle be assigned to — the red triangles or the blue
# squares? If K=3 ...
#
# k-nearest-neighbors-from-global-to-local
#
#   - 2017-02-23 19:53 / 531KB / download
#
# OpenCV Machine Learning: K-Nearest Neighbors
#
# Reference: http://docs.opencv.org/2.4/modules/ml/doc/k_nearest_neighbors.html
#
# ML: Scikit-Learn study notes (1) --- Nearest Neighbors overview
#
# Scikit-Learn study notes (1) — Nearest Neighbors overview. Written while
# doing machine-learning homework with Scikit-Learn ...
#   - MebiuW / 2016-04-03 18:42 / 2111
#
# Unsupervised Nearest Neighbors Clustering With Application to
# Hyperspectral Images
#
# A dynamic niching clustering algorithm based on individual-connectedness
# and its application to ...
#
# Machine Learning in Action, part 2 — k-Nearest Neighbors
#
# Building a k-nearest-neighbor classifier.