K-Nearest Neighbors algorithm

Original post · 2015-07-07 19:03:41
from numpy import *
import operator
import matplotlib
import matplotlib.pyplot as plt
from os import listdir

def createDataSet():
    # toy data set: four 2-D points with two class labels
    group = array([[1.0, 1.1], [1.0, 1.0], [0, 0], [0, 0.1]])
    labels = ['A', 'A', 'B', 'B']
    return group, labels

group, labels = createDataSet()
  
def classify0(inX, dataSet, labels, k):
    # classify the input vector inX against the labelled training set dataSet
    dataSetSize = dataSet.shape[0]
    # Euclidean distance from inX to every training sample
    diffMat = tile(inX, (dataSetSize, 1)) - dataSet
    sqDiffMat = diffMat ** 2
    sqDistances = sqDiffMat.sum(axis=1)
    distances = sqDistances ** 0.5
    # indices of the training samples sorted by increasing distance
    sortedDistIndicies = distances.argsort()
    classCount = {}
    # vote: count the labels of the k nearest neighbours
    for i in range(k):
        voteLabel = labels[sortedDistIndicies[i]]
        classCount[voteLabel] = classCount.get(voteLabel, 0) + 1
    # return the label with the most votes
    sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True)
    return sortedClassCount[0][0]
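
As a quick sanity check, the toy data set from createDataSet can be classified directly (a minimal sketch; with these four points, [0, 0] sits among the 'B' samples, so 'B' is the expected vote):

print classify0([0, 0], group, labels, 3)   # expected output: B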

def file2matrix(filename):
    # parse a tab-separated file into a feature matrix and a label list
    fr = open(filename)
    numberOfLines = len(fr.readlines())
    fr.close()
    returnMat = zeros((numberOfLines, 3))
    classLabelVector = []
    fr = open(filename)
    index = 0
    for line in fr.readlines():
        line = line.strip()
        listFromLine = line.split('\t')
        # first three columns are the features, the last column is the label
        returnMat[index, :] = listFromLine[0:3]
        classLabelVector.append(listFromLine[-1])
        index += 1
    fr.close()
    return returnMat, classLabelVector
    
datingDataMat, datingLabels = file2matrix('datingTestSet.txt')
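
If datingTestSet.txt is present in the working directory, the parse can be checked by printing the first few rows and labels (a sketch; the exact values depend on the file contents):

print datingDataMat[0:3, :]
print datingLabels[0:3]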

def autoNorm(dataSet):
    # scale every feature column to the [0, 1] range: (value - min) / (max - min)
    minVals = dataSet.min(0)
    maxVals = dataSet.max(0)
    ranges = maxVals - minVals
    m = dataSet.shape[0]
    normDataSet = dataSet - tile(minVals, (m, 1))
    normDataSet = normDataSet / tile(ranges, (m, 1))
    return normDataSet, ranges, minVals
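
A minimal sketch of normalizing the dating features loaded above; after autoNorm every column should lie in the [0, 1] range, which the column-wise min and max make easy to verify:

normMat, ranges, minVals = autoNorm(datingDataMat)
print normMat.min(0)   # expected to be roughly [0. 0. 0.]
print normMat.max(0)   # expected to be roughly [1. 1. 1.]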
    

def datingClassTest():
    # hold out the first 10% of the dating data as a test set, train on the rest
    hoRatio = 0.10
    datingDataMat, datingLabels = file2matrix('datingTestSet.txt')
    normMat, ranges, minVals = autoNorm(datingDataMat)
    m = normMat.shape[0]
    numTestVecs = int(m * hoRatio)
    errCount = 0.0
    for i in range(numTestVecs):
        classifierResult = classify0(normMat[i, :], normMat[numTestVecs:m, :], datingLabels[numTestVecs:m], 4)
        print 'the classifier came back with: %s, the real answer is: %s' % (classifierResult, datingLabels[i])
        if classifierResult != datingLabels[i]:
            errCount += 1.0
    print "the total error rate is: %f" % (errCount / float(numTestVecs))
    

def classifyPerson():
    # interactively classify a new person from three typed-in feature values
    resultList = ['not at all', 'in small doses', 'in large doses']
    percentTats = float(raw_input("percentage of time spent playing video games? "))
    ffMiles = float(raw_input("frequent flier miles earned per year? "))
    iceCream = float(raw_input("liters of ice cream consumed per year? "))

    datingDataMat, datingLabels = file2matrix('datingTestSet.txt')
    normMat, ranges, minVals = autoNorm(datingDataMat)
    # normalize the new sample with the same ranges/minimums as the training data
    inArr = array([percentTats, ffMiles, iceCream])
    classifierResult = classify0((inArr - minVals) / ranges, normMat, datingLabels, 3)

    # file2matrix returns the labels exactly as they appear in the data file; the
    # original resultList[classifierResult - 1] lookup assumed integer labels 1-3
    # and raises a TypeError on string labels, so fall back to printing the label itself
    try:
        print "You will probably like this person:", resultList[int(classifierResult) - 1]
    except ValueError:
        print "You will probably like this person:", classifierResult
    
def img2vector(filename):
    # flatten a 32x32 text image of 0/1 characters into a 1x1024 row vector
    returnVect = zeros((1, 1024))
    fr = open(filename)
    for i in range(32):
        lineStr = fr.readline()
        for j in range(32):
            returnVect[0, 32 * i + j] = int(lineStr[j])
    fr.close()
    return returnVect
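
A minimal usage sketch for img2vector; the file name below is an assumption about how the digit files are laid out (a test sample of the digit 0), so adjust it to whatever files are actually unpacked:

testVector = img2vector('testDigits/0_13.txt')
print testVector[0, 0:31]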
    
def handwritingClassTest():
    # train on every file in trainingDigits/, then classify every file in testDigits/
    hwLabels = []
    trainingFileList = listdir('trainingDigits')
    m = len(trainingFileList)
    trainingMat = zeros((m, 1024))
    for i in range(m):
        # file names look like "<digit>_<sample>.txt"; the digit before '_' is the label
        fileNameStr = trainingFileList[i]
        fileStr = fileNameStr.split('.')[0]
        classNumStr = int(fileStr.split('_')[0])
        hwLabels.append(classNumStr)
        trainingMat[i, :] = img2vector('trainingDigits/%s' % fileNameStr)

    testFileList = listdir('testDigits')
    errCount = 0.0
    mTest = len(testFileList)
    for i in range(mTest):
        fileNameStr = testFileList[i]
        fileStr = fileNameStr.split('.')[0]
        classNumStr = int(fileStr.split('_')[0])
        vectorUnderTest = img2vector('testDigits/%s' % fileNameStr)
        classifierResult = classify0(vectorUnderTest, trainingMat, hwLabels, 3)
        print "the classifier came back with: %s, the real answer is: %s" % (classifierResult, classNumStr)
        if classifierResult != classNumStr:
            errCount += 1.0

    print "\nthe total number of errors is: %d" % errCount
    print "\nthe total error rate is: %f" % (errCount / float(mTest))
