一、说明
我是在jupyter完成的,然后导出成markdown格式,ipynb文件导出为markdown的命令如下:
jupyter nbconvert --to markdown xxx.ipynb
二、题目
三、实战
3.1 kNN.py
'''
Created on Sep 16, 2010
kNN: k Nearest Neighbors
Input: inX: vector to compare to existing dataset (1xN)
dataSet: size m data set of known vectors (NxM)
labels: data set labels (1xM vector)
k: number of neighbors to use for comparison (should be an odd number)
Output: the most popular class label
@author: pbharrin
'''
from numpy import *
import operator
from os import listdir
# Classifier
def classify0(inX, dataSet, labels, k):
    """Classify inX with a k-nearest-neighbours majority vote.

    :param inX: feature vector to classify (1xN)
    :param dataSet: matrix of known feature vectors (MxN)
    :param labels: class labels, one per row of dataSet
    :param k: number of nearest neighbours that vote (int)
    :return: the label occurring most often among the k nearest rows
    """
    m = dataSet.shape[0]
    # Euclidean distance from inX to every row of the data set.
    deltas = tile(inX, (m, 1)) - dataSet
    dists = ((deltas ** 2).sum(axis=1)) ** 0.5
    nearest = dists.argsort()
    # Tally the labels of the k closest rows.
    votes = {}
    for idx in nearest[:k]:
        lbl = labels[idx]
        votes[lbl] = votes.get(lbl, 0) + 1
    # Highest vote count first; ties resolve the same way as a stable sort.
    ranked = sorted(votes.items(), key=operator.itemgetter(1), reverse=True)
    return ranked[0][0]
# Build the toy data set
def createDataSet():
    """Return a tiny toy data set: four 2-D points and their class labels."""
    points = array([[1.0, 1.1],
                    [1.0, 1.0],
                    [0, 0],
                    [0, 0.1]])
    tags = ['A', 'A', 'B', 'B']
    return points, tags
# File to matrix
def file2matrix(filename):
    """Parse a tab-separated data file into a feature matrix and label list.

    Each line must hold three numeric feature columns followed by an
    integer class label, separated by tabs.

    :param filename: path of the text file to parse
    :return: (returnMat, classLabelVector) where returnMat is an (m, 3)
             float array of features and classLabelVector is a list of
             int labels, one per line
    """
    # Fix: the original opened the file twice and never closed either
    # handle; read it once inside a with-block instead.
    with open(filename) as fr:
        lines = fr.readlines()
    returnMat = zeros((len(lines), 3))  # pre-sized feature matrix
    classLabelVector = []
    for index, line in enumerate(lines):
        listFromLine = line.strip().split('\t')
        returnMat[index, :] = listFromLine[0:3]   # first three columns are features
        classLabelVector.append(int(listFromLine[-1]))  # last column is the label
    return returnMat, classLabelVector
# Normalize data
def autoNorm(dataSet):
    """Min-max normalise each column of dataSet to the [0, 1] range.

    :param dataSet: (m, n) numeric array
    :return: (normDataSet, ranges, minVals) — the normalised array, the
             per-column range (max - min) and the per-column minimum.
             NOTE: a constant column (range 0) produces a divide-by-zero,
             same as the original implementation.
    """
    minVals = dataSet.min(0)   # column-wise minimum
    maxVals = dataSet.max(0)   # column-wise maximum
    ranges = maxVals - minVals
    m = dataSet.shape[0]
    # Fix: dropped the dead `zeros(shape(dataSet))` allocation that was
    # immediately overwritten in the original.
    normDataSet = dataSet - tile(minVals, (m, 1))
    normDataSet = normDataSet / tile(ranges, (m, 1))  # element-wise divide
    return normDataSet, ranges, minVals
# Test the algorithm
def datingClassTest():
    """Hold-out evaluation of classify0 on the dating data set.

    Loads 'datingTestSet2.txt', normalises the features, classifies the
    first hoRatio fraction of rows against the remaining rows, and prints
    the resulting error rate and error count.
    """
    hoRatio = 0.50  # fraction of rows held out for testing (50%, not 10% as the book's comment said)
    datingDataMat, datingLabels = file2matrix('datingTestSet2.txt')  # load data set from file
    normMat, ranges, minVals = autoNorm(datingDataMat)  # scale each feature to [0, 1]
    m = normMat.shape[0]
    numTestVecs = int(m * hoRatio)  # number of leading rows used as test queries
    errorCount = 0.0
    for i in range(numTestVecs):
        # Classify row i against all rows outside the held-out slice, k = 3.
        classifierResult = classify0(normMat[i, :], normMat[numTestVecs:m, :], datingLabels[numTestVecs:m], 3)
        print("the classifier came back with: %d, the real answer is: %d" % (classifierResult, datingLabels[i]))
        if (classifierResult != datingLabels[i]):
            errorCount += 1.0
    print("the total error rate is: %f" % (errorCount / float(numTestVecs)))
    print(errorCount)
# Classify a new sample
def classifyPerson():
    """Interactively classify a new person from three typed-in features.

    Prompts for the three dating features, loads and normalises the
    reference data set, and prints the predicted attractiveness class.
    """
    resultList = ['not at all', 'in small doses', 'in large doses']
    percentTats = float(input('percentage of time spent playing video games?'))
    ffMiles = float(input("frequent fliter miles earned per year?"))
    icecream = float(input("liters of ice cream consumed per year?"))
    datingDataMat, datingLabels = file2matrix('datingTestSet2.txt')
    normMat, ranges, minVals = autoNorm(datingDataMat)
    # Feature order must match the file columns: miles, game %, ice cream.
    inArr = array([ffMiles, percentTats, icecream])
    # Bug fix: the query vector must be fully normalised (divide by ranges),
    # otherwise it is on a different scale than normMat and the vote is wrong.
    classifierResult = classify0((inArr - minVals) / ranges, normMat, datingLabels, 3)
    print('you will probably like this person:')
    # Labels are 1-based (1..3); shift to index resultList.
    print(resultList[classifierResult - 1])
# Image to vector
def img2vector(filename):
    """Read a 32x32 text image of '0'/'1' characters into a 1x1024 row vector.

    :param filename: path of a digit file with 32 lines of 32 characters
    :return: (1, 1024) float array whose entries are the parsed digits,
             row-major (row i, column j lands at index 32*i + j)
    """
    returnVect = zeros((1, 1024))
    # Fix: the original never closed the file handle; use a with-block.
    with open(filename) as fr:
        for i in range(32):
            lineStr = fr.readline()
            for j in range(32):
                returnVect[0, 32 * i + j] = int(lineStr[j])
    return returnVect
# Handwritten digits
def handwritingClassTest():
    """Evaluate the kNN classifier on the handwritten-digit image files.

    Builds the training matrix from every file in 'trainingDigits/',
    then classifies every file in 'testDigits/' with k = 3 and prints
    each prediction plus the final error count and error rate.
    """
    trainLabels = []
    trainFiles = listdir('trainingDigits')  # load the training set
    nTrain = len(trainFiles)
    trainMat = zeros((nTrain, 1024))
    for row, fname in enumerate(trainFiles):
        # File names look like "<digit>_<sample>.txt"; the digit is the label.
        stem = fname.split('.')[0]
        trainLabels.append(int(stem.split('_')[0]))
        trainMat[row, :] = img2vector('trainingDigits/%s' % fname)
    testFiles = listdir('testDigits')  # iterate through the test set
    errorCount = 0.0
    for fname in testFiles:
        stem = fname.split('.')[0]
        expected = int(stem.split('_')[0])
        probe = img2vector('testDigits/%s' % fname)
        classifierResult = classify0(probe, trainMat, trainLabels, 3)
        print("the classifier came back with: %d, the real answer is: %d" % (classifierResult, expected))
        if (classifierResult != expected):
            errorCount += 1.0
    print("\nthe total number of errors is: %d" % errorCount)
    print("\nthe total error rate is: %f" % (errorCount / float(len(testFiles))))
if __name__ == '__main__':
    handwritingClassTest()
3.2 正式实践
开始先简单练练手
输出散点图如下
四、源代码
4.1 数据部分
40920 8.326976 0.953952 3
14488 7.153469 1.673904 2
26052 1.441871 0.805124 1
75136 13.147394 0.428964 1
38344 1.669788 0.134296 1
72993 10.141740 1.032955 1
35948 6.830792 1.213192 3
42666 13.276369 0.543880 3
67497 8.631577 0.749278 1
35483 12.273169 1.508053 3
50242 3.723498 0.831917 1
63275 8.385879 1.669485 1
5569 4.875435 0.728658 2
51052 4.680098 0.625224 1
77372 15.299570 0.331351 1
43673 1.889461 0.191283 1
61364 7.516754 1.269164 1
69673 14.239195 0.261333 1
15669 0.000000 1.250185 2
28488 10.528555 1.304844 3
6487 3.540265 0.822483 2
37708 2.991551 0.833920 1
22620 5.297865 0.638306 2
28782 6.593803 0.187108 3
19739 2.816760 1.686209 2
36788 12.458258 0.649617 3
5741 0.000000 1.656418 2
28567 9.968648 0.731232 3
6808 1.364838 0.640103 2
41611 0.230453 1.151996 1
36661 11.865402 0.882810 3
43605 0.120460 1.352013 1
15360 8.545204 1.340429 3
63796 5.856649 0.160006 1
10743 9.665618 0.778626 2
70808 9.778763 1.084103 1
72011 4.932976 0.632026 1
5914 2.216246 0.587095 2
14851 14.305636 0.632317 3
33553 12.591889 0.686581 3
44952 3.424649 1.004504 1
17934 0.000000 0.147573 2
27738 8.533823 0.205324 3
29290 9.829528 0.238620 3
42330 11.492186 0.263499 3
36429 3.570968 0.832254 1
39623 1.771228 0.207612 1
32404 3.513921 0.991854 1
27268 4.398172 0.975024 1
5477 4.276823 1.174874 2
14254 5.946014 1.614244 2
68613 13.798970 0.724375 1
41539 10.393591 1.663724 3
7917 3.007577 0.297302 2
21331 1.031938 0.486174 2
8338 4.751212 0.064693 2
5176 3.692269 1.655113 2
18983 10.448091 0.267652 3
68837 10.585786 0.329557 1
13438 1.604501 0.069064 2
48849 3.679497 0.961466 1
12285 3.795146 0.696694 2
7826 2.531885 1.659173 2
5565 9.733340 0.977746 2
10346 6.093067 1.413798 2
1823 7.712960 1.054927 2
9744 11.470364 0.760461 3
16857 2.886529 0.934416 2
39336 10.054373 1.138351 3
65230 9.972470 0.881876 1
2463 2.335785 1.366145 2
27353 11.375155 1.528626 3
16191 0.000000 0.605619 2
12258 4.126787 0.357501 2
42377 6.319522 1.058602 1
25607 8.680527 0.086955 3
77450 14.856391 1.129823 1
58732 2.454285 0.222380 1
46426 7.292202 0.548607 3
32688 8.745137 0.857348 3
64890 8.579001 0.683048 1
8554 2.507302 0.869177 2
28861 11.415476 1.505466 3
42050 4.838540 1.680892 1
32193 10.339507 0.583646 3
64895 6.573742 1.151433 1
2355 6.539397 0.462065 2
0 2.209159 0.723567 2
70406 11.196378 0.836326 1
57399 4.229595 0.128253 1
41732 9.505944 0.005273 3
11429 8.652725 1.348934 3
75270 17.101108 0.490712 1
5459 7.871839 0.717662 2
73520 8.262131 1.361646 1
40279 9.015635 1.658555 3
21540 9.215351 0.806762 3
17694 6.375007 0.033678 2
22329 2.262014 1.022169 1
46570 5.677110 0.709469 1
42403 11.293017 0.207976 3
33654 6.590043 1.353117 1
9171 4.711960 0.194167 2
28122 8.768099 1.108041 3
34095 11.502519 0.545097 3
1774 4.682812 0.578112 2
40131 12.446578 0.300754 3
13994 12.908384 1.657722 3
77064 12.601108 0.974527 1
11210 3.929456 0.025466 2
6122 9.751503 1.182050 3
15341 3.043767 0.888168 2
44373 4.391522 0.807100 1
28454 11.695276 0.679015 3
63771 7.879742 0.154263 1
9217 5.613163 0.933632 2
69076 9.140172 0.851300 1
24489 4.258644 0.206892 1
16871 6.799831 1.221171 2
39776 8.752758 0.484418 3
5901 1.123033 1.180352 2
40987 10.833248 1.585426 3
7479 3.051618 0.026781 2
38768 5.308409 0.030683 3
4933 1.841792 0.028099 2
32311 2.261978 1.605603 1
26501 11.573696 1.061347 3
37433 8.038764 1.083910 3
23503 10.734007 0.103715 3
68607 9.661909 0.350772 1
27742 9.005850 0.548737 3
11303 0.000000 0.539131 2
0 5.757140 1.062373 2
32729 9.164656 1.624565 3
24619 1.318340 1.436243 1
42414 14.075597 0.695934 3
20210 10.107550 1.308398 3
33225 7.960293 1.219760 3
54483 6.317292 0.018209 1
18475 12.664194 0.595653 3
33926 2.906644 0.581657 1
43865 2.388241 0.913938 1
26547 6.024471 0.486215 3
44404 7.226764 1.255329 3
16674 4.183997 1.275290 2
8123 11.850211 1.096981 3
42747 11.661797 1.167935 3
56054 3.574967 0.494666 1
10933 0.000000 0.107475 2
18121 7.937657 0.904799 3
11272 3.365027 1.014085 2
16297 0.000000 0.367491 2
28168 13.860672 1.293270 3
40963 10.306714 1.211594 3
31685 7.228002 0.670670 3
55164 4.508740 1.036192 1
17595 0.366328 0.163652 2
1862 3.299444 0.575152 2
57087 0.573287 0.607915 1
63082 9.183738 0.012280 1
51213 7.842646 1.060636 3
6487 4.750964 0.558240 2
4805 11.438702 1.556334 3
30302 8.243063 1.122768 3
68680 7.949017 0.271865 1
17591 7.875477 0.227085 2
74391 9.569087 0.364856 1
37217 7.750103 0.869094 3
42814 0.000000 1.515293 1
14738 3.396030 0.633977 2
19896 11.916091 0.025294 3
14673 0.460758 0.689586 2
32011 13.087566 0.476002 3
58736 4.589016 1.672600 1
54744 8.397217 1.534103 1
29482 5.562772 1.689388 1
27698 10.905159 0.619091 3
11443 1.311441 1.169887 2
56117 10.647170 0.980141 3
39514 0.000000 0.481918 1
26627 8.503025 0.830861 3
16525 0.436880 1.395314 2
24368 6.127867 1.102179 1
22160 12.112492 0.359680 3
6030 1.264968 1.141582 2
6468 6.067568 1.327047 2
22945 8.010964 1.681648 3
18520 3.791084 0.304072 2
34914 11.773195 1.262621 3
6121 8.339588 1.443357 2
38063 2.563092 1.464013 1
23410 5.954216 0.953782 1
35073 9.288374 0.767318 3
52914 3.976796 1.043109 1
16801 8.585227 1.455708 3
9533 1.271946 0.796506 2
16721 0.000000 0.242778 2
5832 0.000000 0.089749 2
44591 11.521298 0.300860 3
10143 1.139447 0.415373 2
21609 5.699090 1.391892 2
23817 2.449378 1.322560 1
15640 0.000000 1.228380 2
8847 3.168365 0.053993 2
50939 10.428610 1.126257 3
28521 2.943070 1.446816 1
32901 10.441348 0.975283 3
42850 12.478764 1.628726 3
13499 5.856902 0.363883 2
40345 2.476420 0.096075 1
43547 1.826637 0.811457 1
70758 4.324451 0.328235 1
19780 1.376085 1.178359 2
44484 5.342462 0.394527 1
54462 11.835521 0.693301 3
20085 12.423687 1.424264 3
42291 12.161273 0.071131 3
47550 8.148360 1.649194 3
11938 1.531067 1.549756 2
40699 3.200912 0.309679 1
70908 8.862691 0.530506 1
73989 6.370551 0.369350 1
11872 2.468841 0.145060 2
48463 11.054212 0.141508 3
15987 2.037080 0.715243 2
70036 13.364030 0.549972 1
32967 10.249135 0.192735 3
63249 10.464252 1.669767 1
42795 9.424574 0.013725 3
14459 4.458902 0.268444 2
19973 0.000000 0.575976 2
5494 9.686082 1.029808 3
67902 13.649402 1.052618 1
25621 13.181148 0.273014 3
27545 3.877472 0.401600 1
58656 1.413952 0.451380 1
7327 4.248986 1.430249 2
64555 8.779183 0.845947 1
8998 4.156252 0.097109 2
11752 5.580018 0.158401 2
76319 15.040440 1.366898 1
27665 12.793870 1.307323 3
67417 3.254877 0.669546 1
21808 10.725607 0.588588 3
15326 8.256473 0.765891 2
20057 8.033892 1.618562 3
79341 10.702532 0.204792 1
15636 5.062996 1.132555 2
35602 10.772286 0.668721 3
28544 1.892354 0.837028 1
57663 1.019966 0.372320 1
78727 15.546043 0.729742 1
68255 11.638205 0.409125 1
14964 3.427886 0.975616 2
21835 11.246174 1.475586 3
7487 0.000000 0.645045 2
8700 0.000000 1.424017 2
26226 8.242553 0.279069 3
65899 8.700060 0.101807 1
6543 0.812344 0.260334 2
46556 2.448235 1.176829 1
71038 13.230078 0.616147 1
47657 0.236133 0.340840 1
19600 11.155826 0.335131 3
37422 11.029636 0.505769 3
1363 2.901181 1.646633 2
26535 3.924594 1.143120 1
47707 2.524806 1.292848 1
38055 3.527474 1.449158 1
6286 3.384281 0.889268 2
10747 0.000000 1.107592 2
44883 11.898890 0.406441 3
56823 3.529892 1.375844 1
68086 11.442677 0.696919 1
70242 10.308145 0.422722 1
11409 8.540529 0.727373 2
67671 7.156949 1.691682 1
61238 0.720675 0.847574 1
17774 0.229405 1.038603 2
53376 3.399331 0.077501 1
30930 6.157239 0.580133 1
28987 1.239698 0.719989 1
13655 6.036854 0.016548 2
7227 5.258665 0.933722 2
40409 12.393001 1.571281 3
13605 9.627613 0.935842 2
26400 11.130453 0.597610 3
13491 8.842595 0.349768 3
30232 10.690010 1.456595 3
43253 5.714718 1.674780 3
55536 3.052505 1.335804 1
8807 0.000000 0.059025 2
25783 9.945307 1.287952 3
22812 2.719723 1.142148 1
77826 11.154055 1.608486 1
38172 2.687918 0.660836 1
31676 10.037847 0.962245 3
74038 12.404762 1.112080 1
44738 10.237305 0.633422 3
17410 4.745392 0.662520 2
5688 4.639461 1.569431 2
36642 3.149310 0.639669 1
29956 13.406875 1.639194 3
60350 6.068668 0.881241 1
23758 9.477022 0.899002 3
25780 3.897620 0.560201 2
11342 5.463615 1.203677 2
36109 3.369267 1.575043 1
14292 5.234562 0.825954 2
11160 0.000000 0.722170 2
23762 12.979069 0.504068 3
39567 5.376564 0.557476 1
25647 13.527910 1.586732 3
14814 2.196889 0.784587 2
73590 10.691748 0.007509 1
35187 1.659242 0.447066 1
49459 8.369667 0.656697 3
31657 13.157197 0.143248 3
6259 8.199667 0.908508 2
33101 4.441669 0.439381 3
27107 9.846492 0.644523 3
17824 0.019540 0.977949 2
43536 8.253774 0.748700 3
67705 6.038620 1.509646 1
35283 6.091587 1.694641 3
71308 8.986820 1.225165 1
31054 11.508473 1.624296 3
52387 8.807734 0.713922 3
40328 0.000000 0.816676 1
34844 8.889202 1.665414 3
11607 3.178117 0.542752 2
64306 7.013795 0.139909 1
32721 9.605014 0.065254 3
33170 1.230540 1.331674 1
37192 10.412811 0.890803 3
13089 0.000000 0.567161 2
66491 9.699991 0.122011 1
15941 0.000000 0.061191 2
4272 4.455293 0.272135 2
48812 3.020977 1.502803 1
28818 8.099278 0.216317 3
35394 1.157764 1.603217 1
71791 10.105396 0.121067 1
40668 11.230148 0.408603 3
39580 9.070058 0.011379 3
11786 0.566460 0.478837 2
19251 0.000000 0.487300 2
56594 8.956369 1.193484 3
54495 1.523057 0.620528 1
11844 2.749006 0.169855 2
45465 9.235393 0.188350 3
31033 10.555573 0.403927 3
16633 6.956372 1.519308 2
13887 0.636281 1.273984 2
52603 3.574737 0.075163 1
72000 9.032486 1.461809 1
68497 5.958993 0.023012 1
35135 2.435300 1.211744 1
26397 10.539731 1.638248 3
7313 7.646702 0.056513 2
91273 20.919349 0.644571 1
24743 1.424726 0.838447 1
31690 6.748663 0.890223 3
15432 2.289167 0.114881 2
58394 5.548377 0.402238 1
33962 6.057227 0.432666 1
31442 10.828595 0.559955 3
31044 11.318160 0.271094 3
29938 13.265311 0.633903 3
9875 0.000000 1.496715 2
51542 6.517133 0.402519 3
11878 4.934374 1.520028 2
69241 10.151738 0.896433 1
37776 2.425781 1.559467 1
68997 9.778962 1.195498 1
67416 12.219950 0.657677 1
59225 7.394151 0.954434 1
29138 8.518535 0.742546 3
5962 2.798700 0.662632 2
10847 0.637930 0.617373 2
70527 10.750490 0.097415 1
9610 0.625382 0.140969 2
64734 10.027968 0.282787 1
25941 9.817347 0.364197 3
2763 0.646828 1.266069 2
55601 3.347111 0.914294 1
31128 11.816892 0.193798 3
5181 0.000000 1.480198 2
69982 10.945666 0.993219 1
52440 10.244706 0.280539 3
57350 2.579801 1.149172 1
57869 2.630410 0.098869 1
56557 11.746200 1.695517 3
42342 8.104232 1.326277 3
15560 12.409743 0.790295 3
34826 12.167844 1.328086 3
8569 3.198408 0.299287 2
77623 16.055513 0.541052 1
78184 7.138659 0.158481 1
7036 4.831041 0.761419 2
69616 10.082890 1.373611 1
21546 10.066867 0.788470 3
36715 8.129538 0.329913 3
20522 3.012463 1.138108 2
42349 3.720391 0.845974 1
9037 0.773493 1.148256 2
26728 10.962941 1.037324 3
587 0.177621 0.162614 2
48915 3.085853 0.967899 1
9824 8.426781 0.202558 2
4135 1.825927 1.128347 2
9666 2.185155 1.010173 2
59333 7.184595 1.261338 1
36198 0.000000 0.116525 1
34909 8.901752 1.033527 3
47516 2.451497 1.358795 1
55807 3.213631 0.432044 1
14036 3.974739 0.723929 2
42856 9.601306 0.619232 3
64007 8.363897 0.445341 1
59428 6.381484 1.365019 1
13730 0.000000 1.403914 2
41740 9.609836 1.438105 3
63546 9.904741 0.985862 1
30417 7.185807 1.489102 3
69636 5.466703 1.216571 1
64660 0.000000 0.915898 1
14883 4.575443 0.535671 2
7965 3.277076 1.010868 2
68620 10.246623 1.239634 1
8738 2.341735 1.060235 2
7544 3.201046 0.498843 2
6377 6.066013 0.120927 2
36842 8.829379 0.895657 3
81046 15.833048 1.568245 1
67736 13.516711 1.220153 1
32492 0.664284 1.116755 1
39299 6.325139 0.605109 3
77289 8.677499 0.344373 1
33835 8.188005 0.964896 3
71890 9.414263 0.384030 1
32054 9.196547 1.138253 3
38579 10.202968 0.452363 3
55984 2.119439 1.481661 1
72694 13.635078 0.858314 1
42299 0.083443 0.701669 1
26635 9.149096 1.051446 3
8579 1.933803 1.374388 2
37302 14.115544 0.676198 3
22878 8.933736 0.943352 3
4364 2.661254 0.946117 2
4985 0.988432 1.305027 2
37068 2.063741 1.125946 1
41137 2.220590 0.690754 1
67759 6.424849 0.806641 1
11831 1.156153 1.613674 2
34502 3.032720 0.601847 1
4088 3.076828 0.952089 2
15199 0.000000 0.318105 2
17309 7.750480 0.554015 3
42816 10.958135 1.482500 3
43751 10.222018 0.488678 3
58335 2.367988 0.435741 1
75039 7.686054 1.381455 1
42878 11.464879 1.481589 3
42770 11.075735 0.089726 3
8848 3.543989 0.345853 2
31340 8.123889 1.282880 3
41413 4.331769 0.754467 3
12731 0.120865 1.211961 2
22447 6.116109 0.701523 3
33564 7.474534 0.505790 3
48907 8.819454 0.649292 3
8762 6.802144 0.615284 2
46696 12.666325 0.931960 3
36851 8.636180 0.399333 3
67639 11.730991 1.289833 1
171 8.132449 0.039062 2
26674 10.296589 1.496144 3
8739 7.583906 1.005764 2
66668 9.777806 0.496377 1
68732 8.833546 0.513876 1
69995 4.907899 1.518036 1
82008 8.362736 1.285939 1
25054 9.084726 1.606312 3
33085 14.164141 0.560970 3
41379 9.080683 0.989920 3
39417 6.522767 0.038548 3
12556 3.690342 0.462281 2
39432 3.563706 0.242019 1
38010 1.065870 1.141569 1
69306 6.683796 1.456317 1
38000 1.712874 0.243945 1
46321 13.109929 1.280111 3
66293 11.327910 0.780977 1
22730 4.545711 1.233254 1
5952 3.367889 0.468104 2
72308 8.326224 0.567347 1
60338 8.978339 1.442034 1
13301 5.655826 1.582159 2
27884 8.855312 0.570684 3
11188 6.649568 0.544233 2
56796 3.966325 0.850410 1
8571 1.924045 1.664782 2
4914 6.004812 0.280369 2
10784 0.000000 0.375849 2
39296 9.923018 0.092192 3
13113 2.389084 0.119284 2
70204 13.663189 0.133251 1
46813 11.434976 0.321216 3
11697 0.358270 1.292858 2
44183 9.598873 0.223524 3
2225 6.375275 0.608040 2
29066 11.580532 0.458401 3
4245 5.319324 1.598070 2
34379 4.324031 1.603481 1
44441 2.358370 1.273204 1
2022 0.000000 1.182708 2
26866 12.824376 0.890411 3
57070 1.587247 1.456982 1
32932 8.510324 1.520683 3
51967 10.428884 1.187734 3
44432 8.346618 0.042318 3
67066 7.541444 0.809226 1
17262 2.540946 1.583286 2
79728 9.473047 0.692513 1
14259 0.352284 0.474080 2
6122 0.000000 0.589826 2
76879 12.405171 0.567201 1
11426 4.126775 0.871452 2
2493 0.034087 0.335848 2
19910 1.177634 0.075106 2
10939 0.000000 0.479996 2
17716 0.994909 0.611135 2
31390 11.053664 1.180117 3
20375 0.000000 1.679729 2
26309 2.495011 1.459589 1
33484 11.516831 0.001156 3
45944 9.213215 0.797743 3
4249 5.332865 0.109288 2
6089 0.000000 1.689771 2
7513 0.000000 1.126053 2
27862 12.640062 1.690903 3
39038 2.693142 1.317518 1
19218 3.328969 0.268271 2
62911 7.193166 1.117456 1
77758 6.615512 1.521012 1
27940 8.000567 0.835341 3
2194 4.017541 0.512104 2
37072 13.245859 0.927465 3
15585 5.970616 0.813624 2
25577 11.668719 0.886902 3
8777 4.283237 1.272728 2
29016 10.742963 0.971401 3
21910 12.326672 1.592608 3
12916 0.000000 0.344622 2
10976 0.000000 0.922846 2
79065 10.602095 0.573686 1
36759 10.861859 1.155054 3
50011 1.229094 1.638690 1
1155 0.410392 1.313401 2
71600 14.552711 0.616162 1
30817 14.178043 0.616313 3
54559 14.136260 0.362388 1
29764 0.093534 1.207194 1
69100 10.929021 0.403110 1
47324 11.432919 0.825959 3
73199 9.134527 0.586846 1
44461 5.071432 1.421420 1
45617 11.460254 1.541749 3
28221 11.620039 1.103553 3
7091 4.022079 0.207307 2
6110 3.057842 1.631262 2
79016 7.782169 0.404385 1
18289 7.981741 0.929789 3
43679 4.601363 0.268326 1
22075 2.595564 1.115375 1
23535 10.049077 0.391045 3
25301 3.265444 1.572970 2
32256 11.780282 1.511014 3
36951 3.075975 0.286284 1
31290 1.795307 0.194343 1
38953 11.106979 0.202415 3
35257 5.994413 0.800021 1
25847 9.706062 1.012182 3
32680 10.582992 0.836025 3
62018 7.038266 1.458979 1
9074 0.023771 0.015314 2
33004 12.823982 0.676371 3
44588 3.617770 0.493483 1
32565 8.346684 0.253317 3
38563 6.104317 0.099207 1
75668 16.207776 0.584973 1
9069 6.401969 1.691873 2
53395 2.298696 0.559757 1
28631 7.661515 0.055981 3
71036 6.353608 1.645301 1
71142 10.442780 0.335870 1
37653 3.834509 1.346121 1
76839 10.998587 0.584555 1
9916 2.695935 1.512111 2
38889 3.356646 0.324230 1
39075 14.677836 0.793183 3
48071 1.551934 0.130902 1
7275 2.464739 0.223502 2
41804 1.533216 1.007481 1
35665 12.473921 0.162910 3
67956 6.491596 0.032576 1
41892 10.506276 1.510747 3
38844 4.380388 0.748506 1
74197 13.670988 1.687944 1
14201 8.317599 0.390409 2
3908 0.000000 0.556245 2
2459 0.000000 0.290218 2
32027 10.095799 1.188148 3
12870 0.860695 1.482632 2
9880 1.557564 0.711278 2
72784 10.072779 0.756030 1
17521 0.000000 0.431468 2
50283 7.140817 0.883813 3
33536 11.384548 1.438307 3
9452 3.214568 1.083536 2
37457 11.720655 0.301636 3
17724 6.374475 1.475925 3
43869 5.749684 0.198875 3
264 3.871808 0.552602 2
25736 8.336309 0.636238 3
39584 9.710442 1.503735 3
31246 1.532611 1.433898 1
49567 9.785785 0.984614 3
7052 2.633627 1.097866 2
35493 9.238935 0.494701 3
10986 1.205656 1.398803 2
49508 3.124909 1.670121 1
5734 7.935489 1.585044 2
65479 12.746636 1.560352 1
77268 10.732563 0.545321 1
28490 3.977403 0.766103 1
13546 4.194426 0.450663 2
37166 9.610286 0.142912 3
16381 4.797555 1.260455 2
10848 1.615279 0.093002 2
35405 4.614771 1.027105 1
15917 0.000000 1.369726 2
6131 0.608457 0.512220 2
67432 6.558239 0.667579 1
30354 12.315116 0.197068 3
69696 7.014973 1.494616 1
33481 8.822304 1.194177 3
43075 10.086796 0.570455 3
38343 7.241614 1.661627 3
14318 4.602395 1.511768 2
5367 7.434921 0.079792 2
37894 10.467570 1.595418 3
36172 9.948127 0.003663 3
40123 2.478529 1.568987 1
10976 5.938545 0.878540 2
12705 0.000000 0.948004 2
12495 5.559181 1.357926 2
35681 9.776654 0.535966 3
46202 3.092056 0.490906 1
11505 0.000000 1.623311 2
22834 4.459495 0.538867 1
49901 8.334306 1.646600 3
71932 11.226654 0.384686 1
13279 3.904737 1.597294 2
49112 7.038205 1.211329 3
77129 9.836120 1.054340 1
37447 1.990976 0.378081 1
62397 9.005302 0.485385 1
0 1.772510 1.039873 2
15476 0.458674 0.819560 2
40625 10.003919 0.231658 3
36706 0.520807 1.476008 1
28580 10.678214 1.431837 3
25862 4.425992 1.363842 1
63488 12.035355 0.831222 1
33944 10.606732 1.253858 3
30099 1.568653 0.684264 1
13725 2.545434 0.024271 2
36768 10.264062 0.982593 3
64656 9.866276 0.685218 1
14927 0.142704 0.057455 2
43231 9.853270 1.521432 3
66087 6.596604 1.653574 1
19806 2.602287 1.321481 2
41081 10.411776 0.664168 3
10277 7.083449 0.622589 2
7014 2.080068 1.254441 2
17275 0.522844 1.622458 2
31600 10.362000 1.544827 3
59956 3.412967 1.035410 1
42181 6.796548 1.112153 3
51743 4.092035 0.075804 1
5194 2.763811 1.564325 2
30832 12.547439 1.402443 3
7976 5.708052 1.596152 2
14602 4.558025 0.375806 2
41571 11.642307 0.438553 3
55028 3.222443 0.121399 1
5837 4.736156 0.029871 2
39808 10.839526 0.836323 3
20944 4.194791 0.235483 2
22146 14.936259 0.888582 3
42169 3.310699 1.521855 1
7010 2.971931 0.034321 2
3807 9.261667 0.537807 2
29241 7.791833 1.111416 3
52696 1.480470 1.028750 1
42545 3.677287 0.244167 1
24437 2.202967 1.370399 1
16037 5.796735 0.935893 2
8493 3.063333 0.144089 2
68080 11.233094 0.492487 1
59016 1.965570 0.005697 1
11810 8.616719 0.137419 2
68630 6.609989 1.083505 1
7629 1.712639 1.086297 2
71992 10.117445 1.299319 1
13398 0.000000 1.104178 2
26241 9.824777 1.346821 3
11160 1.653089 0.980949 2
76701 18.178822 1.473671 1
32174 6.781126 0.885340 3
45043 8.206750 1.549223 3
42173 10.081853 1.376745 3
69801 6.288742 0.112799 1
41737 3.695937 1.543589 1
46979 6.726151 1.069380 3
79267 12.969999 1.568223 1
4615 2.661390 1.531933 2
32907 7.072764 1.117386 3
37444 9.123366 1.318988 3
569 3.743946 1.039546 2
8723 2.341300 0.219361 2
6024 0.541913 0.592348 2
52252 2.310828 1.436753 1
8358 6.226597 1.427316 2
26166 7.277876 0.489252 3
18471 0.000000 0.389459 2
3386 7.218221 1.098828 2
41544 8.777129 1.111464 3
10480 2.813428 0.819419 2
5894 2.268766 1.412130 2
7273 6.283627 0.571292 2
22272 7.520081 1.626868 3
31369 11.739225 0.027138 3
10708 3.746883 0.877350 2
69364 12.089835 0.521631 1
37760 12.310404 0.259339 3
13004 0.000000 0.671355 2
37885 2.728800 0.331502 1
52555 10.814342 0.607652 3
38997 12.170268 0.844205 3
69698 6.698371 0.240084 1
11783 3.632672 1.643479 2
47636 10.059991 0.892361 3
15744 1.887674 0.756162 2
69058 8.229125 0.195886 1
33057 7.817082 0.476102 3
28681 12.277230 0.076805 3
34042 10.055337 1.115778 3
29928 3.596002 1.485952 1
9734 2.755530 1.420655 2
7344 7.780991 0.513048 2
7387 0.093705 0.391834 2
33957 8.481567 0.520078 3
9936 3.865584 0.110062 2
36094 9.683709 0.779984 3
39835 10.617255 1.359970 3
64486 7.203216 1.624762 1
0 7.601414 1.215605 2
39539 1.386107 1.417070 1
66972 9.129253 0.594089 1
15029 1.363447 0.620841 2
44909 3.181399 0.359329 1
38183 13.365414 0.217011 3
37372 4.207717 1.289767 1
0 4.088395 0.870075 2
17786 3.327371 1.142505 2
39055 1.303323 1.235650 1
37045 7.999279 1.581763 3
6435 2.217488 0.864536 2
72265 7.751808 0.192451 1
28152 14.149305 1.591532 3
25931 8.765721 0.152808 3
7538 3.408996 0.184896 2
1315 1.251021 0.112340 2
12292 6.160619 1.537165 2
49248 1.034538 1.585162 1
9025 0.000000 1.034635 2
13438 2.355051 0.542603 2
69683 6.614543 0.153771 1
25374 10.245062 1.450903 3
55264 3.467074 1.231019 1
38324 7.487678 1.572293 3
69643 4.624115 1.185192 1
44058 8.995957 1.436479 3
41316 11.564476 0.007195 3
29119 3.440948 0.078331 1
51656 1.673603 0.732746 1
3030 4.719341 0.699755 2
35695 10.304798 1.576488 3
1537 2.086915 1.199312 2
9083 6.338220 1.131305 2
47744 8.254926 0.710694 3
71372 16.067108 0.974142 1
37980 1.723201 0.310488 1
42385 3.785045 0.876904 1
22687 2.557561 0.123738 1
39512 9.852220 1.095171 3
11885 3.679147 1.557205 2
4944 9.789681 0.852971 2
73230 14.958998 0.526707 1
17585 11.182148 1.288459 3
68737 7.528533 1.657487 1
13818 5.253802 1.378603 2
31662 13.946752 1.426657 3
86686 15.557263 1.430029 1
43214 12.483550 0.688513 3
24091 2.317302 1.411137 1
52544 10.069724 0.766119 3
61861 5.792231 1.615483 1
47903 4.138435 0.475994 1
37190 12.929517 0.304378 3
6013 9.378238 0.307392 2
27223 8.361362 1.643204 3
69027 7.939406 1.325042 1
78642 10.735384 0.705788 1
30254 11.592723 0.286188 3
21704 10.098356 0.704748 3
34985 9.299025 0.545337 3
31316 11.158297 0.218067 3
76368 16.143900 0.558388 1
27953 10.971700 1.221787 3
152 0.000000 0.681478 2
9146 3.178961 1.292692 2
75346 17.625350 0.339926 1
26376 1.995833 0.267826 1
35255 10.640467 0.416181 3
19198 9.628339 0.985462 3
12518 4.662664 0.495403 2
25453 5.754047 1.382742 2
12530 0.000000 0.037146 2
62230 9.334332 0.198118 1
9517 3.846162 0.619968 2
71161 10.685084 0.678179 1
1593 4.752134 0.359205 2
33794 0.697630 0.966786 1
39710 10.365836 0.505898 3
16941 0.461478 0.352865 2
69209 11.339537 1.068740 1
4446 5.420280 0.127310 2
9347 3.469955 1.619947 2
55635 8.517067 0.994858 3
65889 8.306512 0.413690 1
10753 2.628690 0.444320 2
7055 0.000000 0.802985 2
7905 0.000000 1.170397 2
53447 7.298767 1.582346 3
9194 7.331319 1.277988 2
61914 9.392269 0.151617 1
15630 5.541201 1.180596 2
79194 15.149460 0.537540 1
12268 5.515189 0.250562 2
33682 7.728898 0.920494 3
26080 11.318785 1.510979 3
19119 3.574709 1.531514 2
30902 7.350965 0.026332 3
63039 7.122363 1.630177 1
51136 1.828412 1.013702 1
35262 10.117989 1.156862 3
42776 11.309897 0.086291 3
64191 8.342034 1.388569 1
15436 0.241714 0.715577 2
14402 10.482619 1.694972 2
6341 9.289510 1.428879 2
14113 4.269419 0.134181 2
6390 0.000000 0.189456 2
8794 0.817119 0.143668 2
43432 1.508394 0.652651 1
38334 9.359918 0.052262 3
34068 10.052333 0.550423 3
30819 11.111660 0.989159 3
22239 11.265971 0.724054 3
28725 10.383830 0.254836 3
57071 3.878569 1.377983 1
72420 13.679237 0.025346 1
28294 10.526846 0.781569 3
9896 0.000000 0.924198 2
65821 4.106727 1.085669 1
7645 8.118856 1.470686 2
71289 7.796874 0.052336 1
5128 2.789669 1.093070 2
13711 6.226962 0.287251 2
22240 10.169548 1.660104 3
15092 0.000000 1.370549 2
5017 7.513353 0.137348 2
10141 8.240793 0.099735 2
35570 14.612797 1.247390 3
46893 3.562976 0.445386 1
8178 3.230482 1.331698 2
55783 3.612548 1.551911 1
1148 0.000000 0.332365 2
10062 3.931299 0.487577 2
74124 14.752342 1.155160 1
66603 10.261887 1.628085 1
11893 2.787266 1.570402 2
50908 15.112319 1.324132 3
39891 5.184553 0.223382 3
65915 3.868359 0.128078 1
65678 3.507965 0.028904 1
62996 11.019254 0.427554 1
36851 3.812387 0.655245 1
36669 11.056784 0.378725 3
38876 8.826880 1.002328 3
26878 11.173861 1.478244 3
46246 11.506465 0.421993 3
12761 7.798138 0.147917 3
35282 10.155081 1.370039 3
68306 10.645275 0.693453 1
31262 9.663200 1.521541 3
34754 10.790404 1.312679 3
13408 2.810534 0.219962 2
30365 9.825999 1.388500 3
10709 1.421316 0.677603 2
24332 11.123219 0.809107 3
45517 13.402206 0.661524 3
6178 1.212255 0.836807 2
10639 1.568446 1.297469 2
29613 3.343473 1.312266 1
22392 5.400155 0.193494 1
51126 3.818754 0.590905 1
53644 7.973845 0.307364 3
51417 9.078824 0.734876 3
24859 0.153467 0.766619 1
61732 8.325167 0.028479 1
71128 7.092089 1.216733 1
27276 5.192485 1.094409 3
30453 10.340791 1.087721 3
18670 2.077169 1.019775 2
70600 10.151966 0.993105 1
12683 0.046826 0.809614 2
81597 11.221874 1.395015 1
69959 14.497963 1.019254 1
8124 3.554508 0.533462 2
18867 3.522673 0.086725 2
80886 14.531655 0.380172 1
55895 3.027528 0.885457 1
31587 1.845967 0.488985 1
10591 10.226164 0.804403 3
70096 10.965926 1.212328 1
53151 2.129921 1.477378 1
11992 0.000000 1.606849 2
33114 9.489005 0.827814 3
7413 0.000000 1.020797 2
10583 0.000000 1.270167 2
58668 6.556676 0.055183 1
35018 9.959588 0.060020 3
70843 7.436056 1.479856 1
14011 0.404888 0.459517 2
35015 9.952942 1.650279 3
70839 15.600252 0.021935 1
3024 2.723846 0.387455 2
5526 0.513866 1.323448 2
5113 0.000000 0.861859 2
20851 7.280602 1.438470 2
40999 9.161978 1.110180 3
15823 0.991725 0.730979 2
35432 7.398380 0.684218 3
53711 12.149747 1.389088 3
64371 9.149678 0.874905 1
9289 9.666576 1.370330 2
60613 3.620110 0.287767 1
18338 5.238800 1.253646 2
22845 14.715782 1.503758 3
74676 14.445740 1.211160 1
34143 13.609528 0.364240 3
14153 3.141585 0.424280 2
9327 0.000000 0.120947 2
18991 0.454750 1.033280 2
9193 0.510310 0.016395 2
2285 3.864171 0.616349 2
9493 6.724021 0.563044 2
2371 4.289375 0.012563 2
13963 0.000000 1.437030 2
2299 3.733617 0.698269 2
5262 2.002589 1.380184 2
4659 2.502627 0.184223 2
17582 6.382129 0.876581 2
27750 8.546741 0.128706 3
9868 2.694977 0.432818 2
18333 3.951256 0.333300 2
3780 9.856183 0.329181 2
18190 2.068962 0.429927 2
11145 3.410627 0.631838 2
68846 9.974715 0.669787 1
26575 10.650102 0.866627 3
48111 9.134528 0.728045 3
43757 7.882601 1.332446 3
4.2 代码部分
import kNN
group, labels = kNN.createDataSet()
group
array([[1. , 1.1],
[1. , 1. ],
[0. , 0. ],
[0. , 0.1]])
labels
['A', 'A', 'B', 'B']
kNN.classify0([0,0], group, labels, 3)
'B'
# 数据准备 – 文本中解析数据
import imp
imp.reload(kNN)
<module 'kNN' from 'H:\\HomeWork\\3_2\\InformAnalysis\\homework\\05\\kNN.py'>
datingDataMat, datingLabels = kNN.file2matrix('datingTestSet2.txt')
datingDataMat
array([[4.0920000e+04, 8.3269760e+00, 9.5395200e-01],
[1.4488000e+04, 7.1534690e+00, 1.6739040e+00],
[2.6052000e+04, 1.4418710e+00, 8.0512400e-01],
...,
[2.6575000e+04, 1.0650102e+01, 8.6662700e-01],
[4.8111000e+04, 9.1345280e+00, 7.2804500e-01],
[4.3757000e+04, 7.8826010e+00, 1.3324460e+00]])
datingLabels[0:20]
[3, 2, 1, 1, 1, 1, 3, 3, 1, 3, 1, 1, 2, 1, 1, 1, 1, 1, 2, 3]
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
# 这里为plt用法简单回顾
x = np.arange(1,10)
y = x
fig = plt.figure()
ax1 = fig.add_subplot(111)
#设置标题
ax1.set_title('Scatter Plot')
#设置X轴标签
plt.xlabel('X')
#设置Y轴标签
plt.ylabel('Y')
#画散点图
ax1.scatter(x,y,c = 'r',marker = 'o')
#设置图标
plt.legend('x1')
#显示所画的图
plt.show()
# Set up a second figure (used below for the dating-data scatter plot)
fig = plt.figure()
ax2 = fig.add_subplot(111)
# Set the title
ax2.set_title('Scatter Plot')
# Label the X axis
plt.xlabel('X')
# Label the Y axis
plt.ylabel('Y')
Text(0, 0.5, 'Y')
# 画散点图
# ax2.scatter(datingDataMat[:,1], datingDataMat[:,2], c = 'r',marker = 'o')
datingDataMat[:,1]
array([8.3269760e+00, 7.1534690e+00, 1.4418710e+00, 1.3147394e+01,
1.6697880e+00, 1.0141740e+01, 6.8307920e+00, 1.3276369e+01,
8.6315770e+00, 1.2273169e+01, 3.7234980e+00, 8.3858790e+00,
4.8754350e+00, 4.6800980e+00, 1.5299570e+01, 1.8894610e+00,
7.5167540e+00, 1.4239195e+01, 0.0000000e+00, 1.0528555e+01,
3.5402650e+00, 2.9915510e+00, 5.2978650e+00, 6.5938030e+00,
2.8167600e+00, 1.2458258e+01, 0.0000000e+00, 9.9686480e+00,
1.3648380e+00, 2.3045300e-01, 1.1865402e+01, 1.2046000e-01,
8.5452040e+00, 5.8566490e+00, 9.6656180e+00, 9.7787630e+00,
4.9329760e+00, 2.2162460e+00, 1.4305636e+01, 1.2591889e+01,
3.4246490e+00, 0.0000000e+00, 8.5338230e+00, 9.8295280e+00,
1.1492186e+01, 3.5709680e+00, 1.7712280e+00, 3.5139210e+00,
4.3981720e+00, 4.2768230e+00, 5.9460140e+00, 1.3798970e+01,
1.0393591e+01, 3.0075770e+00, 1.0319380e+00, 4.7512120e+00,
3.6922690e+00, 1.0448091e+01, 1.0585786e+01, 1.6045010e+00,
3.6794970e+00, 3.7951460e+00, 2.5318850e+00, 9.7333400e+00,
6.0930670e+00, 7.7129600e+00, 1.1470364e+01, 2.8865290e+00,
1.0054373e+01, 9.9724700e+00, 2.3357850e+00, 1.1375155e+01,
0.0000000e+00, 4.1267870e+00, 6.3195220e+00, 8.6805270e+00,
1.4856391e+01, 2.4542850e+00, 7.2922020e+00, 8.7451370e+00,
8.5790010e+00, 2.5073020e+00, 1.1415476e+01, 4.8385400e+00,
1.0339507e+01, 6.5737420e+00, 6.5393970e+00, 2.2091590e+00,
1.1196378e+01, 4.2295950e+00, 9.5059440e+00, 8.6527250e+00,
1.7101108e+01, 7.8718390e+00, 8.2621310e+00, 9.0156350e+00,
9.2153510e+00, 6.3750070e+00, 2.2620140e+00, 5.6771100e+00,
1.1293017e+01, 6.5900430e+00, 4.7119600e+00, 8.7680990e+00,
1.1502519e+01, 4.6828120e+00, 1.2446578e+01, 1.2908384e+01,
1.2601108e+01, 3.9294560e+00, 9.7515030e+00, 3.0437670e+00,
4.3915220e+00, 1.1695276e+01, 7.8797420e+00, 5.6131630e+00,
9.1401720e+00, 4.2586440e+00, 6.7998310e+00, 8.7527580e+00,
1.1230330e+00, 1.0833248e+01, 3.0516180e+00, 5.3084090e+00,
1.8417920e+00, 2.2619780e+00, 1.1573696e+01, 8.0387640e+00,
1.0734007e+01, 9.6619090e+00, 9.0058500e+00, 0.0000000e+00,
5.7571400e+00, 9.1646560e+00, 1.3183400e+00, 1.4075597e+01,
1.0107550e+01, 7.9602930e+00, 6.3172920e+00, 1.2664194e+01,
2.9066440e+00, 2.3882410e+00, 6.0244710e+00, 7.2267640e+00,
4.1839970e+00, 1.1850211e+01, 1.1661797e+01, 3.5749670e+00,
0.0000000e+00, 7.9376570e+00, 3.3650270e+00, 0.0000000e+00,
1.3860672e+01, 1.0306714e+01, 7.2280020e+00, 4.5087400e+00,
3.6632800e-01, 3.2994440e+00, 5.7328700e-01, 9.1837380e+00,
7.8426460e+00, 4.7509640e+00, 1.1438702e+01, 8.2430630e+00,
7.9490170e+00, 7.8754770e+00, 9.5690870e+00, 7.7501030e+00,
0.0000000e+00, 3.3960300e+00, 1.1916091e+01, 4.6075800e-01,
1.3087566e+01, 4.5890160e+00, 8.3972170e+00, 5.5627720e+00,
1.0905159e+01, 1.3114410e+00, 1.0647170e+01, 0.0000000e+00,
8.5030250e+00, 4.3688000e-01, 6.1278670e+00, 1.2112492e+01,
1.2649680e+00, 6.0675680e+00, 8.0109640e+00, 3.7910840e+00,
1.1773195e+01, 8.3395880e+00, 2.5630920e+00, 5.9542160e+00,
9.2883740e+00, 3.9767960e+00, 8.5852270e+00, 1.2719460e+00,
0.0000000e+00, 0.0000000e+00, 1.1521298e+01, 1.1394470e+00,
5.6990900e+00, 2.4493780e+00, 0.0000000e+00, 3.1683650e+00,
1.0428610e+01, 2.9430700e+00, 1.0441348e+01, 1.2478764e+01,
5.8569020e+00, 2.4764200e+00, 1.8266370e+00, 4.3244510e+00,
1.3760850e+00, 5.3424620e+00, 1.1835521e+01, 1.2423687e+01,
1.2161273e+01, 8.1483600e+00, 1.5310670e+00, 3.2009120e+00,
8.8626910e+00, 6.3705510e+00, 2.4688410e+00, 1.1054212e+01,
2.0370800e+00, 1.3364030e+01, 1.0249135e+01, 1.0464252e+01,
9.4245740e+00, 4.4589020e+00, 0.0000000e+00, 9.6860820e+00,
1.3649402e+01, 1.3181148e+01, 3.8774720e+00, 1.4139520e+00,
4.2489860e+00, 8.7791830e+00, 4.1562520e+00, 5.5800180e+00,
1.5040440e+01, 1.2793870e+01, 3.2548770e+00, 1.0725607e+01,
8.2564730e+00, 8.0338920e+00, 1.0702532e+01, 5.0629960e+00,
1.0772286e+01, 1.8923540e+00, 1.0199660e+00, 1.5546043e+01,
1.1638205e+01, 3.4278860e+00, 1.1246174e+01, 0.0000000e+00,
0.0000000e+00, 8.2425530e+00, 8.7000600e+00, 8.1234400e-01,
2.4482350e+00, 1.3230078e+01, 2.3613300e-01, 1.1155826e+01,
1.1029636e+01, 2.9011810e+00, 3.9245940e+00, 2.5248060e+00,
3.5274740e+00, 3.3842810e+00, 0.0000000e+00, 1.1898890e+01,
3.5298920e+00, 1.1442677e+01, 1.0308145e+01, 8.5405290e+00,
7.1569490e+00, 7.2067500e-01, 2.2940500e-01, 3.3993310e+00,
6.1572390e+00, 1.2396980e+00, 6.0368540e+00, 5.2586650e+00,
1.2393001e+01, 9.6276130e+00, 1.1130453e+01, 8.8425950e+00,
1.0690010e+01, 5.7147180e+00, 3.0525050e+00, 0.0000000e+00,
9.9453070e+00, 2.7197230e+00, 1.1154055e+01, 2.6879180e+00,
1.0037847e+01, 1.2404762e+01, 1.0237305e+01, 4.7453920e+00,
4.6394610e+00, 3.1493100e+00, 1.3406875e+01, 6.0686680e+00,
9.4770220e+00, 3.8976200e+00, 5.4636150e+00, 3.3692670e+00,
5.2345620e+00, 0.0000000e+00, 1.2979069e+01, 5.3765640e+00,
1.3527910e+01, 2.1968890e+00, 1.0691748e+01, 1.6592420e+00,
8.3696670e+00, 1.3157197e+01, 8.1996670e+00, 4.4416690e+00,
9.8464920e+00, 1.9540000e-02, 8.2537740e+00, 6.0386200e+00,
6.0915870e+00, 8.9868200e+00, 1.1508473e+01, 8.8077340e+00,
0.0000000e+00, 8.8892020e+00, 3.1781170e+00, 7.0137950e+00,
9.6050140e+00, 1.2305400e+00, 1.0412811e+01, 0.0000000e+00,
9.6999910e+00, 0.0000000e+00, 4.4552930e+00, 3.0209770e+00,
8.0992780e+00, 1.1577640e+00, 1.0105396e+01, 1.1230148e+01,
9.0700580e+00, 5.6646000e-01, 0.0000000e+00, 8.9563690e+00,
1.5230570e+00, 2.7490060e+00, 9.2353930e+00, 1.0555573e+01,
6.9563720e+00, 6.3628100e-01, 3.5747370e+00, 9.0324860e+00,
5.9589930e+00, 2.4353000e+00, 1.0539731e+01, 7.6467020e+00,
2.0919349e+01, 1.4247260e+00, 6.7486630e+00, 2.2891670e+00,
5.5483770e+00, 6.0572270e+00, 1.0828595e+01, 1.1318160e+01,
1.3265311e+01, 0.0000000e+00, 6.5171330e+00, 4.9343740e+00,
1.0151738e+01, 2.4257810e+00, 9.7789620e+00, 1.2219950e+01,
7.3941510e+00, 8.5185350e+00, 2.7987000e+00, 6.3793000e-01,
1.0750490e+01, 6.2538200e-01, 1.0027968e+01, 9.8173470e+00,
6.4682800e-01, 3.3471110e+00, 1.1816892e+01, 0.0000000e+00,
1.0945666e+01, 1.0244706e+01, 2.5798010e+00, 2.6304100e+00,
1.1746200e+01, 8.1042320e+00, 1.2409743e+01, 1.2167844e+01,
3.1984080e+00, 1.6055513e+01, 7.1386590e+00, 4.8310410e+00,
1.0082890e+01, 1.0066867e+01, 8.1295380e+00, 3.0124630e+00,
3.7203910e+00, 7.7349300e-01, 1.0962941e+01, 1.7762100e-01,
3.0858530e+00, 8.4267810e+00, 1.8259270e+00, 2.1851550e+00,
7.1845950e+00, 0.0000000e+00, 8.9017520e+00, 2.4514970e+00,
3.2136310e+00, 3.9747390e+00, 9.6013060e+00, 8.3638970e+00,
6.3814840e+00, 0.0000000e+00, 9.6098360e+00, 9.9047410e+00,
7.1858070e+00, 5.4667030e+00, 0.0000000e+00, 4.5754430e+00,
3.2770760e+00, 1.0246623e+01, 2.3417350e+00, 3.2010460e+00,
6.0660130e+00, 8.8293790e+00, 1.5833048e+01, 1.3516711e+01,
6.6428400e-01, 6.3251390e+00, 8.6774990e+00, 8.1880050e+00,
9.4142630e+00, 9.1965470e+00, 1.0202968e+01, 2.1194390e+00,
1.3635078e+01, 8.3443000e-02, 9.1490960e+00, 1.9338030e+00,
1.4115544e+01, 8.9337360e+00, 2.6612540e+00, 9.8843200e-01,
2.0637410e+00, 2.2205900e+00, 6.4248490e+00, 1.1561530e+00,
3.0327200e+00, 3.0768280e+00, 0.0000000e+00, 7.7504800e+00,
1.0958135e+01, 1.0222018e+01, 2.3679880e+00, 7.6860540e+00,
1.1464879e+01, 1.1075735e+01, 3.5439890e+00, 8.1238890e+00,
4.3317690e+00, 1.2086500e-01, 6.1161090e+00, 7.4745340e+00,
8.8194540e+00, 6.8021440e+00, 1.2666325e+01, 8.6361800e+00,
1.1730991e+01, 8.1324490e+00, 1.0296589e+01, 7.5839060e+00,
9.7778060e+00, 8.8335460e+00, 4.9078990e+00, 8.3627360e+00,
9.0847260e+00, 1.4164141e+01, 9.0806830e+00, 6.5227670e+00,
3.6903420e+00, 3.5637060e+00, 1.0658700e+00, 6.6837960e+00,
1.7128740e+00, 1.3109929e+01, 1.1327910e+01, 4.5457110e+00,
3.3678890e+00, 8.3262240e+00, 8.9783390e+00, 5.6558260e+00,
8.8553120e+00, 6.6495680e+00, 3.9663250e+00, 1.9240450e+00,
6.0048120e+00, 0.0000000e+00, 9.9230180e+00, 2.3890840e+00,
1.3663189e+01, 1.1434976e+01, 3.5827000e-01, 9.5988730e+00,
6.3752750e+00, 1.1580532e+01, 5.3193240e+00, 4.3240310e+00,
2.3583700e+00, 0.0000000e+00, 1.2824376e+01, 1.5872470e+00,
8.5103240e+00, 1.0428884e+01, 8.3466180e+00, 7.5414440e+00,
2.5409460e+00, 9.4730470e+00, 3.5228400e-01, 0.0000000e+00,
1.2405171e+01, 4.1267750e+00, 3.4087000e-02, 1.1776340e+00,
0.0000000e+00, 9.9490900e-01, 1.1053664e+01, 0.0000000e+00,
2.4950110e+00, 1.1516831e+01, 9.2132150e+00, 5.3328650e+00,
0.0000000e+00, 0.0000000e+00, 1.2640062e+01, 2.6931420e+00,
3.3289690e+00, 7.1931660e+00, 6.6155120e+00, 8.0005670e+00,
4.0175410e+00, 1.3245859e+01, 5.9706160e+00, 1.1668719e+01,
4.2832370e+00, 1.0742963e+01, 1.2326672e+01, 0.0000000e+00,
0.0000000e+00, 1.0602095e+01, 1.0861859e+01, 1.2290940e+00,
4.1039200e-01, 1.4552711e+01, 1.4178043e+01, 1.4136260e+01,
9.3534000e-02, 1.0929021e+01, 1.1432919e+01, 9.1345270e+00,
5.0714320e+00, 1.1460254e+01, 1.1620039e+01, 4.0220790e+00,
3.0578420e+00, 7.7821690e+00, 7.9817410e+00, 4.6013630e+00,
2.5955640e+00, 1.0049077e+01, 3.2654440e+00, 1.1780282e+01,
3.0759750e+00, 1.7953070e+00, 1.1106979e+01, 5.9944130e+00,
9.7060620e+00, 1.0582992e+01, 7.0382660e+00, 2.3771000e-02,
1.2823982e+01, 3.6177700e+00, 8.3466840e+00, 6.1043170e+00,
1.6207776e+01, 6.4019690e+00, 2.2986960e+00, 7.6615150e+00,
6.3536080e+00, 1.0442780e+01, 3.8345090e+00, 1.0998587e+01,
2.6959350e+00, 3.3566460e+00, 1.4677836e+01, 1.5519340e+00,
2.4647390e+00, 1.5332160e+00, 1.2473921e+01, 6.4915960e+00,
1.0506276e+01, 4.3803880e+00, 1.3670988e+01, 8.3175990e+00,
0.0000000e+00, 0.0000000e+00, 1.0095799e+01, 8.6069500e-01,
1.5575640e+00, 1.0072779e+01, 0.0000000e+00, 7.1408170e+00,
1.1384548e+01, 3.2145680e+00, 1.1720655e+01, 6.3744750e+00,
5.7496840e+00, 3.8718080e+00, 8.3363090e+00, 9.7104420e+00,
1.5326110e+00, 9.7857850e+00, 2.6336270e+00, 9.2389350e+00,
1.2056560e+00, 3.1249090e+00, 7.9354890e+00, 1.2746636e+01,
1.0732563e+01, 3.9774030e+00, 4.1944260e+00, 9.6102860e+00,
4.7975550e+00, 1.6152790e+00, 4.6147710e+00, 0.0000000e+00,
6.0845700e-01, 6.5582390e+00, 1.2315116e+01, 7.0149730e+00,
8.8223040e+00, 1.0086796e+01, 7.2416140e+00, 4.6023950e+00,
7.4349210e+00, 1.0467570e+01, 9.9481270e+00, 2.4785290e+00,
5.9385450e+00, 0.0000000e+00, 5.5591810e+00, 9.7766540e+00,
3.0920560e+00, 0.0000000e+00, 4.4594950e+00, 8.3343060e+00,
1.1226654e+01, 3.9047370e+00, 7.0382050e+00, 9.8361200e+00,
1.9909760e+00, 9.0053020e+00, 1.7725100e+00, 4.5867400e-01,
1.0003919e+01, 5.2080700e-01, 1.0678214e+01, 4.4259920e+00,
1.2035355e+01, 1.0606732e+01, 1.5686530e+00, 2.5454340e+00,
1.0264062e+01, 9.8662760e+00, 1.4270400e-01, 9.8532700e+00,
6.5966040e+00, 2.6022870e+00, 1.0411776e+01, 7.0834490e+00,
2.0800680e+00, 5.2284400e-01, 1.0362000e+01, 3.4129670e+00,
6.7965480e+00, 4.0920350e+00, 2.7638110e+00, 1.2547439e+01,
5.7080520e+00, 4.5580250e+00, 1.1642307e+01, 3.2224430e+00,
4.7361560e+00, 1.0839526e+01, 4.1947910e+00, 1.4936259e+01,
3.3106990e+00, 2.9719310e+00, 9.2616670e+00, 7.7918330e+00,
1.4804700e+00, 3.6772870e+00, 2.2029670e+00, 5.7967350e+00,
3.0633330e+00, 1.1233094e+01, 1.9655700e+00, 8.6167190e+00,
6.6099890e+00, 1.7126390e+00, 1.0117445e+01, 0.0000000e+00,
9.8247770e+00, 1.6530890e+00, 1.8178822e+01, 6.7811260e+00,
8.2067500e+00, 1.0081853e+01, 6.2887420e+00, 3.6959370e+00,
6.7261510e+00, 1.2969999e+01, 2.6613900e+00, 7.0727640e+00,
9.1233660e+00, 3.7439460e+00, 2.3413000e+00, 5.4191300e-01,
2.3108280e+00, 6.2265970e+00, 7.2778760e+00, 0.0000000e+00,
7.2182210e+00, 8.7771290e+00, 2.8134280e+00, 2.2687660e+00,
6.2836270e+00, 7.5200810e+00, 1.1739225e+01, 3.7468830e+00,
1.2089835e+01, 1.2310404e+01, 0.0000000e+00, 2.7288000e+00,
1.0814342e+01, 1.2170268e+01, 6.6983710e+00, 3.6326720e+00,
1.0059991e+01, 1.8876740e+00, 8.2291250e+00, 7.8170820e+00,
1.2277230e+01, 1.0055337e+01, 3.5960020e+00, 2.7555300e+00,
7.7809910e+00, 9.3705000e-02, 8.4815670e+00, 3.8655840e+00,
9.6837090e+00, 1.0617255e+01, 7.2032160e+00, 7.6014140e+00,
1.3861070e+00, 9.1292530e+00, 1.3634470e+00, 3.1813990e+00,
1.3365414e+01, 4.2077170e+00, 4.0883950e+00, 3.3273710e+00,
1.3033230e+00, 7.9992790e+00, 2.2174880e+00, 7.7518080e+00,
1.4149305e+01, 8.7657210e+00, 3.4089960e+00, 1.2510210e+00,
6.1606190e+00, 1.0345380e+00, 0.0000000e+00, 2.3550510e+00,
6.6145430e+00, 1.0245062e+01, 3.4670740e+00, 7.4876780e+00,
4.6241150e+00, 8.9959570e+00, 1.1564476e+01, 3.4409480e+00,
1.6736030e+00, 4.7193410e+00, 1.0304798e+01, 2.0869150e+00,
6.3382200e+00, 8.2549260e+00, 1.6067108e+01, 1.7232010e+00,
3.7850450e+00, 2.5575610e+00, 9.8522200e+00, 3.6791470e+00,
9.7896810e+00, 1.4958998e+01, 1.1182148e+01, 7.5285330e+00,
5.2538020e+00, 1.3946752e+01, 1.5557263e+01, 1.2483550e+01,
2.3173020e+00, 1.0069724e+01, 5.7922310e+00, 4.1384350e+00,
1.2929517e+01, 9.3782380e+00, 8.3613620e+00, 7.9394060e+00,
1.0735384e+01, 1.1592723e+01, 1.0098356e+01, 9.2990250e+00,
1.1158297e+01, 1.6143900e+01, 1.0971700e+01, 0.0000000e+00,
3.1789610e+00, 1.7625350e+01, 1.9958330e+00, 1.0640467e+01,
9.6283390e+00, 4.6626640e+00, 5.7540470e+00, 0.0000000e+00,
9.3343320e+00, 3.8461620e+00, 1.0685084e+01, 4.7521340e+00,
6.9763000e-01, 1.0365836e+01, 4.6147800e-01, 1.1339537e+01,
5.4202800e+00, 3.4699550e+00, 8.5170670e+00, 8.3065120e+00,
2.6286900e+00, 0.0000000e+00, 0.0000000e+00, 7.2987670e+00,
7.3313190e+00, 9.3922690e+00, 5.5412010e+00, 1.5149460e+01,
5.5151890e+00, 7.7288980e+00, 1.1318785e+01, 3.5747090e+00,
7.3509650e+00, 7.1223630e+00, 1.8284120e+00, 1.0117989e+01,
1.1309897e+01, 8.3420340e+00, 2.4171400e-01, 1.0482619e+01,
9.2895100e+00, 4.2694190e+00, 0.0000000e+00, 8.1711900e-01,
1.5083940e+00, 9.3599180e+00, 1.0052333e+01, 1.1111660e+01,
1.1265971e+01, 1.0383830e+01, 3.8785690e+00, 1.3679237e+01,
1.0526846e+01, 0.0000000e+00, 4.1067270e+00, 8.1188560e+00,
7.7968740e+00, 2.7896690e+00, 6.2269620e+00, 1.0169548e+01,
0.0000000e+00, 7.5133530e+00, 8.2407930e+00, 1.4612797e+01,
3.5629760e+00, 3.2304820e+00, 3.6125480e+00, 0.0000000e+00,
3.9312990e+00, 1.4752342e+01, 1.0261887e+01, 2.7872660e+00,
1.5112319e+01, 5.1845530e+00, 3.8683590e+00, 3.5079650e+00,
1.1019254e+01, 3.8123870e+00, 1.1056784e+01, 8.8268800e+00,
1.1173861e+01, 1.1506465e+01, 7.7981380e+00, 1.0155081e+01,
1.0645275e+01, 9.6632000e+00, 1.0790404e+01, 2.8105340e+00,
9.8259990e+00, 1.4213160e+00, 1.1123219e+01, 1.3402206e+01,
1.2122550e+00, 1.5684460e+00, 3.3434730e+00, 5.4001550e+00,
3.8187540e+00, 7.9738450e+00, 9.0788240e+00, 1.5346700e-01,
8.3251670e+00, 7.0920890e+00, 5.1924850e+00, 1.0340791e+01,
2.0771690e+00, 1.0151966e+01, 4.6826000e-02, 1.1221874e+01,
1.4497963e+01, 3.5545080e+00, 3.5226730e+00, 1.4531655e+01,
3.0275280e+00, 1.8459670e+00, 1.0226164e+01, 1.0965926e+01,
2.1299210e+00, 0.0000000e+00, 9.4890050e+00, 0.0000000e+00,
0.0000000e+00, 6.5566760e+00, 9.9595880e+00, 7.4360560e+00,
4.0488800e-01, 9.9529420e+00, 1.5600252e+01, 2.7238460e+00,
5.1386600e-01, 0.0000000e+00, 7.2806020e+00, 9.1619780e+00,
9.9172500e-01, 7.3983800e+00, 1.2149747e+01, 9.1496780e+00,
9.6665760e+00, 3.6201100e+00, 5.2388000e+00, 1.4715782e+01,
1.4445740e+01, 1.3609528e+01, 3.1415850e+00, 0.0000000e+00,
4.5475000e-01, 5.1031000e-01, 3.8641710e+00, 6.7240210e+00,
4.2893750e+00, 0.0000000e+00, 3.7336170e+00, 2.0025890e+00,
2.5026270e+00, 6.3821290e+00, 8.5467410e+00, 2.6949770e+00,
3.9512560e+00, 9.8561830e+00, 2.0689620e+00, 3.4106270e+00,
9.9747150e+00, 1.0650102e+01, 9.1345280e+00, 7.8826010e+00])
datingDataMat[:,2]
array([9.539520e-01, 1.673904e+00, 8.051240e-01, 4.289640e-01,
1.342960e-01, 1.032955e+00, 1.213192e+00, 5.438800e-01,
7.492780e-01, 1.508053e+00, 8.319170e-01, 1.669485e+00,
7.286580e-01, 6.252240e-01, 3.313510e-01, 1.912830e-01,
1.269164e+00, 2.613330e-01, 1.250185e+00, 1.304844e+00,
8.224830e-01, 8.339200e-01, 6.383060e-01, 1.871080e-01,
1.686209e+00, 6.496170e-01, 1.656418e+00, 7.312320e-01,
6.401030e-01, 1.151996e+00, 8.828100e-01, 1.352013e+00,
1.340429e+00, 1.600060e-01, 7.786260e-01, 1.084103e+00,
6.320260e-01, 5.870950e-01, 6.323170e-01, 6.865810e-01,
1.004504e+00, 1.475730e-01, 2.053240e-01, 2.386200e-01,
2.634990e-01, 8.322540e-01, 2.076120e-01, 9.918540e-01,
9.750240e-01, 1.174874e+00, 1.614244e+00, 7.243750e-01,
1.663724e+00, 2.973020e-01, 4.861740e-01, 6.469300e-02,
1.655113e+00, 2.676520e-01, 3.295570e-01, 6.906400e-02,
9.614660e-01, 6.966940e-01, 1.659173e+00, 9.777460e-01,
1.413798e+00, 1.054927e+00, 7.604610e-01, 9.344160e-01,
1.138351e+00, 8.818760e-01, 1.366145e+00, 1.528626e+00,
6.056190e-01, 3.575010e-01, 1.058602e+00, 8.695500e-02,
1.129823e+00, 2.223800e-01, 5.486070e-01, 8.573480e-01,
6.830480e-01, 8.691770e-01, 1.505466e+00, 1.680892e+00,
5.836460e-01, 1.151433e+00, 4.620650e-01, 7.235670e-01,
8.363260e-01, 1.282530e-01, 5.273000e-03, 1.348934e+00,
4.907120e-01, 7.176620e-01, 1.361646e+00, 1.658555e+00,
8.067620e-01, 3.367800e-02, 1.022169e+00, 7.094690e-01,
2.079760e-01, 1.353117e+00, 1.941670e-01, 1.108041e+00,
5.450970e-01, 5.781120e-01, 3.007540e-01, 1.657722e+00,
9.745270e-01, 2.546600e-02, 1.182050e+00, 8.881680e-01,
8.071000e-01, 6.790150e-01, 1.542630e-01, 9.336320e-01,
8.513000e-01, 2.068920e-01, 1.221171e+00, 4.844180e-01,
1.180352e+00, 1.585426e+00, 2.678100e-02, 3.068300e-02,
2.809900e-02, 1.605603e+00, 1.061347e+00, 1.083910e+00,
1.037150e-01, 3.507720e-01, 5.487370e-01, 5.391310e-01,
1.062373e+00, 1.624565e+00, 1.436243e+00, 6.959340e-01,
1.308398e+00, 1.219760e+00, 1.820900e-02, 5.956530e-01,
5.816570e-01, 9.139380e-01, 4.862150e-01, 1.255329e+00,
1.275290e+00, 1.096981e+00, 1.167935e+00, 4.946660e-01,
1.074750e-01, 9.047990e-01, 1.014085e+00, 3.674910e-01,
1.293270e+00, 1.211594e+00, 6.706700e-01, 1.036192e+00,
1.636520e-01, 5.751520e-01, 6.079150e-01, 1.228000e-02,
1.060636e+00, 5.582400e-01, 1.556334e+00, 1.122768e+00,
2.718650e-01, 2.270850e-01, 3.648560e-01, 8.690940e-01,
1.515293e+00, 6.339770e-01, 2.529400e-02, 6.895860e-01,
4.760020e-01, 1.672600e+00, 1.534103e+00, 1.689388e+00,
6.190910e-01, 1.169887e+00, 9.801410e-01, 4.819180e-01,
8.308610e-01, 1.395314e+00, 1.102179e+00, 3.596800e-01,
1.141582e+00, 1.327047e+00, 1.681648e+00, 3.040720e-01,
1.262621e+00, 1.443357e+00, 1.464013e+00, 9.537820e-01,
7.673180e-01, 1.043109e+00, 1.455708e+00, 7.965060e-01,
2.427780e-01, 8.974900e-02, 3.008600e-01, 4.153730e-01,
1.391892e+00, 1.322560e+00, 1.228380e+00, 5.399300e-02,
1.126257e+00, 1.446816e+00, 9.752830e-01, 1.628726e+00,
3.638830e-01, 9.607500e-02, 8.114570e-01, 3.282350e-01,
1.178359e+00, 3.945270e-01, 6.933010e-01, 1.424264e+00,
7.113100e-02, 1.649194e+00, 1.549756e+00, 3.096790e-01,
5.305060e-01, 3.693500e-01, 1.450600e-01, 1.415080e-01,
7.152430e-01, 5.499720e-01, 1.927350e-01, 1.669767e+00,
1.372500e-02, 2.684440e-01, 5.759760e-01, 1.029808e+00,
1.052618e+00, 2.730140e-01, 4.016000e-01, 4.513800e-01,
1.430249e+00, 8.459470e-01, 9.710900e-02, 1.584010e-01,
1.366898e+00, 1.307323e+00, 6.695460e-01, 5.885880e-01,
7.658910e-01, 1.618562e+00, 2.047920e-01, 1.132555e+00,
6.687210e-01, 8.370280e-01, 3.723200e-01, 7.297420e-01,
4.091250e-01, 9.756160e-01, 1.475586e+00, 6.450450e-01,
1.424017e+00, 2.790690e-01, 1.018070e-01, 2.603340e-01,
1.176829e+00, 6.161470e-01, 3.408400e-01, 3.351310e-01,
5.057690e-01, 1.646633e+00, 1.143120e+00, 1.292848e+00,
1.449158e+00, 8.892680e-01, 1.107592e+00, 4.064410e-01,
1.375844e+00, 6.969190e-01, 4.227220e-01, 7.273730e-01,
1.691682e+00, 8.475740e-01, 1.038603e+00, 7.750100e-02,
5.801330e-01, 7.199890e-01, 1.654800e-02, 9.337220e-01,
1.571281e+00, 9.358420e-01, 5.976100e-01, 3.497680e-01,
1.456595e+00, 1.674780e+00, 1.335804e+00, 5.902500e-02,
1.287952e+00, 1.142148e+00, 1.608486e+00, 6.608360e-01,
9.622450e-01, 1.112080e+00, 6.334220e-01, 6.625200e-01,
1.569431e+00, 6.396690e-01, 1.639194e+00, 8.812410e-01,
8.990020e-01, 5.602010e-01, 1.203677e+00, 1.575043e+00,
8.259540e-01, 7.221700e-01, 5.040680e-01, 5.574760e-01,
1.586732e+00, 7.845870e-01, 7.509000e-03, 4.470660e-01,
6.566970e-01, 1.432480e-01, 9.085080e-01, 4.393810e-01,
6.445230e-01, 9.779490e-01, 7.487000e-01, 1.509646e+00,
1.694641e+00, 1.225165e+00, 1.624296e+00, 7.139220e-01,
8.166760e-01, 1.665414e+00, 5.427520e-01, 1.399090e-01,
6.525400e-02, 1.331674e+00, 8.908030e-01, 5.671610e-01,
1.220110e-01, 6.119100e-02, 2.721350e-01, 1.502803e+00,
2.163170e-01, 1.603217e+00, 1.210670e-01, 4.086030e-01,
1.137900e-02, 4.788370e-01, 4.873000e-01, 1.193484e+00,
6.205280e-01, 1.698550e-01, 1.883500e-01, 4.039270e-01,
1.519308e+00, 1.273984e+00, 7.516300e-02, 1.461809e+00,
2.301200e-02, 1.211744e+00, 1.638248e+00, 5.651300e-02,
6.445710e-01, 8.384470e-01, 8.902230e-01, 1.148810e-01,
4.022380e-01, 4.326660e-01, 5.599550e-01, 2.710940e-01,
6.339030e-01, 1.496715e+00, 4.025190e-01, 1.520028e+00,
8.964330e-01, 1.559467e+00, 1.195498e+00, 6.576770e-01,
9.544340e-01, 7.425460e-01, 6.626320e-01, 6.173730e-01,
9.741500e-02, 1.409690e-01, 2.827870e-01, 3.641970e-01,
1.266069e+00, 9.142940e-01, 1.937980e-01, 1.480198e+00,
9.932190e-01, 2.805390e-01, 1.149172e+00, 9.886900e-02,
1.695517e+00, 1.326277e+00, 7.902950e-01, 1.328086e+00,
2.992870e-01, 5.410520e-01, 1.584810e-01, 7.614190e-01,
1.373611e+00, 7.884700e-01, 3.299130e-01, 1.138108e+00,
8.459740e-01, 1.148256e+00, 1.037324e+00, 1.626140e-01,
9.678990e-01, 2.025580e-01, 1.128347e+00, 1.010173e+00,
1.261338e+00, 1.165250e-01, 1.033527e+00, 1.358795e+00,
4.320440e-01, 7.239290e-01, 6.192320e-01, 4.453410e-01,
1.365019e+00, 1.403914e+00, 1.438105e+00, 9.858620e-01,
1.489102e+00, 1.216571e+00, 9.158980e-01, 5.356710e-01,
1.010868e+00, 1.239634e+00, 1.060235e+00, 4.988430e-01,
1.209270e-01, 8.956570e-01, 1.568245e+00, 1.220153e+00,
1.116755e+00, 6.051090e-01, 3.443730e-01, 9.648960e-01,
3.840300e-01, 1.138253e+00, 4.523630e-01, 1.481661e+00,
8.583140e-01, 7.016690e-01, 1.051446e+00, 1.374388e+00,
6.761980e-01, 9.433520e-01, 9.461170e-01, 1.305027e+00,
1.125946e+00, 6.907540e-01, 8.066410e-01, 1.613674e+00,
6.018470e-01, 9.520890e-01, 3.181050e-01, 5.540150e-01,
1.482500e+00, 4.886780e-01, 4.357410e-01, 1.381455e+00,
1.481589e+00, 8.972600e-02, 3.458530e-01, 1.282880e+00,
7.544670e-01, 1.211961e+00, 7.015230e-01, 5.057900e-01,
6.492920e-01, 6.152840e-01, 9.319600e-01, 3.993330e-01,
1.289833e+00, 3.906200e-02, 1.496144e+00, 1.005764e+00,
4.963770e-01, 5.138760e-01, 1.518036e+00, 1.285939e+00,
1.606312e+00, 5.609700e-01, 9.899200e-01, 3.854800e-02,
4.622810e-01, 2.420190e-01, 1.141569e+00, 1.456317e+00,
2.439450e-01, 1.280111e+00, 7.809770e-01, 1.233254e+00,
4.681040e-01, 5.673470e-01, 1.442034e+00, 1.582159e+00,
5.706840e-01, 5.442330e-01, 8.504100e-01, 1.664782e+00,
2.803690e-01, 3.758490e-01, 9.219200e-02, 1.192840e-01,
1.332510e-01, 3.212160e-01, 1.292858e+00, 2.235240e-01,
6.080400e-01, 4.584010e-01, 1.598070e+00, 1.603481e+00,
1.273204e+00, 1.182708e+00, 8.904110e-01, 1.456982e+00,
1.520683e+00, 1.187734e+00, 4.231800e-02, 8.092260e-01,
1.583286e+00, 6.925130e-01, 4.740800e-01, 5.898260e-01,
5.672010e-01, 8.714520e-01, 3.358480e-01, 7.510600e-02,
4.799960e-01, 6.111350e-01, 1.180117e+00, 1.679729e+00,
1.459589e+00, 1.156000e-03, 7.977430e-01, 1.092880e-01,
1.689771e+00, 1.126053e+00, 1.690903e+00, 1.317518e+00,
2.682710e-01, 1.117456e+00, 1.521012e+00, 8.353410e-01,
5.121040e-01, 9.274650e-01, 8.136240e-01, 8.869020e-01,
1.272728e+00, 9.714010e-01, 1.592608e+00, 3.446220e-01,
9.228460e-01, 5.736860e-01, 1.155054e+00, 1.638690e+00,
1.313401e+00, 6.161620e-01, 6.163130e-01, 3.623880e-01,
1.207194e+00, 4.031100e-01, 8.259590e-01, 5.868460e-01,
1.421420e+00, 1.541749e+00, 1.103553e+00, 2.073070e-01,
1.631262e+00, 4.043850e-01, 9.297890e-01, 2.683260e-01,
1.115375e+00, 3.910450e-01, 1.572970e+00, 1.511014e+00,
2.862840e-01, 1.943430e-01, 2.024150e-01, 8.000210e-01,
1.012182e+00, 8.360250e-01, 1.458979e+00, 1.531400e-02,
6.763710e-01, 4.934830e-01, 2.533170e-01, 9.920700e-02,
5.849730e-01, 1.691873e+00, 5.597570e-01, 5.598100e-02,
1.645301e+00, 3.358700e-01, 1.346121e+00, 5.845550e-01,
1.512111e+00, 3.242300e-01, 7.931830e-01, 1.309020e-01,
2.235020e-01, 1.007481e+00, 1.629100e-01, 3.257600e-02,
1.510747e+00, 7.485060e-01, 1.687944e+00, 3.904090e-01,
5.562450e-01, 2.902180e-01, 1.188148e+00, 1.482632e+00,
7.112780e-01, 7.560300e-01, 4.314680e-01, 8.838130e-01,
1.438307e+00, 1.083536e+00, 3.016360e-01, 1.475925e+00,
1.988750e-01, 5.526020e-01, 6.362380e-01, 1.503735e+00,
1.433898e+00, 9.846140e-01, 1.097866e+00, 4.947010e-01,
1.398803e+00, 1.670121e+00, 1.585044e+00, 1.560352e+00,
5.453210e-01, 7.661030e-01, 4.506630e-01, 1.429120e-01,
1.260455e+00, 9.300200e-02, 1.027105e+00, 1.369726e+00,
5.122200e-01, 6.675790e-01, 1.970680e-01, 1.494616e+00,
1.194177e+00, 5.704550e-01, 1.661627e+00, 1.511768e+00,
7.979200e-02, 1.595418e+00, 3.663000e-03, 1.568987e+00,
8.785400e-01, 9.480040e-01, 1.357926e+00, 5.359660e-01,
4.909060e-01, 1.623311e+00, 5.388670e-01, 1.646600e+00,
3.846860e-01, 1.597294e+00, 1.211329e+00, 1.054340e+00,
3.780810e-01, 4.853850e-01, 1.039873e+00, 8.195600e-01,
2.316580e-01, 1.476008e+00, 1.431837e+00, 1.363842e+00,
8.312220e-01, 1.253858e+00, 6.842640e-01, 2.427100e-02,
9.825930e-01, 6.852180e-01, 5.745500e-02, 1.521432e+00,
1.653574e+00, 1.321481e+00, 6.641680e-01, 6.225890e-01,
1.254441e+00, 1.622458e+00, 1.544827e+00, 1.035410e+00,
1.112153e+00, 7.580400e-02, 1.564325e+00, 1.402443e+00,
1.596152e+00, 3.758060e-01, 4.385530e-01, 1.213990e-01,
2.987100e-02, 8.363230e-01, 2.354830e-01, 8.885820e-01,
1.521855e+00, 3.432100e-02, 5.378070e-01, 1.111416e+00,
1.028750e+00, 2.441670e-01, 1.370399e+00, 9.358930e-01,
1.440890e-01, 4.924870e-01, 5.697000e-03, 1.374190e-01,
1.083505e+00, 1.086297e+00, 1.299319e+00, 1.104178e+00,
1.346821e+00, 9.809490e-01, 1.473671e+00, 8.853400e-01,
1.549223e+00, 1.376745e+00, 1.127990e-01, 1.543589e+00,
1.069380e+00, 1.568223e+00, 1.531933e+00, 1.117386e+00,
1.318988e+00, 1.039546e+00, 2.193610e-01, 5.923480e-01,
1.436753e+00, 1.427316e+00, 4.892520e-01, 3.894590e-01,
1.098828e+00, 1.111464e+00, 8.194190e-01, 1.412130e+00,
5.712920e-01, 1.626868e+00, 2.713800e-02, 8.773500e-01,
5.216310e-01, 2.593390e-01, 6.713550e-01, 3.315020e-01,
6.076520e-01, 8.442050e-01, 2.400840e-01, 1.643479e+00,
8.923610e-01, 7.561620e-01, 1.958860e-01, 4.761020e-01,
7.680500e-02, 1.115778e+00, 1.485952e+00, 1.420655e+00,
5.130480e-01, 3.918340e-01, 5.200780e-01, 1.100620e-01,
7.799840e-01, 1.359970e+00, 1.624762e+00, 1.215605e+00,
1.417070e+00, 5.940890e-01, 6.208410e-01, 3.593290e-01,
2.170110e-01, 1.289767e+00, 8.700750e-01, 1.142505e+00,
1.235650e+00, 1.581763e+00, 8.645360e-01, 1.924510e-01,
1.591532e+00, 1.528080e-01, 1.848960e-01, 1.123400e-01,
1.537165e+00, 1.585162e+00, 1.034635e+00, 5.426030e-01,
1.537710e-01, 1.450903e+00, 1.231019e+00, 1.572293e+00,
1.185192e+00, 1.436479e+00, 7.195000e-03, 7.833100e-02,
7.327460e-01, 6.997550e-01, 1.576488e+00, 1.199312e+00,
1.131305e+00, 7.106940e-01, 9.741420e-01, 3.104880e-01,
8.769040e-01, 1.237380e-01, 1.095171e+00, 1.557205e+00,
8.529710e-01, 5.267070e-01, 1.288459e+00, 1.657487e+00,
1.378603e+00, 1.426657e+00, 1.430029e+00, 6.885130e-01,
1.411137e+00, 7.661190e-01, 1.615483e+00, 4.759940e-01,
3.043780e-01, 3.073920e-01, 1.643204e+00, 1.325042e+00,
7.057880e-01, 2.861880e-01, 7.047480e-01, 5.453370e-01,
2.180670e-01, 5.583880e-01, 1.221787e+00, 6.814780e-01,
1.292692e+00, 3.399260e-01, 2.678260e-01, 4.161810e-01,
9.854620e-01, 4.954030e-01, 1.382742e+00, 3.714600e-02,
1.981180e-01, 6.199680e-01, 6.781790e-01, 3.592050e-01,
9.667860e-01, 5.058980e-01, 3.528650e-01, 1.068740e+00,
1.273100e-01, 1.619947e+00, 9.948580e-01, 4.136900e-01,
4.443200e-01, 8.029850e-01, 1.170397e+00, 1.582346e+00,
1.277988e+00, 1.516170e-01, 1.180596e+00, 5.375400e-01,
2.505620e-01, 9.204940e-01, 1.510979e+00, 1.531514e+00,
2.633200e-02, 1.630177e+00, 1.013702e+00, 1.156862e+00,
8.629100e-02, 1.388569e+00, 7.155770e-01, 1.694972e+00,
1.428879e+00, 1.341810e-01, 1.894560e-01, 1.436680e-01,
6.526510e-01, 5.226200e-02, 5.504230e-01, 9.891590e-01,
7.240540e-01, 2.548360e-01, 1.377983e+00, 2.534600e-02,
7.815690e-01, 9.241980e-01, 1.085669e+00, 1.470686e+00,
5.233600e-02, 1.093070e+00, 2.872510e-01, 1.660104e+00,
1.370549e+00, 1.373480e-01, 9.973500e-02, 1.247390e+00,
4.453860e-01, 1.331698e+00, 1.551911e+00, 3.323650e-01,
4.875770e-01, 1.155160e+00, 1.628085e+00, 1.570402e+00,
1.324132e+00, 2.233820e-01, 1.280780e-01, 2.890400e-02,
4.275540e-01, 6.552450e-01, 3.787250e-01, 1.002328e+00,
1.478244e+00, 4.219930e-01, 1.479170e-01, 1.370039e+00,
6.934530e-01, 1.521541e+00, 1.312679e+00, 2.199620e-01,
1.388500e+00, 6.776030e-01, 8.091070e-01, 6.615240e-01,
8.368070e-01, 1.297469e+00, 1.312266e+00, 1.934940e-01,
5.909050e-01, 3.073640e-01, 7.348760e-01, 7.666190e-01,
2.847900e-02, 1.216733e+00, 1.094409e+00, 1.087721e+00,
1.019775e+00, 9.931050e-01, 8.096140e-01, 1.395015e+00,
1.019254e+00, 5.334620e-01, 8.672500e-02, 3.801720e-01,
8.854570e-01, 4.889850e-01, 8.044030e-01, 1.212328e+00,
1.477378e+00, 1.606849e+00, 8.278140e-01, 1.020797e+00,
1.270167e+00, 5.518300e-02, 6.002000e-02, 1.479856e+00,
4.595170e-01, 1.650279e+00, 2.193500e-02, 3.874550e-01,
1.323448e+00, 8.618590e-01, 1.438470e+00, 1.110180e+00,
7.309790e-01, 6.842180e-01, 1.389088e+00, 8.749050e-01,
1.370330e+00, 2.877670e-01, 1.253646e+00, 1.503758e+00,
1.211160e+00, 3.642400e-01, 4.242800e-01, 1.209470e-01,
1.033280e+00, 1.639500e-02, 6.163490e-01, 5.630440e-01,
1.256300e-02, 1.437030e+00, 6.982690e-01, 1.380184e+00,
1.842230e-01, 8.765810e-01, 1.287060e-01, 4.328180e-01,
3.333000e-01, 3.291810e-01, 4.299270e-01, 6.318380e-01,
6.697870e-01, 8.666270e-01, 7.280450e-01, 1.332446e+00])
ax2.scatter(datingDataMat[:,1], datingDataMat[:,2], c = 'r',marker = 'o')
<matplotlib.collections.PathCollection at 0x21ea3d50710>
plt.show()
# 归一化数据
imp.reload(kNN)
<module 'kNN' from 'H:\\HomeWork\\3_2\\InformAnalysis\\homework\\05\\kNN.py'>
norMat, ranges, minVals = kNN.autoNorm(datingDataMat)
norMat
array([[0.44832535, 0.39805139, 0.56233353],
[0.15873259, 0.34195467, 0.98724416],
[0.28542943, 0.06892523, 0.47449629],
...,
[0.29115949, 0.50910294, 0.51079493],
[0.52711097, 0.43665451, 0.4290048 ],
[0.47940793, 0.3768091 , 0.78571804]])
ranges
array([9.1273000e+04, 2.0919349e+01, 1.6943610e+00])
minVals
array([0. , 0. , 0.001156])
kNN.datingClassTest()
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 2
the classifier came back with: 2, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 1
the classifier came back with: 3, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 2, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the total error rate is: 0.066000
33.0
# 测试新样本
import imp
imp.reload(kNN)
<module 'kNN' from 'H:\\HomeWork\\3_2\\InformAnalysis\\homework\\05\\kNN.py'>
kNN.classifyPerson()
percentage of time spent playing video games?10
frequent flier miles earned per year?10000
liters of ice cream consumed per year?0.5
you will probably like this person:
not at all
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 3, the real answer is: 3
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 1, the real answer is: 1
the classifier came back with: 2, the real answer is: 2
the total error rate is: 0.066000
33.0
# 测试新样本
import imp
imp.reload(kNN)
<module 'kNN' from 'H:\\HomeWork\\3_2\\InformAnalysis\\homework\\05\\kNN.py'>
kNN.classifyPerson()
percentage of time spent playing video games?10
frequent flier miles earned per year?10000
liters of ice cream consumed per year?0.5
you will probably like this person:
not at all