Python Machine Learning 4-3 Code Walkthrough

import urllib.request
import numpy                                # not used by the main script; handy for the optional sketches below
from sklearn import datasets, linear_model  # only linear_model is touched, in the optional cross-check below
from math import sqrt
import matplotlib.pyplot as plot

def S(z, gamma):
    # Soft-thresholding operator: shrink z toward zero by gamma,
    # and clamp it to exactly zero once |z| <= gamma.
    if gamma >= abs(z):
        return 0.0
    return (z / abs(z)) * (abs(z) - gamma)
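# In formula form, S(z, gamma) = sign(z) * max(|z| - gamma, 0); for example
# S(1.5, 1.0) returns 0.5 and S(-0.3, 1.0) returns 0.0. This is the proximal
# operator of the L1 penalty, applied coordinate-wise in the loop below.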

target_url = ("http://archive.ics.uci.edu/ml/machine-learning-databases/wine-quality/winequality-red.csv")
data = urllib.request.urlopen(target_url)

xList = []
labels = []
names = []
firstLine = True
for line in data:
    if firstLine:
        # urlopen yields bytes, so split on a bytes separator
        names = line.strip().split(b";")
        firstLine = False
    else:
        row = line.strip().split(b";")
        labels.append(float(row[-1]))            # the last column is the quality label
        row.pop()
        floatRow = [float(num) for num in row]   # remaining columns are the attributes
        xList.append(floatRow)
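# A minimal equivalent using pandas, assuming it is installed (note that pandas
# returns the header as plain strings rather than the byte strings printed below):
#   import pandas as pd
#   df = pd.read_csv(target_url, sep=";")
#   names = list(df.columns)
#   labels = df.iloc[:, -1].tolist()
#   xList = df.iloc[:, :-1].values.tolist()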


# The steps below standardize the attribute columns (zero mean, unit variance)
nrows = len(xList)
ncols = len(xList[0])

xMeans = []
xSD = []
for i in range(ncols):
    col = [xList[j][i] for j in range(nrows)]   # gather column i into one list
    mean = sum(col) / nrows                     # column mean
    xMeans.append(mean)
    colDiff = [(xList[j][i] - mean) for j in range(nrows)]        # deviations from the mean
    sumSq = sum([colDiff[k] * colDiff[k] for k in range(nrows)])  # sum of squared deviations
    stdDev = sqrt(sumSq / nrows)                # population standard deviation of column i
    xSD.append(stdDev)


xNormalized = []
for i in range(nrows):
    rowNormalized = [(xList[i][j] - xMeans[j]) / xSD[j] for j in range(ncols)]  # z-score each attribute
    xNormalized.append(rowNormalized)
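# The same standardization with numpy (imported above) would be, as a sketch:
#   X = numpy.array(xList)
#   xNormalized = ((X - X.mean(axis=0)) / X.std(axis=0)).tolist()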

# Standardize the labels the same way; with both the attributes and the
# labels centered, the model needs no intercept term
meanLabel = sum(labels) / nrows
sdLabel = sqrt(sum([(labels[i] - meanLabel) * (labels[i] - meanLabel) for i in range(nrows)]) / nrows)
labelNormalized = [(labels[i] - meanLabel) / sdLabel for i in range(nrows)]

# Find the starting value of lambda: the smallest value at which every
# coefficient is still exactly zero
alpha = 1.0     # alpha = 1.0 is pure LASSO; alpha < 1.0 blends in a ridge penalty (ElasticNet)
xy = [0.0] * ncols
for i in range(nrows):
    for j in range(ncols):
        xy[j] += xNormalized[i][j] * labelNormalized[i]  # accumulate the correlation of column j with the labels
maxXY = 0.0
for i in range(ncols):
    val = abs(xy[i]) / nrows
    if val > maxXY:
        maxXY = val
lam = maxXY / alpha
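# In other words, lam = max_j |x_j . y| / (nrows * alpha). At this lambda the
# soft threshold S() zeroes every coordinate update, so the coefficient path
# starts from the all-zero solution and coefficients enter one by one as
# lambda decays.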


beta = [0.0] * ncols
betaMat = []
betaMat.append(list(beta))   # record the all-zero starting point of the path

nSteps = 100
lamMult = 0.93               # shrink lambda by 7% at every outer step
nzList = []                  # attribute indices seen with non-zero coefficients, in order of entry
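# With 100 steps at a 0.93 decay, the final lambda is 0.93**100, roughly
# 7e-4 of the starting value, so the path runs from fully penalized to
# nearly unpenalized coefficients.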

for iStep in range(nSteps):
    lam = lam * lamMult   # decrease lambda at each outer step
    deltaBeta = 100.0
    eps = 0.01
    iterStep = 0
    betaInner = list(beta)
    # coordinate descent: cycle through the columns until the coefficients
    # stop changing (or 100 passes have been made)
    while deltaBeta > eps:
        iterStep += 1
        if iterStep > 100:
            break
        betaStart = list(betaInner)
        for iCol in range(ncols):
            xyj = 0.0
            for i in range(nrows):
                # prediction for row i under the current coefficients
                labelHat = sum([xNormalized[i][k] * betaInner[k] for k in range(ncols)])
                residual = labelNormalized[i] - labelHat
                # accumulate the correlation of column iCol with the residuals
                xyj += xNormalized[i][iCol] * residual

            # unpenalized single-coordinate solution, then the ElasticNet update:
            # soft-threshold by lam * alpha and shrink by the ridge term
            uncBeta = xyj / nrows + betaInner[iCol]
            betaInner[iCol] = S(uncBeta, lam * alpha) / (1 + lam * (1 - alpha))
        sumDiff = sum([abs(betaInner[n] - betaStart[n]) for n in range(ncols)])
        sumBeta = sum([abs(betaInner[n]) for n in range(ncols)])
        deltaBeta = sumDiff / sumBeta   # relative change in the coefficient vector
        print(deltaBeta)
    print(iStep, iterStep)
    beta = betaInner
    betaMat.append(beta)

    # record any attribute whose coefficient has become non-zero at this step
    nzBeta = [index for index in range(ncols) if beta[index] != 0.0]
    for q in nzBeta:
        if q not in nzList:
            nzList.append(q)

# attribute names in the order they entered the model
nameList = [names[nzList[i]] for i in range(len(nzList))]
print(nameList)
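# As a cross-check, sklearn's coordinate-descent LASSO traces the same kind of
# coefficient path; a minimal sketch using the linear_model module imported above
# (left commented out so the script's output stays as shown below):
#   X = numpy.array(xNormalized)
#   y = numpy.array(labelNormalized)
#   alphas, coefs, _ = linear_model.lasso_path(X, y)
#   # coefs has shape (ncols, len(alphas)), one column of coefficients per alpha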

nPts = len(betaMat)
for i in range(ncols):
    # one curve per attribute: its coefficient at every step along the path
    coefCurve = [betaMat[k][i] for k in range(nPts)]
    xaxis = range(nPts)
    plot.plot(xaxis, coefCurve)

plot.xlabel("Steps Taken")
plot.ylabel("Coefficient Values")
plot.show()
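To tell the curves apart, a legend can be added before plot.show() (a minimal sketch; names still holds the quoted byte strings read from the CSV header):

for i in range(ncols):
    coefCurve = [betaMat[k][i] for k in range(nPts)]
    plot.plot(range(nPts), coefCurve, label=names[i].decode('utf-8').strip('"'))
plot.legend(loc='best', fontsize=8)
plot.show()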

Final run output:

1.0
7.660927429069544e-14
0 2
0.48186528497406733
7.766202620143036e-15
1 2
0.3094565100719131
7.746412065603154e-15
2 2
0.2960992927139347
0.04479396305021842
0.0018352208997363972
3 3
0.2587797189518615
0.028461454157101728
0.001165556071097969
4 3
0.19704697955954012
0.021574218176883864
0.0008833440480076845
5 3
0.15681958146932848
0.01711978419933609
0.0007008749779264942
6 3
0.12859149389967606
0.014009517160353362
0.0005734938267187316
7 3
0.10773847080952316
0.011719999328994302
0.00047974025406813376
8 3
0.09174143983501057
0.009968301885601886
9 2
0.0788709948427117
0.008602918343163735
10 2
0.06870039250794036
0.007488104357363408
11 2
0.06513144675406116
0.008954671888753537
12 2
0.07162380771110424
0.012507185365986544
0.0007064612699625676
13 3
0.06303788217137794
0.010999700855115493
0.000622154708319487
14 3
0.0557404661577843
0.009719066508940843
15 2
0.04923054998793937
0.008646124478930808
16 2
0.04399433490772871
0.007722090899574827
17 2
0.0394790755668868
0.006926300475640684
18 2
0.035555267662213313
0.00623535804141607
19 2
0.03542588199195089
0.007148860799712854
20 2
0.03763565522967456
0.006599046436264904
21 2
0.03391038702962465
0.0059613755397397925
22 2
0.03066667291776782
0.005389694880167262
23 2
0.02780724034727454
0.004885827219170376
24 2
0.025273324369474947
0.004439547405468631
25 2
0.029896557684823164
0.00808724237431938
26 2
0.03240686697555933
0.008488787910015134
27 2
0.030201455663052875
0.007734159761632011
28 2
0.029529685466006424
0.011601467128324092
0.00505355035585772
29 3
0.026647703707447422
0.011612915622813478
0.005305207449647482
30 3
0.02421198185096137
0.010753415903598624
0.004931109923953799
31 3
0.022001691894491054
0.009797761802810532
32 2
0.020424276759888886
0.010027078780378952
0.004657464908109853
33 3
0.018314813324258036
0.008268731014181085
34 2
0.017026188682999384
0.008384736291763718
35 2
0.01565270488251711
0.007875219299358362
36 2
0.014358935677135837
0.007263195956624264
37 2
0.013175746324586428
0.006673703921589477
38 2
0.012100669850721026
0.006131330972676951
39 2
0.011124011006190562
0.00563708059388805
40 2
0.01023555406072306
0.005187107917256629
41 2
0.009426014143285392
42 1
0.009465242295908876
43 1
0.012799962454704234
0.009171412821181786
44 2
0.011673397656984321
0.007352327003338079
45 2
0.010529142262001833
0.0062009192798988646
46 2
0.009475041331178331
47 1
0.0100314173284008
0.006348418639378765
48 2
0.008464318706580876
49 1
0.009625709228817054
50 1
0.009711386243600904
51 1
0.009329866131761531
52 1
0.008777448546457716
53 1
0.009207666214831572
54 1
0.009466412441026176
55 1
0.009498042897986017
56 1
0.0090980586316763
57 1
0.008762170944546337
58 1
0.008454032021140081
59 1
0.007993942660976642
60 1
0.007465229587011302
61 1
0.007277585431600777
62 1
0.007108890477898357
63 1
0.007060728920538817
64 1
0.006980272102067449
65 1
0.006832110169234684
66 1
0.006638281548293216
67 1
0.0064145521671366825
68 1
0.006225625180587745
69 1
0.006101091062096072
70 1
0.0059446201233039585
71 1
0.00576406633693867
72 1
0.005565868977267822
73 1
0.0053553139612133455
74 1
0.005136737348733823
75 1
0.00491368647196008
76 1
0.004689050113542807
77 1
0.004465165487405902
78 1
0.0042439072450718455
79 1
0.004026762129490382
80 1
0.003814891902881599
81 1
0.0036102882146570366
82 1
0.003416084419600545
83 1
0.0032282667922483186
84 1
0.0030472919615610124
85 1
0.0028734685289147485
86 1
0.002706982806414263
87 1
0.0025479206407273518
88 1
0.0023962858982822023
89 1
0.002252016102545357
90 1
0.00211499564189905
91 1
0.0019850669052312846
92 1
0.0018620396501248318
93 1
0.0017456988640412558
94 1
0.0016358113410018629
95 1
0.00153213116394545
96 1
0.0014344042553862592
97 1
0.0013423721354553508
98 1
0.0012557750063203537
99 1
[b'"alcohol"', b'"volatile acidity"', b'"sulphates"', b'"total sulfur dioxide"', b'"chlorides"', b'"fixed acidity"', b'"pH"', b'"free sulfur dioxide"', b'"residual sugar"', b'"citric acid"', b'"density"']

