import numpy as np
def loadDataSet(fileName='testSet.txt'):
    """Load a whitespace-delimited data file into feature rows and labels.

    Each line must contain two feature values followed by an integer class
    label. A constant 1.0 is prepended to every feature row to serve as the
    bias/intercept term.

    Args:
        fileName: path to the data file (default preserves the original
            hard-coded 'testSet.txt' behavior).

    Returns:
        (dataMat, labelMat): list of [1.0, x1, x2] float rows and the
        matching list of int labels.
    """
    dataMat = []
    labelMat = []
    # 'with' guarantees the handle is closed (the original leaked it), and
    # iterating the file directly avoids readlines() materializing all lines.
    with open(fileName) as fr:
        for line in fr:
            lineArr = line.strip().split()
            dataMat.append([1.0, float(lineArr[0]), float(lineArr[1])])
            labelMat.append(int(lineArr[2]))
    return dataMat, labelMat
def sigmoid(inX):
    """Logistic function: map a scalar or NumPy array into (0, 1)."""
    denom = 1.0 + np.exp(-inX)
    return 1.0 / denom
def gradAscent(dataMatIn, classLabels, alpha=0.001, maxCycles=500):
    """Train logistic-regression weights with full-batch gradient ascent.

    Args:
        dataMatIn: m x n list of feature rows (first column is the 1.0
            bias term produced by loadDataSet).
        classLabels: length-m list of 0/1 class labels.
        alpha: learning-rate step size (default keeps the original 0.001).
        maxCycles: number of full-batch iterations (default keeps the
            original 500).

    Returns:
        (n, 1) NumPy matrix of trained weights.
    """
    dataMatrix = np.mat(dataMatIn)              # (m, n) feature matrix
    labelMat = np.mat(classLabels).transpose()  # (m, 1) column of labels
    m, n = np.shape(dataMatrix)
    weights = np.ones((n, 1))                   # (n, 1) initial weights
    for _ in range(maxCycles):
        h = sigmoid(dataMatrix * weights)       # (m, 1) predicted probabilities
        error = labelMat - h                    # (m, 1) residuals
        # Ascend the log-likelihood gradient: X^T (y - h).
        weights = weights + alpha * dataMatrix.transpose() * error
    return weights
if __name__ == "__main__":
    # Demo: train on the sample data set and print the learned weights.
    # The guard keeps the demo from running when this module is imported.
    dataMat, labelMat = loadDataSet()
    print(gradAscent(dataMat, labelMat))
逻辑回归
最新推荐文章于 2024-11-21 16:37:27 发布
本文介绍了如何使用Python和NumPy实现逻辑回归的梯度上升算法，详细步骤包括数据集加载、sigmoid函数应用、误差计算及权重更新。通过实例展示了如何利用`loadDataSet`和`gradAscent`函数进行数据预处理和模型训练。
5万+

被折叠的 条评论
为什么被折叠?



