# Lesson learned: with gradient descent, verify that the objective really decreases
# (or increases, if maximizing). Here the loss (tempsum) must decrease — check it carefully!
import numpy as np
def dlhg(x, d, lr=0.01, tol=0.01, max_iter=10000):
    """Fit a logistic-regression model by batch gradient descent.

    Minimizes the negative log-likelihood
        sum_i [ log(1 + exp(w . x_i)) - d_i * (w . x_i) ]
    (Watermelon Book, exercise 3.4 style "对率回归").

    Parameters
    ----------
    x : ndarray, shape (n_samples, n_features)
        Feature matrix; a bias column of ones is appended internally.
    d : ndarray, shape (n_samples,)
        Binary labels (0 or 1).
    lr : float, optional
        Learning rate (the original hard-coded ``miu``); default 0.01.
    tol : float, optional
        Stop when the absolute change in loss falls below this; default 0.01.
    max_iter : int, optional
        Safety cap on iterations so non-converging data cannot hang; default 10000.

    Returns
    -------
    ndarray, shape (n_features + 1,)
        Learned weight vector; the last component is the bias term.

    Notes
    -----
    The initial weights are drawn from ``np.random.randn``, so results depend
    on NumPy's global random state; seed it for reproducibility.
    """
    # Append a bias column of ones so the intercept is learned as w[-1].
    xa = np.c_[x, np.ones(d.shape[0])]
    w = np.random.randn(xa.shape[1])
    loss_old = np.inf

    for _ in range(max_iter):
        pdt = xa @ w
        # logaddexp(0, p) == log(1 + exp(p)) computed without overflow.
        loss = np.sum(np.logaddexp(0.0, pdt) - d * pdt)
        # sigmoid(p) written as 0.5*(1 + tanh(p/2)): stable for large |p|,
        # unlike exp(p)/(1+exp(p)) which overflows.
        grad = xa.T @ (0.5 * (1.0 + np.tanh(0.5 * pdt)) - d)
        # Convergence test on the loss BEFORE updating w (same order as the
        # original: check, then step).
        if abs(loss - loss_old) < tol:
            break
        w = w - lr * grad
        loss_old = loss

    return w
# [python|ML] k-fold / leave-one-out logistic-regression implementation
# (Watermelon Book exercise 3.4, data: UCI iris)
# Most recent recommended article published 2022-04-11 20:50:26