As a machine learning enthusiast, I find logistic regression a good algorithm to start with: the theory is simple, but there are a few things to watch out for when implementing it by hand:
First, the step size (learning rate): choose it according to the scale of your data.
Second, you can decide for yourself whether to add a constant (intercept) term to the training features; a sketch after the data-loading code below shows one way to do it.
Third, the code here actually solves by gradient ascent. Textbooks usually present gradient descent, but maximizing the log-likelihood by gradient ascent is the same thing as minimizing the negative log-likelihood by gradient descent, and the ascent form is convenient in practice.
Fourth, regularization: an L1 or L2 penalty can be folded into the update; see the sketch right after this list.
Fifth, the stopping criterion: in practice you can stop after a fixed number of iterations, stop once the change in the weights falls below a threshold, or combine the two.
Sixth, the optimizer: batch gradient, stochastic gradient, or a quasi-Newton method all work, and the principles are straightforward.
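As a minimal sketch of point four (the helper name regularizedStep and the lambda value are my own, not part of the implementation below): the gradient of an L2 penalty (lambda/2)*w^2 is lambda*w, and the gradient of an L1 penalty lambda*|w| is lambda*sign(w), so a regularized ascent step looks like this:

    // Hypothetical helper, not part of the classes below.
    // grad   : log-likelihood gradient for one weight, e.g. sum((y - p) * x)
    // w      : current value of that weight
    // lambda : regularization strength (an assumed value; tune for your data)
    static float regularizedStep(float w, float grad, float step,
                                 float lambda, boolean useL1) {
        float penalty = useL1 ? lambda * Math.signum(w) // d/dw of lambda*|w|
                              : lambda * w;             // d/dw of (lambda/2)*w^2
        return w + step * (grad - penalty);
    }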
That is about it; additions and corrections from more experienced readers are welcome. Below is my Java implementation. The data comes from the book Machine Learning in Action (the Python one). The Java version does not use a matrix library: once you understand the matrix form of the algorithm, plain lists do the same job. Enough talk, on to the code.
First, the data-loading code:
package com.wanda.logistic;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ReadData {

    public static final String PATH = "d:\\wilson.zhou\\Desktop\\logistic.txt";

    public static List<List<Float>> dataList = new ArrayList<List<Float>>();
    public static List<Float> labelList = new ArrayList<Float>();

    // Load the data once, when the class is first used.
    static {
        try {
            init();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private static void init() throws IOException {
        BufferedReader buff = new BufferedReader(new InputStreamReader(
                new FileInputStream(new File(PATH))));
        String str = buff.readLine();
        while (str != null) {
            // Split on any whitespace: the sample data is tab/space separated.
            String[] arr = str.trim().split("\\s+");
            labelList.add(Float.parseFloat(arr[2]));
            dataList.add(Arrays.asList(Float.parseFloat(arr[0]),
                    Float.parseFloat(arr[1])));
            str = buff.readLine();
        }
        buff.close();
    }
}
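As mentioned in point two above, this loader keeps only the two raw features, so the model it feeds has no intercept. A minimal sketch of adding one while loading (the constant 1.0f column is my own addition, not in the code above):

    // Hypothetical variant of the dataList.add(...) line in init():
    // prepend a constant feature x0 = 1.0 so that w[0] learns the intercept.
    dataList.add(Arrays.asList(1.0f,
            Float.parseFloat(arr[0]),
            Float.parseFloat(arr[1])));

The trainer below sizes its weight vector from dataList.get(0).size(), so it would pick up the extra dimension automatically.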
The logistic regression code:
package com.wanda.logistic;

import java.util.Arrays;
import java.util.List;

public class LogRegression {

    public static void main(String[] args) {
        LogRegression lr = new LogRegression();
        ReadData instances = new ReadData();
        lr.train(instances, 0.001f, 1); // type 1 = batch, type 2 = stochastic
    }

    public void train(ReadData instances, float step, int type) {
        List<List<Float>> datas = instances.dataList;
        List<Float> labels = instances.labelList;
        int size = datas.size();
        int dim = datas.get(0).size();
        float[] w = new float[dim]; // weights, initialized to zero
        float change = Float.MAX_VALUE;
        int iterations = 0;
        switch (type) {
        case 1: // batch gradient ascent on the log-likelihood
            // Stops on weight change alone; a max-iteration cap could be
            // combined with it, as suggested in point five above.
            while (change > 0.0001f) {
                float[] wClone = w.clone();
                float[] out = new float[size];
                for (int s = 0; s < size; s++) {
                    float z = innerProduct(w, datas.get(s));
                    out[s] = sigmoid(z);
                }
                for (int d = 0; d < dim; d++) {
                    float sum = 0;
                    for (int s = 0; s < size; s++) {
                        sum += (labels.get(s) - out[s]) * datas.get(s).get(d);
                    }
                    float q = w[d];
                    w[d] = q + step * sum;
                    // L2 penalty: the gradient of (0.01/2)*q^2 is 0.01*q
                    // w[d] = q + step * (sum - 0.01f * q);
                    // L1 penalty: the gradient of 0.01*|q| is 0.01*sign(q)
                    // w[d] = q + step * (sum - 0.01f * Math.signum(q));
                }
                change = weightChange(wClone, w);
                iterations++;
                System.out.println("iteration: " + iterations + " weights: "
                        + Arrays.toString(w));
            }
            break;
        case 2: // stochastic gradient ascent: update after every sample
            while (change > 0.0001f) {
                float[] wClone = w.clone();
                for (int s = 0; s < size; s++) {
                    float z = innerProduct(w, datas.get(s));
                    float out = sigmoid(z);
                    float error = labels.get(s) - out;
                    for (int d = 0; d < dim; d++) {
                        w[d] += step * error * datas.get(s).get(d);
                    }
                }
                change = weightChange(wClone, w);
                iterations++;
                System.out.println("iteration: " + iterations + " weights: "
                        + Arrays.toString(w));
            }
            break;
        default:
            break;
        }
    }

    // Euclidean distance between successive weight vectors (convergence test).
    private float weightChange(float[] wClone, float[] w) {
        float change = 0;
        for (int i = 0; i < w.length; i++) {
            change += Math.pow(w[i] - wClone[i], 2);
        }
        return (float) Math.sqrt(change);
    }

    private float innerProduct(float[] w, List<Float> x) {
        float sum = 0;
        for (int i = 0; i < w.length; i++) {
            sum += w[i] * x.get(i);
        }
        return sum;
    }

    // Logistic (sigmoid) function: maps the linear score into (0, 1).
    private float sigmoid(float src) {
        return (float) (1.0 / (1 + Math.exp(-src)));
    }
}
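With the learned weights you can classify a point by thresholding the sigmoid at 0.5. A minimal sketch of prediction and training-set accuracy (these helpers are my own and assume the trained w is exposed somehow, e.g. returned from train):

    // Hypothetical helpers, not part of the class above
    // (needs java.util.List, already imported there).
    static float predict(float[] w, List<Float> x) {
        float z = 0;
        for (int i = 0; i < w.length; i++) {
            z += w[i] * x.get(i);
        }
        return (float) (1.0 / (1 + Math.exp(-z))); // P(y = 1 | x)
    }

    // Fraction of samples whose thresholded prediction matches the label.
    static float accuracy(float[] w, List<List<Float>> datas, List<Float> labels) {
        int hit = 0;
        for (int s = 0; s < datas.size(); s++) {
            int predicted = predict(w, datas.get(s)) >= 0.5f ? 1 : 0;
            if (predicted == labels.get(s).intValue()) {
                hit++;
            }
        }
        return hit / (float) datas.size();
    }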
The data:
-0.017612 14.053064 0
-1.395634 4.662541 1
-0.752157 6.538620 0
-1.322371 7.152853 0
0.423363 11.054677 0
0.406704 7.067335 1
0.667394 12.741452 0
-2.460150 6.866805 1
0.569411 9.548755 0
-0.026632 10.427743 0
0.850433 6.920334 1
1.347183 13.175500 0
1.176813 3.167020 1
-1.781871 9.097953 0
-0.566606 5.749003 1
0.931635 1.589505 1
-0.024205 6.151823 1
-0.036453 2.690988 1
-0.196949 0.444165 1
1.014459 5.754399 1
1.985298 3.230619 1
-1.693453 -0.557540 1
-0.576525 11.778922 0
-0.346811 -1.678730 1
-2.124484 2.672471 1
1.217916 9.597015 0
-0.733928 9.098687 0
-3.642001 -1.618087 1
0.315985 3.523953 1
1.416614 9.619232 0
-0.386323 3.989286 1
0.556921 8.294984 1
1.224863 11.587360 0
-1.347803 -2.406051 1
1.196604 4.951851 1
0.275221 9.543647 0
0.470575 9.332488 0
-1.889567 9.542662 0
-1.527893 12.150579 0
-1.185247 11.309318 0
-0.445678 3.297303 1
1.042222 6.105155 1
-0.618787 10.320986 0
1.152083 0.548467 1
0.828534 2.676045 1
-1.237728 10.549033 0
-0.683565 -2.166125 1
0.229456 5.921938 1
-0.959885 11.555336 0
0.492911 10.993324 0
0.184992 8.721488 0
-0.355715 10.325976 0
-0.397822 8.058397 0
0.824839 13.730343 0
1.507278 5.027866 1
0.099671 6.835839 1
-0.344008 10.717485 0
1.785928 7.718645 1
-0.918801 11.560217 0
-0.364009 4.747300 1
-0.841722 4.119083 1
0.490426 1.960539 1
-0.007194 9.075792 0
0.356107 12.447863 0
0.342578 12.281162 0
-0.810823 -1.466018 1
2.530777 6.476801 1
1.296683 11.607559 0
0.475487 12.040035 0
-0.783277 11.009725 0
0.074798 11.023650 0
-1.337472 0.468339 1
-0.102781 13.763651 0
-0.147324 2.874846 1
0.518389 9.887035 0
1.015399 7.571882 0
-1.658086 -0.027255 1
1.319944 2.171228 1
2.056216 5.019981 1
-0.851633 4.375691 1
-1.510047 6.061992 0
-1.076637 -3.181888 1
1.821096 10.283990 0
3.010150 8.401766 1
-1.099458 1.688274 1
-0.834872 -1.733869 1
-0.846637 3.849075 1
1.400102 12.628781 0
1.752842 5.468166 1
0.078557 0.059736 1
0.089392 -0.715300 1
1.825662 12.693808 0
0.197445 9.744638 0
0.126117 0.922311 1
-0.679797 1.220530 1
0.677983 2.556666 1
0.761349 10.693862 0
-2.168791 0.143632 1
1.388610 9.341997 0
0.317029 14.739025 0