以下是使用2种方式实现的概率算法比较:
import java.util.*;
public class ProbabilityUtil {
    /** Number of simulated draws per benchmark run. */
    private static final int COUNT = 100000;

    /**
     * Single shared RNG. Creating a {@code new Random()} inside the loop (as the
     * original did) is both slow and risky: instances seeded in quick succession
     * from the clock can produce correlated sequences.
     */
    private static final Random RANDOM = new Random();

    public static void main(String[] args) {
        List<Double> list = Arrays.asList(0.1, 0.2, 0.2);
        baseSubtract(list);
        baseList(list);
    }

    /**
     * Weighted random selection implemented by repeated subtraction.
     * For each draw, walks the probability list subtracting each weight from a
     * uniform random offset; the interval where the offset first drops below
     * zero is the selected index.
     *
     * @param origin list of interval probabilities (need not sum to 1; draws
     *               falling beyond the total sum select nothing and are not counted)
     * @return counter map: interval index -> number of hits over {@link #COUNT} draws
     */
    public static Map<Integer, Integer> baseSubtract(List<Double> origin) {
        long start = System.nanoTime();
        Map<Integer, Integer> counters = new HashMap<Integer, Integer>();
        for (int j = 0; j < COUNT; j++) {
            double offset = RANDOM.nextDouble();
            for (int i = 0; i < origin.size(); i++) {
                offset -= origin.get(i);
                if (offset < 0) {
                    counters.merge(i, 1, Integer::sum);
                    break;
                }
            }
        }
        System.out.println("-------------- 基于减法实现的随机数算法 --------------");
        System.out.println("耗时:" + (System.nanoTime() - start) / 1_000_000_000.0 + "s");
        System.out.println("区间概率:" + counters);
        return counters;
    }

    /**
     * Weighted random selection via sorted insertion position: insert the random
     * offset into the cumulative (prefix-sum) list, sort, and use its resulting
     * index as the selected interval.
     *
     * @param origin list of interval probabilities
     * @return counter map: interval index -> number of hits over {@link #COUNT}
     *         draws; index {@code origin.size()} counts draws that fell beyond
     *         the cumulative sum of all probabilities
     */
    public static Map<Integer, Integer> baseList(List<Double> origin) {
        long start = System.nanoTime();
        // The prefix-sum boundaries are loop-invariant: build them once instead
        // of recomputing them for every one of the COUNT iterations.
        List<Double> prefix = new ArrayList<Double>(origin.size());
        double running = 0.0;
        for (Double d : origin) {
            running += d;
            prefix.add(running);
        }
        Map<Integer, Integer> counters = new HashMap<Integer, Integer>();
        for (int j = 0; j < COUNT; j++) {
            // Fresh copy per draw: the offset insertion below mutates the list.
            List<Double> list = new ArrayList<Double>(prefix);
            double offset = RANDOM.nextDouble();
            list.add(offset);
            Collections.sort(list);
            counters.merge(list.indexOf(offset), 1, Integer::sum);
        }
        System.out.println("-------------- 基于List数组的index实现的随机数算法 --------------");
        System.out.println("耗时:" + (System.nanoTime() - start) / 1_000_000_000.0 + "s");
        System.out.println("区间概率:" + counters);
        return counters;
    }
}
输出结果:
-------------- 基于减法实现的随机数算法 --------------
耗时:0.068s 区间概率:{0=9949, 1=20034, 2=19899}
-------------- 基于List数组的index实现的随机数算法 --------------
耗时:0.203s 区间概率:{0=10024, 1=20107, 2=20029, 3=49840}
结论:基于减法实现的随机算法比基于List实现的算法效率要快很多。