# Binary Search
// Prerequisite for binary search: the data must be sorted.
// Approach 1: recursive
public static int BinarySearch(int[] arr, int find, int start, int end) {
    // If the range is empty, or the key falls outside it, it cannot be present: return -1
    if (start > end || find < arr[start] || find > arr[end]) {
        return -1;
    }
    // Middle of the current range (start + (end - start) / 2 avoids int overflow for very large indices)
    int middle = (start + end) / 2;
    if (arr[middle] > find) {
        // Middle element is larger than the key, so the key can only be in the left half
        return BinarySearch(arr, find, start, middle - 1);
    } else if (arr[middle] < find) {
        // Middle element is smaller than the key, so the key can only be in the right half
        return BinarySearch(arr, find, middle + 1, end);
    } else {
        return middle;
    }
}
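A minimal usage sketch of the recursive overload; the sample array and lookup values are made up for illustration:
int[] sorted = {2, 5, 8, 13, 21, 34};                      // illustrative data, must be sorted
int hit  = BinarySearch(sorted, 13, 0, sorted.length - 1); // returns 3
int miss = BinarySearch(sorted, 7, 0, sorted.length - 1);  // returns -1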
// Approach 2: iterative
public static int BinarySearch(int[] arr, int find) {
    int start = 0;
    int end = arr.length - 1;
    int middle;
    // If the key lies outside the range covered by the array, it cannot be present: return -1
    if (find < arr[start] || find > arr[end]) {
        return -1;
    }
    while (start <= end) {
        middle = (start + end) / 2;
        if (arr[middle] > find) {
            // Middle element is larger than the key: continue in the left half
            end = middle - 1;
        } else if (arr[middle] < find) {
            // Middle element is smaller than the key: continue in the right half
            start = middle + 1;
        } else {
            return middle;
        }
    }
    return -1; // Range exhausted without a match
}
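For comparison, the JDK's java.util.Arrays.binarySearch does the same job; a small sketch with illustrative data:
import java.util.Arrays;

int[] sorted = {2, 5, 8, 13, 21, 34};
int hit  = Arrays.binarySearch(sorted, 13); // index 3 when the key is found
int miss = Arrays.binarySearch(sorted, 7);  // negative value, -(insertion point) - 1, when it is not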
# Quicksort
public class QuickSort {
    private static int count;

    public static void main(String[] args) {
        int[] num = {3, 45, 78, 64, 52, 11, 64, 55, 99, 11, 18};
        System.out.println(arrayToString(num, "unsorted"));
        quickSort(num, 0, num.length - 1);
        System.out.println(arrayToString(num, "sorted"));
        System.out.println("Array length: " + num.length);
        System.out.println("Partition count: " + count);
    }

    private static void quickSort(int[] num, int left, int right) {
        // A range of zero or one element is already sorted
        if (left >= right) {
            return;
        }
        // Use the leftmost element as the pivot
        int key = num[left];
        // Partition: values smaller than key end up on the left, larger on the right;
        // i will be the pivot's final index
        int i = left;
        int j = right;
        while (i < j) {
            // Move j left until it finds a value smaller than key
            while (num[j] >= key && i < j) {
                j--;
            }
            // Move i right until it finds a value larger than key
            while (num[i] <= key && i < j) {
                i++;
            }
            // Swap the elements at i and j
            if (i < j) {
                int temp = num[i];
                num[i] = num[j];
                num[j] = temp;
            }
        }
        // Put the pivot into its final position
        num[left] = num[i];
        num[i] = key;
        count++;
        quickSort(num, left, i - 1);
        quickSort(num, i + 1, right);
    }

    private static String arrayToString(int[] arr, String flag) {
        String str = "Array (" + flag + "): ";
        for (int a : arr) {
            str += a + "\t";
        }
        return str;
    }
}
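Because the pivot is always the leftmost element, an already (or nearly) sorted input drives this implementation to its O(n^2) worst case. A common mitigation is to swap a randomly chosen element into position left before partitioning; the helper below is only a sketch of that idea (swapRandomPivot is a name introduced here, not part of the original class):
import java.util.concurrent.ThreadLocalRandom;

// Sketch: randomize the pivot choice, then reuse the existing partition logic unchanged.
// Call swapRandomPivot(num, left, right) at the top of quickSort, before reading num[left].
private static void swapRandomPivot(int[] num, int left, int right) {
    int p = ThreadLocalRandom.current().nextInt(left, right + 1); // random index in [left, right]
    int temp = num[left];
    num[left] = num[p];
    num[p] = temp;
}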
# MapReduce (Word Count over HDFS, packaged as a jar)
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountHdfs {

    public static class MapDemoMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        Text outkey;
        IntWritable outvalue;

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // Reuse one key/value object per mapper instead of allocating per record
            outkey = new Text();
            outvalue = new IntWritable();
        }

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Split each input line on spaces and emit (word, 1) for every token
            String[] words = value.toString().split(" ");
            for (String word : words) {
                outkey.set(word);
                outvalue.set(1);
                context.write(outkey, outvalue);
            }
        }
    }

    public static class ReduceDemo extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            // Sum all counts for the same word
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(WordCountHdfs.class);
        job.setMapperClass(MapDemoMapper.class);
        job.setReducerClass(ReduceDemo.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setNumReduceTasks(1);

        // Delete the output path if it already exists, otherwise the job submission fails
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path(args[1]);
        if (fs.exists(path)) {
            fs.delete(path, true);
        }

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, path);

        boolean isDone = job.waitForCompletion(true);
        System.exit(isDone ? 0 : 1);
    }
}
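To run this on a cluster, the class is packaged into a jar and submitted with hadoop jar; the jar name and HDFS paths below are placeholders, not taken from these notes. args[0] is the HDFS input directory, args[1] the output directory, and with a single reducer the result lands in part-r-00000:
hadoop jar wordcount.jar WordCountHdfs /user/demo/input /user/demo/output
hdfs dfs -cat /user/demo/output/part-r-00000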