import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
public class Demo {
    // Parsed contents of the two HDFS input files and the merged result.
    // The initial sizes (80/40/88) are placeholders: all three arrays are
    // reassigned in main() before they are ever read.
    static int[] array = new int[80];
    static int[] array2 = new int[40];
    static int[] array3 = new int[88];

    /**
     * Merges two ascending-sorted int arrays into a single ascending-sorted array.
     *
     * @param A first sorted input array (may be empty)
     * @param B second sorted input array (may be empty)
     * @return a newly allocated array containing every element of A and B in
     *         ascending order; never an alias of A or B
     */
    public static int[] mergeSortedArray(int[] A, int[] B) {
        // Return defensive copies so callers never receive an alias of an
        // input array (the original returned A/B directly here).
        if (A.length == 0) {
            return B.clone();
        }
        if (B.length == 0) {
            return A.clone();
        }
        int[] C = new int[A.length + B.length];
        int i = 0; // cursor into A
        int j = 0; // cursor into B
        int k = 0; // cursor into C
        // Standard two-pointer merge: always take the smaller head element.
        // The ">" comparison keeps the merge stable (ties take from A first).
        while (i < A.length && j < B.length) {
            if (A[i] > B[j]) {
                C[k++] = B[j++];
            } else {
                C[k++] = A[i++];
            }
        }
        // Drain whichever input still has elements left.
        while (i < A.length) {
            C[k++] = A[i++];
        }
        while (j < B.length) {
            C[k++] = B[j++];
        }
        return C;
    }

    /**
     * Reads an HDFS text file line by line, echoing each line to stdout, and
     * returns the whitespace-separated integers parsed from the LAST line
     * (matching the original behavior, where each line's parse overwrote the
     * previous one).
     *
     * @param fs   HDFS file system handle
     * @param path file to read (assumed UTF-8 text of whitespace-separated ints)
     * @return the integers of the last line, or an empty array for an empty file
     * @throws IOException if the file cannot be opened or read
     */
    private static int[] readIntsFromHdfs(FileSystem fs, Path path) throws IOException {
        int[] result = new int[0];
        // try-with-resources closes the stream and readers even on failure;
        // the original leaked all three.
        try (FSDataInputStream in = fs.open(path);
             BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println("line=" + line);
                // trim() first so leading whitespace cannot produce an empty
                // first token and a NumberFormatException from parseInt.
                String[] tokens = line.trim().split("\\s+");
                int[] parsed = new int[tokens.length];
                for (int i = 0; i < tokens.length; i++) {
                    parsed[i] = Integer.parseInt(tokens[i]);
                    System.out.print(parsed[i] + " ");
                }
                result = parsed;
            }
        }
        return result;
    }

    /**
     * Reads two sorted integer files from HDFS, merges them, writes the merged
     * sequence to /home/hadoop/5.txt, and uploads that file back into HDFS as
     * /input/5.txt.
     */
    public static void main(String[] args) {
        try {
            String filename = "hdfs://localhost:9000/input/3.txt";
            String filename2 = "hdfs://localhost:9000/input/4.txt";
            Configuration conf = new Configuration();
            // A single FileSystem handle suffices: both paths live on the same
            // HDFS instance (the original created three equivalent handles and
            // accidentally read the second file through the first one's handle).
            FileSystem fs = FileSystem.get(URI.create(filename), conf);
            Path filePath = new Path(filename);
            Path filePath2 = new Path(filename2);
            if (fs.exists(filePath) && fs.exists(filePath2)) {
                System.out.println("文件存在");
                array = readIntsFromHdfs(fs, filePath);
                array2 = readIntsFromHdfs(fs, filePath2);
                // Merge the two sorted inputs into one sorted array.
                array3 = mergeSortedArray(array, array2);
                // Write the merged result to the local file 5.txt,
                // tab-separated, echoing each value to stdout.
                File file = new File("/home/hadoop/5.txt");
                try (FileWriter out = new FileWriter(file)) {
                    for (int value : array3) {
                        out.write(value + "\t");
                        System.out.println(value);
                    }
                }
                // Upload the local result file into HDFS; upload failure is
                // reported but does not abort the program (best-effort, as in
                // the original).
                try {
                    Path localPath = new Path("/home/hadoop/5.txt");
                    Path hdfsPath = new Path("hdfs://localhost:9000/input/5.txt");
                    fs.copyFromLocalFile(localPath, hdfsPath);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                System.out.println("文件不存在");
            }
        } catch (Exception e) {
            // Boundary catch: any I/O or parse failure is reported here,
            // matching the original's printStackTrace handling.
            e.printStackTrace();
        }
    }
}
//Format the HDFS file system:
//./bin/hadoop namenode -format
//Start HDFS (stop it when finished, otherwise the data nodes may fail to start next time):
//./sbin/start-dfs.sh
//Create the input directory:
//./bin/hadoop fs -mkdir /input
//List the input directory (initially empty):
//./bin/hdfs dfs -ls /input
//Upload the input files to /input:
//./bin/hadoop fs -put /home/hadoop/3.txt /input
//./bin/hadoop fs -put /home/hadoop/4.txt /input
//Run this Java program to generate 5.txt
//View 5.txt:
//./bin/hdfs dfs -text /input/5.txt
//Delete 5.txt:
//./bin/hdfs dfs -rm /input/5.txt