package org.apache.hadoop.book;
import java.io.InputStream;
import java.net.URL;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;
/**
 * Prints a file from a Hadoop filesystem to standard output, reading it via
 * {@link java.net.URL} (classic "URLCat" example).
 *
 * <p>Usage: {@code hadoop org.apache.hadoop.book.URLCat <url>}, e.g.
 * {@code hdfs://localhost:9000/hdc/test}.
 */
public class URLCat {

static {
// Register Hadoop's stream handler factory so URL understands hdfs:// URLs.
// The JVM permits setURLStreamHandlerFactory to be called at most once per
// process, hence the static initializer.
URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
}

public static void main(String[] args) throws Exception {
// Fail fast with a usage message instead of ArrayIndexOutOfBoundsException
// when no URL argument is supplied.
if (args.length < 1) {
System.err.println("Usage: URLCat <url>");
System.exit(-1);
}
// try-with-resources closes the stream on every exit path, replacing the
// manual try/finally + IOUtils.closeStream pattern.
try (InputStream in = new URL(args[0]).openStream()) {
// 4096-byte buffer; 'false' keeps System.out open after the copy.
IOUtils.copyBytes(in, System.out, 4096, false);
}
}
}
/*1,创建目录class,并编译
[root@centos7 class]# pwd
/root/hadoop-2.9.1/class 注: class 目录一般在 hadoop 的安装目录下
[root@centos7 class]# javac -classpath /home/hdc/user/hadoop-2.9.1/share/hadoop/common/hadoop-common-2.9.1.jar -d ${HADOOP_HOME}/class URLCat.java
(javac -classpath /home/hdc/user/hadoop-2.9.1/share/hadoop/common/hadoop-common-2.9.1.jar:/home/hdc/user/hadoop-2.9.1/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.9.1.jar:/home/hdc/user/hadoop-2.9.1/share/hadoop/common/lib/commons-cli-1.2.jar -d ${HADOOP_HOME}/class *.java )
2,打包
jar -cvf URLCat.jar ./org/apache/hadoop/book/URLCat.class
3,配置环境
export HADOOP_CLASSPATH=/home/hdc/user/hadoop-2.9.1/class
4,执行
hadoop org.apache.hadoop.book.URLCat hdfs://localhost:9000/hdc/test
(hadoop jar MaxTemperature.jar temperature.MaxTemperature /program/maxtemperature/input /program/maxtemperature/output)
(
jar -cvf WordCount.jar ~/eclipse-workspace/WordCount/bin/WordCount*.class //有三个.class文件分别是WordCount.class,WordCountMapper.class,WordCountReducer.class
hadoop jar ~/eclipse-workspace/WordCount/bin/WordCount.jar WordCount /hdc/input_1/word /program_output/wordcount_out
主要参考教程:http://dblab.xmu.edu.cn/blog/hadoop-build-project-by-shell/
)
*/