A simple test program written after setting up a Hadoop 2.6.0 environment.

Create a Java project with Maven and add the Hadoop dependencies to the pom.xml:

	<dependencies>
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>3.8.1</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-hdfs</artifactId>
			<version>2.6.0</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-client</artifactId>
			<version>2.6.0</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-mapreduce-examples</artifactId>
			<version>2.6.0</version>
			<type>pom</type>
		</dependency>
	</dependencies>

In addition, configure the build so that the main class becomes the entry point of the jar:

	<build>
		<finalName>${project.artifactId}</finalName>
		<plugins>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-shade-plugin</artifactId>
				<version>1.4</version>
				<executions>
					<execution>
						<phase>package</phase>
						<goals>
							<goal>shade</goal>
						</goals>
						<configuration>
							<transformers>
								<transformer
									implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
									<mainClass>my.hadoopstudy.dfs.Test</mainClass>
								</transformer>
							</transformers>
						</configuration>
					</execution>
				</executions>
			</plugin>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-source-plugin</artifactId>
				<version>2.1.2</version>
				<configuration>
					<attach>true</attach>
				</configuration>
				<executions>
					<execution>
						<phase>compile</phase>
						<goals>
							<goal>jar</goal>
						</goals>
					</execution>
				</executions>
			</plugin>
		</plugins>
	</build>
Then write the Test class itself:

package my.hadoopstudy.dfs;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Test {

	private static final Logger LOGGER = LoggerFactory.getLogger(Test.class);

	public static void main(String[] args) {
		String uri = "hdfs://192.168.184.128:9000/";
		Configuration config = new Configuration();
		FileSystem fs = null;
		try {
			fs = FileSystem.get(URI.create(uri), config);
		} catch (IOException e) {
			LOGGER.error("get " + uri + " error", e);
			return; // nothing else can work without a FileSystem
		}

		// List all files and directories under /usr/local on HDFS
		FileStatus[] statuses = null;
		try {
			statuses = fs.listStatus(new Path("/usr/local"));
		} catch (Exception e) {
			LOGGER.error("listStatus /usr/local error", e);
		}
		if (statuses != null) {
			for (FileStatus status : statuses) {
				LOGGER.info(status + "");
			}
		}

		// Create a file under /usr/local on HDFS and write one line of text into it
		FSDataOutputStream os = null;
		try {
			os = fs.create(new Path("/usr/local/test.log"));
			os.write("Hello World!".getBytes("UTF-8"));
			os.flush();
		} catch (Exception e) {
			LOGGER.error("create /usr/local/test.log error", e);
		} finally {
			try {
				if (os != null) {
					os.close();
				}
			} catch (IOException e) {
				LOGGER.error("close /usr/local/test.log error", e);
			}
		}

		// Read the file back from HDFS and print its contents
		FSDataInputStream is = null;
		BufferedReader bufferedReader = null;
		try {
			is = fs.open(new Path("/usr/local/test.log"));
			bufferedReader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
			String content;
			while ((content = bufferedReader.readLine()) != null) {
				LOGGER.info("content:" + content);
			}
		} catch (Exception e) {
			LOGGER.error("open /usr/local/test.log error", e);
		} finally {
			try {
				if (bufferedReader != null) {
					bufferedReader.close();
				} else if (is != null) {
					is.close();
				}
			} catch (IOException e) {
				LOGGER.error("close /usr/local/test.log reader error", e);
			}
		}
		LOGGER.info("成功");
	}
}
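For reference, the same flow can be written more compactly with Java 7 try-with-resources and Hadoop's IOUtils, which close the streams automatically. This is only a sketch under the same assumptions as the original code (the NameNode address and the /usr/local paths); the class name Test2 is just illustrative and is not part of the original post:

package my.hadoopstudy.dfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class Test2 {
	public static void main(String[] args) throws Exception {
		Configuration config = new Configuration();
		// Same NameNode address as in the original example
		try (FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.184.128:9000/"), config)) {
			// List /usr/local
			for (FileStatus status : fs.listStatus(new Path("/usr/local"))) {
				System.out.println(status);
			}
			// Write one line, then read it back; try-with-resources closes the streams
			Path file = new Path("/usr/local/test.log");
			try (FSDataOutputStream os = fs.create(file)) {
				os.write("Hello World!".getBytes("UTF-8"));
			}
			try (FSDataInputStream is = fs.open(file)) {
				IOUtils.copyBytes(is, System.out, 4096, false);
			}
		}
	}
}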
After the class is written, use Maven to package the project into a jar; the Maven command is clean package.

Once the jar is built, copy it to the Hadoop host and run it there with java -jar ***.jar, as shown below.
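For reference, the end-to-end sequence is roughly the following; the actual jar file name comes from your artifactId (the pom sets finalName to ${project.artifactId}), so substitute your own:

	# on the development machine
	mvn clean package

	# on the Hadoop host, after copying target/<artifactId>.jar over
	java -jar <artifactId>.jar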

If you run into connectivity problems caused by the router or similar network issues, see this article: http://blog.csdn.net/qq_22929803/article/details/45871107

Below is the debug log from a successful run:

2015-05-20 15:11:02,638 DEBUG [org.apache.hadoop.metrics2.lib.MutableMetricsFactory]:42 - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Rate of successful kerberos logins and latency (milliseconds)])
2015-05-20 15:11:02,659 DEBUG [org.apache.hadoop.metrics2.lib.MutableMetricsFactory]:42 - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Rate of failed kerberos logins and latency (milliseconds)])
2015-05-20 15:11:02,670 DEBUG [org.apache.hadoop.metrics2.lib.MutableMetricsFactory]:42 - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.getGroups with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[GetGroups])
2015-05-20 15:11:02,672 DEBUG [org.apache.hadoop.metrics2.impl.MetricsSystemImpl]:231 - UgiMetrics, User and group related metrics
2015-05-20 15:11:02,867 DEBUG [org.apache.hadoop.security.Groups]:278 -  Creating new Groups object
2015-05-20 15:11:02,879 DEBUG [org.apache.hadoop.util.NativeCodeLoader]:46 - Trying to load the custom-built native-hadoop library...
2015-05-20 15:11:02,885 DEBUG [org.apache.hadoop.util.NativeCodeLoader]:55 - Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path
2015-05-20 15:11:02,886 DEBUG [org.apache.hadoop.util.NativeCodeLoader]:56 - java.library.path=/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib
2015-05-20 15:11:02,886 WARN  [org.apache.hadoop.util.NativeCodeLoader]:62 - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2015-05-20 15:11:02,887 DEBUG [org.apache.hadoop.util.PerformanceAdvisory]:41 - Falling back to shell based
2015-05-20 15:11:02,891 DEBUG [org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback]:45 - Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping
2015-05-20 15:11:03,101 DEBUG [org.apache.hadoop.util.Shell]:396 - setsid exited with exit code 0
2015-05-20 15:11:03,103 DEBUG [org.apache.hadoop.security.Groups]:91 - Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000
2015-05-20 15:11:03,123 DEBUG [org.apache.hadoop.security.UserGroupInformation]:209 - hadoop login
2015-05-20 15:11:03,124 DEBUG [org.apache.hadoop.security.UserGroupInformation]:144 - hadoop login commit
2015-05-20 15:11:03,139 DEBUG [org.apache.hadoop.security.UserGroupInformation]:174 - using local user:UnixPrincipal: root
2015-05-20 15:11:03,153 DEBUG [org.apache.hadoop.security.UserGroupInformation]:180 - Using user: "UnixPrincipal: root" with name root
2015-05-20 15:11:03,154 DEBUG [org.apache.hadoop.security.UserGroupInformation]:190 - User entry: "root"
2015-05-20 15:11:03,159 DEBUG [org.apache.hadoop.security.UserGroupInformation]:799 - UGI loginUser:root (auth:SIMPLE)
2015-05-20 15:11:03,421 DEBUG [org.apache.hadoop.hdfs.BlockReaderLocal]:443 - dfs.client.use.legacy.blockreader.local = false
2015-05-20 15:11:03,421 DEBUG [org.apache.hadoop.hdfs.BlockReaderLocal]:446 - dfs.client.read.shortcircuit = false
2015-05-20 15:11:03,424 DEBUG [org.apache.hadoop.hdfs.BlockReaderLocal]:449 - dfs.client.domain.socket.data.traffic = false
2015-05-20 15:11:03,424 DEBUG [org.apache.hadoop.hdfs.BlockReaderLocal]:452 - dfs.domain.socket.path =
2015-05-20 15:11:03,477 DEBUG [org.apache.hadoop.hdfs.DFSClient]:634 - No KeyProvider found.
2015-05-20 15:11:03,557 DEBUG [org.apache.hadoop.io.retry.RetryUtils]:74 - multipleLinearRandomRetry = null
2015-05-20 15:11:03,634 DEBUG [org.apache.hadoop.ipc.Server]:233 - rpcKind=RPC_PROTOCOL_BUFFER, rpcRequestWrapperClass=class org.apache.hadoop.ipc.ProtobufRpcEngine$RpcRequestWrapper, rpcInvoker=org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker@31206beb
2015-05-20 15:11:03,668 DEBUG [org.apache.hadoop.ipc.Client]:63 - getting client out of cache: org.apache.hadoop.ipc.Client@77be656f
2015-05-20 15:11:04,503 DEBUG [org.apache.hadoop.util.PerformanceAdvisory]:109 - Both short-circuit local reads and UNIX domain socket are disabled.
2015-05-20 15:11:04,519 DEBUG [org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil]:183 - DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection
2015-05-20 15:11:04,582 DEBUG [org.apache.hadoop.ipc.Client]:427 - The ping interval is 60000 ms.
2015-05-20 15:11:04,585 DEBUG [org.apache.hadoop.ipc.Client]:697 - Connecting to /192.168.184.128:9000
2015-05-20 15:11:04,754 DEBUG [org.apache.hadoop.ipc.Client]:961 - IPC Client (586084331) connection to /192.168.184.128:9000 from root: starting, having connections 1
2015-05-20 15:11:04,756 DEBUG [org.apache.hadoop.ipc.Client]:1024 - IPC Client (586084331) connection to /192.168.184.128:9000 from root sending #0
2015-05-20 15:11:04,815 DEBUG [org.apache.hadoop.ipc.Client]:1081 - IPC Client (586084331) connection to /192.168.184.128:9000 from root got value #0
2015-05-20 15:11:04,816 DEBUG [org.apache.hadoop.ipc.ProtobufRpcEngine]:253 - Call: getListing took 276ms
2015-05-20 15:11:04,859 INFO  [my.hadoopstudy.dfs.Test]:44 - FileStatus{path=hdfs://192.168.184.128:9000/usr/local/test; isDirectory=true; modification_time=1432093791363; access_time=0; owner=root; group=supergroup; permission=rwxr-xr-x; isSymlink=false}
2015-05-20 15:11:04,860 INFO  [my.hadoopstudy.dfs.Test]:44 - FileStatus{path=hdfs://192.168.184.128:9000/usr/local/test.log; isDirectory=false; length=0; replication=3; blocksize=134217728; modification_time=1432103622198; access_time=1432103622198; owner=root; group=supergroup; permission=rw-r--r--; isSymlink=false}
2015-05-20 15:11:04,863 DEBUG [org.apache.hadoop.hdfs.DFSClient]:1657 - /usr/local/test.log: masked=rw-r--r--
2015-05-20 15:11:04,923 DEBUG [org.apache.hadoop.ipc.Client]:1024 - IPC Client (586084331) connection to /192.168.184.128:9000 from root sending #1
2015-05-20 15:11:04,930 DEBUG [org.apache.hadoop.ipc.Client]:1081 - IPC Client (586084331) connection to /192.168.184.128:9000 from root got value #1
2015-05-20 15:11:04,930 DEBUG [org.apache.hadoop.ipc.ProtobufRpcEngine]:253 - Call: create took 11ms
2015-05-20 15:11:04,940 DEBUG [org.apache.hadoop.hdfs.DFSClient]:1806 - computePacketChunkSize: src=/usr/local/test.log, chunkSize=516, chunksPerPacket=127, packetSize=65532
2015-05-20 15:11:04,959 DEBUG [org.apache.hadoop.hdfs.LeaseRenewer]:295 - Lease renewer daemon for [DFSClient_NONMAPREDUCE_2039563311_1] with renew id 1 started
2015-05-20 15:11:04,965 DEBUG [org.apache.hadoop.hdfs.DFSClient]:1873 - DFSClient writeChunk allocating new packet seqno=0, src=/usr/local/test.log, packetSize=65532, chunksPerPacket=127, bytesCurBlock=0
2015-05-20 15:11:04,965 DEBUG [org.apache.hadoop.hdfs.DFSClient]:1819 - Queued packet 0
2015-05-20 15:11:04,966 DEBUG [org.apache.hadoop.hdfs.DFSClient]:586 - Allocating new block
2015-05-20 15:11:04,971 DEBUG [org.apache.hadoop.hdfs.DFSClient]:1819 - Queued packet 1
2015-05-20 15:11:04,972 DEBUG [org.apache.hadoop.hdfs.DFSClient]:2137 - Waiting for ack for: 1
2015-05-20 15:11:04,989 DEBUG [org.apache.hadoop.ipc.Client]:1024 - IPC Client (586084331) connection to /192.168.184.128:9000 from root sending #2
2015-05-20 15:11:04,995 DEBUG [org.apache.hadoop.ipc.Client]:1081 - IPC Client (586084331) connection to /192.168.184.128:9000 from root got value #2
2015-05-20 15:11:04,996 DEBUG [org.apache.hadoop.ipc.ProtobufRpcEngine]:253 - Call: addBlock took 11ms
2015-05-20 15:11:05,015 DEBUG [org.apache.hadoop.hdfs.DFSClient]:1394 - pipeline = 192.168.184.129:50010
2015-05-20 15:11:05,017 DEBUG [org.apache.hadoop.hdfs.DFSClient]:1394 - pipeline = 192.168.184.130:50010
2015-05-20 15:11:05,017 DEBUG [org.apache.hadoop.hdfs.DFSClient]:1605 - Connecting to datanode 192.168.184.129:50010
2015-05-20 15:11:05,019 DEBUG [org.apache.hadoop.hdfs.DFSClient]:1614 - Send buf size 124928
2015-05-20 15:11:05,021 DEBUG [org.apache.hadoop.ipc.Client]:1024 - IPC Client (586084331) connection to /192.168.184.128:9000 from root sending #3
2015-05-20 15:11:05,023 DEBUG [org.apache.hadoop.ipc.Client]:1081 - IPC Client (586084331) connection to /192.168.184.128:9000 from root got value #3
2015-05-20 15:11:05,025 DEBUG [org.apache.hadoop.ipc.ProtobufRpcEngine]:253 - Call: getServerDefaults took 4ms
2015-05-20 15:11:05,033 DEBUG [org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient]:244 - SASL client skipping handshake in unsecured configuration for addr = /192.168.184.129, datanodeId = 192.168.184.129:50010
2015-05-20 15:11:05,312 DEBUG [org.apache.hadoop.hdfs.DFSClient]:637 - DataStreamer block BP-824136034-192.168.184.128-1432019708326:blk_1073741848_1024 sending packet packet seqno:0 offsetInBlock:0 lastPacketInBlock:false lastByteOffsetInBlock: 12
2015-05-20 15:11:05,494 DEBUG [org.apache.hadoop.hdfs.DFSClient]:876 - DFSClient seqno: 0 status: SUCCESS status: SUCCESS downstreamAckTimeNanos: 1635651
2015-05-20 15:11:05,494 DEBUG [org.apache.hadoop.hdfs.DFSClient]:637 - DataStreamer block BP-824136034-192.168.184.128-1432019708326:blk_1073741848_1024 sending packet packet seqno:1 offsetInBlock:12 lastPacketInBlock:true lastByteOffsetInBlock: 12
2015-05-20 15:11:05,502 DEBUG [org.apache.hadoop.hdfs.DFSClient]:876 - DFSClient seqno: 1 status: SUCCESS status: SUCCESS downstreamAckTimeNanos: 3076249
2015-05-20 15:11:05,509 DEBUG [org.apache.hadoop.ipc.Client]:1024 - IPC Client (586084331) connection to /192.168.184.128:9000 from root sending #4
2015-05-20 15:11:05,514 DEBUG [org.apache.hadoop.ipc.Client]:1081 - IPC Client (586084331) connection to /192.168.184.128:9000 from root got value #4
2015-05-20 15:11:05,516 DEBUG [org.apache.hadoop.ipc.ProtobufRpcEngine]:253 - Call: complete took 8ms
2015-05-20 15:11:05,543 DEBUG [org.apache.hadoop.ipc.Client]:1024 - IPC Client (586084331) connection to /192.168.184.128:9000 from root sending #5
2015-05-20 15:11:05,552 DEBUG [org.apache.hadoop.ipc.Client]:1081 - IPC Client (586084331) connection to /192.168.184.128:9000 from root got value #5
2015-05-20 15:11:05,554 DEBUG [org.apache.hadoop.ipc.ProtobufRpcEngine]:253 - Call: getBlockLocations took 23ms
2015-05-20 15:11:05,557 DEBUG [org.apache.hadoop.hdfs.DFSClient]:273 - newInfo = LocatedBlocks{
  fileLength=12
  underConstruction=false
  blocks=[LocatedBlock{BP-824136034-192.168.184.128-1432019708326:blk_1073741848_1024; getBlockSize()=12; corrupt=false; offset=0; locs=[192.168.184.129:50010, 192.168.184.130:50010]; storageIDs=[DS-b0a4e12f-e888-4071-a1e8-ef38deb2c0d7, DS-777e9485-5f53-4813-b526-783a678f1281]; storageTypes=[DISK, DISK]}]
  lastLocatedBlock=LocatedBlock{BP-824136034-192.168.184.128-1432019708326:blk_1073741848_1024; getBlockSize()=12; corrupt=false; offset=0; locs=[192.168.184.129:50010, 192.168.184.130:50010]; storageIDs=[DS-b0a4e12f-e888-4071-a1e8-ef38deb2c0d7, DS-777e9485-5f53-4813-b526-783a678f1281]; storageTypes=[DISK, DISK]}
  isLastBlockComplete=true}
2015-05-20 15:11:05,564 DEBUG [org.apache.hadoop.hdfs.DFSClient]:960 - Connecting to datanode 192.168.184.129:50010
2015-05-20 15:11:05,587 DEBUG [org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient]:244 - SASL client skipping handshake in unsecured configuration for addr = /192.168.184.129, datanodeId = 192.168.184.129:50010
2015-05-20 15:11:05,698 INFO  [my.hadoopstudy.dfs.Test]:75 - content:Hello World!
2015-05-20 15:11:05,698 INFO  [my.hadoopstudy.dfs.Test]:80 - 成功
2015-05-20 15:11:05,705 DEBUG [org.apache.hadoop.ipc.Client]:97 - stopping client from cache: org.apache.hadoop.ipc.Client@77be656f
2015-05-20 15:11:05,705 DEBUG [org.apache.hadoop.ipc.Client]:103 - removing client from cache: org.apache.hadoop.ipc.Client@77be656f
2015-05-20 15:11:05,705 DEBUG [org.apache.hadoop.ipc.Client]:110 - stopping actual client because no more references remain: org.apache.hadoop.ipc.Client@77be656f
2015-05-20 15:11:05,709 DEBUG [org.apache.hadoop.ipc.Client]:1234 - Stopping client
2015-05-20 15:11:05,710 DEBUG [org.apache.hadoop.ipc.Client]:1184 - IPC Client (586084331) connection to /192.168.184.128:9000 from root: closed
2015-05-20 15:11:05,710 DEBUG [org.apache.hadoop.ipc.Client]:979 - IPC Client (586084331) connection to /192.168.184.128:9000 from root: stopped, remaining connections



