Linux command: ln

ln is used to create links; it can create two kinds of link: soft (symbolic) links and hard links.
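
Both kinds are created with the same command; the -s flag is what selects a soft link. A minimal syntax sketch (the target and link names below are placeholders, not files from this session):

# soft (symbolic) link: the link file stores only the path of the target
ln -s /path/to/target softlink_name
# hard link (no -s): another directory entry pointing at the same inode as the target
ln /path/to/target hardlink_name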

1. Creating a soft link

[root@hadoop001 opt]# ll
total 80
-rw-r--r-- 1 root root 73128 May 26 22:20 hadoop-root-datanode-hadoop001.log
drwxr-xr-x 2 root root  4096 Mar  4 21:42 module
drwxr-xr-x 3 root root  4096 Mar  5 21:10 software
[root@hadoop001 opt]# ln -s hadoop-root-datanode-hadoop001.log link2019
[root@hadoop001 opt]# ll
total 80
-rw-r--r-- 1 root root 73128 May 26 22:20 hadoop-root-datanode-hadoop001.log
lrwxrwxrwx 1 root root    34 May 26 22:21 link2019 -> hadoop-root-datanode-hadoop001.log
drwxr-xr-x 2 root root  4096 Mar  4 21:42 module
drwxr-xr-x 3 root root  4096 Mar  5 21:10 software
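
In the listing, the leading l in lrwxrwxrwx marks link2019 as a symbolic link, and its size of 34 bytes is simply the length of the stored target path hadoop-root-datanode-hadoop001.log. To print that target path by itself, readlink can be used (a sketch, not captured from this session):

readlink link2019
# prints the stored target path: hadoop-root-datanode-hadoop001.log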

Let's try it out:

[root@hadoop001 opt]# cat link2019
2019-03-23 13:37:06,815 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: STARTUP_MSG: 
/************************************************************
STARTUP_MSG: Starting DataNode
STARTUP_MSG:   host = localhost/127.0.0.1
STARTUP_MSG:   args = []
STARTUP_MSG:   version = 2.7.1
STARTUP_MSG:   classpath = /opt/software/hadoop-2.7.1/etc/hadoop:... (long classpath omitted)
STARTUP_MSG:   build = https://git-wip-us.apache.org/repos/asf/hadoop.git -r 15ecc87ccf4a0228f35af08fc56de536e6ce657a; compiled by 'jenkins' on 2015-06-29T06:04Z
STARTUP_MSG:   java = 1.8.0_65
************************************************************/
... (the rest of the DataNode log output is omitted here)

2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.1-

tests.jar:/contrib/capacity-scheduler/*.jar:/opt/software/hadoop-

2.7.1/contrib/capacity-scheduler/*.jar:/opt/software/hadoop-

2.7.1/contrib/capacity-scheduler/*.jar
STARTUP_MSG:   build = https://git-wip-us.apache.org/repos/asf/hadoop.git -r 15ecc87ccf4a0228f35af08fc56de536e6ce657a; compiled by 'jenkins' on 2015-06-29T06:04Z
STARTUP_MSG:   java = 1.8.0_65
************************************************************/
2019-03-23 15:06:02,363 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: registered UNIX signal 

handlers for [TERM, HUP, INT]
2019-03-23 15:06:02,740 WARN org.apache.hadoop.util.NativeCodeLoader: 

Unable to load native-hadoop library for your platform... using builtin-

java classes where applicable
2019-03-23 15:06:02,961 INFO org.apache.hadoop.metrics2.impl.MetricsConfig: 

loaded properties from hadoop-metrics2.properties
2019-03-23 15:06:03,024 INFO 

org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Scheduled snapshot 

period at 10 second(s).
2019-03-23 15:06:03,024 INFO 

org.apache.hadoop.metrics2.impl.MetricsSystemImpl: DataNode metrics system 

started
2019-03-23 15:06:03,029 INFO 

org.apache.hadoop.hdfs.server.datanode.BlockScanner: Initialized block 

scanner with targetBytesPerSec 1048576
2019-03-23 15:06:03,031 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Configured hostname is 

localhost
2019-03-23 15:06:03,036 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Starting DataNode with 

maxLockedMemory = 0
2019-03-23 15:06:03,057 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Opened streaming server at 

/0.0.0.0:50010
2019-03-23 15:06:03,059 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Balancing bandwith is 

1048576 bytes/s
2019-03-23 15:06:03,059 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Number threads for 

balancing is 5
2019-03-23 15:06:03,130 INFO org.mortbay.log: Logging to 

org.slf4j.impl.Log4jLoggerAdapter(org.mortbay.log) via 

org.mortbay.log.Slf4jLog
2019-03-23 15:06:03,137 INFO 

org.apache.hadoop.security.authentication.server.AuthenticationFilter: 

Unable to initialize FileSignerSecretProvider, falling back to use random 

secrets.
2019-03-23 15:06:03,144 INFO org.apache.hadoop.http.HttpRequestLog: Http 

request log for http.requests.datanode is not defined
2019-03-23 15:06:03,150 INFO org.apache.hadoop.http.HttpServer2: Added 

global filter 'safety' 

(class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2019-03-23 15:06:03,152 INFO org.apache.hadoop.http.HttpServer2: Added 

filter static_user_filter 

(class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to 

context datanode
2019-03-23 15:06:03,152 INFO org.apache.hadoop.http.HttpServer2: Added 

filter static_user_filter 

(class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to 

context static
2019-03-23 15:06:03,152 INFO org.apache.hadoop.http.HttpServer2: Added 

filter static_user_filter 

(class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to 

context logs
2019-03-23 15:06:03,164 INFO org.apache.hadoop.http.HttpServer2: Jetty 

bound to port 44010
2019-03-23 15:06:03,164 INFO org.mortbay.log: jetty-6.1.26
2019-03-23 15:06:03,312 INFO org.mortbay.log: Started 

HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:44010
2019-03-23 15:06:03,419 INFO 

org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer: Listening 

HTTP traffic on /0.0.0.0:50075
2019-03-23 15:06:03,572 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: dnUserName = root
2019-03-23 15:06:03,572 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: supergroup = supergroup
2019-03-23 15:06:03,652 INFO org.apache.hadoop.ipc.CallQueueManager: Using 

callQueue class java.util.concurrent.LinkedBlockingQueue
2019-03-23 15:06:03,683 INFO org.apache.hadoop.ipc.Server: Starting Socket 

Reader #1 for port 50020
2019-03-23 15:06:03,708 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Opened IPC server at 

/0.0.0.0:50020
2019-03-23 15:06:03,719 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Refresh request received 

for nameservices: null
2019-03-23 15:06:03,741 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Starting BPOfferServices 

for nameservices: <default>
2019-03-23 15:06:03,751 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool <registering> 

(Datanode Uuid unassigned) service to hadoop001/127.0.0.1:9000 starting to 

offer service
2019-03-23 15:06:03,755 INFO org.apache.hadoop.ipc.Server: IPC Server 

Responder: starting
2019-03-23 15:06:03,765 INFO org.apache.hadoop.ipc.Server: IPC Server 

listener on 50020: starting
2019-03-23 15:06:04,263 INFO org.apache.hadoop.hdfs.server.common.Storage: 

Lock on /opt/software/hadoop-2.7.1/data/tmp/dfs/data/in_use.lock acquired 

by nodename 2365@localhost
2019-03-23 15:06:04,309 INFO org.apache.hadoop.hdfs.server.common.Storage: 

Analyzing storage directories for bpid BP-1244105038-127.0.0.1-

1553319341262
2019-03-23 15:06:04,309 INFO org.apache.hadoop.hdfs.server.common.Storage: 

Locking is disabled for /opt/software/hadoop-

2.7.1/data/tmp/dfs/data/current/BP-1244105038-127.0.0.1-1553319341262
2019-03-23 15:06:04,310 INFO org.apache.hadoop.hdfs.server.common.Storage: 

Restored 0 block files from trash.
2019-03-23 15:06:04,311 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Setting up storage: 

nsid=1341947409;bpid=BP-1244105038-127.0.0.1-1553319341262;lv=-

56;nsInfo=lv=-63;cid=CID-a135bed3-efab-4cc6-b1a4-

5ef7b1d2057b;nsid=1341947409;c=0;bpid=BP-1244105038-127.0.0.1-

1553319341262;dnuuid=fc4fe5b9-f39d-462d-adbf-73e2a2270bd7
2019-03-23 15:06:04,353 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Added 

new volume: DS-091cd237-2e30-437c-b919-867e6a8b5fcf
2019-03-23 15:06:04,353 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Added 

volume - /opt/software/hadoop-2.7.1/data/tmp/dfs/data/current, StorageType: 

DISK
2019-03-23 15:06:04,361 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: 

Registered FSDatasetState MBean
2019-03-23 15:06:04,362 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Adding 

block pool BP-1244105038-127.0.0.1-1553319341262
2019-03-23 15:06:04,363 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: 

Scanning block pool BP-1244105038-127.0.0.1-1553319341262 on volume 

/opt/software/hadoop-2.7.1/data/tmp/dfs/data/current...
2019-03-23 15:06:04,375 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Cached 

dfsUsed found for /opt/software/hadoop-2.7.1/data/tmp/dfs/data/current/BP-

1244105038-127.0.0.1-1553319341262/current: 28672
2019-03-23 15:06:04,377 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Time 

taken to scan block pool BP-1244105038-127.0.0.1-1553319341262 on 

/opt/software/hadoop-2.7.1/data/tmp/dfs/data/current: 14ms
2019-03-23 15:06:04,377 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Total 

time to scan all replicas for block pool BP-1244105038-127.0.0.1-

1553319341262: 15ms
2019-03-23 15:06:04,377 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Adding 

replicas to map for block pool BP-1244105038-127.0.0.1-1553319341262 on 

volume /opt/software/hadoop-2.7.1/data/tmp/dfs/data/current...
2019-03-23 15:06:04,378 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Time 

to add replicas to map for block pool BP-1244105038-127.0.0.1-1553319341262 

on volume /opt/software/hadoop-2.7.1/data/tmp/dfs/data/current: 0ms
2019-03-23 15:06:04,378 INFO 

org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Total 

time to add all replicas to map: 1ms
2019-03-23 15:06:04,564 INFO 

org.apache.hadoop.hdfs.server.datanode.VolumeScanner: VolumeScanner

(/opt/software/hadoop-2.7.1/data/tmp/dfs/data, DS-091cd237-2e30-437c-b919-

867e6a8b5fcf): no suitable block pools found to scan.  Waiting 1809513588 

ms.
2019-03-23 15:06:04,566 INFO 

org.apache.hadoop.hdfs.server.datanode.DirectoryScanner: Periodic Directory 

Tree Verification scan starting at 1553338118566 with interval 21600000
2019-03-23 15:06:04,568 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool BP-1244105038-

127.0.0.1-1553319341262 (Datanode Uuid null) service to 

hadoop001/127.0.0.1:9000 beginning handshake with NN
2019-03-23 15:06:04,607 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool Block pool BP-

1244105038-127.0.0.1-1553319341262 (Datanode Uuid null) service to 

hadoop001/127.0.0.1:9000 successfully registered with NN
2019-03-23 15:06:04,607 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: For namenode 

hadoop001/127.0.0.1:9000 using DELETEREPORT_INTERVAL of 300000 msec  

BLOCKREPORT_INTERVAL of 21600000msec CACHEREPORT_INTERVAL of 10000msec 

Initial delay: 0msec; heartBeatInterval=3000
2019-03-23 15:06:04,690 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Namenode Block pool BP-

1244105038-127.0.0.1-1553319341262 (Datanode Uuid fc4fe5b9-f39d-462d-adbf-

73e2a2270bd7) service to hadoop001/127.0.0.1:9000 trying to claim ACTIVE 

state with txid=3
2019-03-23 15:06:04,690 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Acknowledging ACTIVE 

Namenode Block pool BP-1244105038-127.0.0.1-1553319341262 (Datanode Uuid 

fc4fe5b9-f39d-462d-adbf-73e2a2270bd7) service to hadoop001/127.0.0.1:9000
2019-03-23 15:06:04,753 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Successfully sent block 

report 0x5b1aa041d3b,  containing 1 storage report(s), of which we sent 1. 

The reports had 0 total blocks and used 1 RPC(s). This took 3 msec to 

generate and 60 msecs for RPC and NN processing. Got back one command: 

FinalizeCommand/5.
2019-03-23 15:06:04,753 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Got finalize command for 

block pool BP-1244105038-127.0.0.1-1553319341262
2019-03-23 15:22:51,793 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Receiving BP-1244105038-

127.0.0.1-1553319341262:blk_1073741825_1001 src: /127.0.0.1:33314 dest: 

/127.0.0.1:50010
2019-03-23 15:22:51,946 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace: src: 

/127.0.0.1:33314, dest: /127.0.0.1:50010, bytes: 45, op: HDFS_WRITE, cliID: 

DFSClient_NONMAPREDUCE_2021642614_1, offset: 0, srvID: fc4fe5b9-f39d-462d-

adbf-73e2a2270bd7, blockid: BP-1244105038-127.0.0.1-

1553319341262:blk_1073741825_1001, duration: 118686326
2019-03-23 15:22:51,963 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: PacketResponder: BP-

1244105038-127.0.0.1-1553319341262:blk_1073741825_1001, 

type=LAST_IN_PIPELINE, downstreams=0:[] terminating
2019-03-23 15:26:05,845 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Receiving BP-1244105038-

127.0.0.1-1553319341262:blk_1073741826_1002 src: /127.0.0.1:33416 dest: 

/127.0.0.1:50010
2019-03-23 15:26:12,872 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace: src: 

/127.0.0.1:33416, dest: /127.0.0.1:50010, bytes: 134217728, op: HDFS_WRITE, 

cliID: DFSClient_NONMAPREDUCE_1340210517_1, offset: 0, srvID: fc4fe5b9-

f39d-462d-adbf-73e2a2270bd7, blockid: BP-1244105038-127.0.0.1-

1553319341262:blk_1073741826_1002, duration: 7024381628
2019-03-23 15:26:12,872 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: PacketResponder: BP-

1244105038-127.0.0.1-1553319341262:blk_1073741826_1002, 

type=LAST_IN_PIPELINE, downstreams=0:[] terminating
2019-03-23 15:26:12,963 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: Receiving BP-1244105038-

127.0.0.1-1553319341262:blk_1073741827_1003 src: /127.0.0.1:33430 dest: 

/127.0.0.1:50010
2019-03-23 15:26:15,344 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace: src: 

/127.0.0.1:33430, dest: /127.0.0.1:50010, bytes: 76389079, op: HDFS_WRITE, 

cliID: DFSClient_NONMAPREDUCE_1340210517_1, offset: 0, srvID: fc4fe5b9-

f39d-462d-adbf-73e2a2270bd7, blockid: BP-1244105038-127.0.0.1-

1553319341262:blk_1073741827_1003, duration: 2374456847
2019-03-23 15:26:15,344 INFO 

org.apache.hadoop.hdfs.server.datanode.DataNode: PacketResponder: BP-

1244105038-127.0.0.1-1553319341262:blk_1073741827_1003, 

type=LAST_IN_PIPELINE, downstreams=0:[] terminating
2019-03-23 16:10:27,767 WARN 

org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in 

offerService
java.io.IOException: Failed on local exception: java.io.IOException: 

Connection reset by peer; Host Details : local host is: 

"localhost/127.0.0.1"; destination host is: "hadoop001":9000; 
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:773)
	at org.apache.hadoop.ipc.Client.call(Client.java:1480)
	at org.apache.hadoop.ipc.Client.call(Client.java:1407)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke

(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at 

org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.se

ndHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at 

org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat

(BPServiceActor.java:553)
	at 

org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService

(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run

(BPServiceActor.java:823)
	at java.lang.Thread.run(Thread.java:745)
Caused by: java.io.IOException: Connection reset by peer
	at sun.nio.ch.FileDispatcherImpl.read0(Native Method)
	at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:39)
	at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:223)
	at sun.nio.ch.IOUtil.read(IOUtil.java:197)
	at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:380)
	at org.apache.hadoop.net.SocketInputStream$Reader.performIO

(SocketInputStream.java:57)
	at org.apache.hadoop.net.SocketIOWithTimeout.doIO

(SocketIOWithTimeout.java:142)
	at org.apache.hadoop.net.SocketInputStream.read

(SocketInputStream.java:161)
	at org.apache.hadoop.net.SocketInputStream.read

(SocketInputStream.java:131)
	at java.io.FilterInputStream.read(FilterInputStream.java:133)
	at java.io.FilterInputStream.read(FilterInputStream.java:133)
	at org.apache.hadoop.ipc.Client$Connection$PingInputStream.read

(Client.java:515)
	at java.io.BufferedInputStream.fill(BufferedInputStream.java:246)
	at java.io.BufferedInputStream.read(BufferedInputStream.java:265)
	at java.io.DataInputStream.readInt(DataInputStream.java:387)
	at org.apache.hadoop.ipc.Client$Connection.receiveRpcResponse

(Client.java:1079)
	at org.apache.hadoop.ipc.Client$Connection.run(Client.java:974)
2019-03-23 16:10:31,749 INFO org.apache.hadoop.ipc.Client: Retrying connect 

to server: hadoop001/127.0.0.1:9000. Already tried 0 time(s); retry policy 

is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 

MILLISECONDS)
2019-03-23 16:10:32,562 ERROR org.apache.hadoop.hdfs.server.datanode.DataNode: RECEIVED SIGNAL 15: SIGTERM
2019-03-23 16:10:32,564 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: SHUTDOWN_MSG: 
/************************************************************
SHUTDOWN_MSG: Shutting down DataNode at localhost/127.0.0.1
************************************************************/
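
Reading through link2019 returns exactly what reading the target file would, because the symlink stores only the path of hadoop-root-datanode-hadoop001.log and is resolved on every access. As a quick illustration (a sketch using the same files; the exact output will differ on your machine), you can inspect the link and see what happens when the target goes away:

[root@hadoop001 opt]# readlink link2019
hadoop-root-datanode-hadoop001.log
[root@hadoop001 opt]# mv hadoop-root-datanode-hadoop001.log /tmp/
[root@hadoop001 opt]# cat link2019
cat: link2019: No such file or directory
[root@hadoop001 opt]# mv /tmp/hadoop-root-datanode-hadoop001.log .

The symlink itself is untouched by the move; it simply dangles until a file reappears at the recorded path.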

二、Creating a hard link

[root@hadoop001 opt]# ln hadoop-root-datanode-hadoop001.log ln_hadoop
[root@hadoop001 opt]# ll
total 152
-rw-r--r-- 2 root root 73128 May 26 22:20 hadoop-root-datanode-hadoop001.log
lrwxrwxrwx 1 root root    34 May 26 22:21 link2019 -> hadoop-root-datanode-hadoop001.log
-rw-r--r-- 2 root root 73128 May 26 22:20 ln_hadoop
drwxr-xr-x 2 root root  4096 Mar  4 21:42 module
drwxr-xr-x 3 root root  4096 Mar  5 21:10 software

To sum up: a soft link is essentially a shortcut that stores the path of its target, while a hard link only looks like a copy in the listing — it is really a second directory entry for the same file, which is why the link count above went from 1 to 2 and both names report the same size even though no data was duplicated (see the quick check below).
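
Because both names refer to the same inode, the relationship is easy to verify: the inode numbers match, the link count reported by stat is 2, and an edit made through one name is visible through the other. A minimal check might look like this (illustrative session; inode numbers and output will differ on your system):

[root@hadoop001 opt]# ls -i hadoop-root-datanode-hadoop001.log ln_hadoop
[root@hadoop001 opt]# stat -c '%i %h %n' ln_hadoop
[root@hadoop001 opt]# echo "appended via hard link" >> ln_hadoop
[root@hadoop001 opt]# tail -n 1 hadoop-root-datanode-hadoop001.log
appended via hard link

Note that ln without -s will not create hard links to directories, and a hard link must live on the same filesystem as its target; soft links have neither restriction.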
