Dr. Elephant fails with the following error when listing files over WebHDFS:

java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.util.JsonSerialization
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.jsonParse(WebHdfsFileSystem.java:477)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$FsPathResponseRunner.getResponse(WebHdfsFileSystem.java:959)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.runWithRetry(WebHdfsFileSystem.java:822)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.access$100(WebHdfsFileSystem.java:649)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner$1.run(WebHdfsFileSystem.java:687)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.run(WebHdfsFileSystem.java:683)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.getDelegationToken(WebHdfsFileSystem.java:1684)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.getDelegationToken(WebHdfsFileSystem.java:365)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.getAuthParameters(WebHdfsFileSystem.java:585)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.toUrl(WebHdfsFileSystem.java:638)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractFsPathRunner.getUrl(WebHdfsFileSystem.java:923)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.runWithRetry(WebHdfsFileSystem.java:819)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.access$100(WebHdfsFileSystem.java:649)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner$1.run(WebHdfsFileSystem.java:687)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.run(WebHdfsFileSystem.java:683)
at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.listStatus(WebHdfsFileSystem.java:1634)
at org.apache.hadoop.fs.FileSystem.listStatus(FileSystem.java:1868)
at org.apache.hadoop.fs.FileSystem.listStatus(FileSystem.java:1910)
Check the Hadoop classpath:

echo `hadoop classpath`

Directories on that classpath, such as /usr/current/hadoop-hdfs-client/lib/*, ship their own jackson-databind jar, and it conflicts with the one bundled with Dr. Elephant.
The jackson-databind jar under dr-elephant-2.1.7/lib must not be deleted, otherwise the Play framework will not start; the only option is to change what the output of echo `hadoop classpath` contributes to Dr. Elephant's classpath.
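A rough sketch of that approach (the grep pattern and the CLASSPATH export below are assumptions; adapt them to however your Dr. Elephant start script actually builds its classpath): first locate every jackson-databind jar that hadoop classpath pulls in, then filter the offending entry out before Dr. Elephant starts.

# Locate every jackson-databind jar reachable through `hadoop classpath`
for dir in $(hadoop classpath | tr ':' '\n' | sed 's|/\*$||' | sort -u); do
  ls "$dir"/jackson-databind*.jar 2>/dev/null
done

# The copy Dr. Elephant ships (this one has to stay)
ls dr-elephant-2.1.7/lib/jackson-databind*.jar

# Assumed fix: rebuild the classpath with the conflicting directory filtered out,
# then make the start script use it instead of the raw `hadoop classpath` output.
HADOOP_CP_FILTERED=$(hadoop classpath | tr ':' '\n' \
  | grep -v 'hadoop-hdfs-client/lib' \
  | paste -sd: -)
export CLASSPATH="$HADOOP_CP_FILTERED"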
A side note:
dr-elephant-2.1.7/app-conf/FetcherConf.xml is the core configuration file.
There are two ways to configure the Spark fetcher:
1. Loads the event log from HDFS and replays it to get the metrics and application properties:
<fetcher>
  <applicationtype>spark</applicationtype>
  <classname>com.linkedin.drelephant.spark.fetchers.FSFetcher</classname>
  <params>
    <event_log_size_limit_in_mb>500</event_log_size_limit_in_mb>
    <event_log_location_uri>webhdfs://localhost:50070/system/spark-history</event_log_location_uri>
  </params>
</fetcher>
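Before restarting Dr. Elephant you can sanity-check the event_log_location_uri above; the host, port, and path come from the config snippet and should be adjusted to your cluster:

# List the Spark history directory through the same WebHDFS endpoint
hdfs dfs -ls webhdfs://localhost:50070/system/spark-history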
2. Uses the SHS REST API (requires Spark >= 1.5.0):
<params>
  <event_log_size_limit_in_mb>500</event_log_size_limit_in_mb>
  <event_log_location_uri>webhdfs://localhost:50070/system/spark-history</event_log_location_uri>
</params>
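Only the <params> block is shown here; in the stock FetcherConf.xml this second option normally points at com.linkedin.drelephant.spark.fetchers.SparkFetcher instead of FSFetcher. Before switching to it, check that the Spark History Server REST API is reachable (18080 is the default SHS port; adjust to your deployment):

# The SHS REST API should return a JSON list of applications
curl -s http://localhost:18080/api/v1/applications | head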
If one of them does not work, try the other.