-
JVM本身就支持远程调试,Eclipse也支持JDWP,只需要在各模块的JVM启动时加载以下参数:
-Xdebug -Xrunjdwp:transport=dt_socket,address=8000,server=y,suspend=y
各参数的含义: -Xdebug 启用调试特性
|
注意:只需要在工程里面改就可以了。
<?xml version="1.0"?> <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<!-- mapred-site.xml excerpt: attaches a JDWP debug agent to every MapReduce
     child (task) JVM. suspend=y means each task JVM blocks at startup until a
     debugger attaches on port 8883. -->
<configuration> <property> <name>mapred.job.tracker</name> <value>192.168.1.219:9001</value> </property>
<property> <name>mapred.child.java.opts</name> <value>-agentlib:jdwp=transport=dt_socket,address=8883,server=y,suspend=y</value> </property>
<!-- Limit each TaskTracker to one map slot and one reduce slot so only one
     child JVM (and hence one debug port binding) exists at a time. -->
<property> <name>mapred.tasktracker.map.tasks.maximum</name> <value>1</value> </property> <property> <name>mapred.tasktracker.reduce.tasks.maximum</name> <value>1</value> </property>
<!-- -1 = reuse the child JVM for unlimited tasks, keeping the debugger's
     target process alive across tasks. -->
<property> <name>mapred.job.reuse.jvm.num.tasks</name> <value>-1</value> </property> </configuration> |
在Eclipse中使用方法:
打开eclipse,找到Debug Configurations...,添加一个Remote Java Application;在source中可以关联到hive的源代码,然后,单击Debug按钮进入远程debug模式。 |
# hadoop-env.sh excerpt: JDWP agent options per daemon. Only the NameNode line
# is active (port 8888, suspend=y blocks daemon startup until a debugger
# attaches); the other daemons' lines are commented out — uncomment the one you
# need, keeping each daemon on its own distinct port.
HADOOP_NAMENODE_OPTS="-agentlib:jdwp=transport=dt_socket,address=8888,server=y,suspend=y" #HADOOP_SECONDARYNAMENODE_OPTS="-agentlib:jdwp=transport=dt_socket,address=8789,server=y,suspend=y" #HADOOP_DATANODE_OPTS="-agentlib:jdwp=transport=dt_socket,address=8790,server=y,suspend=y" #HADOOP_BALANCER_OPTS="-agentlib:jdwp=transport=dt_socket,address=8791,server=y,suspend=y" #HADOOP_JOBTRACKER_OPTS="-agentlib:jdwp=transport=dt_socket,address=8792,server=y,suspend=y" #HADOOP_TASKTRACKER_OPTS="-agentlib:jdwp=transport=dt_socket,address=8793,server=y,suspend=y" |
对NameNode,SecondaryName,DataNode,JobTracker,TaskTracker进行远程调试,则需要修改一下bin/hadoop文件:
# bin/hadoop excerpt (truncated — the `……` line and the missing closing `fi`
# mark elided branches): per-command dispatch that selects the daemon's main
# class and appends a JDWP agent to its JVM options. suspend=n here, so each
# daemon starts normally without waiting for a debugger.
# Debug ports: NameNode 8888, SecondaryNameNode 8887, DataNode 8886,
# JobTracker 8885, TaskTracker 8884.
if [ "$COMMAND" = "namenode" ] ; then
CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS -agentlib:jdwp=transport=dt_socket,address=8888,server=y,suspend=n"
elif [ "$COMMAND" = "secondarynamenode" ] ; then
CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS -agentlib:jdwp=transport=dt_socket,address=8887,server=y,suspend=n"
elif [ "$COMMAND" = "datanode" ] ; then
CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS -agentlib:jdwp=transport=dt_socket,address=8886,server=y,suspend=n"
……
elif [ "$COMMAND" = "jobtracker" ] ; then
CLASS=org.apache.hadoop.mapred.JobTracker
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS -agentlib:jdwp=transport=dt_socket,address=8885,server=y,suspend=n"
elif [ "$COMMAND" = "tasktracker" ] ; then
CLASS=org.apache.hadoop.mapred.TaskTracker
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS -agentlib:jdwp=transport=dt_socket,address=8884,server=y,suspend=n" |
Namenode触发:
public DirectoryListing getListing(String src, byte[] startAfter) throws IOException { DirectoryListing files = namesystem.getListing(src, startAfter); myMetrics.incrNumGetListingOps(); if (files != null) { myMetrics.incrNumFilesInGetListingOps(files.getPartialListing().length); } return files; } |
SecondaryNameNode 触发:
// NOTE(review): truncated excerpt — the method body continues beyond this
// snippet (its closing brace is missing here). What is shown: resolves the
// HTTP info address from the configuration, caches its hostname, and — when
// security is enabled — logs in with the SecondaryNameNode keytab/principal
// via SecurityUtil.login.
private void initialize(final Configuration conf) throws IOException { final InetSocketAddress infoSocAddr = getHttpAddress(conf); infoBindAddress = infoSocAddr.getHostName(); if (UserGroupInformation.isSecurityEnabled()) { SecurityUtil.login(conf, DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY, DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY, infoBindAddress); } |
DataNode 触发:
/**
 * Constructs a DataNode: logs in the configured DataNode principal via
 * {@code SecurityUtil.login}, publishes the singleton reference, reads the
 * append-support and local-path-access settings, and starts the node.
 * If startup fails the partially-initialized instance is shut down before
 * the failure is rethrown.
 *
 * @param conf      node configuration
 * @param dataDirs  storage directories for block data
 * @param resources secure resources handed in by the secure starter
 * @throws IOException if login or {@code startDataNode} fails
 */
DataNode(final Configuration conf, final AbstractList<File> dataDirs, SecureResources resources) throws IOException {
  super(conf);
  SecurityUtil.login(conf, DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,
      DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY);
  datanodeObject = this;
  supportAppends = conf.getBoolean("dfs.support.append", false);
  this.userWithLocalPathAccess = conf.get(DFSConfigKeys.DFS_BLOCK_LOCAL_PATH_ACCESS_USER_KEY);
  try {
    startDataNode(conf, dataDirs, resources);
  } catch (IOException startupFailure) {
    // Roll back whatever startDataNode managed to initialize, then propagate.
    shutdown();
    throw startupFailure;
  }
}
JobTracker 触发:
// NOTE(review): truncated excerpt — only the signature of the JobTracker
// factory method is shown; its body is not part of this snippet.
public static JobTracker startTracker(JobConf conf, String identifier) throws IOException, InterruptedException { |