Version: Hadoop 2.2.0
The source code can be downloaded from https://github.com/hortonworks/simple-yarn-app. I had tried this simple-yarn-app several times without ever getting it to run, which was puzzling for what is supposed to be YARN's "hello world". After some debugging it turned out, once again, to be a classpath problem.
First, set up the environment as described in http://blog.csdn.net/fansy1990/article/details/22896249.
By "classpath problem" I mean that Linux and Windows express a Java classpath differently. With the unmodified source from GitHub (the problem only appears because I submit the job from Windows; submitting from Linux does not trigger it), the classpath seen at a breakpoint is:
{CLASSPATH=$HADOOP_CONF_DIR;$HADOOP_COMMON_HOME/share/hadoop/common/*;$HADOOP_COMMON_HOME/share/hadoop/common/lib/*;$HADOOP_HDFS_HOME/share/hadoop/hdfs/*;$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*;$HADOOP_YARN_HOME/share/hadoop/yarn/*;$HADOOP_YARN_HOME/share/hadoop/yarn/lib/*;%PWD%\*}
With the modified source, the classpath is:
{CLASSPATH=$HADOOP_CONF_DIR:$HADOOP_COMMON_HOME/share/hadoop/common/*:$HADOOP_COMMON_HOME/share/hadoop/common/lib/*:$HADOOP_HDFS_HOME/share/hadoop/hdfs/*:$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*:$HADOOP_YARN_HOME/share/hadoop/yarn/*:$HADOOP_YARN_HOME/share/hadoop/yarn/lib/*:$PWD/*}
That is, semicolons versus colons, and the $VAR versus %VAR% variable syntax.
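The root cause is that the environment helpers in the original code pick the path separator and variable syntax of the machine that submits the job, so a Windows client produces the semicolon/%PWD% form even though the containers run on Linux. Below is a minimal sketch of the kind of change the modified source makes: the CLASSPATH string is assembled by hand with ':' and '$PWD', so it no longer depends on the client OS. The helper name setupAppMasterEnv mirrors the original project and the method uses the conf field and imports of the Client class listed below; the exact modified code may differ.
// Sketch: build the AM CLASSPATH with hard-coded Unix conventions, so that a
// Windows client never injects ';' separators or the '%PWD%' variable syntax.
private void setupAppMasterEnv(Map<String, String> appMasterEnv) {
    StringBuilder classPathEnv = new StringBuilder();
    for (String c : conf.getStrings(
            YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
        if (classPathEnv.length() > 0) {
            classPathEnv.append(':'); // always ':', the separator the Linux NodeManager expects
        }
        classPathEnv.append(c.trim());
    }
    classPathEnv.append(":$PWD/*"); // '$PWD', not '%PWD%'
    appMasterEnv.put(Environment.CLASSPATH.name(), classPathEnv.toString());
}
With this change the classpath seen at the breakpoint becomes the colon/$PWD variant shown above, regardless of whether the Client runs on Windows or Linux.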
The Client source code is as follows:
package com.hortonworks.simpleyarnapp;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringInterner;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.client.api.YarnClientApplication;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Client {
Logger log = LoggerFactory.getLogger(Client.class);
Configuration conf = new YarnConfiguration();
public void run(String[] args) throws Exception {
final String command = args[0];
final int n = Integer.valueOf(args[1]);
final Path jarPath = new Path(args[2]);
// Create yarnClient
// YarnConfiguration extends Configuration
// YarnConfiguration conf = new YarnConfiguration();
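// the job is submitted from a Windows machine outside the cluster, so point the client explicitly at the remote cluster (node31)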
conf.set("fs.defaultFS", "hdfs://node31:9000");
conf.set("mapreduce.framework.name", "yarn");
conf.set("yarn.resourcemanager.address", "node31:8032");
YarnClient yarnClient = YarnClient.createYarnClient();
yarnClient.init(conf);
yarnClient.start();
// Create application via yarnClient
YarnClientApplication app = yarnClient.createApplication();