Hadoop: configuring a local Hadoop environment on Win10 (with a WordCount example) and fixing org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Ljava/lang/String;I)Z

After downloading the Hadoop package, first set up the local environment and configure the environment variables:

Extract hadoop-2.6.0.tar.gz.

Variable name: HADOOP_HOME   Value: E:\spark\anzhuangsoft\hadoop-2.6.0

Add to Path: %HADOOP_HOME%\lib and %HADOOP_HOME%\bin


Verify: open a new cmd window and run hadoop version; it should print Hadoop 2.6.0.
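As an extra sanity check (a minimal sketch, not from the original post), you can confirm that the JVM launched from IDEA actually sees these variables; the native-library errors later in this post hinge on them:

public class CheckHadoopEnv {
    public static void main(String[] args) {
        // Should print the directory configured above, e.g. E:\spark\anzhuangsoft\hadoop-2.6.0
        System.out.println("HADOOP_HOME = " + System.getenv("HADOOP_HOME"));
        // Path should include %HADOOP_HOME%\bin, where winutils.exe and hadoop.dll live
        String path = System.getenv("Path");
        System.out.println("Path contains hadoop bin: "
                + (path != null && path.toLowerCase().contains("hadoop-2.6.0\\bin")));
    }
}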

Running the WordCount program in IDEA:

Create a Maven project. Directory structure (sketched below):
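A layout consistent with the package names and relative paths used in the code below (inferred; the original showed a screenshot):

hadoop/
├── input/
│   └── words.txt
├── output/              (created by the job on each run)
├── src/
│   └── main/
│       └── java/
│           ├── RunJob.java
│           └── WordCount.java
└── pom.xml

Note that both classes declare package main.java, so in this setup src itself (not src/main/java) must be marked as the sources root in IDEA.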

Code:

RunJob (driver class):
package main.java;
 
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 
public class RunJob {
//    static {
//        try {
//            // Set the HADOOP_HOME directory
//            System.setProperty("hadoop.home.dir", "E:\\spark\\anzhuangsoft\\hadoop-2.7.5");
//            // Load the native library
//            System.load("E:\\spark\\anzhuangsoft\\hadoop-2.7.5\\bin\\hadoop.dll");
//        } catch (UnsatisfiedLinkError e) {
//            System.err.println("Native code library failed to load.\n" + e);
//            System.exit(1);
//        }
//    }

    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(configuration);

        Job job = Job.getInstance(configuration);
        job.setJarByClass(RunJob.class);
        job.setJobName("wordCount");

        job.setMapperClass(WordCount.WordCountMapper.class);
        job.setReducerClass(WordCount.WordCountReducer.class);

        // Map-side output types: (word, 1)
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // Relative paths resolve against the working directory (the project root when run from IDEA)
        FileInputFormat.addInputPath(job, new Path("input"));
        Path outPath = new Path("output");
        // MapReduce refuses to overwrite an existing output directory, so delete it first
        if (fs.exists(outPath)) {
            fs.delete(outPath, true);
        }
        FileOutputFormat.setOutputPath(job, outPath);

        boolean completion = job.waitForCompletion(true);
        if (completion) {
            System.out.println("Job finished");
        }
    }
}

 

WordCount (contains the Mapper and Reducer; it also has its own main method that runs the job in local mode):
package main.java;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCount {
    public static class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context)
                throws IOException, InterruptedException {
            // Assumes words are separated by single spaces; emit (word, 1) for each word
            String[] words = value.toString().split(" ");
            for (String word : words) {
                context.write(new Text(word), new IntWritable(1));
            }
        }
    }

    public static class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> value, Context context) throws IOException, InterruptedException {
            // Sum the 1s emitted for each word
            int sum = 0;
            for (IntWritable intWritable : value) {
                sum += intWritable.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

//    static {
//        try {
//            // Set the HADOOP_HOME directory
//            System.setProperty("hadoop.home.dir", "E:\\spark\\anzhuangsoft\\hadoop-2.7.5");
//            // Load the native library
//            System.load("E:\\spark\\anzhuangsoft\\hadoop-2.7.5\\bin\\hadoop.dll");
//        } catch (UnsatisfiedLinkError e) {
//            System.err.println("Native code library failed to load.\n" + e);
//            System.exit(1);
//        }
//    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        // Force the local job runner rather than YARN
        conf.set("mapreduce.framework.name", "local");
        Job job = Job.getInstance(conf);

        job.setJarByClass(WordCount.class);

        // Set the Mapper and Reducer classes
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        // Map-side output key/value classes
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // Reduce-side output key/value classes
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPath(job, new Path("input\\words.txt"));
        // Unlike RunJob, this does not delete an existing output directory;
        // remove it manually between runs or the job fails with FileAlreadyExistsException
        FileOutputFormat.setOutputPath(job, new Path("output"));
        // Run the job
        boolean status = job.waitForCompletion(true);
        System.out.println(status);
    }
}

pom.xml:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.tzb</groupId>
    <artifactId>hadoop</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <!--<hadoop.version>2.7.5</hadoop.version>-->
        <hadoop.version>2.6.0</hadoop.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <!--<dependency>-->
            <!--<groupId>org.apache.hadoop</groupId>-->
            <!--<artifactId>hadoop-client</artifactId>-->
            <!--<version>${hadoop.version}</version>-->
        <!--</dependency>-->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <!--<dependency>-->
        <!--<groupId>jdk.tools</groupId>-->
        <!--<artifactId>jdk.tools</artifactId>-->
        <!--<version>1.8</version>-->
        <!--<scope>system</scope>-->
        <!--<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>-->
        <!--</dependency>-->
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <version>2.4</version>
                <configuration>
                    <archive>
                        <manifest>
                            <addClasspath>true</addClasspath>
                            <classpathPrefix>lib/</classpathPrefix>
                            <mainClass>main.java.RunJob</mainClass>
                        </manifest>
                    </archive>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <!--<version>3.0</version>-->
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>UTF-8</encoding>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
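The post runs everything from IDEA, but the jar plugin above also lets you build the jar with standard Maven usage (not shown in the original); the manifest will point at the main class configured above:

mvn clean package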

Execution result:
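For illustration (hypothetical data; the original post showed screenshots), if input/words.txt contains:

hello world
hello hadoop

then the aggregated result in output/part-r-00000 is:

hadoop	1
hello	2
world	1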

Comment out the Reducer and run only the Mapper, which shows the output before any aggregation (see the sketch below):
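The post does this by commenting out the Reducer wiring; an equivalent and more explicit way (a sketch, not from the original) is to declare a map-only job in RunJob.main:

// job.setReducerClass(WordCount.WordCountReducer.class);  // disable the reduce phase
job.setNumReduceTasks(0);  // map output is written directly, one (word, 1) pair per line

With the hypothetical words.txt above, the unaggregated output (part-m-00000) would be:

hello	1
world	1
hello	1
hadoop	1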


Running the code:

Error 1:

F:\MyDevelopKit\1.software\jdk\1.8\jdk1.8.0_131\bin\java -Dfile.encoding=GBK -classpath [JRE jars; D:\Workspaces\hadoop\target\classes; the Hadoop 2.6.0 client/common/hdfs/mapreduce jars and their dependencies from D:\repository (full classpath trimmed for readability)] main.java.RunJob
log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Exception in thread "main" java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Ljava/lang/String;I)Z
	at org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Native Method)
	at org.apache.hadoop.io.nativeio.NativeIO$Windows.access(NativeIO.java:557)
	at org.apache.hadoop.fs.FileUtil.canRead(FileUtil.java:977)
	at org.apache.hadoop.util.DiskChecker.checkAccessByFileMethods(DiskChecker.java:187)
	at org.apache.hadoop.util.DiskChecker.checkDirAccess(DiskChecker.java:174)
	at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:108)
	at org.apache.hadoop.fs.LocalDirAllocator$AllocatorPerContext.confChanged(LocalDirAllocator.java:285)
	at org.apache.hadoop.fs.LocalDirAllocator$AllocatorPerContext.getLocalPathForWrite(LocalDirAllocator.java:344)
	at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:150)
	at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:131)
	at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:115)
	at org.apache.hadoop.mapred.LocalDistributedCacheManager.setup(LocalDistributedCacheManager.java:131)
	at org.apache.hadoop.mapred.LocalJobRunner$Job.<init>(LocalJobRunner.java:163)
	at org.apache.hadoop.mapred.LocalJobRunner.submitJob(LocalJobRunner.java:731)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:536)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1314)
	at main.java.RunJob.main(RunJob.java:49)

Process finished with exit code 1

After trying all sorts of fixes, I finally traced this error to one directory: C:\Windows\SysWOW64.

The WordCount program only runs successfully once hadoop.dll is present in that directory. (This fits a 32-bit JDK, which the access-bridge-32.jar in the classpath above suggests: 32-bit processes load system DLLs from SysWOW64.)

Even so, it is best to copy both files, hadoop.dll and winutils.exe, into both of the following directories (for example with the commands sketched below):

C:\Windows\System32 and C:\Windows\SysWOW64
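Assuming you have already placed the downloaded hadoop.dll and winutils.exe in %HADOOP_HOME%\bin, a sketch of the copy commands from an administrator cmd prompt:

copy "%HADOOP_HOME%\bin\hadoop.dll" C:\Windows\System32\
copy "%HADOOP_HOME%\bin\hadoop.dll" C:\Windows\SysWOW64\
copy "%HADOOP_HOME%\bin\winutils.exe" C:\Windows\System32\
copy "%HADOOP_HOME%\bin\winutils.exe" C:\Windows\SysWOW64\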

If the error still appears, try adding the following static block to RunJob, outside the main method, with the paths pointed at your own Hadoop directory, then run again. It sets hadoop.home.dir and explicitly loads hadoop.dll before Hadoop's native code is first touched.

static {
        try {
            // Set the HADOOP_HOME directory
            System.setProperty("hadoop.home.dir", "E:\\spark\\anzhuangsoft\\hadoop-2.7.5");
            // Load the native library
            System.load("E:\\spark\\anzhuangsoft\\hadoop-2.7.5\\bin\\hadoop.dll");
        } catch (UnsatisfiedLinkError e) {
            System.err.println("Native code library failed to load.\n" + e);
            System.exit(1);
        }
    }

Error 2:

F:\MyDevelopKit\1.software\jdk\1.8\jdk1.8.0_131\bin\java -Dfile.encoding=GBK -classpath [same classpath as in error 1, trimmed] main.java.RunJob
log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Exception in thread "main" java.lang.NullPointerException
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:1012)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:482)
	at org.apache.hadoop.util.Shell.run(Shell.java:455)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:808)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:791)
	at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1097)
	at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:582)
	at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:557)
	at org.apache.hadoop.fs.LocatedFileStatus.<init>(LocatedFileStatus.java:42)
	at org.apache.hadoop.fs.FileSystem$4.next(FileSystem.java:1699)
	at org.apache.hadoop.fs.FileSystem$4.next(FileSystem.java:1681)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:303)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.listStatus(FileInputFormat.java:264)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:385)
	at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:597)
	at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:614)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:492)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1314)
	at main.java.RunJob.main(RunJob.java:49)

Process finished with exit code 1

This error occurs because winutils.exe is missing from Hadoop's bin directory; copy it in.


Please credit the source when reposting. Thanks!

References:

https://www.cnblogs.com/kevin-lee123/p/11020570.html

https://blog.csdn.net/a2099948768/article/details/79577246

https://www.cnblogs.com/jhxxb/p/10723369.html

Download winutils.exe and hadoop.dll:

https://github.com/steveloughran/winutils
