Solving the problem of MapReduce being unable to batch-import data into HBase


Hadoop version: hadoop-2.6.0-cdh5.5.2

HBase version: hbase-1.0.0-cdh5.5.2


Create the log table:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class HbaseDemo {

	public static void main(String[] args) throws IOException {
		String tableName = "wlan_log";//table name "wlan_log"
		String columnFamily = "cf";//column family "cf"

		HbaseDemo.create(tableName, columnFamily);//create the table with one column family

		// HbaseDemo.put(tableName, "row1", columnFamily, "cl1", "data");
		// HbaseDemo.get(tableName, "row1");
		// HbaseDemo.scan(tableName);
		// HbaseDemo.delete(tableName);
	}
	//build the client configuration
	private static Configuration getConfiguration() {
		Configuration conf = HBaseConfiguration.create();//create a new Configuration instance
		conf.set("hbase.rootdir", "hdfs://192.168.8.71:9000/hbase");//HBase storage location on HDFS
		conf.set("hbase.zookeeper.quorum", "192.168.8.71");//ZooKeeper quorum address
		return conf;
	}
	//create a table
	public static void create(String tableName, String columnFamily)
			throws IOException {
		HBaseAdmin admin = new HBaseAdmin(getConfiguration());//admin client for DDL operations
		if (admin.tableExists(tableName)) {//check whether the table already exists
			System.out.println("table exists!");
		} else {
			HTableDescriptor tableDesc = new HTableDescriptor(tableName);//describe the new table
			tableDesc.addFamily(new HColumnDescriptor(columnFamily));//add the column family
			admin.createTable(tableDesc);//create the table
			System.out.println("create table success!");
		}
	}
	//insert a single record
	public static void put(String tableName, String row, String columnFamily,
			String column, String data) throws IOException {
		HTable table = new HTable(getConfiguration(), tableName);//table handle from the configuration and table name
		Put p1 = new Put(Bytes.toBytes(row));//Put keyed by the row key
		p1.add(Bytes.toBytes(columnFamily), Bytes.toBytes(column), Bytes
				.toBytes(data));//column family, qualifier, value
		table.put(p1);//write the record
		System.out.println("put '" + row + "','" + columnFamily + ":" + column
				+ "','" + data + "'");//echo what was written
	}
	//read a single record
	public static void get(String tableName, String row) throws IOException {
		HTable table = new HTable(getConfiguration(), tableName);//table handle from the configuration and table name
		Get get = new Get(Bytes.toBytes(row));//fetch by row key
		Result result = table.get(get);//run the Get
		System.out.println("Get: " + result);//print the result
	}
	//scan the whole table
	public static void scan(String tableName) throws IOException {
		HTable table = new HTable(getConfiguration(), tableName);//table handle from the configuration and table name
		Scan scan = new Scan();//unbounded scan
		ResultScanner scanner = table.getScanner(scan);//open the scanner
		for (Result result : scanner) {//iterate over all rows
			System.out.println("Scan: " + result);//print each row
		}
	}
	//drop a table
	public static void delete(String tableName) throws IOException {
		HBaseAdmin admin = new HBaseAdmin(getConfiguration());//admin client for DDL operations
		if (admin.tableExists(tableName)) {
			try {
				admin.disableTable(tableName);//a table must be disabled before it can be deleted
				admin.deleteTable(tableName);//delete the table
				System.out.println("Delete " + tableName + " success");
			} catch (IOException e) {
				e.printStackTrace();
				System.out.println("Delete " + tableName + " failed");
			}
		}
	}

}
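As an aside, HBaseAdmin and HTable are deprecated as of HBase 1.0 in favor of the Connection API. For reference, here is a minimal sketch of the same table creation using that API (the class name HbaseDemoNew is just for illustration):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class HbaseDemoNew {

	public static void main(String[] args) throws IOException {
		Configuration conf = HBaseConfiguration.create();
		conf.set("hbase.rootdir", "hdfs://192.168.8.71:9000/hbase");
		conf.set("hbase.zookeeper.quorum", "192.168.8.71");
		// Connection is heavyweight and meant to be shared;
		// Admin handles are cheap and should be closed after use
		try (Connection connection = ConnectionFactory.createConnection(conf);
				Admin admin = connection.getAdmin()) {
			TableName tableName = TableName.valueOf("wlan_log");
			if (admin.tableExists(tableName)) {
				System.out.println("table exists!");
			} else {
				HTableDescriptor tableDesc = new HTableDescriptor(tableName);
				tableDesc.addFamily(new HColumnDescriptor("cf"));
				admin.createTable(tableDesc);
				System.out.println("create table success!");
			}
		}
	}
}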

Import the log file into the HBase table wlan_log:

import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class HbaseBatchImport {

        public static void main(String[] args) throws Exception {
                final Configuration configuration = new Configuration();//job configuration
                configuration.set("hbase.zookeeper.quorum", "192.168.8.71");//ZooKeeper quorum for HBase

                configuration.set(TableOutputFormat.OUTPUT_TABLE, "wlan_log");//target HBase table wlan_log

                configuration.set("dfs.socket.timeout", "180000");//raise the DFS socket timeout

                final Job job = new Job(configuration, "HBaseBatchImport");

                job.setMapperClass(BatchImportMapper.class);
                job.setReducerClass(BatchImportReducer.class);
                job.setMapOutputKeyClass(LongWritable.class);//map output key/value types
                job.setMapOutputValueClass(Text.class);

                job.setInputFormatClass(TextInputFormat.class);
                job.setOutputFormatClass(TableOutputFormat.class);//reduce output goes straight into HBase

                FileInputFormat.setInputPaths(job, "hdfs://192.168.8.71:9000/input");//input path on HDFS

                job.waitForCompletion(true);
        }

        static class BatchImportMapper extends
                        Mapper<LongWritable, Text, LongWritable, Text> {
                SimpleDateFormat dateformat1 = new SimpleDateFormat("yyyyMMddHHmmss");//timestamp display format
                Text v2 = new Text();//reused output value

                protected void map(LongWritable key, Text value, Context context)
                                throws java.io.IOException, InterruptedException {
                        final String[] splited = value.toString().split("\t");//fields are tab-separated
                        try {
                                final Date date = new Date(Long.parseLong(splited[0].trim()));//field 0: epoch millis (trim stray spaces)
                                final String dateFormat = dateformat1.format(date);//format the timestamp as yyyyMMddHHmmss
                                String rowKey = splited[1] + ":" + dateFormat;//row key: phone number + ":" + timestamp
                                v2.set(rowKey + "\t" + value.toString());//prepend the row key to the original line
                                context.write(key, v2);//emit for the reducer
                        } catch (NumberFormatException e) {
                                final Counter counter = context.getCounter("BatchImport",
                                                "ErrorFormat");
                                counter.increment(1L);//count malformed lines
                                System.out.println("parse error: " + splited[0] + " " + e.getMessage());
                        }
                }
        }

        static class BatchImportReducer extends
                        TableReducer<LongWritable, Text, NullWritable> {//TableReducer writes Puts to HBase
                protected void reduce(LongWritable key,
                                java.lang.Iterable<Text> values, Context context)
                                throws java.io.IOException, InterruptedException {
                        for (Text text : values) {
                                final String[] splited = text.toString().split("\t");//split the line the mapper built

                                final Put put = new Put(Bytes.toBytes(splited[0]));//field 0 is the row key built by the mapper
                                put.add(Bytes.toBytes("cf"), Bytes.toBytes("date"), Bytes
                                                .toBytes(splited[1]));//store field 1 in cf:date
                                context.write(NullWritable.get(), put);//hand the Put to TableOutputFormat
                        }
                }
        }
}

The input log (one record per line, fields tab-separated):

1363157985066 	13726230503	00-FD-07-A4-72-B8:CMCC	120.196.100.82	i02.c.aliimg.com		24	27	2481	24681	200
1363157995052 	13826544101	5C-0E-8B-C7-F1-E0:CMCC	120.197.40.4			4	0	264	0	200
1363157991076 	13926435656	20-10-7A-28-CC-0A:CMCC	120.196.100.99			2	4	132	1512	200
1363154400022 	13926251106	5C-0E-8B-8B-B1-50:CMCC	120.197.40.4			4	0	240	0	200
1363157993044 	18211575961	94-71-AC-CD-E6-18:CMCC-EASY	120.196.100.99	iface.qiyi.com	视频网站	15	12	1527	2106	200
1363157995074 	84138413	5C-0E-8B-8C-E8-20:7DaysInn	120.197.40.4	122.72.52.12		20	16	4116	1432	200
1363157993055 	13560439658	C4-17-FE-BA-DE-D9:CMCC	120.196.100.99			18	15	1116	954	200
1363157995033 	15920133257	5C-0E-8B-C7-BA-20:CMCC	120.197.40.4	sug.so.360.cn	信息安全	20	20	3156	2936	200
1363157983019 	13719199419	68-A1-B7-03-07-B1:CMCC-EASY	120.196.100.82			4	0	240	0	200
1363157984041 	13660577991	5C-0E-8B-92-5C-20:CMCC-EASY	120.197.40.4	s19.cnzz.com	站点统计	24	9	6960	690	200
1363157973098 	15013685858	5C-0E-8B-C7-F7-90:CMCC	120.197.40.4	rank.ie.sogou.com	搜索引擎	28	27	3659	3538	200
1363157986029 	15989002119	E8-99-C4-4E-93-E0:CMCC-EASY	120.196.100.99	www.umeng.com	站点统计	3	3	1938	180	200
1363157992093 	13560439658	C4-17-FE-BA-DE-D9:CMCC	120.196.100.99			15	9	918	4938	200
1363157986041 	13480253104	5C-0E-8B-C7-FC-80:CMCC-EASY	120.197.40.4			3	3	180	180	200
1363157984040 	13602846565	5C-0E-8B-8B-B6-00:CMCC	120.197.40.4	2052.flash2-http.qq.com	综合门户	15	12	1938	2910	200
1363157995093 	13922314466	00-FD-07-A2-EC-BA:CMCC	120.196.100.82	img.qfc.cn		12	12	3008	3720	200
1363157982040 	13502468823	5C-0A-5B-6A-0B-D4:CMCC-EASY	120.196.100.99	y0.ifengimg.com	综合门户	57	102	7335	110349	200
1363157986072 	18320173382	84-25-DB-4F-10-1A:CMCC-EASY	120.196.100.99	input.shouji.sogou.com	搜索引擎	21	18	9531	2412	200
1363157990043 	13925057413	00-1F-64-E1-E6-9A:CMCC	120.196.100.55	t3.baidu.com	搜索引擎	69	63	11058	48243	200
1363157988072 	13760778710	00-FD-07-A4-7B-08:CMCC	120.196.100.82			2	2	120	120	200
1363157985079 	13823070001	20-7C-8F-70-68-1F:CMCC	120.196.100.99			6	3	360	180	200
1363157985069 	13600217502	00-1F-64-E2-E8-B1:CMCC	120.196.100.55			18	138	1080	186852	200
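
For reference, here is how the mapper derives the row key from the first input line above. This is a standalone sketch (the class name RowKeyDemo is just for illustration); the printed value depends on the JVM's default timezone, and with GMT+8 it prints 13726230503:20130313145945.

import java.text.SimpleDateFormat;
import java.util.Date;

public class RowKeyDemo {
	public static void main(String[] args) {
		// Field 0 of the first sample line; trim() drops the trailing space
		long ts = Long.parseLong("1363157985066 ".trim());
		String phone = "13726230503"; // field 1 of the same line
		SimpleDateFormat fmt = new SimpleDateFormat("yyyyMMddHHmmss");
		// Row key = phone number + ":" + formatted timestamp, so all rows
		// for one phone number sort together in HBase, ordered by time
		System.out.println(phone + ":" + fmt.format(new Date(ts)));
	}
}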

[hadoop@h71 q1]$ hadoop fs -put input /


Create the wlan_log table in HBase by compiling and running HbaseDemo (these bare javac/java invocations assume the Hadoop and HBase jars are already on the CLASSPATH):

[hadoop@h71 q1]$ /usr/jdk1.7.0_25/bin/javac HbaseDemo.java

[hadoop@h71 q1]$ /usr/jdk1.7.0_25/bin/java HbaseDemo


Compile HbaseBatchImport and package it into a jar:

[hadoop@h71 q1]$ /usr/jdk1.7.0_25/bin/javac HbaseBatchImport.java 
Note: HbaseBatchImport.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
[hadoop@h71 q1]$ /usr/jdk1.7.0_25/bin/jar cvf xx.jar HbaseBatchImport*class
added manifest
adding: HbaseBatchImport$BatchImportMapper.class(in = 2731) (out= 1204)(deflated 55%)
adding: HbaseBatchImport$BatchImportReducer.class(in = 2185) (out= 906)(deflated 58%)
adding: HbaseBatchImport.class(in = 1613) (out= 833)(deflated 48%)
[hadoop@h71 q1]$ hadoop jar xx.jar HbaseBatchImport

(This error never appeared when I ran the same job on hadoop-0.20.2-cdh3u5 with hbase-0.90.6-cdh3u5, but it shows up on this hadoop-2.6.0-cdh5.5.2 cluster.)

SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/hadoop/hbase-1.0.0-cdh5.5.2/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
12/12/12 23:55:37 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
12/12/12 23:55:37 INFO client.RMProxy: Connecting to ResourceManager at h71/192.168.8.71:8032
12/12/12 23:55:38 INFO Configuration.deprecation: dfs.socket.timeout is deprecated. Instead, use dfs.client.socket-timeout
12/12/12 23:55:38 WARN mapreduce.JobResourceUploader: Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
12/12/12 23:55:38 WARN mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
12/12/12 23:55:38 INFO input.FileInputFormat: Total input paths to process : 1
12/12/12 23:55:38 INFO mapreduce.JobSubmitter: number of splits:1
12/12/12 23:55:38 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1355303155288_0017
12/12/12 23:55:38 INFO mapred.YARNRunner: Job jar is not present. Not adding any jar to the list of resources.
12/12/12 23:55:38 INFO impl.YarnClientImpl: Submitted application application_1355303155288_0017
12/12/12 23:55:39 INFO mapreduce.Job: The url to track the job: http://h71:8088/proxy/application_1355303155288_0017/
12/12/12 23:55:39 INFO mapreduce.Job: Running job: job_1355303155288_0017
12/12/12 23:55:46 INFO mapreduce.Job: Job job_1355303155288_0017 running in uber mode : false
12/12/12 23:55:46 INFO mapreduce.Job:  map 0% reduce 0%
12/12/12 23:55:46 INFO mapreduce.Job: Job job_1355303155288_0017 failed with state FAILED due to: Application application_1355303155288_0017 failed 2 times due to AM Container for appattempt_1355303155288_0017_000002 exited with  exitCode: 1
For more detailed output, check application tracking page:http://h71:8088/proxy/application_1355303155288_0017/Then, click on links to logs of each attempt.
Diagnostics: Exception from container-launch.
Container id: container_1355303155288_0017_02_000001
Exit code: 1
Stack trace: ExitCodeException exitCode=1: 
        at org.apache.hadoop.util.Shell.runCommand(Shell.java:561)
        at org.apache.hadoop.util.Shell.run(Shell.java:478)
        at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:738)
        at org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor.launchContainer(DefaultContainerExecutor.java:210)
        at org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:302)
        at org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:82)
        at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
        at java.util.concurrent.FutureTask.run(FutureTask.java:166)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:724)


Container exited with a non-zero exit code 1
Failing this attempt. Failing the application.
12/12/12 23:55:46 INFO mapreduce.Job: Counters: 0

Solution: the AM container exits with code 1 because the containers launched by YARN cannot find the classes they need; yarn.application.classpath is not configured. Edit yarn-site.xml and add the following property (the paths match this installation):

[hadoop@h71 ~]$ cd hadoop-2.6.0-cdh5.5.2/etc/hadoop
[hadoop@h71 hadoop]$ vi yarn-site.xml

<property>
  <name>yarn.application.classpath</name>
  <value>
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/etc/hadoop/*,
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/common/lib/*,
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/common/*,
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/hdfs,
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/hdfs/lib/*,
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/hdfs/*,
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/yarn/lib/*,
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/yarn/*,
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/mapreduce/lib/*,
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/mapreduce/*,
    /home/hadoop/hive-1.1.0-cdh5.5.2/lib/*,
    /home/hadoop/hadoop-2.6.0-cdh5.5.2/contrib/capacity-scheduler/*,
    /home/hadoop/hbase-1.0.0-cdh5.5.2/lib/*
  </value>
</property>

Running the job again fails, this time with a different error:

[hadoop@h71 q1]$ hadoop jar xx.jar HbaseBatchImport

SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/hadoop/hadoop-2.6.0-cdh5.5.2/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/hadoop/hbase-1.0.0-cdh5.5.2/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
12/12/12 22:16:16 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
12/12/12 22:16:16 INFO client.RMProxy: Connecting to ResourceManager at h71/192.168.8.71:8032
12/12/12 22:16:17 INFO Configuration.deprecation: dfs.socket.timeout is deprecated. Instead, use dfs.client.socket-timeout
12/12/12 22:16:17 WARN mapreduce.JobResourceUploader: Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
12/12/12 22:16:17 WARN mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
12/12/12 22:16:17 INFO input.FileInputFormat: Total input paths to process : 1
12/12/12 22:16:17 INFO mapreduce.JobSubmitter: number of splits:1
12/12/12 22:16:17 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1355303155288_0017
12/12/12 22:16:17 INFO mapred.YARNRunner: Job jar is not present. Not adding any jar to the list of resources.
12/12/12 22:16:17 INFO impl.YarnClientImpl: Submitted application application_1355303155288_0017
12/12/12 22:16:17 INFO mapreduce.Job: The url to track the job: http://h71:8088/proxy/application_1355303155288_0017/
12/12/12 22:16:17 INFO mapreduce.Job: Running job: job_1355303155288_0017
12/12/12 22:16:22 INFO mapreduce.Job: Job job_1355303155288_0017 running in uber mode : false
12/12/12 22:16:22 INFO mapreduce.Job:  map 0% reduce 0%
12/12/12 22:16:25 INFO mapreduce.Job: Task Id : attempt_1355303155288_0017_m_000000_0, Status : FAILED
Error: java.lang.RuntimeException: java.lang.ClassNotFoundException: Class HbaseBatchImport$BatchImportMapper not found
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2199)
        at org.apache.hadoop.mapreduce.task.JobContextImpl.getMapperClass(JobContextImpl.java:196)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:745)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:163)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
Caused by: java.lang.ClassNotFoundException: Class HbaseBatchImport$BatchImportMapper not found
        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2105)
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2197)
        ... 8 more

12/12/12 22:16:29 INFO mapreduce.Job: Task Id : attempt_1355303155288_0017_m_000000_1, Status : FAILED
Error: java.lang.RuntimeException: java.lang.ClassNotFoundException: Class HbaseBatchImport$BatchImportMapper not found
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2199)
        at org.apache.hadoop.mapreduce.task.JobContextImpl.getMapperClass(JobContextImpl.java:196)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:745)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:163)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
Caused by: java.lang.ClassNotFoundException: Class HbaseBatchImport$BatchImportMapper not found
        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2105)
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2197)
        ... 8 more

12/12/12 22:16:33 INFO mapreduce.Job: Task Id : attempt_1355303155288_0017_m_000000_2, Status : FAILED
Error: java.lang.RuntimeException: java.lang.ClassNotFoundException: Class HbaseBatchImport$BatchImportMapper not found
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2199)
        at org.apache.hadoop.mapreduce.task.JobContextImpl.getMapperClass(JobContextImpl.java:196)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:745)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:163)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
Caused by: java.lang.ClassNotFoundException: Class HbaseBatchImport$BatchImportMapper not found
        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2105)
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2197)
        ... 8 more

12/12/12 22:16:40 INFO mapreduce.Job:  map 100% reduce 100%
12/12/12 22:16:40 INFO mapreduce.Job: Job job_1355303155288_0017 failed with state FAILED due to: Task failed task_1355303155288_0017_m_000000
Job failed as tasks failed. failedMaps:1 failedReduces:0

12/12/12 22:16:40 INFO mapreduce.Job: Counters: 9
        Job Counters 
                Failed map tasks=4
                Launched map tasks=4
                Other local map tasks=3
                Data-local map tasks=1
                Total time spent by all maps in occupied slots (ms)=10007
                Total time spent by all reduces in occupied slots (ms)=0
                Total time spent by all map tasks (ms)=10007
                Total vcore-seconds taken by all map tasks=10007
                Total megabyte-seconds taken by all map tasks=10247168

Solution: the submission log warns "No job jar file set. User classes may not be found.", meaning the job jar is never shipped to the cluster, so the YARN task JVMs cannot load the nested mapper class. Remove the old class files and edit the source:

[hadoop@h71 q1]$ rm -rf HbaseBatchImport*class
[hadoop@h71 q1]$ vi HbaseBatchImport.java

Add the following line to main() (Hadoop 1 ran this job fine without it, but Hadoop 2 requires it):

job.setJarByClass(HbaseBatchImport.class);
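
In context, the job setup in main() now reads:

final Job job = new Job(configuration, "HBaseBatchImport");
// Ship the jar that contains this class to the cluster so that the
// YARN task JVMs can load the nested Mapper and Reducer classes
job.setJarByClass(HbaseBatchImport.class);
job.setMapperClass(BatchImportMapper.class);
job.setReducerClass(BatchImportReducer.class);

(Recompile and rebuild xx.jar as before, then rerun the job.)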


Running the job once more fails with yet another error:

[hadoop@h71 q1]$ hadoop jar xx.jar HbaseBatchImport

12/12/13 00:36:00 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/hadoop/hbase/mapreduce/TableReducer
        at java.lang.ClassLoader.defineClass1(Native Method)
        at java.lang.ClassLoader.defineClass(ClassLoader.java:792)
        at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
        at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
        at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at messages3.main(messages3.java:34)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.hbase.mapreduce.TableReducer
        at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        ... 18 more

Solution: this NoClassDefFoundError is thrown on the client side at submission time (note the RunJar frames in the stack trace): the hadoop command itself cannot find the HBase classes. Edit hadoop-env.sh to put the HBase jars on the client classpath:

[hadoop@h71 hadoop]$ vi hadoop-env.sh

export HBASE_HOME=/home/hadoop/hbase-1.0.0-cdh5.5.2
export HADOOP_CLASSPATH=$HBASE_HOME/lib/*


(Hadoop and HBase, or the whole VM, may need a restart for this to take effect; in my case it worked without one.)

[hadoop@h71 q1]$ hadoop jar xx.jar HbaseBatchImport

17/03/18 16:50:06 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
17/03/18 16:50:07 INFO client.RMProxy: Connecting to ResourceManager at h71/192.168.8.71:8032
17/03/18 16:50:07 INFO Configuration.deprecation: dfs.socket.timeout is deprecated. Instead, use dfs.client.socket-timeout
17/03/18 16:50:08 WARN mapreduce.JobResourceUploader: Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
17/03/18 16:50:08 INFO input.FileInputFormat: Total input paths to process : 1
17/03/18 16:50:08 INFO mapreduce.JobSubmitter: number of splits:1
17/03/18 16:50:08 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1489740265001_0002
17/03/18 16:50:09 INFO impl.YarnClientImpl: Submitted application application_1489740265001_0002
17/03/18 16:50:09 INFO mapreduce.Job: The url to track the job: http://h71:8088/proxy/application_1489740265001_0002/
17/03/18 16:50:09 INFO mapreduce.Job: Running job: job_1489740265001_0002
17/03/18 16:50:18 INFO mapreduce.Job: Job job_1489740265001_0002 running in uber mode : false
17/03/18 16:50:18 INFO mapreduce.Job:  map 0% reduce 0%
17/03/18 16:50:32 INFO mapreduce.Job:  map 100% reduce 0%
17/03/18 16:50:38 INFO mapreduce.Job:  map 100% reduce 100%
17/03/18 16:50:38 INFO mapreduce.Job: Job job_1489740265001_0002 completed successfully
17/03/18 16:50:38 INFO mapreduce.Job: Counters: 49
        File System Counters
                FILE: Number of bytes read=190
                FILE: Number of bytes written=221731
                FILE: Number of read operations=0
                FILE: Number of large read operations=0
                FILE: Number of write operations=0
                HDFS: Number of bytes read=215
                HDFS: Number of bytes written=0
                HDFS: Number of read operations=2
                HDFS: Number of large read operations=0
                HDFS: Number of write operations=0
        Job Counters 
                Launched map tasks=1
                Launched reduce tasks=1
                Data-local map tasks=1
                Total time spent by all maps in occupied slots (ms)=12385
                Total time spent by all reduces in occupied slots (ms)=3458
                Total time spent by all map tasks (ms)=12385
                Total time spent by all reduce tasks (ms)=3458
                Total vcore-seconds taken by all map tasks=12385
                Total vcore-seconds taken by all reduce tasks=3458
                Total megabyte-seconds taken by all map tasks=12682240
                Total megabyte-seconds taken by all reduce tasks=3540992
        Map-Reduce Framework
                Map input records=2
                Map output records=2
                Map output bytes=180
                Map output materialized bytes=190
                Input split bytes=95
                Combine input records=0
                Combine output records=0
                Reduce input groups=2
                Reduce shuffle bytes=190
                Reduce input records=2
                Reduce output records=2
                Spilled Records=4
                Shuffled Maps =1
                Failed Shuffles=0
                Merged Map outputs=1
                GC time elapsed (ms)=366
                CPU time spent (ms)=2050
                Physical memory (bytes) snapshot=243867648
                Virtual memory (bytes) snapshot=2168229888
                Total committed heap usage (bytes)=136974336
        Shuffle Errors
                BAD_ID=0
                CONNECTION=0
                IO_ERROR=0
                WRONG_LENGTH=0
                WRONG_MAP=0
                WRONG_REDUCE=0
        File Input Format Counters 
                Bytes Read=120
        File Output Format Counters 
                Bytes Written=0


Scanning wlan_log in HBase now shows the imported data:

[hadoop@h71 hbase-1.0.0-cdh5.5.2]$ bin/hbase shell
hbase(main):002:0> scan 'wlan_log'


