Hive+GenericUDF示例二

        再来看一个分数统计的小例子。

        在Hive中存在如下一张表:

hive> describe tb_test2;
OK
name	string	
score_list	array<map<string,int>>	
Time taken: 0.074 seconds
hive> select * from tb_test2;
OK
A	[{"math":100,"english":90,"history":85}]
B	[{"math":95,"english":80,"history":100}]
C	[{"math":80,"english":90,"history":100}]
Time taken: 0.107 seconds

        编写genericUDF.

       

package com.wz.udf;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.lazy.LazyString;
import org.apache.hadoop.hive.serde2.lazy.LazyMap;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import java.util.ArrayList;
 
public class helloGenericUDFNew extends GenericUDF {
     输入变量定义
     private ObjectInspector nameObj;
     private ListObjectInspector listoi;
     private MapObjectInspector mapOI;
     private ArrayList<Object> valueList = new ArrayList<Object>(); 
     @Override
     public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
          nameObj = (ObjectInspector)arguments[0];
          listoi = (ListObjectInspector)arguments[1];
	  mapOI = ((MapObjectInspector)listoi.getListElementObjectInspector());
          //输出结构体定义
          ArrayList structFieldNames = new ArrayList();
          ArrayList structFieldObjectInspectors = new ArrayList();
          structFieldNames.add("name");
	  structFieldNames.add("totalScore");
 
          structFieldObjectInspectors.add( PrimitiveObjectInspectorFactory.writableStringObjectInspector );
          structFieldObjectInspectors.add( PrimitiveObjectInspectorFactory.writableIntObjectInspector );

          StructObjectInspector si2;
          si2 = ObjectInspectorFactory.getStandardStructObjectInspector(structFieldNames, structFieldObjectInspectors); 
          return si2;
     }
 
     @Override
     public Object evaluate(DeferredObject[] arguments) throws HiveException{
	  LazyString LName = (LazyString)(arguments[0].get());
	  String strName = ((StringObjectInspector)nameObj).getPrimitiveJavaObject( LName );

	  int nelements = listoi.getListLength(arguments[1].get());
          int nTotalScore=0;
          valueList.clear();
          //遍历list
	  for(int i=0;i<nelements;i++)
	  { 
               LazyMap LMap = (LazyMap)listoi.getListElement(arguments[1].get(),i);
               //获取map中的所有value值
 	       valueList.addAll(mapOI.getMap(LMap).values()); 
               for (int j = 0; j < valueList.size(); j++)
	       {
                   nTotalScore+=Integer.parseInt(valueList.get(j).toString());
               }               
          }
	  Object[] e;	
	  e = new Object[2];
	  e[0] = new Text(strName);
          e[1] = new IntWritable(nTotalScore);
          return e;
     }
 
     @Override
     public String getDisplayString(String[] children) {
          assert( children.length>0 );
 
          StringBuilder sb = new StringBuilder();
          sb.append("helloGenericUDFNew(");
          sb.append(children[0]);
          sb.append(")");
 
          return sb.toString();
     }
}

        在Hive中执行,结果如下:

        
hive> add jar /home/wangzhun/hive/hive-0.8.1/lib/helloGenericUDFNew.jar;    
Added /home/wangzhun/hive/hive-0.8.1/lib/helloGenericUDFNew.jar to class path
Added resource: /home/wangzhun/hive/hive-0.8.1/lib/helloGenericUDFNew.jar
hive> create temporary function hellonew as 'com.wz.udf.helloGenericUDFNew';
OK
Time taken: 0.016 seconds
hive> select hellonew(tb_test2.name,tb_test2.score_list) from tb_test2;     
Total MapReduce jobs = 1
Launching Job 1 out of 1
Number of reduce tasks is set to 0 since there's no reduce operator
Starting Job = job_201312091733_0018, Tracking URL = http://localhost:50030/jobdetails.jsp?jobid=job_201312091733_0018
Kill Command = /home/wangzhun/hadoop/hadoop-0.20.2/bin/../bin/hadoop job  -Dmapred.job.tracker=localhost:9001 -kill job_201312091733_0018
Hadoop job information for Stage-1: number of mappers: 1; number of reducers: 0
2013-12-09 22:31:22,328 Stage-1 map = 0%,  reduce = 0%
2013-12-09 22:31:25,354 Stage-1 map = 100%,  reduce = 0%
2013-12-09 22:31:28,390 Stage-1 map = 100%,  reduce = 100%
Ended Job = job_201312091733_0018
MapReduce Jobs Launched: 
Job 0: Map: 1   HDFS Read: 99 HDFS Write: 18 SUCESS
Total MapReduce CPU Time Spent: 0 msec
OK
{"name":"A","totalscore":275}
{"name":"B","totalscore":275}
{"name":"C","totalscore":270}
Time taken: 21.7 seconds


  • 0
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 1
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值