Here is some Java code, for reference only:
package com.hadoopbook.hive;

import java.util.ArrayList;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class UDTFSerial extends GenericUDTF {

    // Reusable one-column row buffer passed to forward()
    private final Object[] result = new Object[1];

    @Override
    public StructObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
        // Validate the argument list: exactly one int parameter
        if (args.length != 1) {
            throw new UDFArgumentLengthException("UDTFSerial takes only one argument");
        }
        if (!args[0].getTypeName().equals("int")) {
            throw new UDFArgumentException("UDTFSerial only takes an integer as a parameter");
        }
        // Declare the output schema: a single int column named col1
        ArrayList<String> fieldNames = new ArrayList<String>();
        ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
        fieldNames.add("col1");
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    }

    @Override
    public void process(Object[] args) throws HiveException {
        try {
            // Emit one row per serial number, from 0 to n-1
            int n = Integer.parseInt(args[0].toString());
            for (int i = 0; i < n; i++) {
                result[0] = i;
                forward(result);
            }
        } catch (NumberFormatException e) {
            throw new HiveException(e);
        }
    }

    @Override
    public void close() throws HiveException {
    }
}
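After packaging the class into a JAR, it can be registered and called from Hive roughly as follows (a minimal sketch; the JAR path and the function name udtf_serial are placeholders I chose, not anything defined by the code above):

-- Register the UDTF (path is a placeholder)
ADD JAR /path/to/udtf-serial.jar;
CREATE TEMPORARY FUNCTION udtf_serial AS 'com.hadoopbook.hive.UDTFSerial';

-- For each input row, emits 5 output rows: 0, 1, 2, 3, 4
SELECT udtf_serial(5) AS col1 FROM some_one_row_table;

A UDTF like this must either be the only expression in the SELECT list (as above) or be used through LATERAL VIEW when combined with other columns.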