Approach 1:
Code:
package com.***;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.*;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
@Description(name = "hive2kafka", value = "_FUNC_(brokerhost_and_port, topic, array<map<string,string>>) - sends each map in the array as a message to the given Kafka topic and returns 1")
public class Hive2KakfaUDF extends GenericUDF {
private String hostAndPort;
private String topics;
private StandardListObjectInspector paramsListInspector;
private StandardMapObjectInspector paramsElementInspector;
public ObjectInspector initialize(ObjectInspector[] arg0) throws UDFArgumentException {
if (arg0.length != 3) {
throw new UDFArgumentException(" Expecting three arguments: <brokerhost:port> <topic> array<map<string,string>> ");
}
// validate the first argument: broker host:port, which must be a constant string
if (arg0[0].getCategory() == Category.PRIMITIVE
&& ((PrimitiveObjectInspector) arg0[0]).getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.STRING) {
if (!(arg0[0] instanceof ConstantObjectInspector)) {
throw new UDFArgumentException("broker host:port must be constant");
}
ConstantObjectInspector brokerhost_and_port = (ConstantObjectInspector) arg0[0];
hostAndPort = brokerhost_and_port.getWritableConstantValue().toString();
}
// validate the second argument: the Kafka topic, which must be a constant string
if (arg0[1].getCategory() == Category.PRIMITIVE
&& ((PrimitiveObjectInspector) arg0[1]).getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.STRING) {
if (!(arg0[1] instanceof ConstantObjectInspector)) {
throw new UDFArgumentException("kafka topic must be constant");
}
ConstantObjectInspector topicCOI = (ConstantObjectInspector) arg0[1];
topics = topicCOI.getWritableConstantValue().toString();
}
// validate the third argument: it must be an array<map<string,string>>
if (arg0[2].getCategory() != Category.LIST) {
throw new UDFArgumentException(" Expecting an array<map<string,string>> field as third argument ");
}
ListObjectInspector third = (ListObjectInspector) arg0[2];
if (third.getListElementObjectInspector().getCategory() != Category.MAP) {
throw new UDFArgumentException(" Expecting an array<map<string,string>> field as third argument ");
}
paramsListInspector = ObjectInspectorFactory.getStandardListObjectInspector(third.getListElementObjectInspector());
paramsElementInspector = (StandardMapObjectInspector) third.getListElementObjectInspector();
// debug output: the categories of the map's key and value inspectors
System.out.println(paramsElementInspector.getMapKeyObjectInspector().getCategory());
System.out.println(paramsElementInspector.getMapValueObjectInspector().getCategory());
return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}
public Object evaluate(DeferredObject[] arg0) throws HiveException {
Properties props = new Properties();
props.put("bootstrap.servers", hostAndPort);
props.put("acks", "all");
props.put("retries", 0);
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
// create the Kafka producer (note: one producer is created per evaluate() call)
Producer<String, String> producer = new KafkaProducer<String, String>(props);
for (int i = 0; i < paramsListInspector.getListLength(arg0[2].get()); i++) {
Object row = paramsListInspector.getListElement(arg0[2].get(), i);
Map<?, ?> map = paramsElementInspector.getMap(row);
// Object obj = ObjectInspectorUtils.copyToStandardJavaObject(row,paramsElementInspector);
// convert to a standard Java map; otherwise the keys and values are Hadoop Writable objects
Map<String, String> data = new HashMap<String,String>();
for (Map.Entry<?, ?> entry : map.entrySet()) {
if (entry.getValue() != null && !"".equals(entry.getValue().toString())) {
data.put(entry.getKey().toString(), entry.getValue().toString());
}
}
producer.send(new ProducerRecord<String, String>(topics, Integer.toString(i), data.toString()));
}
producer.close();
return new IntWritable(1);
}
public String getDisplayString(String[] strings) {
return "hive2kafka(brokerhost_and_port,topic, array<map<string,string>>)";
}
}
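Note that this version sends data.toString() as the message value, which produces Java's {key=value, ...} map format rather than JSON. If downstream consumers expect JSON, the value can be serialized with fastjson (the library already used in approach 2 below). The helper below is a minimal sketch, not part of the original UDF, and assumes the com.alibaba:fastjson dependency is added to this project's pom as well:

// Hypothetical helper (not in the original code): build the Kafka message
// value as JSON instead of relying on Map.toString().
// Assumes com.alibaba:fastjson is on the classpath (see approach 2's pom below).
import java.util.Map;
import com.alibaba.fastjson.JSON;

public class RowJsonHelper {
    public static String toJson(Map<String, String> row) {
        return JSON.toJSONString(row);
    }
}

With this helper, the send call in evaluate() could become producer.send(new ProducerRecord<String, String>(topics, Integer.toString(i), RowJsonHelper.toJson(data))). This is an optional change; the original code sends the plain map string.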
pom:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.***</groupId>
<artifactId>udf_hive2kafka</artifactId>
<version>0.0.1-SNAPSHOT</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>1.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.6.0</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
<version>1.8</version>
<scope>system</scope>
<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.10</artifactId>
<version>0.8.2.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.2</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
Register the UDF:
add jar /var/lib/hadoop-hdfs/spride_sqoop_beijing/udf_jar/udf_hive2kafka-0.0.1-SNAPSHOT.jar;
create temporary function udf_hive2kafka as 'com.***.Hive2KakfaUDF';
Run the following SQL to push the data into Kafka:
SELECT
udf_hive2kafka( 'localhostname:9092', 'topicname',
collect_list(map(
'p_name',p_name,
'request_url', request_url,
'com_fullname',com_fullname,
'edu_require',edu_require,
'exp_require',exp_require,
'city',city,
'source',source,
'release_time',release_time,
'update_time',update_time,
'salary_month',salary_month,
'salary_max',salary_max,
'salary_min',salary_min,
'statement',statement,
'bo_id',bo_id,
'insert_time',insert_time,
'com_id',com_id,
'com_simplename',com_simplename,
'position_id',position_id,
'jobnature',jobnature,
'recruits_number',recruits_number,
'address',address,
'welfare',welfare,
'status',status
)) ) AS result
FROM
(
SELECT
*
FROM
dws_bo_final_spider_position001 limit 10
) a;
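To check that the rows actually reached the topic, a small standalone consumer can be run against the cluster. The sketch below uses the old high-level consumer API that ships with the kafka_2.10 0.8.2.0 dependency from the pom above; the ZooKeeper address, group id, and topic name ('localhostname:2181', 'hive2kafka-check', 'topicname') are placeholders that must be adjusted to your environment:

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class TopicChecker {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("zookeeper.connect", "localhostname:2181"); // placeholder ZooKeeper address
        props.put("group.id", "hive2kafka-check");            // placeholder consumer group
        props.put("auto.offset.reset", "smallest");           // read the topic from the beginning
        ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
        // open one stream for the target topic
        Map<String, List<KafkaStream<byte[], byte[]>>> streams =
                connector.createMessageStreams(Collections.singletonMap("topicname", 1));
        ConsumerIterator<byte[], byte[]> it = streams.get("topicname").get(0).iterator();
        while (it.hasNext()) {
            // print each message value as a string
            System.out.println(new String(it.next().message()));
        }
    }
}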
Some Hive versions do not support the collect_list() function; in that case, the following approach can be used to push data to Kafka:
Approach 2:
package test;
import com.alibaba.fastjson.JSON;
import org.apache.hadoop.hive.ql.exec.UDF;
import com.alibaba.fastjson.JSONObject;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringEncoder;
/**
* Created by tang on 2019/01/07
*
* Create a temporary UDF:
* add jar /usr/local/udf_hive2kafka-0.0.1-SNAPSHOT.jar;
* create temporary function udf_hive2kafka1 as 'test.HiveToKakfaUDF';
* Create a permanent UDF:
* CREATE FUNCTION udf_hive2kafka AS 'com.tzb.Hive2KakfaUDF'
* USING JAR 'hdfs:///user/hive/warehouse/ods.db/udf_jar/udf_hive2kafka-0.0.1-SNAPSHOT.jar';
* Upload the jar to HDFS:
* hadoop fs -put /var/lib/hadoop-hdfs/spride_sqoop_beijing/udf_jar/udf_hive2kafka-0.0.1-SNAPSHOT.jar /user/hive/warehouse/ods.db/udf_jar/udf_hive2kafka-0.0.1-SNAPSHOT.jar
*
*
* Example query:
* select udf_hive2kafka1('10.21.0.210:2181','10.21.0.210:9092','sparkkafka',map('full_name', full_name,'mobile_phone',mobile_phone,'email',email)) from dws.dws_bo_final_spider_contact limit 10;
*/
public class HiveToKakfaUDF extends UDF {
public String evaluate(String zklis, String brokerlis, String topic,Map<String,String> map) {
Producer<Integer, String> producer = createProducer(zklis, brokerlis);
String jsonData = JSON.toJSONString(map);
producer.send(new KeyedMessage<Integer, String>(topic, jsonData));
// close the producer so each call does not leak connections (a producer is created per row, which is simple but slow for large result sets)
producer.close();
return jsonData;
}
private static Producer<Integer, String> createProducer(String zklis, String brokerlis) {
Properties properties = new Properties();
properties.put("zookeeper.connect", zklis); // ZooKeeper connect string; separate multiple hosts with commas
properties.put("serializer.class", StringEncoder.class.getName());
properties.put("metadata.broker.list", brokerlis); // Kafka broker list
return new Producer<Integer, String>(new ProducerConfig(properties));
}
}
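Because this version extends the plain UDF class, evaluate() is an ordinary Java method and can be smoke-tested outside Hive. A minimal sketch, assuming the ZooKeeper address, broker address, and topic from the javadoc example above are reachable (the class name and sample row values below are made up for illustration):

import java.util.HashMap;
import java.util.Map;
import test.HiveToKakfaUDF;

public class HiveToKafkaUDFSmokeTest {
    public static void main(String[] args) {
        HiveToKakfaUDF udf = new HiveToKakfaUDF();
        // sample row; the keys mirror the map() call in the javadoc example query
        Map<String, String> row = new HashMap<String, String>();
        row.put("full_name", "test_name");
        row.put("mobile_phone", "00000000000");
        row.put("email", "test@example.com");
        // sends one JSON message to the topic and returns the payload that was sent
        String sent = udf.evaluate("10.21.0.210:2181", "10.21.0.210:9092", "sparkkafka", row);
        System.out.println(sent);
    }
}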
pom:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.huayong</groupId>
<artifactId>udf_hive2kafka</artifactId>
<version>0.0.1-SNAPSHOT</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>1.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.6.0</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
<version>1.8</version>
<scope>system</scope>
<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.10</artifactId>
<version>0.8.2.0</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.46</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20160212</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>3.10-FINAL</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>3.10-FINAL</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.2</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>