STORM入门之(HIVE集成架构)

集成方式 hivebolt

注意事项:各组件版本不匹配会导致各种类找不到(ClassNotFound)、方法未定义(NoSuchMethod)等异常,排查起来很头疼。

hive版本2.1.1  storm版本1.0.5 hadoop版本2.6.5

在 Windows 上以 local 模式连接 Linux 上的 Hive 时,会出现 Unix 验证类找不到的异常:该 class 位于 rt.jar 中,只有 Linux 版本的 JDK 才带有。因此在 Windows 上跑程序时,需要在本地安装 Hive,这一点需要注意。

hivesql:

本测试使用的 Hive 建表语句:

create table demo (id int,name string,sex string) partitioned by (age int) clustered by (id) into 3 buckets stored as orc tblproperties ("orc.compress"="NONE",'transactional'='true');


import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.hive.bolt.HiveBolt;
import org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper;
import org.apache.storm.hive.common.HiveOptions;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;
import java.util.Map;
import java.util.Random;
public class Storm2Hive {
    static class Storm_Hive_Spout extends BaseRichSpout {
        SpoutOutputCollector spoutOutputCollector;
        String[] name = {"aa","bb","cc","dd","ee","ff","gg","hh"};
        String[] sex = {"man","woman"};
        int[] id = {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16};
        Random random = new Random();

        public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
            this.spoutOutputCollector=spoutOutputCollector;
         
        }

        public void nextTuple() {
            Utils.sleep(1);
            String s = name[random.nextInt(name.length)];
            String sex1 = sex[random.nextInt(sex.length)];
            int id1 = id[random.nextInt(id.length)];
            spoutOutputCollector.emit(new Values(id1,s,sex1,"18"));

        }

        public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
            outputFieldsDeclarer.declare(new Fields("id","name","sex","age"));

        }
    }

    public static void main(String[] args) {
        System.setProperty("HADOOP_USER_NAME", "hadoop");  
        System.setProperty("os.name", "Linux");  
        DelimitedRecordHiveMapper delimitedRecordHiveMapper = new DelimitedRecordHiveMapper();//映射字段,spout那边发来的
        delimitedRecordHiveMapper.withColumnFields(new Fields("id","name","sex")).withPartitionFields(new Fields("age"));
        HiveOptions hiveOptions = new HiveOptions("thrift://10.176.62.1:9083","default","demo",delimitedRecordHiveMapper);
        hiveOptions.withTxnsPerBatch(10)
                .withBatchSize(5000)
                .withIdleTimeout(10);

        HiveBolt hiveBolt = new HiveBolt(hiveOptions);
        

        TopologyBuilder topologyBuilder = new TopologyBuilder();
        
        topologyBuilder.setSpout("spout",new Storm_Hive_Spout());
        topologyBuilder.setSpout("spout1",new Storm_Hive_Spout2());
        topologyBuilder.setBolt("bolt",hiveBolt).shuffleGrouping("spout");
        LocalCluster localCluster = new LocalCluster();
        localCluster.submitTopology("soc",new Config(),topologyBuilder.createTopology());
       
    }
   
}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值