// Spark SQL job: reads relational-database tables and ElasticSearch data,
// joins them, and refreshes the aggregated per-merchant result.
// (Spark sql 读取数据库和 ElasticSearch 数据进行连接处理)

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;

import com.dinpay.bdp.rcp.domain.FlowMain;
import com.dinpay.bdp.rcp.util.CodisUtil;
import com.dinpay.bdp.rcp.util.Constant;
import com.google.gson.Gson;

import redis.clients.jedis.Jedis;

/**
 * 首页的数据,定时Job定时刷新
 */
public class MainFlowProcedure{
     private static Logger logger = Logger.getLogger(MainFlowProcedure.class.getSimpleName());

     public static void main(String[] args) {
         
       //屏蔽日志
       Logger.getLogger("org.apache.spark").setLevel(Level.WARN);
       
       //配置SparkConf
       SparkConf conf = new SparkConf().setAppName("MainFlowProcedure").setMaster("local[2]");
       JavaSparkContext sc =new JavaSparkContext(conf);
       SQLContext sqlContext = new SQLContext(sc);
       
       registerTable(sqlContext,"t_sys_attention_library");
       registerTable(sqlContext,"t_sys_big_order");
       registerTable(sqlContext,"t_sys_doubtful_order");
       registerTable(sqlContext,"t_rc_event");
       registerESTable(sqlContext, "t_order");

       sqlContext.sql("select merchant_id,count(order_id) as txcnt ,sum(tx_money) as txamount from t_order group by merchant_id")
                    .registerTempTable("t_order_merchant");
    
       sqlContext.sql("select t2.merchant_id,count(t1.order_id) as bigcnt from t_sys_big_order t1 join t_order t2 on t1.order_id = t2.order_id group by t2.merchant_id")
                    .registerTempTable("t_big_merchant");


       sqlContext.sql("select t2.merchant_id,count(t1.order_id) as dbtcnt from t_sys_doubtful_order t1 join t_order t2 on t1.order_id = t2.order_id group by t2.merchant_id")
                  .registerTempTable("t_doubt_merchant");

       sqlContext.sql("select merchant_id,count(*) as rccnt from t_rc_event group by merchant_id")
                    .registerTempTable("t_rc_merchant");
       
       sqlContext.sql("select t1.merchant_id,t2.txcnt,t3.dbtcnt,t4.bigcnt,t2.txamount,t5.rccnt from t_sys_attention_library t1 left join t_order_merchant t2 on t1.merchant_id = t2.merchant_id left join t_doubt_merchant t3 on t1.merchant_id = t3.merchant_id left join t_big_merchant t4 on t1.merchant_id = t4.merchant_id left join t_rc_merchant t5 on t1.merchant_id = t5.merchant_id")
                  .registerTempTable("t_attention");
       //生成结果集
       DataFrame resultDF =sqlContext.sql("select t.merchant_id,t.txcnt,t.dbtcnt,t.bigcnt,t.txamount,t.rccnt from t_attention t"); 
       
       List<FlowMain> flowMains = resultDF.javaRDD().map(new Function<Row,FlowMain>(){public FlowMain call(Row row){
FlowMain flowMain = new FlowMain();
               flowMain.setMerchantId(row.getString(0));
               flowMain.setTxCnt(row.isNullAt(1)?0:row.getInt(1));
               flowMain.setSuspectCnt(row.isNullAt(2)?0:row.getInt(2));
               flowMain.setBigAmountCnt(row.isNullAt(3)?0:row.getInt(3));
               flowMain.setTxAmount(row.isNullAt(4)?0.0:row.getDouble(4));
               flowMain.setTxRate("偏高");
               flowMain.setRcEventCnt(row.isNullAt(5)?0:row.getInt(5));
               return flowMain;
           }
       }).collect();
       
       Gson gson = new Gson();
       String res = gson.toJson(flowMains);       
       //连接codis进行操作,每次将新生成的数据,放到对应的key中
       Jedis jedis = CodisUtil.getJedis() ;
       jedis.set("mainFlow", res);
       logger.info("插入到Codis数据完成!!!");
       sc.stop();
   }
   
   //获取数据库的表注册为临时表
   private static void registerTable(SQLContext sqlContext,String dbtable){
       Map<String,String> mcOptions =new HashMap<String, String>();
       mcOptions.put("url", Constant.URL);
       mcOptions.put("driver", Constant.DRIVER);
       mcOptions.put("dbtable", dbtable);
       mcOptions.put("user", Constant.USER);
       mcOptions.put("password", Constant.PASSWD);
       DataFrame jdbcDF = sqlContext.read().format("jdbc").options(mcOptions).load();
       jdbcDF.registerTempTable(dbtable);
   }
   
   //获取ElasticSearch中的索引注册为表
   private static void registerESTable(SQLContext sqlContext,String index){
       Map<String,String> esOptions =new HashMap<String, String>();
       esOptions.put("es.nodes", Constant.ESNODES);
       esOptions.put("es.port", Constant.ESPORT);
       esOptions.put("es.index.auto.create", "true");
       
       DataFrame OrderDF = sqlContext.read().format("org.elasticsearch.spark.sql")
                                   .options(esOptions)
                                   .load(index+"/"+index);
       OrderDF.registerTempTable(index);
   } 
   
}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

荣华富贵8

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值