Notes on some utility classes from my own development work (continuously updated)

Initializing Properties files

package com.sym.common

import java.io.{BufferedInputStream, File, FileInputStream, InputStream}
import java.util.Properties

import org.apache.log4j.Logger

/**
  * FileName: InitializeProperties
  * Author:   ShaoYM
  * Date:     2018/11/27 9:10
  * Description: Loads *.properties configuration files from the classpath or from a file-system path
  */
object InitializeProperties {

  private val logger:Logger=Logger.getLogger(this.getClass.getName)

  /*
      * @methods InitMysqlDBProperties
      * @author ShaoYM
      * @date 2019/6/13 16:40
      * @param null
      * @return _root_.java.util.Properties
      * @describe Reads the mysqldb.properties configuration file
  */
  def InitMysqlDBProperties(): Properties = {
    // load mysqldb.properties from the classpath
    val pro: Properties = new Properties()
    var in: InputStream = null
    try {
      in = this.getClass.getResourceAsStream("/mysqldb.properties")
      pro.load(new BufferedInputStream(in))
      logger.info("*************  initialize mysqldbProperties success !!!")
    } catch {
      case t: Throwable => logger.error("*************  initialize mysqldbProperties failed !!!", t)
    } finally {
      if (null != in)
        in.close()
    }
    pro
  }
  /*
      * @methods InitOrcleDBProperties
      * @author ShaoYM
      * @date 2019/6/13 16:40
      * @param null
      * @return _root_.java.util.Properties
      * @describe Reads the oracledb.properties configuration file
  */
  def InitOrcleDBProperties(): Properties = {
    // load oracledb.properties from the classpath
    val pro: Properties = new Properties()
    var in: InputStream = null
    try {
      in = this.getClass.getResourceAsStream("/oracledb.properties")
      pro.load(new BufferedInputStream(in))
      logger.info("*************  initialize oracledbProperties success !!!")
    } catch {
      case t: Throwable => logger.error("*************  initialize oracledbProperties failed !!!", t)
    } finally {
      if (null != in)
        in.close()
    }
    pro
  }


  /*
    * @methods InitHiveDBProperties
    * @author ShaoYM
    * @date 2019/6/13 16:40
    * @param null
    * @return _root_.java.util.Properties
    * @describe Reads the hivedb.properties configuration file
*/
  def InitHiveDBProperties(): Properties = {
    // load hivedb.properties from the classpath
    val pro: Properties = new Properties()
    var in: InputStream = null
    try {
      in = this.getClass.getResourceAsStream("/hivedb.properties")
      pro.load(new BufferedInputStream(in))
      logger.info("*************  initialize hivedbProperties success !!!")
    } catch {
      case t: Throwable => logger.error("*************  initialize hivedbProperties failed !!!", t)
    } finally {
      if (null != in)
        in.close()
    }
    pro
  }


  /*
    * @methods InitPhoenixDBProperties
    * @author ShaoYM
    * @date 2019/6/13 16:40
    * @param null
    * @return _root_.java.util.Properties
    * @describe Reads the phoenixdb.properties configuration file
*/
  def InitPhoenixDBProperties(): Properties = {
    // load phoenixdb.properties from the classpath
    val pro: Properties = new Properties()
    var in: InputStream = null
    try {
      in = this.getClass.getResourceAsStream("/phoenixdb.properties")
      pro.load(new BufferedInputStream(in))
      logger.info("*************  initialize phoenixDBProperties success !!!")
    } catch {
      case t: Throwable => logger.error("*************  initialize phoenixDBProperties failed !!!", t)
    } finally {
      if (null != in)
        in.close()
    }
    pro
  }



  def InitProperties(path: String): Properties = {
    // load a properties file from an arbitrary file-system path
    val pro: Properties = new Properties()
    var in: InputStream = null
    try {
      in = new FileInputStream(new File(path))
      pro.load(new BufferedInputStream(in))
      logger.info(s"*************  initialize InitProperties :${path} success !!!")
    } catch {
      case t: Throwable => logger.error(s"*************  initialize InitProperties :${path} failed !!!", t)
    } finally {
      if (null != in)
        in.close()
    }
    pro
  }




}
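
For reference, here is a hypothetical mysqldb.properties matching the keys that the loader above and the connection pool below read (the key names come straight from the getProperty calls; every value is a placeholder):

# src/main/resources/mysqldb.properties (example values only)
url=jdbc:mysql://127.0.0.1:3306/testdb?useUnicode=true&characterEncoding=utf8
driverClass=com.mysql.jdbc.Driver
userName=root
passWord=123456
minPoolSize=3
maxPoolSize=10
acquireIncrement=2
maxStatements=100

oracledb.properties, hivedb.properties and phoenixdb.properties follow the same key names (the Phoenix pool reads no userName/passWord).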

JDBC connection pool

package com.sym.common

import java.sql.Connection
import java.util.Properties

import com.mchange.v2.c3p0.ComboPooledDataSource
import org.apache.log4j.Logger
/**
  * FileName: DBPoolManager
  * Author:   ShaoYM
  * Date:     2019/6/14 10:11
  * Description: c3p0 connection pool manager for MySQL / Oracle / Hive / Phoenix
  */

object DBPoolManager {

  lazy val logger:Logger=Logger.getLogger(this.getClass.getSimpleName)

  // pool instances, created lazily on first use
  var mysqlManager: MysqlPool = _
  var oracleManager: OraclePool = _
  var phoenixManager: PhoenixPool = _
  var hiveManager: HivePool = _



  def getMysqlManager: MysqlPool = {
    synchronized {
      if (mysqlManager == null) {
        mysqlManager = new MysqlPool
      }
    }
    mysqlManager
  }


  def getOracleManager: OraclePool = {
    synchronized {
      if (oracleManager == null) {
        oracleManager = new OraclePool
      }
    }
    oracleManager
  }


  def getHiveManager: HivePool = {
    synchronized {
      if (hiveManager == null) {
        hiveManager = new HivePool
      }
    }
    hiveManager
  }


  def getPhoenixManager: PhoenixPool = {
    synchronized {
      if (phoenixManager == null) {
        phoenixManager = new PhoenixPool
      }
    }
    phoenixManager
  }



  class MysqlPool extends Serializable {
    val pro: Properties = InitializeProperties.InitMysqlDBProperties()
    private val cpds: ComboPooledDataSource = new ComboPooledDataSource(true)
    try {
      cpds.setJdbcUrl(pro.getProperty("url"))
      cpds.setDriverClass(pro.getProperty("driverClass"))
      cpds.setUser(pro.getProperty("userName"))
      cpds.setPassword(pro.getProperty("passWord"))
      cpds.setMinPoolSize(pro.getProperty("minPoolSize").toInt)
      cpds.setMaxPoolSize(pro.getProperty("maxPoolSize").toInt)
      cpds.setAcquireIncrement(pro.getProperty("acquireIncrement").toInt)
      cpds.setMaxStatements(pro.getProperty("maxStatements").toInt)
    } catch {
      case e: Exception =>
        logger.error("Failed to initialize the MySQL connection pool !!!", e)
    }

    def getConnection: Connection = {
      try {
        cpds.getConnection()
      } catch {
        case ex: Exception =>
          ex.printStackTrace()
          null
      }
    }


    def close(): Unit = {
      try {
        cpds.close()
      } catch {
        case ex: Exception =>
          ex.printStackTrace()
      }
    }

  }


  class OraclePool extends Serializable {
      val pro:Properties=InitializeProperties.InitOrcleDBProperties()
      private val cpds: ComboPooledDataSource = new ComboPooledDataSource(true)
      try {
        cpds.setJdbcUrl(pro.getProperty("url"))
        cpds.setDriverClass(pro.getProperty("driverClass"))
        cpds.setUser(pro.getProperty("userName"))
        cpds.setPassword(pro.getProperty("passWord"))
        cpds.setMinPoolSize(pro.getProperty("minPoolSize").toInt)
        cpds.setMaxPoolSize(pro.getProperty("maxPoolSize").toInt)
        cpds.setAcquireIncrement(pro.getProperty("acquireIncrement").toInt)
        cpds.setMaxStatements(pro.getProperty("maxStatements").toInt)
      } catch {
        case e: Exception =>
          logger.error("Failed to initialize the Oracle connection pool !!!", e)
      }

      def getConnection: Connection = {
        try {
          cpds.getConnection()
        } catch {
          case ex: Exception =>
            ex.printStackTrace()
            null
        }
      }

    def close(): Unit = {
      try {
        cpds.close()
      } catch {
        case ex: Exception =>
          ex.printStackTrace()
      }
    }
  }


  class HivePool extends Serializable {
    val pro:Properties=InitializeProperties.InitHiveDBProperties()
    private val cpds: ComboPooledDataSource = new ComboPooledDataSource(true)
    try {
      cpds.setJdbcUrl(pro.getProperty("url"))
      cpds.setDriverClass(pro.getProperty("driverClass"))
      cpds.setUser(pro.getProperty("userName"))
      cpds.setPassword(pro.getProperty("passWord"))
      cpds.setMinPoolSize(pro.getProperty("minPoolSize").toInt)
      cpds.setMaxPoolSize(pro.getProperty("maxPoolSize").toInt)
      cpds.setAcquireIncrement(pro.getProperty("acquireIncrement").toInt)
      cpds.setMaxStatements(pro.getProperty("maxStatements").toInt)
    } catch {
      case e: Exception =>
        logger.error("Failed to initialize the Hive connection pool !!!", e)
    }

    def getConnection: Connection = {
      try {
        cpds.getConnection()
      } catch {
        case ex: Exception =>
          ex.printStackTrace()
          null
      }
    }

    def close(): Unit = {
      try {
        cpds.close()
      } catch {
        case ex: Exception =>
          ex.printStackTrace()
      }
    }
  }


  class PhoenixPool extends Serializable {
    val pro:Properties=InitializeProperties.InitPhoenixDBProperties()
    private val cpds: ComboPooledDataSource = new ComboPooledDataSource(true)
    try {
      cpds.setJdbcUrl(pro.getProperty("url"))
      cpds.setDriverClass(pro.getProperty("driverClass"))
      cpds.setMinPoolSize(pro.getProperty("minPoolSize").toInt)
      cpds.setMaxPoolSize(pro.getProperty("maxPoolSize").toInt)
      cpds.setAcquireIncrement(pro.getProperty("acquireIncrement").toInt)
      cpds.setMaxStatements(pro.getProperty("maxStatements").toInt)
    } catch {
      case e: Exception =>
        logger.error("Failed to initialize the Phoenix connection pool !!!", e)
    }

    def getConnection: Connection = {
      try {
        cpds.getConnection()
      } catch {
        case ex: Exception =>
          ex.printStackTrace()
          null
      }
    }

    def close(): Unit = {
      try {
        cpds.close()
      } catch {
        case ex: Exception =>
          ex.printStackTrace()
      }
    }
  }
}
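
A minimal usage sketch for the pool (the query is illustrative only). With c3p0, calling close() on a borrowed connection returns it to the pool instead of closing the physical connection:

package com.sym.common

import java.sql.Connection

object DBPoolManagerDemo {
  def main(args: Array[String]): Unit = {
    // borrow a connection from the MySQL pool
    val conn: Connection = DBPoolManager.getMysqlManager.getConnection
    if (conn != null) {
      try {
        val stmt = conn.prepareStatement("SELECT 1")
        val rs = stmt.executeQuery()
        while (rs.next()) println(rs.getInt(1))
        rs.close()
        stmt.close()
      } finally {
        conn.close() // returns the connection to the c3p0 pool
      }
    }
  }
}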

Spark SQL / JDBC operations on MySQL, Oracle, and HBase (via Phoenix)

package com.sym.common

import java.util.Properties

import org.apache.log4j.Logger
import org.apache.spark.sql._

/**
  * FileName: SparkSqlSources
  * Author:   ShaoYM
  * Date:     2019/6/14 9:04
  * Description: Registers JDBC tables (MySQL / Oracle / Hive / Phoenix) as Spark temporary views and writes DataFrames back to the databases
  */
object SparkSqlSources {

  lazy val logger=Logger.getLogger(this.getClass.getSimpleName)

  /*
      * @methods registerTable
      * @author ShaoYM
      * @date 2019/6/14 10:53
      * @param sqlContext
      * @param dbtables
      * @param dbType
      * @return Unit
      * @describe Registers database tables as temporary views (mysql/oracle/phoenix/hive)
  */
   def registerTable(sqlContext: SQLContext, dbtables: Array[String], dbType: DBType): Unit = {
        // pick a DataFrameReader according to the given dbType
        lazy val dataFrameReader: DataFrameReader = dbType match {
          case DBType.MYSQLDB => getMysqlDataFrameReader(sqlContext)
          case DBType.ORACLEDB => getOracleDataFrameReader(sqlContext)
          case DBType.PHOENIXDB => getPhoenixDataFrameReader(sqlContext)
          case DBType.HIVEDB => getHiveDataFrameReader(sqlContext)
          case _ =>
            logger.error(" The dbType doesn't exist, initialization failed !  ")
            null
        }
        // check whether initialization succeeded
        if (null == dataFrameReader) {
          logger.error("*********** dataFrameReader is null, initialization failed !  ")
          return
        }
        // register each database table as a temporary view
        for (tableName <- dbtables) {
          dbType match {
            // Phoenix table names are case-sensitive and must be quoted
            case DBType.PHOENIXDB => dataFrameReader.option("dbtable", "\"" + tableName + "\"").load().createOrReplaceTempView(tableName)
            case _ => dataFrameReader.option("dbtable", tableName).load().createOrReplaceTempView(tableName)
          }
        }

  }

   def registerMysqlTable(sqlContext: SQLContext, dbtables: Array[String]): Unit = {
    // build a MySQL DataFrameReader
    lazy val dataFrameReader: DataFrameReader = getMysqlDataFrameReader(sqlContext)
    // check whether initialization succeeded
    if (null == dataFrameReader) {
      logger.error("*********** dataFrameReader is null, initialization failed !  ")
      return
    }
    // register each database table as a temporary view
    for (tableName <- dbtables) {
        dataFrameReader.option("dbtable", tableName).load().createOrReplaceTempView(tableName)
    }
  }

   def registerOracleTable(sqlContext: SQLContext, dbtables: Array[String]): Unit = {
    // build an Oracle DataFrameReader
    lazy val dataFrameReader: DataFrameReader = getOracleDataFrameReader(sqlContext)
    // check whether initialization succeeded
    if (null == dataFrameReader) {
      logger.error("*********** dataFrameReader is null, initialization failed !  ")
      return
    }
    // register each database table as a temporary view
    for (tableName <- dbtables) {
      dataFrameReader.option("dbtable", tableName).load().createOrReplaceTempView(tableName)
    }
  }

   def registerPhoenixTable(sqlContext: SQLContext, dbtables: Array[String]): Unit = {
    // build a Phoenix DataFrameReader
    lazy val dataFrameReader: DataFrameReader = getPhoenixDataFrameReader(sqlContext)
    // check whether initialization succeeded
    if (null == dataFrameReader) {
      logger.error("*********** dataFrameReader is null, initialization failed !  ")
      return
    }
    // register each database table as a temporary view (Phoenix table names must be quoted)
    for (tableName <- dbtables) {
      dataFrameReader.option("dbtable", "\"" + tableName + "\"").load().createOrReplaceTempView(tableName)
    }
  }





  /*
      * @methods writeTable
      * @author ShaoYM
      * @date 2019/6/28 16:54
      * @param df
      * @param insertTableName
      * @return Unit
      * @describe Writes the given DataFrame to the database; mysql/oracle create the table if it does not exist, phoenix requires the target table to exist
  */
  def writePhoenixTable(df: DataFrame,insertTableName:String): Unit ={
      lazy  val pro=InitializeProperties.InitPhoenixDBProperties()
          df.write
          .format("org.apache.phoenix.spark")
          .mode(SaveMode.Overwrite)
          .option("table", insertTableName)
          .option("zkUrl", pro.getProperty("url"))
          .save()
  }

  def writeOracleTable(df: DataFrame,insertTableName:String): Unit ={
    lazy  val pro=InitializeProperties.InitOrcleDBProperties()

      lazy val prop = new Properties()
      prop.put("user", pro.getProperty("userName"))
      prop.put("password", pro.getProperty("passWord"))
      df.write.mode(SaveMode.Append).jdbc(pro.getProperty("url"),insertTableName,prop)

  }

  def writeMysqlTable(df: DataFrame, insertTableName: String): Unit = {
    lazy  val pro=InitializeProperties.InitMysqlDBProperties()
    lazy val prop = new Properties()
    prop.put("user", pro.getProperty("userName"))
    prop.put("password", pro.getProperty("passWord"))
    df.write.mode(SaveMode.Append).jdbc(pro.getProperty("url"),insertTableName,prop)

  }

  def writeHiveTable(df: DataFrame,insertTableName:String): Unit ={
    lazy  val pro=InitializeProperties.InitHiveDBProperties()
    lazy val prop = new Properties()
    prop.put("user", pro.getProperty("userName"))
    prop.put("password", pro.getProperty("passWord"))
    df.write.mode(SaveMode.Append).jdbc(pro.getProperty("url"),insertTableName,prop)
  }

  /*
      * @methods getMysqlDataFrameReader
      * @author ShaoYM
      * @date 2019/6/14 10:59
      * @param sqlContext
      * @return _root_.org.apache.spark.sql.DataFrameReader
      * @describe Returns a DataFrameReader pre-configured with the MySQL connection info
  */
  def getMysqlDataFrameReader(sqlContext: SQLContext): DataFrameReader ={
    lazy val pro=InitializeProperties.InitMysqlDBProperties()
    sqlContext.read.format("jdbc")
      .option("url",pro.getProperty("url"))
      .option("driver",pro.getProperty("driverClass"))
      .option("user",pro.getProperty("userName"))
      .option("password",pro.getProperty("passWord"))
  }

  /*
      * @methods getOracleDataFrameReader
      * @author ShaoYM
      * @date 2019/7/1 9:16
      * @param sqlContext
      * @return _root_.org.apache.spark.sql.DataFrameReader
      * @describe Returns a DataFrameReader pre-configured with the Oracle connection info
  */
  def getOracleDataFrameReader(sqlContext: SQLContext): DataFrameReader ={
    lazy val pro=InitializeProperties.InitOrcleDBProperties()
    sqlContext.read.format("jdbc")
      .option("url",pro.getProperty("url"))
      .option("driver",pro.getProperty("driverClass"))
      .option("user",pro.getProperty("userName"))
      .option("password",pro.getProperty("passWord"))
  }

  /*
      * @methods getHiveDataFrameReader
      * @author ShaoYM
      * @date 2019/7/1 9:17
      * @param sqlContext
      * @return _root_.org.apache.spark.sql.DataFrameReader
      * @describe Returns a DataFrameReader pre-configured with the Hive JDBC connection info
  */
  def getHiveDataFrameReader(sqlContext: SQLContext): DataFrameReader ={
    lazy val pro=InitializeProperties.InitHiveDBProperties()
    sqlContext.read.format("jdbc")
      .option("url",pro.getProperty("url"))
      .option("driver",pro.getProperty("driverClass"))
      .option("user",pro.getProperty("userName"))
      .option("password",pro.getProperty("passWord"))
  }

  /*
      * @methods getPhoenixDataFrameReader
      * @author ShaoYM
      * @date 2019/7/1 9:17
      * @param sqlContext
      * @return _root_.org.apache.spark.sql.DataFrameReader
      * @describe Returns a DataFrameReader pre-configured with the Phoenix JDBC connection info (no user/password needed here)
  */
  def getPhoenixDataFrameReader(sqlContext: SQLContext): DataFrameReader ={
    lazy val pro=InitializeProperties.InitPhoenixDBProperties()
    sqlContext.read.format("jdbc")
      .option("url",pro.getProperty("url"))
      .option("driver",pro.getProperty("driverClass"))
  }


  def main(args: Array[String]): Unit = {

  }

}
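
The code above references a DBType type that is not shown in this post; here is a minimal sketch of one way to define it, plus an example call (the table name t_user and the query are hypothetical):

package com.sym.common

// one possible shape for the DBType referenced by registerTable (an assumption, not the original definition)
sealed trait DBType
object DBType {
  case object MYSQLDB extends DBType
  case object ORACLEDB extends DBType
  case object PHOENIXDB extends DBType
  case object HIVEDB extends DBType
}

Example call:

import org.apache.spark.sql.SparkSession

object SparkSqlSourcesDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("SparkSqlSourcesDemo").master("local[*]").getOrCreate()
    // register the hypothetical MySQL table t_user as a temporary view, then query it
    SparkSqlSources.registerTable(spark.sqlContext, Array("t_user"), DBType.MYSQLDB)
    spark.sql("SELECT COUNT(*) FROM t_user").show()
    spark.stop()
  }
}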
