ScalaConn

package DAO

import java.sql._
import java.text.SimpleDateFormat
import java.util.{Calendar, Date, Properties}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.client.{Connection, Table}
import org.apache.hadoop.hbase.filter.PageFilter
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.slf4j.LoggerFactory

object ScalaConn {
  val LOG = LoggerFactory.getLogger(getClass)
  val driver = "com.mysql.jdbc.Driver"
  val jdbcUrl = "jdbc:mysql://192.168.100.233:3306/sp2p628"
  val MYSQL_USERNAME: String = "root"
  val MYSQL_PWD: String = "dzwang**"
  val MYSQL_CONNECTION_URL: String = "jdbc:mysql://192.168.100.233:3306/sp2p628"

  // Intended warehouse location (currently unused; the hardcoded Windows path below is what the config uses).
  val warehouseLocation = "file:${system:user.dir}/spark-warehouse"

  val spark = SparkSession
    .builder()
    .master("local[*]") // e.g. "spark://192.168.100.110:7077" for the standalone cluster
    .appName("SparkSQL")
    .config("spark.executor.memory", "2g")
    .config("spark.sql.warehouse.dir", "file:///D:/BigData/spark-warehouse")
    .getOrCreate()

  /** Register the MySQL JDBC driver and open a connection to the sp2p628 database; returns null on failure. */
  def getJdbcConn: java.sql.Connection = {
    var connection: java.sql.Connection = null
    try {
      Class.forName(driver)
      connection = DriverManager.getConnection(jdbcUrl, MYSQL_USERNAME, MYSQL_PWD)
      // Example query usage:
      //   val statement = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)
      //   val resultSet = statement.executeQuery("select name, password from scala_t")
      //   while (resultSet.next()) {
      //     val name = resultSet.getString("name")
      //     val password = resultSet.getString("password")
      //   }
    } catch {
      case e: Exception => LOG.error(e.toString)
    }
    connection
  }
  /** Read a table from the sp2p628 MySQL database as a DataFrame. */
  def getP2PDataFrame(tableName: String): DataFrame = {
    val property = new Properties()
    val url = "jdbc:mysql://192.168.100.233:3306/sp2p628"
    property.put("user", "root")
    property.put("password", "dzwang**")
    spark.read.jdbc(url, tableName, property)
  }

  /** Read a table from the hadoop MySQL database as a DataFrame. */
  def getHadoopDataFrame(tableName: String): DataFrame = {
    val property = new Properties()
    val url = "jdbc:mysql://192.168.55.218:3306/hadoop"
    property.put("user", "root")
    property.put("password", "dzwang**")
    spark.read.jdbc(url, tableName, property)
  }

  /** Read a table from the ftoulanaly MySQL database as a DataFrame. */
  def getHadoopFtoulanalyDataFrame(tableName: String): DataFrame = {
    val property = new Properties()
    val url = "jdbc:mysql://192.168.55.218:3306/ftoulanaly"
    property.put("user", "root")
    property.put("password", "dzwang**")
    spark.read.jdbc(url, tableName, property)
  }

  /** Read a table from the ftcrawler MySQL database as a DataFrame. */
  def getHadoopFtcrawlerDataFrame(tableName: String): DataFrame = {
    val property = new Properties()
    val url = "jdbc:mysql://192.168.55.218:3306/ftcrawler"
    property.put("user", "root")
    property.put("password", "dzwang**")
    spark.read.jdbc(url, tableName, property)
  }

  /** Read a table from the dedetest MySQL database as a DataFrame. */
  def getHadoopDedetestDataFrame(tableName: String): DataFrame = {
    val property = new Properties()
    val url = "jdbc:mysql://192.168.55.218:3306/dedetest"
    property.put("user", "root")
    property.put("password", "dzwang**")
    spark.read.jdbc(url, tableName, property)
  }

  /** Read a table from the ftoul_shop MySQL database as a DataFrame. */
  def getShopDataFrame(tableName: String): DataFrame = {
    val property = new Properties()
    val url = "jdbc:mysql://192.168.100.239:3306/ftoul_shop"
    property.put("user", "root")
    property.put("password", "dzwang**")
    spark.read.jdbc(url, tableName, property)
  }
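  // The six helpers above differ only in the JDBC URL. As a sketch (not part of the
  // original code), they could be collapsed into one generic reader; the name
  // getJdbcDataFrame is hypothetical.
  def getJdbcDataFrame(url: String, tableName: String): DataFrame = {
    val property = new Properties()
    property.put("user", "root")
    property.put("password", "dzwang**")
    spark.read.jdbc(url, tableName, property)
  }
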
//  def getHbaseDataFrame(tableName: String): DataFrame = {
//    val conf = getHbaseConf
//    conf.set(TableInputFormat.INPUT_TABLE, tableName)
//    import spark.implicits._
//    val hbaseRDD = spark.sparkContext.newAPIHadoopRDD(conf, classOf[TableInputFormat],
//      classOf[ImmutableBytesWritable], classOf[Result])
//    val hbaseDF = hbaseRDD.map(r => (
//      Bytes.toString(r._2.getValue(Bytes.toBytes("info"), Bytes.toBytes("customer_id"))),
//      Bytes.toString(r._2.getValue(Bytes.toBytes("info"), Bytes.toBytes("create_id")))
//    )).toDF("customer_id", "create_id")
//    hbaseDF.createOrReplaceTempView("shop")
//    hbaseDF
//  }

  /**
   * Get the day before the specified date (yyyyMMdd).
   */
  def getSpecifiedDayBefore(specifiedDay: String): String = {
    val c: Calendar = Calendar.getInstance()
    var date: Date = null
    try {
      date = new SimpleDateFormat("yyyyMMdd").parse(specifiedDay)
    } catch {
      case e: Exception => e.printStackTrace()
    }
    c.setTime(date)
    val day = c.get(Calendar.DATE)
    c.set(Calendar.DATE, day - 1)
    new SimpleDateFormat("yyyyMMdd").format(c.getTime())
  }

  /**
   * Get the day after the specified date (yyyyMMdd).
   */
  def getSpecifiedDayAfter(specifiedDay: String): String = {
    val c: Calendar = Calendar.getInstance()
    var date: Date = null
    try {
      date = new SimpleDateFormat("yyyyMMdd").parse(specifiedDay)
    } catch {
      case e: Exception => e.printStackTrace()
    }
    c.setTime(date)
    val day = c.get(Calendar.DATE)
    c.set(Calendar.DATE, day + 1)
    new SimpleDateFormat("yyyyMMdd").format(c.getTime())
  }
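
  // Illustrative values (not from the original code):
  //   getSpecifiedDayBefore("20180301") == "20180228"
  //   getSpecifiedDayAfter("20181231")  == "20190101"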

  def getHbaseConf: Configuration = {
    val conf: Configuration = HBaseConfiguration.create
    conf.set("hbase.zookeeper.property.clientPort", "2181")
    conf.set("spark.executor.memory", "3000m")
    conf.set("hbase.zookeeper.quorum", "192.168.100.110,192.168.100.111,192.168.100.112")
    conf.set("hbase.master", "192.168.100.110:16020")
    conf.set("hbase.rootdir", "hdfs://192.168.100.110:9000/hbase")
    conf
  }
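
  // Sketch (not part of the original code) of one way to build the Connection and
  // Table that getMaxDate below expects, using the standard HBase client API; the
  // helper name getHbaseTable is hypothetical, and the caller is responsible for
  // closing both handles.
  def getHbaseTable(tableName: String): (Connection, Table) = {
    val hconn: Connection = ConnectionFactory.createConnection(getHbaseConf)
    val table: Table = hconn.getTable(TableName.valueOf(tableName))
    (hconn, table)
  }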

  /** Parse "yyyy-MM-dd HH:mm:ss" into a Timestamp; falls back to the current time on failure. */
  def GetStampByTime(time: String): Timestamp = {
    var stamp: Timestamp = new Timestamp(System.currentTimeMillis)
    val sdf: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    try {
      val date: Date = sdf.parse(time)
      stamp = new Timestamp(date.getTime)
    } catch {
      case e: Exception => LOG.error(e.toString)
    }
    stamp
  }

  /** Parse "yyyyMMddHHmmss" into a Timestamp; falls back to the current time on failure. */
  def GetStampByTime1(time: String): Timestamp = {
    var stamp: Timestamp = new Timestamp(System.currentTimeMillis)
    val sdf: SimpleDateFormat = new SimpleDateFormat("yyyyMMddHHmmss")
    try {
      val date: Date = sdf.parse(time)
      stamp = new Timestamp(date.getTime)
    } catch {
      case e: Exception => LOG.error(e.toString)
    }
    stamp
  }
  /** Return the date i days from today, formatted as yyyyMMdd. */
  def evaluate(i: Int): String = {
    val date: Date = new Date() // current time
    val sf: SimpleDateFormat = new SimpleDateFormat("yyyyMMdd")
    val time: Long = (date.getTime() / 1000) + 60 * 60 * 24 * i // shift by i days, in seconds
    date.setTime(time * 1000) // back to milliseconds
    sf.format(date)
  }
  /** Return the date i days from today in the given format; an empty fmt defaults to "yyyy-MM-dd HH:mm:ss". */
  def getYesterday(i: Int, fmt: String): String = {
    val ft = if (fmt.isEmpty) "yyyy-MM-dd HH:mm:ss" else fmt
    val dateFormat: SimpleDateFormat = new SimpleDateFormat(ft)
    val cal: Calendar = Calendar.getInstance()
    cal.add(Calendar.DATE, i)
    dateFormat.format(cal.getTime())
  }
  /**
   * Scan the given HBase table in reverse row-key order and return the value of
   * colFamily:dt from the first (i.e. latest) row. Falls back to
   * "2014-12-01 00:00:00" when the column is empty or the scan fails.
   */
  def getMaxDate(conf: Configuration, hconn: Connection, table: Table, colFamily: String, dt: String): String = {
    var maxDate: String = "2014-12-01 00:00:00"
    var results: ResultScanner = null
    try {
      val scan: Scan = new Scan
      scan.setReversed(true)
      scan.setMaxVersions()
      scan.setMaxResultSize(1)
      scan.setFilter(new PageFilter(1))
      scan.addFamily(Bytes.toBytes(colFamily))
      scan.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(dt))

      results = table.getScanner(scan)
      var i = 0
      import scala.collection.JavaConversions._
      for (r <- results) {
        if (i == 0) {
          maxDate = new String(r.getValue(Bytes.toBytes(colFamily), Bytes.toBytes(dt)))
        }
        i = i + 1
      }
      if ((maxDate == null) || (maxDate == "") || (maxDate == "null")) {
        maxDate = "2014-12-01 00:00:00"
      }
      LOG.info("maxDate=" + maxDate)
    } catch {
      case e: Exception => LOG.error(e.toString)
    } finally {
      try {
        if (results != null) results.close()
      } catch {
        case e: Exception => LOG.error(e.toString)
      }
    }
    maxDate
  }
  }
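
For reference, a minimal usage sketch from a driver program; the table name "t_user" and the literal dates are placeholders, not taken from the original code.

import DAO.ScalaConn

object ScalaConnDemo {
  def main(args: Array[String]): Unit = {
    // Pull a MySQL table into Spark SQL and query it.
    val users = ScalaConn.getP2PDataFrame("t_user")
    users.createOrReplaceTempView("t_user")
    ScalaConn.spark.sql("select count(*) from t_user").show()

    // Date helpers.
    println(ScalaConn.getSpecifiedDayBefore("20180301")) // 20180228
    println(ScalaConn.getYesterday(-1, "yyyyMMdd"))      // yesterday as yyyyMMdd
  }
}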
