package DAO;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrameReader;
import org.apache.spark.sql.DataFrameWriter;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Connection and conversion helpers shared by the DAO layer:
 * <ul>
 *   <li>a local {@link SparkSession} plus a JDBC loader for MySQL tables,</li>
 *   <li>a factory for the cluster's HBase client {@link Configuration},</li>
 *   <li>epoch-millis / timestamp string conversion utilities,</li>
 *   <li>a reverse-scan lookup of the newest value of an HBase column.</li>
 * </ul>
 *
 * <p>Not thread-safe beyond what the underlying Spark/HBase clients guarantee.
 */
public class Conn {

    private static final Logger LOG = LoggerFactory.getLogger(Conn.class);

    // Fallback returned by getMaxDate() when the scan finds nothing.
    private static final String DEFAULT_MAX_DATE = "2013-07-02 00:00:00";

    // NOTE(review): credentials are hard-coded; move them to external
    // configuration — they should not live in source control.
    private static final String MYSQL_USERNAME = "root";
    private static final String MYSQL_PWD = "dzwang**";
    private static final String MYSQL_CONNECTION_URL = "jdbc:mysql://192.168.100.233:3306/sp2p628";

    /**
     * Local-mode SparkSession (change master to the cluster URL, e.g.
     * spark://192.168.100.110:7077, for cluster runs).
     */
    public SparkSession spark = SparkSession
            .builder()
            .master("local[*]")
            .config("spark.sql.warehouse.dir", "file:///D:/BigData/spark-warehouse")
            .getOrCreate();

    /**
     * Loads a MySQL table into a Spark DataFrame over JDBC.
     *
     * @param tableName the MySQL table (or sub-query alias) to load
     * @return the table contents as a {@code Dataset<Row>}
     */
    public Dataset<Row> getDataFrame(String tableName) {
        Map<String, String> options = new HashMap<>();
        options.put("url", MYSQL_CONNECTION_URL);
        options.put("user", MYSQL_USERNAME);
        options.put("password", MYSQL_PWD);
        options.put("dbtable", tableName);
        return spark.read().format("jdbc").options(options).load();
    }

    /**
     * Builds an HBase client configuration for the 192.168.100.110 cluster.
     *
     * @return a configuration pointing at the cluster's ZooKeeper quorum
     */
    public static Configuration getHbaseConf() {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        conf.set("hbase.zookeeper.quorum", "192.168.100.110,192.168.100.111,192.168.100.112");
        conf.set("hbase.master", "192.168.100.110:16020");
        conf.set("hbase.rootdir", "hdfs://192.168.100.110:9000/hbase");
        return conf;
    }

    /**
     * Formats an epoch-millisecond timestamp as {@code yyyy-MM-dd HH:mm:ss}.
     *
     * @param timestamp milliseconds since the epoch, as a decimal string
     * @return the formatted date-time string
     * @throws NumberFormatException if {@code timestamp} is not a parseable long
     */
    public String GetTimeByStamp(String timestamp) {
        long millis = Long.parseLong(timestamp);
        // A fresh SimpleDateFormat per call avoids its thread-safety problem.
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        String result = format.format(new Date(millis));
        LOG.debug("Time : {}", result);
        return result;
    }

    /**
     * Parses a {@code yyyy-MM-dd HH:mm:ss} string into a {@link Timestamp}.
     * Falls back to the current time if the input cannot be parsed
     * (preserves the original method's best-effort contract).
     */
    public Timestamp GetStampByTime(String time) {
        return parseTimestamp(time, "yyyy-MM-dd HH:mm:ss");
    }

    /**
     * Parses a compact {@code yyyyMMddHHmmss} string into a {@link Timestamp}.
     * Falls back to the current time if the input cannot be parsed.
     */
    public Timestamp GetStampByTime1(String time) {
        return parseTimestamp(time, "yyyyMMddHHmmss");
    }

    /** Shared parsing logic for the two GetStampByTime variants. */
    private static Timestamp parseTimestamp(String time, String pattern) {
        try {
            Date date = new SimpleDateFormat(pattern).parse(time);
            return new Timestamp(date.getTime());
        } catch (ParseException e) {
            // Log with the cause (was printStackTrace) and keep the original
            // fallback behavior: return "now" instead of failing the caller.
            LOG.error("Failed to parse '{}' with pattern '{}'; falling back to current time", time, pattern, e);
            return new Timestamp(System.currentTimeMillis());
        }
    }

    /**
     * Returns the newest value of column {@code colFamily:dt} by scanning the
     * table in reverse row order, limited to a single row.
     *
     * @param conf      HBase configuration (unused; kept for interface compatibility)
     * @param hconn     HBase connection (unused; kept for interface compatibility)
     * @param table     the table to scan; caller owns its lifecycle
     * @param colFamily column family holding the date column
     * @param dt        column qualifier holding the date value
     * @return the newest stored value, or {@value #DEFAULT_MAX_DATE} when the
     *         scan finds nothing or fails
     */
    public String getMaxDate(Configuration conf, Connection hconn, Table table, String colFamily, String dt) throws Exception {
        String maxDate = DEFAULT_MAX_DATE;
        ResultScanner results = null;
        try {
            Scan scan = new Scan();
            scan.setReversed(true);            // newest row first
            scan.setMaxResultSize(1);
            scan.setFilter(new PageFilter(1L)); // stop after one row
            byte[] family = Bytes.toBytes(colFamily);
            byte[] qualifier = Bytes.toBytes(dt);
            scan.addColumn(family, qualifier);
            results = table.getScanner(scan);
            for (Result r : results) {
                byte[] value = r.getValue(family, qualifier);
                // Bug fix: the original called new String(getValue(...)) without a
                // null check (NPE on a missing cell) and then tested
                // maxDate.equals(null), which can never be true.
                if (value != null) {
                    maxDate = Bytes.toString(value);
                }
            }
            if (maxDate == null || maxDate.isEmpty()) {
                maxDate = DEFAULT_MAX_DATE;
            }
        } catch (Exception e) {
            // Log with the full stack trace (was e.toString(), which drops it).
            LOG.error("getMaxDate scan failed for {}:{}", colFamily, dt, e);
        } finally {
            if (results != null) {
                try {
                    results.close();
                } catch (Exception e) {
                    LOG.error("Failed to close ResultScanner", e);
                }
            }
        }
        return maxDate;
    }
}
// Conn — Java version
// Latest recommended article published 2022-09-11 22:46:28