package com.adtime.udf.main;
import java.sql.Date;
import java.text.SimpleDateFormat;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.ql.exec.UDF;
/**
 * Hive UDF that adds a (possibly fractional or negative) number of hours to a
 * datetime string and returns the shifted datetime in the same format.
 * Example: input "2017-01-05 19:34:17" with add "1" returns "2017-01-05 20:34:17".
 *
 * @author leen
 * @date Mar 13, 2017 10:29:54 AM
 */
public class UdfReturnOneHourLater extends UDF {

    /** Pattern used both to parse the input and to format the result. */
    private static final String PATTERN = "yyyy-MM-dd HH:mm:ss";

    /**
     * Shifts a datetime string by {@code add} hours.
     *
     * @param datetime datetime in "yyyy-MM-dd HH:mm:ss" form, optionally followed by a
     *                 fractional-second suffix such as ".0" or ".123" (the suffix is ignored)
     * @param add      number of hours to add; may be fractional ("0.5") or negative ("-1")
     * @return the shifted datetime formatted as "yyyy-MM-dd HH:mm:ss", or {@code null}
     *         when either argument is blank, malformed, or not a valid date
     * @throws Exception never in practice; kept for interface compatibility with callers
     */
    public String evaluate(String datetime, String add) throws Exception {
        // null / "" / " " datetime, or one too short for "yyyy-MM-dd HH:mm:ss" -> null
        if (StringUtils.isBlank(datetime) || datetime.length() < 19) {
            return null;
        }
        // The hour offset must not be blank either.
        if (StringUtils.isBlank(add)) {
            return null;
        }
        // Strip an optional fractional-second suffix (".0", ".12", ".123", ...).
        // Generalizes the original check, which accepted only a length of exactly 21,
        // and rejects trailing garbage that is not a '.'-prefixed suffix.
        if (datetime.length() > 19) {
            if (datetime.charAt(19) != '.') {
                return null;
            }
            datetime = datetime.substring(0, 19);
        }
        // Hours may be fractional; a non-numeric value yields null instead of an
        // exception that would abort the whole Hive query.
        double hours;
        try {
            hours = Double.parseDouble(add);
        } catch (NumberFormatException e) {
            return null;
        }
        // SimpleDateFormat is not thread-safe, so a fresh instance is created per call.
        SimpleDateFormat df = new SimpleDateFormat(PATTERN);
        // Strict parsing: reject impossible dates such as "2017-13-05" instead of
        // silently rolling them over into the following year/month.
        df.setLenient(false);
        long base;
        try {
            base = df.parse(datetime).getTime();
        } catch (java.text.ParseException e) {
            // Malformed rows yield null rather than failing the query.
            return null;
        }
        // Apply the offset in milliseconds (same truncation semantics as the original:
        // the double sum is cast to long once, after the addition).
        long shifted = (long) (base + hours * 60 * 60 * 1000);
        return df.format(new Date(shifted));
    }

    /** Ad-hoc smoke test; not used by Hive. */
    public static void main(String[] args) throws Exception {
        UdfReturnOneHourLater uu = new UdfReturnOneHourLater();
        System.out.println(uu.evaluate("2017-01-05 19:34:19.0", "-0.5")); // 2017-01-05 19:04:19
        System.out.println(uu.evaluate("2017-01-05 19:34:19", "0.5"));   // 2017-01-05 20:04:19
    }
}