import java.text.DecimalFormat
import com.alibaba.fastjson.JSON
import com.donews.data.AppConfig
import com.typesafe.config.ConfigFactory
import org.apache.spark.sql.types.{StructField, StructType}
import org.apache.spark.sql.{Row, SaveMode, DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.LoggerFactory
/**
* Created by silentwolf on 2016/6/3.
*/
/**
 * Per-device user profile record used as the row schema when building a
 * Spark DataFrame (see the driver in [[UserTagTable]]).
 *
 * NOTE(review): field names deliberately use SCREAMING_SNAKE / upper case
 * rather than Scala's lowerCamelCase — they presumably map 1:1 onto external
 * column names (Spark SQL / fastjson derive schema from these names via
 * reflection). Do NOT rename them without confirming the downstream table
 * and JSON key names.
 *
 * @param SUUID unique device/session identifier — assumed unique per user; TODO confirm against producer
 * @param MAN   gender score, male   — presumably a probability/weight in [0,1]; verify with the model that emits it
 * @param WOMAN gender score, female — see note on MAN
 */
case class UserTag(SUUID: String,
MAN: Float,
WOMAN: Float,
// Age-bucket scores (10–19 … 50–59). Same presumed [0,1] scale as gender — TODO confirm.
AGE10_19: Float,
AGE20_29: Float,
AGE30_39: Float,
AGE40_49: Float,
AGE50_59: Float,
// Interest-category scores. One Float per category; semantics (probability vs.
// raw count vs. normalized weight) are not visible from this file — confirm
// with the upstream tagging job before interpreting the values.
GAME: Float,
MOVIE: Float,
MUSIC: Float,
ART: Float,
POLITICS_NEWS: Float,
FINANCIAL: Float,
EDUCATION_TRAINING: Float,
HEALTH_CARE: Float,
TRAVEL: Float,
AUTOMOBILE: Float,
HOUSE_PROPERTY: Float,
CLOTHING_ACCESSORIES: Float,
BEAUTY: Float,
IT: Float,
BABY_PRODUCT: Float,
FOOD_SERVICE: Float,
HOME_FURNISHING: Float,
SPORTS: Float,
OUTDOOR_ACTIVITIES: Float,
MEDICINE: Float
)
object UserTagTable {
val LOG = LoggerFactory.getLogger(UserOverviewFirst.getClass)
val REP_HOME = s"${AppConfig.HDFS_MASTER}/${AppConfig.HDFS_REP}"
def main(args: Array[String]) {
var startTime = System.currentTimeMillis()
val conf: com.typesafe.config.Config = ConfigFactory.load()
val sc = new SparkContext()
val sqlContext = new SQLContext(sc)
var df1: DataFrame = null
if (args.length == 0) {
println("请输入: appkey , StartTime : 2016-04-10 ,StartEnd :2016-04-11")
}
else {
var ap