DataFrame: Converting a Scala Class to a DataFrame with Spark SQL

import java.text.DecimalFormat

import com.alibaba.fastjson.JSON
import com.donews.data.AppConfig
import com.typesafe.config.ConfigFactory
import org.apache.spark.sql.types.{StructField, StructType}
import org.apache.spark.sql.{Row, SaveMode, DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.LoggerFactory

/**
 * Created by silentwolf on 2016/6/3.
 */

case class UserTag(SUUID: String,
                   MAN: Float,
                   WOMAN: Float,
                   AGE10_19: Float,
                   AGE20_29: Float,
                   AGE30_39: Float,
                   AGE40_49: Float,
                   AGE50_59: Float,
                   GAME: Float,
                   MOVIE: Float,
                   MUSIC: Float,
                   ART: Float,
                   POLITICS_NEWS: Float,
                   FINANCIAL: Float,
                   EDUCATION_TRAINING: Float,
                   HEALTH_CARE: Float,
                   TRAVEL: Float,
                   AUTOMOBILE: Float,
                   HOUSE_PROPERTY: Float,
                   CLOTHING_ACCESSORIES: Float,
                   BEAUTY: Float,
                   IT: Float,
                   BABY_PRODUCT: Float,
                   FOOD_SERVICE: Float,
                   HOME_FURNISHING: Float,
                   SPORTS: Float,
                   OUTDOOR_ACTIVITIES: Float,
                   MEDICINE: Float)
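Before the driver code below, it helps to see the conversion the title refers to in isolation. The following is a minimal sketch, assuming a local Spark 1.x setup; the trimmed-down UserTagMini class and the sample values are made up purely to keep it short. It shows the reflection-based route: import sqlContext.implicits._ lets an RDD of case-class instances be turned into a DataFrame with toDF(), with column names and types taken from the constructor.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Trimmed-down stand-in for UserTag, used only to keep the example short.
case class UserTagMini(SUUID: String, MAN: Float, WOMAN: Float, GAME: Float)

object CaseClassToDF {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("CaseClassToDF").setMaster("local[*]"))
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._   // enables rdd.toDF() for RDDs of case-class instances

    // Column names and types are inferred from the case-class constructor by reflection.
    val rdd = sc.parallelize(Seq(
      UserTagMini("suuid-001", 0.8f, 0.2f, 0.6f),
      UserTagMini("suuid-002", 0.3f, 0.7f, 0.1f)
    ))
    val df = rdd.toDF()

    df.registerTempTable("user_tag")   // Spark 1.x temp-table API
    sqlContext.sql("SELECT SUUID, GAME FROM user_tag WHERE MAN > 0.5").show()

    sc.stop()
  }
}

The same call works for the full 28-field UserTag, with one caveat: Scala 2.10 limits case classes to 22 constructor parameters, so a class this wide needs Scala 2.11 or later.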

object UserTagTable {

  val LOG = LoggerFactory.getLogger(UserOverviewFirst.getClass)

  val REP_HOME = s"${AppConfig.HDFS_MASTER}/${AppConfig.HDFS_REP}"

  def main(args: Array[String]) {
    var startTime = System.currentTimeMillis()
    // Application settings loaded from the classpath (application.conf / reference.conf)
    val conf: com.typesafe.config.Config = ConfigFactory.load()
    val sc = new SparkContext()
    val sqlContext = new SQLContext(sc)
    var df1: DataFrame = null
    if (args.length == 0) {
      println("Usage: appkey, StartTime: 2016-04-10, StartEnd: 2016-04-11")
    }
    else {
      var ap
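The original listing is cut off inside the else branch, so the rest of the program is not reproduced here. Since Row, StructField and StructType are already imported at the top, it is worth noting the second, schema-based way of building the same kind of DataFrame: instead of relying on case-class reflection, the schema is declared explicitly and the data is supplied as untyped Rows. The sketch below illustrates that route under the same local-Spark assumption as above (the reduced column set and sample rows are made up); it is not a reconstruction of the missing code.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.types.{FloatType, StringType, StructField, StructType}

object RowSchemaToDF {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("RowSchemaToDF").setMaster("local[*]"))
    val sqlContext = new SQLContext(sc)

    // The schema is declared by hand instead of being inferred from a case class.
    val schema = StructType(Seq(
      StructField("SUUID", StringType, nullable = false),
      StructField("MAN",   FloatType,  nullable = true),
      StructField("WOMAN", FloatType,  nullable = true)
    ))

    // Each record is an untyped Row whose positions and types must match the schema.
    val rows = sc.parallelize(Seq(
      Row("suuid-001", 0.8f, 0.2f),
      Row("suuid-002", 0.3f, 0.7f)
    ))

    val df = sqlContext.createDataFrame(rows, schema)
    df.printSchema()
    df.show()

    sc.stop()
  }
}

The explicit-schema route is the one to reach for when the columns are only known at runtime (for example, read from configuration) and a fixed case class cannot be written in advance.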
