import java.text.SimpleDateFormat
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.{SparkConf, SparkContext}
object Jackson{
def main(args: Array[String]): Unit = {
if (args.length <2) {
System.err.println("Usage:ParseLog <inputPath><outputPath>")
System.exit(1)
}
val conf = new SparkConf().setAppName("ParseLog")
val sc = new SparkContext(conf)
val fileSystem = FileSystem.get(sc.hadoopConfiguration)
val outputPath = new Path(args(1))
if (fileSystem.exists(outputPath)) {
fileSystem.delete(outputPath, true)
}
def init():ObjectMapper = {
val mapper = new ObjectMapper()
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
mapper.registerModule(DefaultScalaModule)
// spark scala 使用jackson 解析 (Spark/Scala: parsing JSON with Jackson)
// 最新推荐文章于 2024-05-03 13:10:58 发布 (latest recommended article published 2024-05-03 13:10:58)
// NOTE(review): the two lines above are stray text from the web page this code was copied from,
// not Scala source — commented out so the file can compile. The definitions of init(), main(),
// and object Jackson are truncated here (missing closing braces and the mapper return).