spark篇3:spark操作ftp

6 篇文章 0 订阅
1 篇文章 0 订阅

spark篇3:spark操作ftp

废话不多说,直接上干货

package com.iflytek.ftp

import java.text.SimpleDateFormat
import java.util._

import com.alibaba.fastjson.JSON
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * Example job: read JSON files from an FTP directory with Spark, extract the
 * nested `body.body.{id,name}` fields from each file, and show a filtered sample.
 *
 * Run locally (`master("local")`); the FTP URL below is a placeholder of the form
 * ftp://user:password@host:port/dir/ — fill in real credentials before running.
 */
object spark2ftp {
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder()
      .master("local")
      .appName("appName")
      .config("spark.testing.memory", "471859200")
      .getOrCreate()

    // FIX: required for createDataset/toDF on tuples — brings the implicit
    // Encoders into scope. Without it the original code does not compile.
    import sparkSession.implicits._

    // Switch to the carbondata database (no interpolation needed, plain string).
    sparkSession.sql("use carbondata")

    // Placeholder FTP source: ftp://账号:密码@ip:端口/目录/ (user:password@host:port/dir/).
    val dataSource = "ftp://账号:密码@ip:端口/目录/"

    // wholeTextFiles reads every file under the directory as (path, content)
    // pairs; each file is expected to be one complete JSON document.
    val ftpInput: RDD[(String, String)] = sparkSession.sparkContext.wholeTextFiles(dataSource)
    val value: RDD[String] = ftpInput.map(_._2)

    // Extract the nested body.body object and pull out its "id" and "name"
    // fields. getString is used instead of get(...).toString so a missing key
    // yields null instead of throwing a NullPointerException mid-job.
    val xq_sb: RDD[(String, String)] = value.map { json =>
      val root = JSON.parseObject(json)
      val innerBody = root.getJSONObject("body").getJSONObject("body")
      (innerBody.getString("id"), innerBody.getString("name"))
    }

    val ds: Dataset[(String, String)] = sparkSession.createDataset(xq_sb)
    val frame: DataFrame = ds.toDF("sbmc", "xqbm")

    // NOTE(review): the original chained .where("1=1 and 2=2"), a no-op filter,
    // and built an unused date string (val rq) — both removed.
    frame.filter("sbmc like '%名称%'").limit(2).select("sbmc").show()

    // Release local Spark resources when the demo finishes.
    sparkSession.stop()
  }
}
// Spark reading text files from an SFTP server on Linux. The original jar
// (com.springml.spark.sftp) only supported json, csv, etc.; support for bcp
// and txt files was added. Example below:
public static void main(String[] args) throws Exception {
    SparkConf conf = new SparkConf().setMaster("local").setAppName("SparkDataFrame");
    JavaSparkContext javacontext = new JavaSparkContext(conf);
    SQLContext sqlContext = new SQLContext(javacontext);
    // Load a .bcp file from the SFTP server via the springml sftp connector
    // ("fileType" = "bcp" is the custom file type added to the connector).
    Dataset<Row> df = sqlContext.read().
        format("com.springml.spark.sftp").
        option("host", "192.168.1.3").
        option("username", "root").
        option("password", "111111").
        option("fileType", "bcp").
        load("/sparktest/sparkfile0.bcp");
    /*List<Row> list = df.collectAsList();
    for(Row row:list){
        String[] words = new String(row.getString(0).getBytes(),0,row.getString(0).length(),"UTF-8").split(" ",-1);
        for(int i=0;i<words.length;i++){
            System.out.println("words==="+words[i]);
        }
    }*/
    JavaRDD<Row> rowRdd = df.javaRDD();
    // Re-decode each line's bytes as UTF-8 and split on spaces into a new Row.
    // NOTE(review): split(" ", -1) keeps trailing empty fields — presumably the
    // bcp format is space-delimited with significant empty columns; confirm.
    JavaRDD<Row> words_bcp= rowRdd.map(new Function<Row, Row>() {
        @Override
        public Row call(Row row) throws Exception {
            // TODO Auto-generated method stub
            String line = row.getString(0);
            String[] words = new String(line.getBytes(),0,line.getBytes().length,"utf-8").split(" ",-1);
            return RowFactory.create(words);
        }
    });
    // Collect and print the split rows (demo output only).
    List<Row> list = words_bcp.collect();
    for(Row row:list){
        System.out.println("row1=="+row.getString(0));
    }
    // Write the original DataFrame back to the SFTP server as a .bcp file.
    df.write().format("com.springml.spark.sftp").
        option("host", "192.168.1.3").
        option("username", "root").
        option("password", "111111").
        option("fileType", "bcp").
        save("/sparktest/luozhao.bcp");
    df.show();
    javacontext.close();
}
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值