【大数据】对空间数据进行过滤后(即查询筛查)SparkRDD运算

class SparkTestIO extends java.io.Serializable {

  /** Connection parameters for the GeoMesa HBase data store. */
  val params = Map(
    "hbase.zookeepers" -> "192.168.6.129",
    "hbase.catalog" -> "building_1")
  // see geomesa-tools/conf/sfts/gdelt/reference.conf
  //  val filter = ECQL.toFilter(null)

  // NOTE(review): this is incremented inside the Spark `map` closure, so the
  // update happens on a serialized copy on the executors and is NEVER visible
  // on the driver. Use a SparkContext accumulator if a real count is needed.
  // Kept as a public `var` only for interface compatibility.
  var n = 0

  /**
   * Prints the given feature as GeoJSON (prefixed by a "," separator line)
   * and returns it unchanged.
   *
   * The previous version also rebuilt a throw-away SimpleFeatureType and
   * SimpleFeature from scratch for EVERY record and then discarded them;
   * that dead per-record work has been removed.
   *
   * @param x feature received from the RDD
   * @return the same feature, untouched
   */
  def addOne(x: SimpleFeature): SimpleFeature = {
    n = n + 1 // executor-local only; see note on `n` above
    val feajson = new FeatureJSON
    println(",")
    println(feajson.toString(x))
    x // the last expression is the result; no `return` needed
  }

  /**
   * Runs an attribute-filtered query against the GeoMesa HBase store,
   * maps every result through [[addOne]] (which logs it as GeoJSON),
   * and returns the number of matching features.
   *
   * @return the count of matching features, rendered as a String
   */
  def runSparkDemo: String = {
    val ds = DataStoreFinder.getDataStore(params).asInstanceOf[HBaseDataStore]
    //    val query = new Query("my_type", filter)
    //    val filter = ECQL.toFilter("CONTAINS(MultiPolygon(((121.0892 31.1813,121.0894 31.1813,121.0894 31.1808,121.0892 31.1808,121.0892 31.1813))), the_geom)")
    // Attribute filter; the commented spatial CONTAINS filter above is an
    // alternative example.
    val filter = ECQL.toFilter("osm_id BETWEEN 635209774 AND 635209790")

    // First (and only) feature type registered in this catalog.
    val typeName = ds.getTypeNames()(0)
    val featureType1 = ds.getFeatureSource(typeName).getFeatures.getSchema

    val query = new Query(typeName, filter)
    println("---------------------------------------------------------")
    println(typeName)
    println("---------------------------------------------------------")
    println(featureType1)
    println("---------------------------------------------------------")

    // Local Spark context; Kryo serialization is required by GeoMesa.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("hrfhdk")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = SparkContext.getOrCreate(conf)

    // Create an RDD backed by the geospatial query using GeoMesa's provider.
    val spatialRDDProvider = GeoMesaSpark(params)
    val resultRDD = spatialRDDProvider.rdd(new Configuration, sc, params, query)
    val result_0 = resultRDD.map(addOne)

    ds.dispose()
    val result_1 = result_0.count() // action: triggers the job and counts elements
    sc.stop()

    // Was `return "" + result_1; sc.stop()` — the second stop() was unreachable.
    result_1.toString
  }
}

 

  • 1
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值