class SparkTestIO extends java.io.Serializable {

  /** Connection parameters for the GeoMesa HBase data store. */
  val params = Map(
    "hbase.zookeepers" -> "192.168.6.129",
    "hbase.catalog" -> "building_1")
  // see geomesa-tools/conf/sfts/gdelt/reference.conf
  // val filter = ECQL.toFilter(null)

  // Running count of features seen by addOne.
  // NOTE(review): this is mutated inside a Spark `map` closure — each executor
  // increments its own deserialized copy, so the driver's value only reflects
  // the count when running with a local master. Use an Accumulator if the
  // count is needed on the driver in cluster mode.
  var n = 0

  /**
   * Prints the feature as GeoJSON (preceded by a "," separator line) and
   * returns it unchanged. Used as the mapping function over the GeoMesa RDD.
   *
   * NOTE(review): the original version also built a hard-coded SimpleFeature
   * ("my_type" with __fid__/the_geom/osm_id/code/fclass) on every call and
   * discarded it; that dead per-record work has been removed. Attributes must
   * be added in the exact order declared by the SimpleFeatureType if that
   * builder code is ever restored.
   *
   * @param x the feature to print; returned as-is
   * @return the same feature `x`
   */
  def addOne(x: SimpleFeature): SimpleFeature = {
    n = n + 1 // see note on `n` above: executor-local in cluster mode
    val featureJson = new FeatureJSON
    println(",")
    println(featureJson.toString(x))
    x
  }

  /**
   * Runs the demo query: opens the HBase data store, builds a CQL filter on
   * `osm_id`, creates a GeoMesa Spark RDD for the first available type name,
   * prints each matching feature as GeoJSON via [[addOne]], and returns the
   * match count as a string.
   *
   * Resources (SparkContext, data store) are released in `finally` blocks so
   * they are cleaned up even when the query fails. The data store is disposed
   * only AFTER `count()` has materialized the RDD (the original code disposed
   * it before the action ran).
   *
   * @return the number of features matching the filter, as a string
   */
  def runSparkDemo: String = {
    val ds = DataStoreFinder.getDataStore(params).asInstanceOf[HBaseDataStore]
    try {
      // val query = new Query("my_type", filter)
      // val filter = ECQL.toFilter("CONTAINS(MultiPolygon(((121.0892 31.1813,121.0894 31.1813,121.0894 31.1808,121.0892 31.1808,121.0892 31.1813))), the_geom)")
      val filter = ECQL.toFilter("osm_id BETWEEN 635209774 AND 635209790")

      // Use the first type name registered in the catalog.
      // NOTE(review): assumes the catalog is non-empty — getTypeNames()(0)
      // throws ArrayIndexOutOfBoundsException otherwise.
      val typeName = ds.getTypeNames()(0)
      val featureType = ds.getFeatureSource(typeName).getFeatures.getSchema
      val query = new Query(typeName, filter)

      println("---------------------------------------------------------")
      println(typeName)
      println("---------------------------------------------------------")
      println(featureType)
      println("---------------------------------------------------------")

      // Set up the SparkContext (Kryo serialization is required by GeoMesa).
      val conf = new SparkConf()
        .setMaster("local[*]")
        .setAppName("hrfhdk")
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      val sc = SparkContext.getOrCreate(conf)
      try {
        // Create an RDD from the geospatial query using GeoMesa's Spark provider.
        val spatialRDDProvider = GeoMesaSpark(params)
        val resultRDD = spatialRDDProvider.rdd(new Configuration, sc, params, query)
        // count() is the action that triggers the query; addOne prints each
        // feature as a side effect on the executors.
        val matchCount = resultRDD.map(addOne).count()
        matchCount.toString
      } finally {
        sc.stop()
      }
    } finally {
      ds.dispose()
    }
  }
}