3. Implementing image tiling with GeoTrellis

This chapter implements image tiling in Scala. The steps are as follows:

1. Environment and software versions:

(1) Scala 2.11.7

(2) Java 1.8

(3) Spark 2.2.0

2. Building the test case with Maven

(1) The Maven dependencies are as follows:

<dependencies>

  <dependency>
    <groupId>org.apache.camel</groupId>
    <artifactId>camel-core</artifactId>
  </dependency>
  <dependency>
    <groupId>org.apache.camel</groupId>
    <artifactId>camel-scala</artifactId>
  </dependency>

  <!-- scala -->
  <dependency>
    <groupId>org.scala-lang</groupId>
    <artifactId>scala-library</artifactId>
    <version>2.11.1</version>
  </dependency>
  <dependency>
    <groupId>org.scala-lang.modules</groupId>
    <artifactId>scala-xml_2.11</artifactId>
    <version>1.0.6</version>
  </dependency>

  <!-- logging -->
  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-api</artifactId>
    <scope>runtime</scope>
  </dependency>
  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-core</artifactId>
    <scope>runtime</scope>
  </dependency>
  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-slf4j-impl</artifactId>
    <scope>runtime</scope>
  </dependency>

  <!-- testing -->
  <dependency>
    <groupId>org.apache.camel</groupId>
    <artifactId>camel-test</artifactId>
    <scope>test</scope>
  </dependency>

  <dependency>
    <groupId>com.typesafe.akka</groupId>
    <artifactId>akka-http_2.11</artifactId>
    <version>10.0.7</version>
  </dependency>
  <dependency>
    <groupId>com.typesafe.akka</groupId>
    <artifactId>akka-stream_2.11</artifactId>
    <version>2.4.18</version>
  </dependency>
  <dependency>
    <groupId>com.typesafe.akka</groupId>
    <artifactId>akka-actor_2.11</artifactId>
    <version>2.4.18</version>
  </dependency>
  <dependency>
    <groupId>com.typesafe.akka</groupId>
    <artifactId>akka-http-core_2.11</artifactId>
    <version>10.0.7</version>
  </dependency>
  <dependency>
    <groupId>com.typesafe.akka</groupId>
    <artifactId>akka-http-spray-json_2.11</artifactId>
    <version>10.0.7</version>
  </dependency>
  <dependency>
    <groupId>com.typesafe.akka</groupId>
    <artifactId>akka-parsing_2.11</artifactId>
    <version>10.0.7</version>
  </dependency>

  <dependency>
    <groupId>org.locationtech.geotrellis</groupId>
    <artifactId>geotrellis-spark_2.11</artifactId>
    <version>2.1.0</version>
  </dependency>
  <dependency>
    <groupId>org.locationtech.geotrellis</groupId>
    <artifactId>geotrellis-raster_2.11</artifactId>
    <version>2.1.0</version>
  </dependency>
  <dependency>
    <groupId>org.locationtech.geotrellis</groupId>
    <artifactId>geotrellis-vector_2.11</artifactId>
    <version>2.1.0</version>
  </dependency>
  <dependency>
    <groupId>org.locationtech.geotrellis</groupId>
    <artifactId>geotrellis-proj4_2.11</artifactId>
    <version>2.1.0</version>
  </dependency>
  <dependency>
    <groupId>org.locationtech.geotrellis</groupId>
    <artifactId>geotrellis-util_2.11</artifactId>
    <version>2.1.0</version>
  </dependency>
  <dependency>
    <groupId>org.locationtech.geotrellis</groupId>
    <artifactId>geotrellis-macros_2.11</artifactId>
    <version>2.1.0</version>
  </dependency>

  <dependency>
    <groupId>io.spray</groupId>
    <artifactId>spray-json_2.11</artifactId>
    <version>1.3.3</version>
  </dependency>

  <dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>2.3.1</version>
  </dependency>

  <dependency>
    <groupId>com.fasterxml.jackson.core</groupId>
    <artifactId>jackson-core</artifactId>
    <version>2.6.7</version>
  </dependency>
  <dependency>
    <groupId>com.fasterxml.jackson.core</groupId>
    <artifactId>jackson-databind</artifactId>
    <version>2.6.7</version>
  </dependency>
  <dependency>
    <groupId>com.fasterxml.jackson.core</groupId>
    <artifactId>jackson-annotations</artifactId>
    <version>2.6.7</version>
  </dependency>

</dependencies>
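
The pom above only declares dependencies; to actually compile the Scala sources with Maven, a Scala compiler plugin is also needed in the <build> section. The snippet below is a minimal sketch assuming the commonly used scala-maven-plugin; the plugin version is only an example and should be adapted to your environment.

<build>
  <plugins>
    <!-- Illustrative: compiles src/main/scala and src/test/scala; version is an example only -->
    <plugin>
      <groupId>net.alchim31.maven</groupId>
      <artifactId>scala-maven-plugin</artifactId>
      <version>3.2.2</version>
      <executions>
        <execution>
          <goals>
            <goal>compile</goal>
            <goal>testCompile</goal>
          </goals>
        </execution>
      </executions>
    </plugin>
  </plugins>
</build>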

(2) The image tiling code is as follows:

import geotrellis.raster._
import geotrellis.raster.io.geotiff._
import geotrellis.raster.render._
import geotrellis.raster.resample._
import geotrellis.raster.reproject._
import geotrellis.proj4._

import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.file._
import geotrellis.spark.io.hadoop._
import geotrellis.spark.io.index._
import geotrellis.spark.pyramid._
import geotrellis.spark.reproject._
import geotrellis.spark.tiling._
import geotrellis.spark.render._

import geotrellis.vector._

import org.apache.spark._
import org.apache.spark.rdd._

import scala.io.StdIn
import java.io.File

object IngestImage {

  // Input/output paths. This test runs locally, so an absolute local path is used;
  // if the data lives on HDFS (or another location), adjust these settings.
  val inputPath = "D:/etasy_data_dir/out_data_dir/2019165/clipped/uniclipped/uniclipped_merged_ET.tif"
  val outputPath = "data/catalog1"

  def main(args: Array[String]): Unit = {
    // Create the SparkConf and configure the Kryo serializer.
    val conf =
      new SparkConf()
        .setMaster("local[*]")
        .setAppName("Spark Tiler")
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        .set("spark.kryo.registrator", "geotrellis.spark.io.kryo.KryoRegistrator")

    val sc = new SparkContext(conf)
    try {
      run(sc)
      // Pause so the result can be inspected before Spark shuts down.
      println("Hit enter to exit.")
      StdIn.readLine()
    } finally {
      sc.stop()
    }
  }

  def fullPath(path: String) = new java.io.File(path).getAbsolutePath

  def run(implicit sc: SparkContext) = {
    // Read the GeoTIFF into an RDD. This is the single-band case; for multiband imagery use:
    //   val inputRdd: RDD[(ProjectedExtent, MultibandTile)] = sc.hadoopMultibandGeoTiffRDD(inputPath)
    // and adjust the rest of the pipeline accordingly.
    val inputRdd: RDD[(ProjectedExtent, Tile)] =
      sc.hadoopGeoTiffRDD(inputPath)

    // Collect the layer metadata (layout, cell type, extent, etc.) from the RDD.
    val (_, rasterMetaData) =
      TileLayerMetadata.fromRDD(inputRdd, FloatingLayoutScheme(512))

    // Cut the input into tiles matching the layout, and set the number of partitions (tasks).
    val tiled: RDD[(SpatialKey, Tile)] =
      inputRdd
        .tileToLayout(rasterMetaData.cellType, rasterMetaData.layout, Bilinear)
        .repartition(100)

    // Target projection and tile size: Web Mercator with 256x256 tiles.
    val layoutScheme = ZoomedLayoutScheme(WebMercator, tileSize = 256)

    // Reproject the tiled layer.
    val (zoom, reprojected): (Int, RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]]) =
      TileLayerRDD(tiled, rasterMetaData)
        .reproject(WebMercator, layoutScheme, Bilinear)

    // Create the output catalog (attribute store).
    val attributeStore = FileAttributeStore(outputPath)

    // Create the layer writer.
    val writer = FileLayerWriter(attributeStore)

    // Build the pyramid down from the top zoom level and write each level's tiles to the catalog.
    Pyramid.upLevels(reprojected, layoutScheme, zoom, Bilinear) { (rdd, z) =>
      val layerId = LayerId("landsat", z)
      // If the layer already exists, delete it before writing.
      if (attributeStore.layerExists(layerId)) {
        new FileLayerManager(attributeStore).delete(layerId)
      }
      writer.write(layerId, rdd, ZCurveKeyIndexMethod)
    }
  }
}
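
After the job finishes, the tiles can be read back from the catalog to verify the result. The following is a minimal sketch (not part of the original code) that reads a single tile from the written layer with FileValueReader and renders it to a PNG. The zoom level, SpatialKey and output path are hypothetical and must match what was actually written; the key has to fall inside the layer's key bounds.

import geotrellis.raster._
import geotrellis.raster.render._
import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.file._

object ReadTileExample {
  def main(args: Array[String]): Unit = {
    // Hypothetical values: the catalog path and layer name must match the ingest above,
    // and the zoom/SpatialKey must lie inside the written layer's key bounds.
    val catalogPath = "data/catalog1"
    val layerId = LayerId("landsat", 10)

    // A value reader fetches single tiles by key, without needing a SparkContext.
    val valueReader = FileValueReader(catalogPath)
    val tileReader = valueReader.reader[SpatialKey, Tile](layerId)
    val tile: Tile = tileReader.read(SpatialKey(815, 418))

    // Render the tile with a quantile-break color map and save it as a PNG.
    val colorMap = ColorMap.fromQuantileBreaks(tile.histogram, ColorRamps.BlueToOrange)
    tile.renderPng(colorMap).write("data/check_tile.png")
  }
}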

 
