/*
 * Copyright (c) 2020 Astraea, Inc. All rights reserved.
 */

package org.locationtech.rasterframes.datasource.slippy

import geotrellis.layer.{SpatialKey, TileLayerMetadata, ZoomedLayoutScheme}
import geotrellis.proj4.{LatLng, WebMercator}
import geotrellis.raster._
import geotrellis.raster.render.ColorRamp
import geotrellis.raster.resample.Bilinear
import geotrellis.spark._
import geotrellis.spark.pyramid.Pyramid
import geotrellis.spark.store.slippy.HadoopSlippyTileWriter
import geotrellis.vector.reproject.Implicits._
import org.apache.commons.text.StringSubstitutor
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.locationtech.rasterframes.encoders.StandardEncoders
import org.locationtech.rasterframes.expressions.aggregates.ProjectedLayerMetadataAggregate
import org.locationtech.rasterframes.util.withResource
import org.locationtech.rasterframes._
import org.locationtech.rasterframes.datasource._

import java.io.PrintStream
import java.net.URI
import java.nio.file.Paths
import scala.io.Source
import RenderingProfiles._
import org.locationtech.rasterframes.datasource.slippy.RenderingModes.{RenderingMode, Uniform}

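/**
 * Utilities for exporting the tiles of a DataFrame as a "slippy map" (XYZ) PNG
 * tile pyramid, along with a simple HTML viewer.
 *
 * A minimal usage sketch, assuming `df` is a DataFrame with tile, extent, and
 * CRS columns and `spark` is the active SparkSession:
 * {{{
 * val result = DataFrameSlippyExport.writeSlippyTiles(
 *   df, URI.create("file:///tmp/tiles"), RenderingModes.Uniform)
 * result.writeHtml(spark) // writes index.html alongside the tiles
 * }}}
 */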
object DataFrameSlippyExport extends StandardEncoders {
  val destCRS = WebMercator

  /**
   * Export the tiles of a DataFrame as a slippy map.
   * NB: Any temporal components are silently ignored.
   *
   * @param df      DataFrame with at least one tile column plus extent and CRS information
   * @param dest    URI for a Hadoop-supported storage endpoint (e.g. 'file://', 'hdfs://', etc.)
   * @param profile rendering profile
   */
  def writeSlippyTiles(df: DataFrame, dest: URI, profile: Profile): SlippyResult = {

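    // Note: the GeoTrellis layer operations used below (reprojection, pyramiding,
    // tile writing) require an implicit SparkContext in scope.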
    val spark = df.sparkSession
    implicit val sc = spark.sparkContext

    val outputPath: String = dest.toASCIIString

    require(
      df.tileColumns.length >= profile.expectedBands, // TODO: Do we want to allow the greater-than case? Warn the user?
      s"Selected rendering mode '${profile}' expected ${profile.expectedBands} bands.")

    // Select only the user-specified tile columns, plus the CRS and extent columns,
    // which serve as fallbacks when the first tile column is not a projected raster.
    val SpatialComponents(crs, extent, dims, cellType) = projectSpatialComponents(df)
      .getOrElse(
        throw new IllegalArgumentException("Provided DataFrame did not have an extent and/or CRS"))

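    // Aggregate over all rows to compute the combined layer metadata (extent,
    // cell type, dimensions, layout) of the input, reprojected to the destination CRS.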
    val tlm: TileLayerMetadata[SpatialKey] =
      df.select(
          ProjectedLayerMetadataAggregate(
            destCRS,
            extent,
            crs,
            cellType,
            dims
          )
        )
        .first()

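    // Convert to a keyed RasterFrameLayer. Rows containing nulls are dropped, and
    // the layer is persisted because it is traversed more than once below
    // (color statistics, then tile rendering).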
    val rfLayer = df
      .toLayer(tlm)
      // TODO: this should be fixed in RasterFrames
      .na
      .drop()
      .persist()
      .asInstanceOf[RasterFrameLayer]

    val inputRDD: MultibandTileLayerRDD[SpatialKey] =
      rfLayer.toMultibandTileLayerRDD match {
        case Left(spatial) => spatial
        case Right(_) =>
          throw new NotImplementedError(
            "DataFrames with multiple temporal values are not yet supported.")
      }

    val tileColumns = rfLayer.tileColumns

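    // "Uniform" profiles are resolved against whole-layer statistics (an
    // approximate histogram, or per-band aggregate stats) so that all tiles
    // are rendered on a common color scale.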
    val rp = profile match {
      case up: UniformColorRampProfile =>
        val hist = rfLayer
          .select(rf_agg_approx_histogram(tileColumns.head))
          .first()
        up.toResolvedProfile(hist)
      case up: UniformRGBColorProfile =>
        require(tileColumns.length >= 3, s"Rendering profile '$up' requires at least 3 bands.")
        val stats = rfLayer
          .select(
            rf_agg_stats(tileColumns(0)),
            rf_agg_stats(tileColumns(1)),
            rf_agg_stats(tileColumns(2)))
          .first()
        up.toResolvedProfile(stats._1, stats._2, stats._3)
      case o => o
    }

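    // Standard XYZ/web-mercator tiling scheme: 256x256-pixel tiles, as expected
    // by Leaflet, OpenStreetMap, and other slippy-map clients.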
    val layoutScheme = ZoomedLayoutScheme(WebMercator, tileSize = 256)

    val (zoom, reprojected) = inputRDD.reproject(WebMercator, layoutScheme, Bilinear)
    val renderer = (_: SpatialKey, tile: MultibandTile) => rp.render(tile).bytes
    val writer = new HadoopSlippyTileWriter[MultibandTile](outputPath, "png")(renderer)

    // Pyramiding up the zoom levels, write the tiles out to the destination file system.
    Pyramid.upLevels(reprojected, layoutScheme, zoom, Bilinear) { (rdd, z) =>
      writer.write(z, rdd)
    }

    rfLayer.unpersist()

    val center = reprojected.metadata.extent.center
      .reproject(WebMercator, LatLng)

    SlippyResult(dest, center.getY, center.getX, zoom)
  }

  def writeSlippyTiles(df: DataFrame, dest: URI, renderingMode: RenderingMode): SlippyResult = {

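    // Choose a default profile from the band count: single-band input gets a
    // greyscale ramp; multi-band input is rendered as RGB from the first three
    // bands, with the Uniform variants resolved against whole-layer statistics.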
    val profile = (df.tileColumns.length, renderingMode) match {
      case (1, Uniform) => UniformColorRampProfile(greyscale)
      case (_, Uniform) => UniformRGBColorProfile()
      case (1, _) => ColorRampProfile(greyscale)
      case _ => RGBColorProfile()
    }
    writeSlippyTiles(df, dest, profile)
  }

  def writeSlippyTiles(df: DataFrame, dest: URI, colorRamp: ColorRamp, renderingMode: RenderingMode): SlippyResult = {
    val profile = renderingMode match {
      case Uniform => UniformColorRampProfile(colorRamp)
      case _ => ColorRampProfile(colorRamp)
    }
    writeSlippyTiles(df, dest, profile)
  }

  case class SlippyResult(dest: URI, centerLat: Double, centerLon: Double, maxZoom: Int) {
    // For Python interop
    def outputUrl(): String = dest.toASCIIString

    def writeHtml(spark: SparkSession): Unit = {
      import java.util.{HashMap => JMap}

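      // Fill the ${maxNativeZoom}, ${id}, ${viewLat}, and ${viewLon} placeholders
      // in the bundled slippy.html template via Commons Text's StringSubstitutor.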
      val subst = new StringSubstitutor(new JMap[String, String]() {
        put("maxNativeZoom", maxZoom.toString)
        put("id", Paths.get(dest.getPath).getFileName.toString)
        put("viewLat", centerLat.toString)
        put("viewLon", centerLon.toString)
      })

      val rawLines = Source.fromInputStream(getClass.getResourceAsStream("/slippy.html")).getLines()

      val fs = FileSystem.get(dest, spark.sparkContext.hadoopConfiguration)

      withResource(fs.create(new Path(new Path(dest), "index.html"), true)) { hout =>
        val out = new PrintStream(hout, true, "UTF-8")
        for (line <- rawLines) {
          out.println(subst.replace(line))
        }
      }
    }
  }
}