Commit 44f4072

Merge pull request #572 from s22s/feature/slippy

Slippy map writing support.

2 parents 8d901b7 + c7183ce

File tree

12 files changed: +693 −51 lines changed

build.sbt

+1 −1

@@ -176,7 +176,7 @@ lazy val docs = project
     Compile / paradoxMaterialTheme ~= { _
       .withRepository(uri("https://github.com/locationtech/rasterframes"))
       .withCustomStylesheet("assets/custom.css")
-      .withCopyright("""&copy; 2017-2019 <a href="https://astraea.earth">Astraea</a>, Inc. All rights reserved.""")
+      .withCopyright("""&copy; 2017-2021 <a href="https://astraea.earth">Astraea</a>, Inc. All rights reserved.""")
       .withLogo("assets/images/RF-R.svg")
       .withFavicon("assets/images/RasterFrames_32x32.ico")
       .withColor("blue-grey", "light-blue")

core/src/main/scala/org/locationtech/rasterframes/expressions/aggregates/ProjectedLayerMetadataAggregate.scala

+10 −10

@@ -34,6 +34,7 @@ import org.apache.spark.sql.types.{DataType, StructType}
 import org.apache.spark.sql.{Column, Row, TypedColumn}
 
 class ProjectedLayerMetadataAggregate(destCRS: CRS, destDims: Dimensions[Int]) extends UserDefinedAggregateFunction {
+
   import ProjectedLayerMetadataAggregate._
 
   def inputSchema: StructType = InputRecord.inputRecordEncoder.schema
@@ -47,10 +48,10 @@ class ProjectedLayerMetadataAggregate(destCRS: CRS, destDims: Dimensions[Int]) extends UserDefinedAggregateFunction {
   def initialize(buffer: MutableAggregationBuffer): Unit = ()
 
   def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
-    if(!input.isNullAt(0)) {
+    if (!input.isNullAt(0)) {
       val in = input.as[InputRecord]
 
-      if(buffer.isNullAt(0)) {
+      if (buffer.isNullAt(0)) {
         in.toBufferRecord(destCRS).write(buffer)
       } else {
         val br = buffer.as[BufferRecord]
@@ -71,16 +72,15 @@ class ProjectedLayerMetadataAggregate(destCRS: CRS, destDims: Dimensions[Int]) extends UserDefinedAggregateFunction {
     case _ => ()
   }
 
-  def evaluate(buffer: Row): Any = {
-    val buf = buffer.as[BufferRecord]
-    if (buf.isEmpty) throw new IllegalArgumentException("Can not collect metadata from empty data frame.")
+  def evaluate(buffer: Row): Any =
+    Option(buffer).map(_.as[BufferRecord]).filter(!_.isEmpty).map(buf => {
+      val re = RasterExtent(buf.extent, buf.cellSize)
+      val layout = LayoutDefinition(re, destDims.cols, destDims.rows)
 
-    val re = RasterExtent(buf.extent, buf.cellSize)
-    val layout = LayoutDefinition(re, destDims.cols, destDims.rows)
+      val kb = KeyBounds(layout.mapTransform(buf.extent))
+      TileLayerMetadata(buf.cellType, layout, buf.extent, destCRS, kb).toRow
 
-    val kb = KeyBounds(layout.mapTransform(buf.extent))
-    TileLayerMetadata(buf.cellType, layout, buf.extent, destCRS, kb).toRow
-  }
+    }).getOrElse(throw new IllegalArgumentException("Can not collect metadata from empty data frame."))
 }
 
 object ProjectedLayerMetadataAggregate {
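The rewritten evaluate replaces an eager read of the aggregation buffer with an Option chain, so a null or empty buffer yields the explanatory IllegalArgumentException rather than an opaque failure partway through. A minimal sketch of the same pattern, using stand-in types rather than the RasterFrames API:

// Minimal sketch (hypothetical types and values) of the null-safe pattern
// used by the new evaluate: wrap a possibly-null input in Option, narrow it,
// and fall back to an error only when nothing usable remains.
def evaluateSketch(buffer: String): String =
  Option(buffer)                            // guards against a null buffer
    .map(_.trim)                            // stands in for buffer.as[BufferRecord]
    .filter(_.nonEmpty)                     // stands in for !_.isEmpty
    .map(buf => s"metadata(${buf.length})") // stands in for TileLayerMetadata(...).toRow
    .getOrElse(throw new IllegalArgumentException("Can not collect metadata from empty data frame."))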

datasource/src/main/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister

+1
@@ -5,3 +5,4 @@ org.locationtech.rasterframes.datasource.raster.RasterSourceDataSource
 org.locationtech.rasterframes.datasource.geojson.GeoJsonDataSource
 org.locationtech.rasterframes.datasource.stac.api.StacApiDataSource
 org.locationtech.rasterframes.datasource.tiles.TilesDataSource
+org.locationtech.rasterframes.datasource.slippy.SlippyDataSource
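Adding SlippyDataSource to this META-INF/services file registers it with Java's ServiceLoader mechanism, which Spark uses to discover DataSourceRegister implementations on the classpath. A small sketch (assuming a Spark plus RasterFrames classpath) that enumerates what got registered:

import java.util.ServiceLoader
import org.apache.spark.sql.sources.DataSourceRegister
import scala.collection.JavaConverters._

// List every data source registered via META-INF/services; after this commit,
// the slippy source should appear alongside raster, geojson, stac-api, and tiles.
val registered = ServiceLoader.load(classOf[DataSourceRegister]).asScala.map(_.shortName())
registered.foreach(println)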
datasource/src/main/resources/slippy.html

+77

@@ -0,0 +1,77 @@
+<!DOCTYPE html>
+<!--
+  ~ This software is licensed under the Apache 2 license, quoted below.
+  ~
+  ~ Copyright 2021 Astraea, Inc.
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License"); you may not
+  ~ use this file except in compliance with the License. You may obtain a copy of
+  ~ the License at
+  ~
+  ~ [http://www.apache.org/licenses/LICENSE-2.0]
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  ~ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  ~ License for the specific language governing permissions and limitations under
+  ~ the License.
+  ~
+  ~
+-->
+
+<html lang="en">
+<head>
+    <title>RasterFrames Rendering</title>
+    <meta charset="utf-8" />
+    <meta name="viewport" content="width=device-width, initial-scale=1.0"/>
+    <meta property="eai:center" content="(${viewLat},${viewLon})"/>
+    <meta property="eai:maxZoom" content="${maxNativeZoom}"/>
+    <link rel="stylesheet" href="https://unpkg.com/leaflet@1.3.1/dist/leaflet.css" integrity="sha512-Rksm5RenBEKSKFjgI3a41vrjkw4EVPlJ3+OiI65vTjIdo9brlAacEuKOiQ5OFh7cOI1bkDwLqdLw3Zg0cRJAAQ==" crossorigin=""/>
+    <script src="https://unpkg.com/leaflet@1.3.1/dist/leaflet.js" integrity="sha512-/Nsx9X4HebavoBvEBuyp3I7od5tA0UzAxs+j83KgC8PU0kgB4XiK4Lfe4y4cgBtaRJQEIFCW+oC506aPT2L1zw==" crossorigin=""></script>
+    <link rel="stylesheet" href="https://unpkg.com/leaflet-control-geocoder/dist/Control.Geocoder.css" />
+    <script src="https://unpkg.com/leaflet-control-geocoder/dist/Control.Geocoder.js"></script>
+    <style>
+        #mapid {
+            position: absolute;
+            top: 10px;
+            bottom: 10px;
+            left: 10px;
+            right: 10px;
+        }
+    </style>
+</head>
+<body>
+
+<div id="mapid"></div>
+
+<script>
+
+    var map = L.map('mapid')
+        .setView([${viewLat}, ${viewLon}], ${maxNativeZoom});
+
+    L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {attribution: '&copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors'}).addTo(map);
+
+    L.tileLayer(
+        '{z}/{x}/{y}.png', {
+            maxZoom: 18,
+            maxNativeZoom: ${maxNativeZoom}
+        }
+    ).addTo(map);
+
+    L.control.scale().addTo(map);
+
+    L.Control.geocoder().addTo(map);
+
+    var popup = L.popup();
+
+    function showPos(e) {
+        popup
+            .setLatLng(e.latlng)
+            .setContent(e.latlng.toString())
+            .openOn(map);
+    }
+
+    map.on('click', showPos);
+</script>
+</body>
+</html>
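The ${viewLat}, ${viewLon}, and ${maxNativeZoom} placeholders in this template are filled in by Apache Commons Text's StringSubstitutor in SlippyResult.writeHtml (see the Scala file below). A minimal sketch with hypothetical values:

import org.apache.commons.text.StringSubstitutor
import scala.collection.JavaConverters._

// Hypothetical center and zoom values standing in for a real SlippyResult.
val values = Map(
  "viewLat"       -> "38.889",
  "viewLon"       -> "-77.035",
  "maxNativeZoom" -> "8"
)
val subst = new StringSubstitutor(values.asJava)

// Each template line is rewritten the same way writeHtml does it, line by line.
println(subst.replace("map.setView([${viewLat}, ${viewLon}], ${maxNativeZoom});"))
// => map.setView([38.889, -77.035], 8);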

datasource/src/main/scala/org/locationtech/rasterframes/datasource/package.scala

+45
@@ -24,6 +24,7 @@ package org.locationtech.rasterframes
 import cats.syntax.option._
 import io.circe.Json
 import io.circe.parser
+import org.apache.spark.sql.{Column, DataFrame}
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
 import sttp.model.Uri
 
@@ -72,4 +73,48 @@ package object datasource {
   def jsonParam(key: String, parameters: CaseInsensitiveStringMap): Option[Json] =
     if(parameters.containsKey(key)) parser.parse(parameters.get(key)).toOption
     else None
+
+
+  /**
+   * Convenience grouping for transient columns defining spatial context.
+   */
+  private[rasterframes]
+  case class SpatialComponents(crsColumn: Column,
+                               extentColumn: Column,
+                               dimensionColumn: Column,
+                               cellTypeColumn: Column)
+
+  private[rasterframes]
+  object SpatialComponents {
+    def apply(tileColumn: Column, crsColumn: Column, extentColumn: Column): SpatialComponents = {
+      val dim = rf_dimensions(tileColumn) as "dims"
+      val ct = rf_cell_type(tileColumn) as "cellType"
+      SpatialComponents(crsColumn, extentColumn, dim, ct)
+    }
+    def apply(prColumn: Column): SpatialComponents = {
+      SpatialComponents(
+        rf_crs(prColumn) as "crs",
+        rf_extent(prColumn) as "extent",
+        rf_dimensions(prColumn) as "dims",
+        rf_cell_type(prColumn) as "cellType"
+      )
+    }
+  }
+
+  /**
+   * If the given DataFrame has tile, CRS, and extent columns, derives the remaining
+   * spatial components (dimensions and cell type) from the tile column. Otherwise,
+   * if there's a `ProjectedRaster` column, extracts all spatial components from it.
+   *
+   * @param d DataFrame to process.
+   * @return the resolved `SpatialComponents`, if any
+   */
+  private[rasterframes]
+  def projectSpatialComponents(d: DataFrame): Option[SpatialComponents] =
+    d.tileColumns.headOption.zip(d.crsColumns.headOption.zip(d.extentColumns.headOption)).headOption
+      .map { case (tile, (crs, extent)) => SpatialComponents(tile, crs, extent) }
+      .orElse(
+        d.projRasterColumns.headOption
+          .map(pr => SpatialComponents(pr))
+      )
 }
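projectSpatialComponents tries two DataFrame shapes in order: a tile column paired with explicit CRS and extent columns, falling back to a single ProjectedRaster column from which all four components are extracted. Since both helpers are private[rasterframes], the sketch below is illustrative, package-internal use only; `df` is a hypothetical DataFrame holding raster content:

// Illustrative, package-internal sketch (SpatialComponents is private[rasterframes]).
projectSpatialComponents(df) match {
  case Some(SpatialComponents(crs, extent, dims, cellType)) =>
    // Either shape resolves to the same four columns.
    df.select(crs, extent, dims, cellType).show()
  case None =>
    sys.error("DataFrame has neither tile+crs+extent columns nor a ProjectedRaster column")
}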
datasource/src/main/scala/org/locationtech/rasterframes/datasource/slippy/DataFrameSlippyExport.scala

+170

@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2020 Astraea, Inc. All rights reserved.
+ */
+
+package org.locationtech.rasterframes.datasource.slippy
+
+import geotrellis.layer.{SpatialKey, TileLayerMetadata, ZoomedLayoutScheme}
+import geotrellis.proj4.{LatLng, WebMercator}
+import geotrellis.raster._
+import geotrellis.raster.render.ColorRamp
+import geotrellis.raster.resample.Bilinear
+import geotrellis.spark._
+import geotrellis.spark.pyramid.Pyramid
+import geotrellis.spark.store.slippy.HadoopSlippyTileWriter
+import geotrellis.vector.reproject.Implicits._
+import org.apache.commons.text.StringSubstitutor
+import org.apache.hadoop.fs.{FileSystem, Path}
+import org.apache.spark.sql.{DataFrame, SparkSession}
+import org.locationtech.rasterframes.encoders.StandardEncoders
+import org.locationtech.rasterframes.expressions.aggregates.ProjectedLayerMetadataAggregate
+import org.locationtech.rasterframes.util.withResource
+import org.locationtech.rasterframes.{rf_agg_approx_histogram, _}
+import org.locationtech.rasterframes.datasource._
+
+import java.io.PrintStream
+import java.net.URI
+import java.nio.file.Paths
+import scala.io.Source
+import RenderingProfiles._
+import org.locationtech.rasterframes.datasource.slippy.RenderingModes.{RenderingMode, Uniform}
+
+object DataFrameSlippyExport extends StandardEncoders {
+  val destCRS = WebMercator
+
+  /**
+   * Export tiles as a slippy map.
+   * NB: Temporal components are ignored blindly.
+   *
+   * @param dest URI for Hadoop supported storage endpoint (e.g. 'file://', 'hdfs://', etc.).
+   * @param profile Rendering profile
+   */
+  def writeSlippyTiles(df: DataFrame, dest: URI, profile: Profile): SlippyResult = {
+
+    val spark = df.sparkSession
+    implicit val sc = spark.sparkContext
+
+    val outputPath: String = dest.toASCIIString
+
+    require(
+      df.tileColumns.length >= profile.expectedBands, // TODO: Do we want to allow this greater-than case? Warn the user?
+      s"Selected rendering mode '${profile}' expected ${profile.expectedBands} bands.")
+
+    // Select only the tile columns given by the user, plus the crs and extent columns,
+    // which are the fallback if the first `column` is not a PRT.
+    val SpatialComponents(crs, extent, dims, cellType) = projectSpatialComponents(df)
+      .getOrElse(
+        throw new IllegalArgumentException("Provided dataframe did not have an Extent and/or CRS"))
+
+    val tlm: TileLayerMetadata[SpatialKey] =
+      df.select(
+          ProjectedLayerMetadataAggregate(
+            destCRS,
+            extent,
+            crs,
+            cellType,
+            dims
+          )
+        )
+        .first()
+
+    val rfLayer = df
+      .toLayer(tlm)
+      // TODO: this should be fixed in RasterFrames
+      .na
+      .drop()
+      .persist()
+      .asInstanceOf[RasterFrameLayer]
+
+    val inputRDD: MultibandTileLayerRDD[SpatialKey] =
+      rfLayer.toMultibandTileLayerRDD match {
+        case Left(spatial) => spatial
+        case Right(_) =>
+          throw new NotImplementedError(
+            "Dataframes with multiple temporal values are not yet supported.")
+      }
+
+    val tileColumns = rfLayer.tileColumns
+
+    val rp = profile match {
+      case up: UniformColorRampProfile =>
+        val hist = rfLayer
+          .select(rf_agg_approx_histogram(tileColumns.head))
+          .first()
+        up.toResolvedProfile(hist)
+      case up: UniformRGBColorProfile =>
+        require(tileColumns.length >= 3)
+        val stats = rfLayer
+          .select(
+            rf_agg_stats(tileColumns(0)),
+            rf_agg_stats(tileColumns(1)),
+            rf_agg_stats(tileColumns(2)))
+          .first()
+        up.toResolvedProfile(stats._1, stats._2, stats._3)
+      case o => o
+    }
+
+    val layoutScheme = ZoomedLayoutScheme(WebMercator, tileSize = 256)
+
+    val (zoom, reprojected) = inputRDD.reproject(WebMercator, layoutScheme, Bilinear)
+    val renderer = (_: SpatialKey, tile: MultibandTile) => rp.render(tile).bytes
+    val writer = new HadoopSlippyTileWriter[MultibandTile](outputPath, "png")(renderer)
+
+    // Pyramiding up the zoom levels, write our tiles out to the local file system.
+    Pyramid.upLevels(reprojected, layoutScheme, zoom, Bilinear) { (rdd, z) =>
+      writer.write(z, rdd)
+    }
+
+    rfLayer.unpersist()
+
+    val center = reprojected.metadata.extent.center
+      .reproject(WebMercator, LatLng)
+
+    SlippyResult(dest, center.getY, center.getX, zoom)
+  }
+
+  def writeSlippyTiles(df: DataFrame, dest: URI, renderingMode: RenderingMode): SlippyResult = {
+
+    val profile = (df.tileColumns.length, renderingMode) match {
+      case (1, Uniform) => UniformColorRampProfile(greyscale)
+      case (_, Uniform) => UniformRGBColorProfile()
+      case (1, _)       => ColorRampProfile(greyscale)
+      case _            => RGBColorProfile()
+    }
+    writeSlippyTiles(df, dest, profile)
+  }
+
+  def writeSlippyTiles(df: DataFrame, dest: URI, colorRamp: ColorRamp, renderingMode: RenderingMode): SlippyResult = {
+    val profile = renderingMode match {
+      case Uniform => UniformColorRampProfile(colorRamp)
+      case _       => ColorRampProfile(colorRamp)
+    }
+    writeSlippyTiles(df, dest, profile)
+  }
+
+  case class SlippyResult(dest: URI, centerLat: Double, centerLon: Double, maxZoom: Int) {
+    // for Python interop
+    def outputUrl(): String = dest.toASCIIString
+
+    def writeHtml(spark: SparkSession): Unit = {
+      import java.util.{HashMap => JMap}
+
+      val subst = new StringSubstitutor(new JMap[String, String]() {
+        put("maxNativeZoom", maxZoom.toString)
+        put("id", Paths.get(dest.getPath).getFileName.toString)
+        put("viewLat", centerLat.toString)
+        put("viewLon", centerLon.toString)
+      })
+
+      val rawLines = Source.fromInputStream(getClass.getResourceAsStream("/slippy.html")).getLines()
+
+      val fs = FileSystem.get(dest, spark.sparkContext.hadoopConfiguration)
+
+      withResource(fs.create(new Path(new Path(dest), "index.html"), true)) { hout =>
+        val out = new PrintStream(hout, true, "UTF-8")
+        for (line <- rawLines) {
+          out.println(subst.replace(line))
+        }
+      }
+    }
+  }
+}
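Taken together, a plausible end-to-end use of the new API looks like the sketch below. It is hedged: `df` and the output path are hypothetical, and `spark` is assumed to be a RasterFrames-enabled session.

import java.net.URI
import org.locationtech.rasterframes.datasource.slippy.DataFrameSlippyExport
import org.locationtech.rasterframes.datasource.slippy.RenderingModes.Uniform

// Render the DataFrame's tiles as a WebMercator slippy-map pyramid, then write
// a Leaflet viewer (index.html, generated from slippy.html) next to the tiles.
val result = DataFrameSlippyExport.writeSlippyTiles(
  df,                                    // hypothetical DataFrame with tile column(s)
  URI.create("file:///tmp/slippy-demo"), // any Hadoop-supported scheme works
  Uniform                                // profile is picked from the tile-column count
)
result.writeHtml(spark)
println(result.outputUrl())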
