Skip to content

Commit

Permalink
Merge branch 'release/0.7.1'
Browse files Browse the repository at this point in the history
  • Loading branch information
metasim committed Jul 31, 2018
2 parents c2a71b0 + f49e039 commit 405c35e
Show file tree
Hide file tree
Showing 17 changed files with 117 additions and 94 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,10 @@ Please see the [Getting Started](http://rasterframes.io/getting-started.html) se

## Documentation

* [Giter8 Template](https://github.com/s22s/raster-frames.g8) (i.e. `sbt new s22s/raster-frames.g8`)
* [Users' Manual](http://rasterframes.io/)
* [API Documentation](http://rasterframes.io/latest/api/index.html)
* [List of available UDFs](http://rasterframes.io/latest/api/index.html#astraea.spark.rasterframes.RasterFunctions)
* [RasterFrames Jupyter Notebook Docker Image](https://hub.docker.com/r/s22s/rasterframes-notebooks/)

## Copyright and License

Expand Down
6 changes: 4 additions & 2 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,10 @@ lazy val datasource = project
.disablePlugins(SparkPackagePlugin)

lazy val experimental = project
.dependsOn(core % "test->test;compile->compile")
.dependsOn(datasource % "test->test;compile->compile")
.configs(IntegrationTest)
.settings(Defaults.itSettings)
.dependsOn(core % "test->test;it->test;compile->compile")
.dependsOn(datasource % "test->test;it->test;compile->compile")
.disablePlugins(SparkPackagePlugin)

lazy val docs = project
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,9 @@ object MultibandRender {

/** Base type for Rendering profiles. */
trait Profile {
/** Expected number of bands. */
def expectedBands: Int = 3

/** Value from -255 to 255 */
def brightness: Int = 0
/** Value from -255 to 255 */
Expand Down Expand Up @@ -102,6 +105,8 @@ object MultibandRender {
compressRange _ andThen colorAdjust

def render(tile: MultibandTile) = {
require(expectedBands <= tile.bandCount, s"Need at least $expectedBands bands (${tile.bandCount} provided).")

val r = applyAdjustment(red(tile))
val g = applyAdjustment(green(tile))
val b = applyAdjustment(blue(tile))
Expand All @@ -128,8 +133,11 @@ object MultibandRender {
}

case class ColorRampProfile(ramp: ColorRamp) extends Profile {
override def expectedBands: Int = 1
// Are there other ways to use the other bands?
override def render(tile: MultibandTile): Png =
colorAdjust(tile.band(0)).renderPng(ramp)
override def render(tile: MultibandTile): Png = {
require(tile.bandCount >= 1, s"Need at least 1 band")
applyAdjustment(tile.band(0)).renderPng(ramp)
}
}
}
5 changes: 5 additions & 0 deletions docs/src/main/tut/release-notes.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@

## 0.7.x

### 0.7.1

* Fixed ColorRamp pipeline in MultibandRender
* Fixed Python wrapper for `explodeTiles`

### 0.7.0

* Now an incubating project under Eclipse Foundation LocationTech! GitHub repo moved to [locationtech/rasterframes](https://github.com/locationtech/rasterframes).
Expand Down
6 changes: 5 additions & 1 deletion experimental/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,8 @@ libraryDependencies ++= Seq(
spark("core").value % Provided,
spark("mllib").value % Provided,
spark("sql").value % Provided
)
)

fork in IntegrationTest := true
javaOptions in IntegrationTest := Seq("-Xmx2G")
parallelExecution in IntegrationTest := false
Original file line number Diff line number Diff line change
Expand Up @@ -68,35 +68,9 @@ object MODISCatalogDataSource extends LazyLogging with ResourceCacheSupport {
val MCD43A4_BASE = "https://modis-pds.s3.amazonaws.com/MCD43A4.006/"
override def maxCacheFileAgeHours: Int = Int.MaxValue

// As of 5/6/2018, these days are missing from AWS.
private val blacklist = Seq(
"2018-03-07",
"2018-03-08",
"2018-03-09",
"2018-03-10",
"2018-03-11",
"2018-03-12",
"2018-03-13",
"2018-03-14",
"2018-03-15",
"2018-02-27",
"2018-02-28",
"2018-03-01",
"2018-03-02",
"2018-03-03",
"2018-03-04",
"2018-04-27",
"2018-03-05",
"2018-04-28",
"2018-03-06",
"2018-04-29",
"2018-04-30",
"2018-05-01",
"2018-05-02",
"2018-05-03",
"2018-05-04",
"2018-05-05",
"2018-05-06"
// List of missing days
private val blacklist = Seq[String](
//"2018-05-06"
)

private def sceneFiles(start: LocalDate, end: LocalDate, useBlacklist: Boolean) = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ case class MODISCatalogRelation(sqlContext: SQLContext, sceneList: HadoopPath)
def buildScan(): RDD[Row] = {
import sqlContext.implicits._

logger.info("Scene file is: " + sceneList)
logger.debug("Scene file is: " + sceneList)
val catalog = sqlContext.read
.option("header", "true")
.option("mode", "DROPMALFORMED") // <--- mainly for the fact that we have internal headers from the concat
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ import java.io.PrintStream
import java.net.URI

import astraea.spark.rasterframes._
import astraea.spark.rasterframes.util.MultibandRender.ColorRampProfile
import astraea.spark.rasterframes.util._
import geotrellis.proj4.{LatLng, WebMercator}
import geotrellis.raster._
Expand Down Expand Up @@ -116,13 +115,13 @@ trait SlippyExport extends MethodExtensions[RasterFrame]{

val (zoom, reprojected) = inputRDD.reproject(WebMercator, layoutScheme, Bilinear)
val writer = new HadoopSlippyTileWriter[MultibandTile](dest.toASCIIString + "/" + tileDirName, "png")({ (_, tile) =>
require(tile.bandCount >= 3 || renderer.isInstanceOf[ColorRampProfile],
"Single-band and dual-band RasterFrames require a ColorRampProfile for rendering")
//require(tile.bandCount >= 3 || renderer.isInstanceOf[ColorRampProfile],
// "Single-band and dual-band RasterFrames require a ColorRampProfile for rendering")
val png = renderer.render(tile)
png.bytes
})

val center = reprojected.metadata.extent.center.reproject(WebMercator, LatLng)
val center = reprojected.metadata.extent.center

SlippyExport.writeHtml(dest, sc.hadoopConfiguration, Map(
"maxZoom" -> zoom.toString,
Expand Down
1 change: 1 addition & 0 deletions pyrasterframes/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,7 @@ ivyPaths in pysparkCmd := ivyPaths.value.withIvyHome(target.value / "ivy")
pyTest := {
val _ = assembly.value
val s = streams.value
s.log.info("Running python tests...")
val wd = pythonSource.value
Process("python setup.py test", wd) ! s.log
}
Expand Down
5 changes: 2 additions & 3 deletions pyrasterframes/python/geomesa_pyspark/spark.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,11 @@
from pyspark.sql.types import UserDefinedType
from pyspark.sql import Row
from pyspark.sql.types import *
from pyrasterframes.context import _checked_context
from pyrasterframes.context import RFContext


__all__ = ['GeometryUDT']


class GeometryUDT(UserDefinedType):
"""User-defined type (UDT).
Expand All @@ -41,4 +40,4 @@ def serialize(self, obj):
return Row(obj.toBytes)

def deserialize(self, datum):
return _checked_context().generateGeometry(datum[0])
return RFContext._jvm_mirror().generateGeometry(datum[0])
5 changes: 1 addition & 4 deletions pyrasterframes/python/pyrasterframes/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,15 @@
appended to PySpark classes.
"""


from __future__ import absolute_import
from pyspark.sql.types import UserDefinedType
from pyspark import SparkContext
from pyspark.sql import SparkSession, DataFrame, DataFrameReader
from pyspark.sql.types import *
from pyspark.sql.column import _to_java_column

# Import RasterFrame types and functions
from .types import *
from . import rasterfunctions

from .context import RFContext

__all__ = ['RasterFrame', 'TileExploder']

Expand Down
46 changes: 39 additions & 7 deletions pyrasterframes/python/pyrasterframes/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,42 @@

from pyspark import SparkContext

def _checked_context():
""" Get the active SparkContext and throw an error if it is not enabled for RasterFrames."""
sc = SparkContext._active_spark_context
if not hasattr(sc, '_rf_context'):
raise AttributeError(
"RasterFrames have not been enabled for the active session. Call 'SparkSession.withRasterFrames()'.")
return sc._rf_context._jrfctx
__all__ = ['RFContext']


class RFContext(object):
"""
Entrypoint to RasterFrames services
"""
def __init__(self, spark_session):
self._spark_session = spark_session
self._gateway = spark_session.sparkContext._gateway
self._jvm = self._gateway.jvm
jsess = self._spark_session._jsparkSession
self._jrfctx = self._jvm.astraea.spark.rasterframes.py.PyRFContext(jsess)

def list_to_seq(self, py_list):
conv = self.lookup('listToSeq')
return conv(py_list)

def lookup(self, function_name):
return getattr(self._jrfctx, function_name)

@staticmethod
def active():
"""
Get the active Python RFContext and throw an error if it is not enabled for RasterFrames.
"""
sc = SparkContext._active_spark_context
if not hasattr(sc, '_rf_context'):
raise AttributeError(
"RasterFrames have not been enabled for the active session. Call 'SparkSession.withRasterFrames()'.")
return sc._rf_context

@staticmethod
def _jvm_mirror():
"""
Get the active Scala PyRFContext and throw an error if it is not enabled for RasterFrames.
"""
return RFContext.active()._jrfctx

40 changes: 25 additions & 15 deletions pyrasterframes/python/pyrasterframes/rasterfunctions.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,14 @@
from __future__ import absolute_import
from pyspark.sql.types import *
from pyspark.sql.column import Column, _to_java_column
from .context import _checked_context
from .context import RFContext


THIS_MODULE = 'pyrasterframes'


def _context_call(name, *args):
f = getattr(_checked_context(), name)
f = RFContext.active().lookup(name)
return f(*args)


Expand All @@ -27,8 +27,7 @@ def _celltype(cellTypeStr):
def _create_assembleTile():
""" Create a function mapping to the Scala implementation."""
def _(colIndex, rowIndex, cellData, numCols, numRows, cellType):
ctx = _checked_context()
jfcn = getattr(ctx, 'assembleTile')
jfcn = RFContext.active().lookup('assembleTile')
return Column(jfcn(_to_java_column(colIndex), _to_java_column(rowIndex), _to_java_column(cellData), numCols, numRows, _celltype(cellType)))
_.__name__ = 'assembleTile'
_.__doc__ = "Create a Tile from a column of cell data with location indices"
Expand All @@ -39,7 +38,7 @@ def _(colIndex, rowIndex, cellData, numCols, numRows, cellType):
def _create_arrayToTile():
""" Create a function mapping to the Scala implementation."""
def _(arrayCol, numCols, numRows):
jfcn = getattr(_checked_context(), 'arrayToTile')
jfcn = RFContext.active().lookup('arrayToTile')
return Column(jfcn(_to_java_column(arrayCol), numCols, numRows))
_.__name__ = 'arrayToTile'
_.__doc__ = "Convert array in `arrayCol` into a Tile of dimensions `numCols` and `numRows'"
Expand All @@ -50,7 +49,7 @@ def _(arrayCol, numCols, numRows):
def _create_convertCellType():
""" Create a function mapping to the Scala implementation."""
def _(tileCol, cellType):
jfcn = getattr(_checked_context(), 'convertCellType')
jfcn = RFContext.active().lookup('convertCellType')
return Column(jfcn(_to_java_column(tileCol), _celltype(cellType)))
_.__name__ = 'convertCellType'
_.__doc__ = "Convert the numeric type of the Tiles in `tileCol`"
Expand All @@ -61,7 +60,7 @@ def _(tileCol, cellType):
def _create_makeConstantTile():
""" Create a function mapping to the Scala implementation."""
def _(value, cols, rows, cellType):
jfcn = getattr(_checked_context(), 'makeConstantTile')
jfcn = RFContext.active().lookup('makeConstantTile')
return Column(jfcn(value, cols, rows, cellType))
_.__name__ = 'makeConstantTile'
_.__doc__ = "Constructor for constant tile column"
Expand All @@ -72,7 +71,7 @@ def _(value, cols, rows, cellType):
def _create_tileZeros():
""" Create a function mapping to the Scala implementation."""
def _(cols, rows, cellType = 'float64'):
jfcn = getattr(_checked_context(), 'tileZeros')
jfcn = RFContext.active().lookup('tileZeros')
return Column(jfcn(cols, rows, cellType))
_.__name__ = 'tileZeros'
_.__doc__ = "Create column of constant tiles of zero"
Expand All @@ -83,7 +82,7 @@ def _(cols, rows, cellType = 'float64'):
def _create_tileOnes():
""" Create a function mapping to the Scala implementation."""
def _(cols, rows, cellType = 'float64'):
jfcn = getattr(_checked_context(), 'tileOnes')
jfcn = RFContext.active().lookup('tileOnes')
return Column(jfcn(cols, rows, cellType))
_.__name__ = 'tileOnes'
_.__doc__ = "Create column of constant tiles of one"
Expand All @@ -94,7 +93,7 @@ def _(cols, rows, cellType = 'float64'):
def _create_rasterize():
""" Create a function mapping to the Scala rasterize function. """
def _(geometryCol, boundsCol, valueCol, numCols, numRows):
jfcn = getattr(_checked_context(), 'rasterize')
jfcn = RFContext.active().lookup('rasterize')
return Column(jfcn(_to_java_column(geometryCol), _to_java_column(boundsCol), _to_java_column(valueCol), numCols, numRows))
_.__name__ = 'rasterize'
_.__doc__ = 'Create a tile where cells in the grid defined by cols, rows, and bounds are filled with the given value.'
Expand All @@ -104,7 +103,7 @@ def _(geometryCol, boundsCol, valueCol, numCols, numRows):
def _create_reproject_geometry():
""" Create a function mapping to the Scala reprojectGeometry function. """
def _(geometryCol, srcCRSName, dstCRSName):
jfcn = getattr(_checked_context(), 'reprojectGeometry')
jfcn = RFContext.active().lookup('reprojectGeometry')
return Column(jfcn(_to_java_column(geometryCol), srcCRSName, dstCRSName))
_.__name__ = 'reprojectGeometry'
_.__doc__ = """Reproject a column of geometry given the CRS names of the source and destination.
Expand All @@ -114,6 +113,17 @@ def _(geometryCol, srcCRSName, dstCRSName):
_.__module__ = THIS_MODULE
return _

def _create_explode_tiles():
""" Create a function mapping to Scala explodeTiles function """
def _(*args):
jfcn = RFContext.active().lookup('explodeTiles')
jcols = [_to_java_column(arg) for arg in args]
return Column(jfcn(RFContext.active().list_to_seq(jcols)))
_.__name__ = 'explodeTiles'
_.__doc__ = 'Create a row for each cell in Tile.'
_.__module__ = THIS_MODULE
return _

_rf_unique_functions = {
'assembleTile': _create_assembleTile(),
'arrayToTile': _create_arrayToTile(),
Expand All @@ -123,7 +133,8 @@ def _(geometryCol, srcCRSName, dstCRSName):
'tileOnes': _create_tileOnes(),
'cellTypes': lambda: _context_call('cellTypes'),
'rasterize': _create_rasterize(),
'reprojectGeometry': _create_reproject_geometry()
'reprojectGeometry': _create_reproject_geometry(),
'explodeTiles': _create_explode_tiles()
}


Expand Down Expand Up @@ -154,7 +165,6 @@ def _(geometryCol, srcCRSName, dstCRSName):

_rf_column_functions = {
# ------- RasterFrames functions -------
'explodeTiles': 'Create a row for each cell in Tile.',
'tileDimensions': 'Query the number of (cols, rows) in a Tile.',
'envelope': 'Extracts the bounding box (envelope) of the geometry.',
'tileToIntArray': 'Flattens Tile into an array of integers.',
Expand Down Expand Up @@ -280,7 +290,7 @@ def _(geometryCol, srcCRSName, dstCRSName):
def _create_column_function(name, doc=""):
""" Create a mapping to Scala UDF for a column function by name"""
def _(*args):
jfcn = getattr(_checked_context(), name)
jfcn = RFContext.active().lookup(name)
jcols = [_to_java_column(arg) for arg in args]
return Column(jfcn(*jcols))
_.__name__ = name
Expand All @@ -292,7 +302,7 @@ def _(*args):
def _create_columnScalarFunction(name, doc=""):
""" Create a mapping to Scala UDF for a (column, scalar) -> column function by name"""
def _(col, scalar):
jfcn = getattr(_checked_context(), name)
jfcn = RFContext.active().lookup(name)
return Column(jfcn(_to_java_column(col), scalar))
_.__name__ = name
_.__doc__ = doc
Expand Down
Loading

0 comments on commit 405c35e

Please sign in to comment.