diff --git a/core/src/main/scala/org/locationtech/rasterframes/expressions/BinaryLocalRasterOp.scala b/core/src/main/scala/org/locationtech/rasterframes/expressions/BinaryLocalRasterOp.scala
index bd55345fa..9994fdef1 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/expressions/BinaryLocalRasterOp.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/expressions/BinaryLocalRasterOp.scala
@@ -21,18 +21,22 @@ package org.locationtech.rasterframes.expressions
 
-import org.locationtech.rasterframes.encoders.CatalystSerializer._
-import org.locationtech.rasterframes.expressions.DynamicExtractors._
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import geotrellis.raster.Tile
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions.BinaryExpression
 import org.apache.spark.sql.rf.TileUDT
 import org.apache.spark.sql.types.DataType
+import org.locationtech.rasterframes.encoders.CatalystSerializer._
+import org.locationtech.rasterframes.expressions.DynamicExtractors._
+import org.slf4j.LoggerFactory
 
 /** Operation combining two tiles or a tile and a scalar into a new tile. */
-trait BinaryLocalRasterOp extends BinaryExpression with LazyLogging {
+trait BinaryLocalRasterOp extends BinaryExpression {
+
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
 
   override def dataType: DataType = left.dataType
diff --git a/core/src/main/scala/org/locationtech/rasterframes/expressions/BinaryRasterOp.scala b/core/src/main/scala/org/locationtech/rasterframes/expressions/BinaryRasterOp.scala
index 690658064..2c33eae12 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/expressions/BinaryRasterOp.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/expressions/BinaryRasterOp.scala
@@ -21,18 +21,20 @@ package org.locationtech.rasterframes.expressions
 
-import org.locationtech.rasterframes.expressions.DynamicExtractors.tileExtractor
-import org.locationtech.rasterframes.encoders.CatalystSerializer._
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import geotrellis.raster.Tile
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions.BinaryExpression
 import org.apache.spark.sql.rf.TileUDT
 import org.apache.spark.sql.types.DataType
+import org.locationtech.rasterframes.encoders.CatalystSerializer._
+import org.locationtech.rasterframes.expressions.DynamicExtractors.tileExtractor
+import org.slf4j.LoggerFactory
 
 /** Operation combining two tiles into a new tile. */
-trait BinaryRasterOp extends BinaryExpression with LazyLogging {
+trait BinaryRasterOp extends BinaryExpression {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   override def dataType: DataType = left.dataType
diff --git a/core/src/main/scala/org/locationtech/rasterframes/expressions/UnaryLocalRasterOp.scala b/core/src/main/scala/org/locationtech/rasterframes/expressions/UnaryLocalRasterOp.scala
index 46969c226..a410f47f8 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/expressions/UnaryLocalRasterOp.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/expressions/UnaryLocalRasterOp.scala
@@ -21,18 +21,20 @@ package org.locationtech.rasterframes.expressions
 
-import org.locationtech.rasterframes.encoders.CatalystSerializer._
-import org.locationtech.rasterframes.expressions.DynamicExtractors._
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import geotrellis.raster.Tile
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions.UnaryExpression
 import org.apache.spark.sql.rf.TileUDT
 import org.apache.spark.sql.types.DataType
+import org.locationtech.rasterframes.encoders.CatalystSerializer._
+import org.locationtech.rasterframes.expressions.DynamicExtractors._
+import org.slf4j.LoggerFactory
 
 /** Operation on a tile returning a tile. */
-trait UnaryLocalRasterOp extends UnaryExpression with LazyLogging {
+trait UnaryLocalRasterOp extends UnaryExpression {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   override def dataType: DataType = child.dataType
diff --git a/core/src/main/scala/org/locationtech/rasterframes/expressions/generators/RasterSourceToTiles.scala b/core/src/main/scala/org/locationtech/rasterframes/expressions/generators/RasterSourceToTiles.scala
index 32b3f4b11..595bac20d 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/expressions/generators/RasterSourceToTiles.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/expressions/generators/RasterSourceToTiles.scala
@@ -21,19 +21,20 @@ package org.locationtech.rasterframes.expressions.generators
 
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
 import org.apache.spark.sql.types.{DataType, StructField, StructType}
 import org.apache.spark.sql.{Column, TypedColumn}
 import org.locationtech.rasterframes
+import org.locationtech.rasterframes.RasterSourceType
 import org.locationtech.rasterframes.encoders.CatalystSerializer._
 import org.locationtech.rasterframes.expressions.generators.RasterSourceToRasterRefs.bandNames
 import org.locationtech.rasterframes.model.TileDimensions
 import org.locationtech.rasterframes.tiles.ProjectedRasterTile
 import org.locationtech.rasterframes.util._
-import org.locationtech.rasterframes.RasterSourceType
+import org.slf4j.LoggerFactory
 
 import scala.util.Try
 import scala.util.control.NonFatal
@@ -45,7 +46,9 @@ import scala.util.control.NonFatal
  * @since 9/6/18
  */
 case class RasterSourceToTiles(children: Seq[Expression], bandIndexes: Seq[Int], subtileDims: Option[TileDimensions] = None) extends Expression
-  with Generator with CodegenFallback with ExpectsInputTypes with LazyLogging {
+  with Generator with CodegenFallback with ExpectsInputTypes {
+
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   override def inputTypes: Seq[DataType] = Seq.fill(children.size)(RasterSourceType)
   override def nodeName: String = "rf_raster_source_to_tiles"
diff --git a/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/Mask.scala b/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/Mask.scala
index 106a52a7b..69dac94c7 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/Mask.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/Mask.scala
@@ -21,10 +21,7 @@ package org.locationtech.rasterframes.expressions.transformers
 
-import org.locationtech.rasterframes.encoders.CatalystSerializer._
-import org.locationtech.rasterframes.expressions.DynamicExtractors._
-import org.locationtech.rasterframes.expressions.row
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import geotrellis.raster
 import geotrellis.raster.Tile
 import geotrellis.raster.mapalgebra.local.{Defined, InverseMask => gtInverseMask, Mask => gtMask}
@@ -35,9 +32,16 @@ import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionDescript
 import org.apache.spark.sql.rf.TileUDT
 import org.apache.spark.sql.types.DataType
 import org.apache.spark.sql.{Column, TypedColumn}
+import org.locationtech.rasterframes.encoders.CatalystSerializer._
+import org.locationtech.rasterframes.expressions.DynamicExtractors._
+import org.locationtech.rasterframes.expressions.row
+import org.slf4j.LoggerFactory
 
 abstract class Mask(val left: Expression, val middle: Expression, val right: Expression, inverse: Boolean)
-  extends TernaryExpression with CodegenFallback with Serializable with LazyLogging {
+  extends TernaryExpression with CodegenFallback with Serializable {
+
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
 
   override def children: Seq[Expression] = Seq(left, middle, right)
diff --git a/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/RasterRefToTile.scala b/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/RasterRefToTile.scala
index f0c82c6de..3c699099a 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/RasterRefToTile.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/RasterRefToTile.scala
@@ -21,16 +21,17 @@ package org.locationtech.rasterframes.expressions.transformers
 
-import org.locationtech.rasterframes.encoders.CatalystSerializer._
-import org.locationtech.rasterframes.expressions.row
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
 import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, Expression, UnaryExpression}
 import org.apache.spark.sql.rf._
 import org.apache.spark.sql.types.DataType
 import org.apache.spark.sql.{Column, TypedColumn}
+import org.locationtech.rasterframes.encoders.CatalystSerializer._
+import org.locationtech.rasterframes.expressions.row
 import org.locationtech.rasterframes.ref.RasterRef
 import org.locationtech.rasterframes.tiles.ProjectedRasterTile
+import org.slf4j.LoggerFactory
 
 /**
  * Realizes a RasterRef into a Tile.
@@ -38,7 +39,9 @@ import org.locationtech.rasterframes.tiles.ProjectedRasterTile
  * @since 11/2/18
  */
 case class RasterRefToTile(child: Expression) extends UnaryExpression
-  with CodegenFallback with ExpectsInputTypes with LazyLogging {
+  with CodegenFallback with ExpectsInputTypes {
+
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   override def nodeName: String = "raster_ref_to_tile"
diff --git a/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/SetNoDataValue.scala b/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/SetNoDataValue.scala
index 39e9c133e..eddca3508 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/SetNoDataValue.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/SetNoDataValue.scala
@@ -21,7 +21,7 @@ package org.locationtech.rasterframes.expressions.transformers
 
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import org.apache.spark.sql.Column
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
@@ -33,6 +33,7 @@ import org.apache.spark.sql.types._
 import org.locationtech.rasterframes.encoders.CatalystSerializer._
 import org.locationtech.rasterframes.expressions.DynamicExtractors._
 import org.locationtech.rasterframes.expressions.row
+import org.slf4j.LoggerFactory
 
 @ExpressionDescription(
   usage = "_FUNC_(tile, value) - Set the NoData value for the given tile.",
@@ -45,7 +46,8 @@ import org.locationtech.rasterframes.expressions.row
     > SELECT _FUNC_(tile, 1.5);
        ..."""
 )
-case class SetNoDataValue(left: Expression, right: Expression) extends BinaryExpression with CodegenFallback with LazyLogging {
+case class SetNoDataValue(left: Expression, right: Expression) extends BinaryExpression with CodegenFallback {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   override val nodeName: String = "rf_with_no_data"
   override def dataType: DataType = left.dataType
diff --git a/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/URIToRasterSource.scala b/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/URIToRasterSource.scala
index 903e62dde..96af62149 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/URIToRasterSource.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/expressions/transformers/URIToRasterSource.scala
@@ -23,14 +23,15 @@ package org.locationtech.rasterframes.expressions.transformers
 
 import java.net.URI
 
-import org.locationtech.rasterframes.RasterSourceType
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
 import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, Expression, UnaryExpression}
 import org.apache.spark.sql.types.{DataType, StringType}
 import org.apache.spark.sql.{Column, TypedColumn}
 import org.apache.spark.unsafe.types.UTF8String
+import org.locationtech.rasterframes.RasterSourceType
 import org.locationtech.rasterframes.ref.RasterSource
+import org.slf4j.LoggerFactory
 
 /**
  * Catalyst generator to convert a geotiff download URL into a series of rows
@@ -39,7 +40,9 @@ import org.locationtech.rasterframes.ref.RasterSource
  * @since 5/4/18
  */
 case class URIToRasterSource(override val child: Expression)
-  extends UnaryExpression with ExpectsInputTypes with CodegenFallback with LazyLogging {
+  extends UnaryExpression with ExpectsInputTypes with CodegenFallback {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
 
   override def nodeName: String = "rf_uri_to_raster_source"
diff --git a/core/src/main/scala/org/locationtech/rasterframes/extensions/RasterFrameLayerMethods.scala b/core/src/main/scala/org/locationtech/rasterframes/extensions/RasterFrameLayerMethods.scala
index 28f2839ed..e9d375f12 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/extensions/RasterFrameLayerMethods.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/extensions/RasterFrameLayerMethods.scala
@@ -23,8 +23,7 @@ package org.locationtech.rasterframes.extensions
 
 import java.time.ZonedDateTime
 
-import org.locationtech.rasterframes.util._
-import org.locationtech.rasterframes.RasterFrameLayer
+import com.typesafe.scalalogging.Logger
 import geotrellis.proj4.CRS
 import geotrellis.raster.resample.{NearestNeighbor, ResampleMethod}
 import geotrellis.raster.{MultibandTile, ProjectedRaster, Tile, TileLayout}
@@ -37,12 +36,13 @@ import org.apache.spark.annotation.Experimental
 import org.apache.spark.sql._
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.types.{Metadata, TimestampType}
-import spray.json._
-import org.locationtech.rasterframes.encoders.StandardEncoders._
+import org.locationtech.rasterframes.{MetadataKeys, RasterFrameLayer}
 import org.locationtech.rasterframes.encoders.StandardEncoders.PrimitiveEncoders._
-import com.typesafe.scalalogging.LazyLogging
-import org.locationtech.rasterframes.MetadataKeys
+import org.locationtech.rasterframes.encoders.StandardEncoders._
 import org.locationtech.rasterframes.tiles.ShowableTile
+import org.locationtech.rasterframes.util._
+import org.slf4j.LoggerFactory
+import spray.json._
 
 import scala.reflect.runtime.universe._
 
@@ -52,9 +52,11 @@ import scala.reflect.runtime.universe._
  * @since 7/18/17
  */
 trait RasterFrameLayerMethods extends MethodExtensions[RasterFrameLayer]
-  with RFSpatialColumnMethods with MetadataKeys with LazyLogging {
+  with RFSpatialColumnMethods with MetadataKeys {
   import Implicits.{WithDataFrameMethods, WithRasterFrameLayerMethods}
 
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
   /**
    * A convenience over `DataFrame.withColumnRenamed` whereby the `RasterFrameLayer` type is maintained.
   */
diff --git a/core/src/main/scala/org/locationtech/rasterframes/rasterframes.scala b/core/src/main/scala/org/locationtech/rasterframes/rasterframes.scala
index 1517e8f0e..f22753c1e 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/rasterframes.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/rasterframes.scala
@@ -21,9 +21,8 @@ package org.locationtech
 
 import com.typesafe.config.ConfigFactory
-import com.typesafe.scalalogging.LazyLogging
-import geotrellis.raster.isData
-import geotrellis.raster.{Tile, TileFeature}
+import com.typesafe.scalalogging.Logger
+import geotrellis.raster.{Tile, TileFeature, isData}
 import geotrellis.spark.{ContextRDD, Metadata, SpaceTimeKey, SpatialKey, TileLayerMetadata}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.rf.{RasterSourceUDT, TileUDT}
@@ -33,6 +32,7 @@ import org.locationtech.rasterframes.encoders.StandardEncoders
 import org.locationtech.rasterframes.extensions.Implicits
 import org.locationtech.rasterframes.model.TileDimensions
 import org.locationtech.rasterframes.util.ZeroSevenCompatibilityKit
+import org.slf4j.LoggerFactory
 import shapeless.tag.@@
 
 import scala.reflect.runtime.universe._
@@ -43,8 +43,10 @@ package object rasterframes extends StandardColumns
   with Implicits
   with rasterframes.jts.Implicits
   with StandardEncoders
-  with DataFrameFunctions.Library
-  with LazyLogging {
+  with DataFrameFunctions.Library {
+
+  // Don't make this a `lazy val`... breaks Spark assemblies for some reason.
+  protected def logger: Logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   @transient
   private[rasterframes]
diff --git a/core/src/main/scala/org/locationtech/rasterframes/ref/RangeReaderRasterSource.scala b/core/src/main/scala/org/locationtech/rasterframes/ref/RangeReaderRasterSource.scala
index 90df001bd..d4f7aa6b2 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/ref/RangeReaderRasterSource.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/ref/RangeReaderRasterSource.scala
@@ -21,16 +21,19 @@ package org.locationtech.rasterframes.ref
 
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import geotrellis.proj4.CRS
-import geotrellis.raster.{CellType, GridBounds, MultibandTile, Raster}
 import geotrellis.raster.io.geotiff.Tags
 import geotrellis.raster.io.geotiff.reader.GeoTiffReader
+import geotrellis.raster.{CellType, GridBounds, MultibandTile, Raster}
 import geotrellis.util.RangeReader
 import geotrellis.vector.Extent
 import org.locationtech.rasterframes.util.GeoTiffInfoSupport
+import org.slf4j.LoggerFactory
+
+trait RangeReaderRasterSource extends RasterSource with GeoTiffInfoSupport {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
-trait RangeReaderRasterSource extends RasterSource with GeoTiffInfoSupport with LazyLogging {
   protected def rangeReader: RangeReader
 
   private def realInfo =
diff --git a/core/src/main/scala/org/locationtech/rasterframes/util/package.scala b/core/src/main/scala/org/locationtech/rasterframes/util/package.scala
index 213596424..3186c4877 100644
--- a/core/src/main/scala/org/locationtech/rasterframes/util/package.scala
+++ b/core/src/main/scala/org/locationtech/rasterframes/util/package.scala
@@ -49,8 +49,8 @@ import scala.Boolean.box
  * @since 12/18/17
  */
 package object util extends DataFrameRenderers {
-  @transient
-  protected lazy val logger: Logger =
+  // Don't make this a `lazy val`... breaks Spark assemblies for some reason.
+  protected def logger: Logger =
     Logger(LoggerFactory.getLogger("org.locationtech.rasterframes"))
 
   import reflect.ClassTag
diff --git a/core/src/test/scala/org/locationtech/rasterframes/TestEnvironment.scala b/core/src/test/scala/org/locationtech/rasterframes/TestEnvironment.scala
index aa2253580..01fbffcd0 100644
--- a/core/src/test/scala/org/locationtech/rasterframes/TestEnvironment.scala
+++ b/core/src/test/scala/org/locationtech/rasterframes/TestEnvironment.scala
@@ -22,7 +22,7 @@ package org.locationtech.rasterframes
 
 import java.nio.file.{Files, Path}
 
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import geotrellis.raster.testkit.RasterMatchers
 import org.apache.spark.sql._
 import org.apache.spark.sql.functions.col
@@ -33,9 +33,11 @@ import org.locationtech.rasterframes.util._
 import org.scalactic.Tolerance
 import org.scalatest._
 import org.scalatest.matchers.{MatchResult, Matcher}
+import org.slf4j.LoggerFactory
 
 trait TestEnvironment extends FunSpec
-  with Matchers with Inspectors with Tolerance with RasterMatchers with LazyLogging {
+  with Matchers with Inspectors with Tolerance with RasterMatchers {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   lazy val scratchDir: Path = {
     val outputDir = Files.createTempDirectory("rf-scratch-")
diff --git a/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotiff/GeoTiffDataSource.scala b/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotiff/GeoTiffDataSource.scala
index 64c12eaca..9e2d8dcb3 100644
--- a/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotiff/GeoTiffDataSource.scala
+++ b/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotiff/GeoTiffDataSource.scala
@@ -27,7 +27,7 @@ import _root_.geotrellis.proj4.CRS
 import _root_.geotrellis.raster.io.geotiff.compression._
 import _root_.geotrellis.raster.io.geotiff.tags.codes.ColorSpace
 import _root_.geotrellis.raster.io.geotiff.{GeoTiffOptions, MultibandGeoTiff, Tags, Tiled}
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import org.apache.spark.sql._
 import org.apache.spark.sql.sources.{BaseRelation, CreatableRelationProvider, DataSourceRegister, RelationProvider}
 import org.locationtech.rasterframes._
@@ -35,14 +35,18 @@ import org.locationtech.rasterframes.datasource._
 import org.locationtech.rasterframes.expressions.aggregates.TileRasterizerAggregate
 import org.locationtech.rasterframes.model.{LazyCRS, TileDimensions}
 import org.locationtech.rasterframes.util._
+import org.slf4j.LoggerFactory
 
 /**
  * Spark SQL data source over GeoTIFF files.
  */
 class GeoTiffDataSource
-  extends DataSourceRegister with RelationProvider with CreatableRelationProvider with DataSourceOptions with LazyLogging {
+  extends DataSourceRegister with RelationProvider with CreatableRelationProvider with DataSourceOptions {
   import GeoTiffDataSource._
 
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
+
   def shortName() = GeoTiffDataSource.SHORT_NAME
 
   def createRelation(sqlContext: SQLContext, parameters: Map[String, String]) = {
diff --git a/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotiff/GeoTiffRelation.scala b/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotiff/GeoTiffRelation.scala
index b08ebc830..81aab93af 100644
--- a/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotiff/GeoTiffRelation.scala
+++ b/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotiff/GeoTiffRelation.scala
@@ -23,10 +23,7 @@ package org.locationtech.rasterframes.datasource.geotiff
 
 import java.net.URI
 
-import org.locationtech.rasterframes._
-import org.locationtech.rasterframes.encoders.CatalystSerializer._
-import org.locationtech.rasterframes.util._
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import geotrellis.proj4.CRS
 import geotrellis.spark._
 import geotrellis.spark.io._
@@ -40,6 +37,10 @@ import org.apache.spark.sql.rf.TileUDT
 import org.apache.spark.sql.sources._
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.{Row, SQLContext}
+import org.locationtech.rasterframes._
+import org.locationtech.rasterframes.encoders.CatalystSerializer._
+import org.locationtech.rasterframes.util._
+import org.slf4j.LoggerFactory
 
 /**
  * Spark SQL data source over a single GeoTiff file. Works best with CoG compliant ones.
@@ -47,7 +48,9 @@ import org.apache.spark.sql.{Row, SQLContext}
  * @since 1/14/18
  */
 case class GeoTiffRelation(sqlContext: SQLContext, uri: URI) extends BaseRelation
-  with PrunedScan with GeoTiffInfoSupport with LazyLogging {
+  with PrunedScan with GeoTiffInfoSupport {
+
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   lazy val (info, tileLayerMetadata) = extractGeoTiffLayout(
     HdfsRangeReader(new Path(uri), sqlContext.sparkContext.hadoopConfiguration)
diff --git a/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotrellis/GeoTrellisRelation.scala b/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotrellis/GeoTrellisRelation.scala
index 343f4683d..49a7a0af0 100644
--- a/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotrellis/GeoTrellisRelation.scala
+++ b/datasource/src/main/scala/org/locationtech/rasterframes/datasource/geotrellis/GeoTrellisRelation.scala
@@ -26,16 +26,7 @@ import java.net.URI
 import java.sql.{Date, Timestamp}
 import java.time.{ZoneOffset, ZonedDateTime}
 
-import org.locationtech.rasterframes._
-import org.locationtech.rasterframes.datasource.geotrellis.TileFeatureSupport._
-import org.locationtech.rasterframes.rules.splitFilters
-import org.locationtech.rasterframes.rules.SpatialFilters.{Contains => sfContains, Intersects => sfIntersects}
-import org.locationtech.rasterframes.rules.SpatialRelationReceiver
-import org.locationtech.rasterframes.rules.TemporalFilters.{BetweenDates, BetweenTimes}
-import org.locationtech.rasterframes.util.SubdivideSupport._
-import org.locationtech.rasterframes.util._
-import com.typesafe.scalalogging.LazyLogging
-import org.locationtech.jts.geom
+import com.typesafe.scalalogging.Logger
 import geotrellis.raster.{CellGrid, MultibandTile, Tile, TileFeature}
 import geotrellis.spark.io._
 import geotrellis.spark.io.avro.AvroRecordCodec
@@ -51,7 +42,16 @@ import org.apache.spark.sql.rf.TileUDT
 import org.apache.spark.sql.sources._
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.{Row, SQLContext, sources}
+import org.locationtech.jts.geom
+import org.locationtech.rasterframes._
 import org.locationtech.rasterframes.datasource.geotrellis.GeoTrellisRelation.{C, TileFeatureData}
+import org.locationtech.rasterframes.datasource.geotrellis.TileFeatureSupport._
+import org.locationtech.rasterframes.rules.SpatialFilters.{Contains => sfContains, Intersects => sfIntersects}
+import org.locationtech.rasterframes.rules.TemporalFilters.{BetweenDates, BetweenTimes}
+import org.locationtech.rasterframes.rules.{SpatialRelationReceiver, splitFilters}
+import org.locationtech.rasterframes.util.SubdivideSupport._
+import org.locationtech.rasterframes.util._
+import org.slf4j.LoggerFactory
 
 import scala.reflect.ClassTag
 import scala.reflect.runtime.universe._
@@ -66,7 +66,9 @@ case class GeoTrellisRelation(sqlContext: SQLContext,
   failOnUnrecognizedFilter: Boolean = false,
   tileSubdivisions: Option[Int] = None,
   filters: Seq[Filter] = Seq.empty)
-  extends BaseRelation with PrunedScan with SpatialRelationReceiver[GeoTrellisRelation] with LazyLogging {
+  extends BaseRelation with PrunedScan with SpatialRelationReceiver[GeoTrellisRelation] {
+
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   implicit val sc = sqlContext.sparkContext
diff --git a/docs/src/main/paradox/release-notes.md b/docs/src/main/paradox/release-notes.md
index 826ee018e..0d7ed4c9c 100644
--- a/docs/src/main/paradox/release-notes.md
+++ b/docs/src/main/paradox/release-notes.md
@@ -4,6 +4,8 @@
 
 ### 0.8.3
 
+* Updated to GeoTrellis 2.3.3 and Proj4j 1.1.0.
+* Fixed issues with `LazyLogging` and shading assemblies ([#293](https://github.com/locationtech/rasterframes/issues/293))
 * Updated `rf_crs` to accept string columns containing CRS specifications. ([#366](https://github.com/locationtech/rasterframes/issues/366))
 * _Breaking_ (potentially): removed `pyrasterframes.create_spark_session` in lieu of `pyrasterframes.utils.create_rf_spark_session`
diff --git a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/CachedDatasetRelation.scala b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/CachedDatasetRelation.scala
index 5b162b940..1fac7699a 100644
--- a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/CachedDatasetRelation.scala
+++ b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/CachedDatasetRelation.scala
@@ -21,7 +21,6 @@ package org.locationtech.rasterframes.experimental.datasource
 
-import com.typesafe.scalalogging.LazyLogging
 import org.apache.hadoop.fs.{FileSystem, Path => HadoopPath}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.sources.BaseRelation
@@ -33,7 +32,7 @@ import org.locationtech.rasterframes.util._
 *
 * @since 8/24/18
 */
-trait CachedDatasetRelation extends ResourceCacheSupport { self: BaseRelation with LazyLogging ⇒
+trait CachedDatasetRelation extends ResourceCacheSupport { self: BaseRelation ⇒
   protected def cacheFile: HadoopPath
   protected def constructDataset: Dataset[Row]
diff --git a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/DownloadSupport.scala b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/DownloadSupport.scala
index f5e0dff64..e66d8f659 100644
--- a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/DownloadSupport.scala
+++ b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/DownloadSupport.scala
@@ -24,10 +24,11 @@ package org.locationtech.rasterframes.experimental.datasource
 import java.io._
 import java.net
 
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import org.apache.commons.httpclient._
 import org.apache.commons.httpclient.methods._
 import org.apache.commons.httpclient.params.HttpMethodParams
+import org.slf4j.LoggerFactory
 import spray.json._
 
@@ -37,7 +38,9 @@ import spray.json._
 *
 * @since 5/5/18
 */
-trait DownloadSupport { self: LazyLogging ⇒
+trait DownloadSupport {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
   private def applyMethodParams[M <: HttpMethodBase](method: M): M = {
     method.getParams.setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, true))
     method.getParams.setIntParameter(HttpMethodParams.BUFFER_WARN_TRIGGER_LIMIT, 1024 * 1024 * 100)
diff --git a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/ResourceCacheSupport.scala b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/ResourceCacheSupport.scala
index 0a99f6017..2f4d72fa5 100644
--- a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/ResourceCacheSupport.scala
+++ b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/ResourceCacheSupport.scala
@@ -24,7 +24,6 @@ package org.locationtech.rasterframes.experimental.datasource
 import java.net.URI
 import java.time.{Duration, Instant}
 
-import com.typesafe.scalalogging.LazyLogging
 import org.apache.commons.io.FilenameUtils
 import org.apache.hadoop.fs.{FileSystem, Path => HadoopPath}
 import org.apache.hadoop.io.MD5Hash
@@ -38,7 +37,8 @@ import scala.util.control.NonFatal
 *
 * @since 5/4/18
 */
-trait ResourceCacheSupport extends DownloadSupport { self: LazyLogging ⇒
+trait ResourceCacheSupport extends DownloadSupport {
+
   def maxCacheFileAgeHours: Int = sys.props.get("rasterframes.resource.age.max")
     .flatMap(v ⇒ Try(v.toInt).toOption)
     .getOrElse(24)
diff --git a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/L8CatalogDataSource.scala b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/L8CatalogDataSource.scala
index bdc35d650..32c52bb59 100644
--- a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/L8CatalogDataSource.scala
+++ b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/L8CatalogDataSource.scala
@@ -24,7 +24,6 @@ package org.locationtech.rasterframes.experimental.datasource.awspds
 import java.io.FileNotFoundException
 import java.net.URI
 
-import com.typesafe.scalalogging.LazyLogging
 import org.apache.hadoop.fs.FileSystem
 import org.apache.spark.sql.SQLContext
 import org.apache.spark.sql.sources.{BaseRelation, DataSourceRegister, RelationProvider}
@@ -48,7 +47,7 @@ class L8CatalogDataSource extends DataSourceRegister with RelationProvider {
   }
 }
 
-object L8CatalogDataSource extends LazyLogging with ResourceCacheSupport {
+object L8CatalogDataSource extends ResourceCacheSupport {
   final val SHORT_NAME: String = "aws-pds-l8-catalog"
   private val remoteSource = URI.create("http://landsat-pds.s3.amazonaws.com/c1/L8/scene_list.gz")
   private def sceneListFile(implicit fs: FileSystem) =
diff --git a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/L8CatalogRelation.scala b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/L8CatalogRelation.scala
index f87597362..9a14c86f3 100644
--- a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/L8CatalogRelation.scala
+++ b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/L8CatalogRelation.scala
@@ -21,7 +21,6 @@ package org.locationtech.rasterframes.experimental.datasource.awspds
 
-import com.typesafe.scalalogging.LazyLogging
 import geotrellis.vector.Extent
 import org.apache.hadoop.fs.{Path => HadoopPath}
 import org.apache.spark.sql.functions._
@@ -37,7 +36,7 @@ import org.locationtech.rasterframes.experimental.datasource.CachedDatasetRelati
 * @since 9/28/17
 */
 case class L8CatalogRelation(sqlContext: SQLContext, sceneListPath: HadoopPath)
-  extends BaseRelation with TableScan with CachedDatasetRelation with LazyLogging {
+  extends BaseRelation with TableScan with CachedDatasetRelation {
   import L8CatalogRelation._
 
   override def schema: StructType = L8CatalogRelation.schema
diff --git a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/MODISCatalogDataSource.scala b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/MODISCatalogDataSource.scala
index 347d82d29..ce2c552e3 100644
--- a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/MODISCatalogDataSource.scala
+++ b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/MODISCatalogDataSource.scala
@@ -25,14 +25,15 @@ import java.net.URI
 import java.time.LocalDate
 import java.time.temporal.ChronoUnit
 
-import org.locationtech.rasterframes.util.withResource
-import org.locationtech.rasterframes._
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import org.apache.hadoop.fs.{FileSystem, Path => HadoopPath}
 import org.apache.hadoop.io.IOUtils
 import org.apache.spark.sql.SQLContext
 import org.apache.spark.sql.sources.{BaseRelation, DataSourceRegister, RelationProvider}
+import org.locationtech.rasterframes._
 import org.locationtech.rasterframes.experimental.datasource.ResourceCacheSupport
+import org.locationtech.rasterframes.util.withResource
+import org.slf4j.LoggerFactory
 
 
 /**
@@ -43,7 +44,9 @@ import org.locationtech.rasterframes.experimental.datasource.ResourceCacheSuppor
 *
 * @since 5/4/18
 */
-class MODISCatalogDataSource extends DataSourceRegister with RelationProvider with LazyLogging {
+class MODISCatalogDataSource extends DataSourceRegister with RelationProvider {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
   override def shortName(): String = MODISCatalogDataSource.SHORT_NAME
 
   /**
    * Create a MODIS catalog data source.
@@ -70,7 +73,7 @@ class MODISCatalogDataSource extends DataSourceRegister with RelationProvider wi
   }
 }
 
-object MODISCatalogDataSource extends LazyLogging with ResourceCacheSupport {
+object MODISCatalogDataSource extends ResourceCacheSupport {
   final val SHORT_NAME = "aws-pds-modis-catalog"
   final val MCD43A4_BASE = "https://modis-pds.s3.amazonaws.com/MCD43A4.006/"
   override def maxCacheFileAgeHours: Int = Int.MaxValue
diff --git a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/MODISCatalogRelation.scala b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/MODISCatalogRelation.scala
index 8fa3422e3..30b3ba234 100644
--- a/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/MODISCatalogRelation.scala
+++ b/experimental/src/main/scala/org/locationtech/rasterframes/experimental/datasource/awspds/MODISCatalogRelation.scala
@@ -21,14 +21,12 @@ package org.locationtech.rasterframes.experimental.datasource.awspds
 
-import com.typesafe.scalalogging.LazyLogging
 import org.apache.hadoop.fs.{Path => HadoopPath}
 import org.apache.spark.sql._
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.sources._
 import org.apache.spark.sql.types._
 import org.locationtech.rasterframes.experimental.datasource.CachedDatasetRelation
-import org.locationtech.rasterframes.experimental.datasource.awspds.MODISCatalogRelation.Bands
 
 /**
  * Constructs a dataframe from the available scenes
@@ -36,8 +34,7 @@ import org.locationtech.rasterframes.experimental.datasource.awspds.MODISCatalog
 * @since 5/4/18
 */
 case class MODISCatalogRelation(sqlContext: SQLContext, sceneList: HadoopPath)
-  extends BaseRelation with TableScan with CachedDatasetRelation with LazyLogging {
-
+  extends BaseRelation with TableScan with CachedDatasetRelation {
   import MODISCatalogRelation._
 
   protected def cacheFile: HadoopPath = sceneList.suffix(".parquet")
diff --git a/project/RFAssemblyPlugin.scala b/project/RFAssemblyPlugin.scala
index 06246d3ae..60a7badfe 100644
--- a/project/RFAssemblyPlugin.scala
+++ b/project/RFAssemblyPlugin.scala
@@ -54,7 +54,8 @@ object RFAssemblyPlugin extends AutoPlugin {
         "com.amazonaws",
         "org.apache.avro",
         "org.apache.http",
-        "com.google.guava"
+        "com.google.guava",
+        "com.typesafe.scalalogging"
       )
       shadePrefixes.map(p ⇒ ShadeRule.rename(s"$p.**" -> s"rf.shaded.$p.@1").inAll)
     },
diff --git a/project/RFDependenciesPlugin.scala b/project/RFDependenciesPlugin.scala
index f8e21c243..5c161eadb 100644
--- a/project/RFDependenciesPlugin.scala
+++ b/project/RFDependenciesPlugin.scala
@@ -60,8 +60,7 @@ object RFDependenciesPlugin extends AutoPlugin {
     // NB: Make sure to update the Spark version in pyrasterframes/python/setup.py
     rfSparkVersion := "2.3.4",
-    rfGeoTrellisVersion := "2.3.1",
+    rfGeoTrellisVersion := "2.3.3",
     rfGeoMesaVersion := "2.2.1",
-    //dependencyOverrides += "com.azavea.gdal" % "gdal-warp-bindings" % "33.58d4965"
   )
 }
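
For reference, the logging pattern adopted across the Scala sources in this change is sketched below. This is a minimal, illustrative stand-in (assuming scala-logging 3.x and an SLF4J binding on the classpath); `ExampleExpression` is a hypothetical placeholder, not one of the files touched above.

```scala
import com.typesafe.scalalogging.Logger
import org.slf4j.LoggerFactory

// Each class or trait now declares its own logger instead of mixing in
// com.typesafe.scalalogging.LazyLogging, which the release notes tie to
// shading-assembly issues (#293).
class ExampleExpression extends Serializable {
  // @transient keeps the non-serializable SLF4J logger out of Spark task
  // serialization; the lazy val re-creates it on executors after deserialization.
  @transient protected lazy val logger: Logger =
    Logger(LoggerFactory.getLogger(getClass.getName))

  def evaluate(): Unit = logger.debug("evaluating expression")
}
```

In the `rasterframes` and `util` package objects the diff instead uses a plain `protected def logger`, per the in-source comment that a `lazy val` there breaks Spark assemblies, and `com.typesafe.scalalogging` is added to the shaded package prefixes in `RFAssemblyPlugin.scala`.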