
Merge pull request #110 from WikiWatershed/jp/gt-3.7.0
Closes #105 
Closes #106
rajadain authored Apr 9, 2024
2 parents cb54f3c + e172b06 commit 2ad377f
Showing 16 changed files with 222 additions and 882 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -15,3 +15,7 @@ project/plugins/project/
# Scala-IDE specific
.scala_dependencies
.worksheet
+ .bloop/
+ .bsp/
+ .metals/
+ project/project/
17 changes: 10 additions & 7 deletions api/src/main/scala/Geoprocessing.scala
@@ -3,15 +3,15 @@ package org.wikiwatershed.mmw.geoprocessing
import java.util.concurrent.atomic.{LongAdder, DoubleAdder, DoubleAccumulator}
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.collection.parallel.CollectionConverters._

import collection.concurrent.TrieMap

import geotrellis.layer._
import geotrellis.raster._
import geotrellis.raster.rasterize._
import geotrellis.vector._

import geotrellis.spark._

import cats.implicits._

trait Geoprocessing extends Utils {
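The scala.collection.parallel.CollectionConverters import above, together with the scalaParallel dependency declared in build.sbt further down, reflects the Scala 2.13 split of parallel collections into their own module: .par is no longer in the standard library. A minimal standalone sketch (not project code):

import scala.collection.parallel.CollectionConverters._

object ParDemo extends App {
  // On Scala 2.13, .par compiles only with the scala-parallel-collections
  // module on the classpath and this import in scope.
  val squares = (1 to 8).toList.par.map(n => n * n).toList
  println(squares)
}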
@@ -53,7 +53,7 @@ trait Geoprocessing extends Utils {
} yield {
val results = op.name match {
case "RasterGroupedCount" =>
- rasterGroupedCount(layers, shape, opts).mapValues(_.toDouble)
+ rasterGroupedCount(layers, shape, opts).fmap(_.toDouble)

case "RasterGroupedAverage" =>
targetLayer match {
@@ -68,7 +68,7 @@
input.streamLines match {
case Some(mls) => {
val lines = cropLinesToAOI(mls.map(parseMultiLineString), shape)
- rasterLinesJoin(layers, lines).mapValues(_.toDouble)
+ rasterLinesJoin(layers, lines).fmap(_.toDouble)
}
case None =>
throw new MissingStreamLinesException
@@ -81,7 +81,7 @@
}

val nested: Future[Map[HucID, Map[OperationID, Map[String, Double]]]] = tabular.map { list =>
- list.groupBy { case (a, _, _) => a }.mapValues {
+ list.groupBy { case (a, _, _) => a }.fmap {
grouped => grouped.map {case (_, b, c) => (b, c) }.toMap
}
}
@@ -210,7 +210,7 @@
*/
private def rasterLinesJoin(
rasterLayers: Seq[TileLayerCollection[SpatialKey]],
- lines: Seq[MultiLine]
+ lines: Seq[MultiLineString]
): Map[String, Int] = {
val metadata = rasterLayers.head.metadata
val pixelGroups: TrieMap[(List[Int], TilePixel), Int] = TrieMap.empty
@@ -232,7 +232,7 @@

pixelGroups
.groupBy(_._1._1.toString)
- .mapValues(_.size)
+ .fmap(_.size)
}

/**
@@ -319,9 +319,11 @@

update(targetLayerValue, pixelValues)
}
+ case (_,_) => ()
})

pixelGroups
+ .view
.mapValues { case (accumulator, counter) => accumulator.sum / counter.sum }
.map { case (k, v) => k.toString -> v }
.toMap
@@ -360,6 +362,7 @@
})

pixelGroups
+ .view
.mapValues(_.sum().toInt)
.map { case (k, v) => k.toString -> v}
.toMap
…
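The recurring mapValues-to-fmap and .view.mapValues(...).toMap edits in this file follow from Scala 2.13 deprecating the strict Map.mapValues (it now returns a lazy MapView). A minimal sketch of the two replacement idioms, assuming cats is on the classpath as the cats.implicits._ import above suggests:

import cats.implicits._

object MapValuesDemo extends App {
  val counts: Map[String, Int] = Map("11" -> 3, "21" -> 5)

  // cats' Functor syntax gives a strict Map-to-Map transformation, replacing
  // the deprecated counts.mapValues(_.toDouble).
  val viaFmap: Map[String, Double] = counts.fmap(_.toDouble)

  // The standard-library alternative goes through a lazy MapView and forces
  // it back with toMap, which is the .view.mapValues(...).toMap shape used
  // for the accumulator maps above.
  val viaView: Map[String, Double] = counts.view.mapValues(_.toDouble).toMap

  println(viaFmap == viaView) // true
}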
46 changes: 25 additions & 21 deletions api/src/main/scala/Utils.scala
@@ -5,17 +5,16 @@ import java.util.function.DoubleBinaryOperator
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

- import spray.json._

import geotrellis.proj4.{CRS, ConusAlbers, LatLng, WebMercator}

+ import geotrellis.layer._
import geotrellis.raster._
import geotrellis.raster.rasterize._
+ import geotrellis.store._
+ import geotrellis.store.s3._
import geotrellis.vector._
- import geotrellis.vector.io._
- import geotrellis.spark._
- import geotrellis.spark.io._
- import geotrellis.spark.io.s3._
+ import geotrellis.vector.io.json.GeoJson

import com.typesafe.config.ConfigFactory

@@ -68,7 +67,7 @@ trait Utils {
val parseGeom =
parseGeometry(_: String, getCRS(input.polygonCRS), getCRS(input.rasterCRS))

- input.polygon.map { str => parseGeom(str).buffer(0).asMultiPolygon.get }
+ input.polygon.map { str => regularizeMultiPolygon(parseGeom(str)) }
.unionGeometries
.asMultiPolygon
.get
@@ -85,7 +84,7 @@
val parseGeom =
parseGeometry(_: String, getCRS(input.polygonCRS), getCRS(input.rasterCRS))

- input.polygon.map { str => parseGeom(str).buffer(0).asMultiPolygon.get }
+ input.polygon.map { str => regularizeMultiPolygon(parseGeom(str)) }
}

/**
@@ -95,10 +94,7 @@
* @return A MultiPolygon
*/
def normalizeHuc(huc: HUC): MultiPolygon = {
- parseGeometry(huc.shape, LatLng, ConusAlbers)
- .buffer(0)
- .asMultiPolygon
- .get
+ regularizeMultiPolygon(parseGeometry(huc.shape, LatLng, ConusAlbers))
}

/**
@@ -131,13 +127,21 @@
* @return A MultiPolygon
*/
def parseGeometry(geoJson: String, srcCRS: CRS, destCRS: CRS): MultiPolygon = {
- geoJson.parseJson.convertTo[Geometry] match {
+ GeoJson.parse[Geometry](geoJson) match {
case p: Polygon => MultiPolygon(p.reproject(srcCRS, destCRS))
case mp: MultiPolygon => mp.reproject(srcCRS, destCRS)
case _ => MultiPolygon()
}
}

+ def regularizeMultiPolygon(mp: MultiPolygon): MultiPolygon = {
+ mp.buffer(0) match {
+ case p: Polygon if p.isEmpty => MultiPolygon()
+ case p: Polygon => MultiPolygon(Seq(p))
+ case mp: MultiPolygon => mp
+ }
+ }

/**
* Given an input vector along with a vectorCRS and rasterCRS, return a Seq
* of MultiLines
@@ -151,7 +155,7 @@
vector: List[String],
vectorCRS: String,
rasterCRS: String
- ): Seq[MultiLine] = {
+ ): Seq[MultiLineString] = {
val parseVector =
parseMultiLineString(_: String, getCRS(vectorCRS), getCRS(rasterCRS))

@@ -167,18 +171,18 @@
* @param destCRS The CRS that the outgoing geometry should be in
* @return A MultiLine
*/
- def parseMultiLineString(geoJson: String, srcCRS: CRS, destCRS: CRS): MultiLine = {
- geoJson.parseJson.convertTo[Geometry] match {
- case l: Line => MultiLine(l.reproject(srcCRS, destCRS))
- case ml: MultiLine => ml.reproject(srcCRS, destCRS)
- case _ => MultiLine()
+ def parseMultiLineString(geoJson: String, srcCRS: CRS, destCRS: CRS): MultiLineString = {
+ GeoJson.parse[Geometry](geoJson) match {
+ case l: LineString => MultiLineString(l.reproject(srcCRS, destCRS))
+ case ml: MultiLineString => ml.reproject(srcCRS, destCRS)
+ case _ => MultiLineString()
}
}

/**
* Convenience flavor of the above with defaults
*/
- def parseMultiLineString(geoJson: String): MultiLine =
+ def parseMultiLineString(geoJson: String): MultiLineString =
parseMultiLineString(geoJson, LatLng, ConusAlbers)

/**
@@ -189,8 +193,8 @@
* @param aoi Area of Interest
* @return A sequence of MultiLines that intersect with the Area of Interest
*/
- def cropLinesToAOI(lines: Seq[MultiLine], aoi: MultiPolygon): Seq[MultiLine] = {
- lines.flatMap(line => (line & aoi).asMultiLine)
+ def cropLinesToAOI(lines: Seq[MultiLineString], aoi: MultiPolygon): Seq[MultiLineString] = {
+ lines.flatMap(line => (line & aoi).asMultiLineString)
}

/**
…
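A rough usage sketch of the GeoTrellis 3.x API surface this file now targets: Line/MultiLine become LineString/MultiLineString, and GeoJSON parsing goes through GeoJson.parse rather than spray-json's parseJson.convertTo[Geometry]. Coordinates and imports here are illustrative only; the intersection step mirrors cropLinesToAOI above.

import geotrellis.vector._
import geotrellis.vector.io.json._

object VectorSketch extends App {
  // Parse an AOI polygon and a stream line from GeoJSON (hypothetical shapes).
  val aoi: MultiPolygon = GeoJson.parse[Geometry](
    """{"type":"Polygon","coordinates":[[[0,0],[10,0],[10,10],[0,10],[0,0]]]}"""
  ) match {
    case p: Polygon       => MultiPolygon(p)
    case mp: MultiPolygon => mp
    case _                => MultiPolygon()
  }

  val stream: MultiLineString = GeoJson.parse[Geometry](
    """{"type":"LineString","coordinates":[[-5,5],[15,5]]}"""
  ) match {
    case l: LineString       => MultiLineString(l)
    case ml: MultiLineString => ml
    case _                   => MultiLineString()
  }

  // Clip the line to the AOI, keeping only line-ish intersection results,
  // the same step cropLinesToAOI performs.
  val cropped: Seq[MultiLineString] =
    Seq(stream).flatMap(line => (line & aoi).asMultiLineString)

  println(cropped)
}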
18 changes: 9 additions & 9 deletions api/src/main/scala/WebServer.scala
@@ -49,16 +49,16 @@ case class MultiInput (
)

object PostRequestProtocol extends DefaultJsonProtocol {
- implicit val inputFormat = jsonFormat10(InputData)
- implicit val postFormat = jsonFormat1(PostRequest)
- implicit val resultFormat = jsonFormat1(ResultInt)
- implicit val resultDoubleFormat = jsonFormat1(ResultDouble)
- implicit val resultSummaryFormat = jsonFormat1(ResultSummary)
- implicit val resultManyIntFormat = jsonFormat1(ResultManyInt)
+ implicit val inputFormat: RootJsonFormat[InputData] = jsonFormat10(InputData)
+ implicit val postFormat: RootJsonFormat[PostRequest] = jsonFormat1(PostRequest)
+ implicit val resultFormat: RootJsonFormat[ResultInt] = jsonFormat1(ResultInt)
+ implicit val resultDoubleFormat: RootJsonFormat[ResultDouble] = jsonFormat1(ResultDouble)
+ implicit val resultSummaryFormat: RootJsonFormat[ResultSummary] = jsonFormat1(ResultSummary)
+ implicit val resultManyIntFormat: RootJsonFormat[ResultManyInt] = jsonFormat1(ResultManyInt)

- implicit val hucFormat = jsonFormat2(HUC)
- implicit val operationFormat = jsonFormat5(Operation)
- implicit val multiInputFormat = jsonFormat3(MultiInput)
+ implicit val hucFormat: RootJsonFormat[HUC] = jsonFormat2(HUC)
+ implicit val operationFormat: RootJsonFormat[Operation] = jsonFormat5(Operation)
+ implicit val multiInputFormat: RootJsonFormat[MultiInput] = jsonFormat3(MultiInput)
}

object WebServer extends HttpApp with App with LazyLogging with Geoprocessing with ErrorHandler {
…
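The only change here is spelling out each implicit's type: recent Scala compilers warn when a public implicit's type is left to be inferred, and Scala 3 requires the annotation outright, so the RootJsonFormat[...] ascriptions future-proof the protocol. A minimal sketch of the pattern with a hypothetical payload class (not one of the project's case classes):

import spray.json._

object SketchProtocol extends DefaultJsonProtocol {
  // Hypothetical payload, purely to illustrate the explicitly typed implicit.
  final case class Ping(message: String, count: Int)

  // The explicit RootJsonFormat[Ping] annotation is the point of the change.
  implicit val pingFormat: RootJsonFormat[Ping] = jsonFormat2(Ping)
}

object SprayDemo extends App {
  import SketchProtocol._

  val json = Ping("hello", 2).toJson.compactPrint
  println(json)                           // {"message":"hello","count":2}
  println(json.parseJson.convertTo[Ping]) // Ping(hello,2)
}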
2 changes: 1 addition & 1 deletion api/src/main/scala/package.scala
@@ -1,6 +1,6 @@
package org.wikiwatershed.mmw

- import geotrellis.spark._
+ import geotrellis.layer._

package object geoprocessing {
type HucID = String
…
120 changes: 120 additions & 0 deletions build.sbt
@@ -0,0 +1,120 @@
import Dependencies._

name := "mmw-geoprocessing"
organization := "org.wikiwatershed"
licenses := Seq(
"Apache-2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0.html")
)

scalaVersion := Version.scala
ThisBuild / scalaVersion := Version.scala

lazy val root = Project("mmw-geoprocessing", file("."))
.aggregate(
api
)

lazy val api = project
.settings(commonSettings ++ apiDependencies ++ consoleSettings)

lazy val scalacOpts = Seq(
"-deprecation",
"-unchecked",
"-feature",
"-language:implicitConversions",
"-language:reflectiveCalls",
"-language:higherKinds",
"-language:postfixOps",
"-language:existentials",
"-Yrangepos"
)

lazy val apiDependencies = Seq(
libraryDependencies ++= Seq(
akkaActor,
akkaHttp,
akkaHttpSprayJson,
akkaStream,
logging,
scalaParallel,
scalatest % Test,
scalactic % Test,
geotrellisS3,
geotrellisGdal,
geotrellisRaster,
geotrellisVector
)
)

lazy val commonSettings = Seq(
evictionErrorLevel := Level.Warn,

Compile / scalacOptions ++= scalacOpts,
Compile / console / scalacOptions -= "-Ywarn-unused-import",
unmanagedSources / excludeFilter := ".#*.scala",
publishMavenStyle := true,
Test / publishArtifact := false,
pomIncludeRepository := { _ =>
false
},

resolvers ++= Seq(
"GeoSolutions" at "https://maven.geo-solutions.it/",
"LT-releases" at "https://repo.locationtech.org/content/groups/releases",
"OSGeo" at "https://repo.osgeo.org/repository/release/",
"maven2" at "https://repo1.maven.org/maven2"
),

assembly / assemblyShadeRules := {
val shadePackage = "org.wikiwatershed.shaded"
Seq(
ShadeRule.rename("cats.kernel.**" -> s"$shadePackage.cats.kernel.@1").inAll
)
},

Test / fork := true,
Test / parallelExecution := false,
Test / testOptions += Tests.Argument("-oD"),
Test / javaOptions ++= Seq("-Xms1024m", "-Xmx8144m", "-Djts.overlay=ng"),

// Settings for sbt-assembly plugin which builds fat jars for use by spark jobs
assembly / test := {},
assembly / assemblyMergeStrategy := {
case "reference.conf" => MergeStrategy.concat
case "application.conf" => MergeStrategy.concat
case PathList("META-INF", xs@_*) =>
xs match {
case ("MANIFEST.MF" :: Nil) => MergeStrategy.discard
// Concatenate everything in the services directory to keep GeoTools happy.
case ("services" :: _ :: Nil) =>
MergeStrategy.concat
// Concatenate these to keep JAI happy.
case ("javax.media.jai.registryFile.jai" :: Nil) |
("registryFile.jai" :: Nil) | ("registryFile.jaiext" :: Nil) =>
MergeStrategy.concat
case (name :: Nil) => {
// Must exclude META-INF/*.([RD]SA|SF) to avoid "Invalid signature file digest for Manifest main attributes" exception.
if (name.endsWith(".RSA") || name.endsWith(".DSA") || name.endsWith(
".SF"
))
MergeStrategy.discard
else
MergeStrategy.first
}
case _ => MergeStrategy.first
}
case _ => MergeStrategy.first
}
)

lazy val consoleSettings = Seq(
// auto imports for local SBT console
// can be used with `test:console` command
console / initialCommands :=
"""
import geotrellis.raster._
import geotrellis.vector._
import geotrellis.vector.io._
import geotrellis.vector.io.wkt.WKT
"""
)
24 changes: 24 additions & 0 deletions project/Dependencies.scala
@@ -0,0 +1,24 @@
import sbt._

object Dependencies {

private val dependencyScope = "provided"

val akkaActor = "com.typesafe.akka" %% "akka-actor" % Version.akka
val akkaHttp = "com.typesafe.akka" %% "akka-http" % Version.akkaHttp
val akkaHttpSprayJson = "com.typesafe.akka" %% "akka-http-spray-json" % Version.akkaHttp
val akkaStream = "com.typesafe.akka" %% "akka-stream" % Version.akka

val geotrellisS3 = "org.locationtech.geotrellis" %% "geotrellis-s3" % Version.geotrellis
val geotrellisRaster = "org.locationtech.geotrellis" %% "geotrellis-raster" % Version.geotrellis
val geotrellisVector = "org.locationtech.geotrellis" %% "geotrellis-vector" % Version.geotrellis
val geotrellisRasterTestkit = "org.locationtech.geotrellis" %% "geotrellis-raster-testkit" % Version.geotrellis
val geotrellisGdal = "org.locationtech.geotrellis" %% "geotrellis-gdal" % Version.geotrellis

val pureconfig = "com.github.pureconfig" %% "pureconfig" % "0.9.1"
val logging = "com.typesafe.scala-logging" %% "scala-logging" % Version.scalaLogging
val scalatest = "org.scalatest" %% "scalatest" % Version.scalatest
val scalactic = "org.scalactic" %% "scalactic" % Version.scalatest

val scalaParallel = "org.scala-lang.modules" %% "scala-parallel-collections" % Version.scalaParallel
}
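Dependencies.scala references a Version object that lives outside this diff (conventionally project/Version.scala). A hypothetical sketch of its shape, with placeholder values everywhere except the GeoTrellis line, which is implied by the jp/gt-3.7.0 branch name:

// Hypothetical project/Version.scala — not part of this diff; all values below
// are placeholders except the GeoTrellis target implied by the branch name.
object Version {
  val scala         = "2.13.x"  // placeholder
  val geotrellis    = "3.7.0"   // implied by jp/gt-3.7.0
  val akka          = "2.x"     // placeholder
  val akkaHttp      = "10.x"    // placeholder
  val scalaLogging  = "3.x"     // placeholder
  val scalatest     = "3.x"     // placeholder
  val scalaParallel = "1.x"     // placeholder
}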
…
