Skip to content

Commit

Permalink
Fixed sonar code smells.
Browse files Browse the repository at this point in the history
Fixes #162.
  • Loading branch information
morazow committed Jul 21, 2021
1 parent 37ed5dc commit ccb3bda
Show file tree
Hide file tree
Showing 6 changed files with 22 additions and 18 deletions.
6 changes: 4 additions & 2 deletions doc/changes/changes_1.2.1.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ Code name:

* #150: Added extracted parquet-io-java library
* #157: Refactored build setup
* #162: Fixed code smells reported by SonarCloud

## Dependency Updates

Expand All @@ -21,7 +22,7 @@ Code name:
* Added `com.google.guava:guava:30.1.1-jre`
* Added `com.typesafe.scala-logging:scala-logging:3.9.4`
* Added `org.apache.commons:commons-lang3:3.12.0`
* Added `org.slf4j:slf4j-log4j12:1.7.31`
* Added `org.slf4j:slf4j-log4j12:1.7.32`
* Added `org.apache.hadoop:hadoop-common:3.3.1`
* Removed `org.apache.hadoop:hadoop-client:3.3.1`
* Removed `org.apache.parquet:parquet-hadoop:1.12.0`
Expand All @@ -31,10 +32,11 @@ Code name:
### Test Dependency Updates

* Updated `org.mockito:mockito-core:3.11.1` to `3.11.2`
* Updated `org.testcontainers:localstack:1.15.3` to `1.16.0`

### Plugin Updates

* Updated `org.wartremover:sbt-wartremover:2.4.13` to `2.4.15`
* Updated `org.wartremover:sbt-wartremover:2.4.13` to `2.4.16`
* Updated `org.wartremover:sbt-wartremover-contrib:1.3.11` to `1.3.12`
* Updated `org.scoverage:sbt-coveralls:1.3.0` to `1.3.1`
* Updated `net.bzzt:sbt-reproducible-builds:0.25` to `0.28`
Expand Down
4 changes: 2 additions & 2 deletions project/Dependencies.scala
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ object Dependencies {
private val ExasolHamcrestMatcherVersion = "1.4.0"
private val ExasolTestDBBuilderVersion = "3.2.0"
private val ExasolTestContainersVersion = "3.5.3"
private val TestContainersLocalstackVersion = "1.15.3"
private val TestContainersLocalstackVersion = "1.16.0"
private val TestContainersScalaVersion = "0.39.5"

val Resolvers: Seq[Resolver] = Seq(
Expand Down Expand Up @@ -115,7 +115,7 @@ object Dependencies {
ExclusionRule(organization = "org.apache.zookeeper")
),
// Logging Dependencies
"org.slf4j" % "slf4j-log4j12" % "1.7.31",
"org.slf4j" % "slf4j-log4j12" % "1.7.32",
"com.typesafe.scala-logging" %% "scala-logging" % "3.9.4"
)

Expand Down
2 changes: 1 addition & 1 deletion project/plugins.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3")

// Adds a `wartremover` a flexible Scala code linting tool
// http://github.com/puffnfresh/wartremover
addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.4.15")
addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.4.16")

// Adds Contrib Warts
// http://github.com/wartremover/wartremover-contrib/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ class RowWriteSupport(schema: MessageType) extends WriteSupport[Row] {
case PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY if originalType == OriginalType.DECIMAL =>
val decimal =
primitiveType.getLogicalTypeAnnotation().asInstanceOf[DecimalLogicalTypeAnnotation]
makeDecimalWriter(decimal.getPrecision(), decimal.getScale())
makeDecimalWriter(decimal.getPrecision())

case _ => throw new UnsupportedOperationException(s"Unsupported parquet type '$typeName'.")
}
Expand All @@ -163,7 +163,7 @@ class RowWriteSupport(schema: MessageType) extends WriteSupport[Row] {
recordConsumer.addBinary(Binary.fromReusedByteArray(timestampBuffer))
}

private def makeDecimalWriter(precision: Int, scale: Int): RowValueWriter = {
private def makeDecimalWriter(precision: Int): RowValueWriter = {
require(
precision >= 1,
s"Decimal precision $precision should not be less than minimum precision 1"
Expand Down
18 changes: 10 additions & 8 deletions src/main/scala/com/exasol/cloudetl/storage/StorageProperties.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,10 @@ class StorageProperties(

import StorageProperties._

private[this] val AZURE_DELTA_LOG_STORE_CLASS = "org.apache.spark.sql.delta.storage.AzureLogStore"
private[this] val S3_DELTA_LOG_STORE_CLASS = "org.apache.spark.sql.delta.storage.S3SingleDriverLogStore"
private[this] val HDFS_DELTA_LOG_STORE_CLASS = "org.apache.spark.sql.delta.storage.HDFSLogStore"

/**
* Returns the storage path.
*
Expand Down Expand Up @@ -44,15 +48,13 @@ class StorageProperties(
* requirements.
*/
final def getDeltaFormatLogStoreClassName(): String = getStoragePathScheme() match {
case "abfs" | "abfss" => "org.apache.spark.sql.delta.storage.AzureLogStore"
case "adl" => "org.apache.spark.sql.delta.storage.AzureLogStore"
case "abfs" | "abfss" => AZURE_DELTA_LOG_STORE_CLASS
case "adl" => AZURE_DELTA_LOG_STORE_CLASS
case "gs" =>
throw new UnsupportedOperationException(
"Delta format LogStore API is not supported in Google Cloud Storage yet."
)
case "s3a" => "org.apache.spark.sql.delta.storage.S3SingleDriverLogStore"
case "wasb" | "wasbs" => "org.apache.spark.sql.delta.storage.AzureLogStore"
case _ => "org.apache.spark.sql.delta.storage.HDFSLogStore"
throw new UnsupportedOperationException("Delta format LogStore API is not supported in Google Cloud Storage yet.")
case "s3a" => S3_DELTA_LOG_STORE_CLASS
case "wasb" | "wasbs" => AZURE_DELTA_LOG_STORE_CLASS
case _ => HDFS_DELTA_LOG_STORE_CLASS
}

/** Returns the [[FileFormat]] file format. */
Expand Down
6 changes: 3 additions & 3 deletions src/main/scala/com/exasol/cloudetl/util/DateTimeUtil.scala
Original file line number Diff line number Diff line change
Expand Up @@ -63,9 +63,9 @@ object DateTimeUtil {
*/
@SuppressWarnings(Array("org.wartremover.contrib.warts.ExposedTuples"))
def getJulianDayAndNanos(us: Long): (Int, Long) = {
val julian_us = us + JULIAN_DAY_OF_EPOCH * MICROS_PER_DAY
val day = julian_us / MICROS_PER_DAY
val micros = julian_us % MICROS_PER_DAY
val julianMicros = us + JULIAN_DAY_OF_EPOCH * MICROS_PER_DAY
val day = julianMicros / MICROS_PER_DAY
val micros = julianMicros % MICROS_PER_DAY
(day.toInt, micros * 1000L)
}

Expand Down

0 comments on commit ccb3bda

Please sign in to comment.