
Commit

#1882 Update Cobrix to 2.4.1 and update Cobol test suite for ASCII files.
yruslan committed Sep 22, 2021
1 parent ba22336 commit 8fa6cbc
Showing 3 changed files with 11 additions and 11 deletions.
pom.xml (2 changes: 1 addition & 1 deletion)
@@ -140,7 +140,7 @@
<atum.version>3.3.0</atum.version>
<bower.chart.js.version>2.7.3</bower.chart.js.version>
<bson.codec.jsr310.version>3.5.4</bson.codec.jsr310.version>
- <cobrix.version>2.3.0</cobrix.version>
+ <cobrix.version>2.4.1</cobrix.version>
<diffson.version>2.0.2</diffson.version>
<gson.version>2.8.2</gson.version>
<guava.version>27.0.1-jre</guava.version>
@@ -134,7 +134,7 @@ class StandardizationPropertiesProvider {
// For EBCDIC files --charset is converted into Cobrix "ebcdic_code_page" option
HashMap(
getCopybookOption(cobolOptions, dataset),
"is_xcom" -> isXcomOpt.map(BooleanParameter),
"is_record_sequence" -> isXcomOpt.map(BooleanParameter),
"is_text" -> isTextOpt.map(BooleanParameter),
"string_trimming_policy" -> cobolOptions.trimmingPolicy.map(StringParameter),
"encoding" -> cobolOptions.encoding.map(StringParameter),
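Context on the option rename above: Cobrix 2.4.x uses the "is_record_sequence" reader flag in place of the older "is_xcom" key, so the options map built by StandardizationPropertiesProvider now passes the renamed key through. Below is a minimal sketch of how such options typically reach the Cobrix Spark data source; the option names follow the public Cobrix documentation, while the helper name, copybook path and data path are placeholders and not code from this repository.

// Sketch only, not part of this commit: roughly how the options assembled above
// are handed to the Cobrix Spark data source. Option keys follow Cobrix 2.4.x
// documentation; the helper name and both paths are placeholders.
import org.apache.spark.sql.{DataFrame, SparkSession}

def readAsciiCobolFile(spark: SparkSession, copybookPath: String, dataPath: String): DataFrame =
  spark.read
    .format("cobol")                       // Cobrix data source short name
    .option("copybook", copybookPath)      // copybook describing the record layout
    .option("is_record_sequence", "true")  // replaces the older "is_xcom" flag
    .option("is_text", "true")             // records are plain text lines
    .option("encoding", "ascii")           // ASCII input instead of EBCDIC
    .load(dataPath)

For EBCDIC input the suite instead maps --charset onto Cobrix's "ebcdic_code_page" option, as noted in the comment above.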
@@ -15,8 +15,6 @@

package za.co.absa.enceladus.standardization

- import java.nio.charset.StandardCharsets
-
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.mockito.scalatest.MockitoSugar
@@ -28,6 +26,8 @@ import za.co.absa.enceladus.standardization.config.StandardizationConfig
import za.co.absa.enceladus.standardization.fixtures.TempFileFixture
import za.co.absa.enceladus.utils.testUtils.SparkTestBase

+ import java.nio.charset.StandardCharsets
+
class StandardizationCobolAsciiSuite extends FixtureAnyFunSuite with SparkTestBase with TempFileFixture with MockitoSugar {

type FixtureParam = String
@@ -90,7 +90,7 @@ class StandardizationCobolAsciiSuite extends FixtureAnyFunSuite with SparkTestBase
|{"A1":"4","A2":" on ","A3":" Data 4"}""".stripMargin.replace("\r\n", "\n")

val df = getTestDataFrame(tmpFileName, args)
- val actual = df.toJSON.collect.mkString("\n")
+ val actual = df.orderBy("A1").toJSON.collect.mkString("\n")

assert(actual == expected)
}
@@ -105,7 +105,7 @@ class StandardizationCobolAsciiSuite extends FixtureAnyFunSuite with SparkTestBase
|{"A1":"4","A2":"on ","A3":"Data 4"}""".stripMargin.replace("\r\n", "\n")

val df = getTestDataFrame(tmpFileName, args)
- val actual = df.toJSON.collect.mkString("\n")
+ val actual = df.orderBy("A1").toJSON.collect.mkString("\n")

assert(actual == expected)
}
@@ -120,7 +120,7 @@ class StandardizationCobolAsciiSuite extends FixtureAnyFunSuite with SparkTestBase
|{"A1":"4","A2":" on","A3":" Data 4"}""".stripMargin.replace("\r\n", "\n")

val df = getTestDataFrame(tmpFileName, args)
- val actual = df.toJSON.collect.mkString("\n")
+ val actual = df.orderBy("A1").toJSON.collect.mkString("\n")

assert(actual == expected)
}
@@ -135,7 +135,7 @@ class StandardizationCobolAsciiSuite extends FixtureAnyFunSuite with SparkTestBase
|{"A1":"4","A2":"on","A3":"Data 4"}""".stripMargin.replace("\r\n", "\n")

val df = getTestDataFrame(tmpFileName, args)
- val actual = df.toJSON.collect.mkString("\n")
+ val actual = df.orderBy("A1").toJSON.collect.mkString("\n")

assert(actual == expected)
}
@@ -146,11 +146,11 @@ class StandardizationCobolAsciiSuite extends FixtureAnyFunSuite with SparkTestBase
val expected =
"""{"A1":"1","A2":"Tes","A3":"0123456789"}
|{"A1":"2","A2":"est2","A3":"SomeText"}
|{"A1":"3","A2":"None","A3":"Data 3"}
|{"A1":"","A2":"4 on","A3":"Data"}""".stripMargin.replace("\r\n", "\n")
|{"A1":"3","A2":"None","A3":"Data 3"}
|{"A1":"4","A2":"on","A3":"Data 4"}""".stripMargin.replace("\r\n", "\n")

val df = getTestDataFrame(tmpFileName, args)
- val actual = df.toJSON.collect.mkString("\n")
+ val actual = df.orderBy("A1").toJSON.collect.mkString("\n")

assert(actual == expected)
}
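A note on the recurring change in the test suite above: each test previously compared df.toJSON.collect output directly against a fixed expected string, which only works if Spark returns rows in input order. With the updated reader that order is apparently no longer guaranteed, so the tests now sort by the key column A1 before collecting. A small helper capturing that pattern could look like the sketch below; the helper name is hypothetical and not part of this commit.

// Hypothetical helper, not part of this commit: makes the DataFrame-to-JSON
// comparison deterministic by ordering on a key column before collecting.
import org.apache.spark.sql.DataFrame

def collectAsOrderedJson(df: DataFrame, keyColumn: String): String =
  df.orderBy(keyColumn).toJSON.collect().mkString("\n")

// Usage inside a test body:
//   assert(collectAsOrderedJson(df, "A1") == expected)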

