:sparkles: (feat) UNIC-705 hash transformation with column filter #231

Merged: 2 commits, Jul 4, 2024
@@ -4,7 +4,7 @@ import org.apache.spark.sql.Column
 import org.apache.spark.sql.functions.lit
 import org.apache.spark.sql.types.StringType

-trait HashTransformation extends Transformation {self =>
-  val columns: Seq[String]
+trait HashTransformation[A] extends Transformation {self =>
+  val columns: A
   val nullValues: Column = lit(null).cast(StringType)
 }
@@ -17,7 +17,7 @@ import scala.language.postfixOps
  * @param keyLength length of the resulting hash
  * @param columns names of the columns to hash
  */
-case class PBKDF2(salt: String, iteration: Int, keyLength: Int, override val columns: String*) extends HashTransformation {
+case class PBKDF2(salt: String, iteration: Int, keyLength: Int, override val columns: String*) extends HashTransformation[Seq[String]] {

   override def transform: DataFrame => DataFrame = { df =>

@@ -4,7 +4,7 @@ import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.types.StringType

-case class SHA1(salt: String, override val columns: String*) extends HashTransformation {
+case class SHA1(salt: String, override val columns: String*) extends HashTransformation[Seq[String]] {
   override def transform: DataFrame => DataFrame = { df =>
     columns.foldLeft(df){ case (d, column) =>
       d.withColumn(column,
@@ -0,0 +1,19 @@
+package bio.ferlab.datalake.spark3.transformation
+
+import org.apache.spark.sql.DataFrame
+import org.apache.spark.sql.functions.{col, concat_ws, lit, sha1, when}
+import org.apache.spark.sql.types.StringType
+
+case class SHA1Dynamic(salt: String, override val columns: DataFrame => Seq[String]) extends HashTransformation[DataFrame => Seq[String]] {
+
+  override def transform: DataFrame => DataFrame = { df =>
+    columns(df).foldLeft(df){ case (d, column) =>
+      d.withColumn(column,
+        when(col(column).isNull, nullValues)
+          .otherwise(
+            if(salt.nonEmpty) sha1(concat_ws("_", col(column).cast(StringType), lit(salt)))
+            else sha1(col(column).cast(StringType))
+          ))
+    }
+  }
+}
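
For context, a minimal usage sketch of the new SHA1Dynamic transformation (not part of this diff): the filter function, the "participant_id" column name, and the salt value below are assumptions chosen only for illustration.

import bio.ferlab.datalake.spark3.transformation.SHA1Dynamic
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.types.StringType

// Hypothetical column filter: hash every string column except an assumed "participant_id" identifier.
val stringColumnsExceptId: DataFrame => Seq[String] = df =>
  df.schema.fields
    .filter(f => f.dataType == StringType && f.name != "participant_id")
    .map(_.name)
    .toSeq

// SHA1Dynamic resolves the column list against each incoming DataFrame at transform time.
def hashStringColumns(df: DataFrame): DataFrame =
  SHA1Dynamic("some-salt", stringColumnsExceptId).transform(df)

The static variants (PBKDF2, SHA1, SHA256) keep their explicit String* column lists via HashTransformation[Seq[String]], while SHA1Dynamic lets the hashed column set depend on the schema of the DataFrame being transformed.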
@@ -4,7 +4,7 @@ import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.types.StringType

-case class SHA256(salt: String, override val columns: String*) extends HashTransformation {
+case class SHA256(salt: String, override val columns: String*) extends HashTransformation[Seq[String]] {
   override def transform: DataFrame => DataFrame = { df =>
     columns.foldLeft(df){ case (d, column) =>
       d.withColumn(column,