spline #605 minor Refactoring and code style
wajda committed Feb 28, 2023
1 parent ec82e1d commit 015388d
Showing 3 changed files with 10 additions and 4 deletions.
OperationNodeBuilder.scala
@@ -18,6 +18,7 @@ package za.co.absa.spline.harvester.builder

import org.apache.spark.sql.catalyst.{expressions => sparkExprssions}
import za.co.absa.spline.harvester.IdGeneratorsBundle
+import za.co.absa.spline.harvester.builder.OperationNodeBuilder.Attributes
import za.co.absa.spline.harvester.builder.plan.PlanOperationNodeBuilder.OperationId
import za.co.absa.spline.producer.model.{Attribute, FunctionalExpression, Literal}

@@ -33,10 +34,10 @@ trait OperationNodeBuilder
def addChild(childBuilder: OperationNodeBuilder): Unit = childBuilders :+= childBuilder
protected def resolveAttributeChild(attribute: sparkExprssions.Attribute): Option[sparkExprssions.Expression] = None

-protected def inputAttributes: Seq[Seq[Attribute]] = childBuilders.map(_.outputAttributes)
+protected def inputAttributes: Seq[Attributes] = childBuilders.map(_.outputAttributes)
protected def idGenerators: IdGeneratorsBundle

-def outputAttributes: Seq[Attribute]
+def outputAttributes: Attributes

def childIds: Seq[OperationId] = childBuilders.map(_.operationId)

@@ -46,3 +47,7 @@ trait OperationNodeBuilder

def outputExprToAttMap: Map[sparkExprssions.ExprId, Attribute]
}

+object OperationNodeBuilder {
+  type Attributes = Seq[Attribute]
+}
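
The hunk above introduces the Attributes type alias in the new companion object so that the nested Seq[Seq[Attribute]] signatures read as Seq[Attributes]. A minimal, self-contained sketch of the pattern, using simplified stand-ins rather than Spline's actual model classes (AttributesAliasSketch, Builder, and the Attribute case class below are illustrative assumptions):

object AttributesAliasSketch {

  // Simplified stand-in for Spline's producer-model Attribute (assumption).
  final case class Attribute(id: String, name: String)

  // The alias added by this commit: the output attribute list of one operation.
  type Attributes = Seq[Attribute]

  // Hypothetical builder trait, only to show how the alias shortens signatures.
  trait Builder {
    def outputAttributes: Attributes          // was: Seq[Attribute]
    def childBuilders: Seq[Builder]
    def inputAttributes: Seq[Attributes] =    // was: Seq[Seq[Attribute]]
      childBuilders.map(_.outputAttributes)
  }
}

The runtime shape is unchanged; the alias only documents that each inner Seq is the attribute list of a single child operation.
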
MergeIntoNodeBuilder.scala
@@ -25,7 +25,7 @@ import za.co.absa.spline.harvester.postprocessing.PostProcessor
import za.co.absa.spline.producer.model.{AttrRef, Attribute, DataOperation, FunctionalExpression}

class MergeIntoNodeBuilder
-(override val logicalPlan: LogicalPlan)
+(logicalPlan: LogicalPlan)
(idGenerators: IdGeneratorsBundle, dataTypeConverter: DataTypeConverter, dataConverter: DataConverter, postProcessor: PostProcessor)
extends GenericPlanNodeBuilder(logicalPlan)(idGenerators, dataTypeConverter, dataConverter, postProcessor) {

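This change drops the redundant override val on the constructor parameter: MergeIntoNodeBuilder only needs to forward logicalPlan to its superclass, which keeps a single definition of the member (assuming GenericPlanNodeBuilder already declares logicalPlan as a val, which this diff does not show). A hypothetical Base/Derived pair illustrating the difference, with String standing in for LogicalPlan:

// Base stands in for GenericPlanNodeBuilder; it owns the logicalPlan member.
class Base(val logicalPlan: String)

// Before: class Derived(override val logicalPlan: String) extends Base(logicalPlan)
//         re-declared the member the base class already provides.
// After:  a plain parameter is only passed up to the base constructor.
class Derived(logicalPlan: String) extends Base(logicalPlan)

object OverrideValSketch extends App {
  val d = new Derived("MERGE INTO plan")
  println(d.logicalPlan) // still accessible; inherited from Base
}
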
UnionNodeBuilder.scala
@@ -21,6 +21,7 @@ import org.apache.spark.sql.catalyst.{expressions => sparkExprssions}
import za.co.absa.commons.lang.extensions.NonOptionExtension._
import za.co.absa.spline.harvester.IdGeneratorsBundle
import za.co.absa.spline.harvester.ModelConstants.CommonExtras
+import za.co.absa.spline.harvester.builder.OperationNodeBuilder.Attributes
import za.co.absa.spline.harvester.builder.plan.UnionNodeBuilder.Names
import za.co.absa.spline.harvester.converter.{DataConverter, DataTypeConverter}
import za.co.absa.spline.harvester.postprocessing.PostProcessor
@@ -31,7 +32,7 @@ class UnionNodeBuilder
(idGenerators: IdGeneratorsBundle, dataTypeConverter: DataTypeConverter, dataConverter: DataConverter, postProcessor: PostProcessor)
extends GenericPlanNodeBuilder(logicalPlan)(idGenerators, dataTypeConverter, dataConverter, postProcessor) {

-private lazy val unionInputs: Seq[Seq[Attribute]] = inputAttributes.transpose
+private lazy val unionInputs: Seq[Attributes] = inputAttributes.transpose

override lazy val functionalExpressions: Seq[FunctionalExpression] =
unionInputs
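For context on the transposed value: inputAttributes yields one Attributes list per union branch, and transpose regroups them positionally, so each element of unionInputs holds the matching column from every branch, which is what the Union output attributes are built from. A small sketch with strings in place of Spline Attribute objects (values are illustrative only):

object UnionTransposeSketch extends App {
  type Attributes = Seq[String] // strings stand in for Spline Attribute objects

  // One Attributes list per union branch, as produced by inputAttributes:
  val inputAttributes: Seq[Attributes] = Seq(
    Seq("a#1", "b#2", "c#3"), // branch 1
    Seq("a#4", "b#5", "c#6")  // branch 2
  )

  // Each group now pairs the matching column across all branches:
  val unionInputs: Seq[Attributes] = inputAttributes.transpose

  println(unionInputs) // List(List(a#1, a#4), List(b#2, b#5), List(c#3, c#6))
}
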
