Commit d52288b

update style
1 parent 7b30bc7 commit d52288b

13 files changed (+15, -25 lines)
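All 13 files make the same kind of change: the commit reorders import statements to follow what looks like the Spark Scala style guide's import grouping, in which imports are grouped as java/javax, then scala.*, then third-party libraries, then org.apache.spark.*, with each group sorted alphabetically and separated by a blank line. A minimal sketch of the convention (the specific imports are chosen for illustration only):

    // Import-ordering convention that the diffs below apply:
    // group 1: java / javax
    import java.util.concurrent.atomic.AtomicLong

    // group 2: scala.*
    import scala.collection.mutable

    // group 3: third-party libraries (none in the files touched here)

    // group 4: org.apache.spark.*, sorted alphabetically. Note that a brace
    // import such as org.apache.spark.{...} sorts after org.apache.spark.util
    // in ASCII order, which is why SQLMetrics.scala puts Utils first.
    import org.apache.spark.util.Utils
    import org.apache.spark.{Accumulable, AccumulableParam, SparkContext}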

core/src/main/scala/org/apache/spark/ui/sql/SQLMetrics.scala

Lines changed: 4 additions & 1 deletion

@@ -17,8 +17,8 @@
 
 package org.apache.spark.ui.sql
 
-import org.apache.spark.{SparkContext, AccumulableParam, Accumulable}
 import org.apache.spark.util.Utils
+import org.apache.spark.{Accumulable, AccumulableParam, SparkContext}
 
 /**
  * Create a layer for specialized metric. We cannot add `@specialized` to
@@ -144,6 +144,9 @@ private[spark] object SQLMetrics {
    * spill size, etc.
    */
   def createSizeMetric(sc: SparkContext, name: String): LongSQLMetric = {
+    // The final result of this metric in physical operator UI may look like:
+    // data size total (min, med, max):
+    // 100GB (100MB, 1GB, 10GB)
     createLongMetric(sc, s"$name total (min, med, max)", StaticsLongSQLMetricParam)
   }
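For context on the new comment, here is a hedged usage sketch of this metric API; the names `sc` and `bytesWritten` are hypothetical placeholders, not code from this commit:

    // Sketch: how a physical operator might report data size through the
    // size metric. LongSQLMetric is an Accumulable, so `+=` adds to it.
    val dataSize: LongSQLMetric = SQLMetrics.createSizeMetric(sc, "data size")
    dataSize += bytesWritten  // each task accumulates the bytes it produced
    // The UI then aggregates per-task values into the format described in the
    // comment above, e.g. "data size total (min, med, max): 100GB (100MB, 1GB, 10GB)"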

core/src/main/scala/org/apache/spark/ui/sql/SparkPlanGraph.scala

Lines changed: 0 additions & 2 deletions

@@ -19,8 +19,6 @@ package org.apache.spark.ui.sql
 
 import java.util.concurrent.atomic.AtomicLong
 
-import org.apache.spark.util.Utils
-
 import scala.collection.mutable
 
 /**

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 1 addition & 2 deletions

@@ -21,8 +21,6 @@ import java.beans.{BeanInfo, Introspector}
 import java.util.Properties
 import java.util.concurrent.atomic.AtomicReference
 
-import org.apache.spark.ui.sql.{SQLTab, SQLListener}
-
 import scala.collection.JavaConverters._
 import scala.collection.immutable
 import scala.reflect.runtime.universe.TypeTag
@@ -48,6 +46,7 @@ import org.apache.spark.sql.sources.BaseRelation
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.{execution => sparkexecution}
 import org.apache.spark.sql.util.ExecutionListenerManager
+import org.apache.spark.ui.sql.{SQLTab, SQLListener}
 import org.apache.spark.util.Utils
 
 /**

sql/core/src/main/scala/org/apache/spark/sql/execution/Aggregate.scala

Lines changed: 1 addition & 0 deletions

@@ -18,6 +18,7 @@
 package org.apache.spark.sql.execution
 
 import java.util.HashMap
+
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.errors._

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala

Lines changed: 1 addition & 2 deletions

@@ -19,8 +19,6 @@ package org.apache.spark.sql.execution
 
 import java.util.concurrent.atomic.AtomicBoolean
 
-import org.apache.spark.ui.sql.{LongSQLMetric, SQLMetric}
-
 import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.Logging
@@ -34,6 +32,7 @@ import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.plans.QueryPlan
 import org.apache.spark.sql.catalyst.plans.physical._
 import org.apache.spark.sql.types.DataType
+import org.apache.spark.ui.sql.{LongSQLMetric, SQLMetric}
 
 object SparkPlan {
   protected[sql] val currentContext = new ThreadLocal[SQLContext]()

sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregationIterator.scala

Lines changed: 1 addition & 2 deletions

@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.execution.aggregate
 
-import org.apache.spark.ui.sql.LongSQLMetric
-
 import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.unsafe.KVIterator
@@ -29,6 +27,7 @@ import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeRowJoiner
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.execution.{UnsafeKVExternalSorter, UnsafeFixedWidthAggregationMap}
 import org.apache.spark.sql.types.StructType
+import org.apache.spark.ui.sql.LongSQLMetric
 
 /**
  * An iterator used to evaluate aggregate functions. It operates on [[UnsafeRow]]s.

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastHashJoin.scala

Lines changed: 1 addition & 2 deletions

@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.execution.joins
 
-import org.apache.spark.ui.sql.SQLMetrics
-
 import scala.concurrent._
 import scala.concurrent.duration._
 
@@ -27,6 +25,7 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.sql.catalyst.plans.physical.{Distribution, Partitioning, UnspecifiedDistribution}
 import org.apache.spark.sql.execution.{BinaryNode, SQLExecution, SparkPlan}
+import org.apache.spark.ui.sql.SQLMetrics
 import org.apache.spark.util.ThreadUtils
 import org.apache.spark.{InternalAccumulator, TaskContext}

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastHashOuterJoin.scala

Lines changed: 1 addition & 2 deletions

@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.execution.joins
 
-import org.apache.spark.ui.sql.SQLMetrics
-
 import scala.concurrent._
 import scala.concurrent.duration._
 
@@ -28,6 +26,7 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.physical.{Distribution, Partitioning, UnspecifiedDistribution}
 import org.apache.spark.sql.catalyst.plans.{JoinType, LeftOuter, RightOuter}
 import org.apache.spark.sql.execution.{BinaryNode, SQLExecution, SparkPlan}
+import org.apache.spark.ui.sql.SQLMetrics
 import org.apache.spark.{InternalAccumulator, TaskContext}
 
 /**

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.scala

Lines changed: 1 addition & 1 deletion

@@ -17,12 +17,12 @@
 
 package org.apache.spark.sql.execution.joins
 
-import org.apache.spark.ui.sql.SQLMetrics
 import org.apache.spark.{InternalAccumulator, TaskContext}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.execution.{BinaryNode, SparkPlan}
+import org.apache.spark.ui.sql.SQLMetrics
 
 /**
  * Build the right table's join keys into a HashSet, and iteratively go through the left
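The scaladoc in the context lines above summarizes the semi-join strategy. As a toy sketch of that idea on plain Scala collections (illustrative only; the real operator works on InternalRow iterators and SQL metrics):

    // Hash-set left semi join: keep each left row whose join key appears on
    // the right. Generic types and names here are illustrative.
    def leftSemiJoin[K, A](left: Seq[(K, A)], rightKeys: Seq[K]): Seq[(K, A)] = {
      val keySet = rightKeys.toSet                       // build: hash the right keys
      left.filter { case (k, _) => keySet.contains(k) }  // probe: scan the left side
    }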

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/ShuffledHashOuterJoin.scala

Lines changed: 1 addition & 2 deletions

@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.execution.joins
 
-import org.apache.spark.ui.sql.SQLMetrics
-
 import scala.collection.JavaConverters._
 
 import org.apache.spark.rdd.RDD
@@ -27,6 +25,7 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.physical._
 import org.apache.spark.sql.catalyst.plans.{FullOuter, JoinType, LeftOuter, RightOuter}
 import org.apache.spark.sql.execution.{BinaryNode, SparkPlan}
+import org.apache.spark.ui.sql.SQLMetrics
 
 /**
  * Performs a hash based outer join for two child relations by shuffling the data using
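The scaladoc above describes a hash-based outer join. A toy left-outer sketch of the same idea on plain collections (illustrative only; the actual operator also handles RightOuter and FullOuter):

    // Hash-based left outer join: build a hash table on the right side's keys,
    // then emit each left row with its matches, or with None when none exist.
    def leftOuterJoin[K, A, B](left: Seq[(K, A)], right: Seq[(K, B)]): Seq[(K, A, Option[B])] = {
      val table = right.groupBy(_._1)  // build phase
      left.flatMap { case (k, a) =>    // probe phase
        table.get(k) match {
          case Some(ms) => ms.map { case (_, b) => (k, a, Some(b)) }
          case None     => Seq((k, a, None))
        }
      }
    }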
