@@ -90,7 +90,7 @@ private[sql] case class AvroDataToCatalyst(
   @transient private lazy val nullResultRow: Any = dataType match {
     case st: StructType =>
       val resultRow = new SpecificInternalRow(st.map(_.dataType))
-      for(i <- 0 until st.length) {
+      for (i <- 0 until st.length) {
         resultRow.setNullAt(i)
       }
       resultRow
core/src/main/scala/org/apache/spark/api/r/SerDe.scala (1 addition, 1 deletion)
@@ -389,7 +389,7 @@ private[spark] object SerDe {
       writeType(dos, "map")
       writeInt(dos, v.size)
       val iter = v.entrySet.iterator
-      while(iter.hasNext) {
+      while (iter.hasNext) {
         val entry = iter.next
         val key = entry.getKey
         val value = entry.getValue
@@ -317,7 +317,7 @@ object HistoryServer extends Logging {
     ShutdownHookManager.addShutdownHook { () => server.stop() }
 
     // Wait until the end of the world... or if the HistoryServer process is manually stopped
-    while(true) { Thread.sleep(Int.MaxValue) }
+    while (true) { Thread.sleep(Int.MaxValue) }
   }
 
   /**
@@ -151,7 +151,7 @@ private[spark] class HadoopDelegationTokenManager(
         creds.addAll(newTokens)
       }
     })
-    if(!currentUser.equals(freshUGI)) {
+    if (!currentUser.equals(freshUGI)) {
      FileSystem.closeAllForUGI(freshUGI)
    }
   }
core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala (1 addition, 1 deletion)
@@ -240,7 +240,7 @@ private object PipedRDD {
   def tokenize(command: String): Seq[String] = {
     val buf = new ArrayBuffer[String]
     val tok = new StringTokenizer(command)
-    while(tok.hasMoreElements) {
+    while (tok.hasMoreElements) {
       buf += tok.nextToken()
     }
     buf.toSeq
@@ -806,7 +806,7 @@ private[spark] class TaskSetManager(
     val info = taskInfos(tid)
     // SPARK-37300: when the task was already finished state, just ignore it,
     // so that there won't cause successful and tasksSuccessful wrong result.
-    if(info.finished) {
+    if (info.finished) {
       if (dropTaskInfoAccumulablesOnTaskCompletion) {
         // SPARK-46383: Clear out the accumulables for a completed task to reduce accumulable
         // lifetime.
@@ -238,7 +238,7 @@ private[storage] class BlockInfoManager(trackingCacheVisibility: Boolean = false
       f: BlockInfo => Boolean): Option[BlockInfo] = {
     var done = false
     var result: Option[BlockInfo] = None
-    while(!done) {
+    while (!done) {
       val wrapper = blockInfoWrappers.get(blockId)
       if (wrapper == null) {
         done = true
@@ -1516,7 +1516,7 @@ private[spark] class BlockManager(
       return true
     }
 
-    if(master.isRDDBlockVisible(blockId)) {
+    if (master.isRDDBlockVisible(blockId)) {
       // Cache the visibility status if block exists.
       blockInfoManager.tryMarkBlockAsVisible(blockId)
       true
@@ -1882,7 +1882,7 @@ private[spark] class BlockManager(
       blockId,
       numPeersToReplicateTo)
 
-    while(numFailures <= maxReplicationFailureCount &&
+    while (numFailures <= maxReplicationFailureCount &&
       peersForReplication.nonEmpty &&
       peersReplicatedTo.size < numPeersToReplicateTo) {
       val peer = peersForReplication.head
@@ -45,7 +45,7 @@ class BitSet(numBits: Int) extends Serializable {
   def setUntil(bitIndex: Int): Unit = {
     val wordIndex = bitIndex >> 6 // divide by 64
     Arrays.fill(words, 0, wordIndex, -1)
-    if(wordIndex < words.length) {
+    if (wordIndex < words.length) {
       // Set the remaining bits (note that the mask could still be zero)
       val mask = ~(-1L << (bitIndex & 0x3f))
       words(wordIndex) |= mask
@@ -58,7 +58,7 @@ class BitSet(numBits: Int) extends Serializable {
   def clearUntil(bitIndex: Int): Unit = {
     val wordIndex = bitIndex >> 6 // divide by 64
     Arrays.fill(words, 0, wordIndex, 0)
-    if(wordIndex < words.length) {
+    if (wordIndex < words.length) {
       // Clear the remaining bits
       val mask = -1L << (bitIndex & 0x3f)
       words(wordIndex) &= mask
@@ -75,7 +75,7 @@ class BitSet(numBits: Int) extends Serializable {
     assert(newBS.numWords >= numWords)
     assert(newBS.numWords >= other.numWords)
     var ind = 0
-    while( ind < smaller ) {
+    while (ind < smaller) {
       newBS.words(ind) = words(ind) & other.words(ind)
       ind += 1
     }
@@ -92,15 +92,15 @@ class BitSet(numBits: Int) extends Serializable {
     assert(newBS.numWords >= other.numWords)
     val smaller = math.min(numWords, other.numWords)
     var ind = 0
-    while( ind < smaller ) {
+    while (ind < smaller) {
       newBS.words(ind) = words(ind) | other.words(ind)
       ind += 1
     }
-    while( ind < numWords ) {
+    while (ind < numWords) {
       newBS.words(ind) = words(ind)
       ind += 1
     }
-    while( ind < other.numWords ) {
+    while (ind < other.numWords) {
       newBS.words(ind) = other.words(ind)
       ind += 1
     }
@@ -242,7 +242,7 @@ class BitSet(numBits: Int) extends Serializable {
   def union(other: BitSet): Unit = {
     require(this.numWords <= other.numWords)
     var ind = 0
-    while( ind < this.numWords ) {
+    while (ind < this.numWords) {
      this.words(ind) = this.words(ind) | other.words(ind)
      ind += 1
    }
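Aside on the mask logic in the setUntil/clearUntil hunks above (the arithmetic itself is untouched by this change): -1L << (bitIndex & 0x3f) has zeros in the low bitIndex % 64 positions, and its complement has ones there. A minimal standalone sanity check, with values chosen here for illustration rather than taken from Spark's test suite:

// Sanity check of the BitSet masks; assumes 64-bit words, as BitSet does.
object MaskCheck extends App {
  val bitIndex = 3
  val setMask = ~(-1L << (bitIndex & 0x3f)) // low 3 bits set:   ...0000111
  val clearMask = -1L << (bitIndex & 0x3f)  // low 3 bits clear: ...1111000
  assert(setMask == 0x7L)
  assert(clearMask == ~0x7L)
  println(f"setMask=$setMask%x clearMask=$clearMask%x")
}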
core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala (1 addition, 1 deletion)
@@ -792,7 +792,7 @@ class RDDSuite extends SparkFunSuite with SharedSparkContext with Eventually {
   test("randomSplit") {
     val n = 600
     val data = sc.parallelize(1 to n, 2)
-    for(seed <- 1 to 5) {
+    for (seed <- 1 to 5) {
       val splits = data.randomSplit(Array(1.0, 2.0, 3.0), seed)
       assert(splits.length == 3, "wrong number of splits")
       assert(splits.flatMap(_.collect()).sorted.toList == data.collect().toList,
@@ -128,7 +128,7 @@ class TopologyAwareBlockReplicationPolicyBehavior extends RandomBlockReplication
       assert(prioritizedPeers.toSet.size == numReplicas)
       val priorityPeers = prioritizedPeers.take(2)
       assert(priorityPeers.forall(p => p.host != blockManager.host))
-      if(numReplicas > 1) {
+      if (numReplicas > 1) {
        // both these conditions should be satisfied when numReplicas > 1
        assert(priorityPeers.exists(p => p.topologyInfo == blockManager.topologyInfo))
        assert(priorityPeers.exists(p => p.topologyInfo != blockManager.topologyInfo))
@@ -90,7 +90,7 @@ object LocalKMeans {
 
     println(s"Initial centers: $kPoints")
 
-    while(tempDist > convergeDist) {
+    while (tempDist > convergeDist) {
       val closest = data.map (p => (closestPoint(p, kPoints), (p, 1)))
 
       val mappings = closest.groupBy[Int] (x => x._1)
@@ -79,7 +79,7 @@ object SparkKMeans {
     val kPoints = data.takeSample(withReplacement = false, K, 42)
     var tempDist = 1.0
 
-    while(tempDist > convergeDist) {
+    while (tempDist > convergeDist) {
       val closest = data.map (p => (closestPoint(p, kPoints), (p, 1)))
 
       val pointStats = closest.reduceByKey(mergeResults)
@@ -40,7 +40,7 @@ object Word2VecExample {
 
     val synonyms = model.findSynonyms("1", 5)
 
-    for((synonym, cosineSimilarity) <- synonyms) {
+    for ((synonym, cosineSimilarity) <- synonyms) {
       println(s"$synonym $cosineSimilarity")
     }
 
@@ -87,7 +87,7 @@ class CustomReceiver(host: String, port: Int)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
-    while(!isStopped() && userInput != null) {
+    while (!isStopped() && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
@@ -121,7 +121,7 @@ class MultilabelMetrics @Since("1.2.0") (predictionAndLabels: RDD[(Array[Double]
   def f1Measure(label: Double): Double = {
     val p = precision(label)
     val r = recall(label)
-    if((p + r) == 0) 0.0 else 2 * p * r / (p + r)
+    if ((p + r) == 0) 0.0 else 2 * p * r / (p + r)
   }
 
   private lazy val sumTp = summary.tpPerClass.values.sum
@@ -70,7 +70,7 @@ private[k8s] class LoggingPodStatusWatcherImpl(conf: KubernetesDriverConf)
 
   override def onClose(e: WatcherException): Unit = {
     logDebug(s"Stopping watching application $appId with last-observed phase $phase")
-    if(e != null && e.isHttpGone) {
+    if (e != null && e.isHttpGone) {
      resourceTooOldReceived = true
      logDebug(s"Got HTTP Gone code, resource version changed in k8s api: $e")
    } else {
@@ -108,7 +108,7 @@ private[k8s] class LoggingPodStatusWatcherImpl(conf: KubernetesDriverConf)
       }
     }
 
-    if(podCompleted) {
+    if (podCompleted) {
       logInfo(
         pod.map { p => log"Container final statuses:\n\n${MDC(STATUS, containersDescription(p))}" }
           .getOrElse(log"No containers were found in the driver pod."))
@@ -85,7 +85,7 @@ object ExternalCatalogUtils {
     } else {
       val sb = new java.lang.StringBuilder(length + 16)
       if (firstIndex != 0) sb.append(path, 0, firstIndex)
-      while(firstIndex < length) {
+      while (firstIndex < length) {
         val c = path.charAt(firstIndex)
         if (needsEscaping(c)) {
           sb.append('%').append(HEX_CHARS((c & 0xF0) >> 4)).append(HEX_CHARS(c & 0x0F))
@@ -110,7 +110,7 @@ object ExternalCatalogUtils {
     } else {
       val sb = new java.lang.StringBuilder(length)
       var plaintextStartIdx = 0
-      while(plaintextEndIdx != -1 && plaintextEndIdx + 2 < length) {
+      while (plaintextEndIdx != -1 && plaintextEndIdx + 2 < length) {
        if (plaintextEndIdx > plaintextStartIdx) sb.append(path, plaintextStartIdx, plaintextEndIdx)
        val high = path.charAt(plaintextEndIdx + 1)
        if ((high >>> 8) == 0 && unhexDigits(high) != -1) {
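For context on the escaping loop above (its behavior is unchanged by this diff): each character that needs escaping is written as '%' followed by its two hex digits, high nibble first. A tiny illustration of that nibble arithmetic; HEX_CHARS is redefined locally here as an assumption, mirroring the table the loop indexes into:

// Illustrates the %XX encoding used in the escape loop above.
val HEX_CHARS = "0123456789ABCDEF".toCharArray
val c = ' ' // 0x20, a character that needs escaping in partition paths
val escaped = s"%${HEX_CHARS((c & 0xF0) >> 4)}${HEX_CHARS(c & 0x0F)}"
assert(escaped == "%20")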
@@ -509,7 +509,7 @@ class InMemoryCatalog(
         try {
           val fs = tablePath.getFileSystem(hadoopConfig)
           fs.mkdirs(newPartPath)
-          if(!fs.rename(oldPartPath, newPartPath)) {
+          if (!fs.rename(oldPartPath, newPartPath)) {
            throw new IOException(s"Renaming partition path from $oldPartPath to " +
              s"$newPartPath returned false")
          }
@@ -1026,7 +1026,7 @@ case class Cast(
       try doubleStr.toDouble catch {
         case _: NumberFormatException =>
           val d = Cast.processFloatingPointSpecialLiterals(doubleStr, false)
-          if(ansiEnabled && d == null) {
+          if (ansiEnabled && d == null) {
             throw QueryExecutionErrors.invalidInputInCastToNumberError(
               DoubleType, s, getContextOrNull())
           } else {
@@ -1602,7 +1602,7 @@ case class Cast(
       val util = IntervalUtils.getClass.getCanonicalName.stripSuffix("$")
       (c, evPrim, evNull) =>
         code"""$evPrim = $util.safeStringToInterval($c);
-          if(${evPrim} == null) {
+          if (${evPrim} == null) {
            ${evNull} = true;
          }
        """.stripMargin
@@ -337,11 +337,11 @@ case class CaseWhen(
 
     // This generates code like:
     //   caseWhenResultState = caseWhen_1(i);
-    //   if(caseWhenResultState != -1) {
+    //   if (caseWhenResultState != -1) {
     //     continue;
     //   }
     //   caseWhenResultState = caseWhen_2(i);
-    //   if(caseWhenResultState != -1) {
+    //   if (caseWhenResultState != -1) {
     //     continue;
     //   }
     //   ...
@@ -1124,7 +1124,7 @@ case class MapZipWith(left: Expression, right: Expression, function: Expression)
 
   private def getKeysWithIndexesFast(keys1: ArrayData, keys2: ArrayData) = {
     val hashMap = new mutable.LinkedHashMap[Any, Array[Option[Int]]]
-    for((z, array) <- Array((0, keys1), (1, keys2))) {
+    for ((z, array) <- Array((0, keys1), (1, keys2))) {
       var i = 0
       while (i < array.numElements()) {
         val key = array.get(i, keyType)
@@ -1146,7 +1146,7 @@ case class MapZipWith(left: Expression, right: Expression, function: Expression)
 
   private def getKeysWithIndexesBruteForce(keys1: ArrayData, keys2: ArrayData) = {
     val arrayBuffer = new mutable.ArrayBuffer[(Any, Array[Option[Int]])]
-    for((z, array) <- Array((0, keys1), (1, keys2))) {
+    for ((z, array) <- Array((0, keys1), (1, keys2))) {
       var i = 0
       while (i < array.numElements()) {
         val key = array.get(i, keyType)
@@ -1156,7 +1156,7 @@ case class MapZipWith(left: Expression, right: Expression, function: Expression)
           val (bufferKey, indexes) = arrayBuffer(j)
           if (ordering.equiv(bufferKey, key)) {
             found = true
-            if(indexes(z).isEmpty) {
+            if (indexes(z).isEmpty) {
               indexes(z) = Some(i)
             }
           }
@@ -1695,7 +1695,7 @@ case class ExternalMapToCatalyst private(
         final Object[] $convertedValues = new Object[$length];
         int $index = 0;
         $defineEntries
-        while($entries.hasNext()) {
+        while ($entries.hasNext()) {
           $defineKeyValue
           $keyNullCheck
           $valueNullCheck
@@ -77,7 +77,7 @@ abstract class StringRegexExpression extends BinaryExpression
 
   protected override def nullSafeEval(input1: Any, input2: Any): Any = {
     val regex = pattern(input2.asInstanceOf[UTF8String].toString)
-    if(regex == null) {
+    if (regex == null) {
       null
     } else {
       matches(regex, input1.asInstanceOf[UTF8String].toString)
@@ -984,7 +984,7 @@ case class RegExpExtractAll(subject: Expression, regexp: Expression, idx: Expres
   override def nullSafeEval(s: Any, p: Any, r: Any): Any = {
     val m = getLastMatcher(s, p)
     val matchResults = new ArrayBuffer[UTF8String]()
-    while(m.find) {
+    while (m.find) {
       val mr: MatchResult = m.toMatchResult
       val index = r.asInstanceOf[Int]
       RegExpExtractBase.checkGroupIndex(prettyName, mr.groupCount, index)
@@ -191,7 +191,7 @@ case class JoinEstimation(join: Join) extends Logging {
     }
     val keyStatsAfterJoin = new mutable.HashMap[Attribute, ColumnStat]()
     var i = 0
-    while(i < keyPairs.length && joinCard != 0) {
+    while (i < keyPairs.length && joinCard != 0) {
       val (leftKey, rightKey) = keyPairs(i)
       // Check if the two sides are disjoint
       val leftKeyStat = leftStats.attributeStats(leftKey)
@@ -176,7 +176,7 @@ object RuleIdCollection {
       "org.apache.spark.sql.catalyst.optimizer.UnwrapCastInBinaryComparison" :: Nil
   }
 
-  if(Utils.isTesting) {
+  if (Utils.isTesting) {
     rulesNeedingIds = rulesNeedingIds ++ {
       // In the production code path, the following rules are run in CombinedTypeCoercionRule, and
       // hence we only need to add them for unit testing.
@@ -188,7 +188,7 @@ class PercentileSuite extends SparkFunSuite {
       StringType, DateType, TimestampType,
       CalendarIntervalType, NullType)
 
-    for(dataType <- invalidDataTypes;
+    for (dataType <- invalidDataTypes;
       frequencyType <- validFrequencyTypes) {
       val child = AttributeReference("a", dataType)()
       val frq = AttributeReference("frq", frequencyType)()
@@ -207,7 +207,7 @@ class PercentileSuite extends SparkFunSuite {
       )
     }
 
-    for(dataType <- validDataTypes;
+    for (dataType <- validDataTypes;
       frequencyType <- invalidFrequencyDataTypes) {
       val child = AttributeReference("a", dataType)()
       val frq = AttributeReference("frq", frequencyType)()
@@ -244,7 +244,7 @@ class ParserUtilsSuite extends SparkFunSuite {
         Some(context)
       case _ =>
         val it = ctx.children.iterator()
-        while(it.hasNext) {
+        while (it.hasNext) {
           it.next() match {
             case p: ParserRuleContext =>
               val childResult = findCastContext(p)
@@ -272,13 +272,13 @@ trait AggregateCodegenSupport
 
     aggCodes.zip(aggregateExpressions.map(ae => (ae.mode, ae.filter))).map {
       case (aggCode, (Partial | Complete, Some(condition))) =>
-        // Note: wrap in "do { } while(false);", so the generated checks can jump out
+        // Note: wrap in "do { } while (false);", so the generated checks can jump out
         // with "continue;"
         s"""
           |do {
          |  ${generatePredicateCode(ctx, condition, inputAttrs, input)}
          |  $aggCode
-         |} while(false);
+         |} while (false);
        """.stripMargin
       case (aggCode, _) =>
        aggCode
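Every hunk in this change set is the same mechanical edit: insert one space between an if/for/while keyword and its opening parenthesis. A rule like this can be checked automatically; the sketch below is a minimal standalone lint, not Spark's actual scalastyle configuration. The object name, regex, and directory walk are illustrative assumptions, and a real checker would also need to skip string literals and comments:

// Minimal sketch of a lint for the style rule applied in this diff:
// flag if(, for(, while( written without a space before the parenthesis.
import java.nio.file.{Files, Paths}
import scala.jdk.CollectionConverters._

object KeywordSpaceCheck {
  private val NoSpaceAfterKeyword = raw"\b(if|for|while)\(".r

  def main(args: Array[String]): Unit = {
    val root = Paths.get(args.headOption.getOrElse("."))
    val scalaFiles = Files.walk(root).iterator().asScala
      .filter(_.toString.endsWith(".scala"))
    for (path <- scalaFiles) {
      for ((line, i) <- Files.readAllLines(path).asScala.zipWithIndex) {
        if (NoSpaceAfterKeyword.findFirstIn(line).isDefined) {
          println(s"$path:${i + 1}: missing space after keyword: ${line.trim}")
        }
      }
    }
  }
}

Run from a repository root (for example with an argument such as core/src), it should flag exactly the kind of lines removed above.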