diff --git a/core/src/main/java/org/apache/spark/api/java/function/package-info.java b/core/src/main/java/org/apache/spark/api/java/function/package-info.java
index 463a42f23342..eefb29aca9d4 100644
--- a/core/src/main/java/org/apache/spark/api/java/function/package-info.java
+++ b/core/src/main/java/org/apache/spark/api/java/function/package-info.java
@@ -20,4 +20,4 @@
* these interfaces to pass functions to various Java API methods for Spark. Please visit Spark's
* Java programming guide for more details.
*/
-package org.apache.spark.api.java.function;
\ No newline at end of file
+package org.apache.spark.api.java.function;
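The hunk above, and the matching package-info.java and log4j.properties hunks throughout this patch, all make the same one-character fix: the files previously ended without a trailing newline, which is why the pre-image side carries the "\ No newline at end of file" marker while the post-image side does not. A minimal sketch, assuming nothing beyond the JDK (the class name is ours, not part of the patch), of how such files can be detected:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

// Reports files that do not end with '\n' -- the condition marked by
// "\ No newline at end of file" in the hunks above and below.
public final class NewlineAtEofCheck {
  public static void main(String[] args) throws IOException {
    for (String arg : args) {
      byte[] bytes = Files.readAllBytes(Paths.get(arg));
      if (bytes.length == 0 || bytes[bytes.length - 1] != '\n') {
        System.out.println(arg + ": no newline at end of file");
      }
    }
  }
}
```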
diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
index c4041a97e86a..2be5a16b2d1e 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
@@ -256,8 +256,8 @@ public long spill(long size, MemoryConsumer trigger) throws IOException {
final long spillSize = freeMemory();
inMemSorter.reset();
// Reset the in-memory sorter's pointer array only after freeing up the memory pages holding the
- // records. Otherwise, if the task is over allocated memory, then without freeing the memory pages,
- // we might not be able to get memory for the pointer array.
+ // records. Otherwise, if the task is over-allocated memory, then without freeing the memory
+ // pages, we might not be able to get memory for the pointer array.
taskContext.taskMetrics().incMemoryBytesSpilled(spillSize);
return spillSize;
}
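This hunk only re-wraps a two-line comment so that each line fits within the project's 100-character limit; the wording is otherwise unchanged. The identical comment in UnsafeExternalSorter.java gets the same re-wrap later in this patch.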
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
index 21f2fde79d73..c44630fbbc2f 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
@@ -22,7 +22,6 @@
import org.apache.spark.annotation.Private;
import org.apache.spark.unsafe.types.ByteArray;
import org.apache.spark.unsafe.types.UTF8String;
-import org.apache.spark.util.Utils;
@Private
public class PrefixComparators {
@@ -69,7 +68,7 @@ public static long computePrefix(double value) {
* Provides radix sort parameters. Comparators implementing this also are indicating that the
* ordering they define is compatible with radix sort.
*/
- public static abstract class RadixSortSupport extends PrefixComparator {
+ public abstract static class RadixSortSupport extends PrefixComparator {
/** @return Whether the sort should be descending in binary sort order. */
public abstract boolean sortDescending();
@@ -82,37 +81,37 @@ public static abstract class RadixSortSupport extends PrefixComparator {
//
public static final class UnsignedPrefixComparator extends RadixSortSupport {
- @Override public final boolean sortDescending() { return false; }
- @Override public final boolean sortSigned() { return false; }
+ @Override public boolean sortDescending() { return false; }
+ @Override public boolean sortSigned() { return false; }
@Override
- public final int compare(long aPrefix, long bPrefix) {
+ public int compare(long aPrefix, long bPrefix) {
return UnsignedLongs.compare(aPrefix, bPrefix);
}
}
public static final class UnsignedPrefixComparatorDesc extends RadixSortSupport {
- @Override public final boolean sortDescending() { return true; }
- @Override public final boolean sortSigned() { return false; }
+ @Override public boolean sortDescending() { return true; }
+ @Override public boolean sortSigned() { return false; }
@Override
- public final int compare(long bPrefix, long aPrefix) {
+ public int compare(long bPrefix, long aPrefix) {
return UnsignedLongs.compare(aPrefix, bPrefix);
}
}
public static final class SignedPrefixComparator extends RadixSortSupport {
- @Override public final boolean sortDescending() { return false; }
- @Override public final boolean sortSigned() { return true; }
+ @Override public boolean sortDescending() { return false; }
+ @Override public boolean sortSigned() { return true; }
@Override
- public final int compare(long a, long b) {
+ public int compare(long a, long b) {
return (a < b) ? -1 : (a > b) ? 1 : 0;
}
}
public static final class SignedPrefixComparatorDesc extends RadixSortSupport {
- @Override public final boolean sortDescending() { return true; }
- @Override public final boolean sortSigned() { return true; }
+ @Override public boolean sortDescending() { return true; }
+ @Override public boolean sortSigned() { return true; }
@Override
- public final int compare(long b, long a) {
+ public int compare(long b, long a) {
return (a < b) ? -1 : (a > b) ? 1 : 0;
}
}
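Three separate cleanups land in PrefixComparators.java. The first hunk drops an unused import (the same cleanup later removes the unused FileSystem import from SpecificParquetRecordReaderBase.java). The second reorders "static abstract" to "abstract static", the modifier order recommended by JLS 8.1.1 and enforced by Checkstyle's ModifierOrder check. The remaining hunks remove "final" from methods of final classes: a final class cannot be subclassed, so its methods can never be overridden and the method-level "final" is redundant (Checkstyle's RedundantModifier check). A small illustration, with names of our own choosing rather than from the patch:

```java
// Illustrative only; the class and members below are not from the patch.
public final class ModifierExamples {
  // In a final class, marking this method 'final' would be redundant:
  // the class cannot be subclassed, so the method cannot be overridden.
  public int compare(long a, long b) {
    return (a < b) ? -1 : (a > b) ? 1 : 0;
  }

  // JLS 8.1.1 puts 'abstract' before 'static', so this is the preferred
  // order ('static abstract' compiles but is flagged by ModifierOrder).
  public abstract static class Base {
    public abstract boolean sortDescending();
  }
}
```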
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java
index 3357b8e47497..4f3f0de7b8d7 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java
@@ -16,7 +16,7 @@
*/
package org.apache.spark.util.collection.unsafe.sort;
-
+
import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.array.LongArray;
@@ -227,7 +227,7 @@ private static long[][] getKeyPrefixArrayCounts(
}
return counts;
}
-
+
/**
* Specialization of sortAtByte() for key-prefix arrays.
*/
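Both hunks in RadixSort.java are whitespace-only: the removed and added lines appear to differ only in trailing whitespace on otherwise blank lines, with no change to the surrounding code.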
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
index 3c1cd39dc226..8b6c96a4c4e6 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
@@ -212,8 +212,8 @@ public long spill(long size, MemoryConsumer trigger) throws IOException {
// written to disk. This also counts the space needed to store the sorter's pointer array.
inMemSorter.reset();
// Reset the in-memory sorter's pointer array only after freeing up the memory pages holding the
- // records. Otherwise, if the task is over allocated memory, then without freeing the memory pages,
- // we might not be able to get memory for the pointer array.
+ // records. Otherwise, if the task is over-allocated memory, then without freeing the memory
+ // pages, we might not be able to get memory for the pointer array.
taskContext.taskMetrics().incMemoryBytesSpilled(spillSize);
totalSpillBytes += spillSize;
diff --git a/core/src/main/scala/org/apache/spark/annotation/package-info.java b/core/src/main/scala/org/apache/spark/annotation/package-info.java
index 12c7afe6f108..9efdccf6b040 100644
--- a/core/src/main/scala/org/apache/spark/annotation/package-info.java
+++ b/core/src/main/scala/org/apache/spark/annotation/package-info.java
@@ -20,4 +20,4 @@
* This package consist of these annotations, which are used project wide and are reflected in
* Scala and Java docs.
*/
-package org.apache.spark.annotation;
\ No newline at end of file
+package org.apache.spark.annotation;
diff --git a/core/src/main/scala/org/apache/spark/api/java/package-info.java b/core/src/main/scala/org/apache/spark/api/java/package-info.java
index 10a480fc78e4..699181cafae8 100644
--- a/core/src/main/scala/org/apache/spark/api/java/package-info.java
+++ b/core/src/main/scala/org/apache/spark/api/java/package-info.java
@@ -18,4 +18,4 @@
/**
* Spark Java programming APIs.
*/
-package org.apache.spark.api.java;
\ No newline at end of file
+package org.apache.spark.api.java;
diff --git a/core/src/main/scala/org/apache/spark/broadcast/package-info.java b/core/src/main/scala/org/apache/spark/broadcast/package-info.java
index 1510e6e84c7a..bbf4a684a19e 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/package-info.java
+++ b/core/src/main/scala/org/apache/spark/broadcast/package-info.java
@@ -18,4 +18,4 @@
/**
* Spark's broadcast variables, used to broadcast immutable datasets to all nodes.
*/
-package org.apache.spark.broadcast;
\ No newline at end of file
+package org.apache.spark.broadcast;
diff --git a/core/src/main/scala/org/apache/spark/executor/package-info.java b/core/src/main/scala/org/apache/spark/executor/package-info.java
index dd3b6815fb45..fb280964c490 100644
--- a/core/src/main/scala/org/apache/spark/executor/package-info.java
+++ b/core/src/main/scala/org/apache/spark/executor/package-info.java
@@ -18,4 +18,4 @@
/**
* Package for executor components used with various cluster managers.
*/
-package org.apache.spark.executor;
\ No newline at end of file
+package org.apache.spark.executor;
diff --git a/core/src/main/scala/org/apache/spark/io/package-info.java b/core/src/main/scala/org/apache/spark/io/package-info.java
index bea1bfdb6375..1a466602806e 100644
--- a/core/src/main/scala/org/apache/spark/io/package-info.java
+++ b/core/src/main/scala/org/apache/spark/io/package-info.java
@@ -18,4 +18,4 @@
/**
* IO codecs used for compression.
*/
-package org.apache.spark.io;
\ No newline at end of file
+package org.apache.spark.io;
diff --git a/core/src/main/scala/org/apache/spark/rdd/package-info.java b/core/src/main/scala/org/apache/spark/rdd/package-info.java
index 176cc58179fb..d9aa9bebe56d 100644
--- a/core/src/main/scala/org/apache/spark/rdd/package-info.java
+++ b/core/src/main/scala/org/apache/spark/rdd/package-info.java
@@ -18,4 +18,4 @@
/**
* Provides implementation's of various RDDs.
*/
-package org.apache.spark.rdd;
\ No newline at end of file
+package org.apache.spark.rdd;
diff --git a/core/src/main/scala/org/apache/spark/scheduler/package-info.java b/core/src/main/scala/org/apache/spark/scheduler/package-info.java
index 5b4a628d3cee..90fc65251eae 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/package-info.java
+++ b/core/src/main/scala/org/apache/spark/scheduler/package-info.java
@@ -18,4 +18,4 @@
/**
* Spark's DAG scheduler.
*/
-package org.apache.spark.scheduler;
\ No newline at end of file
+package org.apache.spark.scheduler;
diff --git a/core/src/main/scala/org/apache/spark/util/package-info.java b/core/src/main/scala/org/apache/spark/util/package-info.java
index 819f54ee41a7..4c5d33d88d2b 100644
--- a/core/src/main/scala/org/apache/spark/util/package-info.java
+++ b/core/src/main/scala/org/apache/spark/util/package-info.java
@@ -18,4 +18,4 @@
/**
* Spark utilities.
*/
-package org.apache.spark.util;
\ No newline at end of file
+package org.apache.spark.util;
diff --git a/core/src/main/scala/org/apache/spark/util/random/package-info.java b/core/src/main/scala/org/apache/spark/util/random/package-info.java
index 62c3762dd11b..e4f0c0febbbb 100644
--- a/core/src/main/scala/org/apache/spark/util/random/package-info.java
+++ b/core/src/main/scala/org/apache/spark/util/random/package-info.java
@@ -18,4 +18,4 @@
/**
* Utilities for random number generation.
*/
-package org.apache.spark.util.random;
\ No newline at end of file
+package org.apache.spark.util.random;
diff --git a/dev/checkstyle.xml b/dev/checkstyle.xml
index b66dca9041f2..3de6aa91dcd5 100644
--- a/dev/checkstyle.xml
+++ b/dev/checkstyle.xml
@@ -64,6 +64,8 @@
+
+
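This hunk adds two rule lines to dev/checkstyle.xml, growing the region from 6 to 8 lines; the rendered diff does not show the XML bodies of the new entries. Judging from the fixes elsewhere in the patch, they plausibly enable checks along the lines of the following — a hypothetical reconstruction, not text taken from the hunk:

```xml
<!-- Hypothetical; the actual module names are not visible in the hunk above. -->
<module name="ArrayTypeStyle"/>
<module name="ModifierOrder"/>
```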
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java b/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java
index d31aa5f5c096..4a5da226aded 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java
@@ -18,4 +18,4 @@
/**
* Spark streaming receiver for Flume.
*/
-package org.apache.spark.streaming.flume;
\ No newline at end of file
+package org.apache.spark.streaming.flume;
diff --git a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java
index 947bae115a62..2e5ab0fb3bef 100644
--- a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java
+++ b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java
@@ -18,4 +18,4 @@
/**
* Kafka receiver for spark streaming.
*/
-package org.apache.spark.streaming.kafka;
\ No newline at end of file
+package org.apache.spark.streaming.kafka;
diff --git a/external/kinesis-asl/src/main/resources/log4j.properties b/external/kinesis-asl/src/main/resources/log4j.properties
index 8118d12c5d47..4f5ea7bafe48 100644
--- a/external/kinesis-asl/src/main/resources/log4j.properties
+++ b/external/kinesis-asl/src/main/resources/log4j.properties
@@ -34,4 +34,4 @@ log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}:
log4j.logger.org.spark_project.jetty=WARN
log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
\ No newline at end of file
+log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/package-info.java b/graphx/src/main/scala/org/apache/spark/graphx/package-info.java
index f659cc518ebd..7c63447070fc 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/package-info.java
+++ b/graphx/src/main/scala/org/apache/spark/graphx/package-info.java
@@ -19,4 +19,4 @@
* ALPHA COMPONENT
* GraphX is a graph processing framework built on top of Spark.
*/
-package org.apache.spark.graphx;
\ No newline at end of file
+package org.apache.spark.graphx;
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java b/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java
index 90cd1d46db17..86b427e31d26 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java
+++ b/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java
@@ -18,4 +18,4 @@
/**
* Collections of utilities used by graphx.
*/
-package org.apache.spark.graphx.util;
\ No newline at end of file
+package org.apache.spark.graphx.util;
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/package-info.java b/mllib/src/main/scala/org/apache/spark/mllib/package-info.java
index 4991bc9e972c..5962efa96baf 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/package-info.java
+++ b/mllib/src/main/scala/org/apache/spark/mllib/package-info.java
@@ -18,4 +18,4 @@
/**
* Spark's machine learning library.
*/
-package org.apache.spark.mllib;
\ No newline at end of file
+package org.apache.spark.mllib;
diff --git a/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java b/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java
index 5aec52ac72b1..4f40fd65b9f1 100644
--- a/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java
@@ -81,15 +81,15 @@ public void runDT() {
for (String featureSubsetStrategy: RandomForestClassifier.supportedFeatureSubsetStrategies()) {
rf.setFeatureSubsetStrategy(featureSubsetStrategy);
}
- String realStrategies[] = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
+ String[] realStrategies = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
for (String strategy: realStrategies) {
rf.setFeatureSubsetStrategy(strategy);
}
- String integerStrategies[] = {"1", "10", "100", "1000", "10000"};
+ String[] integerStrategies = {"1", "10", "100", "1000", "10000"};
for (String strategy: integerStrategies) {
rf.setFeatureSubsetStrategy(strategy);
}
- String invalidStrategies[] = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
+ String[] invalidStrategies = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
for (String strategy: invalidStrategies) {
try {
rf.setFeatureSubsetStrategy(strategy);
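The three changed declarations move the array brackets from the variable name to the type: "String[] realStrategies" instead of the C-style "String realStrategies[]". Both forms are legal Java, but the bracket-on-type form is the conventional one, and Checkstyle's ArrayTypeStyle check flags the suffix form. JavaRandomForestRegressorSuite.java below receives the identical change. A compilable sketch of the two styles (names are ours):

```java
// Both declarations compile; Java convention puts brackets with the type.
public final class ArrayStyleExample {
  public static void main(String[] args) {
    String[] preferred = {".1", "0.9", "1.0"};   // brackets on the type
    String discouraged[] = {"1", "10", "100"};   // C-style suffix; ArrayTypeStyle flags it
    System.out.println(preferred.length + discouraged.length);
  }
}
```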
diff --git a/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java b/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java
index a8736669f72e..38b895f1fdd7 100644
--- a/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java
@@ -81,15 +81,15 @@ public void runDT() {
for (String featureSubsetStrategy: RandomForestRegressor.supportedFeatureSubsetStrategies()) {
rf.setFeatureSubsetStrategy(featureSubsetStrategy);
}
- String realStrategies[] = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
+ String[] realStrategies = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
for (String strategy: realStrategies) {
rf.setFeatureSubsetStrategy(strategy);
}
- String integerStrategies[] = {"1", "10", "100", "1000", "10000"};
+ String[] integerStrategies = {"1", "10", "100", "1000", "10000"};
for (String strategy: integerStrategies) {
rf.setFeatureSubsetStrategy(strategy);
}
- String invalidStrategies[] = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
+ String[] invalidStrategies = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
for (String strategy: invalidStrategies) {
try {
rf.setFeatureSubsetStrategy(strategy);
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
index b224a868454a..cbe8f78164ae 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
@@ -38,7 +38,6 @@
import static org.apache.parquet.hadoop.ParquetInputFormat.getFilter;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java b/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java
index d43d949d76bb..348d21d49ac4 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java
@@ -18,4 +18,4 @@
/**
* Java APIs for spark streaming.
*/
-package org.apache.spark.streaming.api.java;
\ No newline at end of file
+package org.apache.spark.streaming.api.java;
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java b/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java
index 05ca2ddffd3c..4d08afcbfea3 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java
@@ -18,4 +18,4 @@
/**
* Various implementations of DStreams.
*/
-package org.apache.spark.streaming.dstream;
\ No newline at end of file
+package org.apache.spark.streaming.dstream;