@@ -49,7 +49,7 @@
* to perform this setup.
*
* For example, a typical workflow might be:
- *   client.sendRPC(new OpenFile("/foo")) --> returns StreamId = 100
+ *   client.sendRPC(new OpenFile("/foo")) --&gt; returns StreamId = 100
* client.fetchChunk(streamId = 100, chunkIndex = 0, callback)
* client.fetchChunk(streamId = 100, chunkIndex = 1, callback)
* ...
@@ -29,7 +29,7 @@
import org.apache.spark.network.buffer.ManagedBuffer;

/**
- * StreamManager which allows registration of an Iterator<ManagedBuffer>, which are individually
+ * StreamManager which allows registration of an Iterator&lt;ManagedBuffer&gt;, which are individually
* fetched as chunks by the client. Each registered buffer is one chunk.
*/
public class OneForOneStreamManager extends StreamManager {
@@ -27,7 +27,7 @@
* Wraps a {@link InputStream}, limiting the number of bytes which can be read.
*
* This code is from Guava's 14.0 source code, because there is no compatible way to
- * use this functionality in both a Guava 11 environment and a Guava >14 environment.
+ * use this functionality in both a Guava 11 environment and a Guava &gt;14 environment.
*/
public final class LimitedInputStream extends FilterInputStream {
private long left;
@@ -99,7 +99,7 @@ public static ByteToMessageDecoder createFrameDecoder() {
return new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, 8, -8, 8);
}

-  /** Returns the remote address on the channel or "<remote address>" if none exists. */
+  /** Returns the remote address on the channel or "&lt;remote address&gt;" if none exists. */
public static String getRemoteAddress(Channel channel) {
if (channel != null && channel.remoteAddress() != null) {
return channel.remoteAddress().toString();
@@ -72,7 +72,7 @@ public int connectionTimeoutMs() {

/**
* Time (in milliseconds) that we will wait in order to perform a retry after an IOException.
- * Only relevant if maxIORetries > 0.
+ * Only relevant if maxIORetries &gt; 0.
*/
public int ioRetryWaitTime() { return conf.getInt("spark.shuffle.io.retryWaitMs", 5000); }

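A note on the four hunks above: they all make the same kind of change, HTML-escaping "<" and ">" in Javadoc prose. Java 8's javadoc enables doclint by default, which parses doc comments as HTML and fails the build on a stray angle bracket. A minimal sketch of the failure and the fix, using hypothetical class and method names:

    import java.util.Arrays;
    import java.util.Iterator;

    // Hypothetical example. A doc comment written with raw brackets, e.g.
    //     /** Returns an Iterator<String> over the tokens. */
    // makes Java 8's javadoc fail with a "malformed HTML" doclint error.
    // Escaping the brackets, as the hunks above do, renders the same text
    // but is valid HTML.
    public final class DoclintEscapeExample {

      /** Returns an Iterator&lt;String&gt; over the whitespace-separated tokens. */
      public static Iterator<String> tokens(String line) {
        return Arrays.asList(line.split("\\s+")).iterator();
      }

      private DoclintEscapeExample() { }
    }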
24 changes: 24 additions & 0 deletions pom.xml
@@ -1049,6 +1049,11 @@
</filesets>
</configuration>
</plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <version>2.10.1</version>
+      </plugin>
</plugins>
</pluginManagement>

@@ -1242,6 +1247,25 @@

</profile>

+    <profile>
+      <id>doclint-java8-disable</id>
+      <activation>
+        <jdk>[1.8,)</jdk>
+      </activation>
+
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-javadoc-plugin</artifactId>

[Inline review thread on the maven-javadoc-plugin declaration]

srowen (Member):
@ueshin One last tiny thing. I think this plugin should be declared in <pluginManagement>, and should add <version>2.10.1</version>, but would not have <configuration> of course. Then this is left exactly as-is.

All -- I think this may help the problem in SPARK-4543? It is good to commit this fix anyway but may also simply silence Java 8's warnings on missing javadoc as desired.

ueshin (Member, Author):
@srowen Thank you for your suggestion and I agree with it. I'll add it to the <pluginManagement>.

+            <configuration>
+              <additionalparam>-Xdoclint:all -Xdoclint:-missing</additionalparam>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>

<!-- A series of build profiles where customizations for particular Hadoop releases can be made -->

<!-- Hadoop-a.b.c dependencies can be found at
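
The doclint-java8-disable profile above activates automatically on JDK 1.8 or newer (the version range [1.8,)) and passes -Xdoclint:all -Xdoclint:-missing to javadoc: every doclint group stays enabled except "missing", so comments that merely lack @param/@return tags stop failing the build while genuinely malformed ones still do. A sketch of that distinction, with a hypothetical class:

    // Hypothetical illustration of -Xdoclint:all -Xdoclint:-missing.
    public final class DoclintGroupsExample {

      // Passes: absent @param/@return tags fall in the silenced "missing" group.
      /** Adds two ints. */
      public static int add(int a, int b) {
        return a + b;
      }

      // Would still fail if uncommented: a raw "<" is an error in the "html"
      // group, which -Xdoclint:-missing leaves enabled.
      //   /** Compares a < b. */
      //   public static boolean less(int a, int b) { return a < b; }

      private DoclintGroupsExample() { }
    }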
7 changes: 6 additions & 1 deletion project/SparkBuild.scala
@@ -136,7 +136,12 @@ object SparkBuild extends PomBuild {
},
publishMavenStyle in MavenCompile := true,
publishLocal in MavenCompile <<= publishTask(publishLocalConfiguration in MavenCompile, deliverLocal),
-    publishLocalBoth <<= Seq(publishLocal in MavenCompile, publishLocal).dependOn
+    publishLocalBoth <<= Seq(publishLocal in MavenCompile, publishLocal).dependOn,
+
+    javacOptions in (Compile, doc) ++= {
+      val Array(major, minor, _) = System.getProperty("java.version").split("\\.", 3)
+      if (major.toInt >= 1 && minor.toInt >= 8) Seq("-Xdoclint:all", "-Xdoclint:-missing") else Seq.empty
+    }
)

def enable(settings: Seq[Setting[_]])(projectRef: ProjectRef) = {
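The sbt change mirrors the Maven profile: the same two flags are appended to the javadoc options, but only when java.version parses as 1.8 or later. A standalone Java sketch of that check (hypothetical class; like the Scala destructuring above, it assumes the pre-Java-9 "1.x.y" version scheme, where Java 8 reports e.g. "1.8.0_25"):

    // Hypothetical standalone version of the check in SparkBuild.scala above.
    public final class DoclintVersionCheck {
      public static void main(String[] args) {
        // On Java 8, java.version is e.g. "1.8.0_25"; splitting into at most
        // three parts yields ["1", "8", "0_25"], so major = 1 and minor = 8.
        String[] parts = System.getProperty("java.version").split("\\.", 3);
        boolean atLeastJava8 =
            parts.length >= 2
            && Integer.parseInt(parts[0]) >= 1
            && Integer.parseInt(parts[1]) >= 8;
        System.out.println(atLeastJava8
            ? "javadoc options: -Xdoclint:all -Xdoclint:-missing"
            : "pre-Java-8 javadoc: no doclint options added");
      }
    }

Under the later Java 9+ version scheme ("9", "11.0.2", ...) this minor-version test would not add the flags, but the build here predates those releases.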