
Commit 2464789

Merge branch 'apache:trunk' into YARN-11484
2 parents 45a344d + d7d772d commit 2464789

File tree: 205 files changed (+5981, −3704 lines)


BUILDING.txt

Lines changed: 29 additions & 5 deletions
@@ -163,13 +163,13 @@ Maven build goals:
  YARN Application Timeline Service V2 build options:
 
    YARN Timeline Service v.2 chooses Apache HBase as the primary backing storage. The supported
-   versions of Apache HBase are 1.2.6 (default) and 2.0.0-beta1.
+   versions of Apache HBase are 1.7.1 (default) and 2.2.4.
 
-  * HBase 1.2.6 is used by default to build Hadoop. The official releases are ready to use if you
-    plan on running Timeline Service v2 with HBase 1.2.6.
+  * HBase 1.7.1 is used by default to build Hadoop. The official releases are ready to use if you
+    plan on running Timeline Service v2 with HBase 1.7.1.
 
-  * Use -Dhbase.profile=2.0 to build Hadoop with HBase 2.0.0-beta1. Provide this option if you plan
-    on running Timeline Service v2 with HBase 2.0.
+  * Use -Dhbase.profile=2.0 to build Hadoop with HBase 2.2.4. Provide this option if you plan
+    on running Timeline Service v2 with HBase 2.x.
 
 
  Snappy build options:
@@ -311,6 +311,30 @@ Maven build goals:
    package. This option requires that -Dpmdk.lib is specified. With -Dbundle.pmdk provided,
    the build will fail if -Dpmdk.lib is not specified.
 
+ Controlling the redistribution of the protobuf-2.5 dependency
+
+   The protobuf 2.5.0 library is used at compile time to compile the class
+   org.apache.hadoop.ipc.ProtobufHelper; this class is known to have been used by
+   external projects in the past. Protobuf 2.5 is not used elsewhere in
+   the Hadoop codebase; alongside the move to Protobuf 3.x a private successor
+   class, org.apache.hadoop.ipc.internal.ShadedProtobufHelper, is now used.
+
+   The hadoop-common JAR still declares a dependency on protobuf-2.5, but this
+   is likely to change in the future. The maven scope of the dependency can be
+   set with the common.protobuf2.scope option.
+   It can be set to "provided" in a build:
+     -Dcommon.protobuf2.scope=provided
+   If this is done then protobuf-2.5.0.jar will no longer be exported as a dependency,
+   and will then be omitted from the share/hadoop/common/lib/ directory of
+   any Hadoop distribution built. Any application declaring a dependency on hadoop-common
+   will no longer get the dependency; if they need it then they must explicitly declare it:
+
+       <dependency>
+         <groupId>com.google.protobuf</groupId>
+         <artifactId>protobuf-java</artifactId>
+         <version>2.5.0</version>
+       </dependency>
+
 ----------------------------------------------------------------------------------
 Building components separately
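
As a rough illustration of the new build option documented above, a distribution build
that keeps protobuf-2.5 out of the packaged share/hadoop/common/lib/ directory might be
invoked as follows (a sketch only; the -Pdist, -DskipTests and -Dtar flags are the usual
distribution options from BUILDING.txt and are not part of this change):

    $ mvn clean package -Pdist -DskipTests -Dtar -Dcommon.protobuf2.scope=provided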

LICENSE-binary

Lines changed: 3 additions & 3 deletions
@@ -247,7 +247,7 @@ commons-cli:commons-cli:1.5.0
 commons-codec:commons-codec:1.11
 commons-collections:commons-collections:3.2.2
 commons-daemon:commons-daemon:1.0.13
-commons-io:commons-io:2.8.0
+commons-io:commons-io:2.14.0
 commons-net:commons-net:3.9.0
 de.ruedigermoeller:fst:2.50
 io.grpc:grpc-api:1.26.0
@@ -337,7 +337,7 @@ org.apache.kerby:kerby-xdr:2.0.3
 org.apache.kerby:token-provider:2.0.3
 org.apache.solr:solr-solrj:8.11.2
 org.apache.yetus:audience-annotations:0.5.0
-org.apache.zookeeper:zookeeper:3.6.3
+org.apache.zookeeper:zookeeper:3.7.2
 org.codehaus.jettison:jettison:1.5.4
 org.eclipse.jetty:jetty-annotations:9.4.51.v20230217
 org.eclipse.jetty:jetty-http:9.4.51.v20230217
@@ -360,7 +360,7 @@ org.objenesis:objenesis:2.6
 org.xerial.snappy:snappy-java:1.1.10.4
 org.yaml:snakeyaml:2.0
 org.wildfly.openssl:wildfly-openssl:1.1.3.Final
-software.amazon.awssdk:bundle:jar:2.20.128
+software.amazon.awssdk:bundle:jar:2.20.160
 
 
--------------------------------------------------------------------------------

hadoop-client-modules/hadoop-client-minicluster/pom.xml

Lines changed: 2 additions & 7 deletions
@@ -748,15 +748,10 @@
                       </excludes>
                     </filter>
                     <filter>
-                      <artifact>com.fasterxml.jackson.*:*</artifact>
-                      <excludes>
-                        <exclude>META-INF/versions/11/module-info.class</exclude>
-                      </excludes>
-                    </filter>
-                    <filter>
-                      <artifact>com.google.code.gson:gson</artifact>
+                      <artifact>*:*</artifact>
                       <excludes>
                         <exclude>META-INF/versions/9/module-info.class</exclude>
+                        <exclude>META-INF/versions/11/module-info.class</exclude>
                       </excludes>
                     </filter>

hadoop-client-modules/hadoop-client-runtime/pom.xml

Lines changed: 2 additions & 7 deletions
@@ -239,15 +239,10 @@
                       </excludes>
                     </filter>
                     <filter>
-                      <artifact>com.fasterxml.jackson.*:*</artifact>
-                      <excludes>
-                        <exclude>META-INF/versions/11/module-info.class</exclude>
-                      </excludes>
-                    </filter>
-                    <filter>
-                      <artifact>com.google.code.gson:gson</artifact>
+                      <artifact>*:*</artifact>
                       <excludes>
                         <exclude>META-INF/versions/9/module-info.class</exclude>
+                        <exclude>META-INF/versions/11/module-info.class</exclude>
                       </excludes>
                     </filter>

hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml

Lines changed: 1 addition & 2 deletions
@@ -451,8 +451,7 @@
   </Match>
 
   <Match>
-    <Class name="org.apache.hadoop.ipc.ProtobufHelper" />
-    <Method name="getFixedByteString" />
+    <Class name="org.apache.hadoop.ipc.internal.ShadedProtobufHelper" />
     <Bug pattern="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION" />
   </Match>
 </FindBugsFilter>

hadoop-common-project/hadoop-common/pom.xml

Lines changed: 3 additions & 2 deletions
@@ -263,10 +263,11 @@
       <artifactId>re2j</artifactId>
       <scope>compile</scope>
     </dependency>
+    <!-- Needed for compilation, though no longer in production. -->
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <scope>compile</scope>
+      <scope>${common.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>com.google.code.gson</groupId>
@@ -504,11 +505,11 @@
             <!--These classes have direct Protobuf references for backward compatibility reasons-->
             <excludes>
               <exclude>**/ProtobufHelper.java</exclude>
-              <exclude>**/RpcWritable.java</exclude>
               <exclude>**/ProtobufRpcEngineCallback.java</exclude>
               <exclude>**/ProtobufRpcEngine.java</exclude>
               <exclude>**/ProtobufRpcEngine2.java</exclude>
               <exclude>**/ProtobufRpcEngineProtos.java</exclude>
+              <exclude>**/ProtobufWrapperLegacy.java</exclude>
             </excludes>
           </configuration>
         </execution>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/StoreStatisticNames.java

Lines changed: 0 additions & 4 deletions
@@ -407,10 +407,6 @@ public final class StoreStatisticNames {
   public static final String MULTIPART_UPLOAD_LIST
       = "multipart_upload_list";
 
-  /** Probe for store region: {@value}. */
-  public static final String STORE_REGION_PROBE
-      = "store_region_probe";
-
   private StoreStatisticNames() {
   }

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java

Lines changed: 1 addition & 1 deletion
@@ -46,7 +46,7 @@
  * The fencing methods that ship with Hadoop may also be referred to
  * by shortened names:<br>
  * <ul>
- * <li><code>shell(/path/to/some/script.sh args...)</code></li>
+ * <li><code>shell(/path/to/some/script.sh args...)</code> (see {@link ShellCommandFencer})
  * <li><code>sshfence(...)</code> (see {@link SshFenceByTcpPort})
  * <li><code>powershell(...)</code> (see {@link PowerShellFencer})
  * </ul>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java

Lines changed: 15 additions & 37 deletions
@@ -37,14 +37,13 @@
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToObserverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.UserGroupInformation;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class is the client side translator to translate the requests made on
@@ -84,60 +83,39 @@ public HAServiceProtocolClientSideTranslatorPB(
 
   @Override
   public void monitorHealth() throws IOException {
-    try {
-      rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ));
   }
 
   @Override
   public void transitionToActive(StateChangeRequestInfo reqInfo) throws IOException {
-    try {
-      TransitionToActiveRequestProto req =
-          TransitionToActiveRequestProto.newBuilder()
+    TransitionToActiveRequestProto req =
+        TransitionToActiveRequestProto.newBuilder()
             .setReqInfo(convert(reqInfo)).build();
-
-      rpcProxy.transitionToActive(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.transitionToActive(NULL_CONTROLLER, req));
   }
 
   @Override
   public void transitionToStandby(StateChangeRequestInfo reqInfo) throws IOException {
-    try {
-      TransitionToStandbyRequestProto req =
+    TransitionToStandbyRequestProto req =
         TransitionToStandbyRequestProto.newBuilder()
-          .setReqInfo(convert(reqInfo)).build();
-      rpcProxy.transitionToStandby(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+            .setReqInfo(convert(reqInfo)).build();
+    ipc(() -> rpcProxy.transitionToStandby(NULL_CONTROLLER, req));
   }
 
   @Override
   public void transitionToObserver(StateChangeRequestInfo reqInfo)
       throws IOException {
-    try {
-      TransitionToObserverRequestProto req =
-          TransitionToObserverRequestProto.newBuilder()
-            .setReqInfo(convert(reqInfo)).build();
-      rpcProxy.transitionToObserver(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    TransitionToObserverRequestProto req =
+        TransitionToObserverRequestProto.newBuilder()
+            .setReqInfo(convert(reqInfo)).build();
+    ipc(() -> rpcProxy.transitionToObserver(NULL_CONTROLLER, req));
   }
 
   @Override
   public HAServiceStatus getServiceStatus() throws IOException {
     GetServiceStatusResponseProto status;
-    try {
-      status = rpcProxy.getServiceStatus(NULL_CONTROLLER,
-          GET_SERVICE_STATUS_REQ);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    status = ipc(() -> rpcProxy.getServiceStatus(NULL_CONTROLLER,
+        GET_SERVICE_STATUS_REQ));
 
     HAServiceStatus ret = new HAServiceStatus(
         convert(status.getState()));
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java

Lines changed: 8 additions & 17 deletions
@@ -27,15 +27,14 @@
 import org.apache.hadoop.ha.ZKFCProtocol;
 import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto;
 import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 
 public class ZKFCProtocolClientSideTranslatorPB implements
@@ -57,24 +56,16 @@ public ZKFCProtocolClientSideTranslatorPB(
   @Override
   public void cedeActive(int millisToCede) throws IOException,
       AccessControlException {
-    try {
-      CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
-          .setMillisToCede(millisToCede)
-          .build();
-      rpcProxy.cedeActive(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
+        .setMillisToCede(millisToCede)
+        .build();
+    ipc(() -> rpcProxy.cedeActive(NULL_CONTROLLER, req));
   }
 
   @Override
   public void gracefulFailover() throws IOException, AccessControlException {
-    try {
-      rpcProxy.gracefulFailover(NULL_CONTROLLER,
-          GracefulFailoverRequestProto.getDefaultInstance());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.gracefulFailover(NULL_CONTROLLER,
+        GracefulFailoverRequestProto.getDefaultInstance()));
   }
 
 