From 6824627b45dbb06b167586083fbbdcdaf2d52933 Mon Sep 17 00:00:00 2001
From: Viraj Jasani
Date: Tue, 23 Nov 2021 17:15:04 +0530
Subject: [PATCH 1/4] HADOOP-18022. Add restrict-imports-enforcer-rule for
 Guava Preconditions in hadoop-main pom

---
 .../hadoop/fs/cosn/auth/COSCredentialsProviderList.java    | 2 +-
 .../org/apache/hadoop/fs/obs/OBSBlockOutputStream.java     | 2 +-
 .../main/java/org/apache/hadoop/fs/obs/OBSCommonUtils.java | 2 +-
 .../main/java/org/apache/hadoop/fs/obs/OBSDataBlocks.java  | 2 +-
 .../main/java/org/apache/hadoop/fs/obs/OBSIOException.java | 2 +-
 .../main/java/org/apache/hadoop/fs/obs/OBSInputStream.java | 2 +-
 .../org/apache/hadoop/fs/obs/OBSWriteOperationHelper.java  | 2 +-
 .../hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java  | 2 +-
 .../hadoop/mapreduce/v2/app/webapp/AMWebServices.java      | 2 +-
 .../hadoop/mapreduce/counters/FileSystemCounterGroup.java  | 2 +-
 .../hadoop/mapreduce/counters/FrameworkCounterGroup.java   | 2 +-
 .../hadoop/mapreduce/lib/output/FileOutputCommitter.java   | 2 +-
 .../hadoop/mapreduce/lib/output/FileOutputFormat.java      | 2 +-
 .../hadoop/mapreduce/lib/output/NamedCommitterFactory.java | 2 +-
 .../hadoop/mapreduce/lib/output/PathOutputCommitter.java   | 2 +-
 .../mapred/nativetask/buffer/ByteBufferDataWriter.java     | 2 +-
 .../hadoop/mapred/nativetask/testutil/BytesFactory.java    | 2 +-
 pom.xml                                                    | 7 +++++++
 18 files changed, 24 insertions(+), 17 deletions(-)
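
Note on the series: org.apache.hadoop.util.Preconditions is hadoop-common's
shim for the Guava class, so every hunk in this patch is a pure import swap
with unchanged call sites. A minimal sketch of the resulting pattern, assuming
the shim mirrors Guava's checkNotNull/checkArgument signatures (the Example
class itself is illustrative, not part of the patch):

    import org.apache.hadoop.util.Preconditions;

    public class Example {
      private final String name;

      public Example(String name) {
        // Throws NullPointerException carrying the message, like Guava.
        this.name = Preconditions.checkNotNull(name, "name shouldn't be null");
        // Throws IllegalArgumentException when the condition fails.
        Preconditions.checkArgument(!name.isEmpty(), "name shouldn't be empty");
      }
    }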

diff --git a/hadoop-cloud-storage-project/hadoop-cos/src/main/java/org/apache/hadoop/fs/cosn/auth/COSCredentialsProviderList.java b/hadoop-cloud-storage-project/hadoop-cos/src/main/java/org/apache/hadoop/fs/cosn/auth/COSCredentialsProviderList.java
index d2d2f8c9a7cab..66ef4b1c6fd87 100644
--- a/hadoop-cloud-storage-project/hadoop-cos/src/main/java/org/apache/hadoop/fs/cosn/auth/COSCredentialsProviderList.java
+++ b/hadoop-cloud-storage-project/hadoop-cos/src/main/java/org/apache/hadoop/fs/cosn/auth/COSCredentialsProviderList.java
@@ -24,7 +24,7 @@
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.qcloud.cos.auth.AnonymousCOSCredentials;
 import com.qcloud.cos.auth.COSCredentials;
 import com.qcloud.cos.auth.COSCredentialsProvider;
diff --git a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSBlockOutputStream.java b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSBlockOutputStream.java
index cefa897927790..22c6cb5c350c9 100644
--- a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSBlockOutputStream.java
+++ b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSBlockOutputStream.java
@@ -19,7 +19,7 @@
 package org.apache.hadoop.fs.obs;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.Futures;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListenableFuture;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListeningExecutorService;
diff --git a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSCommonUtils.java b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSCommonUtils.java
index d477cec186b0e..3a06961d3acd9 100644
--- a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSCommonUtils.java
+++ b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSCommonUtils.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.obs;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.obs.services.ObsClient;
 import com.obs.services.exception.ObsException;
 import com.obs.services.model.AbortMultipartUploadRequest;
diff --git a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSDataBlocks.java b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSDataBlocks.java
index b58eaa00aa697..e347970ee8446 100644
--- a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSDataBlocks.java
+++ b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSDataBlocks.java
@@ -19,7 +19,7 @@
 package org.apache.hadoop.fs.obs;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSExceptionMessages;
diff --git a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSIOException.java b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSIOException.java
index 29a92c71919a8..3f99fd610efa5 100644
--- a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSIOException.java
+++ b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSIOException.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.obs;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.obs.services.exception.ObsException;
 
 import java.io.IOException;
diff --git a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSInputStream.java b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSInputStream.java
index e94565a4d760a..3f7e9888889b5 100644
--- a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSInputStream.java
+++ b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSInputStream.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.obs;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.obs.services.ObsClient;
 import com.obs.services.exception.ObsException;
 import com.obs.services.model.GetObjectRequest;
diff --git a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSWriteOperationHelper.java b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSWriteOperationHelper.java
index 5cc3008f1dcfb..2b02f962a0598 100644
--- a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSWriteOperationHelper.java
+++ b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSWriteOperationHelper.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.obs;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.obs.services.ObsClient;
 import com.obs.services.exception.ObsException;
 import com.obs.services.model.AbortMultipartUploadRequest;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
index 1640ee2f4fe57..fe998bbc3f0cb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
@@ -146,7 +146,7 @@
 import org.apache.hadoop.yarn.util.resource.ResourceUtils;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
index 72f8047dc33d9..78174afb6f892 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
@@ -78,7 +78,7 @@
 import org.apache.hadoop.yarn.webapp.BadRequestException;
 import org.apache.hadoop.yarn.webapp.NotFoundException;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.google.inject.Inject;
 
 @Path("/ws/v1/mapreduce")
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
index 97d19a668dec0..3834d4ea88989 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
@@ -28,7 +28,7 @@
 import java.util.Map;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.*;
+import static org.apache.hadoop.util.Preconditions.*;
 import org.apache.hadoop.thirdparty.com.google.common.collect.AbstractIterator;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Iterators;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java
index ca7f1f06514d8..7ef3ec03f383f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.mapreduce.counters;
 
-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkNotNull;
+import static org.apache.hadoop.util.Preconditions.checkNotNull;
 
 import java.io.DataInput;
 import java.io.DataOutput;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java
index 877d73c5a59de..82b7fcb504622 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java
@@ -37,7 +37,7 @@
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.DurationInfo;
 import org.apache.hadoop.util.Progressable;
 import org.slf4j.Logger;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
index 15ff3c67c2fb5..2b1f7e37ebe75 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
@@ -21,7 +21,7 @@
 import java.io.IOException;
 import java.text.NumberFormat;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/NamedCommitterFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/NamedCommitterFactory.java
index ddcff646e04fd..3ec2795947c8d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/NamedCommitterFactory.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/NamedCommitterFactory.java
@@ -22,7 +22,7 @@
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/PathOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/PathOutputCommitter.java
index e463632fa56ae..c9fbe3b065f44 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/PathOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/PathOutputCommitter.java
@@ -20,7 +20,7 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataWriter.java
index da09f59591bee..6fe0143b0120a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataWriter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataWriter.java
@@ -23,7 +23,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.mapred.nativetask.NativeDataTarget;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * DataOutputStream implementation which buffers data in a fixed-size
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/BytesFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/BytesFactory.java
index 9b46dfa6243bd..2235eb0685fd4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/BytesFactory.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/BytesFactory.java
@@ -19,7 +19,7 @@
 
 import java.util.Random;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Longs;
 
diff --git a/pom.xml b/pom.xml
index 62f011f7082b4..f59513ef33dbb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -254,6 +254,13 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
                     <bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableListMultimap</bannedImport>
                   </bannedImports>
                 </restrictImports>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <reason>Use hadoop-common provided Preconditions rather than Guava provided</reason>
+                  <bannedImports>
+                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+                  </bannedImports>
+                </restrictImports>
               </rules>
             </configuration>
           </execution>
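
Once this rule is in the root pom, the banned-imports check that the
maven-enforcer-plugin runs during the build should reject any module that
still imports the shaded Guava class (import lines only; surrounding code
omitted):

    // Rejected by the restrict-imports rule added above:
    import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;

    // Accepted replacement:
    import org.apache.hadoop.util.Preconditions;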

From e41368878caa04fb9f95d70b641c6363b4e379ce Mon Sep 17 00:00:00 2001
From: Viraj Jasani
Date: Wed, 24 Nov 2021 15:54:24 +0530
Subject: [PATCH 2/4] HADOOP-18022. Replace Preconditions with explicit null
 checks in ApplicationIdPBImpl and FlowActivityEntityReader

---
 .../records/impl/pb/ApplicationIdPBImpl.java | 18 ++++++++++++------
 .../reader/FlowActivityEntityReader.java     |  8 ++++----
 2 files changed, 16 insertions(+), 10 deletions(-)

diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationIdPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationIdPBImpl.java
index 0ee187fc79d2a..2b5c1d14d10e8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationIdPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationIdPBImpl.java
@@ -24,8 +24,6 @@
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
 
-import org.apache.hadoop.util.Preconditions;
-
 @Private
 @Unstable
 public class ApplicationIdPBImpl extends ApplicationId {
@@ -46,24 +44,32 @@ public ApplicationIdProto getProto() {
 
   @Override
   public int getId() {
-    Preconditions.checkNotNull(proto);
+    if (proto == null) {
+      throw new NullPointerException("The argument object is NULL");
+    }
     return proto.getId();
   }
 
   @Override
   protected void setId(int id) {
-    Preconditions.checkNotNull(builder);
+    if (builder == null) {
+      throw new NullPointerException("The argument object is NULL");
+    }
     builder.setId(id);
   }
 
   @Override
   public long getClusterTimestamp() {
-    Preconditions.checkNotNull(proto);
+    if (proto == null) {
+      throw new NullPointerException("The argument object is NULL");
+    }
     return proto.getClusterTimestamp();
   }
 
   @Override
   protected void setClusterTimestamp(long clusterTimestamp) {
-    Preconditions.checkNotNull(builder);
+    if (builder == null) {
+      throw new NullPointerException("The argument object is NULL");
+    }
     builder.setClusterTimestamp((clusterTimestamp));
   }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
index baff86c2a08f4..8005d85051600 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
@@ -46,8 +46,6 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTableRW;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
 
-import org.apache.hadoop.util.Preconditions;
-
 /**
  * Timeline entity reader for flow activity entities that are stored in the
  * flow activity table.
@@ -82,8 +80,10 @@ protected BaseTableRW getTable() {
 
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
+    String clusterId = getContext().getClusterId();
+    if (clusterId == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
   }
 
   @Override
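
These two files drop Preconditions entirely instead of switching to the shim;
the expanded if/throw blocks preserve checkNotNull's contract of throwing
NullPointerException with the supplied message. Where a one-liner is
preferred, java.util.Objects offers the same behaviour; a sketch only, not
something this patch does:

    import java.util.Objects;

    // Equivalent to the if/throw block in validateParams() above:
    String clusterId = Objects.requireNonNull(
        getContext().getClusterId(), "clusterId shouldn't be null");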

From c90a18e8cdd942b7d89e63ed45e50d2d516af1f5 Mon Sep 17 00:00:00 2001
From: Viraj Jasani
Date: Thu, 25 Nov 2021 12:16:24 +0530
Subject: [PATCH 3/4] HADOOP-18022. Addendum: replace Preconditions with
 explicit null checks in the remaining timeline service readers

---
 .../reader/ApplicationEntityReader.java       | 36 +++++++++++--------
 .../storage/reader/EntityTypeReader.java      | 15 ++++----
 .../storage/reader/FlowRunEntityReader.java   | 31 +++++++++------
 .../storage/reader/GenericEntityReader.java   | 31 +++++++++------
 .../reader/SubApplicationEntityReader.java    | 26 ++++++++------
 5 files changed, 81 insertions(+), 58 deletions(-)

diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
index f9a063a8c5738..c25125d524f66 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
@@ -58,8 +58,6 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
 
-import org.apache.hadoop.util.Preconditions;
-
 /**
  * Timeline entity reader for application entities that are stored in the
  * application table.
@@ -336,21 +334,29 @@ protected Result getResult(Configuration hbaseConf, Connection conn,
 
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
-    Preconditions.checkNotNull(
-        getDataToRetrieve(), "data to retrieve shouldn't be null");
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getEntityType(),
-        "entityType shouldn't be null");
+    if (getContext() == null) {
+      throw new NullPointerException("context shouldn't be null");
+    }
+    if (getDataToRetrieve() == null) {
+      throw new NullPointerException("data to retrieve shouldn't be null");
+    }
+    if (getContext().getClusterId() == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
+    if (getContext().getEntityType() == null) {
+      throw new NullPointerException("entityType shouldn't be null");
+    }
     if (isSingleEntityRead()) {
-      Preconditions.checkNotNull(getContext().getAppId(),
-          "appId shouldn't be null");
+      if (getContext().getAppId() == null) {
+        throw new NullPointerException("appId shouldn't be null");
+      }
     } else {
-      Preconditions.checkNotNull(getContext().getUserId(),
-          "userId shouldn't be null");
-      Preconditions.checkNotNull(getContext().getFlowName(),
-          "flowName shouldn't be null");
+      if (getContext().getUserId() == null) {
+        throw new NullPointerException("userId shouldn't be null");
+      }
+      if (getContext().getFlowName() == null) {
+        throw new NullPointerException("flowName shouldn't be null");
+      }
     }
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java
index a71001df35953..b790010df0f34 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.yarn.server.timelineservice.storage.reader;
 
-import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Result;
@@ -115,11 +114,15 @@ public Set readEntityTypes(Configuration hbaseConf,
 
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getAppId(),
-        "appId shouldn't be null");
+    if (getContext() == null) {
+      throw new NullPointerException("context shouldn't be null");
+    }
+    if (getContext().getClusterId() == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
+    if (getContext().getAppId() == null) {
+      throw new NullPointerException("appId shouldn't be null");
+    }
   }
 
   /**
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
index dfe3775f26eb6..50e96ef1ab670 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
@@ -57,8 +57,6 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
 
-import org.apache.hadoop.util.Preconditions;
-
 /**
  * Timeline entity reader for flow run entities that are stored in the flow run
  * table.
@@ -86,18 +84,25 @@ protected BaseTableRW getTable() {
 
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
-    Preconditions.checkNotNull(getDataToRetrieve(),
-        "data to retrieve shouldn't be null");
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getUserId(),
-        "userId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getFlowName(),
-        "flowName shouldn't be null");
+    if (getContext() == null) {
+      throw new NullPointerException("context shouldn't be null");
+    }
+    if (getDataToRetrieve() == null) {
+      throw new NullPointerException("data to retrieve shouldn't be null");
+    }
+    if (getContext().getClusterId() == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
+    if (getContext().getUserId() == null) {
+      throw new NullPointerException("userId shouldn't be null");
+    }
+    if (getContext().getFlowName() == null) {
+      throw new NullPointerException("flowName shouldn't be null");
+    }
     if (isSingleEntityRead()) {
-      Preconditions.checkNotNull(getContext().getFlowRunId(),
-          "flowRunId shouldn't be null");
+      if (getContext().getFlowRunId() == null) {
+        throw new NullPointerException("flowRunId shouldn't be null");
+      }
     }
     EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
     if (!isSingleEntityRead() && fieldsToRetrieve != null) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
index a09b4493ffe9e..511d310ad4e07 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
@@ -63,8 +63,6 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
 
-import org.apache.hadoop.util.Preconditions;
-
 /**
  * Timeline entity reader for generic entities that are stored in the entity
  * table.
@@ -406,18 +404,25 @@ protected FilterList constructFilterListBasedOnFields(Set cfsInFields)
 
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
-    Preconditions.checkNotNull(getDataToRetrieve(),
-        "data to retrieve shouldn't be null");
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getAppId(),
-        "appId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getEntityType(),
-        "entityType shouldn't be null");
+    if (getContext() == null) {
+      throw new NullPointerException("context shouldn't be null");
+    }
+    if (getDataToRetrieve() == null) {
+      throw new NullPointerException("data to retrieve shouldn't be null");
+    }
+    if (getContext().getClusterId() == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
+    if (getContext().getAppId() == null) {
+      throw new NullPointerException("appId shouldn't be null");
+    }
+    if (getContext().getEntityType() == null) {
+      throw new NullPointerException("entityType shouldn't be null");
+    }
     if (isSingleEntityRead()) {
-      Preconditions.checkNotNull(getContext().getEntityId(),
-          "entityId shouldn't be null");
+      if (getContext().getEntityId() == null) {
+        throw new NullPointerException("entityId shouldn't be null");
+      }
     }
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
index e769d61cc0588..ee0650c6f56c0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
@@ -56,8 +56,6 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTableRW;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
 
-import org.apache.hadoop.util.Preconditions;
-
 class SubApplicationEntityReader extends GenericEntityReader {
   private static final SubApplicationTableRW SUB_APPLICATION_TABLE =
       new SubApplicationTableRW();
@@ -308,15 +306,21 @@ protected FilterList constructFilterListBasedOnFields(Set cfsInFields)
 
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
-    Preconditions.checkNotNull(getDataToRetrieve(),
-        "data to retrieve shouldn't be null");
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getDoAsUser(),
-        "DoAsUser shouldn't be null");
-    Preconditions.checkNotNull(getContext().getEntityType(),
-        "entityType shouldn't be null");
+    if (getContext() == null) {
+      throw new NullPointerException("context shouldn't be null");
+    }
+    if (getDataToRetrieve() == null) {
+      throw new NullPointerException("data to retrieve shouldn't be null");
+    }
+    if (getContext().getClusterId() == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
+    if (getContext().getDoAsUser() == null) {
+      throw new NullPointerException("DoAsUser shouldn't be null");
+    }
+    if (getContext().getEntityType() == null) {
+      throw new NullPointerException("entityType shouldn't be null");
+    }
   }
 
   @Override
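
The same if/throw ladder now repeats across all five readers. A small shared
helper would shrink each validateParams() back to one line per field; a
hypothetical refactor sketch (ReaderChecks is an invented name, not part of
this series):

    final class ReaderChecks {
      private ReaderChecks() {
      }

      // Mirrors the removed Preconditions.checkNotNull(value, message).
      static <T> T requireNonNull(T value, String message) {
        if (value == null) {
          throw new NullPointerException(message);
        }
        return value;
      }
    }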

From 6cbd30e44584c0c7b8f42eff2f1a852b0b32f724 Mon Sep 17 00:00:00 2001
From: Viraj Jasani
Date: Fri, 26 Nov 2021 18:48:50 +0530
Subject: [PATCH 4/4] HADOOP-18022. Fix pom to include static imports and
 update AbfsLocatedFileStatus

---
 .../hadoop/fs/azurebfs/services/AbfsLocatedFileStatus.java | 5 ++---
 pom.xml                                                    | 1 +
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsLocatedFileStatus.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsLocatedFileStatus.java
index 29da2c504355a..325c929c16a7a 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsLocatedFileStatus.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsLocatedFileStatus.java
@@ -22,8 +22,7 @@
 import org.apache.hadoop.fs.EtagSource;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.LocatedFileStatus;
-
-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkNotNull;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * {@link LocatedFileStatus} extended to also carry an ETag.
@@ -38,7 +37,7 @@ public class AbfsLocatedFileStatus extends LocatedFileStatus implements EtagSour
   private final String etag;
 
   public AbfsLocatedFileStatus(FileStatus status, BlockLocation[] locations) {
-    super(checkNotNull(status), locations);
+    super(Preconditions.checkNotNull(status), locations);
     if (status instanceof EtagSource) {
       this.etag = ((EtagSource) status).getEtag();
     } else {
diff --git a/pom.xml b/pom.xml
index f59513ef33dbb..c706dcc5c9ba6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -259,6 +259,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
                   <reason>Use hadoop-common provided Preconditions rather than Guava provided</reason>
                   <bannedImports>
                     <bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+                    <bannedImport>static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.**</bannedImport>
                   </bannedImports>
                 </restrictImports>
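
Note on the final pom.xml hunk: the new bannedImport entry uses the "static"
prefix and the ".**" wildcard, so static member imports of the Guava class are
now caught as well. Both of the following import forms now trip the enforcer
rule:

    import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
    import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkNotNull;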