diff --git a/common/src/java/org/apache/hadoop/hive/common/JvmMetricsInfo.java b/common/src/java/org/apache/hadoop/hive/common/JvmMetricsInfo.java
index 3ab73c5bdec8..5d793559ca50 100644
--- a/common/src/java/org/apache/hadoop/hive/common/JvmMetricsInfo.java
+++ b/common/src/java/org/apache/hadoop/hive/common/JvmMetricsInfo.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.common;
 
-import com.google.common.base.Objects;
+import com.google.common.base.MoreObjects;
 
 import org.apache.hadoop.metrics2.MetricsInfo;
 
@@ -58,7 +58,7 @@ public enum JvmMetricsInfo implements MetricsInfo {
   @Override public String description() { return desc; }
 
   @Override public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("name", name()).add("description", desc)
         .toString();
   }
diff --git a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java
index cf080e3fbe7a..fb7d7de7bb2f 100644
--- a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java
+++ b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java
@@ -173,7 +173,7 @@ public String toString() {
   private class Monitor implements Runnable {
     @Override
     public void run() {
-      Stopwatch sw = new Stopwatch();
+      Stopwatch sw = Stopwatch.createUnstarted();
       Map<String, GcTimes> gcTimesBeforeSleep = getGcTimes();
       while (shouldRun) {
         sw.reset().start();
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
index d4f6865464df..24b3612fa7f6 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
@@ -294,6 +294,11 @@ public String call() throws Exception {
             );
           }
         }, new Predicate<Throwable>() {
+          @Override
+          public boolean test(@Nullable Throwable input) {
+            return input instanceof IOException;
+          }
+
           @Override
           public boolean apply(@Nullable Throwable input) {
             return input instanceof IOException;
@@ -341,6 +346,18 @@ public URL apply(DataSegment dataSegment) {
       int numRetries = 0;
       while (numRetries++ < maxTries && !setOfUrls.isEmpty()) {
         setOfUrls = ImmutableSet.copyOf(Sets.filter(setOfUrls, new Predicate<URL>() {
+          @Override
+          public boolean test(URL input) {
+            try {
+              String result = DruidStorageHandlerUtils.getURL(httpClient, input);
+              LOG.debug(String.format("Checking segment [%s] response is [%s]", input, result));
+              return Strings.isNullOrEmpty(result);
+            } catch (IOException e) {
+              LOG.error(String.format("Error while checking URL [%s]", input), e);
+              return true;
+            }
+          }
+
           @Override
           public boolean apply(URL input) {
             try {
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidQueryRecordReader.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidQueryRecordReader.java
index 8d099c7d06da..dfcf88944b79 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidQueryRecordReader.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidQueryRecordReader.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.druid.serde;
 
-import com.google.common.collect.Iterators;
 import com.metamx.common.lifecycle.Lifecycle;
 import com.metamx.http.client.HttpClient;
 import com.metamx.http.client.HttpClientConfig;
@@ -37,6 +36,7 @@
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
@@ -63,7 +63,8 @@ public abstract class DruidQueryRecordReader<T extends BaseQuery<R>, R extends C
   /**
    * Query results.
    */
-  protected Iterator<R> results = Iterators.emptyIterator();
+
+  protected Iterator<R> results = Collections.emptyIterator();
 
   @Override
   public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSelectQueryRecordReader.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSelectQueryRecordReader.java
index 8a41e916f2a0..b0888b0a159b 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSelectQueryRecordReader.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSelectQueryRecordReader.java
@@ -19,6 +19,7 @@
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
@@ -27,7 +28,6 @@
 import org.apache.hadoop.io.NullWritable;
 
 import com.fasterxml.jackson.core.type.TypeReference;
-import com.google.common.collect.Iterators;
 
 import io.druid.query.Result;
 import io.druid.query.select.EventHolder;
@@ -42,7 +42,7 @@ public class DruidSelectQueryRecordReader
 
   private Result<SelectResultValue> current;
 
-  private Iterator<EventHolder> values = Iterators.emptyIterator();
+  private Iterator<EventHolder> values = Collections.emptyIterator();
 
   @Override
   protected SelectQuery createQuery(String content) throws IOException {
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTopNQueryRecordReader.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTopNQueryRecordReader.java
index d431925684d4..36aee8df8297 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTopNQueryRecordReader.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTopNQueryRecordReader.java
@@ -19,6 +19,7 @@
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
@@ -27,7 +28,6 @@
 import org.apache.hadoop.io.NullWritable;
 
 import com.fasterxml.jackson.core.type.TypeReference;
-import com.google.common.collect.Iterators;
 
 import io.druid.query.Result;
 import io.druid.query.topn.DimensionAndMetricValueExtractor;
@@ -42,7 +42,7 @@ public class DruidTopNQueryRecordReader
 
   private Result<TopNResultValue> current;
 
-  private Iterator<DimensionAndMetricValueExtractor> values = Iterators.emptyIterator();
+  private Iterator<DimensionAndMetricValueExtractor> values = Collections.emptyIterator();
 
   @Override
   protected TopNQuery createQuery(String content) throws IOException {
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/io/TestHadoopFileStatus.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/io/TestHadoopFileStatus.java
index b9fc09bea3e3..d7996f802a77 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/io/TestHadoopFileStatus.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/io/TestHadoopFileStatus.java
@@ -79,6 +79,14 @@ public void testHadoopFileStatusAclEntries() throws IOException {
     Assert.assertNotNull(sourceStatus.getAclEntries());
     Assert.assertTrue(sourceStatus.getAclEntries().size() == 3);
     Iterables.removeIf(sourceStatus.getAclEntries(), new Predicate<AclEntry>() {
+      @Override
+      public boolean test(AclEntry input) {
+        if (input.getName() == null) {
+          return true;
+        }
+        return false;
+      }
+
       @Override
       public boolean apply(AclEntry input) {
         if (input.getName() == null) {
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscovery.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscovery.java
index b153679dc8cd..6266f3098903 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscovery.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscovery.java
@@ -143,6 +143,12 @@ private class ConnParamInfoPred implements Predicate<ConnParamInfo> {
       this.pathPrefix = pathPrefix;
     }
 
+    @Override
+    public boolean test(ConnParamInfo inputParam) {
+      return inputParam.host.equals(host) && inputParam.port == port &&
+          inputParam.path.startsWith(pathPrefix);
+    }
+
     @Override
     public boolean apply(ConnParamInfo inputParam) {
       return inputParam.host.equals(host) && inputParam.port == port &&
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
index a735346c0626..1d381043aa38 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
@@ -153,7 +153,7 @@ public Void invokeInternal() throws Exception {
 
   @Override
   public void runTest(String tname, String fname, String fpath) throws Exception {
-    Stopwatch sw = new Stopwatch().start();
+    Stopwatch sw = Stopwatch.createStarted();
     boolean skipped = false;
     boolean failed = false;
     try {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
index 383fa8c070f7..901171244522 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
@@ -79,6 +79,15 @@ List<HivePrivilegeObject> filterForBypass(List<HivePrivilegeObject> privilegeObj
       return null;
     } else {
       return Lists.newArrayList(Iterables.filter(privilegeObjects,new Predicate<HivePrivilegeObject>() {
+        @Override
+        public boolean test(@Nullable HivePrivilegeObject hivePrivilegeObject) {
+          // Return true to retain an item, and false to filter it out.
+          if (hivePrivilegeObject == null){
+            return true;
+          }
+          return !bypassObjectTypes.contains(hivePrivilegeObject.getType());
+        }
+
         @Override
         public boolean apply(@Nullable HivePrivilegeObject hivePrivilegeObject) {
          // Return true to retain an item, and false to filter it out.
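Background on the Predicate hunks above and below: from Guava 21 on, com.google.common.base.Predicate extends java.util.function.Predicate, and this patch adds an explicit test() next to the legacy apply() in each anonymous class. A minimal self-contained sketch of that dual-override pattern follows; the class and variable names are illustrative, not taken from the patch.

import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PredicateBridgeSketch {
  public static void main(String[] args) {
    List<String> names = new ArrayList<>(Arrays.asList("a", null, "b"));
    // Remove null entries; the anonymous class satisfies both the Guava
    // signature (apply) and the java.util.function signature (test).
    Iterables.removeIf(names, new Predicate<String>() {
      @Override
      public boolean test(String input) {   // java.util.function.Predicate
        return input == null;
      }

      @Override
      public boolean apply(String input) {  // com.google.common.base.Predicate
        return input == null;
      }
    });
    System.out.println(names); // prints [a, b]
  }
}

Guava's default test() already delegates to apply(), so the explicit overrides keep behavior identical for callers of either interface.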
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/util/ElapsedTimeLoggingWrapper.java b/itests/util/src/main/java/org/apache/hadoop/hive/util/ElapsedTimeLoggingWrapper.java
index 061a918f994e..74f50baa1da3 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/util/ElapsedTimeLoggingWrapper.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/util/ElapsedTimeLoggingWrapper.java
@@ -28,7 +28,7 @@ public abstract class ElapsedTimeLoggingWrapper<T> {
   public abstract T invokeInternal() throws Exception;
 
   public T invoke(String message, Logger LOG, boolean toStdErr) throws Exception {
-    Stopwatch sw = new Stopwatch().start();
+    Stopwatch sw = Stopwatch.createStarted();
    try {
      T retVal = invokeInternal();
      return retVal;
diff --git a/llap-client/src/java/org/apache/hadoop/hive/llap/tez/LlapProtocolClientProxy.java b/llap-client/src/java/org/apache/hadoop/hive/llap/tez/LlapProtocolClientProxy.java
index ce75d722ab09..e219692f8c74 100644
--- a/llap-client/src/java/org/apache/hadoop/hive/llap/tez/LlapProtocolClientProxy.java
+++ b/llap-client/src/java/org/apache/hadoop/hive/llap/tez/LlapProtocolClientProxy.java
@@ -131,7 +131,7 @@ public void onSuccess(Void result) {
       public void onFailure(Throwable t) {
         LOG.warn("RequestManager shutdown with error", t);
       }
-    });
+    }, requestManagerExecutor);
   }
 
   @Override
@@ -263,7 +263,7 @@ public void shutdown() {
 
   void submitToExecutor(CallableRequest request, LlapNodeId nodeId) {
     ListenableFuture future = executor.submit(request);
-    Futures.addCallback(future, new ResponseCallback(request.getCallback(), nodeId, this));
+    Futures.addCallback(future, new ResponseCallback(request.getCallback(), nodeId, this), executor);
   }
 
   @VisibleForTesting
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java
index b4c62d5a0723..09ed5b6a4b98 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java
@@ -176,7 +176,7 @@ public void onFailure(Throwable t) {
           Thread.getDefaultUncaughtExceptionHandler().uncaughtException(Thread.currentThread(), t);
         }
       }
-    });
+    }, queueLookupExecutor);
     // TODO: why is this needed? we could just save the host and port?
     nodeId = LlapNodeId.getInstance(localAddress.get().getHostName(), localAddress.get().getPort());
     LOG.info("AMReporter running with DaemonId: {}, NodeId: {}", daemonId, nodeId);
@@ -271,7 +271,7 @@ public void onFailure(Throwable t) {
         LOG.warn("Failed to send taskKilled for {}. The attempt will likely time out.",
             taskAttemptId);
       }
-    });
+    }, executor);
   }
 
   public void queryComplete(QueryIdentifier queryIdentifier) {
@@ -337,7 +337,7 @@ public void onFailure(Throwable t) {
                 amNodeInfo.amNodeId, currentQueryIdentifier, t);
             queryFailedHandler.queryFailed(currentQueryIdentifier);
           }
-        });
+        }, executor);
       }
     }
   } catch (InterruptedException e) {
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapTaskReporter.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapTaskReporter.java
index 3d597029ab77..9af5d287092c 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapTaskReporter.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapTaskReporter.java
@@ -123,7 +123,7 @@ public synchronized void registerTask(RuntimeTask task,
     currentCallable = new HeartbeatCallable(completionListener, task, umbilical, pollInterval,
         sendCounterInterval, maxEventsToGet, requestCounter, containerIdStr, initialEvent,
         fragmentRequestId);
     ListenableFuture future = heartbeatExecutor.submit(currentCallable);
-    Futures.addCallback(future, new HeartbeatCallback(errorReporter));
+    Futures.addCallback(future, new HeartbeatCallback(errorReporter), heartbeatExecutor);
   }
 
   /**
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorService.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorService.java
index 70447d94d35a..8bedba24dc5a 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorService.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorService.java
@@ -168,7 +168,7 @@ public TaskExecutorService(int numExecutors, int waitQueueSize,
     executionCompletionExecutorService = MoreExecutors.listeningDecorator(
         executionCompletionExecutorServiceRaw);
     ListenableFuture future = waitQueueExecutorService.submit(new WaitQueueWorker());
-    Futures.addCallback(future, new WaitQueueWorkerCallback());
+    Futures.addCallback(future, new WaitQueueWorkerCallback(), waitQueueExecutorService);
   }
 
   private Comparator createComparator(
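The addCallback changes in the LLAP files above follow from Guava dropping the two-argument Futures.addCallback overload, which implicitly ran callbacks on a direct executor; in 27.0 the executor must be passed explicitly. A minimal sketch of the three-argument form, with all names illustrative:

import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

import java.util.concurrent.Executors;

public class CallbackSketch {
  public static void main(String[] args) {
    ListeningExecutorService pool =
        MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
    ListenableFuture<Integer> future = pool.submit(() -> 42);
    Futures.addCallback(future, new FutureCallback<Integer>() {
      @Override
      public void onSuccess(Integer result) {
        System.out.println("result = " + result);
      }

      @Override
      public void onFailure(Throwable t) {
        t.printStackTrace();
      }
    }, pool); // the executor argument is now mandatory
    pool.shutdown();
  }
}

Passing the executor that already owns the future, as the patch does, keeps callback execution off the completing thread; MoreExecutors.directExecutor() remains an option where the callback is trivial.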
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
index c3a74afd3d9d..21e39ddfd2f6 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
@@ -113,8 +113,8 @@ public class TaskRunnerCallable extends CallableWithNdc<TaskRunner2Result> {
   private final String queryId;
   private final HadoopShim tezHadoopShim;
   private boolean shouldRunTask = true;
-  final Stopwatch runtimeWatch = new Stopwatch();
-  final Stopwatch killtimerWatch = new Stopwatch();
+  final Stopwatch runtimeWatch = Stopwatch.createUnstarted();
+  final Stopwatch killtimerWatch = Stopwatch.createUnstarted();
   private final AtomicBoolean isStarted = new AtomicBoolean(false);
   private final AtomicBoolean isCompleted = new AtomicBoolean(false);
   private final AtomicBoolean killInvoked = new AtomicBoolean(false);
@@ -275,7 +275,7 @@ public LlapTaskUmbilicalProtocol run() throws Exception {
     } finally {
       FileSystem.closeAllForUGI(fsTaskUgi);
       LOG.info("ExecutionTime for Container: " + request.getContainerIdString() + "=" +
-          runtimeWatch.stop().elapsedMillis());
+          runtimeWatch.stop().elapsed(TimeUnit.MILLISECONDS));
       if (LOG.isDebugEnabled()) {
         LOG.debug(
             "canFinish post completion: " + taskSpec.getTaskAttemptID() + ": " + canFinish());
@@ -501,14 +501,14 @@ public void onSuccess(TaskRunner2Result result) {
           LOG.info("Killed task {}", requestId);
           if (killtimerWatch.isRunning()) {
             killtimerWatch.stop();
-            long elapsed = killtimerWatch.elapsedMillis();
+            long elapsed = killtimerWatch.elapsed(TimeUnit.MILLISECONDS);
             LOG.info("Time to die for task {}", elapsed);
             if (metrics != null) {
               metrics.addMetricsPreemptionTimeToKill(elapsed);
             }
           }
           if (metrics != null) {
-            metrics.addMetricsPreemptionTimeLost(runtimeWatch.elapsedMillis());
+            metrics.addMetricsPreemptionTimeLost(runtimeWatch.elapsed(TimeUnit.MILLISECONDS));
             metrics.incrExecutorTotalKilled();
           }
           break;
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonCacheInfo.java b/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonCacheInfo.java
index 427a0b1c19c1..fbd7fd2892cb 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonCacheInfo.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonCacheInfo.java
@@ -17,10 +17,9 @@
  */
 package org.apache.hadoop.hive.llap.metrics;
 
+import com.google.common.base.MoreObjects;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
-import com.google.common.base.Objects;
-
 /**
  * Metrics information for llap cache.
  */
@@ -50,7 +49,7 @@ public String description() {
 
   @Override
   public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("name", name()).add("description", desc)
         .toString();
   }
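For reference, the Stopwatch edits in TaskRunnerCallable, JvmPauseMonitor, and the test drivers all track the same Guava change: the public Stopwatch constructor and elapsedMillis() were removed in favor of static factories and elapsed(TimeUnit). A small sketch of the replacement API (names illustrative):

import com.google.common.base.Stopwatch;

import java.util.concurrent.TimeUnit;

public class StopwatchSketch {
  public static void main(String[] args) throws InterruptedException {
    Stopwatch sw = Stopwatch.createUnstarted(); // was: new Stopwatch()
    sw.start();
    Thread.sleep(50);
    sw.stop();
    long millis = sw.elapsed(TimeUnit.MILLISECONDS); // was: sw.elapsedMillis()
    System.out.println("elapsed = " + millis + " ms");
  }
}

Stopwatch.createStarted() covers the old new Stopwatch().start() idiom in a single call, which is what CoreCliDriver and ElapsedTimeLoggingWrapper use.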
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonExecutorInfo.java b/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonExecutorInfo.java
index 69d1c6fff14f..9695a4ccba2d 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonExecutorInfo.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonExecutorInfo.java
@@ -17,10 +17,9 @@
  */
 package org.apache.hadoop.hive.llap.metrics;
 
+import com.google.common.base.MoreObjects;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
-import com.google.common.base.Objects;
-
 /**
  * Metrics information for llap daemon container.
  */
@@ -74,7 +73,7 @@ public String description() {
 
   @Override
   public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("name", name()).add("description", desc)
         .toString();
   }
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonIOInfo.java b/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonIOInfo.java
index f0fde62a6ad6..ed6e2bf350f4 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonIOInfo.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonIOInfo.java
@@ -17,10 +17,9 @@
  */
 package org.apache.hadoop.hive.llap.metrics;
 
+import com.google.common.base.MoreObjects;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
-import com.google.common.base.Objects;
-
 /**
  * Llap daemon I/O elevator metrics
 */
@@ -42,7 +41,7 @@ public String description() {
 
   @Override
   public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("name", name()).add("description", desc)
         .toString();
   }
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonJvmInfo.java b/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonJvmInfo.java
index efbddaad14ff..1625cb234c30 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonJvmInfo.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/metrics/LlapDaemonJvmInfo.java
@@ -17,10 +17,9 @@
  */
 package org.apache.hadoop.hive.llap.metrics;
 
+import com.google.common.base.MoreObjects;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
-import com.google.common.base.Objects;
-
 /**
  * Llap daemon JVM info. These are some additional metrics that are not exposed via
  * {@link org.apache.hadoop.metrics.jvm.JvmMetrics}
 */
@@ -53,7 +52,7 @@ public String description() {
 
   @Override
   public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("name", name()).add("description", desc)
         .toString();
   }
diff --git a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java
index 6bedccbd184e..6a664d053b71 100644
--- a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java
+++ b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java
@@ -317,15 +317,15 @@ public Void call() throws Exception {
       }, 10000L, TimeUnit.MILLISECONDS);
 
     nodeEnablerFuture = nodeEnabledExecutor.submit(nodeEnablerCallable);
-    Futures.addCallback(nodeEnablerFuture, new LoggingFutureCallback("NodeEnablerThread", LOG));
+    Futures.addCallback(nodeEnablerFuture, new LoggingFutureCallback("NodeEnablerThread", LOG), nodeEnabledExecutor);
 
     delayedTaskSchedulerFuture =
         delayedTaskSchedulerExecutor.submit(delayedTaskSchedulerCallable);
     Futures.addCallback(delayedTaskSchedulerFuture,
-        new LoggingFutureCallback("DelayedTaskSchedulerThread", LOG));
+        new LoggingFutureCallback("DelayedTaskSchedulerThread", LOG), delayedTaskSchedulerExecutor);
 
     schedulerFuture = schedulerExecutor.submit(schedulerCallable);
-    Futures.addCallback(schedulerFuture, new LoggingFutureCallback("SchedulerThread", LOG));
+    Futures.addCallback(schedulerFuture, new LoggingFutureCallback("SchedulerThread", LOG), schedulerExecutor);
 
     registry.start();
     registry.registerStateChangeListener(new NodeStateChangeListener());
diff --git a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/metrics/LlapTaskSchedulerInfo.java b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/metrics/LlapTaskSchedulerInfo.java
index c190be86ae66..7b1e3fea4b61 100644
--- a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/metrics/LlapTaskSchedulerInfo.java
+++ b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/metrics/LlapTaskSchedulerInfo.java
@@ -17,10 +17,9 @@
  */
 package org.apache.hadoop.hive.llap.tezplugins.metrics;
 
+import com.google.common.base.MoreObjects;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
-import com.google.common.base.Objects;
-
 /**
  * Metrics information for llap task scheduler.
  */
@@ -52,7 +51,7 @@ public String description() {
 
   @Override
   public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("name", name()).add("description", desc)
         .toString();
   }
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java
index f66200fcd3cb..76e452980383 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java
@@ -61,7 +61,8 @@ public void connect() throws IOException {
     if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_IN_TEST)) {
       LOG.debug("Using an in memory client transaction system for testing");
       TransactionManager txnMgr = new TransactionManager(conf);
-      txnMgr.startAndWait();
+      txnMgr.startAsync();
+      txnMgr.awaitRunning();
       txnClient = new InMemoryTxSystemClient(txnMgr);
     } else {
       // TODO should enable use of ZKDiscoveryService if users want it
diff --git a/pom.xml b/pom.xml
index 1d667226fbe1..d85684588714 100644
--- a/pom.xml
+++ b/pom.xml
@@ -135,7 +135,7 @@
     <dropwizard.version>3.1.0</dropwizard.version>
     <dropwizard-metrics-hadoop-metrics2-reporter.version>0.1.2</dropwizard-metrics-hadoop-metrics2-reporter.version>
     <druid.version>0.9.2</druid.version>
-    <guava.version>14.0.1</guava.version>
+    <guava.version>27.0-jre</guava.version>
     <groovy.version>2.4.4</groovy.version>
    <h2database.version>1.3.166</h2database.version>
    <hadoop.version>2.7.2</hadoop.version>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
index 004bb2f60299..abdc2efdffb2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
@@ -22,6 +22,7 @@
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
@@ -105,7 +106,7 @@ public class FetchOperator implements Serializable {
 
   private transient Iterator<Path> iterPath;
   private transient Iterator<PartitionDesc> iterPartDesc;
-  private transient Iterator<FetchInputFormatSplit> iterSplits = Iterators.emptyIterator();
+  private transient Iterator<FetchInputFormatSplit> iterSplits = Collections.emptyIterator();
 
   private transient Path currPath;
   private transient PartitionDesc currDesc;
@@ -540,7 +541,7 @@ public void clearFetchContext() throws HiveException {
       this.currPath = null;
       this.iterPath = null;
       this.iterPartDesc = null;
-      this.iterSplits = Iterators.emptyIterator();
+      this.iterSplits = Collections.emptyIterator();
     } catch (Exception e) {
       throw new HiveException("Failed with exception " + e.getMessage()
          + StringUtils.stringifyException(e));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
index c1f688360cbe..48415a5bac27 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
@@ -461,7 +461,7 @@ private void writeVertices(JsonWriter writer, Set<Vertex> vertices) throws IOExc
    */
   private String getQueryHash(String queryStr) {
     Hasher hasher = Hashing.md5().newHasher();
-    hasher.putString(queryStr);
+    hasher.putUnencodedChars(queryStr);
     return hasher.hash().toString();
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
index 84c090239df3..5a99a1a0820a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
@@ -433,7 +433,7 @@ private void checkPartitionDirs(Path basePath, Set<Path> allDirs, int maxDepth)
     ExecutorService executor;
     if (poolSize <= 1) {
       LOG.debug("Using single-threaded version of MSCK-GetPaths");
-      executor = MoreExecutors.sameThreadExecutor();
+      executor = MoreExecutors.newDirectExecutorService();
     } else {
       LOG.debug("Using multi-threaded version of MSCK-GetPaths with number of threads " + poolSize);
       ThreadFactory threadFactory =
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/views/SubstitutionVisitor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/views/SubstitutionVisitor.java
index 93dcc0e0a116..1dee3b528fdb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/views/SubstitutionVisitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/views/SubstitutionVisitor.java
@@ -2413,6 +2413,10 @@ void visit(Operand operand) {
 
   public static class FilterOnProjectRule extends RelOptRule {
     private static final Predicate<Filter> PREDICATE = new Predicate<Filter>() {
+      public boolean test(Filter input) {
+        return input.getCondition() instanceof RexInputRef;
+      }
+
       public boolean apply(Filter input) {
         return input.getCondition() instanceof RexInputRef;
       }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdPredicates.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdPredicates.java
index 69e157ecd73b..3191d184c35b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdPredicates.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdPredicates.java
@@ -19,6 +19,7 @@
 
 import java.util.ArrayList;
 import java.util.BitSet;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -532,7 +533,7 @@ Iterable<Mapping> mappings(final RexNode predicate) {
       public Iterator<Mapping> iterator() {
         ImmutableBitSet fields = exprFields.get(predicate.toString());
         if (fields.cardinality() == 0) {
-          return Iterators.emptyIterator();
+          return Collections.emptyIterator();
         }
         return new ExprsItr(fields);
       }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java
index ee674430689d..109e5ea1f24c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java
@@ -18,13 +18,7 @@
 package org.apache.hadoop.hive.ql.optimizer.physical;
 
 import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.Stack;
+import java.util.*;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -73,16 +67,16 @@ public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
 
     if (currTask instanceof MapRedTask) {
       MapRedTask mr = (MapRedTask) currTask;
-      ops.addAll(mr.getWork().getAllOperators());
+      ops.addAll((List<Operator<? extends OperatorDesc>>) mr.getWork().getAllOperators());
     } else if (currTask instanceof TezTask) {
       TezWork work = ((TezTask) currTask).getWork();
       for (BaseWork w : work.getAllWork()) {
-        ops.addAll(w.getAllOperators());
+        ops.addAll((Set<Operator<? extends OperatorDesc>>) w.getAllOperators());
       }
     } else if (currTask instanceof SparkTask) {
       SparkWork sparkWork = (SparkWork) currTask.getWork();
       for (BaseWork w : sparkWork.getAllWork()) {
-        ops.addAll(w.getAllOperators());
+        ops.addAll((Set<Operator<? extends OperatorDesc>>) w.getAllOperators());
       }
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSpec.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSpec.java
index 48362a3574dc..349d8519b38e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSpec.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSpec.java
@@ -249,9 +249,17 @@ public boolean allowEventReplacementInto(Table table) {
    */
  public Predicate<Partition> allowEventReplacementInto() {
    return new Predicate<Partition>() {
+      @Override
+      public boolean test(@Nullable Partition partition) {
+        if (partition == null) {
+          return false;
+        }
+        return (allowEventReplacementInto(partition));
+      }
+
       @Override
       public boolean apply(@Nullable Partition partition) {
-        if (partition == null){
+        if (partition == null) {
           return false;
         }
         return (allowEventReplacementInto(partition));
diff --git a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
index 26c881223f14..89718d51dfde 100644
--- a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
+++ b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
@@ -133,6 +133,11 @@ private void registerActiveSesssionMetrics(Metrics metrics) {
       @Override
       public Integer getValue() {
         Iterable<HiveSession> filtered = Iterables.filter(getSessions(), new Predicate<HiveSession>() {
+          @Override
+          public boolean test(HiveSession hiveSession) {
+            return hiveSession.getNoOperationTime() == 0L;
+          }
+
           @Override
           public boolean apply(HiveSession hiveSession) {
             return hiveSession.getNoOperationTime() == 0L;
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java b/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
index 277738fac705..e1aaf5eade53 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
@@ -186,6 +186,13 @@ private static AclEntry newAclEntry(AclEntryScope scope, AclEntryType type,
    */
   private static void removeBaseAclEntries(List<AclEntry> entries) {
     Iterables.removeIf(entries, new Predicate<AclEntry>() {
+      @Override
+      public boolean test(AclEntry input) {
+        if (input.getName() == null) {
+          return true;
+        }
+        return false;
+      }
       @Override
       public boolean apply(AclEntry input) {
         if (input.getName() == null) {
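The TephraHBaseConnection hunk above reflects Guava's Service lifecycle change: startAndWait() and stopAndWait() were removed in favor of startAsync().awaitRunning() and stopAsync().awaitTerminated(). Tephra's TransactionManager is a Guava Service, but the class below is purely illustrative, built on Guava's AbstractIdleService:

import com.google.common.util.concurrent.AbstractIdleService;

public class ServiceSketch extends AbstractIdleService {
  @Override
  protected void startUp() {
    System.out.println("starting");
  }

  @Override
  protected void shutDown() {
    System.out.println("stopping");
  }

  public static void main(String[] args) {
    ServiceSketch svc = new ServiceSketch();
    svc.startAsync();
    svc.awaitRunning();    // was: svc.startAndWait()
    svc.stopAsync();
    svc.awaitTerminated(); // was: svc.stopAndWait()
  }
}

Splitting "start" from "wait until running" is what lets callers choose between asynchronous startup and the blocking behavior the old single call provided.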
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/MetricsCollection.java b/spark-client/src/main/java/org/apache/hive/spark/client/MetricsCollection.java
index 0f03a6406353..3087a1c80095 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/MetricsCollection.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/MetricsCollection.java
@@ -100,6 +100,10 @@ public Long apply(TaskInfo input) {
 
   public Metrics getTaskMetrics(final int jobId, final int stageId, final long taskId) {
     Predicate<TaskInfo> filter = new Predicate<TaskInfo>() {
+      @Override
+      public boolean test(TaskInfo input) {
+        return jobId == input.jobId && stageId == input.stageId && taskId == input.taskId;
+      }
       @Override
       public boolean apply(TaskInfo input) {
         return jobId == input.jobId && stageId == input.stageId && taskId == input.taskId;
@@ -258,6 +262,10 @@ public boolean apply(TaskInfo input) {
       return jobId == input.jobId;
     }
 
+    @Override
+    public boolean test(TaskInfo input) {
+      return jobId == input.jobId;
+    }
   }
 
   private static class StageFilter implements Predicate<TaskInfo> {
@@ -275,6 +283,10 @@ public boolean apply(TaskInfo input) {
       return jobId == input.jobId && stageId == input.stageId;
     }
 
+    @Override
+    public boolean test(TaskInfo input) {
+      return jobId == input.jobId && stageId == input.stageId;
+    }
   }
 }
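Finally, the recurring Objects.toStringHelper to MoreObjects.toStringHelper rename across the MetricsInfo enums exists because Guava moved toStringHelper out of com.google.common.base.Objects (deprecated in 18.0 and later removed, to keep that class aligned with java.util.Objects). A standalone sketch of the enum pattern those classes use; the enum name and field here are illustrative:

import com.google.common.base.MoreObjects;

public enum ExampleInfo {
  EXAMPLE("An example metric");

  private final String desc;

  ExampleInfo(String desc) {
    this.desc = desc;
  }

  @Override
  public String toString() {
    // MoreObjects.toStringHelper replaces the removed Objects.toStringHelper
    return MoreObjects.toStringHelper(this)
        .add("name", name()).add("description", desc)
        .toString();
  }
}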