From 4efa8b8d0173ff19ebd902bb40808d1a54f2d129 Mon Sep 17 00:00:00 2001 From: Robert <33911814+RobertG111@users.noreply.github.com> Date: Thu, 26 May 2022 09:17:14 +0100 Subject: [PATCH] gh-2618: Cleanup inconsistent use of logging strings (#2651) * Found 1 extra inconsistency than the mentioned one * found and fixed other inconsistencies * further fixes * further fixes 2 * fixed 3 out of 4 checkstyle errors - 1 more to fix * all checkstyle errors fixed * removed use of String.format * removed another instance of using String.format * more changes * additional change * reverted changes --- .../java/uk/gov/gchq/gaffer/commonutil/ExecutorService.java | 2 +- .../main/java/uk/gov/gchq/gaffer/commonutil/StreamUtil.java | 2 +- .../graph/src/main/java/uk/gov/gchq/gaffer/graph/Graph.java | 4 ++-- .../uk/gov/gchq/gaffer/graph/hook/FunctionAuthoriser.java | 2 +- .../resolver/named/NamedOperationScoreResolver.java | 2 +- .../gaffer/traffic/generator/RoadTrafficDataLoader.java | 2 +- .../gov/gchq/gaffer/cache/impl/HazelcastCacheService.java | 3 +-- .../integration/loader/AddElementsFromHdfsLoaderIT.java | 3 +-- .../handler/scalardd/GetRDDOfAllElementsHandlerIT.java | 2 +- .../gaffer/rest/factory/AccumuloDisableOperationsTest.java | 2 +- .../rest/controller/GraphConfigurationController.java | 2 +- .../operation/hdfs/handler/AddElementsFromHdfsHandler.java | 2 +- .../job/factory/AccumuloAddElementsFromHdfsJobFactory.java | 2 +- .../accumulostore/operation/impl/CreateSplitPointsTest.java | 2 +- .../gchq/gaffer/federatedstore/FederatedGraphStorage.java | 6 +++--- .../gov/gchq/gaffer/mapstore/impl/AddElementsHandler.java | 2 +- 16 files changed, 19 insertions(+), 21 deletions(-) diff --git a/core/common-util/src/main/java/uk/gov/gchq/gaffer/commonutil/ExecutorService.java b/core/common-util/src/main/java/uk/gov/gchq/gaffer/commonutil/ExecutorService.java index b3b1d187781..e01b0a9422e 100644 --- a/core/common-util/src/main/java/uk/gov/gchq/gaffer/commonutil/ExecutorService.java +++ 
b/core/common-util/src/main/java/uk/gov/gchq/gaffer/commonutil/ExecutorService.java @@ -38,7 +38,7 @@ private ExecutorService() { public static synchronized void initialise(final int jobExecutorThreadCount) { if (service == null) { - LOGGER.debug("Initialising ExecutorService with " + jobExecutorThreadCount + " threads"); + LOGGER.debug("Initialising ExecutorService with {} threads", jobExecutorThreadCount); service = Executors.newScheduledThreadPool(jobExecutorThreadCount, runnable -> { final Thread thread = new Thread(runnable); thread.setDaemon(true); diff --git a/core/common-util/src/main/java/uk/gov/gchq/gaffer/commonutil/StreamUtil.java b/core/common-util/src/main/java/uk/gov/gchq/gaffer/commonutil/StreamUtil.java index 31ca31d92e3..6295b561e52 100644 --- a/core/common-util/src/main/java/uk/gov/gchq/gaffer/commonutil/StreamUtil.java +++ b/core/common-util/src/main/java/uk/gov/gchq/gaffer/commonutil/StreamUtil.java @@ -154,7 +154,7 @@ public static InputStream[] openStreams(final Class clazz, final String folderPa inputStreams.add(openStream(clazz, file)); } catch (final Exception e) { int closedStreamsCount = closeStreams(inputStreams.toArray(new InputStream[inputStreams.size()])); - LOGGER.info(String.format("Closed %s input streams", closedStreamsCount)); + LOGGER.info("Closed {} input streams", closedStreamsCount); } } ); diff --git a/core/graph/src/main/java/uk/gov/gchq/gaffer/graph/Graph.java b/core/graph/src/main/java/uk/gov/gchq/gaffer/graph/Graph.java index 67dd5082f9a..cb91e99e70e 100644 --- a/core/graph/src/main/java/uk/gov/gchq/gaffer/graph/Graph.java +++ b/core/graph/src/main/java/uk/gov/gchq/gaffer/graph/Graph.java @@ -284,7 +284,7 @@ public JobDetail executeJob(final Job job, final Context context) throws Operati try { result = graphHook.onFailure(result, clonedOpChain, clonedContext, e); } catch (final Exception graphHookE) { - LOGGER.warn("Error in graphHook " + graphHook.getClass().getSimpleName() + ": " + graphHookE.getMessage(), 
graphHookE); + LOGGER.warn("Error in graphHook {} : {}", graphHook.getClass().getSimpleName(), graphHookE.getMessage(), graphHookE); } } CloseableUtil.close(clonedOpChain); @@ -341,7 +341,7 @@ private GraphResult _execute(final StoreExecuter storeExecuter, final try { result = graphHook.onFailure(result, clonedOpChain, clonedContext, e); } catch (final Exception graphHookE) { - LOGGER.warn("Error in graphHook " + graphHook.getClass().getSimpleName() + ": " + graphHookE.getMessage(), graphHookE); + LOGGER.warn("Error in graphHook {} : {}", graphHook.getClass().getSimpleName(), graphHookE.getMessage(), graphHookE); } } CloseableUtil.close(clonedOpChain); diff --git a/core/graph/src/main/java/uk/gov/gchq/gaffer/graph/hook/FunctionAuthoriser.java b/core/graph/src/main/java/uk/gov/gchq/gaffer/graph/hook/FunctionAuthoriser.java index e62c3824ed5..d964fd70b30 100644 --- a/core/graph/src/main/java/uk/gov/gchq/gaffer/graph/hook/FunctionAuthoriser.java +++ b/core/graph/src/main/java/uk/gov/gchq/gaffer/graph/hook/FunctionAuthoriser.java @@ -71,7 +71,7 @@ public void preExecute(final OperationChain opChain, final Context context) { // This should never happen in real life as operation chains should // always be json serialisable. However this could happen if using a // mock in testing. To account for this, it will be logged. 
- LOGGER.warn("Failed to serialise operation chain: " + opChain + " due to " + e.getMessage()); + LOGGER.warn("Failed to serialise operation chain: {} due to {}", opChain, e.getMessage()); return; } finally { if (input != null) { diff --git a/core/store/src/main/java/uk/gov/gchq/gaffer/store/operation/resolver/named/NamedOperationScoreResolver.java b/core/store/src/main/java/uk/gov/gchq/gaffer/store/operation/resolver/named/NamedOperationScoreResolver.java index 85cca528417..979126b76a3 100644 --- a/core/store/src/main/java/uk/gov/gchq/gaffer/store/operation/resolver/named/NamedOperationScoreResolver.java +++ b/core/store/src/main/java/uk/gov/gchq/gaffer/store/operation/resolver/named/NamedOperationScoreResolver.java @@ -61,7 +61,7 @@ public Integer getScore(final NamedOperation operation, final ScoreResolver defa try { namedOpDetail = cache.getFromCache(operation.getOperationName()); } catch (final CacheOperationFailedException e) { - LOGGER.warn("Error accessing cache for Operation '{}': " + e.getMessage(), operation.getClass().getName()); + LOGGER.warn("Error accessing cache for Operation '{}': {}", operation.getClass().getName(), e.getMessage()); } if (null != namedOpDetail) { diff --git a/example/road-traffic/road-traffic-generators/src/main/java/uk/gov/gchq/gaffer/traffic/generator/RoadTrafficDataLoader.java b/example/road-traffic/road-traffic-generators/src/main/java/uk/gov/gchq/gaffer/traffic/generator/RoadTrafficDataLoader.java index 764f7df50c2..1d5c48222c2 100644 --- a/example/road-traffic/road-traffic-generators/src/main/java/uk/gov/gchq/gaffer/traffic/generator/RoadTrafficDataLoader.java +++ b/example/road-traffic/road-traffic-generators/src/main/java/uk/gov/gchq/gaffer/traffic/generator/RoadTrafficDataLoader.java @@ -121,7 +121,7 @@ public static void main(final String[] args) { dataLoader.load(new File(dataFile)); LOGGER.info("Data has been loaded"); } catch (final Exception e) { - LOGGER.info("Unable to load data: " + e.getMessage()); + 
LOGGER.info("Unable to load data: {}", e.getMessage()); throw new RuntimeException("Unable to load data", e); } } diff --git a/library/cache-library/hazelcast-cache-service/src/main/java/uk/gov/gchq/gaffer/cache/impl/HazelcastCacheService.java b/library/cache-library/hazelcast-cache-service/src/main/java/uk/gov/gchq/gaffer/cache/impl/HazelcastCacheService.java index 04c7d33b019..013639b533e 100644 --- a/library/cache-library/hazelcast-cache-service/src/main/java/uk/gov/gchq/gaffer/cache/impl/HazelcastCacheService.java +++ b/library/cache-library/hazelcast-cache-service/src/main/java/uk/gov/gchq/gaffer/cache/impl/HazelcastCacheService.java @@ -42,8 +42,7 @@ private void configureHazelcast(final Properties properties) { if (null == hazelcast || !Hazelcast.getAllHazelcastInstances().contains(hazelcast)) { String configFile = properties.getProperty(CACHE_CONFIG_FILE); if (null == configFile) { - LOGGER.warn("Config file not set using system property: " + CACHE_CONFIG_FILE - + ". Using default settings"); + LOGGER.warn("Config file not set using system property: {}. 
Using default settings", CACHE_CONFIG_FILE); hazelcast = Hazelcast.newHazelcastInstance(); } else { diff --git a/library/hdfs-library/src/test/java/uk/gov/gchq/gaffer/hdfs/integration/loader/AddElementsFromHdfsLoaderIT.java b/library/hdfs-library/src/test/java/uk/gov/gchq/gaffer/hdfs/integration/loader/AddElementsFromHdfsLoaderIT.java index de475850e6d..07f0de013e7 100644 --- a/library/hdfs-library/src/test/java/uk/gov/gchq/gaffer/hdfs/integration/loader/AddElementsFromHdfsLoaderIT.java +++ b/library/hdfs-library/src/test/java/uk/gov/gchq/gaffer/hdfs/integration/loader/AddElementsFromHdfsLoaderIT.java @@ -113,8 +113,7 @@ public void _setup() throws Exception { .replaceFirst("/$", "") + testFolder.getAbsolutePath(); - - logger.info("using root dir: " + root); + logger.info("using root dir: {}", root); inputDir1 = root + "/inputDir1"; inputDir2 = root + "/inputDir2"; diff --git a/library/spark/spark-accumulo-library/src/test/java/uk/gov/gchq/gaffer/sparkaccumulo/integration/operation/handler/scalardd/GetRDDOfAllElementsHandlerIT.java b/library/spark/spark-accumulo-library/src/test/java/uk/gov/gchq/gaffer/sparkaccumulo/integration/operation/handler/scalardd/GetRDDOfAllElementsHandlerIT.java index 91aab891314..78a4baaa7cf 100644 --- a/library/spark/spark-accumulo-library/src/test/java/uk/gov/gchq/gaffer/sparkaccumulo/integration/operation/handler/scalardd/GetRDDOfAllElementsHandlerIT.java +++ b/library/spark/spark-accumulo-library/src/test/java/uk/gov/gchq/gaffer/sparkaccumulo/integration/operation/handler/scalardd/GetRDDOfAllElementsHandlerIT.java @@ -473,7 +473,7 @@ private Graph getGraphForDirectRDDForIngestAggregation(final KeyPackage keyPacka filesDir.mkdir(); failuresDir.mkdir(); } catch (Exception e) { - LOGGER.error("Failed to create directory: " + e.getMessage()); + LOGGER.error("Failed to create directory: {}", e.getMessage()); } writeFile(keyPackage, graph.getSchema(), file); diff --git 
a/rest-api/accumulo-rest/src/test/java/uk/gov/gchq/gaffer/rest/factory/AccumuloDisableOperationsTest.java b/rest-api/accumulo-rest/src/test/java/uk/gov/gchq/gaffer/rest/factory/AccumuloDisableOperationsTest.java index ae74a5e0d82..61f4c8b716a 100644 --- a/rest-api/accumulo-rest/src/test/java/uk/gov/gchq/gaffer/rest/factory/AccumuloDisableOperationsTest.java +++ b/rest-api/accumulo-rest/src/test/java/uk/gov/gchq/gaffer/rest/factory/AccumuloDisableOperationsTest.java @@ -52,7 +52,7 @@ private void createUpdatedPropertiesFile(AccumuloProperties accumuloProperties, properties.store(fos, "AccumuloDisableOperationsTest - " + filename + " with current zookeeper"); fos.close(); } catch (IOException e) { - LOGGER.error("Failed to write Properties file: " + filename + ": " + e.getMessage()); + LOGGER.error("Failed to write Properties file: {} : {}", filename, e.getMessage()); } } diff --git a/rest-api/spring-rest/src/main/java/uk/gov/gchq/gaffer/rest/controller/GraphConfigurationController.java b/rest-api/spring-rest/src/main/java/uk/gov/gchq/gaffer/rest/controller/GraphConfigurationController.java index 58966ec0baa..a98e314a8ea 100644 --- a/rest-api/spring-rest/src/main/java/uk/gov/gchq/gaffer/rest/controller/GraphConfigurationController.java +++ b/rest-api/spring-rest/src/main/java/uk/gov/gchq/gaffer/rest/controller/GraphConfigurationController.java @@ -96,7 +96,7 @@ public Set getFilterFunctions(@PathVariable("inputClass") final String in try { predicate = (Predicate) predicateClass.newInstance(); } catch (final IllegalAccessException | InstantiationException e) { - LOGGER.warn("Failed to create new instance of " + predicateClass, e); + LOGGER.warn("Failed to create new instance of {}", predicateClass, e); LOGGER.warn("Skipping"); continue; } diff --git a/store-implementation/accumulo-store/src/main/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/handler/AddElementsFromHdfsHandler.java 
b/store-implementation/accumulo-store/src/main/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/handler/AddElementsFromHdfsHandler.java index 66e6dda4ad8..b81d2d1aec6 100644 --- a/store-implementation/accumulo-store/src/main/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/handler/AddElementsFromHdfsHandler.java +++ b/store-implementation/accumulo-store/src/main/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/handler/AddElementsFromHdfsHandler.java @@ -63,7 +63,7 @@ public void doOperation(final AddElementsFromHdfs operation, if (null == operation.getSplitsFilePath()) { final String splitsFilePath = getPathWithSlashSuffix(operation.getWorkingPath()) + context.getJobId() + "/splits"; - LOGGER.info("Using working directory for splits files: " + splitsFilePath); + LOGGER.info("Using working directory for splits files: {}", splitsFilePath); operation.setSplitsFilePath(splitsFilePath); } diff --git a/store-implementation/accumulo-store/src/main/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/handler/job/factory/AccumuloAddElementsFromHdfsJobFactory.java b/store-implementation/accumulo-store/src/main/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/handler/job/factory/AccumuloAddElementsFromHdfsJobFactory.java index 6444de1d525..1c5b52475bf 100644 --- a/store-implementation/accumulo-store/src/main/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/handler/job/factory/AccumuloAddElementsFromHdfsJobFactory.java +++ b/store-implementation/accumulo-store/src/main/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/handler/job/factory/AccumuloAddElementsFromHdfsJobFactory.java @@ -102,7 +102,7 @@ public void setupJob(final Job job, final AddElementsFromHdfs operation, final S if (!NoPartitioner.class.equals(operation.getPartitioner())) { if (null != operation.getPartitioner()) { operation.setPartitioner(GafferKeyRangePartitioner.class); - LOGGER.warn("Partitioner class " + operation.getPartitioner().getName() + " will be replaced with " + 
GafferKeyRangePartitioner.class.getName()); + LOGGER.warn("Partitioner class {} will be replaced with {}", operation.getPartitioner().getName(), GafferKeyRangePartitioner.class.getName()); } setupPartitioner(job, operation, (AccumuloStore) store); } diff --git a/store-implementation/accumulo-store/src/test/java/uk/gov/gchq/gaffer/accumulostore/operation/impl/CreateSplitPointsTest.java b/store-implementation/accumulo-store/src/test/java/uk/gov/gchq/gaffer/accumulostore/operation/impl/CreateSplitPointsTest.java index a06fab2d9d9..5f976bc54c5 100644 --- a/store-implementation/accumulo-store/src/test/java/uk/gov/gchq/gaffer/accumulostore/operation/impl/CreateSplitPointsTest.java +++ b/store-implementation/accumulo-store/src/test/java/uk/gov/gchq/gaffer/accumulostore/operation/impl/CreateSplitPointsTest.java @@ -86,7 +86,7 @@ public void setup() throws IOException { + testFolder.getAbsolutePath(); - LOGGER.info("using root dir: " + root); + LOGGER.info("using root dir: {}", root); inputDir = root + "/inputDir"; outputDir = root + "/outputDir"; diff --git a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorage.java b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorage.java index b1dd45d39ec..e44cb5b12f5 100644 --- a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorage.java +++ b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorage.java @@ -451,7 +451,7 @@ private void makeAllGraphsFromCache() throws StorageException { try { makeGraphFromCache(graphId); } catch (final Exception e) { - LOGGER.error(String.format("Skipping graphId: %s due to: %s", graphId, e.getMessage()), e); + LOGGER.error("Skipping graphId: {} due to: {}", graphId, e.getMessage(), e); } } } @@ -511,7 +511,7 @@ private boolean changeGraphAccess(final String graphId, final FederatedAccess ne } 
catch (final CacheOperationException e) { //TODO FS recovery String s = "Error occurred updating graphAccess. GraphStorage=updated, Cache=outdated. graphId:" + graphId; - LOGGER.error(s + " graphStorage access:{} cache access:{}", newFederatedAccess, oldAccess); + LOGGER.error("{} graphStorage access:{} cache access:{}", s, newFederatedAccess, oldAccess); throw new StorageException(s, e); } } @@ -590,7 +590,7 @@ private boolean changeGraphId(final String graphId, final String newGraphId, fin federatedStoreCache.addGraphToCache(newGraphSerialisable, key, true); } catch (final CacheOperationException e) { String s = "Contact Admin for recovery. Error occurred updating graphId. GraphStorage=updated, Cache=outdated graphId."; - LOGGER.error(s + " graphStorage graphId:{} cache graphId:{}", newGraphId, graphId); + LOGGER.error("{} graphStorage graphId:{} cache graphId:{}", s, newGraphId, graphId); throw new StorageException(s, e); } federatedStoreCache.deleteGraphFromCache(graphId); diff --git a/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/AddElementsHandler.java b/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/AddElementsHandler.java index 09c87c79532..f690a51fac1 100644 --- a/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/AddElementsHandler.java +++ b/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/AddElementsHandler.java @@ -65,7 +65,7 @@ private void addElements(final Iterable elements, final MapSt // Add all elements directly addBatch(mapImpl, schema, elements); } else { - LOGGER.info("Adding elements in batches, batch size = " + bufferSize); + LOGGER.info("Adding elements in batches, batch size = {}", bufferSize); int count = 0; final List batch = new ArrayList<>(bufferSize); for (final Element element : elements) {