Skip to content

Commit

Permalink
gh-2618: Cleanup inconsistent use of logging strings (#2651)
Browse files Browse the repository at this point in the history
* Found 1 more inconsistency than the one mentioned in the issue

* found and fixed other inconsistencies

* further fixes

* further fixes 2

* fixed 3 out of 4 checkstyle errors - 1 more to fix

* all checkstyle errors fixed

* removed use of String.format

* removed another instance of using String.format

* more changes

* additional change

* reverted changes
  • Loading branch information
RobertG111 authored and t92549 committed May 26, 2022
1 parent 4e33a78 commit 4efa8b8
Show file tree
Hide file tree
Showing 16 changed files with 19 additions and 21 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ private ExecutorService() {

public static synchronized void initialise(final int jobExecutorThreadCount) {
if (service == null) {
LOGGER.debug("Initialising ExecutorService with " + jobExecutorThreadCount + " threads");
LOGGER.debug("Initialising ExecutorService with {} threads", jobExecutorThreadCount);
service = Executors.newScheduledThreadPool(jobExecutorThreadCount, runnable -> {
final Thread thread = new Thread(runnable);
thread.setDaemon(true);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ public static InputStream[] openStreams(final Class clazz, final String folderPa
inputStreams.add(openStream(clazz, file));
} catch (final Exception e) {
int closedStreamsCount = closeStreams(inputStreams.toArray(new InputStream[inputStreams.size()]));
LOGGER.info(String.format("Closed %s input streams", closedStreamsCount));
LOGGER.info("Closed {} input streams", closedStreamsCount);
}
}
);
Expand Down
4 changes: 2 additions & 2 deletions core/graph/src/main/java/uk/gov/gchq/gaffer/graph/Graph.java
Original file line number Diff line number Diff line change
Expand Up @@ -284,7 +284,7 @@ public JobDetail executeJob(final Job job, final Context context) throws Operati
try {
result = graphHook.onFailure(result, clonedOpChain, clonedContext, e);
} catch (final Exception graphHookE) {
LOGGER.warn("Error in graphHook " + graphHook.getClass().getSimpleName() + ": " + graphHookE.getMessage(), graphHookE);
LOGGER.warn("Error in graphHook {} : {}", graphHook.getClass().getSimpleName(), graphHookE.getMessage(), graphHookE);
}
}
CloseableUtil.close(clonedOpChain);
Expand Down Expand Up @@ -341,7 +341,7 @@ private <O> GraphResult<O> _execute(final StoreExecuter<O> storeExecuter, final
try {
result = graphHook.onFailure(result, clonedOpChain, clonedContext, e);
} catch (final Exception graphHookE) {
LOGGER.warn("Error in graphHook " + graphHook.getClass().getSimpleName() + ": " + graphHookE.getMessage(), graphHookE);
LOGGER.warn("Error in graphHook {} : {}", graphHook.getClass().getSimpleName(), graphHookE.getMessage(), graphHookE);
}
}
CloseableUtil.close(clonedOpChain);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ public void preExecute(final OperationChain<?> opChain, final Context context) {
// This should never happen in real life as operation chains should
// always be json serialisable. However this could happen if using a
// mock in testing. To account for this, it will be logged.
LOGGER.warn("Failed to serialise operation chain: " + opChain + " due to " + e.getMessage());
LOGGER.warn("Failed to serialise operation chain: {} due to {}", opChain, e.getMessage());
return;
} finally {
if (input != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ public Integer getScore(final NamedOperation operation, final ScoreResolver defa
try {
namedOpDetail = cache.getFromCache(operation.getOperationName());
} catch (final CacheOperationFailedException e) {
LOGGER.warn("Error accessing cache for Operation '{}': " + e.getMessage(), operation.getClass().getName());
LOGGER.warn("Error accessing cache for Operation '{}': {}", operation.getClass().getName(), e.getMessage());
}

if (null != namedOpDetail) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ public static void main(final String[] args) {
dataLoader.load(new File(dataFile));
LOGGER.info("Data has been loaded");
} catch (final Exception e) {
LOGGER.info("Unable to load data: " + e.getMessage());
LOGGER.info("Unable to load data:" + e.getMessage());
throw new RuntimeException("Unable to load data", e);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,7 @@ private void configureHazelcast(final Properties properties) {
if (null == hazelcast || !Hazelcast.getAllHazelcastInstances().contains(hazelcast)) {
String configFile = properties.getProperty(CACHE_CONFIG_FILE);
if (null == configFile) {
LOGGER.warn("Config file not set using system property: " + CACHE_CONFIG_FILE
+ ". Using default settings");
LOGGER.warn("Config file not set using system property: {}. Using default settings", CACHE_CONFIG_FILE);

hazelcast = Hazelcast.newHazelcastInstance();
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -113,8 +113,7 @@ public void _setup() throws Exception {
.replaceFirst("/$", "")
+ testFolder.getAbsolutePath();


logger.info("using root dir: " + root);
logger.info("using root dir: {}", root);

inputDir1 = root + "/inputDir1";
inputDir2 = root + "/inputDir2";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -473,7 +473,7 @@ private Graph getGraphForDirectRDDForIngestAggregation(final KeyPackage keyPacka
filesDir.mkdir();
failuresDir.mkdir();
} catch (Exception e) {
LOGGER.error("Failed to create directory: " + e.getMessage());
LOGGER.error("Failed to create directory: {}", e.getMessage());
}

writeFile(keyPackage, graph.getSchema(), file);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ private void createUpdatedPropertiesFile(AccumuloProperties accumuloProperties,
properties.store(fos, "AccumuloDisableOperationsTest - " + filename + " with current zookeeper");
fos.close();
} catch (IOException e) {
LOGGER.error("Failed to write Properties file: " + filename + ": " + e.getMessage());
LOGGER.error("Failed to write Properties file: {} : {}", filename, e.getMessage());
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ public Set<Class> getFilterFunctions(@PathVariable("inputClass") final String in
try {
predicate = (Predicate) predicateClass.newInstance();
} catch (final IllegalAccessException | InstantiationException e) {
LOGGER.warn("Failed to create new instance of " + predicateClass, e);
LOGGER.warn("Failed to create new instance of {}", predicateClass, e);
LOGGER.warn("Skipping");
continue;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ public void doOperation(final AddElementsFromHdfs operation,

if (null == operation.getSplitsFilePath()) {
final String splitsFilePath = getPathWithSlashSuffix(operation.getWorkingPath()) + context.getJobId() + "/splits";
LOGGER.info("Using working directory for splits files: " + splitsFilePath);
LOGGER.info("Using working directory for splits files: {}", splitsFilePath);
operation.setSplitsFilePath(splitsFilePath);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ public void setupJob(final Job job, final AddElementsFromHdfs operation, final S
if (!NoPartitioner.class.equals(operation.getPartitioner())) {
if (null != operation.getPartitioner()) {
operation.setPartitioner(GafferKeyRangePartitioner.class);
LOGGER.warn("Partitioner class " + operation.getPartitioner().getName() + " will be replaced with " + GafferKeyRangePartitioner.class.getName());
LOGGER.warn("Partitioner class {} will be replaced with {}", operation.getPartitioner().getName(), GafferKeyRangePartitioner.class.getName());
}
setupPartitioner(job, operation, (AccumuloStore) store);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ public void setup() throws IOException {
+ testFolder.getAbsolutePath();


LOGGER.info("using root dir: " + root);
LOGGER.info("using root dir: {}", root);

inputDir = root + "/inputDir";
outputDir = root + "/outputDir";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -451,7 +451,7 @@ private void makeAllGraphsFromCache() throws StorageException {
try {
makeGraphFromCache(graphId);
} catch (final Exception e) {
LOGGER.error(String.format("Skipping graphId: %s due to: %s", graphId, e.getMessage()), e);
LOGGER.error("Skipping graphId: {} due to: {}", graphId, e.getMessage(), e);
}
}
}
Expand Down Expand Up @@ -511,7 +511,7 @@ private boolean changeGraphAccess(final String graphId, final FederatedAccess ne
} catch (final CacheOperationException e) {
//TODO FS recovery
String s = "Error occurred updating graphAccess. GraphStorage=updated, Cache=outdated. graphId:" + graphId;
LOGGER.error(s + " graphStorage access:{} cache access:{}", newFederatedAccess, oldAccess);
LOGGER.error("{} graphStorage access:{} cache access:{}", s, newFederatedAccess, oldAccess);
throw new StorageException(s, e);
}
}
Expand Down Expand Up @@ -590,7 +590,7 @@ private boolean changeGraphId(final String graphId, final String newGraphId, fin
federatedStoreCache.addGraphToCache(newGraphSerialisable, key, true);
} catch (final CacheOperationException e) {
String s = "Contact Admin for recovery. Error occurred updating graphId. GraphStorage=updated, Cache=outdated graphId.";
LOGGER.error(s + " graphStorage graphId:{} cache graphId:{}", newGraphId, graphId);
LOGGER.error("{} graphStorage graphId:{} cache graphId:{}", s, newGraphId, graphId);
throw new StorageException(s, e);
}
federatedStoreCache.deleteGraphFromCache(graphId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ private void addElements(final Iterable<? extends Element> elements, final MapSt
// Add all elements directly
addBatch(mapImpl, schema, elements);
} else {
LOGGER.info("Adding elements in batches, batch size = " + bufferSize);
LOGGER.info("Adding elements in batches, batch size = {}", bufferSize);
int count = 0;
final List<Element> batch = new ArrayList<>(bufferSize);
for (final Element element : elements) {
Expand Down

0 comments on commit 4efa8b8

Please sign in to comment.