NIFI-13421 Replaced String.format in logging statements that used only formatting strings (%s) with logging parameters ({})

This closes apache#8994

Signed-off-by: Mike Thomsen <mthomsen@apache.org>
dan-s1 authored and MikeThomsen committed Jun 23, 2024
1 parent 01d1aa9 commit 957510e
Showing 81 changed files with 120 additions and 131 deletions.
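
Every hunk below follows the same pattern: a message built eagerly with String.format is replaced by the logger's own {} parameters, which defer substitution until the log level is actually enabled. A minimal before/after sketch, not taken from the commit, using plain SLF4J for illustration (NiFi's ComponentLog accepts the same {} placeholders); class and variable names here are hypothetical:

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical sketch: eager String.format vs. parameterized logging.
public class LoggingParameterSketch {
    private static final Logger logger = LoggerFactory.getLogger(LoggingParameterSketch.class);

    public static void main(String[] args) {
        String key = "data.csv";
        String bucket = "my-bucket";

        // Before: the message string is always built, even when WARN is disabled.
        logger.warn(String.format("Failed to fetch [%s] from Bucket [%s]", key, bucket));

        // After: the {} placeholders are filled only when WARN is enabled,
        // so a disabled statement costs almost nothing.
        logger.warn("Failed to fetch [{}] from Bucket [{}]", key, bucket);
    }
}
```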
@@ -263,7 +263,7 @@ private Map<String, Object> validateAMQPHeaderProperty(String amqpPropValue, Cha
} else if (kv.length == 1) {
headers.put(kv[0].trim(), null);
} else {
- getLogger().warn(String.format("Malformed key value pair in AMQP header property (%s): %s", amqpPropValue, strEntry));
+ getLogger().warn("Malformed key value pair in AMQP header property ({}): {}", amqpPropValue, strEntry);
}
}
return headers;
@@ -334,7 +334,7 @@ public List<ConfigVerificationResult> verify(ProcessContext context, ComponentLo
.explanation(String.format("Successfully performed HEAD on [%s] (%s bytes) from Bucket [%s]", key, byteCount, bucket))
.build());
} catch (final Exception e) {
- getLogger().error(String.format("Failed to fetch [%s] from Bucket [%s]", key, bucket), e);
+ getLogger().error("Failed to fetch [{}] from Bucket [{}]", key, bucket, e);
results.add(new ConfigVerificationResult.Builder()
.verificationStepName("HEAD S3 Object")
.outcome(Outcome.FAILED)
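Note the shape of the replacement in the catch block above: the exception stays as the final argument after the {} parameters. SLF4J, and NiFi's ComponentLog with it, treats a trailing Throwable that has no matching placeholder as the exception to record, so the stack trace is preserved without String.format. A hedged sketch with hypothetical names:

```java
import java.io.IOException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical sketch: a trailing Throwable with no matching {} is
// logged as the exception, stack trace included.
public class TrailingThrowableSketch {
    private static final Logger logger = LoggerFactory.getLogger(TrailingThrowableSketch.class);

    public static void main(String[] args) {
        try {
            throw new IOException("connection reset");
        } catch (IOException e) {
            // "data.csv" and "my-bucket" fill the two {} slots; e is treated
            // as the throwable because it is last and unmatched.
            logger.error("Failed to fetch [{}] from Bucket [{}]", "data.csv", "my-bucket", e);
        }
    }
}
```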
@@ -168,7 +168,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
contentMap.put("id", UUID.randomUUID().toString());
}
if (!contentMap.containsKey(partitionKeyField)) {
- logger.error(String.format("PutAzureCosmoDBRecord failed with missing partitionKeyField (%s)", partitionKeyField));
+ logger.error("PutAzureCosmoDBRecord failed with missing partitionKeyField ({})", partitionKeyField);
error = true;
break;
}
@@ -680,7 +680,7 @@ public IndexOperationResponse bulk(final List<IndexOperationRequest> operations,
parseResponseWarningHeaders(response);

if (getLogger().isDebugEnabled()) {
- getLogger().debug(String.format("Response was: %s", rawResponse));
+ getLogger().debug("Response was: {}", rawResponse);
}

return IndexOperationResponse.fromJsonResponse(rawResponse);
@@ -717,8 +717,7 @@ public DeleteOperationResponse deleteById(final String index, final String type,
watch.stop();

if (getLogger().isDebugEnabled()) {
- getLogger().debug(String.format("Response for bulk delete: %s",
-     IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8)));
+ getLogger().debug("Response for bulk delete: {}", IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8));
}

parseResponseWarningHeaders(response);
@@ -880,7 +879,7 @@ public String initialisePointInTime(final String index, final String keepAlive)
parseResponseWarningHeaders(response);

if (getLogger().isDebugEnabled()) {
- getLogger().debug(String.format("Response for initialising Point in Time: %s", body));
+ getLogger().debug("Response for initialising Point in Time: {}", body);
}

return (String) mapper.readValue(body, Map.class).get("id");
@@ -899,9 +898,7 @@ public DeleteOperationResponse deletePointInTime(final String pitId) {
watch.stop();

if (getLogger().isDebugEnabled()) {
- getLogger().debug(String.format("Response for deleting Point in Time: %s",
-     IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8))
- );
+ getLogger().debug("Response for deleting Point in Time: {}", IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8));
}

parseResponseWarningHeaders(response);
@@ -927,9 +924,7 @@ public DeleteOperationResponse deleteScroll(final String scrollId) {
watch.stop();

if (getLogger().isDebugEnabled()) {
- getLogger().debug(String.format("Response for deleting Scroll: %s",
-     IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8))
- );
+ getLogger().debug("Response for deleting Scroll: {}", IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8));
}

parseResponseWarningHeaders(response);
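The isDebugEnabled() guards in this file survive the change deliberately: {} substitution is lazy, but the argument expressions are still evaluated before the logger is called, and IOUtils.toString here reads (and consumes) the HTTP response entity. A sketch of the distinction, with a hypothetical helper:

```java
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical sketch: an explicit level check still matters when a
// log argument is expensive to compute.
public class DebugGuardSketch {
    private static final Logger logger = LoggerFactory.getLogger(DebugGuardSketch.class);

    static void logResponse(InputStream content) throws IOException {
        // Without the guard, IOUtils.toString would read the stream even
        // when DEBUG is disabled, because arguments are evaluated eagerly
        // at the call site.
        if (logger.isDebugEnabled()) {
            logger.debug("Response was: {}", IOUtils.toString(content, StandardCharsets.UTF_8));
        }
    }
}
```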
@@ -262,7 +262,7 @@ public List<ConfigVerificationResult> verify(final ProcessContext context, final
.explanation(String.format("Successfully fetched [%s] from Bucket [%s], totaling %s bytes", key, bucketName, byteCount))
.build());
} catch (final StorageException | IOException e) {
- getLogger().error(String.format("Failed to fetch [%s] from Bucket [%s]", key, bucketName), e);
+ getLogger().error("Failed to fetch [{}] from Bucket [{}]", key, bucketName, e);
results.add(new ConfigVerificationResult.Builder()
.verificationStepName("Fetch GCS Blob")
.outcome(Outcome.FAILED)
@@ -49,8 +49,8 @@ public class EmbeddedHazelcastCacheManager extends IMapBasedHazelcastCacheManage

private static final int DEFAULT_HAZELCAST_PORT = 5701;
private static final String PORT_SEPARATOR = ":";
- private static final String INSTANCE_CREATION_LOG = "Embedded Hazelcast server instance with instance name %s has been created successfully";
- private static final String MEMBER_LIST_LOG = "Hazelcast cluster will be created based on the NiFi cluster with the following members: %s";
+ private static final String INSTANCE_CREATION_LOG = "Embedded Hazelcast server instance with instance name {} has been created successfully";
+ private static final String MEMBER_LIST_LOG = "Hazelcast cluster will be created based on the NiFi cluster with the following members: {}";

private static final AllowableValue CLUSTER_NONE = new AllowableValue("none", "None", "No high availability or data replication is provided," +
" every node has access only to the data stored locally.");
@@ -140,18 +140,18 @@ protected HazelcastInstance getInstance(final ConfigurationContext context) {
.map(m -> m + PORT_SEPARATOR + port)
.collect(Collectors.toList());

- getLogger().info(String.format(MEMBER_LIST_LOG, hazelcastMembers.stream().collect(Collectors.joining(", "))));
+ getLogger().info(MEMBER_LIST_LOG, String.join(", ", hazelcastMembers));
tcpIpConfig.setMembers(hazelcastMembers);
result = Hazelcast.newHazelcastInstance(config);
- getLogger().info(String.format(INSTANCE_CREATION_LOG, instanceName));
+ getLogger().info(INSTANCE_CREATION_LOG, instanceName);

} else if (clusteringStrategy.equals(CLUSTER_EXPLICIT.getValue())) {
final List<String> hazelcastMembers = getHazelcastMemberHosts(context);

if (hazelcastMembers.contains(getNodeTypeProvider().getCurrentNode().get())) {
tcpIpConfig.setMembers(hazelcastMembers.stream().map(m -> m + PORT_SEPARATOR + port).collect(Collectors.toList()));
result = Hazelcast.newHazelcastInstance(config);
- getLogger().info(String.format(INSTANCE_CREATION_LOG, instanceName));
+ getLogger().info(INSTANCE_CREATION_LOG, instanceName);
} else {
result = getClientInstance(
clusterName,
@@ -164,7 +164,7 @@ protected HazelcastInstance getInstance(final ConfigurationContext context) {
}
} else if (clusteringStrategy.equals(CLUSTER_NONE.getValue())) {
result = Hazelcast.newHazelcastInstance(config);
- getLogger().info(String.format(INSTANCE_CREATION_LOG, instanceName));
+ getLogger().info(INSTANCE_CREATION_LOG, instanceName);
} else {
throw new ProcessException("Unknown Hazelcast Clustering Strategy!");
}
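This file moves the {} placeholders into the message constants themselves, and simplifies the member list with String.join(", ", hazelcastMembers) in place of the stream/Collectors.joining pipeline. One caveat worth keeping in mind: a constant written with {} can only be expanded by the logger; String.format would print the braces literally, since it understands only % conversions. A small sketch with hypothetical names:

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical sketch: {} message constants belong to the logger, not
// to String.format.
public class MessageConstantSketch {
    private static final Logger logger = LoggerFactory.getLogger(MessageConstantSketch.class);
    private static final String INSTANCE_CREATION_LOG =
            "Embedded Hazelcast server instance with instance name {} has been created successfully";

    public static void main(String[] args) {
        String instanceName = "nifi-node-1"; // hypothetical value

        // The logger fills the placeholder: "...instance name nifi-node-1..."
        logger.info(INSTANCE_CREATION_LOG, instanceName);

        // String.format(INSTANCE_CREATION_LOG, instanceName) would leave
        // "{}" untouched because it recognizes only % conversions.
    }
}
```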
@@ -197,7 +197,7 @@ public final void onConfigured(final LoginIdentityProviderConfigurationContext c
final String rawIdentityStrategy = configurationContext.getProperty("Identity Strategy");

if (StringUtils.isBlank(rawIdentityStrategy)) {
- logger.info(String.format("Identity Strategy is not configured, defaulting strategy to %s.", IdentityStrategy.USE_DN));
+ logger.info("Identity Strategy is not configured, defaulting strategy to {}.", IdentityStrategy.USE_DN);

// if this value is not configured, default to use dn which was the previous implementation
identityStrategy = IdentityStrategy.USE_DN;
@@ -282,7 +282,7 @@ public final AuthenticationResponse authenticate(final LoginCredentials credenti
final LdapUserDetails userDetails = (LdapUserDetails) authentication.getPrincipal();
return new AuthenticationResponse(userDetails.getDn(), credentials.getUsername(), expiration, issuer);
} else {
- logger.warn(String.format("Unable to determine user DN for %s, using username.", authentication.getName()));
+ logger.warn("Unable to determine user DN for {}, using username.", authentication.getName());
return new AuthenticationResponse(authentication.getName(), credentials.getUsername(), expiration, issuer);
}
} else {
@@ -601,8 +601,8 @@ protected Group doMapFromContext(DirContextOperations ctx) {
if (user != null) {
groupToUserIdentifierMappings.computeIfAbsent(referencedGroupValue, g -> new HashSet<>()).add(user.getIdentifier());
} else {
- logger.debug(String.format("%s contains member %s but that user was not found while searching users. This may be due "
-     + "to a misconfiguration or it's possible the user is not a NiFi user. Ignoring group membership.", name, userValue));
+ logger.debug("{} contains member {} but that user was not found while searching users. This may be due "
+     + "to a misconfiguration or it's possible the user is not a NiFi user. Ignoring group membership.", name, userValue);
}
} else {
// since performUserSearch is false, then the referenced group attribute must be blank... the user value must be the dn.
@@ -131,7 +131,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro

if (deleteMode.equals(DELETE_ATTR.getValue())
&& (StringUtils.isEmpty(deleteAttr) || !ALLOWED_DELETE_VALUES.contains(deleteAttr.toLowerCase()) )) {
- getLogger().error(String.format("%s is not an allowed value for mongodb.delete.mode", deleteAttr));
+ getLogger().error("{} is not an allowed value for mongodb.delete.mode", deleteAttr);
session.transfer(flowFile, REL_FAILURE);
return;
}
@@ -159,13 +159,13 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro

session.transfer(input, REL_SUCCESS);
} else {
- getLogger().error(String.format("Query %s did not delete anything in %s", deleteQuery, bucket.getBucketName()));
+ getLogger().error("Query {} did not delete anything in {}", deleteQuery, bucket.getBucketName());
session.transfer(input, REL_FAILURE);
}

cursor.close();
} catch (Exception ex) {
- getLogger().error(String.format("Error deleting using query: %s", deleteQuery), ex);
+ getLogger().error("Error deleting using query: {}", deleteQuery, ex);
session.transfer(input, REL_FAILURE);
}
}
@@ -572,8 +572,7 @@ public void onTrigger(ProcessContext context, ProcessSessionFactory sessionFacto
// ensure the processor did not fail to reload at some point
final Collection<ValidationResult> results = validationResults.get();
if (!results.isEmpty()) {
- log.error(String.format("Unable to run because the Processor is not valid: [%s]",
-     StringUtils.join(results, ", ")));
+ log.error("Unable to run because the Processor is not valid: [{}]", StringUtils.join(results, ", "));
context.yield();
return;
}
@@ -97,7 +97,7 @@ public static Collection<SearchResult> search(final SearchContext context, final
try (final InputStream in = scriptFile.read()) {
script = IOUtils.toString(in, StandardCharsets.UTF_8);
} catch (Exception e) {
- logger.error(String.format("Could not read from path %s", scriptFile), e);
+ logger.error("Could not read from path {}", scriptFile, e);
return results;
}
}
@@ -619,7 +619,7 @@ private void closeConnection(Connection connection, boolean originalAutoCommit)
connection.setAutoCommit(originalAutoCommit);
}
} catch (final Exception autoCommitException) {
- getLogger().warn(String.format("Failed to set auto-commit back to %s on connection", originalAutoCommit), autoCommitException);
+ getLogger().warn("Failed to set auto-commit back to {} on connection", originalAutoCommit, autoCommitException);
}

try {
@@ -1599,8 +1599,7 @@ private boolean isSupportsBatchUpdates(Connection connection) {
try {
return connection.getMetaData().supportsBatchUpdates();
} catch (Exception ex) {
- getLogger().debug(String.format("Exception while testing if connection supportsBatchUpdates due to %s - %s",
-     ex.getClass().getName(), ex.getMessage()));
+ getLogger().debug("Exception while testing if connection supportsBatchUpdates", ex);
return false;
}
}
@@ -558,7 +558,7 @@ public void ensureDirectoryExists(final FlowFile flowFile, final File directoryN
} catch (SFTPException e) {
if (e.getStatusCode() == Response.StatusCode.NO_SUCH_FILE) {
// No Such File. This happens when parent directory was not found.
- logger.debug(String.format("Could not create %s due to 'No such file'. Will try to create the parent dir.", remoteDirectory));
+ logger.debug("Could not create {} due to 'No such file'. Will try to create the parent dir.", remoteDirectory);
} else if (e.getStatusCode() == Response.StatusCode.FAILURE) {
// Swallow '4: Failure' including the remote directory already exists.
logger.debug("Could not blindly create remote directory", e);
@@ -645,7 +645,7 @@ private boolean evaluateCondition(final ProcessContext context, final Condition
// evaluate the expression for the given flow file
return getPropertyValue(condition.getExpression(), context).evaluateAttributeExpressions(flowfile, null, null, statefulAttributes).asBoolean();
} catch (final Exception e) {
- getLogger().error(String.format("Could not evaluate the condition '%s' while processing Flowfile '%s'", condition.getExpression(), flowfile));
+ getLogger().error("Could not evaluate the condition '{}' while processing Flowfile '{}'", condition.getExpression(), flowfile);
throw new ProcessException(String.format("Unable to evaluate condition '%s': %s.", condition.getExpression(), e), e);
}
}
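
String.format deliberately survives on the ProcessException line above: an exception message is built unconditionally at throw time, there is no log level to defer for, and {} placeholders would not be expanded there. A hedged sketch with a hypothetical condition value:

```java
import org.apache.nifi.processor.exception.ProcessException;

// Hypothetical sketch: String.format remains the right tool for
// exception messages, which have no deferred-logging path.
public class ExceptionMessageSketch {
    static void fail(String expression) {
        throw new ProcessException(String.format("Unable to evaluate condition '%s'.", expression));
    }

    public static void main(String[] args) {
        fail("${attr:equals('x')}"); // hypothetical Expression Language condition
    }
}
```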
@@ -700,7 +700,7 @@ private FlowFile executeActions(final ProcessSession session, final ProcessConte

// log if appropriate
if (debugEnabled) {
- logger.debug(String.format("%s deleting attribute '%s' for %s per regex '%s'.", this, key, flowfile, regex));
+ logger.debug("{} deleting attribute '{}' for {} per regex '{}'.", this, key, flowfile, regex);
}

attributesToDelete.add(key);
@@ -710,7 +710,7 @@ private FlowFile executeActions(final ProcessSession session, final ProcessConte
attributesToUpdate.keySet().removeAll(attributesToDelete);
}
} catch (final Exception e) {
- logger.error(String.format("Unable to delete attribute '%s' while processing FlowFile '%s' .", attribute, flowfile));
+ logger.error("Unable to delete attribute '{}' while processing FlowFile '{}' .", attribute, flowfile);
throw new ProcessException(String.format("Unable to delete attribute '%s': %s.", attribute, e), e);
}
} else {
@@ -724,7 +724,7 @@ private FlowFile executeActions(final ProcessSession session, final ProcessConte

// log if appropriate
if (debugEnabled) {
- logger.debug(String.format("%s setting attribute '%s' = '%s' for %s per rule '%s'.", this, attribute, newAttributeValue, flowfile, ruleName));
+ logger.debug("{} setting attribute '{}' = '{}' for {} per rule '{}'.", this, attribute, newAttributeValue, flowfile, ruleName);
}

if (setStatefulAttribute) {
@@ -346,7 +346,7 @@ private Authorizer createAuthorizer(final String identifier, final String author

// if additional classpath resources were specified, replace with a new ClassLoader that wraps the original one
if (StringUtils.isNotEmpty(classpathResources)) {
- logger.info(String.format("Replacing Authorizer ClassLoader for '%s' to include additional resources: %s", identifier, classpathResources));
+ logger.info("Replacing Authorizer ClassLoader for '{}' to include additional resources: {}", identifier, classpathResources);
URL[] urls = ClassLoaderUtils.getURLsForClasspath(classpathResources, null, true);
authorizerClassLoader = new URLClassLoader(urls, authorizerClassLoader);
}
@@ -41,7 +41,7 @@ public void testShouldBuildProxyChain() {
final NiFiUser mockJohn = createMockNiFiUser(SAFE_USER_NAME_JOHN, mockProxy1, false);

final List<String> proxiedEntitiesChain = NiFiUserUtils.buildProxiedEntitiesChain(mockJohn);
- logger.info(String.format("Proxied entities chain: %s", proxiedEntitiesChain));
+ logger.info("Proxied entities chain: {}", proxiedEntitiesChain);

assertEquals(proxiedEntitiesChain, Arrays.asList(SAFE_USER_NAME_JOHN, SAFE_USER_NAME_PROXY_1));
}
@@ -61,15 +61,15 @@ public void testShouldBuildProxyChainFromAnonymousUser() throws Exception {
final NiFiUser mockAnonymous = createMockNiFiUser("anonymous", mockProxy1, true);

final List<String> proxiedEntitiesChain = NiFiUserUtils.buildProxiedEntitiesChain(mockAnonymous);
- logger.info(String.format("Proxied entities chain: %s", proxiedEntitiesChain));
+ logger.info("Proxied entities chain: {}", proxiedEntitiesChain);

assertEquals(proxiedEntitiesChain, Arrays.asList("", SAFE_USER_NAME_PROXY_1));
}

@Test
public void testBuildProxyChainFromNullUserShouldBeEmpty() {
final List<String> proxiedEntitiesChain = NiFiUserUtils.buildProxiedEntitiesChain(null);
- logger.info(String.format("Proxied entities chain: %s", proxiedEntitiesChain));
+ logger.info("Proxied entities chain: {}", proxiedEntitiesChain);

assertEquals(proxiedEntitiesChain, Collections.EMPTY_LIST);
}