exceptionfactory commented on code in PR #8896:
URL: https://github.com/apache/nifi/pull/8896#discussion_r1621169344


##########
nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java:
##########
@@ -223,8 +223,7 @@ private void deleteMessages(final SqsClient client, final String queueUrl, final
         try {
             client.deleteMessageBatch(deleteRequest);
         } catch (final Exception e) {
-            getLogger().error("Received {} messages from Amazon SQS but failed to delete the messages; these messages"
-                + " may be duplicated. Reason for deletion failure: {}", new Object[]{messages.size(), e});
+            getLogger().error("Received {} messages from Amazon SQS but failed to delete the messages; these messages may be duplicated. Reason for deletion failure: ", messages.size(), e);

Review Comment:
   ```suggestion
               getLogger().error("Received {} messages from Amazon SQS but failed to delete the messages; these messages may be duplicated", messages.size(), e);
   ```
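
   For context on the pattern these suggestions enforce: NiFi's ComponentLog follows the SLF4J convention that a Throwable passed as the final argument, with no `{}` placeholder left over for it, is logged together with its full stack trace, so neither a trailing `{}` nor a trailing colon is needed. A minimal sketch in plain SLF4J, assuming the same last-argument semantics apply; the class name is hypothetical:

   ```java
   import org.slf4j.Logger;
   import org.slf4j.LoggerFactory;

   import java.util.List;

   // Hypothetical class illustrating the Throwable-as-last-argument convention.
   class SqsDeleteSketch {
       private static final Logger logger = LoggerFactory.getLogger(SqsDeleteSketch.class);

       void deleteMessages(final List<String> messages) {
           try {
               throw new IllegalStateException("simulated delete failure");
           } catch (final Exception e) {
               // The single {} consumes messages.size(); the trailing Throwable is
               // not matched to a placeholder, so the logger appends its stack trace.
               logger.error("Received {} messages from Amazon SQS but failed to delete the messages; these messages may be duplicated", messages.size(), e);
           }
       }
   }
   ```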



##########
nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java:
##########
@@ -925,12 +908,10 @@ protected MultipartUploadListing getS3AgeoffListAndAgeoffLocalState(final Proces
                     getLogger().warn("AccessDenied checking S3 Multipart Upload list for {}: {} " +
                             "** The configured user does not have the s3:ListBucketMultipartUploads permission " +
                             "for this bucket, S3 ageoff cannot occur without this permission.  Next ageoff check " +
-                            "time is being advanced by interval to prevent checking on every upload **",
-                            new Object[]{bucket, e.getMessage()});
+                            "time is being advanced by interval to prevent checking on every upload **", bucket, e.getMessage());
                     lastS3AgeOff.set(System.currentTimeMillis());
                 } else {
-                    getLogger().error("Error checking S3 Multipart Upload list for {}: {}",
-                            new Object[]{bucket, e.getMessage()});
+                    getLogger().error("Error checking S3 Multipart Upload list for {}:", bucket, e);

Review Comment:
   ```suggestion
                       getLogger().error("Error checking S3 Multipart Upload list for {}", bucket, e);
   ```



##########
nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java:
##########
@@ -181,7 +181,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 session.getProvenanceReporter().fetch(outgoingFlowFile, qualifiedPath.toString(), stopWatch.getDuration(TimeUnit.MILLISECONDS));
                 session.transfer(outgoingFlowFile, getSuccessRelationship());
             } catch (final FileNotFoundException | AccessControlException e) {
-                getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", new Object[]{qualifiedPath, outgoingFlowFile, e});
+                getLogger().error("Routing to failure since failed to retrieve content from {} for {}", qualifiedPath, outgoingFlowFile, e);

Review Comment:
   ```suggestion
                   getLogger().error("Failed to retrieve content from {} for {}", qualifiedPath, outgoingFlowFile, e);
   ```



##########
nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/inotify/GetHDFSEvents.java:
##########
@@ -223,15 +223,15 @@ public void process(OutputStream out) throws IOException {
                 lastTxId = eventBatch.getTxid();
             }
         } catch (IOException | InterruptedException e) {
-            getLogger().error("Unable to get notification information: {}", new Object[]{e});
+            getLogger().error("Unable to get notification information:", e);

Review Comment:
   ```suggestion
               getLogger().error("Unable to get notification information", e);
   ```



##########
nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/KeyValueReader.java:
##########
@@ -68,7 +68,7 @@ public Set<FlowFile> readSequenceFile(Path file, Configuration configuration, Fi
         final KeyValueWriterCallback callback = new KeyValueWriterCallback(reader);
         final String inputfileName = file.getName() + "." + System.nanoTime() + ".";
         int counter = 0;
-        LOG.debug("Read from SequenceFile: {} ", new Object[]{file});
+        LOG.debug("Read from SequenceFile: {} ", file);

Review Comment:
   ```suggestion
           LOG.debug("Read from SequenceFile: {}", file);
   ```



##########
nifi-extension-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/AbstractPutHBase.java:
##########
@@ -228,15 +228,15 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             } catch (final Exception e) {
                 getLogger().error(e.getMessage(), e);
                 for (PutFlowFile putFlowFile : entry.getValue()) {
-                    getLogger().error("Failed to send {} to HBase due to {}; routing to failure", new Object[]{putFlowFile.getFlowFile(), e});
+                    getLogger().error("Routing to failure since failed to send {} to HBase due to ", putFlowFile.getFlowFile(), e);

Review Comment:
   ```suggestion
                       getLogger().error("Failed to send {} to HBase ", putFlowFile.getFlowFile(), e);
   ```



##########
nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java:
##########
@@ -133,7 +133,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
         try {
             path = getNormalizedPath(getPath(context, flowFile));
         } catch (IllegalArgumentException e) {
-            getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", new Object[] {filenameValue, flowFile, e});
+            getLogger().error("Routing to failure since failed to retrieve content from {} for {}", filenameValue, flowFile, e);

Review Comment:
   ```suggestion
               getLogger().error("Failed to retrieve content from {} for {}", filenameValue, flowFile, e);
   ```



##########
nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java:
##########
@@ -1260,20 +1260,20 @@ private FileChannel createReader(final File file, final long position) {
         try {
             reader = FileChannel.open(file.toPath(), StandardOpenOption.READ);
         } catch (final IOException ioe) {
-            getLogger().warn("Unable to open file {}; will attempt to access file again after the configured Yield Duration has elapsed: {}", new Object[]{file, ioe});
+            getLogger().warn("Unable to open file {}; will attempt to access file again after the configured Yield Duration has elapsed: ", file, ioe);

Review Comment:
   ```suggestion
               getLogger().warn("Unable to open file {}; will attempt to access file again after the configured Yield Duration has elapsed", file, ioe);
   ```



##########
nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java:
##########
@@ -254,21 +254,21 @@ private void updateMapping(final ProcessContext context) {
                         if (file.lastModified() > lastModified.get()) {
                             lastModified.getAndSet(file.lastModified());
                             try (FileInputStream is = new FileInputStream(file)) {
-                                logger.info("Reloading mapping file: {}", new Object[]{fileName});
+                                logger.info("Reloading mapping file: {}", fileName);
 
                                 final Map<String, String> mapping = loadMappingFile(is);
                                 final ConfigurationState newState = new ConfigurationState(mapping);
                                 configurationStateRef.set(newState);
                             } catch (IOException e) {
-                                logger.error("Error reading mapping file: {}", new Object[]{e.getMessage()});
+                                logger.error("Error reading mapping file: ", e);

Review Comment:
   ```suggestion
                                   logger.error("Error reading mapping file", 
e);
   ```
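
   Related to the change above: the original call logged `e.getMessage()`, which drops the stack trace entirely (and prints "null" for exceptions constructed without a message). Passing the exception object itself as the trailing argument keeps the trace. A brief before-and-after sketch in plain SLF4J; the class name is hypothetical:

   ```java
   import java.io.IOException;

   import org.slf4j.Logger;
   import org.slf4j.LoggerFactory;

   // Hypothetical class contrasting getMessage() with the Throwable itself.
   class MappingReloadSketch {
       private static final Logger logger = LoggerFactory.getLogger(MappingReloadSketch.class);

       void reload() {
           try {
               throw new IOException("mapping file unreadable");
           } catch (final IOException e) {
               // Before: only the message text is logged; the stack trace is lost.
               logger.error("Error reading mapping file: {}", e.getMessage());
               // After: the Throwable in the last position is logged with its stack trace.
               logger.error("Error reading mapping file", e);
           }
       }
   }
   ```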



##########
nifi-extension-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java:
##########
@@ -364,7 +364,7 @@ public final void abstractOnScheduled(ProcessContext context) throws IOException
                 hdfsResources.set(resources);
             }
         } catch (Exception ex) {
-            getLogger().error("HDFS Configuration error - {}", new Object[]{ex});
+            getLogger().error("HDFS Configuration error -", ex);

Review Comment:
   ```suggestion
               getLogger().error("HDFS Configuration failed", ex);
   ```



##########
nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java:
##########
@@ -615,10 +615,10 @@ public FileVisitResult visitFile(final Path path, final BasicFileAttributes attr
                 @Override
                 public FileVisitResult visitFileFailed(final Path path, final IOException e) {
                     if (e instanceof AccessDeniedException) {
-                        getLogger().debug("The following file is not readable: {}", new Object[]{path.toString()});
+                        getLogger().debug("The following file is not readable: {}", path);
                         return FileVisitResult.SKIP_SUBTREE;
                     } else {
-                        getLogger().error("Error during visiting file {}: {}", path.toString(), e.getMessage(), e);
+                        getLogger().error("Error during visiting file {}: ", path, e);

Review Comment:
   ```suggestion
                           getLogger().error("Error during visiting file {}", 
path, e);
   ```
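
   A second detail in this hunk: the original passed `path.toString()`, which converts eagerly even when the level is disabled. SLF4J-style parameterized logging only renders `{}` arguments when the message is actually emitted, so passing the `Path` defers the `toString()` call. A small sketch under that assumption; the class name is hypothetical:

   ```java
   import java.nio.file.Path;

   import org.slf4j.Logger;
   import org.slf4j.LoggerFactory;

   // Hypothetical class illustrating deferred argument rendering.
   class FileVisitSketch {
       private static final Logger logger = LoggerFactory.getLogger(FileVisitSketch.class);

       void onAccessDenied(final Path path) {
           // If DEBUG is disabled, path is never rendered; an explicit
           // path.toString() would have run the conversion regardless.
           logger.debug("The following file is not readable: {}", path);
       }

       void onVisitFailed(final Path path, final Exception e) {
           // One placeholder for the path; the trailing Throwable carries the stack trace.
           logger.error("Error during visiting file {}", path, e);
       }
   }
   ```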



##########
nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestListenHTTP.java:
##########
@@ -651,10 +651,10 @@ private int startWebServer() {
             try {
                 socket.connect(socketAddress, SOCKET_CONNECT_TIMEOUT);
                 connected = true;
-                runner.getLogger().debug("Server Socket Connected after {} ms", new Object[]{elapsed});
+                runner.getLogger().debug("Server Socket Connected after {} ms", elapsed);
                 socket.close();
             } catch (final Exception e) {
-                runner.getLogger().debug("Server Socket Connect Failed: [{}] {}", new Object[]{e.getClass(), e.getMessage()});
+                runner.getLogger().debug("Server Socket Connect Failed:", e);

Review Comment:
   ```suggestion
                   runner.getLogger().debug("Server Socket Connect Failed", e);
   ```



##########
nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/inotify/GetHDFSEvents.java:
##########
@@ -223,15 +223,15 @@ public void process(OutputStream out) throws IOException {
                 lastTxId = eventBatch.getTxid();
             }
         } catch (IOException | InterruptedException e) {
-            getLogger().error("Unable to get notification information: {}", new Object[]{e});
+            getLogger().error("Unable to get notification information:", e);
             context.yield();
             return;
         } catch (MissingEventsException e) {
             // set lastTxId to -1 and update state. This may cause events not to be processed. The reason this exception is thrown is described in the
             // org.apache.hadoop.hdfs.client.HdfsAdmin#getInotifyEventStrea API. It suggests tuning a couple parameters if this API is used.
             lastTxId = -1L;
             getLogger().error("Unable to get notification information. Setting transaction id to -1. This may cause some events to get missed. " +
-                    "Please see javadoc for org.apache.hadoop.hdfs.client.HdfsAdmin#getInotifyEventStream: {}", new Object[]{e});
+                    "Please see javadoc for org.apache.hadoop.hdfs.client.HdfsAdmin#getInotifyEventStream:", e);

Review Comment:
   ```suggestion
                       "Please see javadoc for 
org.apache.hadoop.hdfs.client.HdfsAdmin#getInotifyEventStream", e);
   ```



##########
nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java:
##########
@@ -254,21 +254,21 @@ private void updateMapping(final ProcessContext context) {
                         if (file.lastModified() > lastModified.get()) {
                             lastModified.getAndSet(file.lastModified());
                             try (FileInputStream is = new FileInputStream(file)) {
-                                logger.info("Reloading mapping file: {}", new Object[]{fileName});
+                                logger.info("Reloading mapping file: {}", fileName);
 
                                 final Map<String, String> mapping = loadMappingFile(is);
                                 final ConfigurationState newState = new ConfigurationState(mapping);
                                 configurationStateRef.set(newState);
                             } catch (IOException e) {
-                                logger.error("Error reading mapping file: {}", new Object[]{e.getMessage()});
+                                logger.error("Error reading mapping file: ", e);
                             }
                         }
                     } else {
-                        logger.error("Mapping file does not exist or is not readable: {}", new Object[]{fileName});
+                        logger.error("Mapping file does not exist or is not readable: {}", fileName);
                     }
                 }
             } catch (Exception e) {
-                logger.error("Error loading mapping file: {}", new Object[]{e.getMessage()});
+                logger.error("Error loading mapping file: ", e);

Review Comment:
   ```suggestion
                   logger.error("Error loading mapping file", e);
   ```


