(nifi) branch main updated: NIFI-12801 Add local file upload option in PutHDFS processor

2024-06-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new a21c2544ad NIFI-12801 Add local file upload option in PutHDFS processor
a21c2544ad is described below

commit a21c2544ad6c961b7ba8330481d462981e5e0d7d
Author: shubhamsharma 
AuthorDate: Thu Feb 15 10:31:09 2024 -0800

NIFI-12801 Add local file upload option in PutHDFS processor

This closes #8415.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-hdfs-processors/pom.xml   | 15 +
 .../org/apache/nifi/processors/hadoop/PutHDFS.java | 14 -
 .../apache/nifi/processors/hadoop/PutHDFSTest.java | 64 ++
 3 files changed, 91 insertions(+), 2 deletions(-)

diff --git 
a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/pom.xml 
b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/pom.xml
index a405c449a2..22eede1958 100644
--- a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/pom.xml
+++ b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/pom.xml
@@ -112,6 +112,21 @@
 org.apache.nifi
 nifi-kerberos-user-service-api
 
+
+org.apache.nifi
+nifi-resource-transfer
+2.0.0-SNAPSHOT
+
+
+org.apache.nifi
+nifi-file-resource-service-api
+
+
+org.apache.nifi
+nifi-file-resource-service
+2.0.0-SNAPSHOT
+test
+
 
 com.github.ben-manes.caffeine
 caffeine
diff --git 
a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
 
b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
index f2d1ed111e..8acce88fd2 100644
--- 
a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
+++ 
b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
@@ -48,6 +48,7 @@ import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.fileresource.service.api.FileResource;
 import org.apache.nifi.components.PropertyValue;
 import org.apache.nifi.components.RequiredPermission;
 import org.apache.nifi.components.ValidationContext;
@@ -80,6 +81,11 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+import java.io.InputStream;
+import org.apache.nifi.processors.transfer.ResourceTransferSource;
+import static 
org.apache.nifi.processors.transfer.ResourceTransferProperties.FILE_RESOURCE_SERVICE;
+import static 
org.apache.nifi.processors.transfer.ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE;
+import static 
org.apache.nifi.processors.transfer.ResourceTransferUtils.getFileResource;
 
 /**
  * This processor copies FlowFiles to HDFS.
@@ -260,6 +266,8 @@ public class PutHDFS extends AbstractHadoopProcessor {
 props.add(REMOTE_GROUP);
 props.add(COMPRESSION_CODEC);
 props.add(IGNORE_LOCALITY);
+props.add(RESOURCE_TRANSFER_SOURCE);
+props.add(FILE_RESOURCE_SERVICE);
 return props;
 }
 
@@ -402,7 +410,9 @@ public class PutHDFS extends AbstractHadoopProcessor {
 
 // Write FlowFile to temp file on HDFS
 final StopWatch stopWatch = new StopWatch(true);
-session.read(putFlowFile, in -> {
+final ResourceTransferSource resourceTransferSource = 
context.getProperty(RESOURCE_TRANSFER_SOURCE).asAllowableValue(ResourceTransferSource.class);
+try (final InputStream in = 
getFileResource(resourceTransferSource, context, flowFile.getAttributes())
+.map(FileResource::getInputStream).orElseGet(() -> 
session.read(flowFile))) {
 OutputStream fos = null;
 Path createdFile = null;
 try {
@@ -463,7 +473,7 @@ public class PutHDFS extends AbstractHadoopProcessor {
 }
 fos = null;
 }
-});
+}
 stopWatch.stop();
 final String dataRate = 
stopWatch.calculateDataRate(putFlowFile.getSize());
 final long millis = 
stopWatch.getDuration(TimeUnit.MILLISECONDS);
diff --git 
a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/proc

(nifi) branch support/nifi-1.x updated: NIFI-13072: Fix MonitorActivity problems with cluster scope flow monitoring

2024-05-12 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 2ea497f81c NIFI-13072: Fix MonitorActivity problems with cluster scope 
flow monitoring
2ea497f81c is described below

commit 2ea497f81c83606e2daa931b5ad7cb0a267ae850
Author: Rajmund Takacs 
AuthorDate: Thu Apr 4 17:09:59 2024 +0200

NIFI-13072: Fix MonitorActivity problems with cluster scope flow monitoring

This closes #8669.

Signed-off-by: Peter Turcsanyi 

(cherry picked from commit bffacdec982a9f3f0bf6004bcecbd21d1049e401)
---
 .../nifi/processors/standard/MonitorActivity.java  | 624 +
 .../processors/standard/TestMonitorActivity.java   | 615 +---
 2 files changed, 942 insertions(+), 297 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
index 7021f97fc9..43efc3f82b 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
@@ -16,6 +16,20 @@
  */
 package org.apache.nifi.processors.standard;
 
+import static java.util.Collections.singletonMap;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
@@ -30,7 +44,6 @@ import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.state.Scope;
 import org.apache.nifi.components.state.StateManager;
 import org.apache.nifi.components.state.StateMap;
@@ -44,23 +57,7 @@ import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.util.StringUtils;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
 
 @SideEffectFree
 @TriggerSerially
@@ -72,10 +69,18 @@ import java.util.concurrent.atomic.AtomicLong;
 @WritesAttributes({
 @WritesAttribute(attribute = "inactivityStartMillis", description = "The 
time at which Inactivity began, in the form of milliseconds since Epoch"),
 @WritesAttribute(attribute = "inactivityDurationMillis", description = 
"The number of milliseconds that the inactivity has spanned")})
-@Stateful(scopes = Scope.CLUSTER, description = "MonitorActivity stores the 
last timestamp at each node as state, so that it can examine activity at 
cluster wide." +
-"If 'Copy Attribute' is set to true, then flow file attributes are 
also persisted.")
+@Stateful(
+scopes = { Scope.CLUSTER, Scope.LOCAL },
+description = "MonitorActivity stores the last timestamp at each node 
as state, "
++ "so that it can examine activity at cluster wide. "
++ "If 'Copy Attribute' is set to true, then flow file 
attributes are also persisted. "
++ "In local scope, it stores last known activity timestamp if 
the flow is inactive."
+)
 public class MonitorActivity extends AbstractProcessor {
 
+public static final String STATE_KEY_COMMON_FLOW_ACTIVITY_INFO = 
"CommonFlowActivityInfo.lastSuccessfulTransfer";
+public static final String STATE_KEY_LOCAL_F

(nifi) branch main updated: NIFI-13072: Fix MonitorActivity problems with cluster scope flow monitoring

2024-05-12 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new bffacdec98 NIFI-13072: Fix MonitorActivity problems with cluster scope 
flow monitoring
bffacdec98 is described below

commit bffacdec982a9f3f0bf6004bcecbd21d1049e401
Author: Rajmund Takacs 
AuthorDate: Thu Apr 4 17:09:59 2024 +0200

NIFI-13072: Fix MonitorActivity problems with cluster scope flow monitoring

This closes #8669.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi/processors/standard/MonitorActivity.java  | 624 +
 .../processors/standard/TestMonitorActivity.java   | 615 +---
 2 files changed, 942 insertions(+), 297 deletions(-)

diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
index 656c4ab11e..00301e1b5e 100644
--- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
+++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
@@ -16,6 +16,20 @@
  */
 package org.apache.nifi.processors.standard;
 
+import static java.util.Collections.singletonMap;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
@@ -30,7 +44,6 @@ import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.state.Scope;
 import org.apache.nifi.components.state.StateManager;
 import org.apache.nifi.components.state.StateMap;
@@ -44,23 +57,7 @@ import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.util.StringUtils;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
 
 @SideEffectFree
 @TriggerSerially
@@ -72,10 +69,18 @@ import java.util.concurrent.atomic.AtomicLong;
 @WritesAttributes({
 @WritesAttribute(attribute = "inactivityStartMillis", description = "The 
time at which Inactivity began, in the form of milliseconds since Epoch"),
 @WritesAttribute(attribute = "inactivityDurationMillis", description = 
"The number of milliseconds that the inactivity has spanned")})
-@Stateful(scopes = Scope.CLUSTER, description = "MonitorActivity stores the 
last timestamp at each node as state, so that it can examine activity at 
cluster wide." +
-"If 'Copy Attribute' is set to true, then flow file attributes are 
also persisted.")
+@Stateful(
+scopes = { Scope.CLUSTER, Scope.LOCAL },
+description = "MonitorActivity stores the last timestamp at each node 
as state, "
++ "so that it can examine activity at cluster wide. "
++ "If 'Copy Attribute' is set to true, then flow file 
attributes are also persisted. "
++ "In local scope, it stores last known activity timestamp if 
the flow is inactive."
+)
 public class MonitorActivity extends AbstractProcessor {
 
+public static final String STATE_KEY_COMMON_FLOW_ACTIVITY_INFO = 
"CommonFlowActivityInfo.lastSuccessfulTransfer";
+public static final String STATE_KEY_LOCAL_FLOW_ACTIVITY_INFO = 
"LocalFlowActivityInfo.lastSuccessfulTransfer";
+

(nifi) branch support/nifi-1.x updated: NIFI-12890: Refactor HadoopDBCPConnectionPool to extend AbstractDBCPConnectionPool

2024-04-11 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new b2842fd00a NIFI-12890: Refactor HadoopDBCPConnectionPool to extend 
AbstractDBCPConnectionPool
b2842fd00a is described below

commit b2842fd00af1728bc479b899fd99b8a092e82884
Author: lehelb 
AuthorDate: Wed Mar 13 14:22:19 2024 -0500

NIFI-12890: Refactor HadoopDBCPConnectionPool to extend 
AbstractDBCPConnectionPool

This closes #8623.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-hadoop-dbcp-service/pom.xml   |   5 +
 .../apache/nifi/dbcp/HadoopDBCPConnectionPool.java | 370 +++--
 .../nifi/dbcp/HadoopDBCPConnectionPoolTest.java|  15 +-
 3 files changed, 126 insertions(+), 264 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/pom.xml
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/pom.xml
index f2624a37f5..b138e1cd62 100644
--- 
a/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/pom.xml
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/pom.xml
@@ -28,6 +28,11 @@
 1.26.0-SNAPSHOT
 provided
 
+
+org.apache.nifi
+nifi-dbcp-base
+1.26.0-SNAPSHOT
+
 
 org.apache.nifi
 nifi-api
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/src/main/java/org/apache/nifi/dbcp/HadoopDBCPConnectionPool.java
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/src/main/java/org/apache/nifi/dbcp/HadoopDBCPConnectionPool.java
index 8293386564..17917f8489 100644
--- 
a/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/src/main/java/org/apache/nifi/dbcp/HadoopDBCPConnectionPool.java
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/src/main/java/org/apache/nifi/dbcp/HadoopDBCPConnectionPool.java
@@ -30,15 +30,15 @@ import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnDisabled;
 import org.apache.nifi.annotation.lifecycle.OnEnabled;
 import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.PropertyValue;
 import org.apache.nifi.components.RequiredPermission;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.resource.ResourceCardinality;
 import org.apache.nifi.components.resource.ResourceType;
-import org.apache.nifi.controller.AbstractControllerService;
 import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.controller.ControllerServiceInitializationContext;
+import org.apache.nifi.dbcp.utils.DBCPProperties;
+import org.apache.nifi.dbcp.utils.DataSourceConfiguration;
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.hadoop.SecurityUtil;
@@ -50,7 +50,6 @@ import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.security.krb.KerberosKeytabUser;
 import org.apache.nifi.security.krb.KerberosLoginException;
 import org.apache.nifi.security.krb.KerberosPasswordUser;
-import org.apache.nifi.security.krb.KerberosUser;
 
 import javax.security.auth.login.LoginException;
 import java.io.File;
@@ -58,22 +57,38 @@ import java.io.IOException;
 import java.lang.reflect.UndeclaredThrowableException;
 import java.security.PrivilegedExceptionAction;
 import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverManager;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 
+import static org.apache.nifi.dbcp.utils.DBCPProperties.DATABASE_URL;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.DB_DRIVERNAME;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.DB_PASSWORD;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.DB_USER;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.EVICTION_RUN_PERIOD;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.KERBEROS_USER_SERVICE;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.MAX_CONN_LIFETIME;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.MAX_IDLE;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.MAX_TOTAL_CONNECTIONS;
+import static

(nifi) branch main updated: NIFI-12890: Refactor HadoopDBCPConnectionPool to extend AbstractDBCPConnectionPool

2024-04-11 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 419a9cc73b NIFI-12890: Refactor HadoopDBCPConnectionPool to extend 
AbstractDBCPConnectionPool
419a9cc73b is described below

commit 419a9cc73bfe5410c0ff399f4feb8e6350b7609d
Author: lehelb 
AuthorDate: Wed Mar 13 14:22:19 2024 -0500

NIFI-12890: Refactor HadoopDBCPConnectionPool to extend 
AbstractDBCPConnectionPool

This closes #8619.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-hadoop-dbcp-service/pom.xml   |   5 +
 .../apache/nifi/dbcp/HadoopDBCPConnectionPool.java | 374 +++--
 .../nifi/dbcp/HadoopDBCPConnectionPoolTest.java|  15 +-
 3 files changed, 135 insertions(+), 259 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/pom.xml
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/pom.xml
index 37a0370672..396c82bd1a 100644
--- 
a/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/pom.xml
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/pom.xml
@@ -28,6 +28,11 @@
 2.0.0-SNAPSHOT
 provided
 
+
+org.apache.nifi
+nifi-dbcp-base
+2.0.0-SNAPSHOT
+
 
 org.apache.nifi
 nifi-api
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/src/main/java/org/apache/nifi/dbcp/HadoopDBCPConnectionPool.java
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/src/main/java/org/apache/nifi/dbcp/HadoopDBCPConnectionPool.java
index 5d75daf2ca..09c6302990 100644
--- 
a/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/src/main/java/org/apache/nifi/dbcp/HadoopDBCPConnectionPool.java
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hadoop-dbcp-service-bundle/nifi-hadoop-dbcp-service/src/main/java/org/apache/nifi/dbcp/HadoopDBCPConnectionPool.java
@@ -16,21 +16,6 @@
  */
 package org.apache.nifi.dbcp;
 
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.UndeclaredThrowableException;
-import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-import javax.security.auth.login.LoginException;
 import org.apache.commons.dbcp2.BasicDataSource;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -45,15 +30,15 @@ import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnDisabled;
 import org.apache.nifi.annotation.lifecycle.OnEnabled;
 import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.PropertyValue;
 import org.apache.nifi.components.RequiredPermission;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.resource.ResourceCardinality;
 import org.apache.nifi.components.resource.ResourceType;
-import org.apache.nifi.controller.AbstractControllerService;
 import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.controller.ControllerServiceInitializationContext;
+import org.apache.nifi.dbcp.utils.DBCPProperties;
+import org.apache.nifi.dbcp.utils.DataSourceConfiguration;
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.hadoop.SecurityUtil;
@@ -64,15 +49,45 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.security.krb.KerberosKeytabUser;
 import org.apache.nifi.security.krb.KerberosLoginException;
 import org.apache.nifi.security.krb.KerberosPasswordUser;
-import org.apache.nifi.security.krb.KerberosUser;
+
+import javax.security.auth.login.LoginException;
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.apache.nifi.dbcp.utils.DBCPProperties.DATABASE_URL

(nifi) branch main updated: NIFI-13014 Remove unused Avatica versions from code-coverage

2024-04-09 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 63fc1aeceb NIFI-13014 Remove unused Avatica versions from code-coverage
63fc1aeceb is described below

commit 63fc1aecebd6bff7478f102b6fcc489d8018bebe
Author: exceptionfactory 
AuthorDate: Mon Apr 8 20:07:00 2024 -0500

NIFI-13014 Remove unused Avatica versions from code-coverage

This closes #8615.

Signed-off-by: Peter Turcsanyi 
---
 nifi-code-coverage/pom.xml | 13 -
 1 file changed, 13 deletions(-)

diff --git a/nifi-code-coverage/pom.xml b/nifi-code-coverage/pom.xml
index 66e72afb3f..4d54c0fd59 100644
--- a/nifi-code-coverage/pom.xml
+++ b/nifi-code-coverage/pom.xml
@@ -28,8 +28,6 @@
 
 
 1.10.14
-1.6.0
-1.24.0
 2.12.0
 0.8.11
 
@@ -64,17 +62,6 @@
 ant-antlr
 ${ant.version}
 
-
-
-org.apache.calcite
-calcite-avatica
-${calcite.avatica.version}
-
-
-org.apache.calcite.avatica
-avatica
-${avatica.version}
-
 
 
 org.codehaus.janino



(nifi) branch support/nifi-1.x updated: NIFI-13012 Upgraded Apache Tika from 2.9.1 to 2.9.2

2024-04-09 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new c53539542b NIFI-13012 Upgraded Apache Tika from 2.9.1 to 2.9.2
c53539542b is described below

commit c53539542beabba416bb87fce7d582986e6e3903
Author: exceptionfactory 
AuthorDate: Thu Apr 4 20:19:54 2024 +0200

NIFI-13012 Upgraded Apache Tika from 2.9.1 to 2.9.2

This closes #8613.

Signed-off-by: Peter Turcsanyi 

(cherry picked from commit c335de8dbed1aa57628790c2b78f6bbacf9e8322)
---
 nifi-nar-bundles/nifi-framework-bundle/pom.xml   | 2 +-
 nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml | 2 +-
 nifi-nar-bundles/nifi-standard-bundle/pom.xml| 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/nifi-nar-bundles/nifi-framework-bundle/pom.xml 
b/nifi-nar-bundles/nifi-framework-bundle/pom.xml
index 373d378673..f07223e643 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-framework-bundle/pom.xml
@@ -25,7 +25,7 @@
 
 5.6.0
 32.1.2-jre
-2.9.1
+2.9.2
 4.3.0
 
 
diff --git a/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml 
b/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml
index b42ef36118..fb86fe12e2 100644
--- a/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml
@@ -26,7 +26,7 @@
 jar
 
 
-2.9.1
+2.9.2
 
 
 
diff --git a/nifi-nar-bundles/nifi-standard-bundle/pom.xml 
b/nifi-nar-bundles/nifi-standard-bundle/pom.xml
index e2cf9dbce5..ee0693ea41 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-standard-bundle/pom.xml
@@ -37,7 +37,7 @@
 2.2.0
 0.1.8
 2.11.0
-2.9.1
+2.9.2
 
 
 



(nifi) branch main updated: NIFI-13012 Upgraded Apache Tika from 2.9.1 to 2.9.2

2024-04-09 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new c335de8dbe NIFI-13012 Upgraded Apache Tika from 2.9.1 to 2.9.2
c335de8dbe is described below

commit c335de8dbed1aa57628790c2b78f6bbacf9e8322
Author: exceptionfactory 
AuthorDate: Thu Apr 4 13:19:54 2024 -0500

NIFI-13012 Upgraded Apache Tika from 2.9.1 to 2.9.2

This closes #8613.

Signed-off-by: Peter Turcsanyi 
---
 nifi-nar-bundles/nifi-framework-bundle/pom.xml   | 2 +-
 nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml | 2 +-
 nifi-nar-bundles/nifi-standard-bundle/pom.xml| 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/nifi-nar-bundles/nifi-framework-bundle/pom.xml 
b/nifi-nar-bundles/nifi-framework-bundle/pom.xml
index 548eed74a4..2bfc2e8b68 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-framework-bundle/pom.xml
@@ -25,7 +25,7 @@
 
 5.6.0
 33.1.0-jre
-2.9.1
+2.9.2
 4.3.0
 
 
diff --git a/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml 
b/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml
index 047b33f04b..f96b26b541 100644
--- a/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml
@@ -26,7 +26,7 @@
 jar
 
 
-2.9.1
+2.9.2
 
 
 
diff --git a/nifi-nar-bundles/nifi-standard-bundle/pom.xml 
b/nifi-nar-bundles/nifi-standard-bundle/pom.xml
index e04f155b36..65f4e6e5c8 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-standard-bundle/pom.xml
@@ -35,7 +35,7 @@
 
 2.2.0
 2.12.0
-2.9.1
+2.9.2
 
 
 



(nifi) branch main updated: NIFI-13013 Upgraded ActiveMQ from 6.0.1 to 6.1.1

2024-04-09 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new f2263458f0 NIFI-13013 Upgraded ActiveMQ from 6.0.1 to 6.1.1
f2263458f0 is described below

commit f2263458f0a638ce6be6b119a7e6923297379abe
Author: exceptionfactory 
AuthorDate: Mon Apr 8 19:52:49 2024 -0500

NIFI-13013 Upgraded ActiveMQ from 6.0.1 to 6.1.1

This closes #8614.

Signed-off-by: Peter Turcsanyi 
---
 nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml | 9 +++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
index f3ee0a76b2..1e3c0ec70f 100644
--- a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
@@ -17,6 +17,11 @@
 4.0.0
 nifi-jms-processors
 jar
+
+
+6.1.1
+
+
 
 
 org.apache.nifi
@@ -71,13 +76,13 @@
 
 org.apache.activemq
 activemq-client
-6.0.1
+${activemq.version}
 test
 
 
 org.apache.activemq
 activemq-broker
-6.0.1
+${activemq.version}
 test
 
 



(nifi) branch support/nifi-1.x updated: NIFI-12984 - Bump Snowflake Ingest SDK to 2.1.0

2024-04-08 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new f19ea70a8f NIFI-12984 - Bump Snowflake Ingest SDK to 2.1.0
f19ea70a8f is described below

commit f19ea70a8f559cc6f22e567343f339299c51c2d3
Author: Pierre Villard 
AuthorDate: Mon Apr 1 12:28:23 2024 +0300

NIFI-12984 - Bump Snowflake Ingest SDK to 2.1.0

This closes #8591.

Signed-off-by: Peter Turcsanyi 
(cherry picked from commit d624fe7e91d93f509aae2167e872101b4c8774c7)
---
 nifi-nar-bundles/nifi-snowflake-bundle/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml 
b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
index a0662c0b26..f46437030f 100644
--- a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
@@ -36,7 +36,7 @@
 
 net.snowflake
 snowflake-ingest-sdk
-2.0.5
+2.1.0
 
 
 net.snowflake



(nifi) branch main updated: NIFI-12984 - Bump Snowflake Ingest SDK to 2.1.0

2024-04-08 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new d624fe7e91 NIFI-12984 - Bump Snowflake Ingest SDK to 2.1.0
d624fe7e91 is described below

commit d624fe7e91d93f509aae2167e872101b4c8774c7
Author: Pierre Villard 
AuthorDate: Mon Apr 1 12:28:23 2024 +0300

NIFI-12984 - Bump Snowflake Ingest SDK to 2.1.0

This closes #8591.

Signed-off-by: Peter Turcsanyi 
---
 nifi-nar-bundles/nifi-snowflake-bundle/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml 
b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
index 5b47436bbb..21aec9e271 100644
--- a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
@@ -36,7 +36,7 @@
 
 net.snowflake
 snowflake-ingest-sdk
-2.0.5
+2.1.0
 
 
 net.snowflake



(nifi) branch main updated: NIFI-12671 Added S3FileResourceService

2024-02-26 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 40d9750bb3 NIFI-12671 Added S3FileResourceService
40d9750bb3 is described below

commit 40d9750bb3dab405bd0ba7df14737837d9c3021c
Author: Balázs Gerner 
AuthorDate: Tue Feb 6 14:56:23 2024 +0100

NIFI-12671 Added S3FileResourceService

This closes #8368.

Signed-off-by: Peter Turcsanyi 
---
 .../AbstractAWSCredentialsProviderProcessor.java   |  24 +--
 .../processors/aws/s3/AbstractS3Processor.java |  50 +-
 .../nifi/processors/aws/util/RegionUtilV1.java |  94 
 .../processors/aws/v2/AbstractAwsProcessor.java|   4 +-
 .../aws/v2/{RegionUtil.java => RegionUtilV2.java}  |   7 +-
 .../nifi/processors/aws/s3/DeleteS3Object.java |   2 +
 .../nifi/processors/aws/s3/FetchS3Object.java  |   2 +
 .../org/apache/nifi/processors/aws/s3/ListS3.java  |  14 +-
 .../apache/nifi/processors/aws/s3/PutS3Object.java |   1 +
 .../apache/nifi/processors/aws/s3/TagS3Object.java |   2 +
 .../s3/encryption/StandardS3EncryptionService.java |   6 +-
 .../aws/s3/service/S3FileResourceService.java  | 154 +++
 .../processors/aws/wag/InvokeAWSGatewayApi.java|   2 +
 .../org.apache.nifi.controller.ControllerService   |   1 +
 .../nifi/processors/aws/s3/AbstractS3IT.java   |   3 +-
 .../nifi/processors/aws/s3/ITFetchS3Object.java|   3 +-
 .../nifi/processors/aws/s3/ITPutS3Object.java  |   5 +-
 .../nifi/processors/aws/s3/TestDeleteS3Object.java |  11 +-
 .../nifi/processors/aws/s3/TestFetchS3Object.java  |  19 +--
 .../apache/nifi/processors/aws/s3/TestListS3.java  |  31 ++--
 .../nifi/processors/aws/s3/TestPutS3Object.java|   5 +-
 .../nifi/processors/aws/s3/TestTagS3Object.java|  21 +--
 .../aws/s3/service/S3FileResourceServiceTest.java  | 170 +
 .../nifi/processors/aws/sqs/AbstractSQSIT.java |   6 +-
 .../aws/wag/TestInvokeAWSGatewayApiCommon.java |   3 +-
 .../aws/wag/TestInvokeAmazonGatewayApiMock.java|   3 +-
 26 files changed, 513 insertions(+), 130 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/AbstractAWSCredentialsProviderProcessor.java
 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/AbstractAWSCredentialsProviderProcessor.java
index f9a81ad208..5215186e36 100644
--- 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/AbstractAWSCredentialsProviderProcessor.java
+++ 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/AbstractAWSCredentialsProviderProcessor.java
@@ -29,7 +29,6 @@ import com.github.benmanes.caffeine.cache.Caffeine;
 import org.apache.http.conn.ssl.DefaultHostnameVerifier;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
-import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.ConfigVerificationResult;
 import org.apache.nifi.components.ConfigVerificationResult.Outcome;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -58,6 +57,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
+import static org.apache.nifi.processors.aws.util.RegionUtilV1.REGION;
+
 /**
  * Base class for AWS processors that uses AWSCredentialsProvider interface 
for creating AWS clients.
  *
@@ -92,14 +93,6 @@ public abstract class 
AbstractAWSCredentialsProviderProcessor values = new ArrayList<>();
-for (final Regions region : Regions.values()) {
-values.add(createAllowableValue(region));
-}
-return values.toArray(new AllowableValue[0]);
-}
-
-
 @Override
 public void migrateProperties(final PropertyConfiguration config) {
 migrateAuthenticationProperties(config);
diff --git 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/s3/AbstractS3Processor.java
 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/s3/AbstractS3Processor.java
index 9d8f643c44..d449218d4f 100644
--- 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/s3/AbstractS3Processor.java
+++ 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/s3/AbstractS3Processor.java
@@ -22,7 +22,6 @@ import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.Signer;
 import com.amazonaws.client.builder.AwsClientBuilder;
 import com.amazonaws.regions.Region;
-import com.amazonaws.regions.Regions

(nifi) branch main updated: NIFI-12775: Renamed provenance/FileResource to ProvenanceFileResource

2024-02-11 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new bf86103abf NIFI-12775: Renamed provenance/FileResource to 
ProvenanceFileResource
bf86103abf is described below

commit bf86103abfc28e7bd84d5421dc8db2eecb7357be
Author: lehelb 
AuthorDate: Sat Feb 10 20:27:57 2024 -0500

NIFI-12775: Renamed provenance/FileResource to ProvenanceFileResource

This closes #8392.

Signed-off-by: Peter Turcsanyi 
---
 .../provenance/{FileResource.java => ProvenanceFileResource.java}   | 4 ++--
 .../main/java/org/apache/nifi/provenance/ProvenanceReporter.java| 4 ++--
 .../src/main/java/org/apache/nifi/util/MockProvenanceReporter.java  | 6 +++---
 .../nifi/controller/repository/StandardProvenanceReporter.java  | 6 +++---
 4 files changed, 10 insertions(+), 10 deletions(-)

diff --git 
a/nifi-api/src/main/java/org/apache/nifi/provenance/FileResource.java 
b/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceFileResource.java
similarity index 86%
rename from nifi-api/src/main/java/org/apache/nifi/provenance/FileResource.java
rename to 
nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceFileResource.java
index 8ec84b58b0..71e7845e45 100644
--- a/nifi-api/src/main/java/org/apache/nifi/provenance/FileResource.java
+++ 
b/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceFileResource.java
@@ -20,10 +20,10 @@ package org.apache.nifi.provenance;
  * Holds information of a file resource for UPLOAD
  * provenance events.
  */
-public record FileResource(String location, long size) {
+public record ProvenanceFileResource(String location, long size) {
 
 @Override
 public String toString() {
-return "FileResource[location=%s, size=%d]".formatted(location, size);
+return "ProvenanceFileResource[location=%s, 
size=%d]".formatted(location, size);
 }
 }
diff --git 
a/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceReporter.java 
b/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceReporter.java
index 2f64864a00..706d6fef47 100644
--- a/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceReporter.java
+++ b/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceReporter.java
@@ -330,7 +330,7 @@ public interface ProvenanceReporter {
  * events to an external Enterprise-wide system that is then able to
  * correlate the SEND and RECEIVE events.
  */
-void upload(FlowFile flowFile, FileResource fileResource, String 
transitUri);
+void upload(FlowFile flowFile, ProvenanceFileResource fileResource, String 
transitUri);
 
 /**
  * Emits a Provenance Event of type {@link ProvenanceEventType#UPLOAD 
UPLOAD}
@@ -355,7 +355,7 @@ public interface ProvenanceReporter {
  * ProvenanceReporter is associated is rolled back. Otherwise, the Event
  * will be recorded only on a successful session commit.
  */
-void upload(FlowFile flowFile, FileResource fileResource, String 
transitUri, String details, long transmissionMillis, boolean force);
+void upload(FlowFile flowFile, ProvenanceFileResource fileResource, String 
transitUri, String details, long transmissionMillis, boolean force);
 
 /**
  * Emits a Provenance Event of type {@link 
ProvenanceEventType#REMOTE_INVOCATION}
diff --git 
a/nifi-mock/src/main/java/org/apache/nifi/util/MockProvenanceReporter.java 
b/nifi-mock/src/main/java/org/apache/nifi/util/MockProvenanceReporter.java
index 38c1f0e8dc..1879d11c6e 100644
--- a/nifi-mock/src/main/java/org/apache/nifi/util/MockProvenanceReporter.java
+++ b/nifi-mock/src/main/java/org/apache/nifi/util/MockProvenanceReporter.java
@@ -24,7 +24,7 @@ import org.apache.nifi.provenance.ProvenanceEventRecord;
 import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.provenance.ProvenanceReporter;
 import org.apache.nifi.provenance.StandardProvenanceEventRecord;
-import org.apache.nifi.provenance.FileResource;
+import org.apache.nifi.provenance.ProvenanceFileResource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -228,13 +228,13 @@ public class MockProvenanceReporter implements 
ProvenanceReporter {
 }
 
 @Override
-public void upload(final FlowFile flowFile, final FileResource 
fileResource, final String transitUri) {
+public void upload(final FlowFile flowFile, final ProvenanceFileResource 
fileResource, final String transitUri) {
 upload(flowFile, fileResource, transitUri, null, -1L, true);
 
 }
 
 @Override
-public void upload(FlowFile flowFile, FileResource fileResource, String 
transitUri, String details, long transmissionMillis, boolean force) {
+public void upload(FlowFile flowFile, ProvenanceFileResource fileResource, 
String transitUri, String details, long transmissionMillis, boolea

(nifi) branch support/nifi-1.x updated: NIFI-12441 Added No Tracking Strategy to ListS3

2024-02-02 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new ce56117e43 NIFI-12441 Added No Tracking Strategy to ListS3
ce56117e43 is described below

commit ce56117e43c7cdce37ca76d54550a2b45acb508a
Author: Juldrixx 
AuthorDate: Sat Dec 23 04:38:03 2023 -0600

NIFI-12441 Added No Tracking Strategy to ListS3

This closes #8088

Signed-off-by: David Handermann 
(cherry picked from commit 9a919339090b26b0b39321758b8f8074597320ca)
---
 .../org/apache/nifi/processors/aws/s3/ListS3.java  | 110 ++---
 .../apache/nifi/processors/aws/s3/TestListS3.java  |  59 +++
 2 files changed, 157 insertions(+), 12 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
index 4be64b0953..431778f1ef 100644
--- 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
+++ 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
@@ -144,12 +144,17 @@ public class ListS3 extends AbstractS3Processor 
implements VerifiableProcessor {
 " However an additional DistributedMapCache controller service is 
required and more JVM heap memory is used." +
 " For more information on how the 'Entity Tracking Time Window' 
property works, see the description.");
 
+public static final AllowableValue NO_TRACKING = new 
AllowableValue("none", "No Tracking",
+"This strategy lists all entities without any tracking. The same 
entities will be listed each time" +
+" this processor is scheduled. It is recommended to change 
the default run schedule value." +
+" Any property that relates to the persisting state will 
be ignored.");
+
 public static final PropertyDescriptor LISTING_STRATEGY = new Builder()
 .name("listing-strategy")
 .displayName("Listing Strategy")
 .description("Specify how to determine new/updated entities. See each 
strategy descriptions for detail.")
 .required(true)
-.allowableValues(BY_TIMESTAMPS, BY_ENTITIES)
+.allowableValues(BY_TIMESTAMPS, BY_ENTITIES, NO_TRACKING)
 .defaultValue(BY_TIMESTAMPS.getValue())
 .build();
 
@@ -345,7 +350,7 @@ public class ListS3 extends AbstractS3Processor implements 
VerifiableProcessor {
 try {
 listedEntityTracker.clearListedEntities();
 } catch (IOException e) {
-throw new RuntimeException("Failed to reset previously listed 
entities due to " + e, e);
+throw new RuntimeException("Failed to reset previously listed 
entities", e);
 }
 }
 resetEntityTrackingState = false;
@@ -465,11 +470,92 @@ public class ListS3 extends AbstractS3Processor 
implements VerifiableProcessor {
 listByTrackingTimestamps(context, session);
 } else if (BY_ENTITIES.equals(listingStrategy)) {
 listByTrackingEntities(context, session);
+} else if (NO_TRACKING.equals(listingStrategy)) {
+listNoTracking(context, session);
 } else {
 throw new ProcessException("Unknown listing strategy: " + 
listingStrategy);
 }
 }
 
+private void listNoTracking(ProcessContext context, ProcessSession 
session) {
+final AmazonS3 client = getClient(context);
+
+S3BucketLister bucketLister = getS3BucketLister(context, client);
+
+final long startNanos = System.nanoTime();
+final long minAgeMilliseconds = 
context.getProperty(MIN_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
+final Long maxAgeMilliseconds = context.getProperty(MAX_AGE) != null ? 
context.getProperty(MAX_AGE).asTimePeriod(TimeUnit.MILLISECONDS) : null;
+final long listingTimestamp = System.currentTimeMillis();
+
+final String bucket = 
context.getProperty(BUCKET).evaluateAttributeExpressions().getValue();
+final int batchSize = context.getProperty(BATCH_SIZE).asInteger();
+
+int listCount = 0;
+int totalListCount = 0;
+
+getLogger().trace("Start listing, listingTimestamp={}", 
listingTimestamp);
+
+final S3ObjectWriter writer;
+final RecordSetWriterFactory writerFactory = 
context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
+if (writerFactory == null) {
+writer = new AttributeObjectWriter(

(nifi) branch support/nifi-1.x updated: NIFI-12715 Updated Snowflake SDKs

2024-02-01 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new f211f3af28 NIFI-12715 Updated Snowflake SDKs
f211f3af28 is described below

commit f211f3af28a55e6a4d2ba44424d12abf232258a1
Author: mr1716 
AuthorDate: Thu Feb 1 08:24:43 2024 -0500

NIFI-12715 Updated Snowflake SDKs

This closes #8335.

Signed-off-by: Peter Turcsanyi 
(cherry picked from commit d5ed02ec5969efb82efffa467a6be8c53ceeaccf)
---
 nifi-nar-bundles/nifi-snowflake-bundle/pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml 
b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
index 507846e61a..a0662c0b26 100644
--- a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
@@ -36,13 +36,13 @@
 
 net.snowflake
 snowflake-ingest-sdk
-2.0.4
+2.0.5
 
 
 net.snowflake
 snowflake-jdbc
 
-3.14.4
+3.14.5
 
 
 



(nifi) branch main updated: NIFI-12715 Updated Snowflake SDKs

2024-02-01 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new d5ed02ec59 NIFI-12715 Updated Snowflake SDKs
d5ed02ec59 is described below

commit d5ed02ec5969efb82efffa467a6be8c53ceeaccf
Author: mr1716 
AuthorDate: Thu Feb 1 08:24:43 2024 -0500

NIFI-12715 Updated Snowflake SDKs

This closes #8335.

Signed-off-by: Peter Turcsanyi 
---
 nifi-nar-bundles/nifi-snowflake-bundle/pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml 
b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
index 539e29a812..5b47436bbb 100644
--- a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
@@ -36,13 +36,13 @@
 
 net.snowflake
 snowflake-ingest-sdk
-2.0.4
+2.0.5
 
 
 net.snowflake
 snowflake-jdbc
 
-3.14.4
+3.14.5
 
 
 



(nifi) branch main updated: NIFI-12642 Added support for FileResourceService in PutS3Object

2024-01-29 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new c1a21ad078 NIFI-12642 Added support for FileResourceService in 
PutS3Object
c1a21ad078 is described below

commit c1a21ad078d7b9c3b0422c48b9bb1f9faa3c0b6f
Author: Balázs Gerner 
AuthorDate: Mon Jan 22 13:11:18 2024 +0100

NIFI-12642 Added support for FileResourceService in PutS3Object

This closes #8295.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-aws-bundle/nifi-aws-processors/pom.xml|  15 +
 .../apache/nifi/processors/aws/s3/PutS3Object.java | 575 +++--
 .../nifi/processors/aws/s3/AbstractS3IT.java   |  83 +--
 .../nifi/processors/aws/s3/ITPutS3Object.java  |  65 +++
 .../nifi/processors/aws/s3/TestPutS3Object.java|  51 +-
 5 files changed, 462 insertions(+), 327 deletions(-)

diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/pom.xml 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/pom.xml
index c0748af598..44ac6da9f3 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/pom.xml
@@ -38,6 +38,15 @@
 nifi-listed-entity
 2.0.0-SNAPSHOT
 
+
+org.apache.nifi
+nifi-resource-transfer
+2.0.0-SNAPSHOT
+
+
+org.apache.nifi
+nifi-file-resource-service-api
+
 
 org.apache.nifi
 nifi-aws-abstract-processors
@@ -141,6 +150,12 @@
 2.0.0-SNAPSHOT
 test
 
+
+org.apache.nifi
+nifi-file-resource-service
+2.0.0-SNAPSHOT
+test
+
 
 
 
diff --git 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
index 3247f9a6a8..26c759417a 100644
--- 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
+++ 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
@@ -53,14 +53,15 @@ import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.fileresource.service.api.FileResource;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.transfer.ResourceTransferSource;
 
 import java.io.File;
 import java.io.FileInputStream;
@@ -78,6 +79,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Optional;
 import java.util.Properties;
 import java.util.Set;
 import java.util.TreeSet;
@@ -87,6 +89,10 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.stream.Collectors;
 
+import static 
org.apache.nifi.processors.transfer.ResourceTransferProperties.FILE_RESOURCE_SERVICE;
+import static 
org.apache.nifi.processors.transfer.ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE;
+import static 
org.apache.nifi.processors.transfer.ResourceTransferUtils.getFileResource;
+
 @SupportsBatching
 @SeeAlso({FetchS3Object.class, DeleteS3Object.class, ListS3.class})
 @InputRequirement(Requirement.INPUT_REQUIRED)
@@ -261,6 +267,8 @@ public class PutS3Object extends AbstractS3Processor {
 KEY,
 S3_REGION,
 AWS_CREDENTIALS_PROVIDER_SERVICE,
+RESOURCE_TRANSFER_SOURCE,
+FILE_RESOURCE_SERVICE,
 STORAGE_CLASS,
 ENCRYPTION_SERVICE,
 SERVER_SIDE_ENCRYPTION,
@@ -501,6 +509,8 @@ public class PutS3Object extends AbstractS3Processor {
 final FlowFile ff = flowFile;
 final Map attributes = new HashMap<>();
 final String ffFilename = 
ff.getAttributes().get(CoreAttributes.FILENAME.key());
+final ResourceTransferSource resourceTransferSource = 
context.getProperty(RESOURCE_TRANSFER_SOURCE).asAllowableValue(ResourceTransferSource.class);
+
 attributes.put(S3_BUCKET_KEY, bucket);
 attributes.put(S3_OBJECT_KEY, key);
 
@@ -519,329 +529,332 @@ public

(nifi) branch main updated: NIFI-12643 Added support for FileResourceService in PutGCSObject

2024-01-26 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new bce14f573b NIFI-12643 Added support for FileResourceService in 
PutGCSObject
bce14f573b is described below

commit bce14f573b57685637d776333029641e62730d26
Author: Balázs Gerner 
AuthorDate: Fri Jan 19 09:49:30 2024 +0100

NIFI-12643 Added support for FileResourceService in PutGCSObject

This closes #8281.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-gcp-bundle/nifi-gcp-processors/pom.xml|   5 +
 .../nifi/processors/gcp/storage/PutGCSObject.java  | 375 +++--
 .../processors/gcp/storage/PutGCSObjectTest.java   |  55 ++-
 3 files changed, 242 insertions(+), 193 deletions(-)

diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml
index 70f104365e..897e710395 100644
--- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml
@@ -69,6 +69,11 @@
 2.0.0-SNAPSHOT
 provided
 
+
+org.apache.nifi
+nifi-resource-transfer
+2.0.0-SNAPSHOT
+
 
 org.apache.nifi
 nifi-file-resource-service-api
diff --git 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
index 07546637da..438fac19b8 100644
--- 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
+++ 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
@@ -22,15 +22,6 @@ import com.google.cloud.storage.BlobId;
 import com.google.cloud.storage.BlobInfo;
 import com.google.cloud.storage.Storage;
 import com.google.cloud.storage.StorageException;
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
@@ -43,13 +34,23 @@ import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.fileresource.service.api.FileResource;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.transfer.ResourceTransferSource;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
 
 import static 
com.google.cloud.storage.Storage.PredefinedAcl.ALL_AUTHENTICATED_USERS;
 import static 
com.google.cloud.storage.Storage.PredefinedAcl.AUTHENTICATED_READ;
@@ -102,6 +103,9 @@ import static 
org.apache.nifi.processors.gcp.storage.StorageAttributes.UPDATE_TI
 import static 
org.apache.nifi.processors.gcp.storage.StorageAttributes.UPDATE_TIME_DESC;
 import static 
org.apache.nifi.processors.gcp.storage.StorageAttributes.URI_ATTR;
 import static 
org.apache.nifi.processors.gcp.storage.StorageAttributes.URI_DESC;
+import static 
org.apache.nifi.processors.transfer.ResourceTransferProperties.FILE_RESOURCE_SERVICE;
+import static 
org.apache.nifi.processors.transfer.ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE;
+import static 
org.apache.nifi.processors.transfer.ResourceTransferUtils.getFileResource;
 
 
 @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@@ -290,6 +294,8 @@ public class PutGCSObject extends AbstractGCSProcessor {
 final List descriptors = new 
ArrayList<>(super.getSupportedPropertyDescriptors());
 descriptors.add(BUCKET);
 descriptors.add(KEY);
+descriptors.add(RESOURCE_TRANSFER_SOURCE);
+descriptors.add(FILE_RESOURCE_SERVICE);
 descriptors.add(CONTENT_TYPE);
 descriptors.add(CRC32C);
 descriptors.a

(nifi) branch support/nifi-1.x updated: NIFI-12594: ListS3 - observe min/max object age when entity state tracking is used

2024-01-15 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new b4487a0bf0 NIFI-12594: ListS3 - observe min/max object age when entity 
state tracking is used
b4487a0bf0 is described below

commit b4487a0bf0c67530c19824adbe7d0e002dc255b5
Author: p-kimberley 
AuthorDate: Wed Jan 10 14:59:59 2024 +0100

NIFI-12594: ListS3 - observe min/max object age when entity state tracking 
is used

This closes #8231.

Signed-off-by: Peter Turcsanyi 

(cherry picked from commit 3ebad40fae458db3fe664ddd6738b770a26289c8)
---
 .../org/apache/nifi/processors/aws/s3/ListS3.java  | 181 ++---
 .../apache/nifi/processors/aws/s3/TestListS3.java  |   2 +-
 2 files changed, 85 insertions(+), 98 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
index c1d4c9a5ef..4be64b0953 100644
--- 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
+++ 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
@@ -162,11 +162,12 @@ public class ListS3 extends AbstractS3Processor 
implements VerifiableProcessor {
 public static final PropertyDescriptor INITIAL_LISTING_TARGET = new 
PropertyDescriptor.Builder()
 .fromPropertyDescriptor(ListedEntityTracker.INITIAL_LISTING_TARGET)
 .dependsOn(LISTING_STRATEGY, BY_ENTITIES)
+.required(true)
 .build();
 
 public static final PropertyDescriptor TRACKING_TIME_WINDOW = new 
PropertyDescriptor.Builder()
 .fromPropertyDescriptor(ListedEntityTracker.TRACKING_TIME_WINDOW)
-.dependsOn(INITIAL_LISTING_TARGET, 
ListedEntityTracker.INITIAL_LISTING_TARGET_WINDOW)
+.dependsOn(ListedEntityTracker.TRACKING_STATE_CACHE)
 .required(true)
 .build();
 
@@ -287,8 +288,8 @@ public class ListS3 extends AbstractS3Processor implements 
VerifiableProcessor {
 public static final List properties = 
Collections.unmodifiableList(Arrays.asList(
 LISTING_STRATEGY,
 TRACKING_STATE_CACHE,
-INITIAL_LISTING_TARGET,
 TRACKING_TIME_WINDOW,
+INITIAL_LISTING_TARGET,
 BUCKET,
 REGION,
 ACCESS_KEY,
@@ -329,6 +330,8 @@ public class ListS3 extends AbstractS3Processor implements 
VerifiableProcessor {
 private volatile boolean justElectedPrimaryNode = false;
 private volatile boolean resetEntityTrackingState = false;
 private volatile 
ListedEntityTracker> 
listedEntityTracker;
+private volatile Long minObjectAgeMilliseconds;
+private volatile Long maxObjectAgeMilliseconds;
 
 @OnPrimaryNodeStateChange
 public void onPrimaryNodeChange(final PrimaryNodeState newState) {
@@ -354,6 +357,9 @@ public class ListS3 extends AbstractS3Processor implements 
VerifiableProcessor {
 } else {
 listedEntityTracker = null;
 }
+
+minObjectAgeMilliseconds = 
context.getProperty(MIN_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
+maxObjectAgeMilliseconds = context.getProperty(MAX_AGE) != null ? 
context.getProperty(MAX_AGE).asTimePeriod(TimeUnit.MILLISECONDS) : null;
 }
 
 protected ListedEntityTracker> 
createListedEntityTracker() {
@@ -364,7 +370,7 @@ public class ListS3 extends AbstractS3Processor implements 
VerifiableProcessor {
 return new Validator() {
 @Override
 public ValidationResult validate(final String subject, final 
String input, final ValidationContext context) {
-boolean requesterPays = Boolean.valueOf(input);
+boolean requesterPays = Boolean.parseBoolean(input);
 boolean useVersions = 
context.getProperty(USE_VERSIONS).asBoolean();
 boolean valid = !requesterPays || !useVersions;
 return new ValidationResult.Builder()
@@ -474,26 +480,21 @@ public class ListS3 extends AbstractS3Processor 
implements VerifiableProcessor {
 }
 
 final AmazonS3 client = getClient(context);
-
-S3BucketLister bucketLister = getS3BucketLister(context, client);
-
-final long startNanos = System.nanoTime();
-final long minAgeMilliseconds = 
context.getProperty(MIN_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
-final Long maxAgeMilliseconds = context.getProperty(MAX_AGE) != null ? 
context.getProperty(MAX_AGE).asTimePeriod(TimeUnit.MILLISECONDS) : null;
-final long listingTimestamp = System.currentTimeMillis();
-
+final S3BucketLister bucketLister = getS3BucketLister(c

(nifi) branch main updated: NIFI-12594: ListS3 - observe min/max object age when entity state tracking is used

2024-01-15 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 3ebad40fae NIFI-12594: ListS3 - observe min/max object age when entity 
state tracking is used
3ebad40fae is described below

commit 3ebad40fae458db3fe664ddd6738b770a26289c8
Author: p-kimberley 
AuthorDate: Thu Jan 11 00:59:59 2024 +1100

NIFI-12594: ListS3 - observe min/max object age when entity state tracking 
is used

This closes #8231.

Signed-off-by: Peter Turcsanyi 
---
 .../org/apache/nifi/processors/aws/s3/ListS3.java  | 183 ++---
 .../apache/nifi/processors/aws/s3/TestListS3.java  |   2 +-
 2 files changed, 86 insertions(+), 99 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
index 13e15a4aae..ba150c453a 100644
--- 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
+++ 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
@@ -161,11 +161,12 @@ public class ListS3 extends AbstractS3Processor 
implements VerifiableProcessor {
 public static final PropertyDescriptor INITIAL_LISTING_TARGET = new 
PropertyDescriptor.Builder()
 .fromPropertyDescriptor(ListedEntityTracker.INITIAL_LISTING_TARGET)
 .dependsOn(LISTING_STRATEGY, BY_ENTITIES)
+.required(true)
 .build();
 
 public static final PropertyDescriptor TRACKING_TIME_WINDOW = new 
PropertyDescriptor.Builder()
 .fromPropertyDescriptor(ListedEntityTracker.TRACKING_TIME_WINDOW)
-.dependsOn(INITIAL_LISTING_TARGET, 
ListedEntityTracker.INITIAL_LISTING_TARGET_WINDOW)
+.dependsOn(ListedEntityTracker.TRACKING_STATE_CACHE)
 .required(true)
 .build();
 
@@ -289,8 +290,8 @@ public class ListS3 extends AbstractS3Processor implements 
VerifiableProcessor {
 AWS_CREDENTIALS_PROVIDER_SERVICE,
 LISTING_STRATEGY,
 TRACKING_STATE_CACHE,
-INITIAL_LISTING_TARGET,
 TRACKING_TIME_WINDOW,
+INITIAL_LISTING_TARGET,
 RECORD_WRITER,
 MIN_AGE,
 MAX_AGE,
@@ -321,6 +322,8 @@ public class ListS3 extends AbstractS3Processor implements 
VerifiableProcessor {
 private volatile boolean justElectedPrimaryNode = false;
 private volatile boolean resetEntityTrackingState = false;
 private volatile 
ListedEntityTracker> 
listedEntityTracker;
+private volatile Long minObjectAgeMilliseconds;
+private volatile Long maxObjectAgeMilliseconds;
 
 @OnPrimaryNodeStateChange
 public void onPrimaryNodeChange(final PrimaryNodeState newState) {
@@ -346,6 +349,9 @@ public class ListS3 extends AbstractS3Processor implements 
VerifiableProcessor {
 } else {
 listedEntityTracker = null;
 }
+
+minObjectAgeMilliseconds = 
context.getProperty(MIN_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
+maxObjectAgeMilliseconds = context.getProperty(MAX_AGE) != null ? 
context.getProperty(MAX_AGE).asTimePeriod(TimeUnit.MILLISECONDS) : null;
 }
 
 protected ListedEntityTracker> 
createListedEntityTracker() {
@@ -356,7 +362,7 @@ public class ListS3 extends AbstractS3Processor implements 
VerifiableProcessor {
 return new Validator() {
 @Override
 public ValidationResult validate(final String subject, final 
String input, final ValidationContext context) {
-boolean requesterPays = Boolean.valueOf(input);
+boolean requesterPays = Boolean.parseBoolean(input);
 boolean useVersions = 
context.getProperty(USE_VERSIONS).asBoolean();
 boolean valid = !requesterPays || !useVersions;
 return new ValidationResult.Builder()
@@ -407,7 +413,7 @@ public class ListS3 extends AbstractS3Processor implements 
VerifiableProcessor {
 
 private void restoreState(final ProcessSession session) throws IOException 
{
 final StateMap stateMap = session.getState(Scope.CLUSTER);
-if (!stateMap.getStateVersion().isPresent() || 
stateMap.get(CURRENT_TIMESTAMP) == null || stateMap.get(CURRENT_KEY_PREFIX+"0") 
== null) {
+if (stateMap.getStateVersion().isEmpty() || 
stateMap.get(CURRENT_TIMESTAMP) == null || stateMap.get(CURRENT_KEY_PREFIX + 
"0") == null) {
 forcefullyUpdateListing(0L, Collections.emptySet());
 } else {
 final long timestamp = 
Long.parseLong(stateMap.get(CURRENT_TIMESTAMP));
@@ -466,26 +472,21 @@ public class ListS3 extends AbstractS3Processor 
implements VerifiableProcessor {
 }
 
 final A

(nifi) branch support/nifi-1.x updated: NIFI-12520: ExtractHL7Attributes processor ignores repeatable field values

2023-12-16 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 6a3301dfbf NIFI-12520: ExtractHL7Attributes processor ignores 
repeatable field values
6a3301dfbf is described below

commit 6a3301dfbf4456c3ab1c6bca4ab797f302c8621d
Author: Mark Bathori 
AuthorDate: Fri Dec 15 22:41:17 2023 +0100

NIFI-12520: ExtractHL7Attributes processor ignores repeatable field values

This closes #8167.

Signed-off-by: Peter Turcsanyi 

(cherry picked from commit 16d170fdfdbc12723746ae1f7ae8568246227ab2)
---
 .../nifi/processors/hl7/ExtractHL7Attributes.java  |  20 ++--
 .../processors/hl7/TestExtractHL7Attributes.java   | 129 ++---
 2 files changed, 101 insertions(+), 48 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
 
b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
index aa7a867d9c..063d8e9b3b 100644
--- 
a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
+++ 
b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
@@ -281,8 +281,8 @@ public class ExtractHL7Attributes extends AbstractProcessor 
{
 final Map fields = new TreeMap<>();
 final String[] segmentNames = segment.getNames();
 for (int i = 1; i <= segment.numFields(); i++) {
-final Type field = segment.getField(i, 0);
-if (!isEmpty(field)) {
+final Type[] fieldValues = segment.getField(i);
+if (fieldValues != null && fieldValues.length != 0) {
 final String fieldName;
 //Some user defined segments (e.g. Z segments) will not have 
corresponding names returned
 //from segment.getNames() above. If we encounter one of these, 
do the next best thing
@@ -294,13 +294,17 @@ public class ExtractHL7Attributes extends 
AbstractProcessor {
 fieldName = String.valueOf(i);
 }
 
-final String fieldKey = new StringBuilder()
-.append(segmentKey)
-.append(".")
-.append(fieldName)
-.toString();
+final String fieldKey = String.format("%s.%s", segmentKey, 
fieldName);
 
-fields.put(fieldKey, field);
+//Checks if the field is repeatable, if the max cardinality 
value is 0 or more than 1 then the field is repeatable
+if (segment.getMaxCardinality(i) == 1) {
+fields.put(fieldKey, fieldValues[0]);
+} else {
+for (int j = 0; j < fieldValues.length; j++) {
+final String repeatableFieldKey = 
String.format("%s_%s", fieldKey, j + 1);
+fields.put(repeatableFieldKey, fieldValues[j]);
+}
+}
 }
 }
 return fields;
diff --git 
a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/test/java/org/apache/nifi/processors/hl7/TestExtractHL7Attributes.java
 
b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/test/java/org/apache/nifi/processors/hl7/TestExtractHL7Attributes.java
index e1b5fdc51d..8f02e3b05f 100644
--- 
a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/test/java/org/apache/nifi/processors/hl7/TestExtractHL7Attributes.java
+++ 
b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/test/java/org/apache/nifi/processors/hl7/TestExtractHL7Attributes.java
@@ -22,7 +22,6 @@ import org.apache.nifi.util.TestRunners;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 
-import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.SortedMap;
@@ -105,17 +104,17 @@ public class TestExtractHL7Attributes {
 expectedAttributes.put("MSH.12", "2.3");
 
 expectedAttributes.put("ORC_1.1", "NW");
-expectedAttributes.put("ORC_1.2", "987654321^EPC");
+expectedAttributes.put("ORC_1.2_1", "987654321^EPC");
 expectedAttributes.put("ORC_1.3", "123456789^EPC");
 expectedAttributes.put("ORC_1.9", "2016100300");
-expectedAttributes.put("ORC_1.12", "SMITH");
+expectedAttributes.put("ORC_1.12_1", "SMITH");
 
 expectedAttributes.put("OBR_1.1", "1");
-expectedA

(nifi) branch main updated: NIFI-12520: ExtractHL7Attributes processor ignores repeatable field values

2023-12-16 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 16d170fdfd NIFI-12520: ExtractHL7Attributes processor ignores 
repeatable field values
16d170fdfd is described below

commit 16d170fdfdbc12723746ae1f7ae8568246227ab2
Author: Mark Bathori 
AuthorDate: Fri Dec 15 22:41:17 2023 +0100

NIFI-12520: ExtractHL7Attributes processor ignores repeatable field values

This closes #8167.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi/processors/hl7/ExtractHL7Attributes.java  |  15 ++-
 .../processors/hl7/TestExtractHL7Attributes.java   | 129 ++---
 2 files changed, 101 insertions(+), 43 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
 
b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
index 08cf0a2825..34288e6ce2 100644
--- 
a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
+++ 
b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
@@ -278,8 +278,8 @@ public class ExtractHL7Attributes extends AbstractProcessor 
{
 final Map fields = new TreeMap<>();
 final String[] segmentNames = segment.getNames();
 for (int i = 1; i <= segment.numFields(); i++) {
-final Type field = segment.getField(i, 0);
-if (!isEmpty(field)) {
+final Type[] fieldValues = segment.getField(i);
+if (fieldValues != null && fieldValues.length != 0) {
 final String fieldName;
 //Some user defined segments (e.g. Z segments) will not have 
corresponding names returned
 //from segment.getNames() above. If we encounter one of these, 
do the next best thing
@@ -292,7 +292,16 @@ public class ExtractHL7Attributes extends 
AbstractProcessor {
 }
 
 final String fieldKey = "%s.%s".formatted(segmentKey, 
fieldName);
-fields.put(fieldKey, field);
+
+//Checks if the field is repeatable, if the max cardinality 
value is 0 or more than 1 then the field is repeatable
+if (segment.getMaxCardinality(i) == 1) {
+fields.put(fieldKey, fieldValues[0]);
+} else {
+for (int j = 0; j < fieldValues.length; j++) {
+final String repeatableFieldKey = 
"%s_%s".formatted(fieldKey, j + 1);
+fields.put(repeatableFieldKey, fieldValues[j]);
+}
+}
 }
 }
 return fields;
diff --git 
a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/test/java/org/apache/nifi/processors/hl7/TestExtractHL7Attributes.java
 
b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/test/java/org/apache/nifi/processors/hl7/TestExtractHL7Attributes.java
index e1b5fdc51d..8f02e3b05f 100644
--- 
a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/test/java/org/apache/nifi/processors/hl7/TestExtractHL7Attributes.java
+++ 
b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/test/java/org/apache/nifi/processors/hl7/TestExtractHL7Attributes.java
@@ -22,7 +22,6 @@ import org.apache.nifi.util.TestRunners;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 
-import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.SortedMap;
@@ -105,17 +104,17 @@ public class TestExtractHL7Attributes {
 expectedAttributes.put("MSH.12", "2.3");
 
 expectedAttributes.put("ORC_1.1", "NW");
-expectedAttributes.put("ORC_1.2", "987654321^EPC");
+expectedAttributes.put("ORC_1.2_1", "987654321^EPC");
 expectedAttributes.put("ORC_1.3", "123456789^EPC");
 expectedAttributes.put("ORC_1.9", "2016100300");
-expectedAttributes.put("ORC_1.12", "SMITH");
+expectedAttributes.put("ORC_1.12_1", "SMITH");
 
 expectedAttributes.put("OBR_1.1", "1");
-expectedAttributes.put("OBR_1.2", "341856649^HNAM_ORDERID");
+expectedAttributes.put("OBR_1.2_1", "341856649^HNAM_ORDERID");
 expectedAttributes.put("OBR_1.3", "00");
 expectedAttributes.put("OBR_1.4", "648088^Basic Metabolic Panel");
 expectedAttributes.put("OBR_1.7&

(nifi) branch support/nifi-1.x updated: NIFI-6730 AMQP QoS support

2023-12-14 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 3678a4bca3 NIFI-6730 AMQP QoS support
3678a4bca3 is described below

commit 3678a4bca3447bcf8f70efc19bbec33894a44f6f
Author: Mikhail Sapozhnikov 
AuthorDate: Fri Dec 8 14:38:14 2023 +0300

NIFI-6730 AMQP QoS support

This closes #8146.

Signed-off-by: Peter Turcsanyi 
(cherry picked from commit 76613a0ed4a90c5e264e0537990278ec9e422536)
---
 .../apache/nifi/amqp/processors/AMQPConsumer.java  |  4 +++-
 .../apache/nifi/amqp/processors/ConsumeAMQP.java   | 15 +++-
 .../nifi/amqp/processors/AMQPConsumerTest.java | 28 +++---
 .../nifi/amqp/processors/ConsumeAMQPTest.java  |  4 +++-
 .../apache/nifi/amqp/processors/TestChannel.java   |  6 -
 5 files changed, 45 insertions(+), 12 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/AMQPConsumer.java
 
b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/AMQPConsumer.java
index e11044845c..04f951836a 100644
--- 
a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/AMQPConsumer.java
+++ 
b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/AMQPConsumer.java
@@ -43,7 +43,8 @@ final class AMQPConsumer extends AMQPWorker {
 private final boolean autoAcknowledge;
 private final Consumer consumer;
 
-AMQPConsumer(final Connection connection, final String queueName, final 
boolean autoAcknowledge, ComponentLog processorLog) throws IOException {
+AMQPConsumer(final Connection connection, final String queueName, final 
boolean autoAcknowledge, final int prefetchCount,
+ComponentLog processorLog) throws IOException {
 super(connection, processorLog);
 this.validateStringProperty("queueName", queueName);
 this.queueName = queueName;
@@ -80,6 +81,7 @@ final class AMQPConsumer extends AMQPWorker {
 }
 };
 
+channel.basicQos(prefetchCount);
 channel.basicConsume(queueName, autoAcknowledge, consumer);
 }
 
diff --git 
a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java
 
b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java
index 2a7f68d48a..771db0e32a 100644
--- 
a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java
+++ 
b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java
@@ -112,6 +112,17 @@ public class ConsumeAMQP extends 
AbstractAMQPProcessor {
 .defaultValue("10")
 .required(true)
 .build();
+static final PropertyDescriptor PREFETCH_COUNT = new 
PropertyDescriptor.Builder()
+.name("prefetch.count")
+.displayName("Prefetch Count")
+.description("The maximum number of unacknowledged messages for the 
consumer. If consumer has this number of unacknowledged messages, AMQP broker 
will "
+   + "no longer send new messages until consumer acknowledges some 
of the messages already delivered to it."
+   + "Allowed values: from 0 to 65535. 0 means no limit")
+.addValidator(StandardValidators.createLongValidator(0, 65535, true))
+.expressionLanguageSupported(ExpressionLanguageScope.NONE)
+.defaultValue("0")
+.required(true)
+.build();
 
 public static final PropertyDescriptor HEADER_FORMAT = new 
PropertyDescriptor.Builder()
 .name("header.format")
@@ -167,6 +178,7 @@ public class ConsumeAMQP extends 
AbstractAMQPProcessor {
 properties.add(QUEUE);
 properties.add(AUTO_ACKNOWLEDGE);
 properties.add(BATCH_SIZE);
+properties.add(PREFETCH_COUNT);
 properties.add(HEADER_FORMAT);
 properties.add(HEADER_KEY_PREFIX);
 properties.add(HEADER_SEPARATOR);
@@ -301,7 +313,8 @@ public class ConsumeAMQP extends 
AbstractAMQPProcessor {
 try {
 final String queueName = context.getProperty(QUEUE).getValue();
 final boolean autoAcknowledge = 
context.getProperty(AUTO_ACKNOWLEDGE).asBoolean();
-final AMQPConsumer amqpConsumer = new AMQPConsumer(connection, 
queueName, autoAcknowledge, getLogger());
+final int prefetchCount =  
context.getProperty(PREFETCH_COUNT).asInteger();
+final AMQPConsumer amqpConsumer = new AMQPConsumer(connection, 
queueName, autoAckn

(nifi) branch main updated: NIFI-6730 AMQP QoS support

2023-12-14 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 76613a0ed4 NIFI-6730 AMQP QoS support
76613a0ed4 is described below

commit 76613a0ed4a90c5e264e0537990278ec9e422536
Author: Mikhail Sapozhnikov 
AuthorDate: Fri Dec 8 14:38:14 2023 +0300

NIFI-6730 AMQP QoS support

This closes #8146.

Signed-off-by: Peter Turcsanyi 
---
 .../apache/nifi/amqp/processors/AMQPConsumer.java  |  4 +++-
 .../apache/nifi/amqp/processors/ConsumeAMQP.java   | 15 +++-
 .../nifi/amqp/processors/AMQPConsumerTest.java | 28 +++---
 .../nifi/amqp/processors/ConsumeAMQPTest.java  |  4 +++-
 .../apache/nifi/amqp/processors/TestChannel.java   |  6 -
 5 files changed, 45 insertions(+), 12 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/AMQPConsumer.java
 
b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/AMQPConsumer.java
index e11044845c..04f951836a 100644
--- 
a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/AMQPConsumer.java
+++ 
b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/AMQPConsumer.java
@@ -43,7 +43,8 @@ final class AMQPConsumer extends AMQPWorker {
 private final boolean autoAcknowledge;
 private final Consumer consumer;
 
-AMQPConsumer(final Connection connection, final String queueName, final 
boolean autoAcknowledge, ComponentLog processorLog) throws IOException {
+AMQPConsumer(final Connection connection, final String queueName, final 
boolean autoAcknowledge, final int prefetchCount,
+ComponentLog processorLog) throws IOException {
 super(connection, processorLog);
 this.validateStringProperty("queueName", queueName);
 this.queueName = queueName;
@@ -80,6 +81,7 @@ final class AMQPConsumer extends AMQPWorker {
 }
 };
 
+channel.basicQos(prefetchCount);
 channel.basicConsume(queueName, autoAcknowledge, consumer);
 }
 
diff --git 
a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java
 
b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java
index 23552d6430..6d4e5d01a6 100644
--- 
a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java
+++ 
b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java
@@ -112,6 +112,17 @@ public class ConsumeAMQP extends 
AbstractAMQPProcessor {
 .defaultValue("10")
 .required(true)
 .build();
+static final PropertyDescriptor PREFETCH_COUNT = new 
PropertyDescriptor.Builder()
+.name("prefetch.count")
+.displayName("Prefetch Count")
+.description("The maximum number of unacknowledged messages for the 
consumer. If consumer has this number of unacknowledged messages, AMQP broker 
will "
+   + "no longer send new messages until consumer acknowledges some 
of the messages already delivered to it."
+   + "Allowed values: from 0 to 65535. 0 means no limit")
+.addValidator(StandardValidators.createLongValidator(0, 65535, true))
+.expressionLanguageSupported(ExpressionLanguageScope.NONE)
+.defaultValue("0")
+.required(true)
+.build();
 
 public static final PropertyDescriptor HEADER_FORMAT = new 
PropertyDescriptor.Builder()
 .name("header.format")
@@ -167,6 +178,7 @@ public class ConsumeAMQP extends 
AbstractAMQPProcessor {
 properties.add(QUEUE);
 properties.add(AUTO_ACKNOWLEDGE);
 properties.add(BATCH_SIZE);
+properties.add(PREFETCH_COUNT);
 properties.add(HEADER_FORMAT);
 properties.add(HEADER_KEY_PREFIX);
 properties.add(HEADER_SEPARATOR);
@@ -301,7 +313,8 @@ public class ConsumeAMQP extends 
AbstractAMQPProcessor {
 try {
 final String queueName = context.getProperty(QUEUE).getValue();
 final boolean autoAcknowledge = 
context.getProperty(AUTO_ACKNOWLEDGE).asBoolean();
-final AMQPConsumer amqpConsumer = new AMQPConsumer(connection, 
queueName, autoAcknowledge, getLogger());
+final int prefetchCount =  
context.getProperty(PREFETCH_COUNT).asInteger();
+final AMQPConsumer amqpConsumer = new AMQPConsumer(connection, 
queueName, autoAcknowledge, prefetchCount, getLogger());
 
 return amqpConsumer;
 } catch (final I

(nifi) branch support/nifi-1.x updated: NIFI-12509 Changing default TTL of HazelcastMapCacheClient

2023-12-14 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new dc3b8cea86 NIFI-12509 Changing default TTL of HazelcastMapCacheClient
dc3b8cea86 is described below

commit dc3b8cea86183657aaa9d01e3d36679d7f6b1339
Author: Bence Simon 
AuthorDate: Wed Dec 13 11:54:05 2023 +0100

NIFI-12509 Changing default TTL of HazelcastMapCacheClient

This closes #8154.

Signed-off-by: Peter Turcsanyi 
(cherry picked from commit 3c4ccd2c647d593c04c0f2dac85df7c5a652d885)
---
 .../nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java
 
b/nifi-nar-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java
index a801d3a681..f973aaf1db 100644
--- 
a/nifi-nar-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java
+++ 
b/nifi-nar-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java
@@ -78,10 +78,11 @@ public class HazelcastMapCacheClient extends 
AbstractControllerService implement
 .name("hazelcast-entry-ttl")
 .displayName("Hazelcast Entry Lifetime")
 .description("Indicates how long the written entries should exist 
in Hazelcast. Setting it to '0 secs' means that the data" +
-"will exists until its deletion or until the Hazelcast 
server is shut down.")
+"will exists until its deletion or until the Hazelcast 
server is shut down. Using `EmbeddedHazelcastCacheManager` as" +
+"cache manager will not provide policies to limit the size 
of the cache.")
 .required(true)
 .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
-.defaultValue("0 secs") // Note: in case of Hazelcast IMap, 
negative value would mean "map default" which might be overridden by a 
different client.
+.defaultValue("5 min") // Note: in case of Hazelcast IMap, 
negative value would mean "map default" which might be overridden by a 
different client.
 .build();
 
 private static final long STARTING_REVISION = 1;



(nifi) branch main updated: NIFI-12509 Changing default TTL of HazelcastMapCacheClient

2023-12-14 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 3c4ccd2c64 NIFI-12509 Changing default TTL of HazelcastMapCacheClient
3c4ccd2c64 is described below

commit 3c4ccd2c647d593c04c0f2dac85df7c5a652d885
Author: Bence Simon 
AuthorDate: Wed Dec 13 11:54:05 2023 +0100

NIFI-12509 Changing default TTL of HazelcastMapCacheClient

This closes #8154.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java
 
b/nifi-nar-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java
index 0d4eb4f952..d3aec44729 100644
--- 
a/nifi-nar-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java
+++ 
b/nifi-nar-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java
@@ -78,10 +78,11 @@ public class HazelcastMapCacheClient extends 
AbstractControllerService implement
 .name("hazelcast-entry-ttl")
 .displayName("Hazelcast Entry Lifetime")
 .description("Indicates how long the written entries should exist 
in Hazelcast. Setting it to '0 secs' means that the data" +
-"will exists until its deletion or until the Hazelcast 
server is shut down.")
+"will exists until its deletion or until the Hazelcast 
server is shut down. Using `EmbeddedHazelcastCacheManager` as" +
+"cache manager will not provide policies to limit the size 
of the cache.")
 .required(true)
 .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
-.defaultValue("0 secs") // Note: in case of Hazelcast IMap, 
negative value would mean "map default" which might be overridden by a 
different client.
+.defaultValue("5 min") // Note: in case of Hazelcast IMap, 
negative value would mean "map default" which might be overridden by a 
different client.
 .build();
 
 private static final long STARTING_REVISION = 1;



(nifi) branch support/nifi-1.x updated: NIFI-12105: remove unnecessary checking of compressed content from testAdditionalNarDirectoriesSpecified test case

2023-12-11 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new a3e7998139 NIFI-12105: remove unnecessary checking of compressed 
content from testAdditionalNarDirectoriesSpecified test case
a3e7998139 is described below

commit a3e7998139f3c943339b05987ca568f5ff38723f
Author: Peter Gyori 
AuthorDate: Mon Dec 11 16:07:26 2023 +0100

NIFI-12105: remove unnecessary checking of compressed content from 
testAdditionalNarDirectoriesSpecified test case

This closes #8150.

Signed-off-by: Peter Turcsanyi 
(cherry picked from commit 78b822c4520ff59653ffca1ca464a6919d000b03)
---
 .../org/apache/nifi/processors/stateless/TestExecuteStateless.java | 3 ---
 1 file changed, 3 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/src/test/java/org/apache/nifi/processors/stateless/TestExecuteStateless.java
 
b/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/src/test/java/org/apache/nifi/processors/stateless/TestExecuteStateless.java
index d7f184810c..c37b626b09 100644
--- 
a/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/src/test/java/org/apache/nifi/processors/stateless/TestExecuteStateless.java
+++ 
b/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/src/test/java/org/apache/nifi/processors/stateless/TestExecuteStateless.java
@@ -29,7 +29,6 @@ import java.util.Collections;
 import java.util.List;
 import java.util.function.Function;
 
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
@@ -227,7 +226,6 @@ public class TestExecuteStateless {
 runner.setProperty(ExecuteStateless.ADDITIONAL_LIB_DIRECTORIES, 
JSLT_DIR + "," + COMPRESS_DIR);
 runner.setProperty(ExecuteStateless.INPUT_PORT, "input");
 runner.setProperty(ExecuteStateless.FAILURE_PORTS, "failure");
-byte[] expectedContents = new byte[] {31, -117, 8, 0, 0, 0, 0, 0, 0, 
-1, 83, -14, -54, -49, -56, 83, 2, 0, 118, 63, 122, -30, 6, 0, 0, 0};
 
 runner.enqueue(JSON_OBJECT.getBytes(), Collections.singletonMap("abc", 
"xyz"));
 runner.run();
@@ -235,7 +233,6 @@ public class TestExecuteStateless {
 runner.assertTransferCount(ExecuteStateless.REL_OUTPUT, 1);
 final List output = 
runner.getFlowFilesForRelationship(ExecuteStateless.REL_OUTPUT);
 output.get(0).assertAttributeEquals("mime.type", "application/gzip");
-assertArrayEquals(output.get(0).getData(), expectedContents);
 }
 
 private void testBulletinSurfaced(final String logLevel, final boolean 
shouldBeSurfaced, final Function> 
getMessageFunction) {



(nifi) branch main updated: NIFI-12105: remove unnecessary checking of compressed content from testAdditionalNarDirectoriesSpecified test case

2023-12-11 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 78b822c452 NIFI-12105: remove unnecessary checking of compressed 
content from testAdditionalNarDirectoriesSpecified test case
78b822c452 is described below

commit 78b822c4520ff59653ffca1ca464a6919d000b03
Author: Peter Gyori 
AuthorDate: Mon Dec 11 16:07:26 2023 +0100

NIFI-12105: remove unnecessary checking of compressed content from 
testAdditionalNarDirectoriesSpecified test case

This closes #8150.

Signed-off-by: Peter Turcsanyi 
---
 .../org/apache/nifi/processors/stateless/TestExecuteStateless.java | 3 ---
 1 file changed, 3 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/src/test/java/org/apache/nifi/processors/stateless/TestExecuteStateless.java
 
b/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/src/test/java/org/apache/nifi/processors/stateless/TestExecuteStateless.java
index d7f184810c..c37b626b09 100644
--- 
a/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/src/test/java/org/apache/nifi/processors/stateless/TestExecuteStateless.java
+++ 
b/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/src/test/java/org/apache/nifi/processors/stateless/TestExecuteStateless.java
@@ -29,7 +29,6 @@ import java.util.Collections;
 import java.util.List;
 import java.util.function.Function;
 
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
@@ -227,7 +226,6 @@ public class TestExecuteStateless {
 runner.setProperty(ExecuteStateless.ADDITIONAL_LIB_DIRECTORIES, 
JSLT_DIR + "," + COMPRESS_DIR);
 runner.setProperty(ExecuteStateless.INPUT_PORT, "input");
 runner.setProperty(ExecuteStateless.FAILURE_PORTS, "failure");
-byte[] expectedContents = new byte[] {31, -117, 8, 0, 0, 0, 0, 0, 0, 
-1, 83, -14, -54, -49, -56, 83, 2, 0, 118, 63, 122, -30, 6, 0, 0, 0};
 
 runner.enqueue(JSON_OBJECT.getBytes(), Collections.singletonMap("abc", 
"xyz"));
 runner.run();
@@ -235,7 +233,6 @@ public class TestExecuteStateless {
 runner.assertTransferCount(ExecuteStateless.REL_OUTPUT, 1);
 final List output = 
runner.getFlowFilesForRelationship(ExecuteStateless.REL_OUTPUT);
 output.get(0).assertAttributeEquals("mime.type", "application/gzip");
-assertArrayEquals(output.get(0).getData(), expectedContents);
 }
 
 private void testBulletinSurfaced(final String logLevel, final boolean 
shouldBeSurfaced, final Function> 
getMessageFunction) {



(nifi) branch support/nifi-1.x updated: NIFI-12105 Corrected internal snapshot dependency version for nifi 1.x line

2023-12-09 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new f2ba912770 NIFI-12105 Corrected internal snapshot dependency version 
for nifi 1.x line
f2ba912770 is described below

commit f2ba91277064beeda939360f042a09d8f415ef06
Author: Peter Turcsanyi 
AuthorDate: Sat Dec 9 13:31:17 2023 +0100

NIFI-12105 Corrected internal snapshot dependency version for nifi 1.x line

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-stateless-processor-tests/pom.xml  | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/pom.xml
 
b/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/pom.xml
index 9e39990f7e..996dd5d740 100644
--- 
a/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/pom.xml
+++ 
b/nifi-nar-bundles/nifi-stateless-processor-bundle/nifi-stateless-processor-tests/pom.xml
@@ -126,21 +126,21 @@
 
 org.apache.nifi
 nifi-property-utils
-2.0.0-SNAPSHOT
+1.25.0-SNAPSHOT
 compile
 jar
 
 
 org.apache.nifi
 nifi-jslt-nar
-2.0.0-SNAPSHOT
+1.25.0-SNAPSHOT
 compile
 nar
 
 
 org.apache.nifi
 nifi-compress-nar
-2.0.0-SNAPSHOT
+1.25.0-SNAPSHOT
 compile
 nar
 



(nifi) branch support/nifi-1.x updated: NIFI-12483 Corrected String and Predicate API calls for Java 8

2023-12-09 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 0661c45585 NIFI-12483 Corrected String and Predicate API calls for 
Java 8
0661c45585 is described below

commit 0661c45585af58ca96b4f3465d54771082cf9411
Author: Peter Turcsanyi 
AuthorDate: Sat Dec 9 13:19:01 2023 +0100

NIFI-12483 Corrected String and Predicate API calls for Java 8

Signed-off-by: Peter Turcsanyi 
---
 .../java/org/apache/nifi/web/api/metrics/jmx/JmxMetricsFilter.java| 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/metrics/jmx/JmxMetricsFilter.java
 
b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/metrics/jmx/JmxMetricsFilter.java
index 58c6596218..a0662d7600 100644
--- 
a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/metrics/jmx/JmxMetricsFilter.java
+++ 
b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/metrics/jmx/JmxMetricsFilter.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api.metrics.jmx;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.web.api.dto.JmxMetricsResultDTO;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -24,7 +25,6 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Set;
-import java.util.function.Predicate;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 import java.util.stream.Collectors;
@@ -64,7 +64,7 @@ public class JmxMetricsFilter {
 filter.split(NAME_SEPARATOR)).map(
 name -> name.replaceAll(REPLACE_CHARACTERS, EMPTY)
 )
-.filter(Predicate.not(String::isBlank))
+.filter(StringUtils::isNotBlank)
 .collect(Collectors.toSet());
 }
 }



(nifi) branch support/nifi-1.x updated: NIFI-12364 Upgrade snowflake-ingest-sdk to 2.0.4 and snowflake-jdbc to 3.14.3

2023-11-14 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new e0c12be1bb NIFI-12364 Upgrade snowflake-ingest-sdk to 2.0.4 and 
snowflake-jdbc to 3.14.3
e0c12be1bb is described below

commit e0c12be1bb83ef66182392d1bd976a41756503f7
Author: mr1716 
AuthorDate: Mon Nov 13 15:08:20 2023 -0500

NIFI-12364 Upgrade snowflake-ingest-sdk to 2.0.4 and snowflake-jdbc to 
3.14.3

This closes #8022.

Signed-off-by: Peter Turcsanyi 
(cherry picked from commit e0527a81bbaad69b0fef9b95b34aa8579c9250b2)
---
 nifi-nar-bundles/nifi-snowflake-bundle/pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml 
b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
index e1e48812d2..b7e7442199 100644
--- a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
@@ -36,13 +36,13 @@
 
 net.snowflake
 snowflake-ingest-sdk
-2.0.3
+2.0.4
 
 
 net.snowflake
 snowflake-jdbc
 
-3.13.33
+3.14.3
 
 
 



(nifi) branch main updated: NIFI-12364 Upgrade snowflake-ingest-sdk to 2.0.4 and snowflake-jdbc to 3.14.3

2023-11-14 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new e0527a81bb NIFI-12364 Upgrade snowflake-ingest-sdk to 2.0.4 and 
snowflake-jdbc to 3.14.3
e0527a81bb is described below

commit e0527a81bbaad69b0fef9b95b34aa8579c9250b2
Author: mr1716 
AuthorDate: Mon Nov 13 15:08:20 2023 -0500

NIFI-12364 Upgrade snowflake-ingest-sdk to 2.0.4 and snowflake-jdbc to 
3.14.3

This closes #8022.

Signed-off-by: Peter Turcsanyi 
---
 nifi-nar-bundles/nifi-snowflake-bundle/pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml 
b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
index 1e2bfd8e43..059db5453b 100644
--- a/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-snowflake-bundle/pom.xml
@@ -36,13 +36,13 @@
 
 net.snowflake
 snowflake-ingest-sdk
-2.0.3
+2.0.4
 
 
 net.snowflake
 snowflake-jdbc
 
-3.13.33
+3.14.3
 
 
 



[nifi] branch support/nifi-1.x updated: NIFI-12022 Extract verification logic from JMSConnectionFactoryProvider

2023-09-21 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new ed1ab84cd8 NIFI-12022 Extract verification logic from 
JMSConnectionFactoryProvider
ed1ab84cd8 is described below

commit ed1ab84cd8e14ade0da226fbf68e5e7608c15ed7
Author: Nandor Soma Abonyi 
AuthorDate: Mon Aug 28 10:39:30 2023 +0200

NIFI-12022 Extract verification logic from JMSConnectionFactoryProvider

This closes #7667.

Signed-off-by: Peter Turcsanyi 
(cherry picked from commit badbc1aca66684187093ff82fde6bfb28db549c7)
---
 .../cf/AbstractJMSConnectionFactoryProvider.java}  |  66 ++-
 .../jms/cf/CachedJMSConnectionFactoryHandler.java  |  53 +
 .../cf/JMSConnectionFactoryHandlerDefinition.java} |  20 +---
 .../nifi/jms/cf/JMSConnectionFactoryHandler.java   |  80 ++---
 .../nifi/jms/cf/JMSConnectionFactoryProvider.java  | 126 +
 .../jms/cf/JndiJmsConnectionFactoryHandler.java|  24 +---
 .../jms/cf/JMSConnectionFactoryHandlerForTest.java |   6 +-
 .../cf/JMSConnectionFactoryProviderForTest.java|   1 -
 8 files changed, 111 insertions(+), 265 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JMSConnectionFactoryProvider.java
 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-cf-service/src/main/java/org/apache/nifi/jms/cf/AbstractJMSConnectionFactoryProvider.java
similarity index 64%
copy from 
nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JMSConnectionFactoryProvider.java
copy to 
nifi-nar-bundles/nifi-jms-bundle/nifi-jms-cf-service/src/main/java/org/apache/nifi/jms/cf/AbstractJMSConnectionFactoryProvider.java
index 8a8c4b12fe..a2be311727 100644
--- 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JMSConnectionFactoryProvider.java
+++ 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-cf-service/src/main/java/org/apache/nifi/jms/cf/AbstractJMSConnectionFactoryProvider.java
@@ -16,23 +16,14 @@
  */
 package org.apache.nifi.jms.cf;
 
-import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.behavior.Restricted;
-import org.apache.nifi.annotation.behavior.Restriction;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.SeeAlso;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnDisabled;
 import org.apache.nifi.annotation.lifecycle.OnEnabled;
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.RequiredPermission;
-import org.apache.nifi.controller.AbstractControllerService;
-import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.components.ConfigVerificationResult;
 import org.apache.nifi.components.ConfigVerificationResult.Outcome;
+import org.apache.nifi.controller.AbstractControllerService;
+import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.controller.VerifiableControllerService;
+import org.apache.nifi.logging.ComponentLog;
 
 import javax.jms.Connection;
 import javax.jms.ConnectionFactory;
@@ -45,52 +36,19 @@ import java.util.Map;
 import java.util.concurrent.atomic.AtomicReference;
 
 /**
- * Provides a factory service that creates and initializes
- * {@link ConnectionFactory} specific to the third party JMS system.
- * 
- * It accomplishes it by adjusting current classpath by adding to it the
- * additional resources (i.e., JMS client libraries) provided by the user via
- * {@link JMSConnectionFactoryProperties#JMS_CLIENT_LIBRARIES}, allowing it 
then to create an instance of the
- * target {@link ConnectionFactory} based on the provided
- * {@link JMSConnectionFactoryProperties#JMS_CONNECTION_FACTORY_IMPL} which 
can be then accessed via
- * {@link #getConnectionFactory()} method.
+ * Base JMS controller service implementation that provides verification logic.
  */
-@Tags({"jms", "messaging", "integration", "queue", "topic", "publish", 
"subscribe"})
-@CapabilityDescription("Provides a generic service to create vendor specific 
javax.jms.ConnectionFactory implementations. "
-+ "The Connection Factory can be served once this service is 
configured successfully.")
-@DynamicProperty(name = "The name of a Connection Factory configuration 
property.", value = "The value of a given Connection Factory configuration 
property.",
-description = "The properties that are set following Java Beans 
convention where a property name is derived f

[nifi] branch main updated: NIFI-12022 Extract verification logic from JMSConnectionFactoryProvider

2023-09-21 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new badbc1aca6 NIFI-12022 Extract verification logic from 
JMSConnectionFactoryProvider
badbc1aca6 is described below

commit badbc1aca66684187093ff82fde6bfb28db549c7
Author: Nandor Soma Abonyi 
AuthorDate: Mon Aug 28 10:39:30 2023 +0200

NIFI-12022 Extract verification logic from JMSConnectionFactoryProvider

This closes #7667.

Signed-off-by: Peter Turcsanyi 
---
 .../cf/AbstractJMSConnectionFactoryProvider.java}  |  66 ++-
 .../jms/cf/CachedJMSConnectionFactoryHandler.java  |  53 +
 .../cf/JMSConnectionFactoryHandlerDefinition.java} |  20 +---
 .../nifi/jms/cf/JMSConnectionFactoryHandler.java   |  80 ++---
 .../nifi/jms/cf/JMSConnectionFactoryProvider.java  | 126 +
 .../jms/cf/JndiJmsConnectionFactoryHandler.java|  24 +---
 .../jms/cf/JMSConnectionFactoryHandlerForTest.java |   6 +-
 .../cf/JMSConnectionFactoryProviderForTest.java|   1 -
 8 files changed, 111 insertions(+), 265 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JMSConnectionFactoryProvider.java
 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-cf-service/src/main/java/org/apache/nifi/jms/cf/AbstractJMSConnectionFactoryProvider.java
similarity index 64%
copy from 
nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JMSConnectionFactoryProvider.java
copy to 
nifi-nar-bundles/nifi-jms-bundle/nifi-jms-cf-service/src/main/java/org/apache/nifi/jms/cf/AbstractJMSConnectionFactoryProvider.java
index 8a8c4b12fe..a2be311727 100644
--- 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JMSConnectionFactoryProvider.java
+++ 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-cf-service/src/main/java/org/apache/nifi/jms/cf/AbstractJMSConnectionFactoryProvider.java
@@ -16,23 +16,14 @@
  */
 package org.apache.nifi.jms.cf;
 
-import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.behavior.Restricted;
-import org.apache.nifi.annotation.behavior.Restriction;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.SeeAlso;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnDisabled;
 import org.apache.nifi.annotation.lifecycle.OnEnabled;
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.RequiredPermission;
-import org.apache.nifi.controller.AbstractControllerService;
-import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.components.ConfigVerificationResult;
 import org.apache.nifi.components.ConfigVerificationResult.Outcome;
+import org.apache.nifi.controller.AbstractControllerService;
+import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.controller.VerifiableControllerService;
+import org.apache.nifi.logging.ComponentLog;
 
 import javax.jms.Connection;
 import javax.jms.ConnectionFactory;
@@ -45,52 +36,19 @@ import java.util.Map;
 import java.util.concurrent.atomic.AtomicReference;
 
 /**
- * Provides a factory service that creates and initializes
- * {@link ConnectionFactory} specific to the third party JMS system.
- * 
- * It accomplishes it by adjusting current classpath by adding to it the
- * additional resources (i.e., JMS client libraries) provided by the user via
- * {@link JMSConnectionFactoryProperties#JMS_CLIENT_LIBRARIES}, allowing it 
then to create an instance of the
- * target {@link ConnectionFactory} based on the provided
- * {@link JMSConnectionFactoryProperties#JMS_CONNECTION_FACTORY_IMPL} which 
can be then accessed via
- * {@link #getConnectionFactory()} method.
+ * Base JMS controller service implementation that provides verification logic.
  */
-@Tags({"jms", "messaging", "integration", "queue", "topic", "publish", 
"subscribe"})
-@CapabilityDescription("Provides a generic service to create vendor specific 
javax.jms.ConnectionFactory implementations. "
-+ "The Connection Factory can be served once this service is 
configured successfully.")
-@DynamicProperty(name = "The name of a Connection Factory configuration 
property.", value = "The value of a given Connection Factory configuration 
property.",
-description = "The properties that are set following Java Beans 
convention where a property name is derived from the 'set*' method of the 
vendor "
-+ "specific C

[nifi] branch support/nifi-1.x updated: NIFI-12054: PutIceberg should produce a provenance send event

2023-09-14 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 9b15bbf6b9 NIFI-12054: PutIceberg should produce a provenance send 
event
9b15bbf6b9 is described below

commit 9b15bbf6b9005f9a3e1aece0932cdf6e517086e5
Author: Mark Bathori 
AuthorDate: Thu Sep 14 14:01:36 2023 +0200

NIFI-12054: PutIceberg should produce a provenance send event

This closes #7690.

Signed-off-by: Peter Turcsanyi 
---
 .../org/apache/nifi/processors/iceberg/PutIceberg.java   |  3 +++
 .../iceberg/TestPutIcebergWithHiveCatalog.java   | 16 
 2 files changed, 19 insertions(+)

diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
index 360ea17f1b..02bd0b074f 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
@@ -234,6 +234,7 @@ public class PutIceberg extends AbstractIcebergProcessor {
 
 @Override
 public void doOnTrigger(ProcessContext context, ProcessSession session, 
FlowFile flowFile) throws ProcessException {
+final long startNanos = System.nanoTime();
 final RecordReaderFactory readerFactory = 
context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
 final String fileFormat = context.getProperty(FILE_FORMAT).getValue();
 final String maximumFileSize = 
context.getProperty(MAXIMUM_FILE_SIZE).evaluateAttributeExpressions(flowFile).getValue();
@@ -281,6 +282,8 @@ public class PutIceberg extends AbstractIcebergProcessor {
 }
 
 flowFile = session.putAttribute(flowFile, ICEBERG_RECORD_COUNT, 
String.valueOf(recordCount));
+final long transferMillis = 
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
+session.getProvenanceReporter().send(flowFile, table.location(), 
transferMillis);
 session.transfer(flowFile, REL_SUCCESS);
 }
 
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergWithHiveCatalog.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergWithHiveCatalog.java
index c672d90e8b..bc159ef470 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergWithHiveCatalog.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergWithHiveCatalog.java
@@ -32,6 +32,8 @@ import org.apache.nifi.hive.metastore.ThriftMetastore;
 import org.apache.nifi.processors.iceberg.catalog.IcebergCatalogFactory;
 import org.apache.nifi.processors.iceberg.catalog.TestHiveCatalogService;
 import org.apache.nifi.processors.iceberg.util.IcebergTestUtils;
+import org.apache.nifi.provenance.ProvenanceEventRecord;
+import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.serialization.record.MockRecordParser;
 import org.apache.nifi.serialization.record.RecordField;
@@ -60,6 +62,8 @@ import static 
org.apache.nifi.processors.iceberg.PutIceberg.ICEBERG_RECORD_COUNT
 import static 
org.apache.nifi.processors.iceberg.util.IcebergTestUtils.validateData;
 import static 
org.apache.nifi.processors.iceberg.util.IcebergTestUtils.validateNumberOfDataFiles;
 import static 
org.apache.nifi.processors.iceberg.util.IcebergTestUtils.validatePartitionFolders;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.condition.OS.WINDOWS;
 
 @DisabledOnOs(WINDOWS)
@@ -174,6 +178,7 @@ public class TestPutIcebergWithHiveCatalog {
 validateNumberOfDataFiles(tableLocation, 3);
 validatePartitionFolders(tableLocation, Arrays.asList(
 "department_bucket=0", "department_bucket=1", 
"department_bucket=2"));
+assertProvenanceEvents();
 }
 
 @ParameterizedTest
@@ -211,6 +216,7 @@ public class TestPutIcebergWithHiveCatalog {
 validateNumberOfDataFiles(tableLocation, 3);
 validatePartitionFolders(tableLocation, Arrays.asList(
 "department=Finance", "department=Marketing", 
"department=Sales"));
+assertProvenanceEvents();
 

[nifi] branch main updated: NIFI-12054: PutIceberg should produce a provenance send event

2023-09-14 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new ea4c2055d6 NIFI-12054: PutIceberg should produce a provenance send 
event
ea4c2055d6 is described below

commit ea4c2055d6d884a13e23304fff2f72309587e785
Author: Mark Bathori 
AuthorDate: Thu Sep 14 14:01:36 2023 +0200

NIFI-12054: PutIceberg should produce a provenance send event

This closes #7690.

Signed-off-by: Peter Turcsanyi 
---
 .../org/apache/nifi/processors/iceberg/PutIceberg.java   |  3 +++
 .../iceberg/TestPutIcebergWithHiveCatalog.java   | 16 
 2 files changed, 19 insertions(+)

diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
index 360ea17f1b..02bd0b074f 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
@@ -234,6 +234,7 @@ public class PutIceberg extends AbstractIcebergProcessor {
 
 @Override
 public void doOnTrigger(ProcessContext context, ProcessSession session, 
FlowFile flowFile) throws ProcessException {
+final long startNanos = System.nanoTime();
 final RecordReaderFactory readerFactory = 
context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
 final String fileFormat = context.getProperty(FILE_FORMAT).getValue();
 final String maximumFileSize = 
context.getProperty(MAXIMUM_FILE_SIZE).evaluateAttributeExpressions(flowFile).getValue();
@@ -281,6 +282,8 @@ public class PutIceberg extends AbstractIcebergProcessor {
 }
 
 flowFile = session.putAttribute(flowFile, ICEBERG_RECORD_COUNT, 
String.valueOf(recordCount));
+final long transferMillis = 
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
+session.getProvenanceReporter().send(flowFile, table.location(), 
transferMillis);
 session.transfer(flowFile, REL_SUCCESS);
 }
 
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergWithHiveCatalog.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergWithHiveCatalog.java
index c672d90e8b..bc159ef470 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergWithHiveCatalog.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergWithHiveCatalog.java
@@ -32,6 +32,8 @@ import org.apache.nifi.hive.metastore.ThriftMetastore;
 import org.apache.nifi.processors.iceberg.catalog.IcebergCatalogFactory;
 import org.apache.nifi.processors.iceberg.catalog.TestHiveCatalogService;
 import org.apache.nifi.processors.iceberg.util.IcebergTestUtils;
+import org.apache.nifi.provenance.ProvenanceEventRecord;
+import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.serialization.record.MockRecordParser;
 import org.apache.nifi.serialization.record.RecordField;
@@ -60,6 +62,8 @@ import static 
org.apache.nifi.processors.iceberg.PutIceberg.ICEBERG_RECORD_COUNT
 import static 
org.apache.nifi.processors.iceberg.util.IcebergTestUtils.validateData;
 import static 
org.apache.nifi.processors.iceberg.util.IcebergTestUtils.validateNumberOfDataFiles;
 import static 
org.apache.nifi.processors.iceberg.util.IcebergTestUtils.validatePartitionFolders;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.condition.OS.WINDOWS;
 
 @DisabledOnOs(WINDOWS)
@@ -174,6 +178,7 @@ public class TestPutIcebergWithHiveCatalog {
 validateNumberOfDataFiles(tableLocation, 3);
 validatePartitionFolders(tableLocation, Arrays.asList(
 "department_bucket=0", "department_bucket=1", 
"department_bucket=2"));
+assertProvenanceEvents();
 }
 
 @ParameterizedTest
@@ -211,6 +216,7 @@ public class TestPutIcebergWithHiveCatalog {
 validateNumberOfDataFiles(tableLocation, 3);
 validatePartitionFolders(tableLocation, Arrays.asList(
 "department=Finance", "department=Marketing", 
"department=Sales"));
+assertProvenanceEvents();
 }
 
 @ParameterizedTest
@@

[nifi] branch support/nifi-1.x updated: NIFI-12014 NullPointerException in PutSQL when adding error attributes

2023-09-05 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 748a3149a1 NIFI-12014 NullPointerException in PutSQL when adding error 
attributes
748a3149a1 is described below

commit 748a3149a180f14ac50aca017462540f105f39de
Author: krisztina-zsihovszki 
AuthorDate: Thu Aug 31 18:29:04 2023 +0200

NIFI-12014 NullPointerException in PutSQL when adding error attributes

This closes #7666.

Signed-off-by: Peter Turcsanyi 
---
 .../apache/nifi/processors/standard/PutSQL.java| 36 +-
 .../nifi/processors/standard/TestPutSQL.java   | 34 +---
 2 files changed, 59 insertions(+), 11 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
index a8d9a95e3c..4c365c91f2 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.processors.standard;
 
+import java.util.Optional;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
@@ -75,9 +76,9 @@ import java.util.function.BiFunction;
 
 import static java.lang.String.format;
 import static java.lang.String.valueOf;
+import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
 import static java.util.stream.Collectors.toList;
-import static 
org.apache.nifi.processor.util.pattern.ErrorTypes.Destination.Failure;
 import static 
org.apache.nifi.processor.util.pattern.ExceptionHandler.createOnError;
 
 @SupportsBatching
@@ -462,15 +463,16 @@ public class PutSQL extends 
AbstractSessionFactoryProcessor {
 private ExceptionHandler.OnError 
onFlowFileError(final ProcessContext context, final ProcessSession session, 
final RoutingResult result) {
 ExceptionHandler.OnError onFlowFileError = 
createOnError(context, session, result, REL_FAILURE, REL_RETRY);
 onFlowFileError = onFlowFileError.andThen((ctx, flowFile, 
errorTypesResult, exception) -> {
-flowFile = addErrorAttributesToFlowFile(session, flowFile, 
exception);
 
 switch (errorTypesResult.destination()) {
 case Failure:
 getLogger().error("Failed to update database for {} due to 
{}; routing to failure", flowFile, exception, exception);
+addErrorAttributesToFlowFile(session, flowFile, exception);
 break;
 case Retry:
 getLogger().error("Failed to update database for {} due to 
{}; it is possible that retrying the operation will succeed, so routing to 
retry",
flowFile, exception, exception);
+addErrorAttributesToFlowFile(session, flowFile, exception);
 break;
 case Self:
 getLogger().error("Failed to update database for {} due to 
{};",  flowFile, exception, exception);
@@ -485,14 +487,26 @@ public class PutSQL extends 
AbstractSessionFactoryProcessor {
 ExceptionHandler.createOnGroupError(context, session, result, 
REL_FAILURE, REL_RETRY);
 
 onGroupError = onGroupError.andThen((ctx, flowFileGroup, 
errorTypesResult, exception) -> {
-Relationship relationship = errorTypesResult.destination() == 
Failure ? REL_FAILURE : REL_RETRY;
-List flowFilesToRelationship = 
result.getRoutedFlowFiles().get(relationship);
-result.getRoutedFlowFiles().put(relationship, 
addErrorAttributesToFlowFilesInGroup(session, flowFilesToRelationship, 
flowFileGroup.getFlowFiles(), exception));
+switch (errorTypesResult.destination()) {
+case Failure:
+List flowFilesToFailure = 
getFlowFilesOnRelationship(result, REL_FAILURE);
+result.getRoutedFlowFiles().put(REL_FAILURE, 
addErrorAttributesToFlowFilesInGroup(session, flowFilesToFailure, 
flowFileGroup.getFlowFiles(), exception));
+break;
+case Retry:
+List flowFilesToRetry = 
getFlowFilesOnRelationship(result, REL_RETRY);
+result.getRoutedFlowFiles().put(REL_RETRY, 
addErrorAttributesToFlowFilesInGroup(session, flowFilesToRetry, 
flowFileGroup.getFlowFiles(), exc

[nifi] branch main updated: NIFI-12014 NullPointerException in PutSQL when adding error attributes

2023-09-05 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 75cfe21e4c NIFI-12014 NullPointerException in PutSQL when adding error 
attributes
75cfe21e4c is described below

commit 75cfe21e4c37cc4a7340e1a44d5a91fa2663889c
Author: krisztina-zsihovszki 
AuthorDate: Thu Aug 31 18:29:04 2023 +0200

NIFI-12014 NullPointerException in PutSQL when adding error attributes

This closes #7666.

Signed-off-by: Peter Turcsanyi 
---
 .../apache/nifi/processors/standard/PutSQL.java| 36 +-
 .../nifi/processors/standard/TestPutSQL.java   | 34 +---
 2 files changed, 59 insertions(+), 11 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
index a8d9a95e3c..4c365c91f2 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.processors.standard;
 
+import java.util.Optional;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
@@ -75,9 +76,9 @@ import java.util.function.BiFunction;
 
 import static java.lang.String.format;
 import static java.lang.String.valueOf;
+import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
 import static java.util.stream.Collectors.toList;
-import static 
org.apache.nifi.processor.util.pattern.ErrorTypes.Destination.Failure;
 import static 
org.apache.nifi.processor.util.pattern.ExceptionHandler.createOnError;
 
 @SupportsBatching
@@ -462,15 +463,16 @@ public class PutSQL extends 
AbstractSessionFactoryProcessor {
 private ExceptionHandler.OnError 
onFlowFileError(final ProcessContext context, final ProcessSession session, 
final RoutingResult result) {
 ExceptionHandler.OnError onFlowFileError = 
createOnError(context, session, result, REL_FAILURE, REL_RETRY);
 onFlowFileError = onFlowFileError.andThen((ctx, flowFile, 
errorTypesResult, exception) -> {
-flowFile = addErrorAttributesToFlowFile(session, flowFile, 
exception);
 
 switch (errorTypesResult.destination()) {
 case Failure:
 getLogger().error("Failed to update database for {} due to 
{}; routing to failure", flowFile, exception, exception);
+addErrorAttributesToFlowFile(session, flowFile, exception);
 break;
 case Retry:
 getLogger().error("Failed to update database for {} due to 
{}; it is possible that retrying the operation will succeed, so routing to 
retry",
flowFile, exception, exception);
+addErrorAttributesToFlowFile(session, flowFile, exception);
 break;
 case Self:
 getLogger().error("Failed to update database for {} due to 
{};",  flowFile, exception, exception);
@@ -485,14 +487,26 @@ public class PutSQL extends 
AbstractSessionFactoryProcessor {
 ExceptionHandler.createOnGroupError(context, session, result, 
REL_FAILURE, REL_RETRY);
 
 onGroupError = onGroupError.andThen((ctx, flowFileGroup, 
errorTypesResult, exception) -> {
-Relationship relationship = errorTypesResult.destination() == 
Failure ? REL_FAILURE : REL_RETRY;
-List flowFilesToRelationship = 
result.getRoutedFlowFiles().get(relationship);
-result.getRoutedFlowFiles().put(relationship, 
addErrorAttributesToFlowFilesInGroup(session, flowFilesToRelationship, 
flowFileGroup.getFlowFiles(), exception));
+switch (errorTypesResult.destination()) {
+case Failure:
+List flowFilesToFailure = 
getFlowFilesOnRelationship(result, REL_FAILURE);
+result.getRoutedFlowFiles().put(REL_FAILURE, 
addErrorAttributesToFlowFilesInGroup(session, flowFilesToFailure, 
flowFileGroup.getFlowFiles(), exception));
+break;
+case Retry:
+List flowFilesToRetry = 
getFlowFilesOnRelationship(result, REL_RETRY);
+result.getRoutedFlowFiles().put(REL_RETRY, 
addErrorAttributesToFlowFilesInGroup(session, flowFilesToRetry, 
flowFileGroup.getFlowFiles(), exception));
+break;
+

[nifi] branch support/nifi-1.x updated: NIFI-11924: Corrected HDFSResourceInputStream for Java 8

2023-08-22 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new fb217406c7 NIFI-11924: Corrected HDFSResourceInputStream for Java 8
fb217406c7 is described below

commit fb217406c71762f1ffd169bf2db0fd1da180e83d
Author: Peter Turcsanyi 
AuthorDate: Tue Aug 22 19:10:06 2023 +0200

NIFI-11924: Corrected HDFSResourceInputStream for Java 8

Signed-off-by: Peter Turcsanyi 
---
 .../resource/hadoop/HDFSResourceInputStream.java   | 25 --
 1 file changed, 25 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/flow/resource/hadoop/HDFSResourceInputStream.java
 
b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/flow/resource/hadoop/HDFSResourceInputStream.java
index c8108b660b..8003207c20 100644
--- 
a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/flow/resource/hadoop/HDFSResourceInputStream.java
+++ 
b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/flow/resource/hadoop/HDFSResourceInputStream.java
@@ -48,31 +48,11 @@ final class HDFSResourceInputStream extends InputStream {
 return inputStream.read(b, off, len);
 }
 
-@Override
-public byte[] readAllBytes() throws IOException {
-return inputStream.readAllBytes();
-}
-
-@Override
-public byte[] readNBytes(final int len) throws IOException {
-return inputStream.readNBytes(len);
-}
-
-@Override
-public int readNBytes(final byte[] b, final int off, final int len) throws 
IOException {
-return inputStream.readNBytes(b, off, len);
-}
-
 @Override
 public long skip(final long n) throws IOException {
 return inputStream.skip(n);
 }
 
-@Override
-public void skipNBytes(final long n) throws IOException {
-inputStream.skipNBytes(n);
-}
-
 @Override
 public int available() throws IOException {
 return inputStream.available();
@@ -98,9 +78,4 @@ final class HDFSResourceInputStream extends InputStream {
 public boolean markSupported() {
 return inputStream.markSupported();
 }
-
-@Override
-public long transferTo(final OutputStream out) throws IOException {
-return inputStream.transferTo(out);
-}
 }



[nifi] branch support/nifi-1.x updated: NIFI-11924 Closing FileSystem after using in HDFSExternalResourceProvider

2023-08-22 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 5802db2667 NIFI-11924 Closing FileSystem after using in 
HDFSExternalResourceProvider
5802db2667 is described below

commit 5802db26677464e1297b773e40f65a9dac75a5ac
Author: Bence Simon 
AuthorDate: Wed Aug 9 13:36:26 2023 +0200

NIFI-11924 Closing FileSystem after using in HDFSExternalResourceProvider

This closes #7588.

Signed-off-by: Peter Turcsanyi 
---
 .../processors/hadoop/AbstractHadoopProcessor.java |  15 +--
 .../nifi/processors/hadoop/HDFSResourceHelper.java |  72 ++
 .../hadoop/HDFSExternalResourceProvider.java   |  28 --
 .../resource/hadoop/HDFSResourceInputStream.java   | 106 +
 4 files changed, 199 insertions(+), 22 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
index f594ed2549..960e7a5b7c 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
@@ -376,20 +376,7 @@ public abstract class AbstractHadoopProcessor extends 
AbstractProcessor implemen
 if (resources != null) {
 // Attempt to close the FileSystem
 final FileSystem fileSystem = resources.getFileSystem();
-try {
-interruptStatisticsThread(fileSystem);
-} catch (Exception e) {
-getLogger().warn("Error stopping FileSystem statistics thread: 
" + e.getMessage());
-getLogger().debug("", e);
-} finally {
-if (fileSystem != null) {
-try {
-fileSystem.close();
-} catch (IOException e) {
-getLogger().warn("Error close FileSystem: " + 
e.getMessage(), e);
-}
-}
-}
+HDFSResourceHelper.closeFileSystem(fileSystem);
 }
 
 // Clear out the reference to the resources
diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/HDFSResourceHelper.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/HDFSResourceHelper.java
new file mode 100644
index 00..18650ac33e
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/HDFSResourceHelper.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.hadoop;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+
+public final class HDFSResourceHelper {
+private static final Logger LOGGER = 
LoggerFactory.getLogger(HDFSResourceHelper.class);
+
+private HDFSResourceHelper() {
+// Not to be instantiated
+}
+
+public static void closeFileSystem(final FileSystem fileSystem) {
+try {
+interruptStatisticsThread(fileSystem);
+} catch (Exception e) {
+LOGGER.warn("Error stopping FileSystem statistics thread: " + 
e.getMessage());
+LOGGER.debug("", e);
+} finally {
+if (fileSystem != null) {
+try {
+fileSystem.close();
+} catch (IOException e) {
+LOGGER.warn("Error close FileSystem: " + e.getMessage(), 
e);
+}
+}
+}
+}
+
+pr

[nifi] branch main updated: NIFI-11924 Closing FileSystem after using in HDFSExternalResourceProvider

2023-08-22 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 7340bb8153 NIFI-11924 Closing FileSystem after using in 
HDFSExternalResourceProvider
7340bb8153 is described below

commit 7340bb81535d8095205f198f8363c7c70cc5d356
Author: Bence Simon 
AuthorDate: Wed Aug 9 13:36:26 2023 +0200

NIFI-11924 Closing FileSystem after using in HDFSExternalResourceProvider

This closes #7588.

Signed-off-by: Peter Turcsanyi 
---
 .../processors/hadoop/AbstractHadoopProcessor.java |  15 +--
 .../nifi/processors/hadoop/HDFSResourceHelper.java |  72 ++
 .../hadoop/HDFSExternalResourceProvider.java   |  28 --
 .../resource/hadoop/HDFSResourceInputStream.java   | 106 +
 4 files changed, 199 insertions(+), 22 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
index f594ed2549..960e7a5b7c 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
@@ -376,20 +376,7 @@ public abstract class AbstractHadoopProcessor extends 
AbstractProcessor implemen
 if (resources != null) {
 // Attempt to close the FileSystem
 final FileSystem fileSystem = resources.getFileSystem();
-try {
-interruptStatisticsThread(fileSystem);
-} catch (Exception e) {
-getLogger().warn("Error stopping FileSystem statistics thread: 
" + e.getMessage());
-getLogger().debug("", e);
-} finally {
-if (fileSystem != null) {
-try {
-fileSystem.close();
-} catch (IOException e) {
-getLogger().warn("Error close FileSystem: " + 
e.getMessage(), e);
-}
-}
-}
+HDFSResourceHelper.closeFileSystem(fileSystem);
 }
 
 // Clear out the reference to the resources
diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/HDFSResourceHelper.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/HDFSResourceHelper.java
new file mode 100644
index 00..18650ac33e
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/HDFSResourceHelper.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.hadoop;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+
+public final class HDFSResourceHelper {
+private static final Logger LOGGER = 
LoggerFactory.getLogger(HDFSResourceHelper.class);
+
+private HDFSResourceHelper() {
+// Not to be instantiated
+}
+
+public static void closeFileSystem(final FileSystem fileSystem) {
+try {
+interruptStatisticsThread(fileSystem);
+} catch (Exception e) {
+LOGGER.warn("Error stopping FileSystem statistics thread: " + 
e.getMessage());
+LOGGER.debug("", e);
+} finally {
+if (fileSystem != null) {
+try {
+fileSystem.close();
+} catch (IOException e) {
+LOGGER.warn("Error close FileSystem: " + e.getMessage(), 
e);
+}
+}
+}
+}
+
+private static void int

[nifi] branch support/nifi-1.x updated: NIFI-11916 Iceberg processor extensibility improvement

2023-08-08 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new c4b779605a NIFI-11916 Iceberg processor extensibility improvement
c4b779605a is described below

commit c4b779605ac170002da679fa48ba17361beff7c3
Author: krisztina-zsihovszki 
AuthorDate: Tue Aug 8 09:33:27 2023 +0200

NIFI-11916 Iceberg processor extensibility improvement

This closes #7583.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-iceberg-common/pom.xml| 128 +
 .../iceberg/AbstractIcebergProcessor.java  |   0
 .../nifi/processors/iceberg/IcebergUtils.java  |   0
 .../iceberg/catalog/IcebergCatalogFactory.java |   0
 .../iceberg/converter/ArrayElementGetter.java  |   0
 .../iceberg/converter/DataConverter.java   |   0
 .../iceberg/converter/GenericDataConverters.java   |   0
 .../iceberg/converter/IcebergRecordConverter.java  |   0
 .../iceberg/converter/RecordFieldGetter.java   |   0
 .../nifi-iceberg-processors/pom.xml|  88 --
 nifi-nar-bundles/nifi-iceberg-bundle/pom.xml   |   1 +
 11 files changed, 151 insertions(+), 66 deletions(-)

diff --git a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/pom.xml 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/pom.xml
new file mode 100644
index 00..89475841ad
--- /dev/null
+++ b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/pom.xml
@@ -0,0 +1,128 @@
+
+
+http://maven.apache.org/POM/4.0.0;
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance;
+xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd;>
+4.0.0
+
+org.apache.nifi
+nifi-iceberg-bundle
+1.24.0-SNAPSHOT
+
+
+nifi-iceberg-common
+jar
+
+
+
+
+
+org.apache.nifi
+nifi-api
+
+
+org.apache.nifi
+nifi-record
+
+
+org.apache.nifi
+nifi-utils
+1.24.0-SNAPSHOT
+
+
+org.apache.nifi
+nifi-record-serialization-service-api
+
+
+org.apache.nifi
+nifi-iceberg-services-api
+1.24.0-SNAPSHOT
+provided
+
+
+org.apache.nifi
+nifi-kerberos-user-service-api
+
+
+org.apache.nifi
+nifi-security-kerberos-api
+
+
+org.apache.nifi
+nifi-hadoop-utils
+1.24.0-SNAPSHOT
+
+
+
+
+org.apache.iceberg
+iceberg-core
+${iceberg.version}
+
+
+org.apache.iceberg
+iceberg-hive-metastore
+${iceberg.version}
+
+
+org.apache.iceberg
+iceberg-data
+${iceberg.version}
+
+
+org.apache.iceberg
+iceberg-parquet
+${iceberg.version}
+
+
+org.apache.iceberg
+iceberg-orc
+${iceberg.version}
+
+
+org.apache.hadoop
+hadoop-client
+${hadoop.version}
+
+
+log4j
+log4j
+
+
+org.slf4j
+slf4j-log4j12
+
+
+org.slf4j
+slf4j-reload4j
+
+
+commons-logging
+commons-logging
+
+
+javax.servlet
+javax.servlet-api
+
+
+
+
+commons-lang
+commons-lang
+2.6
+
+
+
\ No newline at end of file
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
similarity index 100%
rename from 
nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
rename to 
nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/IcebergUtils.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/src/main/java/or

[nifi] branch main updated: NIFI-11916 Iceberg processor extensibility improvement

2023-08-08 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 0446990d54 NIFI-11916 Iceberg processor extensibility improvement
0446990d54 is described below

commit 0446990d543d653f91a3e28654a8d57eb927e774
Author: krisztina-zsihovszki 
AuthorDate: Tue Aug 8 09:33:27 2023 +0200

NIFI-11916 Iceberg processor extensibility improvement

This closes #7583.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-iceberg-common/pom.xml| 128 +
 .../iceberg/AbstractIcebergProcessor.java  |   0
 .../nifi/processors/iceberg/IcebergUtils.java  |   0
 .../iceberg/catalog/IcebergCatalogFactory.java |   0
 .../iceberg/converter/ArrayElementGetter.java  |   0
 .../iceberg/converter/DataConverter.java   |   0
 .../iceberg/converter/GenericDataConverters.java   |   0
 .../iceberg/converter/IcebergRecordConverter.java  |   0
 .../iceberg/converter/RecordFieldGetter.java   |   0
 .../nifi-iceberg-processors/pom.xml|  88 --
 nifi-nar-bundles/nifi-iceberg-bundle/pom.xml   |   1 +
 11 files changed, 151 insertions(+), 66 deletions(-)

diff --git a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/pom.xml 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/pom.xml
new file mode 100644
index 00..2d1b987b18
--- /dev/null
+++ b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/pom.xml
@@ -0,0 +1,128 @@
+
+
+http://maven.apache.org/POM/4.0.0;
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance;
+xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd;>
+4.0.0
+
+org.apache.nifi
+nifi-iceberg-bundle
+2.0.0-SNAPSHOT
+
+
+nifi-iceberg-common
+jar
+
+
+
+
+
+org.apache.nifi
+nifi-api
+
+
+org.apache.nifi
+nifi-record
+
+
+org.apache.nifi
+nifi-utils
+2.0.0-SNAPSHOT
+
+
+org.apache.nifi
+nifi-record-serialization-service-api
+
+
+org.apache.nifi
+nifi-iceberg-services-api
+2.0.0-SNAPSHOT
+provided
+
+
+org.apache.nifi
+nifi-kerberos-user-service-api
+
+
+org.apache.nifi
+nifi-security-kerberos-api
+
+
+org.apache.nifi
+nifi-hadoop-utils
+2.0.0-SNAPSHOT
+
+
+
+
+org.apache.iceberg
+iceberg-core
+${iceberg.version}
+
+
+org.apache.iceberg
+iceberg-hive-metastore
+${iceberg.version}
+
+
+org.apache.iceberg
+iceberg-data
+${iceberg.version}
+
+
+org.apache.iceberg
+iceberg-parquet
+${iceberg.version}
+
+
+org.apache.iceberg
+iceberg-orc
+${iceberg.version}
+
+
+org.apache.hadoop
+hadoop-client
+${hadoop.version}
+
+
+log4j
+log4j
+
+
+org.slf4j
+slf4j-log4j12
+
+
+org.slf4j
+slf4j-reload4j
+
+
+commons-logging
+commons-logging
+
+
+javax.servlet
+javax.servlet-api
+
+
+
+
+commons-lang
+commons-lang
+2.6
+
+
+
\ No newline at end of file
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
similarity index 100%
rename from 
nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
rename to 
nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/IcebergUtils.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-common/src/main/java/org/apache/nifi/proce

[nifi] branch support/nifi-1.x updated: NIFI-11823 - fix NUMERIC support in PutBigQuery

2023-08-05 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 6ee0bea306 NIFI-11823 - fix NUMERIC support in PutBigQuery
6ee0bea306 is described below

commit 6ee0bea30650e0203f46483396aa3c6a35a5e2ef
Author: Pierre Villard 
AuthorDate: Mon Jul 17 16:55:24 2023 +0200

NIFI-11823 - fix NUMERIC support in PutBigQuery

This closes #7489.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-gcp-bundle/nifi-gcp-processors/pom.xml|   6 +
 .../nifi/processors/gcp/bigquery/PutBigQuery.java  |  15 ++-
 .../processors/gcp/bigquery/proto/ProtoUtils.java  | 124 +--
 .../processors/gcp/bigquery/PutBigQueryIT.java | 132 -
 .../src/test/resources/bigquery/avrodecimal.avro   | Bin 0 -> 30 bytes
 .../src/test/resources/bigquery/avrodecimal.avsc   |  19 +++
 .../src/test/resources/bigquery/avrofloat.avro | Bin 0 -> 126 bytes
 .../src/test/resources/bigquery/avrofloat.avsc |  14 +++
 .../src/test/resources/bigquery/avroint.avro   | Bin 0 -> 2 bytes
 .../src/test/resources/bigquery/avroint.avsc   |  14 +++
 .../bigquery/schema-correct-data-with-date.avsc|  12 ++
 ...streaming-correct-data-with-date-formatted.json |   7 +-
 12 files changed, 296 insertions(+), 47 deletions(-)

diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml
index 08c4af371f..0781e29bbb 100644
--- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml
@@ -198,6 +198,12 @@
 
src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker
 
src/test/resources/mock-gcp-service-account.json
 
src/test/resources/mock-gcp-application-default-credentials.json
+
src/test/resources/bigquery/avrodecimal.avsc
+
src/test/resources/bigquery/avrodecimal.avro
+
src/test/resources/bigquery/avrofloat.avsc
+
src/test/resources/bigquery/avrofloat.avro
+
src/test/resources/bigquery/avroint.avsc
+
src/test/resources/bigquery/avroint.avro
 
src/test/resources/bigquery/streaming-bad-data.json
 
src/test/resources/bigquery/streaming-correct-data.json
 
src/test/resources/bigquery/schema-correct-data-with-date.avsc
diff --git 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
index d3a274b9b1..e7b51093ad 100644
--- 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
+++ 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
@@ -40,6 +40,7 @@ import 
com.google.cloud.bigquery.storage.v1.ProtoSchemaConverter;
 import com.google.cloud.bigquery.storage.v1.StorageError;
 import com.google.cloud.bigquery.storage.v1.StreamWriter;
 import com.google.cloud.bigquery.storage.v1.TableName;
+import com.google.cloud.bigquery.storage.v1.TableSchema;
 import com.google.cloud.bigquery.storage.v1.WriteStream;
 import com.google.cloud.bigquery.storage.v1.stub.BigQueryWriteStubSettings;
 import com.google.protobuf.Descriptors;
@@ -224,9 +225,11 @@ public class PutBigQuery extends AbstractBigQueryProcessor 
{
 
 WriteStream writeStream;
 Descriptors.Descriptor protoDescriptor;
+TableSchema tableSchema;
 try {
 writeStream = createWriteStream(tableName);
-protoDescriptor = 
BQTableSchemaToProtoDescriptor.convertBQTableSchemaToProtoDescriptor(writeStream.getTableSchema());
+tableSchema = writeStream.getTableSchema();
+protoDescriptor = 
BQTableSchemaToProtoDescriptor.convertBQTableSchemaToProtoDescriptor(tableSchema);
 streamWriter = createStreamWriter(writeStream.getName(), 
protoDescriptor, getGoogleCredentials(context));
 } catch (Descriptors.DescriptorValidationException | IOException e) {
 getLogger().error("Failed to create Big Query Stream Writer for 
writing", e);
@@ -242,7 +245,7 @@ public class PutBigQuery extends AbstractBigQueryProcessor {
 try {
 try (InputStream in = session.read(flowFile);
 RecordReader reader = 
readerFactory.createRecordReader(flowFile, in, getLogger())) {
-recordNumWritten = writeRecordsToStream(reade

[nifi] branch main updated: NIFI-11823 - fix NUMERIC support in PutBigQuery

2023-08-05 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new b056bf8f7b NIFI-11823 - fix NUMERIC support in PutBigQuery
b056bf8f7b is described below

commit b056bf8f7bda9c5fcbfb80ef6d6b46fe7c118cae
Author: Pierre Villard 
AuthorDate: Mon Jul 17 16:55:24 2023 +0200

NIFI-11823 - fix NUMERIC support in PutBigQuery

This closes #7489.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-gcp-bundle/nifi-gcp-processors/pom.xml|   6 +
 .../nifi/processors/gcp/bigquery/PutBigQuery.java  |  15 ++-
 .../processors/gcp/bigquery/proto/ProtoUtils.java  | 124 +--
 .../processors/gcp/bigquery/PutBigQueryIT.java | 132 -
 .../src/test/resources/bigquery/avrodecimal.avro   | Bin 0 -> 30 bytes
 .../src/test/resources/bigquery/avrodecimal.avsc   |  19 +++
 .../src/test/resources/bigquery/avrofloat.avro | Bin 0 -> 126 bytes
 .../src/test/resources/bigquery/avrofloat.avsc |  14 +++
 .../src/test/resources/bigquery/avroint.avro   | Bin 0 -> 2 bytes
 .../src/test/resources/bigquery/avroint.avsc   |  14 +++
 .../bigquery/schema-correct-data-with-date.avsc|  12 ++
 ...streaming-correct-data-with-date-formatted.json |   7 +-
 12 files changed, 296 insertions(+), 47 deletions(-)

diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml
index ae624dba29..84e84417ea 100644
--- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/pom.xml
@@ -198,6 +198,12 @@
 
src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker
 
src/test/resources/mock-gcp-service-account.json
 
src/test/resources/mock-gcp-application-default-credentials.json
+
src/test/resources/bigquery/avrodecimal.avsc
+
src/test/resources/bigquery/avrodecimal.avro
+
src/test/resources/bigquery/avrofloat.avsc
+
src/test/resources/bigquery/avrofloat.avro
+
src/test/resources/bigquery/avroint.avsc
+
src/test/resources/bigquery/avroint.avro
 
src/test/resources/bigquery/streaming-bad-data.json
 
src/test/resources/bigquery/streaming-correct-data.json
 
src/test/resources/bigquery/schema-correct-data-with-date.avsc
diff --git 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
index 057d6d43fe..0b17f0d939 100644
--- 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
+++ 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
@@ -40,6 +40,7 @@ import 
com.google.cloud.bigquery.storage.v1.ProtoSchemaConverter;
 import com.google.cloud.bigquery.storage.v1.StorageError;
 import com.google.cloud.bigquery.storage.v1.StreamWriter;
 import com.google.cloud.bigquery.storage.v1.TableName;
+import com.google.cloud.bigquery.storage.v1.TableSchema;
 import com.google.cloud.bigquery.storage.v1.WriteStream;
 import com.google.cloud.bigquery.storage.v1.stub.BigQueryWriteStubSettings;
 import com.google.protobuf.Descriptors;
@@ -220,9 +221,11 @@ public class PutBigQuery extends AbstractBigQueryProcessor 
{
 
 WriteStream writeStream;
 Descriptors.Descriptor protoDescriptor;
+TableSchema tableSchema;
 try {
 writeStream = createWriteStream(tableName);
-protoDescriptor = 
BQTableSchemaToProtoDescriptor.convertBQTableSchemaToProtoDescriptor(writeStream.getTableSchema());
+tableSchema = writeStream.getTableSchema();
+protoDescriptor = 
BQTableSchemaToProtoDescriptor.convertBQTableSchemaToProtoDescriptor(tableSchema);
 streamWriter = createStreamWriter(writeStream.getName(), 
protoDescriptor, getGoogleCredentials(context));
 } catch (Descriptors.DescriptorValidationException | IOException e) {
 getLogger().error("Failed to create Big Query Stream Writer for 
writing", e);
@@ -238,7 +241,7 @@ public class PutBigQuery extends AbstractBigQueryProcessor {
 try {
 try (InputStream in = session.read(flowFile);
 RecordReader reader = 
readerFactory.createRecordReader(flowFile, in, getLogger())) {
-recordNumWritten = writeRecordsToStream(reader, 
protoDescript

[nifi] branch support/nifi-1.x updated: NIFI-11902: Fix ListHDFS closes FileSystem in first run

2023-08-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 4e65240ddb NIFI-11902: Fix ListHDFS closes FileSystem in first run
4e65240ddb is described below

commit 4e65240ddbb6e61cb3674b488251a6c3e675d67c
Author: Lehel Boer 
AuthorDate: Thu Aug 3 00:07:38 2023 +0200

NIFI-11902: Fix ListHDFS closes FileSystem in first run

This closes #7565.

Signed-off-by: Peter Turcsanyi 
---
 .../apache/nifi/processors/hadoop/ListHDFS.java| 98 +++---
 1 file changed, 48 insertions(+), 50 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
 
b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
index e706ab65f0..4bba8572ca 100644
--- 
a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
+++ 
b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
@@ -257,58 +257,56 @@ public class ListHDFS extends AbstractHadoopProcessor {
 }
 
 // Pull in any file that is newer than the timestamp that we have.
-try (final FileSystem hdfs = getFileSystem()) {
-final boolean recursive = 
context.getProperty(RECURSE_SUBDIRS).asBoolean();
-final PathFilter pathFilter = createPathFilter(context);
-final RecordSetWriterFactory writerFactory = 
context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
-
-final FileStatusManager fileStatusManager = new 
FileStatusManager(latestTimestamp, latestFiles);
-final Path rootPath = getNormalizedPath(context, DIRECTORY);
-final FileStatusIterable fileStatusIterable = new 
FileStatusIterable(rootPath, recursive, hdfs, getUserGroupInformation());
-
-final Long minAgeProp = 
context.getProperty(MINIMUM_FILE_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
-final long minimumAge = (minAgeProp == null) ? Long.MIN_VALUE : 
minAgeProp;
-final Long maxAgeProp = 
context.getProperty(MAXIMUM_FILE_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
-final long maximumAge = (maxAgeProp == null) ? Long.MAX_VALUE : 
maxAgeProp;
-
-final HadoopFileStatusWriter writer = 
HadoopFileStatusWriter.builder()
-.session(session)
-.successRelationship(getSuccessRelationship())
-.fileStatusIterable(fileStatusIterable)
-.fileStatusManager(fileStatusManager)
-.pathFilter(pathFilter)
-.minimumAge(minimumAge)
-.maximumAge(maximumAge)
-.previousLatestTimestamp(latestTimestamp)
-.previousLatestFiles(latestFiles)
-.writerFactory(writerFactory)
-.hdfsPrefix(getAttributePrefix())
-.logger(getLogger())
-.build();
-
-writer.write();
-
-getLogger().debug("Found a total of {} files in HDFS, {} are 
listed", fileStatusIterable.getTotalFileCount(), writer.getListedFileCount());
-
-if (writer.getListedFileCount() > 0) {
-final Map updatedState = new HashMap<>();
-updatedState.put(LATEST_TIMESTAMP_KEY, 
String.valueOf(fileStatusManager.getCurrentLatestTimestamp()));
-final List files = 
fileStatusManager.getCurrentLatestFiles();
-for (int i = 0; i < files.size(); i++) {
-final String currentFilePath = files.get(i);
-updatedState.put(String.format(LATEST_FILES_KEY, i), 
currentFilePath);
-}
-getLogger().debug("New state map: {}", updatedState);
-updateState(session, updatedState);
-
-getLogger().info("Successfully created listing with {} new 
files from HDFS", writer.getListedFileCount());
-} else {
-getLogger().debug("There is no data to list. Yielding.");
-context.yield();
+final FileSystem hdfs = getFileSystem();
+final boolean recursive = 
context.getProperty(RECURSE_SUBDIRS).asBoolean();
+final PathFilter pathFilter = createPathFilter(context);
+final RecordSetWriterFactory writerFactory = 
context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
+
+final FileStatusManager fileStatusManager = new 
FileStatusManager(latestTimestamp, latestFiles);
+final Path rootPath = getNormalizedPath(context, DIRECTORY);
+ 

[nifi] branch main updated: NIFI-11902: Fix ListHDFS closes FileSystem in first run

2023-08-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 6a8a8caa4c NIFI-11902: Fix ListHDFS closes FileSystem in first run
6a8a8caa4c is described below

commit 6a8a8caa4c202ff50bf0f51edce749eb915721f7
Author: Lehel Boer 
AuthorDate: Thu Aug 3 00:07:38 2023 +0200

NIFI-11902: Fix ListHDFS closes FileSystem in first run

This closes #7565.

Signed-off-by: Peter Turcsanyi 
---
 .../apache/nifi/processors/hadoop/ListHDFS.java| 98 +++---
 1 file changed, 48 insertions(+), 50 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
 
b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
index e706ab65f0..4bba8572ca 100644
--- 
a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
+++ 
b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
@@ -257,58 +257,56 @@ public class ListHDFS extends AbstractHadoopProcessor {
 }
 
 // Pull in any file that is newer than the timestamp that we have.
-try (final FileSystem hdfs = getFileSystem()) {
-final boolean recursive = 
context.getProperty(RECURSE_SUBDIRS).asBoolean();
-final PathFilter pathFilter = createPathFilter(context);
-final RecordSetWriterFactory writerFactory = 
context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
-
-final FileStatusManager fileStatusManager = new 
FileStatusManager(latestTimestamp, latestFiles);
-final Path rootPath = getNormalizedPath(context, DIRECTORY);
-final FileStatusIterable fileStatusIterable = new 
FileStatusIterable(rootPath, recursive, hdfs, getUserGroupInformation());
-
-final Long minAgeProp = 
context.getProperty(MINIMUM_FILE_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
-final long minimumAge = (minAgeProp == null) ? Long.MIN_VALUE : 
minAgeProp;
-final Long maxAgeProp = 
context.getProperty(MAXIMUM_FILE_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
-final long maximumAge = (maxAgeProp == null) ? Long.MAX_VALUE : 
maxAgeProp;
-
-final HadoopFileStatusWriter writer = 
HadoopFileStatusWriter.builder()
-.session(session)
-.successRelationship(getSuccessRelationship())
-.fileStatusIterable(fileStatusIterable)
-.fileStatusManager(fileStatusManager)
-.pathFilter(pathFilter)
-.minimumAge(minimumAge)
-.maximumAge(maximumAge)
-.previousLatestTimestamp(latestTimestamp)
-.previousLatestFiles(latestFiles)
-.writerFactory(writerFactory)
-.hdfsPrefix(getAttributePrefix())
-.logger(getLogger())
-.build();
-
-writer.write();
-
-getLogger().debug("Found a total of {} files in HDFS, {} are 
listed", fileStatusIterable.getTotalFileCount(), writer.getListedFileCount());
-
-if (writer.getListedFileCount() > 0) {
-final Map updatedState = new HashMap<>();
-updatedState.put(LATEST_TIMESTAMP_KEY, 
String.valueOf(fileStatusManager.getCurrentLatestTimestamp()));
-final List files = 
fileStatusManager.getCurrentLatestFiles();
-for (int i = 0; i < files.size(); i++) {
-final String currentFilePath = files.get(i);
-updatedState.put(String.format(LATEST_FILES_KEY, i), 
currentFilePath);
-}
-getLogger().debug("New state map: {}", updatedState);
-updateState(session, updatedState);
-
-getLogger().info("Successfully created listing with {} new 
files from HDFS", writer.getListedFileCount());
-} else {
-getLogger().debug("There is no data to list. Yielding.");
-context.yield();
+final FileSystem hdfs = getFileSystem();
+final boolean recursive = 
context.getProperty(RECURSE_SUBDIRS).asBoolean();
+final PathFilter pathFilter = createPathFilter(context);
+final RecordSetWriterFactory writerFactory = 
context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
+
+final FileStatusManager fileStatusManager = new 
FileStatusManager(latestTimestamp, latestFiles);
+final Path rootPath = getNormalizedPath(context, DIRECTORY);
+final FileStatusI

[nifi] branch support/nifi-1.x updated: NIFI-11334: Fixed dependency version in nifi-iceberg-services

2023-07-05 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 53a0aef422 NIFI-11334: Fixed dependency version in 
nifi-iceberg-services
53a0aef422 is described below

commit 53a0aef4228f7b14cb8c9941a7eca158f0a42093
Author: Zoltan Kornel Torok 
AuthorDate: Wed Jul 5 08:57:54 2023 +0200

NIFI-11334: Fixed dependency version in nifi-iceberg-services

This closes #7459.

Signed-off-by: Peter Turcsanyi 
---
 nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-services/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-services/pom.xml 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-services/pom.xml
index c4db27ca94..3056685a66 100644
--- a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-services/pom.xml
+++ b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-services/pom.xml
@@ -39,7 +39,7 @@
 
 org.apache.nifi
 nifi-xml-processing
-2.0.0-SNAPSHOT
+1.23.0-SNAPSHOT
 
 
 



[nifi] branch support/nifi-1.x updated: NIFI-11334: Fixed PutIceberg processor instance interference due to same class loader usage

2023-06-30 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 331e0aead0 NIFI-11334: Fixed PutIceberg processor instance 
interference due to same class loader usage
331e0aead0 is described below

commit 331e0aead0f29e3e7aa2c12b59c5b658d6b09423
Author: Mark Bathori 
AuthorDate: Thu Jun 29 08:59:43 2023 +0200

NIFI-11334: Fixed PutIceberg processor instance interference due to same 
class loader usage

This closes #7449.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-iceberg-processors-nar/pom.xml| 258 +
 .../nifi-iceberg-processors/pom.xml| 102 +++-
 .../iceberg/AbstractIcebergProcessor.java  |  22 +-
 .../nifi/processors/iceberg/IcebergUtils.java} |  28 ++-
 .../apache/nifi/processors/iceberg/PutIceberg.java |   7 +-
 .../iceberg/catalog/IcebergCatalogFactory.java |  87 +++
 .../processors/iceberg/TestDataFileActions.java|   4 +-
 .../iceberg/TestPutIcebergCustomValidation.java|  24 +-
 .../iceberg/TestPutIcebergWithHadoopCatalog.java   |  15 +-
 .../iceberg/TestPutIcebergWithHiveCatalog.java |  29 ++-
 .../iceberg/catalog/TestHadoopCatalogService.java  |  25 +-
 .../iceberg/catalog/TestHiveCatalogService.java|  64 ++---
 .../src/test/resources/secured-core-site.xml   |  22 ++
 .../src/test/resources/unsecured-core-site.xml |  22 ++
 .../nifi-iceberg-services-api-nar/pom.xml  | 162 -
 .../nifi-iceberg-services-api/pom.xml  | 115 -
 ...logService.java => IcebergCatalogProperty.java} |  21 +-
 .../services/iceberg/IcebergCatalogService.java|  11 +-
 ...CatalogService.java => IcebergCatalogType.java} |  15 +-
 .../nifi-iceberg-services/pom.xml  |   5 +
 .../services/iceberg/AbstractCatalogService.java   |  62 +++--
 .../services/iceberg/HadoopCatalogService.java |  24 +-
 .../nifi/services/iceberg/HiveCatalogService.java  |  62 ++---
 23 files changed, 631 insertions(+), 555 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors-nar/pom.xml 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors-nar/pom.xml
index 3656e18974..e0d8bb1dcf 100644
--- a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors-nar/pom.xml
+++ b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors-nar/pom.xml
@@ -43,101 +43,165 @@
 
 
 
-
-
-
-
-org.apache.iceberg
-iceberg-core
-provided
-
-
-org.apache.hive
-hive-shims
-provided
-
-
-org.codehaus.groovy
-groovy-all
-provided
-
-
-org.apache.hadoop
-hadoop-common
-provided
-
-
-org.slf4j
-slf4j-reload4j
-
-
-
-
-org.apache.hadoop
-hadoop-yarn-api
-provided
-
-
-org.apache.hadoop
-hadoop-yarn-registry
-provided
-
-
-org.apache.zookeeper
-zookeeper
-provided
-
-
-org.apache.curator
-curator-client
-provided
-
-
-org.apache.curator
-curator-framework
-provided
-
-
-com.fasterxml.jackson.core
-jackson-databind
-provided
-
-
-com.fasterxml.jackson.core
-jackson-core
-provided
-
-
-com.fasterxml.jackson.core
-jackson-annotations
-provided
-
-
-org.xerial.snappy
-snappy-java
-provided
-
-
-org.apache.ant
-ant
-provided
-
-
-org.apache.ivy
-ivy
-provided
-
-
-org.apache.orc
-orc-core
-provided
-nohive
-
-
-org.apache.parquet
-parquet-avro
-provided
-
-
-
+
+
+
+include-hadoop-aws
+
+   

[nifi] branch main updated: NIFI-11334: Fixed PutIceberg processor instance interference due to same class loader usage

2023-06-30 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new b1be71f918 NIFI-11334: Fixed PutIceberg processor instance 
interference due to same class loader usage
b1be71f918 is described below

commit b1be71f918e45497099b069d04482bde8aff025d
Author: Mark Bathori 
AuthorDate: Thu Jun 29 08:59:43 2023 +0200

NIFI-11334: Fixed PutIceberg processor instance interference due to same 
class loader usage

This closes #7449.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-iceberg-processors-nar/pom.xml| 258 +
 .../nifi-iceberg-processors/pom.xml| 102 +++-
 .../iceberg/AbstractIcebergProcessor.java  |  22 +-
 .../nifi/processors/iceberg/IcebergUtils.java} |  28 ++-
 .../apache/nifi/processors/iceberg/PutIceberg.java |   7 +-
 .../iceberg/catalog/IcebergCatalogFactory.java |  87 +++
 .../processors/iceberg/TestDataFileActions.java|   4 +-
 .../iceberg/TestPutIcebergCustomValidation.java|  24 +-
 .../iceberg/TestPutIcebergWithHadoopCatalog.java   |  15 +-
 .../iceberg/TestPutIcebergWithHiveCatalog.java |  29 ++-
 .../iceberg/catalog/TestHadoopCatalogService.java  |  25 +-
 .../iceberg/catalog/TestHiveCatalogService.java|  64 ++---
 .../src/test/resources/secured-core-site.xml   |  22 ++
 .../src/test/resources/unsecured-core-site.xml |  22 ++
 .../nifi-iceberg-services-api-nar/pom.xml  | 162 -
 .../nifi-iceberg-services-api/pom.xml  | 115 -
 ...logService.java => IcebergCatalogProperty.java} |  21 +-
 .../services/iceberg/IcebergCatalogService.java|  11 +-
 ...CatalogService.java => IcebergCatalogType.java} |  15 +-
 .../nifi-iceberg-services/pom.xml  |   5 +
 .../services/iceberg/AbstractCatalogService.java   |  62 +++--
 .../services/iceberg/HadoopCatalogService.java |  24 +-
 .../nifi/services/iceberg/HiveCatalogService.java  |  62 ++---
 23 files changed, 631 insertions(+), 555 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors-nar/pom.xml 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors-nar/pom.xml
index 3c7e102bca..4ed687eb42 100644
--- a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors-nar/pom.xml
+++ b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors-nar/pom.xml
@@ -43,101 +43,165 @@
 
 
 
-
-
-
-
-org.apache.iceberg
-iceberg-core
-provided
-
-
-org.apache.hive
-hive-shims
-provided
-
-
-org.codehaus.groovy
-groovy-all
-provided
-
-
-org.apache.hadoop
-hadoop-common
-provided
-
-
-org.slf4j
-slf4j-reload4j
-
-
-
-
-org.apache.hadoop
-hadoop-yarn-api
-provided
-
-
-org.apache.hadoop
-hadoop-yarn-registry
-provided
-
-
-org.apache.zookeeper
-zookeeper
-provided
-
-
-org.apache.curator
-curator-client
-provided
-
-
-org.apache.curator
-curator-framework
-provided
-
-
-com.fasterxml.jackson.core
-jackson-databind
-provided
-
-
-com.fasterxml.jackson.core
-jackson-core
-provided
-
-
-com.fasterxml.jackson.core
-jackson-annotations
-provided
-
-
-org.xerial.snappy
-snappy-java
-provided
-
-
-org.apache.ant
-ant
-provided
-
-
-org.apache.ivy
-ivy
-provided
-
-
-org.apache.orc
-orc-core
-provided
-nohive
-
-
-org.apache.parquet
-parquet-avro
-provided
-
-
-
+
+
+
+include-hadoop-aws
+
+   

[nifi] branch support/nifi-1.x updated: NIFI-11614 Improved Validation for JndiJmsConnectionFactoryProvider

2023-06-01 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 3fcb82ee45 NIFI-11614 Improved Validation for 
JndiJmsConnectionFactoryProvider
3fcb82ee45 is described below

commit 3fcb82ee4509d1ad73893d8dca003be6d086c5d6
Author: exceptionfactory 
AuthorDate: Tue May 30 13:05:07 2023 -0500

NIFI-11614 Improved Validation for JndiJmsConnectionFactoryProvider

This closes #7313.

Signed-off-by: Peter Turcsanyi 
---
 .../jms/cf/JndiJmsConnectionFactoryProperties.java |  90 +++-
 .../additionalDetails.html |  54 +-
 .../cf/JndiJmsConnectionFactoryProviderTest.java   | 116 +
 3 files changed, 255 insertions(+), 5 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JndiJmsConnectionFactoryProperties.java
 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JndiJmsConnectionFactoryProperties.java
index 93d98341f3..823ae0d976 100644
--- 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JndiJmsConnectionFactoryProperties.java
+++ 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JndiJmsConnectionFactoryProperties.java
@@ -18,6 +18,8 @@ package org.apache.nifi.jms.cf;
 
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyDescriptor.Builder;
+import org.apache.nifi.components.ValidationContext;
+import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.components.resource.ResourceCardinality;
 import org.apache.nifi.components.resource.ResourceType;
@@ -25,12 +27,20 @@ import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.processor.util.StandardValidators;
 
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 
 import static 
org.apache.nifi.processor.util.StandardValidators.NON_EMPTY_VALIDATOR;
 
 public class JndiJmsConnectionFactoryProperties {
 
+public static final String URL_SCHEMES_ALLOWED_PROPERTY = 
"org.apache.nifi.jms.cf.jndi.provider.url.schemes.allowed";
+
 public static final PropertyDescriptor JNDI_INITIAL_CONTEXT_FACTORY = new 
Builder()
 .name("java.naming.factory.initial")
 .displayName("JNDI Initial Context Factory Class")
@@ -43,9 +53,9 @@ public class JndiJmsConnectionFactoryProperties {
 public static final PropertyDescriptor JNDI_PROVIDER_URL = new Builder()
 .name("java.naming.provider.url")
 .displayName("JNDI Provider URL")
-.description("The URL of the JNDI Provider to use 
(java.naming.provider.url).")
+.description("The URL of the JNDI Provider to use as the value for 
java.naming.provider.url. See additional details documentation for allowed URL 
schemes.")
 .required(true)
-.addValidator(NON_EMPTY_VALIDATOR)
+.addValidator(new JndiJmsProviderUrlValidator())
 
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
 .build();
 
@@ -114,4 +124,80 @@ public class JndiJmsConnectionFactoryProperties {
 .build();
 }
 
+static class JndiJmsProviderUrlValidator implements Validator {
+
+private static final Pattern URL_SCHEME_PATTERN = 
Pattern.compile("^([^:]+)://.+$");
+
+private static final int SCHEME_GROUP = 1;
+
+private static final String SPACE_SEPARATOR = " ";
+
+private static final Set<String> DEFAULT_ALLOWED_SCHEMES = 
Collections.unmodifiableSet(new LinkedHashSet<>(Arrays.asList(
+"file",
+"jgroups",
+"t3",
+"t3s",
+"tcp",
+"ssl",
+"udp",
+"vm"
+)));
+
+private final Set<String> allowedSchemes;
+
+JndiJmsProviderUrlValidator() {
+final String allowed = 
System.getProperty(URL_SCHEMES_ALLOWED_PROPERTY);
+if (allowed == null || allowed.isEmpty()) {
+allowedSchemes = DEFAULT_ALLOWED_SCHEMES;
+} else {
+allowedSchemes = 
Arrays.stream(allowed.split(SPACE_SEPARATOR)).collect(Collectors.toSet());
+}
+}
+
+@Override
+public ValidationResult validate(final String sub

[nifi] branch main updated: NIFI-11614 Improved Validation for JndiJmsConnectionFactoryProvider

2023-06-01 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new b042eb01e8 NIFI-11614 Improved Validation for 
JndiJmsConnectionFactoryProvider
b042eb01e8 is described below

commit b042eb01e8ac1a23e2f5d4c9eee5b68ffe854e48
Author: exceptionfactory 
AuthorDate: Tue May 30 13:05:07 2023 -0500

NIFI-11614 Improved Validation for JndiJmsConnectionFactoryProvider

This closes #7313.

Signed-off-by: Peter Turcsanyi 
---
 .../jms/cf/JndiJmsConnectionFactoryProperties.java |  90 +++-
 .../additionalDetails.html |  54 +-
 .../cf/JndiJmsConnectionFactoryProviderTest.java   | 116 +
 3 files changed, 255 insertions(+), 5 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JndiJmsConnectionFactoryProperties.java
 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JndiJmsConnectionFactoryProperties.java
index 93d98341f3..823ae0d976 100644
--- 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JndiJmsConnectionFactoryProperties.java
+++ 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JndiJmsConnectionFactoryProperties.java
@@ -18,6 +18,8 @@ package org.apache.nifi.jms.cf;
 
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyDescriptor.Builder;
+import org.apache.nifi.components.ValidationContext;
+import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.components.resource.ResourceCardinality;
 import org.apache.nifi.components.resource.ResourceType;
@@ -25,12 +27,20 @@ import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.processor.util.StandardValidators;
 
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 
 import static 
org.apache.nifi.processor.util.StandardValidators.NON_EMPTY_VALIDATOR;
 
 public class JndiJmsConnectionFactoryProperties {
 
+public static final String URL_SCHEMES_ALLOWED_PROPERTY = 
"org.apache.nifi.jms.cf.jndi.provider.url.schemes.allowed";
+
 public static final PropertyDescriptor JNDI_INITIAL_CONTEXT_FACTORY = new 
Builder()
 .name("java.naming.factory.initial")
 .displayName("JNDI Initial Context Factory Class")
@@ -43,9 +53,9 @@ public class JndiJmsConnectionFactoryProperties {
 public static final PropertyDescriptor JNDI_PROVIDER_URL = new Builder()
 .name("java.naming.provider.url")
 .displayName("JNDI Provider URL")
-.description("The URL of the JNDI Provider to use 
(java.naming.provider.url).")
+.description("The URL of the JNDI Provider to use as the value for 
java.naming.provider.url. See additional details documentation for allowed URL 
schemes.")
 .required(true)
-.addValidator(NON_EMPTY_VALIDATOR)
+.addValidator(new JndiJmsProviderUrlValidator())
 
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
 .build();
 
@@ -114,4 +124,80 @@ public class JndiJmsConnectionFactoryProperties {
 .build();
 }
 
+static class JndiJmsProviderUrlValidator implements Validator {
+
+private static final Pattern URL_SCHEME_PATTERN = 
Pattern.compile("^([^:]+)://.+$");
+
+private static final int SCHEME_GROUP = 1;
+
+private static final String SPACE_SEPARATOR = " ";
+
+private static final Set<String> DEFAULT_ALLOWED_SCHEMES = 
Collections.unmodifiableSet(new LinkedHashSet<>(Arrays.asList(
+"file",
+"jgroups",
+"t3",
+"t3s",
+"tcp",
+"ssl",
+"udp",
+"vm"
+)));
+
+private final Set<String> allowedSchemes;
+
+JndiJmsProviderUrlValidator() {
+final String allowed = 
System.getProperty(URL_SCHEMES_ALLOWED_PROPERTY);
+if (allowed == null || allowed.isEmpty()) {
+allowedSchemes = DEFAULT_ALLOWED_SCHEMES;
+} else {
+allowedSchemes = 
Arrays.stream(allowed.split(SPACE_SEPARATOR)).collect(Collectors.toSet());
+}
+}
+
+@Override
+public ValidationResult validate(final String subject, final String 
input, 

[nifi] branch support/nifi-1.x updated: NIFI-11590: Missing Enum data type handling in FlowFileTable

2023-05-26 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 7e705b6372 NIFI-11590: Missing Enum data type handling in FlowFileTable
7e705b6372 is described below

commit 7e705b6372eb94e3a2d9366480008c0287475b03
Author: Mark Bathori 
AuthorDate: Thu May 25 14:41:42 2023 +0200

NIFI-11590: Missing Enum data type handling in FlowFileTable

This closes #7294.

Signed-off-by: Peter Turcsanyi 
---
 .../main/java/org/apache/nifi/queryrecord/FlowFileTable.java |  2 ++
 .../org/apache/nifi/processors/standard/TestQueryRecord.java | 12 +++-
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/queryrecord/FlowFileTable.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/queryrecord/FlowFileTable.java
index a2debbdce5..e462972387 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/queryrecord/FlowFileTable.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/queryrecord/FlowFileTable.java
@@ -227,6 +227,8 @@ public class FlowFileTable extends AbstractTable implements 
QueryableTable, Tran
 return typeFactory.createJavaType(BigInteger.class);
 case DECIMAL:
 return typeFactory.createJavaType(BigDecimal.class);
+case ENUM:
+return typeFactory.createJavaType(Enum.class);
 case CHOICE:
 final ChoiceDataType choiceDataType = (ChoiceDataType) 
fieldType;
 DataType widestDataType = 
choiceDataType.getPossibleSubTypes().get(0);
diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestQueryRecord.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestQueryRecord.java
index 20f6dca199..d8a9a1f069 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestQueryRecord.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestQueryRecord.java
@@ -251,7 +251,7 @@ public class TestQueryRecord {
 runner.setProperty(QueryRecord.RECORD_READER_FACTORY, "reader");
 runner.setProperty(QueryRecord.RECORD_WRITER_FACTORY, "writer");
 runner.setProperty(REL_NAME,
-"SELECT title, name" +
+"SELECT title, name, jobLevel" +
 "FROM FLOWFILE" +
 "WHERE CARDINALITY(addresses) > 1");
 
@@ -270,6 +270,7 @@ public class TestQueryRecord {
 final Record output = written.get(0);
 assertEquals("John Doe", output.getValue("name"));
 assertEquals("Software Engineer", output.getValue("title"));
+assertEquals(JobLevel.IC2, output.getValue("jobLevel"));
 }
 
 @Test
@@ -777,6 +778,7 @@ public class TestQueryRecord {
  *  {
  *   "name": "John Doe",
  *   "title": "Software Engineer",
+ *   "jobLevel": "IC2",
  *   "age": 40,
  *   "addresses": [{
  *   "streetNumber": 4820,
@@ -815,6 +817,7 @@ public class TestQueryRecord {
 personFields.add(new RecordField("name", 
RecordFieldType.STRING.getDataType()));
 personFields.add(new RecordField("age", 
RecordFieldType.INT.getDataType()));
 personFields.add(new RecordField("title", 
RecordFieldType.STRING.getDataType()));
+personFields.add(new RecordField("jobLevel", 
RecordFieldType.ENUM.getDataType()));
 personFields.add(new RecordField("height", 
RecordFieldType.CHOICE.getChoiceDataType(RecordFieldType.DOUBLE.getDataType(), 
RecordFieldType.INT.getDataType(;
 personFields.add(new RecordField("addresses", 
RecordFieldType.ARRAY.getArrayDataType( 
RecordFieldType.RECORD.getRecordDataType(addressSchema)) ));
 final RecordSchema personSchema = new SimpleRecordSchema(personFields);
@@ -844,6 +847,7 @@ public class TestQueryRecord {
 map.put("age", 30);
 map.put("height", 60.5);
 map.put("title", "Software Engineer");
+map.put("jobLevel", JobLevel.IC2);
 map.put("addresses", new Record[] {homeAddress, workAddress});
 return new MapRecord(personSchema, map);
 }
@@ -1281,4 +1285,10 @@ public class TestQueryRecord {
 
 }
 
+public enum JobLevel {
+IC1,
+IC2,
+IC3
+}
+
 }



[nifi] 01/02: Revert "NIFI-11590: Missing Enum data type handling in FlowFileTable"

2023-05-26 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit de62c6261f84e5cb802fbfe08e7427e61e3656da
Author: Peter Turcsanyi 
AuthorDate: Fri May 26 11:07:45 2023 +0200

Revert "NIFI-11590: Missing Enum data type handling in FlowFileTable"

This reverts commit 4e304ac58649ed5a713dc2ea50990e897eed981c.
---
 .../processors/helloworld/HelloStateProcessor.java | 167 -
 .../processors/helloworld/HelloWorldProcessor.java | 157 ---
 .../org/apache/nifi/queryrecord/FlowFileTable.java |   2 -
 .../nifi/processors/standard/TestQueryRecord.java  |  12 +-
 4 files changed, 1 insertion(+), 337 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/helloworld/HelloStateProcessor.java
 
b/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/helloworld/HelloStateProcessor.java
deleted file mode 100644
index 0e058f467e..00
--- 
a/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/helloworld/HelloStateProcessor.java
+++ /dev/null
@@ -1,167 +0,0 @@
-package org.apache.nifi.processors.helloworld;
-
-import org.apache.nifi.annotation.behavior.InputRequirement;
-import org.apache.nifi.annotation.behavior.Stateful;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.lifecycle.OnShutdown;
-import org.apache.nifi.annotation.lifecycle.OnStopped;
-import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
-import org.apache.nifi.components.ValidationContext;
-import org.apache.nifi.components.ValidationResult;
-import org.apache.nifi.components.state.Scope;
-import org.apache.nifi.components.state.StateManager;
-import org.apache.nifi.components.state.StateMap;
-import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.ProcessorInitializationContext;
-import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.processor.exception.ProcessException;
-
-import java.io.IOException;
-import java.time.LocalDateTime;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-@Tags("state")
-@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
-@Stateful(scopes = Scope.CLUSTER, description = "")
-public class HelloStateProcessor extends AbstractProcessor {
-
-private static final String COUNTER_KEY = "counter";
-private static final String TIMESTAMP_KEY = "timestamp";
-
-public static final Relationship REL_SUCCESS = new Relationship.Builder()
-.name("success")
-.build();
-
-public static final Relationship REL_FAILURE = new Relationship.Builder()
-.name("failure")
-.build();
-
-public static final Set RELATIONSHIPS = 
Collections.unmodifiableSet(
-new HashSet<>(Arrays.asList(REL_SUCCESS, REL_FAILURE)));
-
-@Override
-public Set getRelationships() {
-return RELATIONSHIPS;
-}
-
-@Override
-protected void init(ProcessorInitializationContext context) {
-getLogger().info("init");
-}
-
-@Override
-protected Collection customValidate(ValidationContext 
validationContext) {
-getLogger().info("customValidate");
-return Collections.emptyList();
-}
-
-@OnScheduled
-public void onScheduled(ProcessContext context) throws IOException {
-getLogger().info("onScheduled");
-if (getNodeTypeProvider().isPrimary()) {
-final StateManager stateManager = context.getStateManager();
-final StateMap state = stateManager.getState(Scope.CLUSTER);
-
-if (!state.getStateVersion().isPresent()) {
-stateManager.setState(new HashMap<>(), Scope.CLUSTER);
-}
-}
-}
-
-@OnUnscheduled
-public void onUnscheduled() {
-getLogger().info("onUnscheduled");
-}
-
-@OnStopped
-public void onStopped() {
-getLogger().info("onStopped");
-}
-
-@OnShutdown
-public void onShutdown() {
-getLogger().info("onShutdown");
-}
-
-//public void onTrigger(ProcessContext context, ProcessSession session) 
throws ProcessException {
-//try {
-//getLogger().info("onTrigger");
-//
-//FlowFile flowFile = session.get();
-//if (flowFile == null) {
-//

[nifi] branch main updated (4e304ac586 -> cfd62c9511)

2023-05-26 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a change to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


from 4e304ac586 NIFI-11590: Missing Enum data type handling in FlowFileTable
 new de62c6261f Revert "NIFI-11590: Missing Enum data type handling in 
FlowFileTable"
 new cfd62c9511 NIFI-11590: Missing Enum data type handling in FlowFileTable

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../processors/helloworld/HelloStateProcessor.java | 167 -
 .../processors/helloworld/HelloWorldProcessor.java | 157 ---
 2 files changed, 324 deletions(-)
 delete mode 100644 
nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/helloworld/HelloStateProcessor.java
 delete mode 100644 
nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/helloworld/HelloWorldProcessor.java



[nifi] 02/02: NIFI-11590: Missing Enum data type handling in FlowFileTable

2023-05-26 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit cfd62c9511e43d5010fbfbb12b98b40bdfdb3fc2
Author: Mark Bathori 
AuthorDate: Thu May 25 14:41:42 2023 +0200

NIFI-11590: Missing Enum data type handling in FlowFileTable

This closes #7294.

Signed-off-by: Peter Turcsanyi 
---
 .../main/java/org/apache/nifi/queryrecord/FlowFileTable.java |  2 ++
 .../org/apache/nifi/processors/standard/TestQueryRecord.java | 12 +++-
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/queryrecord/FlowFileTable.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/queryrecord/FlowFileTable.java
index a2debbdce5..e462972387 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/queryrecord/FlowFileTable.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/queryrecord/FlowFileTable.java
@@ -227,6 +227,8 @@ public class FlowFileTable extends AbstractTable implements 
QueryableTable, Tran
 return typeFactory.createJavaType(BigInteger.class);
 case DECIMAL:
 return typeFactory.createJavaType(BigDecimal.class);
+case ENUM:
+return typeFactory.createJavaType(Enum.class);
 case CHOICE:
 final ChoiceDataType choiceDataType = (ChoiceDataType) 
fieldType;
 DataType widestDataType = 
choiceDataType.getPossibleSubTypes().get(0);
diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestQueryRecord.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestQueryRecord.java
index 20f6dca199..d8a9a1f069 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestQueryRecord.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestQueryRecord.java
@@ -251,7 +251,7 @@ public class TestQueryRecord {
 runner.setProperty(QueryRecord.RECORD_READER_FACTORY, "reader");
 runner.setProperty(QueryRecord.RECORD_WRITER_FACTORY, "writer");
 runner.setProperty(REL_NAME,
-"SELECT title, name" +
+"SELECT title, name, jobLevel" +
 "FROM FLOWFILE" +
 "WHERE CARDINALITY(addresses) > 1");
 
@@ -270,6 +270,7 @@ public class TestQueryRecord {
 final Record output = written.get(0);
 assertEquals("John Doe", output.getValue("name"));
 assertEquals("Software Engineer", output.getValue("title"));
+assertEquals(JobLevel.IC2, output.getValue("jobLevel"));
 }
 
 @Test
@@ -777,6 +778,7 @@ public class TestQueryRecord {
  *  {
  *   "name": "John Doe",
  *   "title": "Software Engineer",
+ *   "jobLevel": "IC2",
  *   "age": 40,
  *   "addresses": [{
  *   "streetNumber": 4820,
@@ -815,6 +817,7 @@ public class TestQueryRecord {
 personFields.add(new RecordField("name", 
RecordFieldType.STRING.getDataType()));
 personFields.add(new RecordField("age", 
RecordFieldType.INT.getDataType()));
 personFields.add(new RecordField("title", 
RecordFieldType.STRING.getDataType()));
+personFields.add(new RecordField("jobLevel", 
RecordFieldType.ENUM.getDataType()));
 personFields.add(new RecordField("height", 
RecordFieldType.CHOICE.getChoiceDataType(RecordFieldType.DOUBLE.getDataType(), 
RecordFieldType.INT.getDataType(;
 personFields.add(new RecordField("addresses", 
RecordFieldType.ARRAY.getArrayDataType( 
RecordFieldType.RECORD.getRecordDataType(addressSchema)) ));
 final RecordSchema personSchema = new SimpleRecordSchema(personFields);
@@ -844,6 +847,7 @@ public class TestQueryRecord {
 map.put("age", 30);
 map.put("height", 60.5);
 map.put("title", "Software Engineer");
+map.put("jobLevel", JobLevel.IC2);
 map.put("addresses", new Record[] {homeAddress, workAddress});
 return new MapRecord(personSchema, map);
 }
@@ -1281,4 +1285,10 @@ public class TestQueryRecord {
 
 }
 
+public enum JobLevel {
+IC1,
+IC2,
+IC3
+}
+
 }



[nifi] branch main updated: NIFI-11590: Missing Enum data type handling in FlowFileTable

2023-05-26 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 4e304ac586 NIFI-11590: Missing Enum data type handling in FlowFileTable
4e304ac586 is described below

commit 4e304ac58649ed5a713dc2ea50990e897eed981c
Author: Mark Bathori 
AuthorDate: Thu May 25 14:41:42 2023 +0200

NIFI-11590: Missing Enum data type handling in FlowFileTable

This closes #7294.

Signed-off-by: Peter Turcsanyi 
---
 .../processors/helloworld/HelloStateProcessor.java | 167 +
 .../processors/helloworld/HelloWorldProcessor.java | 157 +++
 .../org/apache/nifi/queryrecord/FlowFileTable.java |   2 +
 .../nifi/processors/standard/TestQueryRecord.java  |  12 +-
 4 files changed, 337 insertions(+), 1 deletion(-)

diff --git 
a/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/helloworld/HelloStateProcessor.java
 
b/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/helloworld/HelloStateProcessor.java
new file mode 100644
index 00..0e058f467e
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/helloworld/HelloStateProcessor.java
@@ -0,0 +1,167 @@
+package org.apache.nifi.processors.helloworld;
+
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.Stateful;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.annotation.lifecycle.OnShutdown;
+import org.apache.nifi.annotation.lifecycle.OnStopped;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
+import org.apache.nifi.components.ValidationContext;
+import org.apache.nifi.components.ValidationResult;
+import org.apache.nifi.components.state.Scope;
+import org.apache.nifi.components.state.StateManager;
+import org.apache.nifi.components.state.StateMap;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+
+import java.io.IOException;
+import java.time.LocalDateTime;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+@Tags("state")
+@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
+@Stateful(scopes = Scope.CLUSTER, description = "")
+public class HelloStateProcessor extends AbstractProcessor {
+
+private static final String COUNTER_KEY = "counter";
+private static final String TIMESTAMP_KEY = "timestamp";
+
+public static final Relationship REL_SUCCESS = new Relationship.Builder()
+.name("success")
+.build();
+
+public static final Relationship REL_FAILURE = new Relationship.Builder()
+.name("failure")
+.build();
+
+public static final Set RELATIONSHIPS = 
Collections.unmodifiableSet(
+new HashSet<>(Arrays.asList(REL_SUCCESS, REL_FAILURE)));
+
+@Override
+public Set getRelationships() {
+return RELATIONSHIPS;
+}
+
+@Override
+protected void init(ProcessorInitializationContext context) {
+getLogger().info("init");
+}
+
+@Override
+protected Collection customValidate(ValidationContext 
validationContext) {
+getLogger().info("customValidate");
+return Collections.emptyList();
+}
+
+@OnScheduled
+public void onScheduled(ProcessContext context) throws IOException {
+getLogger().info("onScheduled");
+if (getNodeTypeProvider().isPrimary()) {
+final StateManager stateManager = context.getStateManager();
+final StateMap state = stateManager.getState(Scope.CLUSTER);
+
+if (!state.getStateVersion().isPresent()) {
+stateManager.setState(new HashMap<>(), Scope.CLUSTER);
+}
+}
+}
+
+@OnUnscheduled
+public void onUnscheduled() {
+getLogger().info("onUnscheduled");
+}
+
+@OnStopped
+public void onStopped() {
+getLogger().info("onStopped");
+}
+
+@OnShutdown
+public void onShutdown() {
+getLogger().info("onShutdown");
+}
+
+//public void onTrigger(ProcessContext context, ProcessSession session) 
throws ProcessException {
+//try {
+//

[nifi] branch support/nifi-1.x updated: NIFI-11535: Transfer ConnectWebsocket connection configuration FlowFile to relationships

2023-05-16 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new f87b6896ac NIFI-11535: Transfer ConnectWebsocket connection 
configuration FlowFile to relationships
f87b6896ac is described below

commit f87b6896ac55cb1545e16d84cb2d56c83988ed6b
Author: Lehel 
AuthorDate: Mon May 15 15:52:12 2023 +0200

NIFI-11535: Transfer ConnectWebsocket connection configuration FlowFile to 
relationships

Also moved dto and util packages under org.apache.nifi.websocket.jetty

This closes #7246.

Signed-off-by: Peter Turcsanyi 
---
 .../AbstractWebSocketGatewayProcessor.java | 19 +++-
 .../processors/websocket/ConnectWebSocket.java |  2 +
 .../processors/websocket/TestConnectWebSocket.java | 52 ++
 .../nifi/websocket/jetty/JettyWebSocketClient.java |  4 +-
 .../nifi/websocket/jetty}/dto/SessionInfo.java |  2 +-
 .../websocket/jetty}/util/HeaderMapExtractor.java  |  2 +-
 .../websocket/util/HeaderMapExtractorTest.java |  2 +-
 7 files changed, 67 insertions(+), 16 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
 
b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
index 9824c4a6d2..25ea1da832 100644
--- 
a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
+++ 
b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
@@ -77,6 +77,18 @@ public abstract class AbstractWebSocketGatewayProcessor 
extends AbstractSessionF
 .description("The WebSocket binary message output")
 .build();
 
+public static final Relationship REL_SUCCESS = new Relationship.Builder()
+.name("success")
+.description("FlowFile holding connection configuration attributes 
(like URL or HTTP headers) in case of successful connection")
+.autoTerminateDefault(true)
+.build();
+
+public static final Relationship REL_FAILURE = new Relationship.Builder()
+.name("failure")
+.description("FlowFile holding connection configuration attributes 
(like URL or HTTP headers) in case of connection failure")
+.autoTerminateDefault(true)
+.build();
+
 static Set getAbstractRelationships() {
 final Set relationships = new HashSet<>();
 relationships.add(REL_CONNECTED);
@@ -130,8 +142,11 @@ public abstract class AbstractWebSocketGatewayProcessor 
extends AbstractSessionF
 final FlowFile flowFile = session.get();
 try {
 webSocketClientService.connect(endpointId, 
flowFile.getAttributes());
-} finally {
-session.remove(flowFile);
+session.transfer(flowFile, REL_SUCCESS);
+session.commitAsync();
+} catch (Exception e) {
+getLogger().error("Websocket connection failure", e);
+session.transfer(flowFile, REL_FAILURE);
 session.commitAsync();
 }
 } else {
diff --git 
a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/ConnectWebSocket.java
 
b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/ConnectWebSocket.java
index be53854d7b..7e2e142014 100644
--- 
a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/ConnectWebSocket.java
+++ 
b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/ConnectWebSocket.java
@@ -89,6 +89,8 @@ public class ConnectWebSocket extends 
AbstractWebSocketGatewayProcessor {
 descriptors = Collections.unmodifiableList(innerDescriptorsList);
 
 final Set innerRelationshipsSet = 
getAbstractRelationships();
+innerRelationshipsSet.add(REL_SUCCESS);
+innerRelationshipsSet.add(REL_FAILURE);
 relationships = Collections.unmodifiableSet(innerRelationshipsSet);
 }
 
diff --git 
a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/test/java/org/apache/nifi/processors/websocket/TestConnectWebSocket.java
 
b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/test/java/org/apache/ni

[nifi] branch main updated: NIFI-11535: Transfer ConnectWebsocket connection configuration FlowFile to relationships

2023-05-16 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 335365874a NIFI-11535: Transfer ConnectWebsocket connection 
configuration FlowFile to relationships
335365874a is described below

commit 335365874a249dfb3c1044cf39bfbc584085d83a
Author: Lehel 
AuthorDate: Mon May 15 15:52:12 2023 +0200

NIFI-11535: Transfer ConnectWebsocket connection configuration FlowFile to 
relationships

Also moved dto and util packages under org.apache.nifi.websocket.jetty

This closes #7246.

Signed-off-by: Peter Turcsanyi 
---
 .../AbstractWebSocketGatewayProcessor.java | 19 +++-
 .../processors/websocket/ConnectWebSocket.java |  2 +
 .../processors/websocket/TestConnectWebSocket.java | 52 ++
 .../nifi/websocket/jetty/JettyWebSocketClient.java |  4 +-
 .../nifi/websocket/jetty}/dto/SessionInfo.java |  2 +-
 .../websocket/jetty}/util/HeaderMapExtractor.java  |  2 +-
 .../websocket/util/HeaderMapExtractorTest.java |  2 +-
 7 files changed, 67 insertions(+), 16 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
 
b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
index 9824c4a6d2..25ea1da832 100644
--- 
a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
+++ 
b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
@@ -77,6 +77,18 @@ public abstract class AbstractWebSocketGatewayProcessor 
extends AbstractSessionF
 .description("The WebSocket binary message output")
 .build();
 
+public static final Relationship REL_SUCCESS = new Relationship.Builder()
+.name("success")
+.description("FlowFile holding connection configuration attributes 
(like URL or HTTP headers) in case of successful connection")
+.autoTerminateDefault(true)
+.build();
+
+public static final Relationship REL_FAILURE = new Relationship.Builder()
+.name("failure")
+.description("FlowFile holding connection configuration attributes 
(like URL or HTTP headers) in case of connection failure")
+.autoTerminateDefault(true)
+.build();
+
 static Set getAbstractRelationships() {
 final Set relationships = new HashSet<>();
 relationships.add(REL_CONNECTED);
@@ -130,8 +142,11 @@ public abstract class AbstractWebSocketGatewayProcessor 
extends AbstractSessionF
 final FlowFile flowFile = session.get();
 try {
 webSocketClientService.connect(endpointId, 
flowFile.getAttributes());
-} finally {
-session.remove(flowFile);
+session.transfer(flowFile, REL_SUCCESS);
+session.commitAsync();
+} catch (Exception e) {
+getLogger().error("Websocket connection failure", e);
+session.transfer(flowFile, REL_FAILURE);
 session.commitAsync();
 }
 } else {
diff --git 
a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/ConnectWebSocket.java
 
b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/ConnectWebSocket.java
index be53854d7b..7e2e142014 100644
--- 
a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/ConnectWebSocket.java
+++ 
b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/ConnectWebSocket.java
@@ -89,6 +89,8 @@ public class ConnectWebSocket extends 
AbstractWebSocketGatewayProcessor {
 descriptors = Collections.unmodifiableList(innerDescriptorsList);
 
 final Set innerRelationshipsSet = 
getAbstractRelationships();
+innerRelationshipsSet.add(REL_SUCCESS);
+innerRelationshipsSet.add(REL_FAILURE);
 relationships = Collections.unmodifiableSet(innerRelationshipsSet);
 }
 
diff --git 
a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/test/java/org/apache/nifi/processors/websocket/TestConnectWebSocket.java
 
b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/test/java/org/apache/nifi/processors/webso

[nifi] branch support/nifi-1.x updated: NIFI-11544 Fixed REL_FAILURE usage in AbstractIcebergProcessor

2023-05-16 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 8aa0d5549e NIFI-11544 Fixed REL_FAILURE usage in 
AbstractIcebergProcessor
8aa0d5549e is described below

commit 8aa0d5549ed034bd931902136d4390ad81167e6c
Author: krisztina-zsihovszki 
AuthorDate: Fri May 12 17:09:59 2023 +0200

NIFI-11544 Fixed REL_FAILURE usage in AbstractIcebergProcessor

This closes #7241.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi/processors/iceberg/AbstractIcebergProcessor.java | 11 ---
 .../java/org/apache/nifi/processors/iceberg/PutIceberg.java   |  5 -
 .../processors/iceberg/writer/IcebergPartitionedWriter.java   |  2 +-
 3 files changed, 9 insertions(+), 9 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
index a4ec2ccf07..9f527344ec 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
@@ -26,6 +26,7 @@ import org.apache.nifi.kerberos.KerberosUserService;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.security.krb.KerberosLoginException;
 import org.apache.nifi.security.krb.KerberosUser;
@@ -35,14 +36,13 @@ import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 
 import static org.apache.nifi.hadoop.SecurityUtil.getUgiForKerberosUser;
-import static org.apache.nifi.processors.iceberg.PutIceberg.REL_FAILURE;
 
 /**
  * Base Iceberg processor class.
  */
 public abstract class AbstractIcebergProcessor extends AbstractProcessor {
 
-static final PropertyDescriptor CATALOG = new PropertyDescriptor.Builder()
+public static final PropertyDescriptor CATALOG = new 
PropertyDescriptor.Builder()
 .name("catalog-service")
 .displayName("Catalog Service")
 .description("Specifies the Controller Service to use for handling 
references to table’s metadata files.")
@@ -50,13 +50,18 @@ public abstract class AbstractIcebergProcessor extends 
AbstractProcessor {
 .required(true)
 .build();
 
-static final PropertyDescriptor KERBEROS_USER_SERVICE = new 
PropertyDescriptor.Builder()
+public static final PropertyDescriptor KERBEROS_USER_SERVICE = new 
PropertyDescriptor.Builder()
 .name("kerberos-user-service")
 .displayName("Kerberos User Service")
 .description("Specifies the Kerberos User Controller Service that 
should be used for authenticating with Kerberos.")
 .identifiesControllerService(KerberosUserService.class)
 .build();
 
+public static final Relationship REL_FAILURE = new Relationship.Builder()
+.name("failure")
+.description("A FlowFile is routed to this relationship if the 
operation failed and retrying the operation will also fail, such as an invalid 
data or schema.")
+.build();
+
 private volatile KerberosUser kerberosUser;
 private volatile UserGroupInformation ugi;
 
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
index d85a590a08..e5142c3345 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
@@ -163,11 +163,6 @@ public class PutIceberg extends AbstractIcebergProcessor {
 .description("A FlowFile is routed to this relationship after the 
data ingestion was successful.")
 .build();
 
-static final Relationship REL_FAILURE = new Relationship.Builder()
-.name("failure")
-.description("A FlowFile is routed to this relationship if the 
data ingestion failed and retrying the operation will also fail, such as an 
invalid data or schema.")
- 

[nifi] branch main updated: NIFI-11544 Fixed REL_FAILURE usage in AbstractIcebergProcessor

2023-05-16 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 3051b69a6c NIFI-11544 Fixed REL_FAILURE usage in 
AbstractIcebergProcessor
3051b69a6c is described below

commit 3051b69a6c3bc131c6ac78668506aa5915c64c19
Author: krisztina-zsihovszki 
AuthorDate: Fri May 12 17:09:59 2023 +0200

NIFI-11544 Fixed REL_FAILURE usage in AbstractIcebergProcessor

This closes #7241.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi/processors/iceberg/AbstractIcebergProcessor.java | 11 ---
 .../java/org/apache/nifi/processors/iceberg/PutIceberg.java   |  5 -
 .../processors/iceberg/writer/IcebergPartitionedWriter.java   |  2 +-
 3 files changed, 9 insertions(+), 9 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
index a4ec2ccf07..9f527344ec 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
@@ -26,6 +26,7 @@ import org.apache.nifi.kerberos.KerberosUserService;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.security.krb.KerberosLoginException;
 import org.apache.nifi.security.krb.KerberosUser;
@@ -35,14 +36,13 @@ import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 
 import static org.apache.nifi.hadoop.SecurityUtil.getUgiForKerberosUser;
-import static org.apache.nifi.processors.iceberg.PutIceberg.REL_FAILURE;
 
 /**
  * Base Iceberg processor class.
  */
 public abstract class AbstractIcebergProcessor extends AbstractProcessor {
 
-static final PropertyDescriptor CATALOG = new PropertyDescriptor.Builder()
+public static final PropertyDescriptor CATALOG = new 
PropertyDescriptor.Builder()
 .name("catalog-service")
 .displayName("Catalog Service")
 .description("Specifies the Controller Service to use for handling 
references to table’s metadata files.")
@@ -50,13 +50,18 @@ public abstract class AbstractIcebergProcessor extends 
AbstractProcessor {
 .required(true)
 .build();
 
-static final PropertyDescriptor KERBEROS_USER_SERVICE = new 
PropertyDescriptor.Builder()
+public static final PropertyDescriptor KERBEROS_USER_SERVICE = new 
PropertyDescriptor.Builder()
 .name("kerberos-user-service")
 .displayName("Kerberos User Service")
 .description("Specifies the Kerberos User Controller Service that 
should be used for authenticating with Kerberos.")
 .identifiesControllerService(KerberosUserService.class)
 .build();
 
+public static final Relationship REL_FAILURE = new Relationship.Builder()
+.name("failure")
+.description("A FlowFile is routed to this relationship if the 
operation failed and retrying the operation will also fail, such as an invalid 
data or schema.")
+.build();
+
 private volatile KerberosUser kerberosUser;
 private volatile UserGroupInformation ugi;
 
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
index d85a590a08..e5142c3345 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
@@ -163,11 +163,6 @@ public class PutIceberg extends AbstractIcebergProcessor {
 .description("A FlowFile is routed to this relationship after the 
data ingestion was successful.")
 .build();
 
-static final Relationship REL_FAILURE = new Relationship.Builder()
-.name("failure")
-.description("A FlowFile is routed to this relationship if the 
data ingestion failed and retrying the operation will also fail, such as an 
invalid data or schema.")
-.build();
-
 

[nifi] branch support/nifi-1.x updated: NIFI-11493: Defaulted dynamically modified classpath fix

2023-05-04 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new ab20a93a90 NIFI-11493: Defaulted dynamically modified classpath fix
ab20a93a90 is described below

commit ab20a93a90ab571df1d033db0d77e166e6ea10af
Author: Lehel Boér 
AuthorDate: Thu Apr 27 21:43:12 2023 +0200

NIFI-11493: Defaulted dynamically modified classpath fix

This closes #7201.

Co-authored-by: Peter Turcsanyi 
Signed-off-by: Peter Turcsanyi 
---
 .../nifi/controller/AbstractComponentNode.java |  38 +---
 .../DefaultedDynamicallyModifyClasspath.java   | 104 +
 .../services/org.apache.nifi.processor.Processor   |   1 +
 .../DefaultedDynamicClassPathModificationIT.java   |  94 +++
 4 files changed, 222 insertions(+), 15 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/AbstractComponentNode.java
 
b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/AbstractComponentNode.java
index 5935bf7cc9..59dfa865d5 100644
--- 
a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/AbstractComponentNode.java
+++ 
b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/AbstractComponentNode.java
@@ -604,6 +604,14 @@ public abstract class AbstractComponentNode implements 
ComponentNode {
 return getProperty(property).getEffectiveValue(getParameterContext());
 }
 
+private String getEffectivePropertyValueWithDefault(final 
PropertyDescriptor property) {
+String value = 
getProperty(property).getEffectiveValue(getParameterContext());
+if (value == null) {
+value = property.getDefaultValue();
+}
+return value;
+}
+
 @Override
 public String getRawPropertyValue(final PropertyDescriptor property) {
 return getProperty(property).getRawValue();
@@ -662,23 +670,23 @@ public abstract class AbstractComponentNode implements 
ComponentNode {
  */
 @Override
 public synchronized void reloadAdditionalResourcesIfNecessary() {
-// Components that don't have any PropertyDescriptors marked 
`dynamicallyModifiesClasspath`
-// won't have the fingerprint i.e. will be null, in such cases do 
nothing
-if (additionalResourcesFingerprint == null) {
-return;
-}
-
 final Set descriptors = 
this.getProperties().keySet();
-final Set additionalUrls = 
this.getAdditionalClasspathResources(descriptors);
 
-final String newFingerprint = 
ClassLoaderUtils.generateAdditionalUrlsFingerprint(additionalUrls, 
determineClasloaderIsolationKey());
-if(!StringUtils.equals(additionalResourcesFingerprint, 
newFingerprint)) {
-setAdditionalResourcesFingerprint(newFingerprint);
-try {
-logger.info("Updating classpath for " + this.componentType + " 
with the ID " + this.getIdentifier());
-reload(additionalUrls);
-} catch (Exception e) {
-logger.error("Error reloading component with id " + id + ": " 
+ e.getMessage(), e);
+final boolean dynamicallyModifiesClasspath = descriptors.stream()
+.anyMatch(PropertyDescriptor::isDynamicClasspathModifier);
+
+if (dynamicallyModifiesClasspath) {
+final Set additionalUrls = 
this.getAdditionalClasspathResources(descriptors, 
this::getEffectivePropertyValueWithDefault);
+
+final String newFingerprint = 
ClassLoaderUtils.generateAdditionalUrlsFingerprint(additionalUrls, 
determineClasloaderIsolationKey());
+if (!StringUtils.equals(additionalResourcesFingerprint, 
newFingerprint)) {
+setAdditionalResourcesFingerprint(newFingerprint);
+try {
+logger.info("Updating classpath for [{}] with the ID 
[{}]", this.componentType, this.getIdentifier());
+reload(additionalUrls);
+} catch (Exception e) {
+logger.error("Error reloading component with id [{}]: {}", 
id, e.getMessage(), e);
+}
 }
 }
 }
diff --git 
a/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/DefaultedDynamicallyModifyClasspath.java
 
b/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/DefaultedDynamicallyModifyClasspath.java
new file mode 100644
index 0

[nifi] branch main updated: NIFI-11493: Defaulted dynamically modified classpath fix

2023-05-04 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 265b132e73 NIFI-11493: Defaulted dynamically modified classpath fix
265b132e73 is described below

commit 265b132e73c01c956ec679bbbd339ce4649cdb54
Author: Lehel Boér 
AuthorDate: Thu Apr 27 21:43:12 2023 +0200

NIFI-11493: Defaulted dynamically modified classpath fix

This closes #7201.

Co-authored-by: Peter Turcsanyi 
Signed-off-by: Peter Turcsanyi 
---
 .../nifi/controller/AbstractComponentNode.java |  38 +---
 .../DefaultedDynamicallyModifyClasspath.java   | 104 +
 .../services/org.apache.nifi.processor.Processor   |   1 +
 .../DefaultedDynamicClassPathModificationIT.java   |  94 +++
 4 files changed, 222 insertions(+), 15 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/AbstractComponentNode.java
 
b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/AbstractComponentNode.java
index 07200140a8..94d87ad6f9 100644
--- 
a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/AbstractComponentNode.java
+++ 
b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/AbstractComponentNode.java
@@ -604,6 +604,14 @@ public abstract class AbstractComponentNode implements 
ComponentNode {
 return getProperty(property).getEffectiveValue(getParameterContext());
 }
 
+private String getEffectivePropertyValueWithDefault(final 
PropertyDescriptor property) {
+String value = 
getProperty(property).getEffectiveValue(getParameterContext());
+if (value == null) {
+value = property.getDefaultValue();
+}
+return value;
+}
+
 @Override
 public String getRawPropertyValue(final PropertyDescriptor property) {
 return getProperty(property).getRawValue();
@@ -662,23 +670,23 @@ public abstract class AbstractComponentNode implements 
ComponentNode {
  */
 @Override
 public synchronized void reloadAdditionalResourcesIfNecessary() {
-// Components that don't have any PropertyDescriptors marked 
`dynamicallyModifiesClasspath`
-// won't have the fingerprint i.e. will be null, in such cases do 
nothing
-if (additionalResourcesFingerprint == null) {
-return;
-}
-
 final Set descriptors = 
this.getProperties().keySet();
-final Set additionalUrls = 
this.getAdditionalClasspathResources(descriptors);
 
-final String newFingerprint = 
ClassLoaderUtils.generateAdditionalUrlsFingerprint(additionalUrls, 
determineClasloaderIsolationKey());
-if(!StringUtils.equals(additionalResourcesFingerprint, 
newFingerprint)) {
-setAdditionalResourcesFingerprint(newFingerprint);
-try {
-logger.info("Updating classpath for " + this.componentType + " 
with the ID " + this.getIdentifier());
-reload(additionalUrls);
-} catch (Exception e) {
-logger.error("Error reloading component with id " + id + ": " 
+ e.getMessage(), e);
+final boolean dynamicallyModifiesClasspath = descriptors.stream()
+.anyMatch(PropertyDescriptor::isDynamicClasspathModifier);
+
+if (dynamicallyModifiesClasspath) {
+final Set additionalUrls = 
this.getAdditionalClasspathResources(descriptors, 
this::getEffectivePropertyValueWithDefault);
+
+final String newFingerprint = 
ClassLoaderUtils.generateAdditionalUrlsFingerprint(additionalUrls, 
determineClasloaderIsolationKey());
+if (!StringUtils.equals(additionalResourcesFingerprint, 
newFingerprint)) {
+setAdditionalResourcesFingerprint(newFingerprint);
+try {
+logger.info("Updating classpath for [{}] with the ID 
[{}]", this.componentType, this.getIdentifier());
+reload(additionalUrls);
+} catch (Exception e) {
+logger.error("Error reloading component with id [{}]: {}", 
id, e.getMessage(), e);
+}
 }
 }
 }
diff --git 
a/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/DefaultedDynamicallyModifyClasspath.java
 
b/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/DefaultedDynamicallyModifyClasspath.java
new file mode 100644
index 00..673c3c

[nifi] branch support/nifi-1.x updated: NIFI-11342: HDFS processors fail to get ClassloaderIsolationKey at startup

2023-04-05 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 79097835a6 NIFI-11342: HDFS processors fail to get 
ClassloaderIsolationKey at startup
79097835a6 is described below

commit 79097835a6df718bf4b6756963e4eececd7b5694
Author: Mark Bathori 
AuthorDate: Tue Apr 4 16:42:35 2023 +0200

NIFI-11342: HDFS processors fail to get ClassloaderIsolationKey at startup

This closes #7121.

Signed-off-by: Peter Turcsanyi 
---
 .../processors/hadoop/AbstractHadoopProcessor.java | 24 +-
 1 file changed, 14 insertions(+), 10 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
index a967b9037a..f594ed2549 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
@@ -211,18 +211,22 @@ public abstract class AbstractHadoopProcessor extends 
AbstractProcessor implemen
 return explicitKerberosPrincipal;
 }
 
-final KerberosCredentialsService credentialsService = 
context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-if (credentialsService != null) {
-final String credentialsServicePrincipal = 
credentialsService.getPrincipal();
-if (credentialsServicePrincipal != null) {
-return credentialsServicePrincipal;
+try {
+final KerberosCredentialsService credentialsService = 
context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
+if (credentialsService != null) {
+final String credentialsServicePrincipal = 
credentialsService.getPrincipal();
+if (credentialsServicePrincipal != null) {
+return credentialsServicePrincipal;
+}
 }
-}
 
-final KerberosUserService kerberosUserService = 
context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
-if (kerberosUserService != null) {
-final KerberosUser kerberosUser = 
kerberosUserService.createKerberosUser();
-return kerberosUser.getPrincipal();
+final KerberosUserService kerberosUserService = 
context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
+if (kerberosUserService != null) {
+final KerberosUser kerberosUser = 
kerberosUserService.createKerberosUser();
+return kerberosUser.getPrincipal();
+}
+} catch (IllegalStateException e) {
+return null;
 }
 
 return null;



[nifi] branch main updated: NIFI-11342: HDFS processors fail to get ClassloaderIsolationKey at startup

2023-04-05 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new d15eeb445b NIFI-11342: HDFS processors fail to get 
ClassloaderIsolationKey at startup
d15eeb445b is described below

commit d15eeb445bf38905166dc50a012dcd2787f62646
Author: Mark Bathori 
AuthorDate: Tue Apr 4 16:42:35 2023 +0200

NIFI-11342: HDFS processors fail to get ClassloaderIsolationKey at startup

This closes #7121.

Signed-off-by: Peter Turcsanyi 
---
 .../processors/hadoop/AbstractHadoopProcessor.java | 24 +-
 1 file changed, 14 insertions(+), 10 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
index a967b9037a..f594ed2549 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
@@ -211,18 +211,22 @@ public abstract class AbstractHadoopProcessor extends 
AbstractProcessor implemen
 return explicitKerberosPrincipal;
 }
 
-final KerberosCredentialsService credentialsService = 
context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-if (credentialsService != null) {
-final String credentialsServicePrincipal = 
credentialsService.getPrincipal();
-if (credentialsServicePrincipal != null) {
-return credentialsServicePrincipal;
+try {
+final KerberosCredentialsService credentialsService = 
context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
+if (credentialsService != null) {
+final String credentialsServicePrincipal = 
credentialsService.getPrincipal();
+if (credentialsServicePrincipal != null) {
+return credentialsServicePrincipal;
+}
 }
-}
 
-final KerberosUserService kerberosUserService = 
context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
-if (kerberosUserService != null) {
-final KerberosUser kerberosUser = 
kerberosUserService.createKerberosUser();
-return kerberosUser.getPrincipal();
+final KerberosUserService kerberosUserService = 
context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
+if (kerberosUserService != null) {
+final KerberosUser kerberosUser = 
kerberosUserService.createKerberosUser();
+return kerberosUser.getPrincipal();
+}
+} catch (IllegalStateException e) {
+return null;
 }
 
 return null;



[nifi] branch support/nifi-1.x updated: NIFI-11137 Add record support to Consume/PublishJMS (#6987)

2023-04-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 0510f8cca2 NIFI-11137 Add record support to Consume/PublishJMS (#6987)
0510f8cca2 is described below

commit 0510f8cca29a2f92c6008b345ba40c2bc33f2e75
Author: Nandor Soma Abonyi 
AuthorDate: Sun Mar 12 22:58:55 2023 +0100

NIFI-11137 Add record support to Consume/PublishJMS (#6987)
---
 .../nifi-jms-bundle/nifi-jms-processors/pom.xml|  22 ++
 .../nifi/jms/processors/AbstractJMSProcessor.java  |  16 ++
 .../org/apache/nifi/jms/processors/ConsumeJMS.java | 204 ++---
 .../apache/nifi/jms/processors/JMSConsumer.java| 194 ++--
 .../apache/nifi/jms/processors/JMSPublisher.java   |   2 +-
 .../org/apache/nifi/jms/processors/PublishJMS.java | 140 +---
 .../ioconcept/reader/FlowFileReader.java   |  24 ++
 .../ioconcept/reader/FlowFileReaderCallback.java   |  24 ++
 .../ioconcept/reader/MessageHandler.java   |  21 ++
 .../reader/StateTrackingFlowFileReader.java|  73 ++
 .../reader/record/ProvenanceEventTemplates.java|  25 +++
 .../ioconcept/reader/record/RecordSupplier.java|  82 +++
 .../ioconcept/writer/AttributeSource.java  |  23 ++
 .../ioconcept/writer/FlowFileWriter.java   |  25 +++
 .../ioconcept/writer/FlowFileWriterCallback.java   |  27 +++
 .../processors/ioconcept/writer/Marshaller.java|  21 ++
 .../ioconcept/writer/record/OutputStrategy.java|  57 +
 .../ioconcept/writer/record/RecordUtils.java   |  78 +++
 .../ioconcept/writer/record/RecordWriter.java  | 205 +
 .../apache/nifi/jms/processors/ConsumeJMSIT.java   | 199 +
 .../jms/processors/JMSPublisherConsumerIT.java | 164 ++
 .../apache/nifi/jms/processors/PublishJMSIT.java   | 248 -
 .../nifi/jms/processors/helpers/JMSTestUtil.java   |  44 
 23 files changed, 1679 insertions(+), 239 deletions(-)

diff --git a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
index d171a03946..e57e1f991a 100644
--- a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
@@ -58,6 +58,14 @@
 commons-io
 commons-io
 
+
+org.apache.nifi
+nifi-record-serialization-service-api
+
+
+org.apache.nifi
+nifi-record
+
 
 org.apache.activemq
 activemq-client
@@ -83,6 +91,20 @@
 1.21.0-SNAPSHOT
 test
 
+
+
+
+org.apache.nifi
+nifi-record-serialization-services
+1.21.0-SNAPSHOT
+test
+
+
+org.apache.nifi
+nifi-schema-registry-service-api
+1.21.0-SNAPSHOT
+test
+
 
 
 
diff --git 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/AbstractJMSProcessor.java
 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/AbstractJMSProcessor.java
index aa8fa4dd64..470260cfc9 100644
--- 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/AbstractJMSProcessor.java
+++ 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/AbstractJMSProcessor.java
@@ -37,6 +37,8 @@ import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Processor;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.serialization.RecordReaderFactory;
+import org.apache.nifi.serialization.RecordSetWriterFactory;
 import org.springframework.jms.connection.CachingConnectionFactory;
 import org.springframework.jms.connection.SingleConnectionFactory;
 import 
org.springframework.jms.connection.UserCredentialsConnectionFactoryAdapter;
@@ -151,6 +153,20 @@ public abstract class AbstractJMSProcessor extends Abstract
 .collect(Collectors.toList())
 );
 
+static final PropertyDescriptor BASE_RECORD_READER = new 
PropertyDescriptor.Builder()
+.name("record-reader")
+.displayName("Record Reader")
+.identifiesControllerService(RecordReaderFactory.class)
+.required(false)
+.build();
+
+static final PropertyDescriptor BASE_RECORD_WRITER = new 
PropertyDescriptor.Builder()
+.name("record-writer")
+.displayName("Record Writer")
+.identifiesContro

[nifi] branch main updated: NIFI-11137 Add record support to Consume/PublishJMS (#6987)

2023-04-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 32df0fa484 NIFI-11137 Add record support to Consume/PublishJMS (#6987)
32df0fa484 is described below

commit 32df0fa4849325d742d36e57b686cb88bd65d2b6
Author: Nandor Soma Abonyi 
AuthorDate: Mon Apr 3 21:42:27 2023 +0200

NIFI-11137 Add record support to Consume/PublishJMS (#6987)
---
 .../nifi-jms-bundle/nifi-jms-processors/pom.xml|  22 ++
 .../nifi/jms/processors/AbstractJMSProcessor.java  |  16 ++
 .../org/apache/nifi/jms/processors/ConsumeJMS.java | 204 ++---
 .../apache/nifi/jms/processors/JMSConsumer.java| 194 ++--
 .../apache/nifi/jms/processors/JMSPublisher.java   |   2 +-
 .../org/apache/nifi/jms/processors/PublishJMS.java | 140 +---
 .../ioconcept/reader/FlowFileReader.java   |  24 ++
 .../ioconcept/reader/FlowFileReaderCallback.java   |  24 ++
 .../ioconcept/reader/MessageHandler.java   |  21 ++
 .../reader/StateTrackingFlowFileReader.java|  73 ++
 .../reader/record/ProvenanceEventTemplates.java|  25 +++
 .../ioconcept/reader/record/RecordSupplier.java|  82 +++
 .../ioconcept/writer/AttributeSource.java  |  23 ++
 .../ioconcept/writer/FlowFileWriter.java   |  25 +++
 .../ioconcept/writer/FlowFileWriterCallback.java   |  27 +++
 .../processors/ioconcept/writer/Marshaller.java|  21 ++
 .../ioconcept/writer/record/OutputStrategy.java|  57 +
 .../ioconcept/writer/record/RecordUtils.java   |  78 +++
 .../ioconcept/writer/record/RecordWriter.java  | 205 +
 .../apache/nifi/jms/processors/ConsumeJMSIT.java   | 199 +
 .../jms/processors/JMSPublisherConsumerIT.java | 164 ++
 .../apache/nifi/jms/processors/PublishJMSIT.java   | 248 -
 .../nifi/jms/processors/helpers/JMSTestUtil.java   |  44 
 23 files changed, 1679 insertions(+), 239 deletions(-)

diff --git a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
index b05909b360..ebfe0f6dbc 100644
--- a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
@@ -58,6 +58,14 @@
 commons-io
 commons-io
 
+
+org.apache.nifi
+nifi-record-serialization-service-api
+
+
+org.apache.nifi
+nifi-record
+
 
 org.apache.activemq
 activemq-client
@@ -83,6 +91,20 @@
 2.0.0-SNAPSHOT
 test
 
+
+
+
+org.apache.nifi
+nifi-record-serialization-services
+2.0.0-SNAPSHOT
+test
+
+
+org.apache.nifi
+nifi-schema-registry-service-api
+2.0.0-SNAPSHOT
+test
+
 
 
 
diff --git 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/AbstractJMSProcessor.java
 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/AbstractJMSProcessor.java
index aa8fa4dd64..470260cfc9 100644
--- 
a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/AbstractJMSProcessor.java
+++ 
b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/AbstractJMSProcessor.java
@@ -37,6 +37,8 @@ import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Processor;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.serialization.RecordReaderFactory;
+import org.apache.nifi.serialization.RecordSetWriterFactory;
 import org.springframework.jms.connection.CachingConnectionFactory;
 import org.springframework.jms.connection.SingleConnectionFactory;
 import 
org.springframework.jms.connection.UserCredentialsConnectionFactoryAdapter;
@@ -151,6 +153,20 @@ public abstract class AbstractJMSProcessor extends Abstract
 .collect(Collectors.toList())
 );
 
+static final PropertyDescriptor BASE_RECORD_READER = new 
PropertyDescriptor.Builder()
+.name("record-reader")
+.displayName("Record Reader")
+.identifiesControllerService(RecordReaderFactory.class)
+.required(false)
+.build();
+
+static final PropertyDescriptor BASE_RECORD_WRITER = new 
PropertyDescriptor.Builder()
+.name("record-writer")
+.displayName("Record Writer")
+.identifiesControllerService(RecordSetWriterFactory.class)
+ 

[nifi] branch support/nifi-1.x updated: NIFI-11367 Database DATE value displayed incorrectly in record output if timezone is GMT-X

2023-04-02 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new b12572af20 NIFI-11367 Database DATE value displayed incorrectly in 
record output if timezone is GMT-X
b12572af20 is described below

commit b12572af200fcdb33f37ef60683402bc408ffce8
Author: krisztina-zsihovszki 
AuthorDate: Thu Mar 30 17:25:28 2023 +0200

NIFI-11367 Database DATE value displayed incorrectly in record output if 
timezone is GMT-X

This closes #7103.

Signed-off-by: Peter Turcsanyi 
---
 .../org/apache/nifi/serialization/record/ResultSetRecordSet.java   | 7 ---
 .../apache/nifi/serialization/record/ResultSetRecordSetTest.java   | 5 +
 2 files changed, 1 insertion(+), 11 deletions(-)

diff --git 
a/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/ResultSetRecordSet.java
 
b/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/ResultSetRecordSet.java
index d2c4f6cb09..84a7b4569a 100644
--- 
a/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/ResultSetRecordSet.java
+++ 
b/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/ResultSetRecordSet.java
@@ -19,7 +19,6 @@ package org.apache.nifi.serialization.record;
 
 import org.apache.nifi.serialization.SimpleRecordSchema;
 import org.apache.nifi.serialization.record.type.ArrayDataType;
-import org.apache.nifi.serialization.record.util.DataTypeUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -158,12 +157,6 @@ public class ResultSetRecordSet implements RecordSet, 
Closeable {
 return null;
 }
 
-if (value instanceof java.sql.Date) {
-// Date objects should be stored in records as UTC normalized 
dates (UTC 00:00:00)
-// but they come from the driver in JVM's local time zone 00:00:00 
and need to be converted.
-return DataTypeUtils.convertDateToUTC((java.sql.Date) value);
-}
-
 if (value instanceof List) {
 return ((List) value).toArray();
 }
diff --git 
a/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/ResultSetRecordSetTest.java
 
b/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/ResultSetRecordSetTest.java
index 9b469a2e56..e9e5ec861b 100644
--- 
a/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/ResultSetRecordSetTest.java
+++ 
b/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/ResultSetRecordSetTest.java
@@ -42,7 +42,6 @@ import java.sql.Timestamp;
 import java.sql.Types;
 import java.time.LocalDate;
 import java.time.LocalDateTime;
-import java.time.ZoneOffset;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -310,9 +309,7 @@ public class ResultSetRecordSetTest {
 assertEquals(booleanValue, record.getAsBoolean(COLUMN_NAME_BOOLEAN));
 assertEquals(charValue, record.getValue(COLUMN_NAME_CHAR));
 
-// Date is expected in UTC normalized form
-Date expectedDate = new 
Date(testDate.atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli());
-assertEquals(expectedDate, record.getAsDate(COLUMN_NAME_DATE, null));
+assertEquals(dateValue, record.getAsDate(COLUMN_NAME_DATE, null));
 assertEquals(timestampValue, 
DataTypeUtils.toTimestamp(record.getValue(COLUMN_NAME_TIMESTAMP), null, 
COLUMN_NAME_TIMESTAMP));
 
 assertEquals(integerValue, record.getAsInt(COLUMN_NAME_INTEGER));



[nifi] branch main updated: NIFI-11367 Database DATE value displayed incorrectly in record output if timezone is GMT-X

2023-04-02 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new a36040fded NIFI-11367 Database DATE value displayed incorrectly in 
record output if timezone is GMT-X
a36040fded is described below

commit a36040fded2e4cc9b4db4d13aa724e737da48953
Author: krisztina-zsihovszki 
AuthorDate: Thu Mar 30 17:25:28 2023 +0200

NIFI-11367 Database DATE value displayed incorrectly in record output if 
timezone is GMT-X

This closes #7103.

Signed-off-by: Peter Turcsanyi 
---
 .../org/apache/nifi/serialization/record/ResultSetRecordSet.java   | 7 ---
 .../apache/nifi/serialization/record/ResultSetRecordSetTest.java   | 5 +
 2 files changed, 1 insertion(+), 11 deletions(-)

diff --git 
a/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/ResultSetRecordSet.java
 
b/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/ResultSetRecordSet.java
index d2c4f6cb09..84a7b4569a 100644
--- 
a/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/ResultSetRecordSet.java
+++ 
b/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/ResultSetRecordSet.java
@@ -19,7 +19,6 @@ package org.apache.nifi.serialization.record;
 
 import org.apache.nifi.serialization.SimpleRecordSchema;
 import org.apache.nifi.serialization.record.type.ArrayDataType;
-import org.apache.nifi.serialization.record.util.DataTypeUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -158,12 +157,6 @@ public class ResultSetRecordSet implements RecordSet, 
Closeable {
 return null;
 }
 
-if (value instanceof java.sql.Date) {
-// Date objects should be stored in records as UTC normalized 
dates (UTC 00:00:00)
-// but they come from the driver in JVM's local time zone 00:00:00 
and need to be converted.
-return DataTypeUtils.convertDateToUTC((java.sql.Date) value);
-}
-
 if (value instanceof List) {
 return ((List) value).toArray();
 }
diff --git 
a/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/ResultSetRecordSetTest.java
 
b/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/ResultSetRecordSetTest.java
index 9b469a2e56..e9e5ec861b 100644
--- 
a/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/ResultSetRecordSetTest.java
+++ 
b/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/ResultSetRecordSetTest.java
@@ -42,7 +42,6 @@ import java.sql.Timestamp;
 import java.sql.Types;
 import java.time.LocalDate;
 import java.time.LocalDateTime;
-import java.time.ZoneOffset;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -310,9 +309,7 @@ public class ResultSetRecordSetTest {
 assertEquals(booleanValue, record.getAsBoolean(COLUMN_NAME_BOOLEAN));
 assertEquals(charValue, record.getValue(COLUMN_NAME_CHAR));
 
-// Date is expected in UTC normalized form
-Date expectedDate = new 
Date(testDate.atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli());
-assertEquals(expectedDate, record.getAsDate(COLUMN_NAME_DATE, null));
+assertEquals(dateValue, record.getAsDate(COLUMN_NAME_DATE, null));
 assertEquals(timestampValue, 
DataTypeUtils.toTimestamp(record.getValue(COLUMN_NAME_TIMESTAMP), null, 
COLUMN_NAME_TIMESTAMP));
 
 assertEquals(integerValue, record.getAsInt(COLUMN_NAME_INTEGER));



[nifi] 01/02: NIFI-11154 Removed Default Schedule from ConsumeMQTT

2023-03-29 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit 3994c0a93e5f90cc22cfd435304dd8cfeaa551f7
Author: exceptionfactory 
AuthorDate: Wed Feb 8 14:36:18 2023 -0600

NIFI-11154 Removed Default Schedule from ConsumeMQTT

Signed-off-by: Pierre Villard 

This closes #6936.
---
 .../src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java | 3 ---
 1 file changed, 3 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
 
b/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
index e4fd2caef3..f0d529a233 100644
--- 
a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
+++ 
b/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
@@ -24,7 +24,6 @@ import 
org.apache.nifi.annotation.behavior.SystemResourceConsideration;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
-import org.apache.nifi.annotation.configuration.DefaultSchedule;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -47,7 +46,6 @@ import 
org.apache.nifi.processors.mqtt.common.AbstractMQTTProcessor;
 import org.apache.nifi.processors.mqtt.common.MqttCallback;
 import org.apache.nifi.processors.mqtt.common.MqttException;
 import org.apache.nifi.processors.mqtt.common.ReceivedMqttMessage;
-import org.apache.nifi.scheduling.SchedulingStrategy;
 import org.apache.nifi.serialization.MalformedRecordException;
 import org.apache.nifi.serialization.RecordReader;
 import org.apache.nifi.serialization.RecordReaderFactory;
@@ -106,7 +104,6 @@ import static 
org.apache.nifi.processors.mqtt.common.MqttConstants.ALLOWABLE_VAL
 "on the topic.")})
 @SystemResourceConsideration(resource = SystemResource.MEMORY, description = 
"The 'Max Queue Size' specifies the maximum number of messages that can be hold 
in memory by NiFi by a single "
 + "instance of this processor. A high value for this property could 
represent a lot of data being stored in memory.")
-@DefaultSchedule(strategy = SchedulingStrategy.TIMER_DRIVEN, period = "1 min")
 public class ConsumeMQTT extends AbstractMQTTProcessor implements MqttCallback 
{
 
 public final static String RECORD_COUNT_KEY = "record.count";



[nifi] 02/02: NIFI-11270 Refactoring of the overly Paho-specific MQTT interface

2023-03-29 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit b387576fc0fd192cbfbde6991c7c40db47fa47b8
Author: Nandor Soma Abonyi 
AuthorDate: Sun Mar 12 22:58:55 2023 +0100

NIFI-11270 Refactoring of the overly Paho-specific MQTT interface

This closes #7032.

Signed-off-by: Peter Turcsanyi 
---
 .../apache/nifi/processors/mqtt/ConsumeMQTT.java   | 21 +
 .../apache/nifi/processors/mqtt/PublishMQTT.java   | 23 +-
 .../mqtt/adapters/HiveMqV5ClientAdapter.java   | 15 +---
 .../mqtt/adapters/PahoMqttClientAdapter.java   | 89 +++---
 .../nifi/processors/mqtt/common/MqttClient.java| 10 +--
 ...llback.java => ReceivedMqttMessageHandler.java} | 12 ++-
 .../processors/mqtt/common/MqttTestClient.java | 13 +---
 7 files changed, 84 insertions(+), 99 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
 
b/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
index f0d529a233..02982600f7 100644
--- 
a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
+++ 
b/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
@@ -43,7 +43,6 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.mqtt.common.AbstractMQTTProcessor;
-import org.apache.nifi.processors.mqtt.common.MqttCallback;
 import org.apache.nifi.processors.mqtt.common.MqttException;
 import org.apache.nifi.processors.mqtt.common.ReceivedMqttMessage;
 import org.apache.nifi.serialization.MalformedRecordException;
@@ -104,7 +103,7 @@ import static 
org.apache.nifi.processors.mqtt.common.MqttConstants.ALLOWABLE_VAL
 "on the topic.")})
 @SystemResourceConsideration(resource = SystemResource.MEMORY, description = 
"The 'Max Queue Size' specifies the maximum number of messages that can be hold 
in memory by NiFi by a single "
 + "instance of this processor. A high value for this property could 
represent a lot of data being stored in memory.")
-public class ConsumeMQTT extends AbstractMQTTProcessor implements MqttCallback 
{
+public class ConsumeMQTT extends AbstractMQTTProcessor {
 
 public final static String RECORD_COUNT_KEY = "record.count";
 public final static String BROKER_ATTRIBUTE_KEY = "mqtt.broker";
@@ -383,9 +382,8 @@ public class ConsumeMQTT extends AbstractMQTTProcessor 
implements MqttCallback {
 // non-null but not connected, so we need to handle each case and only 
create a new client when it is null
 try {
 mqttClient = createMqttClient();
-mqttClient.setCallback(this);
 mqttClient.connect();
-mqttClient.subscribe(topicPrefix + topicFilter, qos);
+mqttClient.subscribe(topicPrefix + topicFilter, qos, 
this::handleReceivedMessage);
 } catch (Exception e) {
 logger.error("Connection failed to {}. Yielding processor", 
clientProperties.getRawBrokerUris(), e);
 mqttClient = null; // prevent stucked processor when subscribe 
fails
@@ -614,13 +612,7 @@ public class ConsumeMQTT extends AbstractMQTTProcessor 
implements MqttCallback {
 return stringBuilder.toString();
 }
 
-@Override
-public void connectionLost(Throwable cause) {
-logger.error("Connection to {} lost", 
clientProperties.getRawBrokerUris(), cause);
-}
-
-@Override
-public void messageArrived(ReceivedMqttMessage message) {
+private void handleReceivedMessage(ReceivedMqttMessage message) {
 if (logger.isDebugEnabled()) {
 byte[] payload = message.getPayload();
 final String text = new String(payload, StandardCharsets.UTF_8);
@@ -639,11 +631,4 @@ public class ConsumeMQTT extends AbstractMQTTProcessor 
implements MqttCallback {
 throw new MqttException("Failed to process message arrived from 
topic " + message.getTopic());
 }
 }
-
-@Override
-public void deliveryComplete(String token) {
-// Unlikely situation. Api uses the same callback for publisher and 
consumer as well.
-// That's why we have this log message here to indicate something 
really messy thing happened.
-logger.error("Received MQTT 'delivery complete' message to subscriber. 
Token: [{}]", token);
-}
 }
diff --git 
a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/PublishMQTT.java
 
b/nifi-nar-bundles/nifi-mq

[nifi] branch support/nifi-1.x updated (6b6d779069 -> b387576fc0)

2023-03-29 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a change to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


from 6b6d779069 NIFI-11349 Excluded reload4j from HBase 2 jigsaw profile
 new 3994c0a93e NIFI-11154 Removed Default Schedule from ConsumeMQTT
 new b387576fc0 NIFI-11270 Refactoring of the overly Paho-specific MQTT 
interface

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../apache/nifi/processors/mqtt/ConsumeMQTT.java   | 24 +-
 .../apache/nifi/processors/mqtt/PublishMQTT.java   | 23 +-
 .../mqtt/adapters/HiveMqV5ClientAdapter.java   | 15 +---
 .../mqtt/adapters/PahoMqttClientAdapter.java   | 89 +++---
 .../nifi/processors/mqtt/common/MqttClient.java| 10 +--
 ...llback.java => ReceivedMqttMessageHandler.java} | 12 ++-
 .../processors/mqtt/common/MqttTestClient.java | 13 +---
 7 files changed, 84 insertions(+), 102 deletions(-)
 rename 
nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/common/{MqttCallback.java
 => ReceivedMqttMessageHandler.java} (80%)



[nifi] branch main updated: NIFI-11270 Refactoring of the overly Paho-specific MQTT interface

2023-03-29 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 2b9f2071ed NIFI-11270 Refactoring of the overly Paho-specific MQTT 
interface
2b9f2071ed is described below

commit 2b9f2071ed111f461236900277976f91bc1ac029
Author: Nandor Soma Abonyi 
AuthorDate: Sun Mar 12 22:58:55 2023 +0100

NIFI-11270 Refactoring of the overly Paho-specific MQTT interface

This closes #7032.

Signed-off-by: Peter Turcsanyi 
---
 .../apache/nifi/processors/mqtt/ConsumeMQTT.java   | 21 +
 .../apache/nifi/processors/mqtt/PublishMQTT.java   | 23 +-
 .../mqtt/adapters/HiveMqV5ClientAdapter.java   | 15 +---
 .../mqtt/adapters/PahoMqttClientAdapter.java   | 89 +++---
 .../nifi/processors/mqtt/common/MqttClient.java| 10 +--
 ...llback.java => ReceivedMqttMessageHandler.java} | 12 ++-
 .../processors/mqtt/common/MqttTestClient.java | 13 +---
 7 files changed, 84 insertions(+), 99 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
 
b/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
index f0d529a233..02982600f7 100644
--- 
a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
+++ 
b/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
@@ -43,7 +43,6 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.mqtt.common.AbstractMQTTProcessor;
-import org.apache.nifi.processors.mqtt.common.MqttCallback;
 import org.apache.nifi.processors.mqtt.common.MqttException;
 import org.apache.nifi.processors.mqtt.common.ReceivedMqttMessage;
 import org.apache.nifi.serialization.MalformedRecordException;
@@ -104,7 +103,7 @@ import static 
org.apache.nifi.processors.mqtt.common.MqttConstants.ALLOWABLE_VAL
 "on the topic.")})
 @SystemResourceConsideration(resource = SystemResource.MEMORY, description = 
"The 'Max Queue Size' specifies the maximum number of messages that can be hold 
in memory by NiFi by a single "
 + "instance of this processor. A high value for this property could 
represent a lot of data being stored in memory.")
-public class ConsumeMQTT extends AbstractMQTTProcessor implements MqttCallback 
{
+public class ConsumeMQTT extends AbstractMQTTProcessor {
 
 public final static String RECORD_COUNT_KEY = "record.count";
 public final static String BROKER_ATTRIBUTE_KEY = "mqtt.broker";
@@ -383,9 +382,8 @@ public class ConsumeMQTT extends AbstractMQTTProcessor 
implements MqttCallback {
 // non-null but not connected, so we need to handle each case and only 
create a new client when it is null
 try {
 mqttClient = createMqttClient();
-mqttClient.setCallback(this);
 mqttClient.connect();
-mqttClient.subscribe(topicPrefix + topicFilter, qos);
+mqttClient.subscribe(topicPrefix + topicFilter, qos, 
this::handleReceivedMessage);
 } catch (Exception e) {
 logger.error("Connection failed to {}. Yielding processor", 
clientProperties.getRawBrokerUris(), e);
 mqttClient = null; // prevent stucked processor when subscribe 
fails
@@ -614,13 +612,7 @@ public class ConsumeMQTT extends AbstractMQTTProcessor 
implements MqttCallback {
 return stringBuilder.toString();
 }
 
-@Override
-public void connectionLost(Throwable cause) {
-logger.error("Connection to {} lost", 
clientProperties.getRawBrokerUris(), cause);
-}
-
-@Override
-public void messageArrived(ReceivedMqttMessage message) {
+private void handleReceivedMessage(ReceivedMqttMessage message) {
 if (logger.isDebugEnabled()) {
 byte[] payload = message.getPayload();
 final String text = new String(payload, StandardCharsets.UTF_8);
@@ -639,11 +631,4 @@ public class ConsumeMQTT extends AbstractMQTTProcessor 
implements MqttCallback {
 throw new MqttException("Failed to process message arrived from 
topic " + message.getTopic());
 }
 }
-
-@Override
-public void deliveryComplete(String token) {
-// Unlikely situation. Api uses the same callback for publisher and 
consumer as well.
-// That's why we have this log message here to indicate something 
really messy thing happened.
-logger.error("Received MQTT 'delivery complete' message to subscriber. 
Token: [{}]", token);

[nifi] branch support/nifi-1.x updated: NIFI-11215: Add custom validation for KerberosUserService in PutIceberg

2023-03-24 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 986a1f0891 NIFI-11215: Add custom validation for KerberosUserService 
in PutIceberg
986a1f0891 is described below

commit 986a1f089106b078d5c6c73eb8d0e23a8753bbdf
Author: Mark Bathori 
AuthorDate: Thu Feb 23 16:40:55 2023 +0100

NIFI-11215: Add custom validation for KerberosUserService in PutIceberg

This closes #6985.

Signed-off-by: Peter Turcsanyi 
---
 .../apache/nifi/processors/iceberg/PutIceberg.java |  35 ++
 .../iceberg/TestPutIcebergCustomValidation.java| 129 +
 .../iceberg/TestPutIcebergWithHiveCatalog.java |   5 +-
 .../iceberg/catalog/TestHadoopCatalogService.java  |   4 +-
 .../iceberg/catalog/TestHiveCatalogService.java|  51 ++--
 .../nifi/services/iceberg/HiveCatalogService.java  |   9 +-
 6 files changed, 216 insertions(+), 17 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
index 1af97768f3..d85a590a08 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
@@ -34,8 +34,11 @@ import 
org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.components.ValidationContext;
+import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.context.PropertyContext;
 import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.hadoop.SecurityUtil;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
@@ -49,7 +52,9 @@ import org.apache.nifi.serialization.record.Record;
 import org.apache.nifi.services.iceberg.IcebergCatalogService;
 
 import java.io.InputStream;
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
@@ -192,6 +197,36 @@ public class PutIceberg extends AbstractIcebergProcessor {
 return RELATIONSHIPS;
 }
 
+@Override
+protected Collection customValidate(ValidationContext 
context) {
+final List problems = new ArrayList<>();
+final IcebergCatalogService catalogService = 
context.getProperty(CATALOG).asControllerService(IcebergCatalogService.class);
+boolean catalogServiceEnabled = 
context.getControllerServiceLookup().isControllerServiceEnabled(catalogService);
+
+if (catalogServiceEnabled) {
+final boolean kerberosUserServiceIsSet = 
context.getProperty(KERBEROS_USER_SERVICE).isSet();
+final boolean securityEnabled = 
SecurityUtil.isSecurityEnabled(catalogService.getConfiguration());
+
+if (securityEnabled && !kerberosUserServiceIsSet) {
+problems.add(new ValidationResult.Builder()
+.subject(KERBEROS_USER_SERVICE.getDisplayName())
+.valid(false)
+.explanation("'hadoop.security.authentication' is set 
to 'kerberos' in the hadoop configuration files but no KerberosUserService is 
configured.")
+.build());
+}
+
+if (!securityEnabled && kerberosUserServiceIsSet) {
+problems.add(new ValidationResult.Builder()
+.subject(KERBEROS_USER_SERVICE.getDisplayName())
+.valid(false)
+.explanation("KerberosUserService is configured but 
'hadoop.security.authentication' is not set to 'kerberos' in the hadoop 
configuration files.")
+.build());
+}
+}
+
+return problems;
+}
+
 @Override
 public void doOnTrigger(ProcessContext context, ProcessSession session, 
FlowFile flowFile) throws ProcessException {
 final RecordReaderFactory readerFactory = 
context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergCustomValidation.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPut

[nifi] branch main updated: NIFI-11215: Add custom validation for KerberosUserService in PutIceberg

2023-03-24 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new b50c8101cf NIFI-11215: Add custom validation for KerberosUserService 
in PutIceberg
b50c8101cf is described below

commit b50c8101cfd49cb314a470b5408196d2b0a50bcf
Author: Mark Bathori 
AuthorDate: Thu Feb 23 16:40:55 2023 +0100

NIFI-11215: Add custom validation for KerberosUserService in PutIceberg

This closes #6985.

Signed-off-by: Peter Turcsanyi 
---
 .../apache/nifi/processors/iceberg/PutIceberg.java |  35 ++
 .../iceberg/TestPutIcebergCustomValidation.java| 129 +
 .../iceberg/TestPutIcebergWithHiveCatalog.java |   5 +-
 .../iceberg/catalog/TestHadoopCatalogService.java  |   4 +-
 .../iceberg/catalog/TestHiveCatalogService.java|  51 ++--
 .../nifi/services/iceberg/HiveCatalogService.java  |   9 +-
 6 files changed, 216 insertions(+), 17 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
index 1af97768f3..d85a590a08 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
@@ -34,8 +34,11 @@ import 
org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.components.ValidationContext;
+import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.context.PropertyContext;
 import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.hadoop.SecurityUtil;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
@@ -49,7 +52,9 @@ import org.apache.nifi.serialization.record.Record;
 import org.apache.nifi.services.iceberg.IcebergCatalogService;
 
 import java.io.InputStream;
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
@@ -192,6 +197,36 @@ public class PutIceberg extends AbstractIcebergProcessor {
 return RELATIONSHIPS;
 }
 
+@Override
+protected Collection customValidate(ValidationContext 
context) {
+final List problems = new ArrayList<>();
+final IcebergCatalogService catalogService = 
context.getProperty(CATALOG).asControllerService(IcebergCatalogService.class);
+boolean catalogServiceEnabled = 
context.getControllerServiceLookup().isControllerServiceEnabled(catalogService);
+
+if (catalogServiceEnabled) {
+final boolean kerberosUserServiceIsSet = 
context.getProperty(KERBEROS_USER_SERVICE).isSet();
+final boolean securityEnabled = 
SecurityUtil.isSecurityEnabled(catalogService.getConfiguration());
+
+if (securityEnabled && !kerberosUserServiceIsSet) {
+problems.add(new ValidationResult.Builder()
+.subject(KERBEROS_USER_SERVICE.getDisplayName())
+.valid(false)
+.explanation("'hadoop.security.authentication' is set 
to 'kerberos' in the hadoop configuration files but no KerberosUserService is 
configured.")
+.build());
+}
+
+if (!securityEnabled && kerberosUserServiceIsSet) {
+problems.add(new ValidationResult.Builder()
+.subject(KERBEROS_USER_SERVICE.getDisplayName())
+.valid(false)
+.explanation("KerberosUserService is configured but 
'hadoop.security.authentication' is not set to 'kerberos' in the hadoop 
configuration files.")
+.build());
+}
+}
+
+return problems;
+}
+
 @Override
 public void doOnTrigger(ProcessContext context, ProcessSession session, 
FlowFile flowFile) throws ProcessException {
 final RecordReaderFactory readerFactory = 
context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergCustomValidation.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/test/java/org/apache/nifi/processors/iceberg/TestPutIcebergCustomValidation.

[nifi] branch support/nifi-1.x updated: NIFI-11204: Add configurable retry logic for table commits in PutIceberg processor

2023-03-20 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 835c95fca0 NIFI-11204: Add configurable retry logic for table commits 
in PutIceberg processor
835c95fca0 is described below

commit 835c95fca0d4d4aff69096e9683f73a05dc4c196
Author: Mark Bathori 
AuthorDate: Tue Feb 21 15:11:19 2023 +0100

NIFI-11204: Add configurable retry logic for table commits in PutIceberg 
processor

This closes #6976.

Signed-off-by: Peter Turcsanyi 
---
 .../iceberg/AbstractIcebergProcessor.java  |   2 +-
 .../apache/nifi/processors/iceberg/PutIceberg.java |  68 ++-
 .../additionalDetails.html |  58 ++
 .../processors/iceberg/TestDataFileActions.java| 195 +
 .../nifi/processors/iceberg/TestFileAbort.java | 108 
 5 files changed, 314 insertions(+), 117 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
index 7dc53eefd7..a4ec2ccf07 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
@@ -107,7 +107,7 @@ public abstract class AbstractIcebergProcessor extends 
AbstractProcessor {
 
 } catch (Exception e) {
 getLogger().error("Privileged action failed with kerberos user 
" + kerberosUser, e);
-session.transfer(flowFile, REL_FAILURE);
+session.transfer(session.penalize(flowFile), REL_FAILURE);
 }
 }
 }
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
index cdd2997a58..1af97768f3 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
@@ -19,10 +19,12 @@ package org.apache.nifi.processors.iceberg;
 import org.apache.iceberg.AppendFiles;
 import org.apache.iceberg.DataFile;
 import org.apache.iceberg.FileFormat;
+import org.apache.iceberg.PendingUpdate;
 import org.apache.iceberg.Table;
 import org.apache.iceberg.catalog.Catalog;
 import org.apache.iceberg.catalog.Namespace;
 import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.exceptions.CommitFailedException;
 import org.apache.iceberg.io.TaskWriter;
 import org.apache.iceberg.io.WriteResult;
 import org.apache.iceberg.util.Tasks;
@@ -54,6 +56,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.TimeUnit;
 
 import static org.apache.iceberg.TableProperties.DEFAULT_FILE_FORMAT;
 import static org.apache.iceberg.TableProperties.DEFAULT_FILE_FORMAT_DEFAULT;
@@ -114,6 +117,42 @@ public class PutIceberg extends AbstractIcebergProcessor {
 .addValidator(StandardValidators.LONG_VALIDATOR)
 .build();
 
+static final PropertyDescriptor NUMBER_OF_COMMIT_RETRIES = new 
PropertyDescriptor.Builder()
+.name("number-of-commit-retries")
+.displayName("Number of Commit Retries")
+.description("Number of times to retry a commit before failing.")
+.required(true)
+.defaultValue("10")
+.addValidator(StandardValidators.INTEGER_VALIDATOR)
+.build();
+
+static final PropertyDescriptor MINIMUM_COMMIT_WAIT_TIME = new 
PropertyDescriptor.Builder()
+.name("minimum-commit-wait-time")
+.displayName("Minimum Commit Wait Time")
+.description("Minimum time to wait before retrying a commit.")
+.required(true)
+.defaultValue("100 ms")
+.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
+.build();
+
+static final PropertyDescriptor MAXIMUM_COMMIT_WAIT_TIME = new 
PropertyDescriptor.Builder()
+.name("maximum-commit-wait-time")
+.displayName("Maximum Commit Wait Time")
+.description("Maximum time to wait befo

[nifi] branch main updated: NIFI-11204: Add configurable retry logic for table commits in PutIceberg processor

2023-03-20 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new e370292d7f NIFI-11204: Add configurable retry logic for table commits 
in PutIceberg processor
e370292d7f is described below

commit e370292d7f3c295a1810532e9349a11b4104e82b
Author: Mark Bathori 
AuthorDate: Tue Feb 21 15:11:19 2023 +0100

NIFI-11204: Add configurable retry logic for table commits in PutIceberg 
processor

This closes #6976.

Signed-off-by: Peter Turcsanyi 
---
 .../iceberg/AbstractIcebergProcessor.java  |   2 +-
 .../apache/nifi/processors/iceberg/PutIceberg.java |  68 ++-
 .../additionalDetails.html |  58 ++
 .../processors/iceberg/TestDataFileActions.java| 195 +
 .../nifi/processors/iceberg/TestFileAbort.java | 108 
 5 files changed, 314 insertions(+), 117 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
index 7dc53eefd7..a4ec2ccf07 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/AbstractIcebergProcessor.java
@@ -107,7 +107,7 @@ public abstract class AbstractIcebergProcessor extends 
AbstractProcessor {
 
 } catch (Exception e) {
 getLogger().error("Privileged action failed with kerberos user 
" + kerberosUser, e);
-session.transfer(flowFile, REL_FAILURE);
+session.transfer(session.penalize(flowFile), REL_FAILURE);
 }
 }
 }
diff --git 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
index cdd2997a58..1af97768f3 100644
--- 
a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
+++ 
b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
@@ -19,10 +19,12 @@ package org.apache.nifi.processors.iceberg;
 import org.apache.iceberg.AppendFiles;
 import org.apache.iceberg.DataFile;
 import org.apache.iceberg.FileFormat;
+import org.apache.iceberg.PendingUpdate;
 import org.apache.iceberg.Table;
 import org.apache.iceberg.catalog.Catalog;
 import org.apache.iceberg.catalog.Namespace;
 import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.exceptions.CommitFailedException;
 import org.apache.iceberg.io.TaskWriter;
 import org.apache.iceberg.io.WriteResult;
 import org.apache.iceberg.util.Tasks;
@@ -54,6 +56,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.TimeUnit;
 
 import static org.apache.iceberg.TableProperties.DEFAULT_FILE_FORMAT;
 import static org.apache.iceberg.TableProperties.DEFAULT_FILE_FORMAT_DEFAULT;
@@ -114,6 +117,42 @@ public class PutIceberg extends AbstractIcebergProcessor {
 .addValidator(StandardValidators.LONG_VALIDATOR)
 .build();
 
+static final PropertyDescriptor NUMBER_OF_COMMIT_RETRIES = new 
PropertyDescriptor.Builder()
+.name("number-of-commit-retries")
+.displayName("Number of Commit Retries")
+.description("Number of times to retry a commit before failing.")
+.required(true)
+.defaultValue("10")
+.addValidator(StandardValidators.INTEGER_VALIDATOR)
+.build();
+
+static final PropertyDescriptor MINIMUM_COMMIT_WAIT_TIME = new 
PropertyDescriptor.Builder()
+.name("minimum-commit-wait-time")
+.displayName("Minimum Commit Wait Time")
+.description("Minimum time to wait before retrying a commit.")
+.required(true)
+.defaultValue("100 ms")
+.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
+.build();
+
+static final PropertyDescriptor MAXIMUM_COMMIT_WAIT_TIME = new 
PropertyDescriptor.Builder()
+.name("maximum-commit-wait-time")
+.displayName("Maximum Commit Wait Time")
+.description("Maximum time to wait before retrying a commit.")
+   

[nifi] branch support/nifi-1.x updated: NIFI-11261 Added Primary Node State handling to GetAzureEventHub

2023-03-09 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 8511191ad1 NIFI-11261 Added Primary Node State handling to 
GetAzureEventHub
8511191ad1 is described below

commit 8511191ad149907eaf658845ab0e7669d4b712d7
Author: exceptionfactory 
AuthorDate: Wed Mar 8 14:53:52 2023 -0600

NIFI-11261 Added Primary Node State handling to GetAzureEventHub

- Updated Qpid Proton J from 0.34.0 to 0.34.1

This closes #7023.

Signed-off-by: Peter Turcsanyi 
---
 .../azure/eventhub/GetAzureEventHub.java   | 98 --
 .../azure/eventhub/GetAzureEventHubTest.java   | 49 ++-
 nifi-nar-bundles/nifi-azure-bundle/pom.xml |  2 +-
 3 files changed, 140 insertions(+), 9 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java
 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java
index d9f00839d3..855b531d92 100644
--- 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java
+++ 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java
@@ -23,13 +23,16 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Optional;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
 
+import com.azure.core.amqp.AmqpClientOptions;
 import com.azure.core.credential.AzureNamedKeyCredential;
 import com.azure.identity.ManagedIdentityCredential;
 import com.azure.identity.ManagedIdentityCredentialBuilder;
@@ -49,10 +52,13 @@ import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
+import org.apache.nifi.annotation.notification.OnPrimaryNodeStateChange;
+import org.apache.nifi.annotation.notification.PrimaryNodeState;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyValue;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
+import org.apache.nifi.controller.NodeTypeProvider;
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.processor.AbstractProcessor;
@@ -61,6 +67,7 @@ import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.scheduling.ExecutionNode;
 import org.apache.nifi.util.StopWatch;
 import org.apache.nifi.processors.azure.eventhub.utils.AzureEventHubUtils;
 
@@ -85,6 +92,8 @@ public class GetAzureEventHub extends AbstractProcessor {
 private static final Duration DEFAULT_FETCH_TIMEOUT = 
Duration.ofSeconds(60);
 private static final int DEFAULT_FETCH_SIZE = 100;
 
+private static final String NODE_CLIENT_IDENTIFIER_FORMAT = "%s-%s";
+
 static final PropertyDescriptor EVENT_HUB_NAME = new 
PropertyDescriptor.Builder()
 .name("Event Hub Name")
 .description("Name of Azure Event Hubs source")
@@ -180,10 +189,16 @@ public class GetAzureEventHub extends AbstractProcessor {
 
 private final Map partitionEventPositions = new 
ConcurrentHashMap<>();
 
-private volatile BlockingQueue partitionIds = new 
LinkedBlockingQueue<>();
+private final BlockingQueue partitionIds = new 
LinkedBlockingQueue<>();
+
+private final AtomicReference configuredExecutionNode = new 
AtomicReference<>(ExecutionNode.ALL);
+
 private volatile int receiverFetchSize;
+
 private volatile Duration receiverFetchTimeout;
 
+private EventHubClientBuilder configuredClientBuilder;
+
 private EventHubConsumerClient eventHubConsumerClient;
 
 @Override
@@ -201,20 +216,40 @@ public class GetAzureEventHub extends AbstractProcessor {
 return AzureEventHubUtils.customValidate(ACCESS_POLICY, 
POLICY_PRIMARY_KEY, context);
 }
 
+@OnPrimaryNodeStateChange
+public void onPrimaryNodeStateChange(final PrimaryNodeState 
primaryNodeState) {
+final ExecutionNode executionNode = con

[nifi] branch main updated: NIFI-11261 Added Primary Node State handling to GetAzureEventHub

2023-03-09 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new dbc627e0b0 NIFI-11261 Added Primary Node State handling to 
GetAzureEventHub
dbc627e0b0 is described below

commit dbc627e0b0b6602231955153474943c3763cc448
Author: exceptionfactory 
AuthorDate: Wed Mar 8 14:53:52 2023 -0600

NIFI-11261 Added Primary Node State handling to GetAzureEventHub

- Updated Qpid Proton J from 0.34.0 to 0.34.1

This closes #7023.

Signed-off-by: Peter Turcsanyi 
---
 .../azure/eventhub/GetAzureEventHub.java   | 98 --
 .../azure/eventhub/GetAzureEventHubTest.java   | 49 ++-
 nifi-nar-bundles/nifi-azure-bundle/pom.xml |  2 +-
 3 files changed, 140 insertions(+), 9 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java
 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java
index d9f00839d3..855b531d92 100644
--- 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java
+++ 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java
@@ -23,13 +23,16 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Optional;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
 
+import com.azure.core.amqp.AmqpClientOptions;
 import com.azure.core.credential.AzureNamedKeyCredential;
 import com.azure.identity.ManagedIdentityCredential;
 import com.azure.identity.ManagedIdentityCredentialBuilder;
@@ -49,10 +52,13 @@ import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
+import org.apache.nifi.annotation.notification.OnPrimaryNodeStateChange;
+import org.apache.nifi.annotation.notification.PrimaryNodeState;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyValue;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
+import org.apache.nifi.controller.NodeTypeProvider;
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.processor.AbstractProcessor;
@@ -61,6 +67,7 @@ import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.scheduling.ExecutionNode;
 import org.apache.nifi.util.StopWatch;
 import org.apache.nifi.processors.azure.eventhub.utils.AzureEventHubUtils;
 
@@ -85,6 +92,8 @@ public class GetAzureEventHub extends AbstractProcessor {
 private static final Duration DEFAULT_FETCH_TIMEOUT = 
Duration.ofSeconds(60);
 private static final int DEFAULT_FETCH_SIZE = 100;
 
+private static final String NODE_CLIENT_IDENTIFIER_FORMAT = "%s-%s";
+
 static final PropertyDescriptor EVENT_HUB_NAME = new 
PropertyDescriptor.Builder()
 .name("Event Hub Name")
 .description("Name of Azure Event Hubs source")
@@ -180,10 +189,16 @@ public class GetAzureEventHub extends AbstractProcessor {
 
 private final Map partitionEventPositions = new 
ConcurrentHashMap<>();
 
-private volatile BlockingQueue partitionIds = new 
LinkedBlockingQueue<>();
+private final BlockingQueue partitionIds = new 
LinkedBlockingQueue<>();
+
+private final AtomicReference configuredExecutionNode = new 
AtomicReference<>(ExecutionNode.ALL);
+
 private volatile int receiverFetchSize;
+
 private volatile Duration receiverFetchTimeout;
 
+private EventHubClientBuilder configuredClientBuilder;
+
 private EventHubConsumerClient eventHubConsumerClient;
 
 @Override
@@ -201,20 +216,40 @@ public class GetAzureEventHub extends AbstractProcessor {
 return AzureEventHubUtils.customValidate(ACCESS_POLICY, 
POLICY_PRIMARY_KEY, context);
 }
 
+@OnPrimaryNodeStateChange
+public void onPrimaryNodeStateChange(final PrimaryNodeState 
primaryNodeState) {
+final ExecutionNode executionNode = configuredExecutionNode.get(

[nifi] branch support/nifi-1.x updated: NIFI-11151: Improving code reusability of DBCP services

2023-03-07 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new b3242304f8 NIFI-11151: Improving code reusability of DBCP services
b3242304f8 is described below

commit b3242304f8cbd14476b30fdb35e4173c9edac0ed
Author: Lehel Boér 
AuthorDate: Wed Feb 8 20:08:24 2023 +0100

NIFI-11151: Improving code reusability of DBCP services

This closes #6935.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi/dbcp/AbstractDBCPConnectionPool.java  | 461 -
 .../org/apache/nifi/dbcp/utils/DBCPProperties.java | 199 +
 .../nifi/dbcp/utils/DataSourceConfiguration.java   | 174 
 .../nifi/dbcp/utils/DefaultDataSourceValues.java   |  79 
 .../service/SnowflakeComputingConnectionPool.java  |  88 +++-
 .../org/apache/nifi/dbcp/DBCPConnectionPool.java   | 171 +++-
 .../record/sink/db/DatabaseRecordSinkTest.groovy   |  32 +-
 .../java/org/apache/nifi/dbcp/DBCPServiceTest.java |  37 +-
 8 files changed, 800 insertions(+), 441 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-dbcp-base/src/main/java/org/apache/nifi/dbcp/AbstractDBCPConnectionPool.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-dbcp-base/src/main/java/org/apache/nifi/dbcp/AbstractDBCPConnectionPool.java
index c71ac4766b..cc15e36c11 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-dbcp-base/src/main/java/org/apache/nifi/dbcp/AbstractDBCPConnectionPool.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-dbcp-base/src/main/java/org/apache/nifi/dbcp/AbstractDBCPConnectionPool.java
@@ -16,290 +16,43 @@
  */
 package org.apache.nifi.dbcp;
 
-import java.util.HashMap;
 import org.apache.commons.dbcp2.BasicDataSource;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
 import org.apache.nifi.annotation.lifecycle.OnDisabled;
 import org.apache.nifi.annotation.lifecycle.OnEnabled;
 import org.apache.nifi.components.ConfigVerificationResult;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyValue;
-import org.apache.nifi.components.resource.ResourceCardinality;
-import org.apache.nifi.components.resource.ResourceType;
 import org.apache.nifi.controller.AbstractControllerService;
 import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.controller.VerifiableControllerService;
-import org.apache.nifi.expression.AttributeExpression;
-import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
+import org.apache.nifi.dbcp.utils.DataSourceConfiguration;
 import org.apache.nifi.kerberos.KerberosUserService;
 import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.security.krb.KerberosAction;
-import org.apache.nifi.security.krb.KerberosKeytabUser;
 import org.apache.nifi.security.krb.KerberosLoginException;
-import org.apache.nifi.security.krb.KerberosPasswordUser;
 import org.apache.nifi.security.krb.KerberosUser;
 
-import javax.security.auth.login.LoginException;
 import java.sql.Connection;
 import java.sql.Driver;
-import java.sql.DriverManager;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
 import static 
org.apache.nifi.components.ConfigVerificationResult.Outcome.FAILED;
 import static 
org.apache.nifi.components.ConfigVerificationResult.Outcome.SUCCESSFUL;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.DB_DRIVER_LOCATION;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.KERBEROS_USER_SERVICE;
 
-/**
- * Abstract base class for Database Connection Pooling Services using Apache 
Commons DBCP as the underlying connection pool implementation.
- *
- */
 public abstract class AbstractDBCPConnectionPool extends 
AbstractControllerService implements DBCPService, VerifiableControllerService {
-/** Property Name Prefix for Sensitive Dynamic Properties */
-protected static final String SENSITIVE_PROPERTY_PREFIX = "SENSITIVE.";
-
-/**
- * Copied from {@link GenericObjectPoolConfig#DEFAULT_MIN_IDLE} in 
Commons-DBCP 2.7.0
- */
-private static final String DEFAULT_MIN_IDLE = "0";
-/**
- * Copied from {@link GenericObjectPoolConfig#DEFAULT_MAX_IDLE} in 
Commons-DBCP 2.7.0
- */
-private static final String DEFAULT_MAX_IDLE = "8";
-/**
- * Copied from private variable {@link 
BasicDataSource#maxConnLifetimeMillis} in Commons-DBCP 2.7.0
- */
-private static final String D

[nifi] branch main updated: NIFI-11151: Improving code reusability of DBCP services

2023-03-07 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 13d343d5ee NIFI-11151: Improving code reusability of DBCP services
13d343d5ee is described below

commit 13d343d5ee45269c7d882b6e55bbf783217bc38b
Author: Lehel Boér 
AuthorDate: Wed Feb 8 20:08:24 2023 +0100

NIFI-11151: Improving code reusability of DBCP services

This closes #6935.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi/dbcp/AbstractDBCPConnectionPool.java  | 461 -
 .../org/apache/nifi/dbcp/utils/DBCPProperties.java | 199 +
 .../nifi/dbcp/utils/DataSourceConfiguration.java   | 174 
 .../nifi/dbcp/utils/DefaultDataSourceValues.java   |  79 
 .../service/SnowflakeComputingConnectionPool.java  |  88 +++-
 .../org/apache/nifi/dbcp/DBCPConnectionPool.java   | 171 +++-
 .../record/sink/db/DatabaseRecordSinkTest.groovy   |  32 +-
 .../java/org/apache/nifi/dbcp/DBCPServiceTest.java |  37 +-
 8 files changed, 800 insertions(+), 441 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-dbcp-base/src/main/java/org/apache/nifi/dbcp/AbstractDBCPConnectionPool.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-dbcp-base/src/main/java/org/apache/nifi/dbcp/AbstractDBCPConnectionPool.java
index c71ac4766b..cc15e36c11 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-dbcp-base/src/main/java/org/apache/nifi/dbcp/AbstractDBCPConnectionPool.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-dbcp-base/src/main/java/org/apache/nifi/dbcp/AbstractDBCPConnectionPool.java
@@ -16,290 +16,43 @@
  */
 package org.apache.nifi.dbcp;
 
-import java.util.HashMap;
 import org.apache.commons.dbcp2.BasicDataSource;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
 import org.apache.nifi.annotation.lifecycle.OnDisabled;
 import org.apache.nifi.annotation.lifecycle.OnEnabled;
 import org.apache.nifi.components.ConfigVerificationResult;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyValue;
-import org.apache.nifi.components.resource.ResourceCardinality;
-import org.apache.nifi.components.resource.ResourceType;
 import org.apache.nifi.controller.AbstractControllerService;
 import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.controller.VerifiableControllerService;
-import org.apache.nifi.expression.AttributeExpression;
-import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
+import org.apache.nifi.dbcp.utils.DataSourceConfiguration;
 import org.apache.nifi.kerberos.KerberosUserService;
 import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.security.krb.KerberosAction;
-import org.apache.nifi.security.krb.KerberosKeytabUser;
 import org.apache.nifi.security.krb.KerberosLoginException;
-import org.apache.nifi.security.krb.KerberosPasswordUser;
 import org.apache.nifi.security.krb.KerberosUser;
 
-import javax.security.auth.login.LoginException;
 import java.sql.Connection;
 import java.sql.Driver;
-import java.sql.DriverManager;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
 import static 
org.apache.nifi.components.ConfigVerificationResult.Outcome.FAILED;
 import static 
org.apache.nifi.components.ConfigVerificationResult.Outcome.SUCCESSFUL;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.DB_DRIVER_LOCATION;
+import static org.apache.nifi.dbcp.utils.DBCPProperties.KERBEROS_USER_SERVICE;
 
-/**
- * Abstract base class for Database Connection Pooling Services using Apache 
Commons DBCP as the underlying connection pool implementation.
- *
- */
 public abstract class AbstractDBCPConnectionPool extends 
AbstractControllerService implements DBCPService, VerifiableControllerService {
-/** Property Name Prefix for Sensitive Dynamic Properties */
-protected static final String SENSITIVE_PROPERTY_PREFIX = "SENSITIVE.";
-
-/**
- * Copied from {@link GenericObjectPoolConfig#DEFAULT_MIN_IDLE} in 
Commons-DBCP 2.7.0
- */
-private static final String DEFAULT_MIN_IDLE = "0";
-/**
- * Copied from {@link GenericObjectPoolConfig#DEFAULT_MAX_IDLE} in 
Commons-DBCP 2.7.0
- */
-private static final String DEFAULT_MAX_IDLE = "8";
-/**
- * Copied from private variable {@link 
BasicDataSource#maxConnLifetimeMillis} in Commons-DBCP 2.7.0
- */
-private static final String DEFAULT_MAX_CONN_LIFETIME = &quo

[nifi] branch support/nifi-1.x updated: NIFI-11157 - Doc update for scheduling strategy for MergeContent/Record

2023-03-06 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 1b43be6f3c NIFI-11157 - Doc update for scheduling strategy for 
MergeContent/Record
1b43be6f3c is described below

commit 1b43be6f3c1745f831630685babca6df98c19819
Author: Pierre Villard 
AuthorDate: Wed Mar 1 20:09:04 2023 +0100

NIFI-11157 - Doc update for scheduling strategy for MergeContent/Record

This closes #6999.

Signed-off-by: Peter Turcsanyi 
---
 .../main/java/org/apache/nifi/processors/standard/MergeContent.java| 3 ++-
 .../src/main/java/org/apache/nifi/processors/standard/MergeRecord.java | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
index d323bce37e..9dc33c3698 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
@@ -107,7 +107,8 @@ import java.util.zip.ZipOutputStream;
 @Tags({"merge", "content", "correlation", "tar", "zip", "stream", 
"concatenation", "archive", "flowfile-stream", "flowfile-stream-v3"})
 @CapabilityDescription("Merges a Group of FlowFiles together based on a 
user-defined strategy and packages them into a single FlowFile. "
 + "It is recommended that the Processor be configured with only a 
single incoming connection, as Group of FlowFiles will not be "
-+ "created from FlowFiles in different connections. This processor 
updates the mime.type attribute as appropriate.")
++ "created from FlowFiles in different connections. This processor 
updates the mime.type attribute as appropriate. "
++ "NOTE: this processor should NOT be configured with Cron Driven for 
the Scheduling Strategy.")
 @ReadsAttributes({
 @ReadsAttribute(attribute = "fragment.identifier", description = 
"Applicable only if the  property is set to Defragment. "
 + "All FlowFiles with the same value for this attribute will be 
bundled together."),
diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java
index 187bcec634..bcba47bc9b 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java
@@ -76,7 +76,8 @@ import java.util.stream.Collectors;
 + "This Processor works by creating 'bins' and then adding FlowFiles to 
these bins until they are full. Once a bin is full, all of the FlowFiles will 
be combined into "
 + "a single output FlowFile, and that FlowFile will be routed to the 
'merged' Relationship. A bin will consist of potentially many 'like FlowFiles'. 
In order for two "
 + "FlowFiles to be considered 'like FlowFiles', they must have the same 
Schema (as identified by the Record Reader) and, if the  property "
-+ "is set, the same value for the specified attribute. See Processor Usage 
and Additional Details for more information.")
++ "is set, the same value for the specified attribute. See Processor Usage 
and Additional Details for more information. NOTE: this processor should NOT be 
configured "
++ "with Cron Driven for the Scheduling Strategy.")
 @ReadsAttributes({
 @ReadsAttribute(attribute = "fragment.identifier", description = 
"Applicable only if the  property is set to Defragment. "
 + "All FlowFiles with the same value for this attribute will be 
bundled together."),



[nifi] branch main updated: NIFI-11157 - Doc update for scheduling strategy for MergeContent/Record

2023-03-06 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 169b53feaa NIFI-11157 - Doc update for scheduling strategy for 
MergeContent/Record
169b53feaa is described below

commit 169b53feaa50a1072ba793c215e0afb7330f0d6a
Author: Pierre Villard 
AuthorDate: Wed Mar 1 20:09:04 2023 +0100

NIFI-11157 - Doc update for scheduling strategy for MergeContent/Record

This closes #6999.

Signed-off-by: Peter Turcsanyi 
---
 .../main/java/org/apache/nifi/processors/standard/MergeContent.java| 3 ++-
 .../src/main/java/org/apache/nifi/processors/standard/MergeRecord.java | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
index d323bce37e..9dc33c3698 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
@@ -107,7 +107,8 @@ import java.util.zip.ZipOutputStream;
 @Tags({"merge", "content", "correlation", "tar", "zip", "stream", 
"concatenation", "archive", "flowfile-stream", "flowfile-stream-v3"})
 @CapabilityDescription("Merges a Group of FlowFiles together based on a 
user-defined strategy and packages them into a single FlowFile. "
 + "It is recommended that the Processor be configured with only a 
single incoming connection, as Group of FlowFiles will not be "
-+ "created from FlowFiles in different connections. This processor 
updates the mime.type attribute as appropriate.")
++ "created from FlowFiles in different connections. This processor 
updates the mime.type attribute as appropriate. "
++ "NOTE: this processor should NOT be configured with Cron Driven for 
the Scheduling Strategy.")
 @ReadsAttributes({
 @ReadsAttribute(attribute = "fragment.identifier", description = 
"Applicable only if the  property is set to Defragment. "
 + "All FlowFiles with the same value for this attribute will be 
bundled together."),
diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java
index 187bcec634..bcba47bc9b 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java
@@ -76,7 +76,8 @@ import java.util.stream.Collectors;
 + "This Processor works by creating 'bins' and then adding FlowFiles to 
these bins until they are full. Once a bin is full, all of the FlowFiles will 
be combined into "
 + "a single output FlowFile, and that FlowFile will be routed to the 
'merged' Relationship. A bin will consist of potentially many 'like FlowFiles'. 
In order for two "
 + "FlowFiles to be considered 'like FlowFiles', they must have the same 
Schema (as identified by the Record Reader) and, if the  property "
-+ "is set, the same value for the specified attribute. See Processor Usage 
and Additional Details for more information.")
++ "is set, the same value for the specified attribute. See Processor Usage 
and Additional Details for more information. NOTE: this processor should NOT be 
configured "
++ "with Cron Driven for the Scheduling Strategy.")
 @ReadsAttributes({
 @ReadsAttribute(attribute = "fragment.identifier", description = 
"Applicable only if the  property is set to Defragment. "
 + "All FlowFiles with the same value for this attribute will be 
bundled together."),



[nifi] branch support/nifi-1.x updated: NIFI-11150 Add Service Account JSON credentials support to Google Pub/Sub Lite processors

2023-03-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new f7d3b75bcd NIFI-11150 Add Service Account JSON credentials support to 
Google Pub/Sub Lite processors
f7d3b75bcd is described below

commit f7d3b75bcd59d7bf8652d14581b19414f0ac23d9
Author: Robert Kalmar 
AuthorDate: Wed Feb 8 12:38:25 2023 +0100

NIFI-11150 Add Service Account JSON credentials support to Google Pub/Sub 
Lite processors

This closes #6933.

Signed-off-by: Peter Turcsanyi 
---
 .../processors/gcp/pubsub/lite/ConsumeGCPubSubLite.java| 14 +-
 .../processors/gcp/pubsub/lite/PublishGCPubSubLite.java| 13 +
 .../org/apache/nifi/processors/gcp/util/GoogleUtils.java   |  3 +++
 3 files changed, 21 insertions(+), 9 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/ConsumeGCPubSubLite.java
 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/ConsumeGCPubSubLite.java
index ac40fb79df..d0797fee42 100644
--- 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/ConsumeGCPubSubLite.java
+++ 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/ConsumeGCPubSubLite.java
@@ -18,6 +18,7 @@ package org.apache.nifi.processors.gcp.pubsub.lite;
 
 import com.google.api.gax.core.FixedCredentialsProvider;
 import com.google.api.gax.rpc.ApiException;
+import com.google.auth.oauth2.GoogleCredentials;
 import com.google.cloud.pubsub.v1.AckReplyConsumer;
 import com.google.cloud.pubsub.v1.MessageReceiver;
 import com.google.cloud.pubsublite.SubscriptionPath;
@@ -50,7 +51,6 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.gcp.pubsub.AbstractGCPubSubProcessor;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -72,13 +72,12 @@ import static 
org.apache.nifi.processors.gcp.pubsub.PubSubAttributes.MSG_PUBLISH
 import static 
org.apache.nifi.processors.gcp.pubsub.PubSubAttributes.MSG_PUBLISH_TIME_DESCRIPTION;
 import static 
org.apache.nifi.processors.gcp.pubsub.PubSubAttributes.ORDERING_KEY_ATTRIBUTE;
 import static 
org.apache.nifi.processors.gcp.pubsub.PubSubAttributes.ORDERING_KEY_DESCRIPTION;
+import static 
org.apache.nifi.processors.gcp.util.GoogleUtils.GOOGLE_CLOUD_PLATFORM_SCOPE;
 
 @SeeAlso({PublishGCPubSubLite.class})
 @InputRequirement(InputRequirement.Requirement.INPUT_FORBIDDEN)
 @Tags({"google", "google-cloud", "gcp", "message", "pubsub", "consume", 
"lite"})
-@CapabilityDescription("Consumes message from the configured Google Cloud 
PubSub Lite subscription. In its current state, this processor "
-+ "will only work if running on a Google Cloud Compute Engine instance 
and if using the GCP Credentials Controller Service with "
-+ "'Use Application Default Credentials' or 'Use Compute Engine 
Credentials'.")
+@CapabilityDescription("Consumes message from the configured Google Cloud 
PubSub Lite subscription.")
 @WritesAttributes({
 @WritesAttribute(attribute = MESSAGE_ID_ATTRIBUTE, description = 
MESSAGE_ID_DESCRIPTION),
 @WritesAttribute(attribute = ORDERING_KEY_ATTRIBUTE, description = 
ORDERING_KEY_DESCRIPTION),
@@ -219,7 +218,7 @@ public class ConsumeGCPubSubLite extends 
AbstractGCPubSubProcessor implements Ve
 message.getConsumer().ack();
 }
 
-private Subscriber getSubscriber(final ProcessContext context) throws 
IOException {
+private Subscriber getSubscriber(final ProcessContext context) {
 
 final SubscriptionPath subscriptionPath = 
SubscriptionPath.parse(context.getProperty(SUBSCRIPTION).evaluateAttributeExpressions().getValue());
 
@@ -286,4 +285,9 @@ public class ConsumeGCPubSubLite extends 
AbstractGCPubSubProcessor implements Ve
 }
 return verificationResults;
 }
+
+@Override
+protected GoogleCredentials getGoogleCredentials(final ProcessContext 
context) {
+return 
super.getGoogleCredentials(context).createScoped(GOOGLE_CLOUD_PLATFORM_SCOPE);
+}
 }
diff --git 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/PublishGCPubSubLite.java
 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/PublishGCPubSubLite.java
index 55edee02f7..603cfb954a 100644
--- 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/

[nifi] 02/06: NIFI-11146 Proxy usage in BoxFile processors

2023-03-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit 20914c1af913919d89a07fe03a11816b38c70f41
Author: krisztina-zsihovszki 
AuthorDate: Mon Feb 6 18:17:33 2023 +0100

NIFI-11146 Proxy usage in BoxFile processors

This closes #6932.

Signed-off-by: Peter Turcsanyi 
---
 .../nifi-box-bundle/nifi-box-services-api/pom.xml  |  2 +-
 .../nifi-box-bundle/nifi-box-services/pom.xml  |  4 ++
 .../JsonConfigBasedBoxClientService.java   | 44 ++
 3 files changed, 34 insertions(+), 16 deletions(-)

diff --git a/nifi-nar-bundles/nifi-box-bundle/nifi-box-services-api/pom.xml 
b/nifi-nar-bundles/nifi-box-bundle/nifi-box-services-api/pom.xml
index ec329c64b2..67f332e44f 100644
--- a/nifi-nar-bundles/nifi-box-bundle/nifi-box-services-api/pom.xml
+++ b/nifi-nar-bundles/nifi-box-bundle/nifi-box-services-api/pom.xml
@@ -29,7 +29,7 @@
 
 com.box
 box-java-sdk
-3.8.2
+4.0.0
 
 
 org.bouncycastle
diff --git a/nifi-nar-bundles/nifi-box-bundle/nifi-box-services/pom.xml 
b/nifi-nar-bundles/nifi-box-bundle/nifi-box-services/pom.xml
index 90cadf875a..0953480005 100644
--- a/nifi-nar-bundles/nifi-box-bundle/nifi-box-services/pom.xml
+++ b/nifi-nar-bundles/nifi-box-bundle/nifi-box-services/pom.xml
@@ -42,6 +42,10 @@
 nifi-json-utils
 1.21.0-SNAPSHOT
 
+
+org.apache.nifi
+nifi-proxy-configuration-api
+
 
 org.apache.nifi
 nifi-mock
diff --git 
a/nifi-nar-bundles/nifi-box-bundle/nifi-box-services/src/main/java/org/apache/nifi/box/controllerservices/JsonConfigBasedBoxClientService.java
 
b/nifi-nar-bundles/nifi-box-bundle/nifi-box-services/src/main/java/org/apache/nifi/box/controllerservices/JsonConfigBasedBoxClientService.java
index f384b015c3..26662bf28c 100644
--- 
a/nifi-nar-bundles/nifi-box-bundle/nifi-box-services/src/main/java/org/apache/nifi/box/controllerservices/JsonConfigBasedBoxClientService.java
+++ 
b/nifi-nar-bundles/nifi-box-bundle/nifi-box-services/src/main/java/org/apache/nifi/box/controllerservices/JsonConfigBasedBoxClientService.java
@@ -19,6 +19,16 @@ package org.apache.nifi.box.controllerservices;
 import com.box.sdk.BoxAPIConnection;
 import com.box.sdk.BoxConfig;
 import com.box.sdk.BoxDeveloperEditionAPIConnection;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.net.Proxy;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnEnabled;
@@ -33,16 +43,9 @@ import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.JsonValidator;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.proxy.ProxyConfiguration;
+import org.apache.nifi.proxy.ProxySpec;
 
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
 
 @CapabilityDescription("Provides Box client objects through which Box API 
calls can be used.")
 @Tags({"box", "client", "provider"})
@@ -75,10 +78,13 @@ public class JsonConfigBasedBoxClientService extends 
AbstractControllerService i
 .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
 .build();
 
+private static final ProxySpec[] PROXY_SPECS = {ProxySpec.HTTP, 
ProxySpec.HTTP_AUTH};
+
 private static final List PROPERTIES = 
Collections.unmodifiableList(Arrays.asList(
 ACCOUNT_ID,
 APP_CONFIG_FILE,
-APP_CONFIG_JSON
+APP_CONFIG_JSON,
+ProxyConfiguration.createProxyConfigPropertyDescriptor(false, 
PROXY_SPECS)
 ));
 
 private volatile BoxAPIConnection boxAPIConnection;
@@ -128,15 +134,16 @@ public class JsonConfigBasedBoxClientService extends 
AbstractControllerService i
 }
 
 private BoxAPIConnection createBoxApiConnection(ConfigurationContext 
context) {
-BoxAPIConnection api;
+final BoxAPIConnection api;
 
-String accountId = 
context.getProperty(ACCOUNT_ID).evaluateAttributeExpressions().getValue();
+final String accountId = 
context.getProperty(ACCOUNT_ID).evaluateAttributeExpressions().getValue();
+final ProxyConfiguration proxyConfiguration = 
ProxyConfiguration.getConf

[nifi] 05/06: NIFI-11045: Sensitive dynamic property support for parameterized queries in ExecuteSQL and ExecuteSQLRecord

2023-03-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit 7475232bb09c15bad67bf1fe863d9afd7f503569
Author: Lehel 
AuthorDate: Tue Jan 17 16:15:15 2023 +0100

NIFI-11045: Sensitive dynamic property support for parameterized queries in 
ExecuteSQL and ExecuteSQLRecord

This closes #6853.

Signed-off-by: Peter Turcsanyi 
---
 .../java/org/apache/nifi/util/db/JdbcCommon.java   | 78 ++
 .../apache/nifi/util/db/SensitiveValueWrapper.java | 36 ++
 .../org/apache/nifi/util/db/TestJdbcCommon.java| 34 +-
 .../nifi-standard-processors/pom.xml   |  9 +--
 .../processors/standard/AbstractExecuteSQL.java| 32 ++---
 .../nifi/processors/standard/ExecuteSQL.java   | 61 ++---
 .../nifi/processors/standard/ExecuteSQLRecord.java | 47 -
 7 files changed, 243 insertions(+), 54 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
index ad7471a54c..dbd18e931d 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
@@ -68,9 +68,11 @@ import java.time.ZoneId;
 import java.time.format.DateTimeFormatter;
 import java.util.Date;
 import java.util.Map;
+import java.util.Optional;
 import java.util.function.Function;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 
 import static java.sql.Types.ARRAY;
 import static java.sql.Types.BIGINT;
@@ -120,6 +122,7 @@ public class JdbcCommon {
 public static final Pattern NUMBER_PATTERN = Pattern.compile("-?\\d+");
 
 public static final String MIME_TYPE_AVRO_BINARY = 
"application/avro-binary";
+public static final String MASKED_LOG_VALUE = "MASKED VALUE";
 
 public static long convertToAvroStream(final ResultSet rs, final 
OutputStream outStream, boolean convertNames) throws SQLException, IOException {
 return convertToAvroStream(rs, outStream, null, null, convertNames);
@@ -681,32 +684,55 @@ public class JdbcCommon {
  * @throws SQLException if the PreparedStatement throws a SQLException 
when the appropriate setter is called
  */
 public static void setParameters(final PreparedStatement stmt, final 
Map attributes) throws SQLException {
-for (final Map.Entry entry : attributes.entrySet()) {
-final String key = entry.getKey();
-final Matcher matcher = SQL_TYPE_ATTRIBUTE_PATTERN.matcher(key);
-if (matcher.matches()) {
-final int parameterIndex = Integer.parseInt(matcher.group(1));
-
-final boolean isNumeric = 
NUMBER_PATTERN.matcher(entry.getValue()).matches();
-if (!isNumeric) {
-throw new SQLDataException("Value of the " + key + " 
attribute is '" + entry.getValue() + "', which is not a valid JDBC numeral 
type");
-}
+final Map sensitiveValueWrapperMap = 
attributes.entrySet()
+.stream()
+.collect(Collectors.toMap(Map.Entry::getKey, e -> new 
SensitiveValueWrapper(e.getValue(), false)));
+setSensitiveParameters(stmt, sensitiveValueWrapperMap);
+}
+
+/**
+ * Sets all of the appropriate parameters on the given PreparedStatement, 
based on the given FlowFile attributes
+ * and masks sensitive values.
+ *
+ * @param stmt the statement to set the parameters on
+ * @param attributes the attributes from which to derive parameter 
indices, values, and types
+ * @throws SQLException if the PreparedStatement throws a SQLException 
when the appropriate setter is called
+ */
+public static void setSensitiveParameters(final PreparedStatement stmt, 
final Map attributes) throws SQLException {
+for (final Map.Entry entry : 
attributes.entrySet()) {
+final String flowFileAttributeKey = entry.getKey();
+setParameterAtIndex(stmt, attributes, flowFileAttributeKey);
+}
+}
 
-final int jdbcType = Integer.parseInt(entry.getValue());
-final String valueAttrName = "sql.args." + parameterIndex + 
".value";
-final String parameterValue = attributes.get(valueAttrName);
-final String formatAttrName = "sql.args." + parameterIndex + 
".format";
-final String parameterFormat = 
attributes.containsKey(formatAttrName)? attributes.get(formatAttrName):""

[nifi] branch support/nifi-1.x updated (88d2c03b37 -> 1273152d78)

2023-03-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a change to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


from 88d2c03b37 NIFI-11245 Corrected reducing Maximum Thread Count while 
running (#7005)
 new 85b36cac60 NIFI-11124: Add hadoop.file.url attribute to HDFS processors
 new 20914c1af9 NIFI-11146 Proxy usage in BoxFile processors
 new b9a0216ee6 NIFI-11173 Add default values to DeleteAzureBlobStorage_v12
 new 9438eb8b86 NIFI-11190: Added provenance events to GetHubSpot and 
GetShopify
 new 7475232bb0 NIFI-11045: Sensitive dynamic property support for 
parameterized queries in ExecuteSQL and ExecuteSQLRecord
 new 1273152d78 NIFI-11158 PutSalesforceObject processor improvements

The 6 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../nifi/util/StandardProcessorTestRunner.java | 13 +++-
 .../main/java/org/apache/nifi/util/TestRunner.java |  7 ++
 .../azure/storage/DeleteAzureBlobStorage_v12.java  | 15 -
 .../azure/storage/FetchAzureBlobStorage_v12.java   |  4 +-
 .../azure/storage/ITListAzureBlobStorage_v12.java  |  3 +-
 .../nifi-box-bundle/nifi-box-services-api/pom.xml  |  2 +-
 .../nifi-box-bundle/nifi-box-services/pom.xml  |  4 ++
 .../JsonConfigBasedBoxClientService.java   | 44 +++-
 .../java/org/apache/nifi/util/db/JdbcCommon.java   | 78 ++
 .../apache/nifi/util/db/SensitiveValueWrapper.java | 71 ++--
 .../org/apache/nifi/util/db/TestJdbcCommon.java| 34 +-
 .../processors/hadoop/AbstractHadoopProcessor.java |  1 +
 .../processors/hadoop/AbstractFetchHDFSRecord.java | 31 -
 .../processors/hadoop/AbstractPutHDFSRecord.java   |  1 +
 .../apache/nifi/processors/hadoop/DeleteHDFS.java  |  2 +
 .../apache/nifi/processors/hadoop/FetchHDFS.java   |  9 ++-
 .../apache/nifi/processors/hadoop/MoveHDFS.java|  6 +-
 .../org/apache/nifi/processors/hadoop/PutHDFS.java |  2 +
 .../apache/nifi/processors/hadoop/PutHDFSTest.java |  6 ++
 .../nifi/processors/hadoop/TestDeleteHDFS.java |  5 ++
 .../nifi/processors/hadoop/TestFetchHDFS.java  |  4 ++
 .../org/apache/nifi/processors/orc/PutORC.java |  1 +
 .../org/apache/nifi/processors/orc/PutORCTest.java |  3 +
 .../apache/nifi/processors/hubspot/GetHubSpot.java |  1 +
 .../nifi/processors/hubspot/GetHubSpotTest.java|  9 +++
 .../nifi/processors/parquet/FetchParquet.java  |  3 +-
 .../apache/nifi/processors/parquet/PutParquet.java |  1 +
 .../nifi/processors/parquet/FetchParquetTest.java  |  2 +
 .../nifi/processors/parquet/PutParquetTest.java| 36 +-
 .../processors/salesforce/PutSalesforceObject.java | 34 +++---
 .../salesforce/QuerySalesforceObject.java  |  8 ++-
 .../processors/salesforce/util/RecordExtender.java |  4 +-
 .../salesforce/util/SalesforceRestService.java | 15 -
 .../salesforce/PutSalesforceObjectIT.java  | 67 ---
 .../salesforce/QuerySalesforceObjectIT.java|  2 +
 .../apache/nifi/processors/shopify/GetShopify.java |  3 +
 .../shopify/rest/ShopifyRestService.java   |  4 ++
 .../{GetShopifyIT.java => GetShopifyTest.java} | 12 ++--
 .../nifi-standard-processors/pom.xml   |  9 +--
 .../processors/standard/AbstractExecuteSQL.java| 32 ++---
 .../nifi/processors/standard/ExecuteSQL.java   | 61 ++---
 .../nifi/processors/standard/ExecuteSQLRecord.java | 47 -
 42 files changed, 523 insertions(+), 173 deletions(-)
 copy 
nifi-registry/nifi-registry-core/nifi-registry-framework/src/main/java/org/apache/nifi/registry/security/ldap/ReferralStrategy.java
 => 
nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/SensitiveValueWrapper.java
 (74%)
 rename 
nifi-nar-bundles/nifi-shopify-bundle/nifi-shopify-processors/src/test/java/org/apache/nifi/processors/shopify/{GetShopifyIT.java
 => GetShopifyTest.java} (92%)



[nifi] 03/06: NIFI-11173 Add default values to DeleteAzureBlobStorage_v12

2023-03-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit b9a0216ee6bac1531e9cc19e15003bcc4ece6faf
Author: Nandor Soma Abonyi 
AuthorDate: Mon Feb 13 17:29:52 2023 +0100

NIFI-11173 Add default values to DeleteAzureBlobStorage_v12

This closes #6945.

Signed-off-by: Peter Turcsanyi 
---
 .../azure/storage/DeleteAzureBlobStorage_v12.java | 15 ++-
 .../azure/storage/FetchAzureBlobStorage_v12.java  |  4 ++--
 .../azure/storage/ITListAzureBlobStorage_v12.java |  3 ++-
 3 files changed, 18 insertions(+), 4 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage_v12.java
 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage_v12.java
index 1ba5f7c7d7..b49cbd0247 100644
--- 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage_v12.java
+++ 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage_v12.java
@@ -40,6 +40,9 @@ import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
+import static 
org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_BLOBNAME;
+import static 
org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_CONTAINER;
+
 @Tags({"azure", "microsoft", "cloud", "storage", "blob"})
 @SeeAlso({ListAzureBlobStorage_v12.class, FetchAzureBlobStorage_v12.class, 
PutAzureBlobStorage_v12.class})
 @CapabilityDescription("Deletes the specified blob from Azure Blob Storage. 
The processor uses Azure Blob Storage client library v12.")
@@ -52,6 +55,16 @@ public class DeleteAzureBlobStorage_v12 extends 
AbstractAzureBlobProcessor_v12 {
 
 public static final AllowableValue DELETE_SNAPSHOTS_ONLY = new 
AllowableValue(DeleteSnapshotsOptionType.ONLY.name(), "Delete Snapshots Only", 
"Delete only the blob's snapshots.");
 
+public static final PropertyDescriptor CONTAINER = new 
PropertyDescriptor.Builder()
+.fromPropertyDescriptor(AzureStorageUtils.CONTAINER)
+.defaultValue(String.format("${%s}", ATTR_NAME_CONTAINER))
+.build();
+
+public static final PropertyDescriptor BLOB_NAME = new 
PropertyDescriptor.Builder()
+.fromPropertyDescriptor(AbstractAzureBlobProcessor_v12.BLOB_NAME)
+.defaultValue(String.format("${%s}", ATTR_NAME_BLOBNAME))
+.build();
+
 public static final PropertyDescriptor DELETE_SNAPSHOTS_OPTION = new 
PropertyDescriptor.Builder()
 .name("delete-snapshots-option")
 .displayName("Delete Snapshots Option")
@@ -64,7 +77,7 @@ public class DeleteAzureBlobStorage_v12 extends 
AbstractAzureBlobProcessor_v12 {
 
 private static final List PROPERTIES = 
Collections.unmodifiableList(Arrays.asList(
 STORAGE_CREDENTIALS_SERVICE,
-AzureStorageUtils.CONTAINER,
+CONTAINER,
 BLOB_NAME,
 DELETE_SNAPSHOTS_OPTION,
 AzureStorageUtils.PROXY_CONFIGURATION_SERVICE
diff --git 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage_v12.java
 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage_v12.java
index ac6dcae8b3..0a12de3051 100644
--- 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage_v12.java
+++ 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage_v12.java
@@ -80,12 +80,12 @@ public class FetchAzureBlobStorage_v12 extends 
AbstractAzureBlobProcessor_v12 {
 
 public static final PropertyDescriptor CONTAINER = new 
PropertyDescriptor.Builder()
 .fromPropertyDescriptor(AzureStorageUtils.CONTAINER)
-.defaultValue("${azure.container}")
+.defaultValue(String.format("${%s}", ATTR_NAME_CONTAINER))
 .build();
 
 public static final PropertyDescriptor BLOB_NAME = new 
PropertyDescriptor.Builder()
 .fromPropertyDescriptor(AbstractAzureBlobProcessor_v12.BLOB_NAME)
-.defaultValue("${azure.blobname}")
+.defaultValue(String.format("${%s}", ATTR_NAME_BLOBNAME))
 .build();
 
 public static final PropertyDescriptor RANGE_START =

[nifi] 06/06: NIFI-11158 PutSalesforceObject processor improvements

2023-03-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit 1273152d781b754d0ba0a57506fa44783bb65d1d
Author: krisztina-zsihovszki 
AuthorDate: Wed Feb 15 12:37:31 2023 +0100

NIFI-11158 PutSalesforceObject processor improvements

This closes #6959.

Reviewed-by: Lehel 
Reviewed-by: Mark Bathori 

Signed-off-by: Peter Turcsanyi 
---
 .../nifi/util/StandardProcessorTestRunner.java | 13 -
 .../main/java/org/apache/nifi/util/TestRunner.java |  7 +++
 .../processors/salesforce/PutSalesforceObject.java | 34 ---
 .../salesforce/QuerySalesforceObject.java  |  8 ++-
 .../processors/salesforce/util/RecordExtender.java |  4 +-
 .../salesforce/util/SalesforceRestService.java | 15 -
 .../salesforce/PutSalesforceObjectIT.java  | 67 +++---
 .../salesforce/QuerySalesforceObjectIT.java|  2 +
 8 files changed, 126 insertions(+), 24 deletions(-)

diff --git 
a/nifi-mock/src/main/java/org/apache/nifi/util/StandardProcessorTestRunner.java 
b/nifi-mock/src/main/java/org/apache/nifi/util/StandardProcessorTestRunner.java
index b2b0557dad..99bec62b9e 100644
--- 
a/nifi-mock/src/main/java/org/apache/nifi/util/StandardProcessorTestRunner.java
+++ 
b/nifi-mock/src/main/java/org/apache/nifi/util/StandardProcessorTestRunner.java
@@ -40,6 +40,7 @@ import org.apache.nifi.processor.ProcessSessionFactory;
 import org.apache.nifi.processor.Processor;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.provenance.ProvenanceEventRecord;
+import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.registry.VariableDescriptor;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.state.MockStateManager;
@@ -72,6 +73,7 @@ import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
 import static java.util.Objects.requireNonNull;
+import static java.util.stream.Collectors.toSet;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class StandardProcessorTestRunner implements TestRunner {
@@ -367,7 +369,7 @@ public class StandardProcessorTestRunner implements 
TestRunner {
 .collect(Collectors.toMap(Map.Entry::getKey, 
Map.Entry::getValue))
 
 )
-.collect(Collectors.toSet());
+.collect(toSet());
 
 assertEquals(expectedAttributes, actualAttributes);
 }
@@ -1056,4 +1058,13 @@ public class StandardProcessorTestRunner implements 
TestRunner {
 public void setRunSchedule(long runSchedule) {
 this.runSchedule = runSchedule;
 }
+
+@Override
+public void assertProvenanceEvent(final ProvenanceEventType eventType) {
+Set expectedEventTypes = 
Collections.singleton(eventType);
+Set actualEventTypes = 
getProvenanceEvents().stream()
+.map(ProvenanceEventRecord::getEventType)
+.collect(toSet());
+assertEquals(expectedEventTypes, actualEventTypes);
+}
 }
diff --git a/nifi-mock/src/main/java/org/apache/nifi/util/TestRunner.java 
b/nifi-mock/src/main/java/org/apache/nifi/util/TestRunner.java
index 7b01ed9709..dbe31e73c8 100644
--- a/nifi-mock/src/main/java/org/apache/nifi/util/TestRunner.java
+++ b/nifi-mock/src/main/java/org/apache/nifi/util/TestRunner.java
@@ -28,6 +28,7 @@ import org.apache.nifi.processor.ProcessSessionFactory;
 import org.apache.nifi.processor.Processor;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.provenance.ProvenanceEventRecord;
+import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.state.MockStateManager;
 
@@ -1062,4 +1063,10 @@ public interface TestRunner {
  */
  void setRunSchedule(long runSchedule);
 
+/**
+ * Assert that provenance event was created with the specified event type.
+ *
+ * @param eventType Provenance event type
+ */
+ void assertProvenanceEvent(ProvenanceEventType eventType);
 }
diff --git 
a/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java
 
b/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java
index 00d0c2b783..83605800f9 100644
--- 
a/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java
+++ 
b/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java
@@ -20,7 +20,9 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.nifi.NullSuppression;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import

[nifi] 01/06: NIFI-11124: Add hadoop.file.url attribute to HDFS processors

2023-03-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit 85b36cac609f752040bbe8b3a5a676a22988d8d6
Author: Mark Bathori 
AuthorDate: Wed Feb 1 20:21:51 2023 +0100

NIFI-11124: Add hadoop.file.url attribute to HDFS processors

This closes #6916.

Signed-off-by: Peter Turcsanyi 
---
 .../processors/hadoop/AbstractHadoopProcessor.java |  1 +
 .../processors/hadoop/AbstractFetchHDFSRecord.java | 31 ++-
 .../processors/hadoop/AbstractPutHDFSRecord.java   |  1 +
 .../apache/nifi/processors/hadoop/DeleteHDFS.java  |  2 ++
 .../apache/nifi/processors/hadoop/FetchHDFS.java   |  9 --
 .../apache/nifi/processors/hadoop/MoveHDFS.java|  6 +++-
 .../org/apache/nifi/processors/hadoop/PutHDFS.java |  2 ++
 .../apache/nifi/processors/hadoop/PutHDFSTest.java |  6 
 .../nifi/processors/hadoop/TestDeleteHDFS.java |  5 +++
 .../nifi/processors/hadoop/TestFetchHDFS.java  |  4 +++
 .../org/apache/nifi/processors/orc/PutORC.java |  1 +
 .../org/apache/nifi/processors/orc/PutORCTest.java |  3 ++
 .../nifi/processors/parquet/FetchParquet.java  |  3 +-
 .../apache/nifi/processors/parquet/PutParquet.java |  1 +
 .../nifi/processors/parquet/FetchParquetTest.java  |  2 ++
 .../nifi/processors/parquet/PutParquetTest.java| 36 --
 16 files changed, 77 insertions(+), 36 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
index f7c082bf12..a967b9037a 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
@@ -158,6 +158,7 @@ public abstract class AbstractHadoopProcessor extends 
AbstractProcessor implemen
 
 
 public static final String ABSOLUTE_HDFS_PATH_ATTRIBUTE = 
"absolute.hdfs.path";
+public static final String HADOOP_FILE_URL_ATTRIBUTE = "hadoop.file.url";
 
 protected static final String TARGET_HDFS_DIR_CREATED_ATTRIBUTE = 
"target.dir.created";
 
diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractFetchHDFSRecord.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractFetchHDFSRecord.java
index 33e762fa31..03770ac56f 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractFetchHDFSRecord.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractFetchHDFSRecord.java
@@ -16,21 +16,6 @@
  */
 package org.apache.nifi.processors.hadoop;
 
-import java.io.BufferedOutputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -57,6 +42,21 @@ import org.apache.nifi.serialization.record.Record;
 import org.apache.nifi.serialization.record.RecordSchema;
 import org.apache.nifi.util.StopWatch;
 
+import java.io.BufferedOutputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+
 /**
  * Base processor for reading a data from HDFS that can be fetched into 
records.
  */
@@ -234,6 +234,7 @@ public abstract class AbstractFetchHDFSRecord extends 
AbstractHadoopProcessor {
 
 
 final Path qualifiedPath = 
path.makeQualified(fileSystem.getUri(), fileSystem.getWorkingDirectory());
+successFlowFile = session.putAttribute(successFlowFile, 
HADOOP_FILE_URL_ATTRIBUTE, qualifiedPath.toString());
 getLogger().info("Successfully received content from {} for 

[nifi] 04/06: NIFI-11190: Added provenance events to GetHubSpot and GetShopify

2023-03-03 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit 9438eb8b86bde3506b92468f6bfa33d988531a93
Author: Lehel 
AuthorDate: Thu Feb 16 17:28:19 2023 +0100

NIFI-11190: Added provenance events to GetHubSpot and GetShopify

This closes #6965.

Signed-off-by: Peter Turcsanyi 
---
 .../java/org/apache/nifi/processors/hubspot/GetHubSpot.java  |  1 +
 .../org/apache/nifi/processors/hubspot/GetHubSpotTest.java   |  9 +
 .../java/org/apache/nifi/processors/shopify/GetShopify.java  |  3 +++
 .../nifi/processors/shopify/rest/ShopifyRestService.java |  4 
 .../shopify/{GetShopifyIT.java => GetShopifyTest.java}   | 12 
 5 files changed, 25 insertions(+), 4 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
 
b/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
index 53567ff212..d624b58afe 100644
--- 
a/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
+++ 
b/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
@@ -239,6 +239,7 @@ public class GetHubSpot extends AbstractProcessor {
 if (total.get() > 0) {
 flowFile = session.putAttribute(flowFile, 
CoreAttributes.MIME_TYPE.key(), "application/json");
 session.transfer(flowFile, REL_SUCCESS);
+session.getProvenanceReporter().receive(flowFile, 
uri.toString());
 } else {
 getLogger().debug("Empty response when requested HubSpot 
endpoint: [{}]", endpoint);
 context.yield();
diff --git 
a/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/test/java/org/apache/nifi/processors/hubspot/GetHubSpotTest.java
 
b/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/test/java/org/apache/nifi/processors/hubspot/GetHubSpotTest.java
index fd8ed18fed..55f371acf6 100644
--- 
a/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/test/java/org/apache/nifi/processors/hubspot/GetHubSpotTest.java
+++ 
b/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/test/java/org/apache/nifi/processors/hubspot/GetHubSpotTest.java
@@ -45,6 +45,7 @@ import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.state.Scope;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.provenance.ProvenanceEventRecord;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.util.MockFlowFile;
 import org.apache.nifi.util.TestRunner;
@@ -109,6 +110,8 @@ class GetHubSpotTest {
 
 flowFile.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), 
"application/json");
 assertEquals(expectedJsonNode, actualJsonNode);
+List provenanceEvents = 
runner.getProvenanceEvents();
+assertEquals(baseUrl.toString(), 
provenanceEvents.get(0).getTransitUri());
 }
 
 @Test
@@ -122,6 +125,7 @@ class GetHubSpotTest {
 final List flowFiles = 
runner.getFlowFilesForRelationship(GetHubSpot.REL_SUCCESS);
 
 assertTrue(flowFiles.isEmpty());
+assertTrue(runner.getProvenanceEvents().isEmpty());
 }
 
 @Test
@@ -131,6 +135,7 @@ class GetHubSpotTest {
 server.enqueue(new 
MockResponse().setBody(response).setResponseCode(429));
 
 assertThrows(AssertionError.class, () -> runner.run(1));
+assertTrue(runner.getProvenanceEvents().isEmpty());
 }
 
 @Test
@@ -173,6 +178,8 @@ class GetHubSpotTest {
 final String expectedJsonString = root.toString();
 
 assertEquals(OBJECT_MAPPER.readTree(expectedJsonString), 
OBJECT_MAPPER.readTree(requestBodyString));
+List provenanceEvents = 
runner.getProvenanceEvents();
+assertEquals(baseUrl.toString(), 
provenanceEvents.get(0).getTransitUri());
 }
 
 @Test
@@ -220,6 +227,8 @@ class GetHubSpotTest {
 final String expectedJsonString = root.toString();
 
 assertEquals(OBJECT_MAPPER.readTree(expectedJsonString), 
OBJECT_MAPPER.readTree(requestBodyString));
+List provenanceEvents = 
runner.getProvenanceEvents();
+assertEquals(baseUrl.toString(), 
provenanceEvents.get(0).getTransitUri());
 }
 
 static class MockGetHubSpot extends GetHubSpot {
diff --git 
a/nifi-nar-bundles/nifi-shopify-bundle/nifi-shopify-processors/src/main/java/org/apache/nifi/processors/shopify/GetShopify.java
 
b/nifi-nar-bundles/nifi-shopify-bundle/nifi-shopify-processors/src/main/java/org/apache/nifi/processors/shopify/GetShopify.j

[nifi] branch support/nifi-1.x updated: NIFI-11044 Script/commands to migrate Kafka processors

2023-03-02 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new 984c0a0baf NIFI-11044 Script/commands to migrate Kafka processors
984c0a0baf is described below

commit 984c0a0baf0cac189c2e2f6d711978e5de651db1
Author: Timea Barna 
AuthorDate: Wed Mar 1 15:03:53 2023 +0100

NIFI-11044 Script/commands to migrate Kafka processors

This closes #6998.

Signed-off-by: Peter Turcsanyi 
---
 nifi-docs/src/main/asciidoc/toolkit-guide.adoc |  69 -
 nifi-toolkit/nifi-toolkit-assembly/pom.xml |   5 +
 .../src/main/resources/bin/kafka-migrator.bat  |  41 +++
 .../src/main/resources/bin/kafka-migrator.sh   | 119 +
 nifi-toolkit/nifi-toolkit-kafka-migrator/pom.xml   |  51 
 .../toolkit/kafkamigrator/KafkaMigratorMain.java   | 130 ++
 .../kafkamigrator/MigratorConfiguration.java   |  95 +++
 .../descriptor/FlowPropertyXpathDescriptor.java|  69 +
 .../descriptor/KafkaProcessorDescriptor.java   | 128 ++
 .../descriptor/KafkaProcessorType.java |  33 +++
 .../descriptor/ProcessorDescriptor.java|  26 ++
 .../descriptor/PropertyXpathDescriptor.java|  25 ++
 .../TemplatePropertyXpathDescriptor.java   |  69 +
 .../migrator/AbstractKafkaMigrator.java| 193 ++
 .../migrator/ConsumeKafkaFlowMigrator.java |  38 +++
 .../migrator/ConsumeKafkaTemplateMigrator.java |  52 
 .../toolkit/kafkamigrator/migrator/Migrator.java   |  29 +++
 .../migrator/PublishKafkaFlowMigrator.java |  48 
 .../migrator/PublishKafkaTemplateMigrator.java |  57 +
 .../service/KafkaFlowMigrationService.java |  76 ++
 .../service/KafkaMigrationService.java |  72 ++
 .../service/KafkaTemplateMigrationService.java |  75 ++
 .../kafkamigrator/KafkaMigrationServiceTest.java   | 155 
 .../toolkit/kafkamigrator/KafkaMigrationUtil.java  |  32 +++
 .../toolkit/kafkamigrator/KafkaMigratorTest.java   | 278 +
 .../src/test/resources/flow.xml| 136 ++
 nifi-toolkit/pom.xml   |   1 +
 27 files changed, 2101 insertions(+), 1 deletion(-)

diff --git a/nifi-docs/src/main/asciidoc/toolkit-guide.adoc 
b/nifi-docs/src/main/asciidoc/toolkit-guide.adoc
index 6e24472adf..4c05dd6980 100644
--- a/nifi-docs/src/main/asciidoc/toolkit-guide.adoc
+++ b/nifi-docs/src/main/asciidoc/toolkit-guide.adoc
@@ -1583,4 +1583,71 @@ NOTE: As of NiFi 1.10.x, because of an upgrade to 
ZooKeeper 3.5.x, the migrator
 * For a ZooKeeper using Kerberos for authentication:
 ** `zk-migrator.sh -s -z 
destinationHostname:destinationClientPort/destinationRootPath/components -k 
/path/to/jaasconfig/jaas-config.conf -f /path/to/export/zk-source-data.json`
 
-6. Once the migration has completed successfully, start the processors in the 
NiFi flow.  Processing should continue from the point at which it was stopped 
when the NiFi flow was stopped.
\ No newline at end of file
+6. Once the migration has completed successfully, start the processors in the 
NiFi flow.  Processing should continue from the point at which it was stopped 
when the NiFi flow was stopped.
+
+[[kafka_migrator]]
+== Kafka Processor Migrator
+With NiFi version 1.15.3, Kafka processor versions 0.8, 0.9, 0.10 and 0.11 
were removed.
+In large flows with many components it is challenging to replace 
these processors manually.
+This tool can be used to update a flow in an automated way.
+
+=== Usage
+Running the script requires 3 mandatory parameters and 1 optional parameter:
+
+* Input file, the full path of the flow.xml.gz in which the replacement is 
required.
+* Output file, the full path of the file where the results should be saved.
+* Transaction, whether the new processors should be configured with or without 
transaction usage.
+* Optional: Kafka Brokers, a comma separated list of Kafka Brokers in 
: format.
+
+Different input and output files must be used.
+The Kafka Broker argument can be omitted if the flow does not contain GetKafka 
or PutKafka processors.
+
+1. Run script, a possible example:
+
+ ./bin/kafka-migrator.sh -i "/tmp/flow/flow.xml.gz" -o 
"/tmp/flow/flow_result.xml.gz" -t false -k 
"mykafkaserver1:1234,mykafkaserver2:1235"
+
+2. Rename flow_result.xml.gz file to flow.xml.gz, do not overwrite your input 
file.
+3. Copy the flow.xml.gz file to the conf directory of all NiFi nodes
+4. Start NiFi
+5. Verify the results.
+
+=== Expected Behaviour
+* Flow replacement:
+* For all replaced processors:
+** changing class and artifact
+** configure transaction as true
+*** 'Delivery Guarantee' property will be set to 'Replicated'
+*** if 'Honor-Transactions' and 'Use-Transactions' properties are present

[nifi] branch main updated: NIFI-11158 PutSalesforceObject processor improvements

2023-02-28 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 60c02225d5 NIFI-11158 PutSalesforceObject processor improvements
60c02225d5 is described below

commit 60c02225d5694efe8f6dab151aa441448fb215c7
Author: krisztina-zsihovszki 
AuthorDate: Wed Feb 15 12:37:31 2023 +0100

NIFI-11158 PutSalesforceObject processor improvements

This closes #6959.

Reviewed-by: Lehel 
Reviewed-by: Mark Bathori 

Signed-off-by: Peter Turcsanyi 
---
 .../nifi/util/StandardProcessorTestRunner.java | 13 -
 .../main/java/org/apache/nifi/util/TestRunner.java |  7 +++
 .../processors/salesforce/PutSalesforceObject.java | 34 ---
 .../salesforce/QuerySalesforceObject.java  |  8 ++-
 .../processors/salesforce/util/RecordExtender.java |  4 +-
 .../salesforce/util/SalesforceRestService.java | 15 -
 .../salesforce/PutSalesforceObjectIT.java  | 67 +++---
 .../salesforce/QuerySalesforceObjectIT.java|  2 +
 8 files changed, 126 insertions(+), 24 deletions(-)

diff --git 
a/nifi-mock/src/main/java/org/apache/nifi/util/StandardProcessorTestRunner.java 
b/nifi-mock/src/main/java/org/apache/nifi/util/StandardProcessorTestRunner.java
index 63290be359..c113817411 100644
--- 
a/nifi-mock/src/main/java/org/apache/nifi/util/StandardProcessorTestRunner.java
+++ 
b/nifi-mock/src/main/java/org/apache/nifi/util/StandardProcessorTestRunner.java
@@ -40,6 +40,7 @@ import org.apache.nifi.processor.ProcessSessionFactory;
 import org.apache.nifi.processor.Processor;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.provenance.ProvenanceEventRecord;
+import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.registry.VariableDescriptor;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.state.MockStateManager;
@@ -72,6 +73,7 @@ import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
 import static java.util.Objects.requireNonNull;
+import static java.util.stream.Collectors.toSet;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class StandardProcessorTestRunner implements TestRunner {
@@ -366,7 +368,7 @@ public class StandardProcessorTestRunner implements 
TestRunner {
 .collect(Collectors.toMap(Map.Entry::getKey, 
Map.Entry::getValue))
 
 )
-.collect(Collectors.toSet());
+.collect(toSet());
 
 assertEquals(expectedAttributes, actualAttributes);
 }
@@ -1055,4 +1057,13 @@ public class StandardProcessorTestRunner implements 
TestRunner {
 public void setRunSchedule(long runSchedule) {
 this.runSchedule = runSchedule;
 }
+
+@Override
+public void assertProvenanceEvent(final ProvenanceEventType eventType) {
+Set expectedEventTypes = 
Collections.singleton(eventType);
+Set actualEventTypes = 
getProvenanceEvents().stream()
+.map(ProvenanceEventRecord::getEventType)
+.collect(toSet());
+assertEquals(expectedEventTypes, actualEventTypes);
+}
 }
diff --git a/nifi-mock/src/main/java/org/apache/nifi/util/TestRunner.java 
b/nifi-mock/src/main/java/org/apache/nifi/util/TestRunner.java
index 7b01ed9709..dbe31e73c8 100644
--- a/nifi-mock/src/main/java/org/apache/nifi/util/TestRunner.java
+++ b/nifi-mock/src/main/java/org/apache/nifi/util/TestRunner.java
@@ -28,6 +28,7 @@ import org.apache.nifi.processor.ProcessSessionFactory;
 import org.apache.nifi.processor.Processor;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.provenance.ProvenanceEventRecord;
+import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.state.MockStateManager;
 
@@ -1062,4 +1063,10 @@ public interface TestRunner {
  */
  void setRunSchedule(long runSchedule);
 
+/**
+ * Assert that provenance event was created with the specified event type.
+ *
+ * @param eventType Provenance event type
+ */
+ void assertProvenanceEvent(ProvenanceEventType eventType);
 }
diff --git 
a/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java
 
b/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java
index 00d0c2b783..83605800f9 100644
--- 
a/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java
+++ 
b/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java
@@ -20,7 +20,9 @@ import

[nifi] branch support/nifi-1.x updated: NIFI-11219 Set Qpid Proton J 0.34.0 for Azure EventHubs

2023-02-26 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch support/nifi-1.x
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/support/nifi-1.x by this push:
 new f9d713494f NIFI-11219 Set Qpid Proton J 0.34.0 for Azure EventHubs
f9d713494f is described below

commit f9d713494fc0409d7a8ec69d82bf264b1c4197ea
Author: exceptionfactory 
AuthorDate: Sat Feb 25 13:03:37 2023 -0600

NIFI-11219 Set Qpid Proton J 0.34.0 for Azure EventHubs

- Overrides Qpid Proton J 0.33.8 from azure-core-amqp 2.8.1 to resolve 
PROTON-2347

This closes #6988.

Signed-off-by: Peter Turcsanyi 
---
 nifi-nar-bundles/nifi-azure-bundle/pom.xml | 7 +++
 1 file changed, 7 insertions(+)

diff --git a/nifi-nar-bundles/nifi-azure-bundle/pom.xml 
b/nifi-nar-bundles/nifi-azure-bundle/pom.xml
index 927eb403ac..db952a3f47 100644
--- a/nifi-nar-bundles/nifi-azure-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-azure-bundle/pom.xml
@@ -29,6 +29,7 @@
 1.2.9
 
8.6.6
 1.13.3
+0.34.0
 
 
 
@@ -60,6 +61,12 @@
 guava
 31.1-jre
 
+
+
+org.apache.qpid
+proton-j
+${qpid.proton.version}
+
 
 
 



[nifi] branch main updated: NIFI-11219 Set Qpid Proton J 0.34.0 for Azure EventHubs

2023-02-26 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 84047c135b NIFI-11219 Set Qpid Proton J 0.34.0 for Azure EventHubs
84047c135b is described below

commit 84047c135b43fdb81f030fe855e096ee8f5449cb
Author: exceptionfactory 
AuthorDate: Sat Feb 25 13:03:37 2023 -0600

NIFI-11219 Set Qpid Proton J 0.34.0 for Azure EventHubs

- Overrides Qpid Proton J 0.33.8 from azure-core-amqp 2.8.1 to resolve 
PROTON-2347

This closes #6988.

Signed-off-by: Peter Turcsanyi 
---
 nifi-nar-bundles/nifi-azure-bundle/pom.xml | 7 +++
 1 file changed, 7 insertions(+)

diff --git a/nifi-nar-bundles/nifi-azure-bundle/pom.xml 
b/nifi-nar-bundles/nifi-azure-bundle/pom.xml
index 94a9f74b53..9e674eea08 100644
--- a/nifi-nar-bundles/nifi-azure-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-azure-bundle/pom.xml
@@ -29,6 +29,7 @@
 1.2.9
 
8.6.6
 1.13.3
+0.34.0
 
 
 
@@ -60,6 +61,12 @@
 guava
 31.1-jre
 
+
+
+org.apache.qpid
+proton-j
+${qpid.proton.version}
+
 
 
 



[nifi] branch main updated: NIFI-11045: Sensitive dynamic property support for parameterized queries in ExecuteSQL and ExecuteSQLRecord

2023-02-22 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 7e97c1ce19 NIFI-11045: Sensitive dynamic property support for 
parameterized queries in ExecuteSQL and ExecuteSQLRecord
7e97c1ce19 is described below

commit 7e97c1ce1913ae8a567b40e94fc22e3924e5e40f
Author: Lehel 
AuthorDate: Tue Jan 17 16:15:15 2023 +0100

NIFI-11045: Sensitive dynamic property support for parameterized queries in 
ExecuteSQL and ExecuteSQLRecord

This closes #6853.

Signed-off-by: Peter Turcsanyi 
---
 .../java/org/apache/nifi/util/db/JdbcCommon.java   | 78 ++
 .../apache/nifi/util/db/SensitiveValueWrapper.java | 36 ++
 .../org/apache/nifi/util/db/TestJdbcCommon.java| 34 +-
 .../nifi-standard-processors/pom.xml   |  9 +--
 .../processors/standard/AbstractExecuteSQL.java| 32 ++---
 .../nifi/processors/standard/ExecuteSQL.java   | 61 ++---
 .../nifi/processors/standard/ExecuteSQLRecord.java | 47 -
 7 files changed, 243 insertions(+), 54 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
index 9914efcaf2..bdc66302fd 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
@@ -70,9 +70,11 @@ import java.time.format.DateTimeFormatter;
 import java.util.Base64;
 import java.util.Date;
 import java.util.Map;
+import java.util.Optional;
 import java.util.function.Function;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 
 import static java.sql.Types.ARRAY;
 import static java.sql.Types.BIGINT;
@@ -122,6 +124,7 @@ public class JdbcCommon {
 public static final Pattern NUMBER_PATTERN = Pattern.compile("-?\\d+");
 
 public static final String MIME_TYPE_AVRO_BINARY = 
"application/avro-binary";
+public static final String MASKED_LOG_VALUE = "MASKED VALUE";
 
 public static long convertToAvroStream(final ResultSet rs, final 
OutputStream outStream, boolean convertNames) throws SQLException, IOException {
 return convertToAvroStream(rs, outStream, null, null, convertNames);
@@ -683,32 +686,55 @@ public class JdbcCommon {
  * @throws SQLException if the PreparedStatement throws a SQLException 
when the appropriate setter is called
  */
 public static void setParameters(final PreparedStatement stmt, final 
Map attributes) throws SQLException {
-for (final Map.Entry entry : attributes.entrySet()) {
-final String key = entry.getKey();
-final Matcher matcher = SQL_TYPE_ATTRIBUTE_PATTERN.matcher(key);
-if (matcher.matches()) {
-final int parameterIndex = Integer.parseInt(matcher.group(1));
-
-final boolean isNumeric = 
NUMBER_PATTERN.matcher(entry.getValue()).matches();
-if (!isNumeric) {
-throw new SQLDataException("Value of the " + key + " 
attribute is '" + entry.getValue() + "', which is not a valid JDBC numeral 
type");
-}
+final Map sensitiveValueWrapperMap = 
attributes.entrySet()
+.stream()
+.collect(Collectors.toMap(Map.Entry::getKey, e -> new 
SensitiveValueWrapper(e.getValue(), false)));
+setSensitiveParameters(stmt, sensitiveValueWrapperMap);
+}
+
+/**
+ * Sets all of the appropriate parameters on the given PreparedStatement, 
based on the given FlowFile attributes
+ * and masks sensitive values.
+ *
+ * @param stmt the statement to set the parameters on
+ * @param attributes the attributes from which to derive parameter 
indices, values, and types
+ * @throws SQLException if the PreparedStatement throws a SQLException 
when the appropriate setter is called
+ */
+public static void setSensitiveParameters(final PreparedStatement stmt, 
final Map attributes) throws SQLException {
+for (final Map.Entry entry : 
attributes.entrySet()) {
+final String flowFileAttributeKey = entry.getKey();
+setParameterAtIndex(stmt, attributes, flowFileAttributeKey);
+}
+}
 
-final int jdbcType = Integer.parseInt(entry.getValue());
-final String valueAttrName = "sql.args." + parameterIndex + 
".value";
-final String parameterValue = attributes.get(valueAttrName);
-final String format

[nifi] branch main updated: NIFI-10803 - Upgrade Dropbox SDK to 5.4.4

2023-02-21 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 3115b9f28d NIFI-10803 - Upgrade Dropbox SDK to 5.4.4
3115b9f28d is described below

commit 3115b9f28d6b84930c950f27247b86fa8ff5970c
Author: Pierre Villard 
AuthorDate: Sat Feb 18 18:12:48 2023 -0500

NIFI-10803 - Upgrade Dropbox SDK to 5.4.4

This closes #6971.

Reviewed-by: krisztina-zsihovszki 

Signed-off-by: Peter Turcsanyi 
---
 .../src/main/java/org/apache/nifi/processors/dropbox/ListDropbox.java | 4 ++--
 .../test/java/org/apache/nifi/processors/dropbox/ListDropboxTest.java | 4 ++--
 nifi-nar-bundles/nifi-dropbox-bundle/pom.xml  | 2 +-
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/ListDropbox.java
 
b/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/ListDropbox.java
index e0b5373e17..9005b37314 100644
--- 
a/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/ListDropbox.java
+++ 
b/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/ListDropbox.java
@@ -33,8 +33,8 @@ import static 
org.apache.nifi.processors.dropbox.DropboxAttributes.TIMESTAMP_DES
 
 import com.dropbox.core.DbxException;
 import com.dropbox.core.v2.DbxClientV2;
+import com.dropbox.core.v2.files.DbxUserListFolderBuilder;
 import com.dropbox.core.v2.files.FileMetadata;
-import com.dropbox.core.v2.files.ListFolderBuilder;
 import com.dropbox.core.v2.files.ListFolderResult;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -201,7 +201,7 @@ public class ListDropbox extends 
AbstractListProcessor implemen
 try {
 Predicate metadataFilter = 
createMetadataFilter(minTimestamp, minAge);
 
-final ListFolderBuilder listFolderBuilder = 
dropboxApiClient.files().listFolderBuilder(convertFolderName(folderName));
+final DbxUserListFolderBuilder listFolderBuilder = 
dropboxApiClient.files().listFolderBuilder(convertFolderName(folderName));
 ListFolderResult result = listFolderBuilder
 .withRecursive(recursive)
 .start();
diff --git 
a/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/test/java/org/apache/nifi/processors/dropbox/ListDropboxTest.java
 
b/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/test/java/org/apache/nifi/processors/dropbox/ListDropboxTest.java
index be08c2dc94..6b14b5d2f6 100644
--- 
a/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/test/java/org/apache/nifi/processors/dropbox/ListDropboxTest.java
+++ 
b/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/test/java/org/apache/nifi/processors/dropbox/ListDropboxTest.java
@@ -26,8 +26,8 @@ import static org.mockito.Mockito.when;
 import com.dropbox.core.DbxException;
 import com.dropbox.core.v2.DbxClientV2;
 import com.dropbox.core.v2.files.DbxUserFilesRequests;
+import com.dropbox.core.v2.files.DbxUserListFolderBuilder;
 import com.dropbox.core.v2.files.FolderMetadata;
-import com.dropbox.core.v2.files.ListFolderBuilder;
 import com.dropbox.core.v2.files.ListFolderResult;
 import com.dropbox.core.v2.files.Metadata;
 import com.fasterxml.jackson.core.JsonProcessingException;
@@ -69,7 +69,7 @@ public class ListDropboxTest extends AbstractDropboxTest {
 private ListFolderResult mockListFolderResult;
 
 @Mock
-private ListFolderBuilder mockListFolderBuilder;
+private DbxUserListFolderBuilder mockListFolderBuilder;
 
 @BeforeEach
 protected void setUp() throws Exception {
diff --git a/nifi-nar-bundles/nifi-dropbox-bundle/pom.xml 
b/nifi-nar-bundles/nifi-dropbox-bundle/pom.xml
index c24fef70d2..dc7c2d293f 100644
--- a/nifi-nar-bundles/nifi-dropbox-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-dropbox-bundle/pom.xml
@@ -27,7 +27,7 @@
 pom
 
 
-4.0.1
+5.4.4
 
 
 



[nifi] branch main updated: NIFI-11190: Added provenance events to GetHubSpot and GetShopify

2023-02-20 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 1b951818e1 NIFI-11190: Added provenance events to GetHubSpot and 
GetShopify
1b951818e1 is described below

commit 1b951818e1fbf7f09113e93811008627a17f2ed0
Author: Lehel 
AuthorDate: Thu Feb 16 17:28:19 2023 +0100

NIFI-11190: Added provenance events to GetHubSpot and GetShopify

This closes #6965.

Signed-off-by: Peter Turcsanyi 
---
 .../java/org/apache/nifi/processors/hubspot/GetHubSpot.java  |  1 +
 .../org/apache/nifi/processors/hubspot/GetHubSpotTest.java   |  9 +
 .../java/org/apache/nifi/processors/shopify/GetShopify.java  |  3 +++
 .../nifi/processors/shopify/rest/ShopifyRestService.java |  4 
 .../shopify/{GetShopifyIT.java => GetShopifyTest.java}   | 12 
 5 files changed, 25 insertions(+), 4 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
 
b/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
index 53567ff212..d624b58afe 100644
--- 
a/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
+++ 
b/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
@@ -239,6 +239,7 @@ public class GetHubSpot extends AbstractProcessor {
 if (total.get() > 0) {
 flowFile = session.putAttribute(flowFile, 
CoreAttributes.MIME_TYPE.key(), "application/json");
 session.transfer(flowFile, REL_SUCCESS);
+session.getProvenanceReporter().receive(flowFile, 
uri.toString());
 } else {
 getLogger().debug("Empty response when requested HubSpot 
endpoint: [{}]", endpoint);
 context.yield();
diff --git 
a/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/test/java/org/apache/nifi/processors/hubspot/GetHubSpotTest.java
 
b/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/test/java/org/apache/nifi/processors/hubspot/GetHubSpotTest.java
index fd8ed18fed..55f371acf6 100644
--- 
a/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/test/java/org/apache/nifi/processors/hubspot/GetHubSpotTest.java
+++ 
b/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/test/java/org/apache/nifi/processors/hubspot/GetHubSpotTest.java
@@ -45,6 +45,7 @@ import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.state.Scope;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.provenance.ProvenanceEventRecord;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.util.MockFlowFile;
 import org.apache.nifi.util.TestRunner;
@@ -109,6 +110,8 @@ class GetHubSpotTest {
 
 flowFile.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), 
"application/json");
 assertEquals(expectedJsonNode, actualJsonNode);
+List provenanceEvents = 
runner.getProvenanceEvents();
+assertEquals(baseUrl.toString(), 
provenanceEvents.get(0).getTransitUri());
 }
 
 @Test
@@ -122,6 +125,7 @@ class GetHubSpotTest {
 final List flowFiles = 
runner.getFlowFilesForRelationship(GetHubSpot.REL_SUCCESS);
 
 assertTrue(flowFiles.isEmpty());
+assertTrue(runner.getProvenanceEvents().isEmpty());
 }
 
 @Test
@@ -131,6 +135,7 @@ class GetHubSpotTest {
 server.enqueue(new 
MockResponse().setBody(response).setResponseCode(429));
 
 assertThrows(AssertionError.class, () -> runner.run(1));
+assertTrue(runner.getProvenanceEvents().isEmpty());
 }
 
 @Test
@@ -173,6 +178,8 @@ class GetHubSpotTest {
 final String expectedJsonString = root.toString();
 
 assertEquals(OBJECT_MAPPER.readTree(expectedJsonString), 
OBJECT_MAPPER.readTree(requestBodyString));
+List provenanceEvents = 
runner.getProvenanceEvents();
+assertEquals(baseUrl.toString(), 
provenanceEvents.get(0).getTransitUri());
 }
 
 @Test
@@ -220,6 +227,8 @@ class GetHubSpotTest {
 final String expectedJsonString = root.toString();
 
 assertEquals(OBJECT_MAPPER.readTree(expectedJsonString), 
OBJECT_MAPPER.readTree(requestBodyString));
+List provenanceEvents = 
runner.getProvenanceEvents();
+assertEquals(baseUrl.toString(), 
provenanceEvents.get(0).getTransitUri());
 }
 
 static class MockGetHubSpot extends GetHubSpot {
diff --git 
a/nifi-nar-bundles/nifi-shopify-bundle/nifi-shopify-processors/src/main/java/org/apache/

[nifi] branch main updated: Revert "NIFI-11044 Script/commands to migrate Kafka processors"

2023-02-14 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new c19ec90030 Revert "NIFI-11044 Script/commands to migrate Kafka 
processors"
c19ec90030 is described below

commit c19ec90030ac401315d425a2fb26e6bf3ecf8e66
Author: Peter Turcsanyi 
AuthorDate: Tue Feb 14 20:06:06 2023 +0100

Revert "NIFI-11044 Script/commands to migrate Kafka processors"

This reverts commit 00985edd803b06ace9a12b9fd19f29585e191330.

Reason for reverting: 0.x Kafka processors are not present in NiFi 2.x.
The migration tool needs to be added on the 1.x branch only.
---
 nifi-docs/src/main/asciidoc/toolkit-guide.adoc |  69 +
 nifi-toolkit/nifi-toolkit-assembly/pom.xml |   5 -
 .../src/main/resources/bin/kafka-migrator.bat  |  41 ---
 .../src/main/resources/bin/kafka-migrator.sh   | 119 -
 nifi-toolkit/nifi-toolkit-kafka-migrator/pom.xml   |  51 
 .../toolkit/kafkamigrator/KafkaMigratorMain.java   | 130 --
 .../kafkamigrator/MigratorConfiguration.java   |  95 ---
 .../descriptor/FlowPropertyXpathDescriptor.java|  69 -
 .../descriptor/KafkaProcessorDescriptor.java   | 128 --
 .../descriptor/KafkaProcessorType.java |  33 ---
 .../descriptor/ProcessorDescriptor.java|  26 --
 .../descriptor/PropertyXpathDescriptor.java|  25 --
 .../TemplatePropertyXpathDescriptor.java   |  69 -
 .../migrator/AbstractKafkaMigrator.java| 193 --
 .../migrator/ConsumeKafkaFlowMigrator.java |  38 ---
 .../migrator/ConsumeKafkaTemplateMigrator.java |  52 
 .../toolkit/kafkamigrator/migrator/Migrator.java   |  29 ---
 .../migrator/PublishKafkaFlowMigrator.java |  48 
 .../migrator/PublishKafkaTemplateMigrator.java |  57 -
 .../service/KafkaFlowMigrationService.java |  76 --
 .../service/KafkaMigrationService.java |  72 --
 .../service/KafkaTemplateMigrationService.java |  75 --
 .../kafkamigrator/KafkaMigrationServiceTest.java   | 155 
 .../toolkit/kafkamigrator/KafkaMigrationUtil.java  |  32 ---
 .../toolkit/kafkamigrator/KafkaMigratorTest.java   | 278 -
 .../src/test/resources/flow.xml| 136 --
 nifi-toolkit/pom.xml   |   1 -
 27 files changed, 1 insertion(+), 2101 deletions(-)

diff --git a/nifi-docs/src/main/asciidoc/toolkit-guide.adoc 
b/nifi-docs/src/main/asciidoc/toolkit-guide.adoc
index 4c05dd6980..6e24472adf 100644
--- a/nifi-docs/src/main/asciidoc/toolkit-guide.adoc
+++ b/nifi-docs/src/main/asciidoc/toolkit-guide.adoc
@@ -1583,71 +1583,4 @@ NOTE: As of NiFi 1.10.x, because of an upgrade to 
ZooKeeper 3.5.x, the migrator
 * For a ZooKeeper using Kerberos for authentication:
 ** `zk-migrator.sh -s -z 
destinationHostname:destinationClientPort/destinationRootPath/components -k 
/path/to/jaasconfig/jaas-config.conf -f /path/to/export/zk-source-data.json`
 
-6. Once the migration has completed successfully, start the processors in the 
NiFi flow.  Processing should continue from the point at which it was stopped 
when the NiFi flow was stopped.
-
-[[kafka_migrator]]
-== Kafka Processor Migrator
-With NiFi version 1.15.3, Kafka processor versions 0.8, 0.9, 0.10 and 0.11 
were removed.
-In large flows having many numbers of components it is challenging to replace 
these processors manually.
-This tool can be used to update a flow in an automated way.
-
-=== Usage
-Running the script requires 3 mandatory and 1 optional parameters:
-
-* Input file, the full path of the flow.xml.gz in which the replacement is 
required.
-* Output file, the full path of the file where the results should be saved.
-* Transaction, whether the new processors should be configured with or without 
transaction usage.
-* Optional: Kafka Brokers, a comma separated list of Kafka Brokers in 
: format.
-
-Different input and output files must be used.
-Kafka Broker argument can be omitted if flow does not contain GetKafka or 
PutKafka processors.
-
-1. Run script, a possible example:
-
- ./bin/kafka-migrator.sh -i "/tmp/flow/flow.xml.gz" -o 
"/tmp/flow/flow_result.xml.gz" -t false -k 
"mykafkaserver1:1234,mykafkaserver2:1235"
-
-2. Rename flow_result.xml.gz file to flow.xml.gz, do not overwrite your input 
file.
-3. Copy flow.xml.gz file to all the NiFi nodes conf directory
-4. Start NiFi
-5. Verify the results.
-
-=== Expected Behaviour
-* Flow replacement:
-* For all replaced processors:
-** changing class and artifact
-** configure transaction as true
-*** 'Delivery Guarantee' property will be set to 'Replicated'
-*** if 'Honor-Transactions' and 'Use-Transactions' properties are present in 
the file they will be set to true

[nifi] branch main updated: NIFI-11044 Script/commands to migrate Kafka processors

2023-02-14 Thread turcsanyi
This is an automated email from the ASF dual-hosted git repository.

turcsanyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
 new 00985edd80 NIFI-11044 Script/commands to migrate Kafka processors
00985edd80 is described below

commit 00985edd803b06ace9a12b9fd19f29585e191330
Author: Timea Barna 
AuthorDate: Thu Jan 12 09:31:38 2023 +0100

NIFI-11044 Script/commands to migrate Kafka processors

This closes #6838.

Reviewed-by: Robert Kalmar 
Reviewed-by: Zoltan Kornel Torok 

Signed-off-by: Peter Turcsanyi 
---
 nifi-docs/src/main/asciidoc/toolkit-guide.adoc |  69 -
 nifi-toolkit/nifi-toolkit-assembly/pom.xml |   5 +
 .../src/main/resources/bin/kafka-migrator.bat  |  41 +++
 .../src/main/resources/bin/kafka-migrator.sh   | 119 +
 nifi-toolkit/nifi-toolkit-kafka-migrator/pom.xml   |  51 
 .../toolkit/kafkamigrator/KafkaMigratorMain.java   | 130 ++
 .../kafkamigrator/MigratorConfiguration.java   |  95 +++
 .../descriptor/FlowPropertyXpathDescriptor.java|  69 +
 .../descriptor/KafkaProcessorDescriptor.java   | 128 ++
 .../descriptor/KafkaProcessorType.java |  33 +++
 .../descriptor/ProcessorDescriptor.java|  26 ++
 .../descriptor/PropertyXpathDescriptor.java|  25 ++
 .../TemplatePropertyXpathDescriptor.java   |  69 +
 .../migrator/AbstractKafkaMigrator.java| 193 ++
 .../migrator/ConsumeKafkaFlowMigrator.java |  38 +++
 .../migrator/ConsumeKafkaTemplateMigrator.java |  52 
 .../toolkit/kafkamigrator/migrator/Migrator.java   |  29 +++
 .../migrator/PublishKafkaFlowMigrator.java |  48 
 .../migrator/PublishKafkaTemplateMigrator.java |  57 +
 .../service/KafkaFlowMigrationService.java |  76 ++
 .../service/KafkaMigrationService.java |  72 ++
 .../service/KafkaTemplateMigrationService.java |  75 ++
 .../kafkamigrator/KafkaMigrationServiceTest.java   | 155 
 .../toolkit/kafkamigrator/KafkaMigrationUtil.java  |  32 +++
 .../toolkit/kafkamigrator/KafkaMigratorTest.java   | 278 +
 .../src/test/resources/flow.xml| 136 ++
 nifi-toolkit/pom.xml   |   1 +
 27 files changed, 2101 insertions(+), 1 deletion(-)

diff --git a/nifi-docs/src/main/asciidoc/toolkit-guide.adoc 
b/nifi-docs/src/main/asciidoc/toolkit-guide.adoc
index 6e24472adf..4c05dd6980 100644
--- a/nifi-docs/src/main/asciidoc/toolkit-guide.adoc
+++ b/nifi-docs/src/main/asciidoc/toolkit-guide.adoc
@@ -1583,4 +1583,71 @@ NOTE: As of NiFi 1.10.x, because of an upgrade to 
ZooKeeper 3.5.x, the migrator
 * For a ZooKeeper using Kerberos for authentication:
 ** `zk-migrator.sh -s -z 
destinationHostname:destinationClientPort/destinationRootPath/components -k 
/path/to/jaasconfig/jaas-config.conf -f /path/to/export/zk-source-data.json`
 
-6. Once the migration has completed successfully, start the processors in the 
NiFi flow.  Processing should continue from the point at which it was stopped 
when the NiFi flow was stopped.
\ No newline at end of file
+6. Once the migration has completed successfully, start the processors in the 
NiFi flow.  Processing should continue from the point at which it was stopped 
when the NiFi flow was stopped.
+
+[[kafka_migrator]]
+== Kafka Processor Migrator
+With NiFi version 1.15.3, Kafka processor versions 0.8, 0.9, 0.10 and 0.11 
were removed.
In large flows with many components, it is challenging to replace 
these processors manually.
+This tool can be used to update a flow in an automated way.
+
+=== Usage
+Running the script requires 3 mandatory parameters and 1 optional parameter:
+
+* Input file, the full path of the flow.xml.gz in which the replacement is 
required.
+* Output file, the full path of the file where the results should be saved.
+* Transaction, whether the new processors should be configured with or without 
transaction usage.
+* Optional: Kafka Brokers, a comma separated list of Kafka Brokers in 
`<host>:<port>` format.
+
+Different input and output files must be used.
+The Kafka Brokers argument can be omitted if the flow does not contain GetKafka or 
PutKafka processors.
+
+1. Run script, a possible example:
+
+ ./bin/kafka-migrator.sh -i "/tmp/flow/flow.xml.gz" -o 
"/tmp/flow/flow_result.xml.gz" -t false -k 
"mykafkaserver1:1234,mykafkaserver2:1235"
+
+2. Rename flow_result.xml.gz file to flow.xml.gz, do not overwrite your input 
file.
+3. Copy flow.xml.gz file to all the NiFi nodes conf directory
+4. Start NiFi
+5. Verify the results.
+
+=== Expected Behaviour
+* Flow replacement:
+* For all replaced processors:
+** changing class and artifact
+** configuring transaction as true
+*** 'Delivery Guarantee' property will be set to 'Replicated'
+*** if 'Honor-Trans

  1   2   3   >