[nifi] branch master updated: NIFI-7348 Wait - Removes WAIT_START_TIMESTAMP after expiration

2020-04-15 Thread ijokarumawak
This is an automated email from the ASF dual-hosted git repository.

ijokarumawak pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/master by this push:
 new 8e3f420  NIFI-7348 Wait - Removes WAIT_START_TIMESTAMP after expiration
8e3f420 is described below

commit 8e3f42051fb3c7da27873de8b6d4507827a7b80d
Author: EndzeitBegins <1115+endzeitbeg...@users.noreply.github.com>
AuthorDate: Mon Apr 13 16:19:55 2020 +0200

NIFI-7348 Wait - Removes WAIT_START_TIMESTAMP after expiration

This closes #4201.

Signed-off-by: Koji Kawamura 
---
 .../org/apache/nifi/processors/standard/Wait.java  |  4 +-
 .../apache/nifi/processors/standard/TestWait.java  | 62 --
 2 files changed, 37 insertions(+), 29 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java
index 45ffcb2..37f4479 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java
@@ -87,7 +87,7 @@ import static 
org.apache.nifi.processor.FlowFileFilter.FlowFileFilterResult.REJE
 @WritesAttributes({
 @WritesAttribute(attribute = "wait.start.timestamp", description = 
"All FlowFiles will have an attribute 'wait.start.timestamp', which sets the "
 + "initial epoch timestamp when the file first entered this processor. 
 This is used to determine the expiration time of the FlowFile.  "
-+ "This attribute is not written when the FlowFile is transferred to 
failure or success"),
++ "This attribute is not written when the FlowFile is transferred to 
failure, expired or success"),
 @WritesAttribute(attribute = "wait.counter.", description 
= "If a signal exists when the processor runs, "
 + "each count value in the signal is copied.")
 })
@@ -375,7 +375,7 @@ public class Wait extends AbstractProcessor {
 final Relationship finalRelationship = relationship;
 final List flowFilesWithSignalAttributes = 
routedFlowFiles.getValue().stream()
 .map(f -> {
-if (REL_SUCCESS.equals(finalRelationship)) {
+if (REL_SUCCESS.equals(finalRelationship) || 
REL_EXPIRED.equals(finalRelationship)) {
 // These flowFiles will be exiting the wait, clear 
the timer
 f = clearWaitState(session, f);
 }
diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestWait.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestWait.java
index 7970601..d8fffdc 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestWait.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestWait.java
@@ -69,8 +69,8 @@ public class TestWait {
 
 // no cache key attribute
 runner.assertAllFlowFilesTransferred(Wait.REL_WAIT, 1);
-// timestamp must be present
-
runner.getFlowFilesForRelationship(Wait.REL_WAIT).get(0).assertAttributeExists(Wait.WAIT_START_TIMESTAMP);
+MockFlowFile ff = 
runner.getFlowFilesForRelationship(Wait.REL_WAIT).get(0);
+ff.assertAttributeExists(Wait.WAIT_START_TIMESTAMP); // timestamp must 
be set
 runner.clearTransferState();
 }
 
@@ -103,7 +103,7 @@ public class TestWait {
 
 runner.assertAllFlowFilesTransferred(Wait.REL_WAIT, 1);
 MockFlowFile ff = 
runner.getFlowFilesForRelationship(Wait.REL_WAIT).get(0);
-ff.assertAttributeExists(Wait.WAIT_START_TIMESTAMP);
+ff.assertAttributeExists(Wait.WAIT_START_TIMESTAMP); // timestamp must 
be set
 
 runner.clearTransferState();
 runner.enqueue(ff);
@@ -112,6 +112,8 @@ public class TestWait {
 runner.run();
 
 runner.assertAllFlowFilesTransferred(Wait.REL_EXPIRED, 1);
+ff = runner.getFlowFilesForRelationship(Wait.REL_EXPIRED).get(0);
+ff.assertAttributeNotExists(Wait.WAIT_START_TIMESTAMP); // timestamp 
must be cleared
 runner.clearTransferState();
 }
 
@@ -129,7 +131,7 @@ public class TestWait {
 
 runner.assertAllFlowFilesTransferred(Wait.REL_WAIT, 1);
 MockFlowFile ff = 
runner.getFlowFilesForRelationship(Wait.REL_WAIT).get(0);
-ff.assertAttributeExists(Wait.WAIT_START_TIMESTAMP);
+ff.assertAttributeExists(Wait.WAIT_

[nifi] branch master updated: NIFI-7359 Fix parent id on process metrics for Prometheus

2020-04-15 Thread mattyb149
This is an automated email from the ASF dual-hosted git repository.

mattyb149 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/master by this push:
 new b113a02  NIFI-7359 Fix parent id on process metrics for Prometheus
b113a02 is described below

commit b113a022e46ee755e6b9d4395b908c7f54b2bd37
Author: Alexandre Vautier 
AuthorDate: Tue Apr 14 22:21:52 2020 +0200

NIFI-7359 Fix parent id on process metrics for Prometheus

Signed-off-by: Matthew Burgess 

This closes #4209
---
 .../prometheus/util/PrometheusMetricsUtil.java | 24 +++---
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-extension-utils/nifi-prometheus-utils/src/main/java/org/apache/nifi/prometheus/util/PrometheusMetricsUtil.java
 
b/nifi-nar-bundles/nifi-extension-utils/nifi-prometheus-utils/src/main/java/org/apache/nifi/prometheus/util/PrometheusMetricsUtil.java
index 0ac5f47..01b91a8 100644
--- 
a/nifi-nar-bundles/nifi-extension-utils/nifi-prometheus-utils/src/main/java/org/apache/nifi/prometheus/util/PrometheusMetricsUtil.java
+++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-prometheus-utils/src/main/java/org/apache/nifi/prometheus/util/PrometheusMetricsUtil.java
@@ -438,28 +438,28 @@ public class PrometheusMetricsUtil {
 final String procComponentName = processorStatus.getName();
 final String parentId = processorStatus.getGroupId();
 
-AMOUNT_FLOWFILES_SENT.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentPGId).set(processorStatus.getFlowFilesSent());
-AMOUNT_FLOWFILES_RECEIVED.labels(instanceId, 
procComponentType, procComponentName, procComponentId, 
parentPGId).set(processorStatus.getFlowFilesReceived());
-AMOUNT_FLOWFILES_REMOVED.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentPGId).set(processorStatus.getFlowFilesRemoved());
+AMOUNT_FLOWFILES_SENT.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentId).set(processorStatus.getFlowFilesSent());
+AMOUNT_FLOWFILES_RECEIVED.labels(instanceId, 
procComponentType, procComponentName, procComponentId, 
parentId).set(processorStatus.getFlowFilesReceived());
+AMOUNT_FLOWFILES_REMOVED.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentId).set(processorStatus.getFlowFilesRemoved());
 
-AMOUNT_BYTES_SENT.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentPGId).set(processorStatus.getBytesSent());
-AMOUNT_BYTES_READ.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentPGId).set(processorStatus.getBytesRead());
-AMOUNT_BYTES_WRITTEN.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentPGId).set(processorStatus.getBytesWritten());
+AMOUNT_BYTES_SENT.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentId).set(processorStatus.getBytesSent());
+AMOUNT_BYTES_READ.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentId).set(processorStatus.getBytesRead());
+AMOUNT_BYTES_WRITTEN.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentId).set(processorStatus.getBytesWritten());
 TOTAL_BYTES_READ.labels(instanceId, procComponentType, 
procComponentName, procComponentId, parentId).inc(status.getBytesRead());
 TOTAL_BYTES_WRITTEN.labels(instanceId, procComponentType, 
procComponentName, procComponentId, parentId).inc(status.getBytesWritten());
-AMOUNT_BYTES_RECEIVED.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentPGId).set(processorStatus.getBytesReceived());
+AMOUNT_BYTES_RECEIVED.labels(instanceId, procComponentType, 
procComponentName, procComponentId, 
parentId).set(processorStatus.getBytesReceived());
 
-SIZE_CONTENT_OUTPUT_TOTAL.labels(instanceId, 
procComponentType, procComponentName, procComponentId, parentPGId, "", "", "", 
"")
+SIZE_CONTENT_OUTPUT_TOTAL.labels(instanceId, 
procComponentType, procComponentName, procComponentId, parentId, "", "", "", "")
 .set(processorStatus.getOutputBytes());
-SIZE_CONTENT_INPUT_TOTAL.labels(instanceId, procComponentType, 
procComponentName, procComponentId, parentPGId, "", "", "", "")
+SIZE_CONTENT_INPUT_TOTAL.labels(instanceId, procComponentType, 
procComponentName, procComponentId, parentId, "", "", "", "")
 .set(processorStatus.getInputBytes());
 
-AMOUNT_ITEMS_OUTPUT.labels(instanceId, procComponentType, 
procComponentName, procComponentId, parentPG

[nifi-registry] branch nifi-registry-0.5.0 created (now 8a1901e)

2020-04-15 Thread thenatog
This is an automated email from the ASF dual-hosted git repository.

thenatog pushed a change to branch nifi-registry-0.5.0
in repository https://gitbox.apache.org/repos/asf/nifi-registry.git.


  at 8a1901e  NIFIREG-312-RC1 prepare release nifi-registry-0.5.0-RC1

No new revisions were added by this update.



[nifi] branch master updated (8340078 -> 2224ace)

2020-04-15 Thread alopresto
This is an automated email from the ASF dual-hosted git repository.

alopresto pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/nifi.git.


from 8340078  NIFI-7292 Preventing file listing from failing because of 
insufficient privileges
 add 2224ace  Added Jira and security reporting links to README.md

No new revisions were added by this update.

Summary of changes:
 README.md | 2 ++
 1 file changed, 2 insertions(+)



[nifi-minifi-cpp] branch master updated: MINIFICPP-1092 - Make CoAP compile and work on Windows

2020-04-15 Thread aboda
This is an automated email from the ASF dual-hosted git repository.

aboda pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/nifi-minifi-cpp.git


The following commit(s) were added to refs/heads/master by this push:
 new 6b1  MINIFICPP-1092 - Make CoAP compile and work on Windows
6b1 is described below

commit 6b1ba7ffb3dae471fda1f2e9649ff33ca14e
Author: Daniel Bakai 
AuthorDate: Thu Nov 28 11:31:10 2019 +0100

MINIFICPP-1092 - Make CoAP compile and work on Windows

Signed-off-by: Arpad Boda 

This closes #687
---
 CMakeLists.txt |  2 +-
 .../{BundledLibCOAP.cmake => BundledLibCoAP.cmake} | 58 +-
 extensions/coap/COAPLoader.cpp | 27 +++
 extensions/coap/COAPLoader.h   | 19 -
 extensions/coap/nanofi/coap_connection.c   |  6 ++
 extensions/coap/nanofi/coap_connection.h   |  1 -
 extensions/coap/nanofi/coap_functions.h|  8 +-
 extensions/coap/protocols/CoapC2Protocol.h |  3 -
 extensions/coap/server/CoapServer.h| 32 
 extensions/coap/tests/CMakeLists.txt   |  2 +-
 extensions/coap/tests/CoapC2VerifyHeartbeat.cpp|  4 +-
 thirdparty/libcoap/libcoap-windows-cmake.patch | 93 ++
 win_build_vs.bat   |  6 +-
 13 files changed, 212 insertions(+), 49 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 87fe4f3..284a876 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -318,7 +318,7 @@ endif()
 
 option(ENABLE_COAP "Enables the CoAP extension." OFF)
 if (ENABLE_ALL OR ENABLE_COAP STREQUAL "ON")
-   include(BundledLibCOAP)
+   include(BundledLibCoAP)
use_bundled_libcoap(${CMAKE_CURRENT_SOURCE_DIR} 
${CMAKE_CURRENT_BINARY_DIR})
createExtension(COAP-EXTENSION "COAP EXTENSIONS" "Enables LibCOAP 
Functionality." "extensions/coap" "extensions/coap/tests/")
if( NOT DISABLE_CURL)
diff --git a/cmake/BundledLibCOAP.cmake b/cmake/BundledLibCoAP.cmake
similarity index 50%
rename from cmake/BundledLibCOAP.cmake
rename to cmake/BundledLibCoAP.cmake
index 7d52d5a..1f09362 100644
--- a/cmake/BundledLibCOAP.cmake
+++ b/cmake/BundledLibCoAP.cmake
@@ -20,30 +20,53 @@
 function(use_bundled_libcoap SOURCE_DIR BINARY_DIR)
 message("Using bundled libcoap")
 
+# Define patch step
+if (WIN32)
+set(PC "${Patch_EXECUTABLE}" -p1 -i 
"${SOURCE_DIR}/thirdparty/libcoap/libcoap-windows-cmake.patch")
+endif()
+
 # Define byproducts
 if (WIN32)
-set(BYPRODUCT "lib/libcoap-2.lib")
+set(BYPRODUCT "lib/coap.lib")
 else()
 set(BYPRODUCT "lib/libcoap-2.a")
 endif()
 
 # Build project
-ExternalProject_Add(
-coap-external
-GIT_REPOSITORY "https://github.com/obgm/libcoap.git";
-GIT_TAG "v4.2.0-rc2"
-BUILD_IN_SOURCE true
-SOURCE_DIR "${BINARY_DIR}/thirdparty/libcoap-src"
-BUILD_COMMAND make
-CMAKE_COMMAND ""
-UPDATE_COMMAND ""
-INSTALL_COMMAND make install
-BUILD_BYPRODUCTS 
"${BINARY_DIR}/thirdparty/libcoap-install/${BYPRODUCT}"
-CONFIGURE_COMMAND ""
-PATCH_COMMAND ./autogen.sh && ./configure --disable-examples 
--disable-dtls --disable-tests --disable-documentation 
--prefix=${BINARY_DIR}/thirdparty/libcoap-install
-STEP_TARGETS build
-EXCLUDE_FROM_ALL TRUE
-)
+set(LIBCOAP_URL https://github.com/obgm/libcoap/archive/v4.2.1.tar.gz)
+set(LIBCOAP_URL_HASH 
"SHA256=29a0394a265d3febee41e5e2dc03d34292a0aede37f5f80334e529ac0dab2321")
+
+if (WIN32)
+set(LIBCOAP_CMAKE_ARGS ${PASSTHROUGH_CMAKE_ARGS}
+
"-DCMAKE_INSTALL_PREFIX=${BINARY_DIR}/thirdparty/libcoap-install")
+
+ExternalProject_Add(
+coap-external
+URL ${LIBCOAP_URL}
+URL_HASH ${LIBCOAP_URL_HASH}
+CMAKE_ARGS ${LIBCOAP_CMAKE_ARGS}
+PATCH_COMMAND ${PC}
+BUILD_BYPRODUCTS 
"${BINARY_DIR}/thirdparty/libcoap-install/${BYPRODUCT}"
+EXCLUDE_FROM_ALL TRUE
+)
+else()
+ExternalProject_Add(
+coap-external
+URL ${LIBCOAP_URL}
+URL_HASH ${LIBCOAP_URL_HASH}
+BUILD_IN_SOURCE true
+SOURCE_DIR "${BINARY_DIR}/thirdparty/libcoap-src"
+BUILD_COMMAND make
+CMAKE_COMMAND ""
+UPDATE_COMMAND ""
+INSTALL_COMMAND make install
+BUILD_BYPRODUCTS 
"${BINARY_DIR}/thirdparty/libcoap-install/${BYPRODUCT}"
+CONFIGURE_COMMAND ""
+PATCH_COMMAND ./autogen.sh && ./configure --disable-examples 
--disable-dtls --disable-tests --disable-documentation 
--prefix=${BINARY_DIR}/thirdparty/libcoap-install
+

[nifi] branch master updated: NIFI-7292 Preventing file listing from failing because of insufficient privileges

2020-04-15 Thread pvillard
This is an automated email from the ASF dual-hosted git repository.

pvillard pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/master by this push:
 new 8340078  NIFI-7292 Preventing file listing from failing because of 
insufficient privileges
8340078 is described below

commit 83400789f6a8fc1edcbf0d622ae7063cb30e738d
Author: Bence Simon 
AuthorDate: Wed Apr 8 18:32:19 2020 +0200

NIFI-7292 Preventing file listing from failing because of insufficient 
privileges

Signed-off-by: Pierre Villard 

This closes #4195.
---
 .../apache/nifi/processors/standard/ListFile.java  |  82 
 .../nifi/processors/standard/TestListFile.java | 144 -
 2 files changed, 143 insertions(+), 83 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
index 42e2d3f..1b6639f 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
@@ -48,8 +48,11 @@ import org.apache.nifi.util.Tuple;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.AccessDeniedException;
 import java.nio.file.FileStore;
 import java.nio.file.FileVisitOption;
+import java.nio.file.FileVisitResult;
+import java.nio.file.FileVisitor;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -68,6 +71,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -80,8 +84,6 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.BiPredicate;
 import java.util.function.Supplier;
 import java.util.regex.Pattern;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 import static 
org.apache.nifi.expression.ExpressionLanguageScope.VARIABLE_REGISTRY;
 import static 
org.apache.nifi.processor.util.StandardValidators.POSITIVE_INTEGER_VALIDATOR;
@@ -547,31 +549,65 @@ public class ListFile extends 
AbstractListProcessor {
 }
 };
 
-final Stream inputStream = getPathStream(basePath, maxDepth, 
matcher);
+try {
+final long start = System.currentTimeMillis();
+final List result = new LinkedList<>();
 
-final Stream listing = inputStream.map(p -> {
-File file = p.toFile();
-BasicFileAttributes attributes = lastModifiedMap.get(p);
+Files.walkFileTree(basePath, 
Collections.singleton(FileVisitOption.FOLLOW_LINKS), maxDepth, new 
FileVisitor() {
+@Override
+public FileVisitResult preVisitDirectory(final Path dir, final 
BasicFileAttributes attributes) throws IOException {
+if (Files.isReadable(dir)) {
+return FileVisitResult.CONTINUE;
+} else {
+getLogger().debug("The following directory is not 
readable: {}", new Object[] {dir.toString()});
+return FileVisitResult.SKIP_SUBTREE;
+}
+}
 
-final FileInfo fileInfo = new FileInfo.Builder()
-.directory(false)
-.filename(file.getName())
-.fullPathFileName(file.getAbsolutePath())
-.lastModifiedTime(attributes.lastModifiedTime().toMillis())
-.size(attributes.size())
-.build();
+@Override
+public FileVisitResult visitFile(final Path path, final 
BasicFileAttributes attributes) throws IOException {
+if (matcher.test(path, attributes)) {
+final File file = path.toFile();
+final BasicFileAttributes fileAttributes = 
lastModifiedMap.get(path);
+final FileInfo fileInfo = new FileInfo.Builder()
+.directory(false)
+.filename(file.getName())
+.fullPathFileName(file.getAbsolutePath())
+
.lastModifiedTime(fileAttributes.lastModifiedTime().toMillis())
+.size(fileAttributes.size())
+.build();
+
+result.add(fileInfo);
+}
 
-return fileInfo;
-});
+return FileVisitResult.CONTINUE;
+}
+
+@Override
+ 

[nifi] branch master updated: [NIFI-7358] - Fix: Sorting on 'Estimated Time to Back Pressure' in the Connection summary table does not work properly - fix style issues - review feedback

2020-04-15 Thread mcgilman
This is an automated email from the ASF dual-hosted git repository.

mcgilman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/master by this push:
 new c263daf  [NIFI-7358] - Fix: Sorting on 'Estimated Time to Back 
Pressure' in the Connection summary table does not work properly - fix style 
issues - review feedback
c263daf is described below

commit c263daf20ba534aaca435826b21435c529a05505
Author: Rob Fellows 
AuthorDate: Mon Apr 13 18:17:48 2020 -0400

[NIFI-7358] - Fix: Sorting on 'Estimated Time to Back Pressure' in the 
Connection summary table does not work properly
- fix style issues
- review feedback

This closes #4208
---
 .../main/webapp/js/nf/summary/nf-summary-table.js  | 38 ++
 1 file changed, 25 insertions(+), 13 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/summary/nf-summary-table.js
 
b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/summary/nf-summary-table.js
index 3585356..0bb87cf 100644
--- 
a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/summary/nf-summary-table.js
+++ 
b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/summary/nf-summary-table.js
@@ -2301,6 +2301,29 @@
 
 var sortState = {};
 
+var getMinTimeToBackPressure = function (connection) {
+var maxCurrentUsage = Math.max(_.get(connection, 'percentUseBytes', 
0), _.get(connection, 'percentUseCount', 0));
+
+if (maxCurrentUsage >= 100) {
+// currently experiencing back pressure
+return 0;
+}
+
+var bytesPrediction = _.get(connection, 
'predictions.predictedMillisUntilBytesBackpressure', -1);
+var countPrediction = _.get(connection, 
'predictions.predictedMillisUntilCountBackpressure', -1);
+
+if (bytesPrediction < 0) {
+// bytes prediction is unknown. return the count prediction if 
known, otherwise use the max
+return countPrediction < 0 ? Number.MAX_VALUE : countPrediction;
+} else if (countPrediction < 0) {
+// count prediction is unknown but we know bytes prediction is 
known, return that
+return bytesPrediction;
+}
+
+// if we get here, both predictions are known. return the minimum of 
the two
+return Math.min(bytesPrediction, countPrediction);
+}
+
 /**
  * Sorts the specified data using the specified sort details.
  *
@@ -2361,19 +2384,8 @@
 return aPercentUseDataSize - bPercentUseDataSize;
 }
 } else if (sortDetails.columnId === 'backpressurePrediction') {
-// if the connection is at backpressure currently, "now" 
displays and not the estimate. Should account for that when sorting.
-var aMaxCurrentUsage = Math.max(_.get(a, 'percentUseBytes', 
0), _.get(a, 'percentUseCount', 0));
-var bMaxCurrentUsage = Math.max(_.get(b, 'percentUseBytes', 
0), _.get(b, 'percentUseCount', 0));
-
-var aMinTime = Math.min(_.get(a, 
'predictions.predictedMillisUntilBytesBackpressure', Number.MAX_VALUE), 
_.get(a, 'predictions.predictedMillisUntilCountBackpressure', 
Number.MAX_VALUE));
-var bMinTime = Math.min(_.get(b, 
'predictions.predictedMillisUntilBytesBackpressure', Number.MAX_VALUE), 
_.get(b, 'predictions.predictedMillisUntilCountBackpressure', 
Number.MAX_VALUE));
-
-if (aMaxCurrentUsage >= 100) {
-aMinTime = 0;
-}
-if (bMaxCurrentUsage >= 100) {
-bMinTime = 0;
-}
+var aMinTime = getMinTimeToBackPressure(a);
+var bMinTime = getMinTimeToBackPressure(b);
 
 return aMinTime - bMinTime;
 } else if (sortDetails.columnId === 'sent' || sortDetails.columnId 
=== 'received' || sortDetails.columnId === 'input' || sortDetails.columnId === 
'output' || sortDetails.columnId === 'transferred') {



[nifi-minifi-cpp] branch master updated: MINIFICPP-1096 appveyor ctest output-on-failure

2020-04-15 Thread aboda
This is an automated email from the ASF dual-hosted git repository.

aboda pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/nifi-minifi-cpp.git


The following commit(s) were added to refs/heads/master by this push:
 new af2f302  MINIFICPP-1096 appveyor ctest output-on-failure
af2f302 is described below

commit af2f302b03edb7226f63847baf607b844f8c5050
Author: Marton Szasz 
AuthorDate: Wed Apr 15 11:16:34 2020 +0200

MINIFICPP-1096 appveyor ctest output-on-failure

Signed-off-by: Arpad Boda 

This closes #756
---
 win_build_vs.bat | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/win_build_vs.bat b/win_build_vs.bat
index 2ffef3d..095cb35 100644
--- a/win_build_vs.bat
+++ b/win_build_vs.bat
@@ -79,7 +79,7 @@ if [%cpack%] EQU [ON] (
IF !ERRORLEVEL! NEQ 0 ( popd & exit /b !ERRORLEVEL! )
 )
 if [%skiptests%] NEQ [ON] ( 
-   ctest -C %cmake_build_type%
+   ctest -C %cmake_build_type% --output-on-failure
IF !ERRORLEVEL! NEQ 0 ( popd & exit /b !ERRORLEVEL! )
 )
 popd



[nifi] branch master updated: NIFI-6977 - Change the reporting behavior of Azure Reporting task to report the time when metrics are generated

2020-04-15 Thread pvillard
This is an automated email from the ASF dual-hosted git repository.

pvillard pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/master by this push:
 new 9df53e7  NIFI-6977 - Change the reporting behavior of Azure Reporting 
task to report the time when metrics are generated
9df53e7 is described below

commit 9df53e7204789ff84fc4ed7bd4ed4675588a21d9
Author: sjyang18 
AuthorDate: Thu Jan 9 19:39:36 2020 +

NIFI-6977 - Change the reporting behavior of Azure Reporting task to report 
the time when metrics are generated

Signed-off-by: Pierre Villard 

This closes #4211.
---
 .../loganalytics/AbstractAzureLogAnalyticsReportingTask.java |  6 +-
 .../loganalytics/TestAzureLogAnalyticsReportingTask.java | 12 
 2 files changed, 13 insertions(+), 5 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/main/java/org/apache/nifi/reporting/azure/loganalytics/AbstractAzureLogAnalyticsReportingTask.java
 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/main/java/org/apache/nifi/reporting/azure/loganalytics/AbstractAzureLogAnalyticsReportingTask.java
index 245e6bb..1017ee5 100644
--- 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/main/java/org/apache/nifi/reporting/azure/loganalytics/AbstractAzureLogAnalyticsReportingTask.java
+++ 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/main/java/org/apache/nifi/reporting/azure/loganalytics/AbstractAzureLogAnalyticsReportingTask.java
@@ -133,11 +133,15 @@ public abstract class 
AbstractAzureLogAnalyticsReportingTask extends AbstractRep
 
 protected void sendToLogAnalytics(final HttpPost request, final String 
workspaceId, final String linuxPrimaryKey,
 final String rawJson) throws IllegalArgumentException, 
RuntimeException, IOException {
+
 final int bodyLength = rawJson.getBytes(UTF8).length;
-final String nowRfc1123 = 
RFC_1123_DATE_TIME.format(ZonedDateTime.now(ZoneOffset.UTC));
+final ZonedDateTime zNow = ZonedDateTime.now(ZoneOffset.UTC);
+final String nowRfc1123 = 
zNow.format(DateTimeFormatter.RFC_1123_DATE_TIME);
+final String nowISO8601 = zNow.format(DateTimeFormatter.ISO_DATE_TIME);
 final String createAuthorization = createAuthorization(workspaceId, 
linuxPrimaryKey, bodyLength, nowRfc1123);
 request.addHeader("Authorization", createAuthorization);
 request.addHeader("x-ms-date", nowRfc1123);
+request.addHeader("time-generated-field", nowISO8601);
 request.setEntity(new StringEntity(rawJson));
 try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
 postRequest(httpClient, request);
diff --git 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsReportingTask.java
 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsReportingTask.java
index b8c4c17..1dbefa1 100644
--- 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsReportingTask.java
+++ 
b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsReportingTask.java
@@ -213,9 +213,13 @@ public class TestAzureLogAnalyticsReportingTask {
 testedReportingTask.onTrigger(reportingContextStub);
 
 HttpPost postRequest = testedReportingTask.getPostRequest();
-ArgumentCaptor captor = ArgumentCaptor.forClass(String.class);
-verify(postRequest, atLeast(1)).addHeader( eq("Authorization"), 
captor.capture());
-assertTrue(captor.getValue().contains("SharedKey"));
+ArgumentCaptor captorAuthorization = 
ArgumentCaptor.forClass(String.class);
+ArgumentCaptor captorXMsDate = 
ArgumentCaptor.forClass(String.class);
+ArgumentCaptor captorTimeGeneratedField = 
ArgumentCaptor.forClass(String.class);
+verify(postRequest, atLeast(1)).addHeader( eq("Authorization"), 
captorAuthorization.capture());
+verify(postRequest, atLeast(1)).addHeader( eq("x-ms-date"), 
captorXMsDate.capture());
+verify(postRequest, atLeast(1)).addHeader( eq("time-generated-field"), 
captorTimeGeneratedField.capture());
+assertTrue(captorAuthorization.getValue().contains("SharedKey"));
 }
 
 
@@ -252,4 +256,4 @@ public class TestAzureLogAnalyticsReportingTask {
 super.postRequest(mockClient, request);
 }
 }
-}
\ No newline at end of file
+}



[nifi] branch master updated: NIFI-7347: Fixed NullPointerException that can happen if a bin is merged due to timeout and has no records

2020-04-15 Thread pvillard
This is an automated email from the ASF dual-hosted git repository.

pvillard pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/master by this push:
 new a61a4c2  NIFI-7347: Fixed NullPointerException that can happen if a 
bin is merged due to timeout and has no records
a61a4c2 is described below

commit a61a4c2b5878f53777df8df1dd91a78629ba23bb
Author: Mark Payne 
AuthorDate: Tue Apr 14 17:08:49 2020 -0400

NIFI-7347: Fixed NullPointerException that can happen if a bin is merged 
due to timeout and has no records

Signed-off-by: Pierre Villard 

This closes #4210.
---
 .../nifi/processors/standard/merge/RecordBin.java  | 18 +-
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java
 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java
index c3dbbaa..b49fc2a 100644
--- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java
+++ 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java
@@ -124,18 +124,18 @@ public class RecordBin {
 
 logger.debug("Migrating id={} to {}", new Object[] 
{flowFile.getId(), this});
 
-Record record;
-while ((record = recordReader.nextRecord()) != null) {
-if (recordWriter == null) {
-final OutputStream rawOut = session.write(merged);
-logger.debug("Created OutputStream using session {} for 
{}", new Object[] {session, this});
+if (recordWriter == null) {
+final OutputStream rawOut = session.write(merged);
+logger.debug("Created OutputStream using session {} for {}", 
new Object[] {session, this});
 
-this.out = new ByteCountingOutputStream(rawOut);
+this.out = new ByteCountingOutputStream(rawOut);
 
-recordWriter = writerFactory.createWriter(logger, 
record.getSchema(), out, flowFile);
-recordWriter.beginRecordSet();
-}
+recordWriter = writerFactory.createWriter(logger, 
recordReader.getSchema(), out, flowFile);
+recordWriter.beginRecordSet();
+}
 
+Record record;
+while ((record = recordReader.nextRecord()) != null) {
 recordWriter.write(record);
 recordCount++;
 }