[jira] [Commented] (NIFI-833) Add an "Undo" to the User Interface
[ https://issues.apache.org/jira/browse/NIFI-833?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359985#comment-15359985 ] Alexander Kell commented on NIFI-833: - I also agree with this feature wish , nifi is a great tool , but a little bit more comfort functionality would be really nice and helps to improve the efficiency by working with nifi . Especially when you are building big Dataflows, without an undo functionality it is really a pain. > Add an "Undo" to the User Interface > --- > > Key: NIFI-833 > URL: https://issues.apache.org/jira/browse/NIFI-833 > Project: Apache NiFi > Issue Type: Improvement > Components: Core UI >Reporter: John Titus > > As a NiFi user, I'd like to be able to quickly undo an action, or series of > actions, in case I accidentally delete one or more processors while editing > them. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (MINIFI-41) Command Line Configuration Utility
[ https://issues.apache.org/jira/browse/MINIFI-41?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359955#comment-15359955 ] ASF GitHub Bot commented on MINIFI-41: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi-minifi/pull/21#discussion_r69371933 --- Diff: minifi-toolkit/minifi-toolkit-configuration/src/main/java/org/apache/nifi/minifi/toolkit/configuration/ConfigMain.java --- @@ -0,0 +1,342 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.nifi.minifi.toolkit.configuration; + +import org.apache.nifi.controller.Template; +import org.apache.nifi.minifi.commons.schema.ConfigSchema; +import org.apache.nifi.minifi.commons.schema.SecurityPropertiesSchema; +import org.apache.nifi.minifi.commons.schema.common.BaseSchema; +import org.apache.nifi.minifi.commons.schema.serialization.SchemaLoader; +import org.apache.nifi.minifi.commons.schema.serialization.SchemaSaver; +import org.apache.nifi.minifi.commons.schema.exception.SchemaLoaderException; +import org.apache.nifi.ssl.StandardSSLContextService; +import org.apache.nifi.web.api.dto.ConnectableDTO; +import org.apache.nifi.web.api.dto.ConnectionDTO; +import org.apache.nifi.web.api.dto.ControllerServiceDTO; +import org.apache.nifi.web.api.dto.FlowSnippetDTO; +import org.apache.nifi.web.api.dto.NiFiComponentDTO; +import org.apache.nifi.web.api.dto.PortDTO; +import org.apache.nifi.web.api.dto.ProcessorDTO; +import org.apache.nifi.web.api.dto.RemoteProcessGroupContentsDTO; +import org.apache.nifi.web.api.dto.RemoteProcessGroupDTO; +import org.apache.nifi.web.api.dto.RemoteProcessGroupPortDTO; +import org.apache.nifi.web.api.dto.TemplateDTO; + +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class ConfigMain { +public static final int ERR_INVALID_ARGS = 1; +public static final int ERR_UNABLE_TO_OPEN_OUTPUT = 2; +public static final int ERR_UNABLE_TO_OPEN_INPUT = 3; +public static final int ERR_UNABLE_TO_READ_TEMPLATE = 4; +public static final int 
ERR_UNABLE_TO_TRANFORM_TEMPLATE = 5; +public static final int ERR_UNABLE_TO_PARSE_CONFIG = 6; +public static final int ERR_INVALID_CONFIG = 7; + +public static final int SUCCESS = 0; + +public static final String TRANSFORM = "transform"; +public static final String VALIDATE = "validate"; + +private final MapcommandMap; +private final PathInputStreamFactory pathInputStreamFactory; +private final PathOutputStreamFactory pathOutputStreamFactory; + +public ConfigMain() { +this(FileInputStream::new, FileOutputStream::new); +} + +public ConfigMain(PathInputStreamFactory pathInputStreamFactory, PathOutputStreamFactory pathOutputStreamFactory) { +this.pathInputStreamFactory = pathInputStreamFactory; +this.pathOutputStreamFactory = pathOutputStreamFactory; +this.commandMap = createCommandMap(); +} + +public static void main(String[] args) { +System.exit(new ConfigMain().execute(args)); +} + +public static void printValidateUsage() { +System.out.println("Validate Usage:"); +System.out.println(); +System.out.print("java "); +System.out.print(ConfigMain.class.getCanonicalName()); +
[jira] [Commented] (NIFI-2165) PutCassandraQL is handling timestamps as strings
[ https://issues.apache.org/jira/browse/NIFI-2165?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359916#comment-15359916 ] ASF GitHub Bot commented on NIFI-2165: -- Github user jeffoxenberg commented on the issue: https://github.com/apache/nifi/pull/602 Thanks for working with me on this. The real flow's value was double quoted, it was parsed from JSON. [Here's](https://gist.github.com/jeffoxenberg/6f12bb11b7359e10180712500cb528e7) the flow that I originally got the error from and here's a sample of the json message that nifi would consume from kafka: {"senstype": "temp3", "ts": "2016-07-01T20:52:46Z", "value": 8.72}. It also gave me an error when I used UpdateAttribute to insert the values directly instead of using variables from parsed JSON. > PutCassandraQL is handling timestamps as strings > > > Key: NIFI-2165 > URL: https://issues.apache.org/jira/browse/NIFI-2165 > Project: Apache NiFi > Issue Type: Bug > Components: Extensions >Affects Versions: 1.0.0, 0.6.1 > Environment: Ubuntu 14.04, openjdk version "1.8.0_91" >Reporter: Jeff Oxenberg >Priority: Minor > Labels: cassandra, putcassandraql, timestamp > Fix For: 1.0.0 > > Original Estimate: 1h > Remaining Estimate: 1h > > PutCassandraQL fails when attempting to insert a (iso-8601-formatted) > timestamp on both Nifi 0.6 and 1.0 due to handling timestamps as strings. > Input: > FlowFile Attribute Map Content > Key: 'cql.args.1.type' > Value: 'text' > Key: 'cql.args.1.value' > Value: 'temp3' > Key: 'cql.args.2.type' > Value: 'timestamp' > Key: 'cql.args.2.value' > Value: '2016-06-30T20:04:36Z' > Key: 'cql.args.3.type' > Value: 'float' > Key: 'cql.args.3.value' > Value: '6.7' > Key: 'j.id' > Value: 'temp3' > Key: 'j.ts' > Value: '2016-06-30T20:04:36Z' > Key: 'j.value' > Value: '6.7' > -- > INSERT INTO test.test2 (sensor, ts, value) VALUES(?,?,?) 
> Output: > com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found > for requested operation: [timestamp <-> java.lang.String] > Record does not get inserted into Cassandra > Expected output: > Record gets inserted into Cassandra -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2165) PutCassandraQL is handling timestamps as strings
[ https://issues.apache.org/jira/browse/NIFI-2165?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359900#comment-15359900 ] ASF GitHub Bot commented on NIFI-2165: -- Github user mattyb149 commented on the issue: https://github.com/apache/nifi/pull/602 Can you put your template in a Gist? I'll give it a try too > PutCassandraQL is handling timestamps as strings > > > Key: NIFI-2165 > URL: https://issues.apache.org/jira/browse/NIFI-2165 > Project: Apache NiFi > Issue Type: Bug > Components: Extensions >Affects Versions: 1.0.0, 0.6.1 > Environment: Ubuntu 14.04, openjdk version "1.8.0_91" >Reporter: Jeff Oxenberg >Priority: Minor > Labels: cassandra, putcassandraql, timestamp > Fix For: 1.0.0 > > Original Estimate: 1h > Remaining Estimate: 1h > > PutCassandraQL fails when attempting to insert a (iso-8601-formatted) > timestamp on both Nifi 0.6 and 1.0 due to handling timestamps as strings. > Input: > FlowFile Attribute Map Content > Key: 'cql.args.1.type' > Value: 'text' > Key: 'cql.args.1.value' > Value: 'temp3' > Key: 'cql.args.2.type' > Value: 'timestamp' > Key: 'cql.args.2.value' > Value: '2016-06-30T20:04:36Z' > Key: 'cql.args.3.type' > Value: 'float' > Key: 'cql.args.3.value' > Value: '6.7' > Key: 'j.id' > Value: 'temp3' > Key: 'j.ts' > Value: '2016-06-30T20:04:36Z' > Key: 'j.value' > Value: '6.7' > -- > INSERT INTO test.test2 (sensor, ts, value) VALUES(?,?,?) > Output: > com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found > for requested operation: [timestamp <-> java.lang.String] > Record does not get inserted into Cassandra > Expected output: > Record gets inserted into Cassandra -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2165) PutCassandraQL is handling timestamps as strings
[ https://issues.apache.org/jira/browse/NIFI-2165?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359899#comment-15359899 ] ASF GitHub Bot commented on NIFI-2165: -- Github user mattyb149 commented on the issue: https://github.com/apache/nifi/pull/602 Were there quotes (single or double) in the real flow's value? > PutCassandraQL is handling timestamps as strings > > > Key: NIFI-2165 > URL: https://issues.apache.org/jira/browse/NIFI-2165 > Project: Apache NiFi > Issue Type: Bug > Components: Extensions >Affects Versions: 1.0.0, 0.6.1 > Environment: Ubuntu 14.04, openjdk version "1.8.0_91" >Reporter: Jeff Oxenberg >Priority: Minor > Labels: cassandra, putcassandraql, timestamp > Fix For: 1.0.0 > > Original Estimate: 1h > Remaining Estimate: 1h > > PutCassandraQL fails when attempting to insert a (iso-8601-formatted) > timestamp on both Nifi 0.6 and 1.0 due to handling timestamps as strings. > Input: > FlowFile Attribute Map Content > Key: 'cql.args.1.type' > Value: 'text' > Key: 'cql.args.1.value' > Value: 'temp3' > Key: 'cql.args.2.type' > Value: 'timestamp' > Key: 'cql.args.2.value' > Value: '2016-06-30T20:04:36Z' > Key: 'cql.args.3.type' > Value: 'float' > Key: 'cql.args.3.value' > Value: '6.7' > Key: 'j.id' > Value: 'temp3' > Key: 'j.ts' > Value: '2016-06-30T20:04:36Z' > Key: 'j.value' > Value: '6.7' > -- > INSERT INTO test.test2 (sensor, ts, value) VALUES(?,?,?) > Output: > com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found > for requested operation: [timestamp <-> java.lang.String] > Record does not get inserted into Cassandra > Expected output: > Record gets inserted into Cassandra -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2165) PutCassandraQL is handling timestamps as strings
[ https://issues.apache.org/jira/browse/NIFI-2165?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359863#comment-15359863 ] ASF GitHub Bot commented on NIFI-2165: -- Github user mattyb149 commented on the issue: https://github.com/apache/nifi/pull/602 I cherry-picked in the commits that added unit tests, to show that they would fail without your fix, however they pass. Any idea how the tests differ from the error that spawned the Jira? > PutCassandraQL is handling timestamps as strings > > > Key: NIFI-2165 > URL: https://issues.apache.org/jira/browse/NIFI-2165 > Project: Apache NiFi > Issue Type: Bug > Components: Extensions >Affects Versions: 1.0.0, 0.6.1 > Environment: Ubuntu 14.04, openjdk version "1.8.0_91" >Reporter: Jeff Oxenberg >Priority: Minor > Labels: cassandra, putcassandraql, timestamp > Fix For: 1.0.0 > > Original Estimate: 1h > Remaining Estimate: 1h > > PutCassandraQL fails when attempting to insert a (iso-8601-formatted) > timestamp on both Nifi 0.6 and 1.0 due to handling timestamps as strings. > Input: > FlowFile Attribute Map Content > Key: 'cql.args.1.type' > Value: 'text' > Key: 'cql.args.1.value' > Value: 'temp3' > Key: 'cql.args.2.type' > Value: 'timestamp' > Key: 'cql.args.2.value' > Value: '2016-06-30T20:04:36Z' > Key: 'cql.args.3.type' > Value: 'float' > Key: 'cql.args.3.value' > Value: '6.7' > Key: 'j.id' > Value: 'temp3' > Key: 'j.ts' > Value: '2016-06-30T20:04:36Z' > Key: 'j.value' > Value: '6.7' > -- > INSERT INTO test.test2 (sensor, ts, value) VALUES(?,?,?) > Output: > com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found > for requested operation: [timestamp <-> java.lang.String] > Record does not get inserted into Cassandra > Expected output: > Record gets inserted into Cassandra -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-1899) Create ListenSMTP & ExtractEmailAttachment processors
[ https://issues.apache.org/jira/browse/NIFI-1899?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359798#comment-15359798 ] ASF GitHub Bot commented on NIFI-1899: -- Github user trixpan commented on the issue: https://github.com/apache/nifi/pull/483 @joewitt According to to https://mvnrepository.com/artifact/org.subethamail/subethasmtp/3.1.7 The only dependencies should be ![image](https://cloud.githubusercontent.com/assets/3108527/16536265/e92eb7e4-4032-11e6-99e5-3551e48f9687.png) From what I guess (no further dependencies are added). [jsr305 is used in kite-bundle](https://github.com/apache/nifi/blob/c4d06f203d204d4a3128e1b997144edcd82e48a5/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/pom.xml#L55) > Create ListenSMTP & ExtractEmailAttachment processors > - > > Key: NIFI-1899 > URL: https://issues.apache.org/jira/browse/NIFI-1899 > Project: Apache NiFi > Issue Type: New Feature >Reporter: Andre > -- This message was sent by Atlassian JIRA (v6.3.4#6332)
svn commit: r1751013 - /nifi/site/trunk/mailing_lists.html
Author: joewitt Date: Fri Jul 1 23:03:22 2016 New Revision: 1751013 URL: http://svn.apache.org/viewvc?rev=1751013=rev Log: added issues mailing list Modified: nifi/site/trunk/mailing_lists.html Modified: nifi/site/trunk/mailing_lists.html URL: http://svn.apache.org/viewvc/nifi/site/trunk/mailing_lists.html?rev=1751013=1751012=1751013=diff == --- nifi/site/trunk/mailing_lists.html (original) +++ nifi/site/trunk/mailing_lists.html Fri Jul 1 23:03:22 2016 @@ -125,6 +125,7 @@ Unsubscribe Post Archive + Purpose Users Mailing List @@ -132,6 +133,7 @@ mailto:users-unsubscr...@nifi.apache.org;>Unsubscribe mailto:us...@nifi.apache.org;>Post http://mail-archives.apache.org/mod_mbox/nifi-users/;>mail-archives.apache.org + For nifi users to get help, ask questions, report bugs Developers Mailing List @@ -139,6 +141,7 @@ mailto:dev-unsubscr...@nifi.apache.org;>Unsubscribe mailto:d...@nifi.apache.org;>Post http://mail-archives.apache.org/mod_mbox/nifi-dev/;>mail-archives.apache.org + For developers to discuss features, bugs, share ideas Commits Mailing List @@ -146,6 +149,15 @@ mailto:commits-unsubscr...@nifi.apache.org;>Unsubscribe mailto:commits@nifi.apache.org;>Post http://mail-archives.apache.org/mod_mbox/nifi-commits/;>mail-archives.apache.org + Automated e-mails each time code commits occur + + +Issues Mailing List +mailto:issues-subscr...@nifi.apache.org;>Subscribe +mailto:issues-unsubscr...@nifi.apache.org;>Unsubscribe +mailto:iss...@nifi.apache.org;>Post +http://mail-archives.apache.org/mod_mbox/nifi-issues/;>mail-archives.apache.org +Automated e-mails each time comments are made in JIRA or Github
nifi-site git commit: Added entry for issues mailing list
Repository: nifi-site Updated Branches: refs/heads/master c78ef2491 -> 0c0a56226 Added entry for issues mailing list Project: http://git-wip-us.apache.org/repos/asf/nifi-site/repo Commit: http://git-wip-us.apache.org/repos/asf/nifi-site/commit/0c0a5622 Tree: http://git-wip-us.apache.org/repos/asf/nifi-site/tree/0c0a5622 Diff: http://git-wip-us.apache.org/repos/asf/nifi-site/diff/0c0a5622 Branch: refs/heads/master Commit: 0c0a562266d7439ad6e30a9a99d699566719ed38 Parents: c78ef24 Author: joewittAuthored: Fri Jul 1 16:02:38 2016 -0700 Committer: joewitt Committed: Fri Jul 1 16:02:38 2016 -0700 -- src/pages/html/mailing_lists.hbs | 12 1 file changed, 12 insertions(+) -- http://git-wip-us.apache.org/repos/asf/nifi-site/blob/0c0a5622/src/pages/html/mailing_lists.hbs -- diff --git a/src/pages/html/mailing_lists.hbs b/src/pages/html/mailing_lists.hbs index e522c96..d437f26 100644 --- a/src/pages/html/mailing_lists.hbs +++ b/src/pages/html/mailing_lists.hbs @@ -28,6 +28,7 @@ title: Apache NiFi Mailing Lists Unsubscribe Post Archive + Purpose Users Mailing List @@ -35,6 +36,7 @@ title: Apache NiFi Mailing Lists mailto:users-unsubscr...@nifi.apache.org;>Unsubscribe mailto:us...@nifi.apache.org;>Post http://mail-archives.apache.org/mod_mbox/nifi-users/;>mail-archives.apache.org + For nifi users to get help, ask questions, report bugs Developers Mailing List @@ -42,6 +44,7 @@ title: Apache NiFi Mailing Lists mailto:dev-unsubscr...@nifi.apache.org;>Unsubscribe mailto:d...@nifi.apache.org;>Post http://mail-archives.apache.org/mod_mbox/nifi-dev/;>mail-archives.apache.org + For developers to discuss features, bugs, share ideas Commits Mailing List @@ -49,6 +52,15 @@ title: Apache NiFi Mailing Lists mailto:commits-unsubscr...@nifi.apache.org;>Unsubscribe mailto:commits@nifi.apache.org;>Post http://mail-archives.apache.org/mod_mbox/nifi-commits/;>mail-archives.apache.org + Automated e-mails each time code commits occur + + +Issues Mailing List 
+mailto:issues-subscr...@nifi.apache.org;>Subscribe +mailto:issues-unsubscr...@nifi.apache.org;>Unsubscribe +mailto:iss...@nifi.apache.org;>Post +http://mail-archives.apache.org/mod_mbox/nifi-issues/;>mail-archives.apache.org +Automated e-mails each time comments are made in JIRA or Github
[jira] [Commented] (NIFI-1899) Create ListenSMTP & ExtractEmailAttachment processors
[ https://issues.apache.org/jira/browse/NIFI-1899?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359785#comment-15359785 ] ASF GitHub Bot commented on NIFI-1899: -- Github user trixpan commented on a diff in the pull request: https://github.com/apache/nifi/pull/483#discussion_r69364210 --- Diff: nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/test/resources/attachment-only.eml --- @@ -0,0 +1,156 @@ +Date: Fri, 29 Jul 2011 09:41:11 +0200 (CET) +From: Siegfried GOESCHL+Subject: Kunde 100029 Auftrag 3600 +To: +Reply-To: Siegfried GOESCHL +Message-ID: +MIME-Version: 1.0 +Importance: Normal +X-Priority: 3 (Normal) +X-Mailer: SAP Web Application Server 7.00 +Content-Type: application/pdf; + name="Kunde 100029 Auftrag 3600.pdf" +Content-Transfer-Encoding: base64 +Content-Description: Kunde 100029 Auftrag 3600 --- End diff -- @joewitt Agreed. In a previous commit I changed the junit so that we generate the email dynamically. The files have been deleted and references to them removed from the `pom.xml` > Create ListenSMTP & ExtractEmailAttachment processors > - > > Key: NIFI-1899 > URL: https://issues.apache.org/jira/browse/NIFI-1899 > Project: Apache NiFi > Issue Type: New Feature >Reporter: Andre > -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-1899) Create ListenSMTP & ExtractEmailAttachment processors
[ https://issues.apache.org/jira/browse/NIFI-1899?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359772#comment-15359772 ] ASF GitHub Bot commented on NIFI-1899: -- Github user joewitt commented on the issue: https://github.com/apache/nifi/pull/483 For subthasmtp it looks like all that would be needed is to add to our nifi-assembly/NOTICE an entry such as (ASLv2) subethasmtp The following NOTICE information applies: Copyright (C) 2006-2007 SubEthaMail.org This is assuming you've already verified its transitive dependencies are also accounted for. If we already have all of those in our dependency then nothing more needed. > Create ListenSMTP & ExtractEmailAttachment processors > - > > Key: NIFI-1899 > URL: https://issues.apache.org/jira/browse/NIFI-1899 > Project: Apache NiFi > Issue Type: New Feature >Reporter: Andre > -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-1899) Create ListenSMTP & ExtractEmailAttachment processors
[ https://issues.apache.org/jira/browse/NIFI-1899?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359766#comment-15359766 ] ASF GitHub Bot commented on NIFI-1899: -- Github user joewitt commented on a diff in the pull request: https://github.com/apache/nifi/pull/483#discussion_r69363256 --- Diff: nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/test/resources/attachment-only.eml --- @@ -0,0 +1,156 @@ +Date: Fri, 29 Jul 2011 09:41:11 +0200 (CET) +From: Siegfried GOESCHL+Subject: Kunde 100029 Auftrag 3600 +To: +Reply-To: Siegfried GOESCHL +Message-ID: +MIME-Version: 1.0 +Importance: Normal +X-Priority: 3 (Normal) +X-Mailer: SAP Web Application Server 7.00 +Content-Type: application/pdf; + name="Kunde 100029 Auftrag 3600.pdf" +Content-Transfer-Encoding: base64 +Content-Description: Kunde 100029 Auftrag 3600 --- End diff -- I would advise just creating your own eml file and submitting that. It shoudl contain entirely bogus information. Much simpler than causing license or notice impacts. In this case we would be pulling from another apache project but it just doesn't seem worth it for a test resource like this so let's just make a new .eml that is fake and nifi only. > Create ListenSMTP & ExtractEmailAttachment processors > - > > Key: NIFI-1899 > URL: https://issues.apache.org/jira/browse/NIFI-1899 > Project: Apache NiFi > Issue Type: New Feature >Reporter: Andre > -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Updated] (NIFI-2160) Enabled ControllerServices disabled on restart
[ https://issues.apache.org/jira/browse/NIFI-2160?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Joseph Witt updated NIFI-2160: -- Fix Version/s: 1.0.0 > Enabled ControllerServices disabled on restart > -- > > Key: NIFI-2160 > URL: https://issues.apache.org/jira/browse/NIFI-2160 > Project: Apache NiFi > Issue Type: Bug >Affects Versions: 0.7.0 >Reporter: Brandon DeVries >Assignee: Oleg Zhurakousky >Priority: Critical > Fix For: 1.0.0, 0.7.0 > > > As a result of the fix for NIFI-2032, *previously enabled ControllerServices > become disabled after a restart* if they are not referenced by another > component. However, we use a custom domain specific langauge that can > reference a controller service from a query defined as a custom processor's > property. This means that we use a number of controller service that are > only used in this way (i.e. are never directly referred to by another > component). Upon restart, these are now disabled causing issues with our > flows. > I have not yet stepped through the new enableControllerServices() \[1\] > method to figure out exactly where the issue is coming from, but I wanted to > get the ticket out there and on the radar, as this breaks backwards > compatibility on a feature we heavily rely on. > \[1\] > https://github.com/apache/nifi/blob/0.x/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java#L301-336 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2165) PutCassandraQL is handling timestamps as strings
[ https://issues.apache.org/jira/browse/NIFI-2165?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359695#comment-15359695 ] ASF GitHub Bot commented on NIFI-2165: -- Github user jeffoxenberg commented on the issue: https://github.com/apache/nifi/pull/602 Thanks. I think I got it. > PutCassandraQL is handling timestamps as strings > > > Key: NIFI-2165 > URL: https://issues.apache.org/jira/browse/NIFI-2165 > Project: Apache NiFi > Issue Type: Bug > Components: Extensions >Affects Versions: 1.0.0, 0.6.1 > Environment: Ubuntu 14.04, openjdk version "1.8.0_91" >Reporter: Jeff Oxenberg >Priority: Minor > Labels: cassandra, putcassandraql, timestamp > Fix For: 1.0.0 > > Original Estimate: 1h > Remaining Estimate: 1h > > PutCassandraQL fails when attempting to insert a (iso-8601-formatted) > timestamp on both Nifi 0.6 and 1.0 due to handling timestamps as strings. > Input: > FlowFile Attribute Map Content > Key: 'cql.args.1.type' > Value: 'text' > Key: 'cql.args.1.value' > Value: 'temp3' > Key: 'cql.args.2.type' > Value: 'timestamp' > Key: 'cql.args.2.value' > Value: '2016-06-30T20:04:36Z' > Key: 'cql.args.3.type' > Value: 'float' > Key: 'cql.args.3.value' > Value: '6.7' > Key: 'j.id' > Value: 'temp3' > Key: 'j.ts' > Value: '2016-06-30T20:04:36Z' > Key: 'j.value' > Value: '6.7' > -- > INSERT INTO test.test2 (sensor, ts, value) VALUES(?,?,?) > Output: > com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found > for requested operation: [timestamp <-> java.lang.String] > Record does not get inserted into Cassandra > Expected output: > Record gets inserted into Cassandra -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359669#comment-15359669 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69357965 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or evaluate
[jira] [Commented] (NIFI-2165) PutCassandraQL is handling timestamps as strings
[ https://issues.apache.org/jira/browse/NIFI-2165?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359647#comment-15359647 ] ASF GitHub Bot commented on NIFI-2165: -- Github user mattyb149 commented on the issue: https://github.com/apache/nifi/pull/602 An example could be another test just like that one with cql.args.10.value set to "I'm not a timestamp" or a timestamp not in ISO-8601 format like "07.01.2016". Not saying the operation should succeed, but should be handled appropriately (error logged, flow file transferred to failure, e.g.) > PutCassandraQL is handling timestamps as strings > > > Key: NIFI-2165 > URL: https://issues.apache.org/jira/browse/NIFI-2165 > Project: Apache NiFi > Issue Type: Bug > Components: Extensions >Affects Versions: 1.0.0, 0.6.1 > Environment: Ubuntu 14.04, openjdk version "1.8.0_91" >Reporter: Jeff Oxenberg >Priority: Minor > Labels: cassandra, putcassandraql, timestamp > Fix For: 1.0.0 > > Original Estimate: 1h > Remaining Estimate: 1h > > PutCassandraQL fails when attempting to insert a (iso-8601-formatted) > timestamp on both Nifi 0.6 and 1.0 due to handling timestamps as strings. > Input: > FlowFile Attribute Map Content > Key: 'cql.args.1.type' > Value: 'text' > Key: 'cql.args.1.value' > Value: 'temp3' > Key: 'cql.args.2.type' > Value: 'timestamp' > Key: 'cql.args.2.value' > Value: '2016-06-30T20:04:36Z' > Key: 'cql.args.3.type' > Value: 'float' > Key: 'cql.args.3.value' > Value: '6.7' > Key: 'j.id' > Value: 'temp3' > Key: 'j.ts' > Value: '2016-06-30T20:04:36Z' > Key: 'j.value' > Value: '6.7' > -- > INSERT INTO test.test2 (sensor, ts, value) VALUES(?,?,?) > Output: > com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found > for requested operation: [timestamp <-> java.lang.String] > Record does not get inserted into Cassandra > Expected output: > Record gets inserted into Cassandra -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2165) PutCassandraQL is handling timestamps as strings
[ https://issues.apache.org/jira/browse/NIFI-2165?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359641#comment-15359641 ] ASF GitHub Bot commented on NIFI-2165: -- Github user jeffoxenberg commented on the issue: https://github.com/apache/nifi/pull/602 No problem. I think I added a test for a good value here: `https://github.com/jeffoxenberg/nifi/commit/335e4f08a60989909d2d152f426fa5d46dec9a1b`. Could you give me an example of writing a test for a bad value? Thanks! > PutCassandraQL is handling timestamps as strings > > > Key: NIFI-2165 > URL: https://issues.apache.org/jira/browse/NIFI-2165 > Project: Apache NiFi > Issue Type: Bug > Components: Extensions >Affects Versions: 1.0.0, 0.6.1 > Environment: Ubuntu 14.04, openjdk version "1.8.0_91" >Reporter: Jeff Oxenberg >Priority: Minor > Labels: cassandra, putcassandraql, timestamp > Fix For: 1.0.0 > > Original Estimate: 1h > Remaining Estimate: 1h > > PutCassandraQL fails when attempting to insert a (iso-8601-formatted) > timestamp on both Nifi 0.6 and 1.0 due to handling timestamps as strings. > Input: > FlowFile Attribute Map Content > Key: 'cql.args.1.type' > Value: 'text' > Key: 'cql.args.1.value' > Value: 'temp3' > Key: 'cql.args.2.type' > Value: 'timestamp' > Key: 'cql.args.2.value' > Value: '2016-06-30T20:04:36Z' > Key: 'cql.args.3.type' > Value: 'float' > Key: 'cql.args.3.value' > Value: '6.7' > Key: 'j.id' > Value: 'temp3' > Key: 'j.ts' > Value: '2016-06-30T20:04:36Z' > Key: 'j.value' > Value: '6.7' > -- > INSERT INTO test.test2 (sensor, ts, value) VALUES(?,?,?) > Output: > com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found > for requested operation: [timestamp <-> java.lang.String] > Record does not get inserted into Cassandra > Expected output: > Record gets inserted into Cassandra -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359630#comment-15359630 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69355805 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or evaluate
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359627#comment-15359627 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69355562 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or evaluate
[jira] [Commented] (NIFI-2167) Disabled state not including in a Processor that was copy/paste
[ https://issues.apache.org/jira/browse/NIFI-2167?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359618#comment-15359618 ] Matthew Clarke commented on NIFI-2167: -- The same holds true when adding templates that contain disabled processors. Those processors are enabled when the template is added to the NiFi canvas. > Disabled state not including in a Processor that was copy/paste > --- > > Key: NIFI-2167 > URL: https://issues.apache.org/jira/browse/NIFI-2167 > Project: Apache NiFi > Issue Type: Bug > Components: Core Framework >Reporter: Matt Gilman > Fix For: 1.0.0 > > -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Created] (NIFI-2167) Disabled state not including in a Processor that was copy/paste
Matt Gilman created NIFI-2167: - Summary: Disabled state not including in a Processor that was copy/paste Key: NIFI-2167 URL: https://issues.apache.org/jira/browse/NIFI-2167 Project: Apache NiFi Issue Type: Bug Components: Core Framework Reporter: Matt Gilman Fix For: 1.0.0 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2021) System Diagnostics not working in Clustered mode
[ https://issues.apache.org/jira/browse/NIFI-2021?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359596#comment-15359596 ] ASF subversion and git services commented on NIFI-2021: --- Commit e7e349631f2805662e418a3d72ab335d1211807d in nifi's branch refs/heads/master from [~markap14] [ https://git-wip-us.apache.org/repos/asf?p=nifi.git;h=e7e3496 ] NIFI-2021: Fixed infinite replicated recursion. This closes #597 > System Diagnostics not working in Clustered mode > > > Key: NIFI-2021 > URL: https://issues.apache.org/jira/browse/NIFI-2021 > Project: Apache NiFi > Issue Type: Bug > Components: Core Framework >Affects Versions: 1.0.0 >Reporter: Mark Payne >Assignee: Mark Payne >Priority: Blocker > Fix For: 1.0.0 > > > If I open the Summary panel in the UI and then click System Diagnostics, I > never get a response back. Grabbing a stack trace shows that the System > Diagnostics request seems to get replicated in an infinite loop (i.e., the > replication causes a replication, which causes a replication, ...) -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2021) System Diagnostics not working in Clustered mode
[ https://issues.apache.org/jira/browse/NIFI-2021?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359598#comment-15359598 ] ASF GitHub Bot commented on NIFI-2021: -- Github user asfgit closed the pull request at: https://github.com/apache/nifi/pull/597 > System Diagnostics not working in Clustered mode > > > Key: NIFI-2021 > URL: https://issues.apache.org/jira/browse/NIFI-2021 > Project: Apache NiFi > Issue Type: Bug > Components: Core Framework >Affects Versions: 1.0.0 >Reporter: Mark Payne >Assignee: Mark Payne >Priority: Blocker > Fix For: 1.0.0 > > > If I open the Summary panel in the UI and then click System Diagnostics, I > never get a response back. Grabbing a stack trace shows that the System > Diagnostics request seems to get replicated in an infinite loop (i.e., the > replication causes a replication, which causes a replication, ...) -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-1781) Update UI to reflect component level authorization
[ https://issues.apache.org/jira/browse/NIFI-1781?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359595#comment-15359595 ] ASF GitHub Bot commented on NIFI-1781: -- Github user markap14 commented on the issue: https://github.com/apache/nifi/pull/603 @mcgilman excellent - all looks good, as far as I can tell. This PR addresses several issues that existed in the application. I've pushed to master. > Update UI to reflect component level authorization > -- > > Key: NIFI-1781 > URL: https://issues.apache.org/jira/browse/NIFI-1781 > Project: Apache NiFi > Issue Type: Sub-task > Components: Core UI >Reporter: Matt Gilman >Assignee: Matt Gilman > Fix For: 1.0.0 > > Attachments: 0001-NIFI-1781.patch, nifi-component-samples.png, > nifi-sample-flow.png > > > - Update to UI to visual access level per component -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Resolved] (NIFI-1781) Update UI to reflect component level authorization
[ https://issues.apache.org/jira/browse/NIFI-1781?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Mark Payne resolved NIFI-1781. -- Resolution: Fixed > Update UI to reflect component level authorization > -- > > Key: NIFI-1781 > URL: https://issues.apache.org/jira/browse/NIFI-1781 > Project: Apache NiFi > Issue Type: Sub-task > Components: Core UI >Reporter: Matt Gilman >Assignee: Matt Gilman > Fix For: 1.0.0 > > Attachments: 0001-NIFI-1781.patch, nifi-component-samples.png, > nifi-sample-flow.png > > > - Update to UI to visual access level per component -- This message was sent by Atlassian JIRA (v6.3.4#6332)
nifi git commit: NIFI-2021: Fixed infinite replicated recursion. This closes #597
Repository: nifi Updated Branches: refs/heads/master ce5330330 -> e7e349631 NIFI-2021: Fixed infinite replicated recursion. This closes #597 Project: http://git-wip-us.apache.org/repos/asf/nifi/repo Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/e7e34963 Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/e7e34963 Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/e7e34963 Branch: refs/heads/master Commit: e7e349631f2805662e418a3d72ab335d1211807d Parents: ce53303 Author: Mark PayneAuthored: Thu Jun 30 11:35:09 2016 -0400 Committer: Matt Gilman Committed: Fri Jul 1 16:42:10 2016 -0400 -- .../java/org/apache/nifi/web/api/SystemDiagnosticsResource.java| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/nifi/blob/e7e34963/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java -- diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java index 8d6229f..cbe6cf7 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java @@ -122,7 +122,7 @@ public class SystemDiagnosticsResource extends ApplicationResource { throw new IllegalArgumentException("Nodewise requests cannot be directed at a specific node."); } -if (isConnectedToCluster()) { +if (isReplicateRequest()) { // determine where this request should be sent if (clusterNodeId == null) { final NodeResponse nodeResponse = getRequestReplicator().replicate(HttpMethod.GET, getAbsolutePath(), 
getRequestParameters(true), getHeaders()).awaitMergedResponse();
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359592#comment-15359592 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69353249 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or evaluate
[2/5] nifi git commit: NIFI-1781: - Updating UI according to permissions through out the application. - Shuffling provenance events, template, and cluster search REST APIs according to resources being
http://git-wip-us.apache.org/repos/asf/nifi/blob/ce533033/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-security/src/main/java/org/apache/nifi/web/security/node/NodeAuthorizedUserFilter.java -- diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-security/src/main/java/org/apache/nifi/web/security/node/NodeAuthorizedUserFilter.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-security/src/main/java/org/apache/nifi/web/security/node/NodeAuthorizedUserFilter.java deleted file mode 100644 index 8451c7c..000 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-security/src/main/java/org/apache/nifi/web/security/node/NodeAuthorizedUserFilter.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.nifi.web.security.node; - -import java.io.IOException; -import java.io.Serializable; -import java.security.cert.X509Certificate; -import javax.servlet.FilterChain; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; - -import org.apache.nifi.authorization.user.NiFiUserDetails; -import org.apache.nifi.controller.FlowController; -import org.apache.commons.lang3.StringUtils; -import org.apache.nifi.authentication.AuthenticationResponse; -import org.apache.nifi.authorization.user.NiFiUser; -import org.apache.nifi.util.NiFiProperties; -import org.apache.nifi.web.security.token.NiFiAuthenticationToken; -import org.apache.nifi.web.security.x509.X509CertificateExtractor; -import org.apache.nifi.web.security.x509.X509IdentityProvider; -import org.apache.nifi.web.util.WebUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.context.ApplicationContext; -import org.springframework.security.core.context.SecurityContextHolder; -import org.springframework.web.context.support.WebApplicationContextUtils; -import org.springframework.web.filter.GenericFilterBean; - -/** - * Custom filter to extract a user's authorities from the request where the user was authenticated by the cluster manager and populate the threadlocal with the authorized user. If the request contains - * the appropriate header with authorities and the application instance is a node connected to the cluster, then the authentication/authorization steps remaining in the filter chain are skipped. - * - * Checking if the application instance is a connected node is important because it prevents external clients from faking the request headers and bypassing the authentication processing chain. 
- */ -public class NodeAuthorizedUserFilter extends GenericFilterBean { - -private static final Logger LOGGER = LoggerFactory.getLogger(NodeAuthorizedUserFilter.class); - -public static final String PROXY_USER_DETAILS = "X-ProxiedEntityUserDetails"; - -private NiFiProperties properties; -private X509CertificateExtractor certificateExtractor; -private X509IdentityProvider certificateIdentityProvider; - -@Override -public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { -final HttpServletRequest httpServletRequest = (HttpServletRequest) request; - -// get the proxied user's authorities -final String hexEncodedUserDetails = httpServletRequest.getHeader(PROXY_USER_DETAILS); - -// check if the request has the necessary header information and this instance is configured as a node -if (StringUtils.isNotBlank(hexEncodedUserDetails) && properties.isNode()) { - -// get the flow controller from the Spring context -final ApplicationContext ctx = WebApplicationContextUtils.getWebApplicationContext(getServletContext()); -final FlowController flowController = ctx.getBean("flowController", FlowController.class); - -// check that we are connected to the cluster -if (flowController.getNodeId() != null) { -try { -//
[4/5] nifi git commit: NIFI-1781: - Updating UI according to permissions through out the application. - Shuffling provenance events, template, and cluster search REST APIs according to resources being
http://git-wip-us.apache.org/repos/asf/nifi/blob/ce533033/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerAuditor.java -- diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerAuditor.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerAuditor.java index d8c2736..01f6d70 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerAuditor.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerAuditor.java @@ -80,7 +80,6 @@ public class ControllerAuditor extends NiFiAuditor { // create the config action FlowChangeAction configAction = new FlowChangeAction(); configAction.setUserIdentity(user.getIdentity()); -configAction.setUserName(user.getUserName()); configAction.setOperation(Operation.Configure); configAction.setTimestamp(new Date()); configAction.setSourceId("Flow Controller"); @@ -133,7 +132,6 @@ public class ControllerAuditor extends NiFiAuditor { // create the config action FlowChangeAction configAction = new FlowChangeAction(); configAction.setUserIdentity(user.getIdentity()); -configAction.setUserName(user.getUserName()); configAction.setOperation(Operation.Configure); configAction.setTimestamp(new Date()); configAction.setSourceId("Flow Controller"); http://git-wip-us.apache.org/repos/asf/nifi/blob/ce533033/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerServiceAuditor.java -- diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerServiceAuditor.java 
b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerServiceAuditor.java index ded90f8..a122983 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerServiceAuditor.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerServiceAuditor.java @@ -16,13 +16,6 @@ */ package org.apache.nifi.audit; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - import org.apache.nifi.action.Action; import org.apache.nifi.action.Component; import org.apache.nifi.action.FlowChangeAction; @@ -30,13 +23,13 @@ import org.apache.nifi.action.Operation; import org.apache.nifi.action.component.details.FlowChangeExtensionDetails; import org.apache.nifi.action.details.ActionDetails; import org.apache.nifi.action.details.FlowChangeConfigureDetails; +import org.apache.nifi.authorization.user.NiFiUser; import org.apache.nifi.authorization.user.NiFiUserUtils; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.controller.ConfiguredComponent; import org.apache.nifi.controller.ProcessorNode; import org.apache.nifi.controller.ReportingTaskNode; import org.apache.nifi.controller.ScheduledState; -import org.apache.nifi.authorization.user.NiFiUser; import org.apache.nifi.controller.service.ControllerServiceNode; import org.apache.nifi.controller.service.ControllerServiceReference; import org.apache.nifi.controller.service.ControllerServiceState; @@ -49,6 +42,13 @@ import org.aspectj.lang.annotation.Aspect; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + /** * Audits controller service 
creation/removal and configuration changes. */ @@ -168,7 +168,6 @@ public class ControllerServiceAuditor extends NiFiAuditor { // create a configuration action FlowChangeAction configurationAction = new FlowChangeAction(); configurationAction.setUserIdentity(user.getIdentity()); -configurationAction.setUserName(user.getUserName()); configurationAction.setOperation(operation); configurationAction.setTimestamp(actionTimestamp); configurationAction.setSourceId(controllerService.getIdentifier()); @@ -188,7 +187,6 @@
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359582#comment-15359582 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r6935 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or evaluate
[jira] [Commented] (MINIFI-41) Command Line Configuration Utility
[ https://issues.apache.org/jira/browse/MINIFI-41?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359581#comment-15359581 ] ASF GitHub Bot commented on MINIFI-41: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi-minifi/pull/21#discussion_r69352051 --- Diff: minifi-toolkit/minifi-toolkit-configuration/src/test/resources/Splunk_Processors_with_Listen_TCP.xml --- @@ -0,0 +1,18 @@ + + +Splunk Processors with Listen TCP420ccabf-c795-4a5f-a502-3deead673de3f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f699882610d65054-5258-4cc4-9589-bd1185b07f85PROCESSOR0 sec1failuref1bee4e8-470b-41b7-97af-ada9f69988260ac7aebc-7692-4f37-804d-a7f03d2ddd1bPROCESSOR0b171f7c1-c640-4d17-8a4a-9cee8a0c49a6f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f69988260ac7aebc-7692-4f37-804d-a7f03d2ddd1bPROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f69988266c7f545e-c272-4f45-893e-0838b75d744ePROCESSOR00c10de1c-4917-4bb7-94e3-d346d3a736c2f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f6998826889f8d28-f936-457a-addb-c49ef7eed23bPROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f699882641eb952d-e8c8-4d9d-85a7-0aabb542ec56PROCESSOR0c6000652-74be-4bf8-b01f-7fcb1dd0b48cf1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f69988269fafd271-dcbd-4a6c-9625-29f28259fa4aPROCESSOR0 sec1failuref1bee4e8-470b-41b7-97af-ada9f6998826ff3b49ee-8535-4084-82e2-3aa22a10629dPROCESSOR0c6529f3f-b4f5-49cb-9a7a-93b9e52c71faf1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f69988266c7f545e-c272-4f45-893e-0838b75d744ePROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f69988268bae21ad-6c89-4c01-9e71-b26340c50052PROCESSOR0fdbe0740-4be6-4712-917a-77613bb6089af1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f6998826ff3b49ee-8535-4084-82e2-3aa22a10629dPROCESSOR0 
sec1successf1bee4e8-470b-41b7-97af-ada9f699882673212a32-1340-4423-a302-a884df0231b4PROCESSOR067557f37-c2f5-42b0-b2de-e7660ebc8145f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f699882678471344-9f3d-4eb5-94f9-e25ee9a35f79PROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f6998826ed0fa412-fe42-477c-b799-eb3270287221PROCESSOR080944451-d3b5-45d8-a23d-ab214f6420b4f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f6998826792bf873-97f4-4af1-8aa9-0534e79da810PROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f69988262ddc92dd-5f5b-4b3e-911d-d0aac1823b71PROCESSOR0871f7215-56c4-4733-927c-00236e82486eKeystore FilenameThe fully-qualified filename of the KeystoreKeystore FilenamefalseKeystore FilenamefalsefalsefalseKeystore PasswordThe password for the KeystoreKeystore PasswordfalseKeystore PasswordfalsetruefalseKeystore TypeJKSJKSPKCS12PKCS12The Type of the KeystoreKeystore TypefalseKeystore TypefalsefalsefalseTruststore FilenameThe fully-qualified filename of the TruststoreTruststore FilenamefalseTruststore FilenamefalsefalsefalseTruststore PasswordThe password for the TruststoreTruststore PasswordfalseTruststore PasswordfalsetruefalseTruststore TypeJKSJKSPKCS12PKCS12The Type of the Truststore. Either JKS or PKCS12Truststore TypefalseTruststore TypefalsefalsefalseSSL ProtocolSSLSSLSSLv2HelloSSLv2HelloSSLv3SSLv3TLSTLSTLSv1TLSv1TLSv1.1TLSv1.1TLSv1.2TLSv1.2TLSThe algorithm to use for this SSL contextSSL ProtocolfalseSSL ProtocolfalsefalsefalseStandardSSLContextServiceKeystore Filename/home/osboxes/Dev/certs/distro-1.p12Keystore PasswordKeystore TypePKCS12Truststore Filename/home/osboxes/Dev/certs/truststore.jksTruststore PasswordTruststore TypeJKSSSL ProtocolTLS0PortThe port to listen on for communication.PortfalsePorttruefalsefalseReceive Buffer Size65507 BThe size of each buffer used to receive messages. 
Adjust this value appropriately based on the expected size of the incoming messages.Receive Buffer SizefalseReceive Buffer SizetruefalsefalseMax Size of Message Queue1The maximum size of the internal queue used to buffer messages being transferred from the underlying channel to the processor. Setting this value higher allows more messages to be buffered in memory during surges of incoming messages, but increases the total memory used by the processor.Max Size of Message QueuefalseMax Size of Message QueuetruefalsefalseMax Size of Socket Buffer1 MBThe maximum size of the socket buffer that should be used. This is a suggestion to the Operating System to indicate how big the socket buffer should be. If this value is set too low, the buffer may fill up before the data can be read, and incoming data will be dropped.Max Size of Socket BufferfalseMax Size of Socket BuffertruefalsefalseCharacter SetUTF-8Specifies the character set of the received data.Character SetfalseCharacter SettruefalsefalseMax Number of TCP Connections2The
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359575#comment-15359575 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69351172 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (MINIFI-41) Command Line Configuration Utility
[ https://issues.apache.org/jira/browse/MINIFI-41?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359525#comment-15359525 ] ASF GitHub Bot commented on MINIFI-41: -- Github user brosander commented on a diff in the pull request: https://github.com/apache/nifi-minifi/pull/21#discussion_r69346935 --- Diff: minifi-toolkit/minifi-toolkit-configuration/src/test/resources/Splunk_Processors_with_Listen_TCP.xml --- @@ -0,0 +1,18 @@ + + +Splunk Processors with Listen TCP420ccabf-c795-4a5f-a502-3deead673de3f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f699882610d65054-5258-4cc4-9589-bd1185b07f85PROCESSOR0 sec1failuref1bee4e8-470b-41b7-97af-ada9f69988260ac7aebc-7692-4f37-804d-a7f03d2ddd1bPROCESSOR0b171f7c1-c640-4d17-8a4a-9cee8a0c49a6f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f69988260ac7aebc-7692-4f37-804d-a7f03d2ddd1bPROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f69988266c7f545e-c272-4f45-893e-0838b75d744ePROCESSOR00c10de1c-4917-4bb7-94e3-d346d3a736c2f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f6998826889f8d28-f936-457a-addb-c49ef7eed23bPROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f699882641eb952d-e8c8-4d9d-85a7-0aabb542ec56PROCESSOR0c6000652-74be-4bf8-b01f-7fcb1dd0b48cf1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f69988269fafd271-dcbd-4a6c-9625-29f28259fa4aPROCESSOR0 sec1failuref1bee4e8-470b-41b7-97af-ada9f6998826ff3b49ee-8535-4084-82e2-3aa22a10629dPROCESSOR0c6529f3f-b4f5-49cb-9a7a-93b9e52c71faf1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f69988266c7f545e-c272-4f45-893e-0838b75d744ePROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f69988268bae21ad-6c89-4c01-9e71-b26340c50052PROCESSOR0fdbe0740-4be6-4712-917a-77613bb6089af1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f6998826ff3b49ee-8535-4084-82e2-3aa22a10629dPROCESSOR0 
sec1successf1bee4e8-470b-41b7-97af-ada9f699882673212a32-1340-4423-a302-a884df0231b4PROCESSOR067557f37-c2f5-42b0-b2de-e7660ebc8145f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f699882678471344-9f3d-4eb5-94f9-e25ee9a35f79PROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f6998826ed0fa412-fe42-477c-b799-eb3270287221PROCESSOR080944451-d3b5-45d8-a23d-ab214f6420b4f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f6998826792bf873-97f4-4af1-8aa9-0534e79da810PROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f69988262ddc92dd-5f5b-4b3e-911d-d0aac1823b71PROCESSOR0871f7215-56c4-4733-927c-00236e82486eKeystore FilenameThe fully-qualified filename of the KeystoreKeystore FilenamefalseKeystore FilenamefalsefalsefalseKeystore PasswordThe password for the KeystoreKeystore PasswordfalseKeystore PasswordfalsetruefalseKeystore TypeJKSJKSPKCS12PKCS12The Type of the KeystoreKeystore TypefalseKeystore TypefalsefalsefalseTruststore FilenameThe fully-qualified filename of the TruststoreTruststore FilenamefalseTruststore FilenamefalsefalsefalseTruststore PasswordThe password for the TruststoreTruststore PasswordfalseTruststore PasswordfalsetruefalseTruststore TypeJKSJKSPKCS12PKCS12The Type of the Truststore. Either JKS or PKCS12Truststore TypefalseTruststore TypefalsefalsefalseSSL ProtocolSSLSSLSSLv2HelloSSLv2HelloSSLv3SSLv3TLSTLSTLSv1TLSv1TLSv1.1TLSv1.1TLSv1.2TLSv1.2TLSThe algorithm to use for this SSL contextSSL ProtocolfalseSSL ProtocolfalsefalsefalseStandardSSLContextServiceKeystore Filename/home/osboxes/Dev/certs/distro-1.p12Keystore PasswordKeystore TypePKCS12Truststore Filename/home/osboxes/Dev/certs/truststore.jksTruststore PasswordTruststore TypeJKSSSL ProtocolTLS0PortThe port to listen on for communication.PortfalsePorttruefalsefalseReceive Buffer Size65507 BThe size of each buffer used to receive messages. 
Adjust this value appropriately based on the expected size of the incoming messages.Receive Buffer SizefalseReceive Buffer SizetruefalsefalseMax Size of Message Queue1The maximum size of the internal queue used to buffer messages being transferred from the underlying channel to the processor. Setting this value higher allows more messages to be buffered in memory during surges of incoming messages, but increases the total memory used by the processor.Max Size of Message QueuefalseMax Size of Message QueuetruefalsefalseMax Size of Socket Buffer1 MBThe maximum size of the socket buffer that should be used. This is a suggestion to the Operating System to indicate how big the socket buffer should be. If this value is set too low, the buffer may fill up before the data can be read, and incoming data will be dropped.Max Size of Socket BufferfalseMax Size of Socket BuffertruefalsefalseCharacter SetUTF-8Specifies the character set of the received data.Character SetfalseCharacter SettruefalsefalseMax Number of TCP Connections2The
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359519#comment-15359519 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69346410 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-2165) PutCassandraQL is handling timestamps as strings
[ https://issues.apache.org/jira/browse/NIFI-2165?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359514#comment-15359514 ] ASF GitHub Bot commented on NIFI-2165: -- Github user mattyb149 commented on the issue: https://github.com/apache/nifi/pull/602 Looks good, mind adding unit test(s) to try various good and bad values? > PutCassandraQL is handling timestamps as strings > > > Key: NIFI-2165 > URL: https://issues.apache.org/jira/browse/NIFI-2165 > Project: Apache NiFi > Issue Type: Bug > Components: Extensions >Affects Versions: 1.0.0, 0.6.1 > Environment: Ubuntu 14.04, openjdk version "1.8.0_91" >Reporter: Jeff Oxenberg >Priority: Minor > Labels: cassandra, putcassandraql, timestamp > Fix For: 1.0.0 > > Original Estimate: 1h > Remaining Estimate: 1h > > PutCassandraQL fails when attempting to insert a (iso-8601-formatted) > timestamp on both Nifi 0.6 and 1.0 due to handling timestamps as strings. > Input: > FlowFile Attribute Map Content > Key: 'cql.args.1.type' > Value: 'text' > Key: 'cql.args.1.value' > Value: 'temp3' > Key: 'cql.args.2.type' > Value: 'timestamp' > Key: 'cql.args.2.value' > Value: '2016-06-30T20:04:36Z' > Key: 'cql.args.3.type' > Value: 'float' > Key: 'cql.args.3.value' > Value: '6.7' > Key: 'j.id' > Value: 'temp3' > Key: 'j.ts' > Value: '2016-06-30T20:04:36Z' > Key: 'j.value' > Value: '6.7' > -- > INSERT INTO test.test2 (sensor, ts, value) VALUES(?,?,?) > Output: > com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found > for requested operation: [timestamp <-> java.lang.String] > Record does not get inserted into Cassandra > Expected output: > Record gets inserted into Cassandra -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-1781) Update UI to reflect component level authorization
[ https://issues.apache.org/jira/browse/NIFI-1781?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359495#comment-15359495 ] ASF GitHub Bot commented on NIFI-1781: -- GitHub user mcgilman opened a pull request: https://github.com/apache/nifi/pull/603 NIFI-1781: Updating UI to respect access controls outside of canvas - Updating UI according to permissions throughout the application. - Shuffling provenance events, template, and cluster search REST APIs according to resources being authorized. - Moving template upload controls. - Removing username where appropriate. - Addressing issues when authorizing flow configuration actions. - Code clean up. You can merge this pull request into a Git repository by running: $ git pull https://github.com/mcgilman/nifi NIFI-1781 Alternatively you can review and apply these changes as the patch at: https://github.com/apache/nifi/pull/603.patch To close this pull request, make a commit to your master/trunk branch with (at least) the following in the commit message: This closes #603 commit ce5330330ac1e377144fc1d29d5ef9fb2f60e29f Author: Matt Gilman Date: 2016-07-01T19:10:27Z NIFI-1781: - Updating UI according to permissions throughout the application. - Shuffling provenance events, template, and cluster search REST APIs according to resources being authorized. - Moving template upload controls. - Removing username where appropriate. - Addressing issues when authorizing flow configuration actions. - Code clean up. > Update UI to reflect component level authorization > -- > > Key: NIFI-1781 > URL: https://issues.apache.org/jira/browse/NIFI-1781 > Project: Apache NiFi > Issue Type: Sub-task > Components: Core UI >Reporter: Matt Gilman >Assignee: Matt Gilman > Fix For: 1.0.0 > > Attachments: 0001-NIFI-1781.patch, nifi-component-samples.png, > nifi-sample-flow.png > > > - Update the UI to visually reflect access level per component -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (MINIFI-41) Command Line Configuration Utility
[ https://issues.apache.org/jira/browse/MINIFI-41?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359472#comment-15359472 ] ASF GitHub Bot commented on MINIFI-41: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi-minifi/pull/21#discussion_r69342080 --- Diff: minifi-toolkit/minifi-toolkit-configuration/src/test/resources/Splunk_Processors_with_Listen_TCP.xml --- @@ -0,0 +1,18 @@ + + +Splunk Processors with Listen TCP420ccabf-c795-4a5f-a502-3deead673de3f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f699882610d65054-5258-4cc4-9589-bd1185b07f85PROCESSOR0 sec1failuref1bee4e8-470b-41b7-97af-ada9f69988260ac7aebc-7692-4f37-804d-a7f03d2ddd1bPROCESSOR0b171f7c1-c640-4d17-8a4a-9cee8a0c49a6f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f69988260ac7aebc-7692-4f37-804d-a7f03d2ddd1bPROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f69988266c7f545e-c272-4f45-893e-0838b75d744ePROCESSOR00c10de1c-4917-4bb7-94e3-d346d3a736c2f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f6998826889f8d28-f936-457a-addb-c49ef7eed23bPROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f699882641eb952d-e8c8-4d9d-85a7-0aabb542ec56PROCESSOR0c6000652-74be-4bf8-b01f-7fcb1dd0b48cf1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f69988269fafd271-dcbd-4a6c-9625-29f28259fa4aPROCESSOR0 sec1failuref1bee4e8-470b-41b7-97af-ada9f6998826ff3b49ee-8535-4084-82e2-3aa22a10629dPROCESSOR0c6529f3f-b4f5-49cb-9a7a-93b9e52c71faf1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f69988266c7f545e-c272-4f45-893e-0838b75d744ePROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f69988268bae21ad-6c89-4c01-9e71-b26340c50052PROCESSOR0fdbe0740-4be6-4712-917a-77613bb6089af1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f6998826ff3b49ee-8535-4084-82e2-3aa22a10629dPROCESSOR0 
sec1successf1bee4e8-470b-41b7-97af-ada9f699882673212a32-1340-4423-a302-a884df0231b4PROCESSOR067557f37-c2f5-42b0-b2de-e7660ebc8145f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f699882678471344-9f3d-4eb5-94f9-e25ee9a35f79PROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f6998826ed0fa412-fe42-477c-b799-eb3270287221PROCESSOR080944451-d3b5-45d8-a23d-ab214f6420b4f1bee4e8-470b-41b7-97af-ada9f69988260 MB0f1bee4e8-470b-41b7-97af-ada9f6998826792bf873-97f4-4af1-8aa9-0534e79da810PROCESSOR0 sec1successf1bee4e8-470b-41b7-97af-ada9f69988262ddc92dd-5f5b-4b3e-911d-d0aac1823b71PROCESSOR0871f7215-56c4-4733-927c-00236e82486eKeystore FilenameThe fully-qualified filename of the KeystoreKeystore FilenamefalseKeystore FilenamefalsefalsefalseKeystore PasswordThe password for the KeystoreKeystore PasswordfalseKeystore PasswordfalsetruefalseKeystore TypeJKSJKSPKCS12PKCS12The Type of the KeystoreKeystore TypefalseKeystore TypefalsefalsefalseTruststore FilenameThe fully-qualified filename of the TruststoreTruststore FilenamefalseTruststore FilenamefalsefalsefalseTruststore PasswordThe password for the TruststoreTruststore PasswordfalseTruststore PasswordfalsetruefalseTruststore TypeJKSJKSPKCS12PKCS12The Type of the Truststore. Either JKS or PKCS12Truststore TypefalseTruststore TypefalsefalsefalseSSL ProtocolSSLSSLSSLv2HelloSSLv2HelloSSLv3SSLv3TLSTLSTLSv1TLSv1TLSv1.1TLSv1.1TLSv1.2TLSv1.2TLSThe algorithm to use for this SSL contextSSL ProtocolfalseSSL ProtocolfalsefalsefalseStandardSSLContextServiceKeystore Filename/home/osboxes/Dev/certs/distro-1.p12Keystore PasswordKeystore TypePKCS12Truststore Filename/home/osboxes/Dev/certs/truststore.jksTruststore PasswordTruststore TypeJKSSSL ProtocolTLS0PortThe port to listen on for communication.PortfalsePorttruefalsefalseReceive Buffer Size65507 BThe size of each buffer used to receive messages. 
Adjust this value appropriately based on the expected size of the incoming messages.Receive Buffer SizefalseReceive Buffer SizetruefalsefalseMax Size of Message Queue1The maximum size of the internal queue used to buffer messages being transferred from the underlying channel to the processor. Setting this value higher allows more messages to be buffered in memory during surges of incoming messages, but increases the total memory used by the processor.Max Size of Message QueuefalseMax Size of Message QueuetruefalsefalseMax Size of Socket Buffer1 MBThe maximum size of the socket buffer that should be used. This is a suggestion to the Operating System to indicate how big the socket buffer should be. If this value is set too low, the buffer may fill up before the data can be read, and incoming data will be dropped.Max Size of Socket BufferfalseMax Size of Socket BuffertruefalsefalseCharacter SetUTF-8Specifies the character set of the received data.Character SetfalseCharacter SettruefalsefalseMax Number of TCP Connections2The
[jira] [Commented] (NIFI-2165) PutCassandraQL is handling timestamps as strings
[ https://issues.apache.org/jira/browse/NIFI-2165?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359442#comment-15359442 ] ASF GitHub Bot commented on NIFI-2165: -- GitHub user jeffoxenberg opened a pull request: https://github.com/apache/nifi/pull/602 NIFI-2165: fix support for inserting timestamps into cassandra As per https://issues.apache.org/jira/browse/NIFI-2165, fixing support for inserting timestamps into Cassandra. You can merge this pull request into a Git repository by running: $ git pull https://github.com/jeffoxenberg/nifi NIFI-2165 Alternatively you can review and apply these changes as the patch at: https://github.com/apache/nifi/pull/602.patch To close this pull request, make a commit to your master/trunk branch with (at least) the following in the commit message: This closes #602 commit b7c1ebc546bcf554cdfa3b9761596e6e1fcdc27b Author: jeffoxenbergDate: 2016-07-01T18:26:27Z NIFI-2165: fix support for inserting timestamps into cassandra > PutCassandraQL is handling timestamps as strings > > > Key: NIFI-2165 > URL: https://issues.apache.org/jira/browse/NIFI-2165 > Project: Apache NiFi > Issue Type: Bug > Components: Extensions >Affects Versions: 1.0.0, 0.6.1 > Environment: Ubuntu 14.04, openjdk version "1.8.0_91" >Reporter: Jeff Oxenberg >Priority: Minor > Labels: cassandra, putcassandraql, timestamp > Fix For: 1.0.0 > > Original Estimate: 1h > Remaining Estimate: 1h > > PutCassandraQL fails when attempting to insert a (iso-8601-formatted) > timestamp on both Nifi 0.6 and 1.0 due to handling timestamps as strings. 
> Input: > FlowFile Attribute Map Content > Key: 'cql.args.1.type' > Value: 'text' > Key: 'cql.args.1.value' > Value: 'temp3' > Key: 'cql.args.2.type' > Value: 'timestamp' > Key: 'cql.args.2.value' > Value: '2016-06-30T20:04:36Z' > Key: 'cql.args.3.type' > Value: 'float' > Key: 'cql.args.3.value' > Value: '6.7' > Key: 'j.id' > Value: 'temp3' > Key: 'j.ts' > Value: '2016-06-30T20:04:36Z' > Key: 'j.value' > Value: '6.7' > -- > INSERT INTO test.test2 (sensor, ts, value) VALUES(?,?,?) > Output: > com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found > for requested operation: [timestamp <-> java.lang.String] > Record does not get inserted into Cassandra > Expected output: > Record gets inserted into Cassandra -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2150) Templates contain a great deal of unused information
[ https://issues.apache.org/jira/browse/NIFI-2150?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359423#comment-15359423 ] ASF GitHub Bot commented on NIFI-2150: -- Github user asfgit closed the pull request at: https://github.com/apache/nifi/pull/593 > Templates contain a great deal of unused information > > > Key: NIFI-2150 > URL: https://issues.apache.org/jira/browse/NIFI-2150 > Project: Apache NiFi > Issue Type: Improvement > Components: Core Framework >Reporter: Mark Payne >Assignee: Mark Payne > Fix For: 1.0.0 > > > Templates currently contain a great deal of unused information, such as the > description of each property, the allowable values, the default values, etc. > These values are ignored when the template is instantiated, because the > Processor class itself drives these values. As a result, these should be > cleansed from templates. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2150) Templates contain a great deal of unused information
[ https://issues.apache.org/jira/browse/NIFI-2150?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359417#comment-15359417 ] ASF GitHub Bot commented on NIFI-2150: -- Github user markap14 commented on the issue: https://github.com/apache/nifi/pull/593 @mcgilman good call. Looks good to me! > Templates contain a great deal of unused information > > > Key: NIFI-2150 > URL: https://issues.apache.org/jira/browse/NIFI-2150 > Project: Apache NiFi > Issue Type: Improvement > Components: Core Framework >Reporter: Mark Payne >Assignee: Mark Payne > Fix For: 1.0.0 > > > Templates currently contain a great deal of unused information, such as the > description of each property, the allowable values, the default values, etc. > These values are ignored when the template is instantiated, because the > Processor class itself drives these values. As a result, these should be > cleansed from templates. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
nifi git commit: NIFI-2150: Cleanse more values from templates that are not necessary. Additionally, updated javadocs in ProcessorConfigDTO to provide further explanation of the getAutoTerminatedRelat
Repository: nifi Updated Branches: refs/heads/master ae9e2fdf0 -> 8eb0a3882 NIFI-2150: Cleanse more values from templates that are not necessary. Additionally, updated javadocs in ProcessorConfigDTO to provide further explanation of the getAutoTerminatedRelationships() method, since this was confusing Removed additional unused fields from templates Populating snippet response using actual components rather than the snippet contents. This closes #593 Project: http://git-wip-us.apache.org/repos/asf/nifi/repo Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/8eb0a388 Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/8eb0a388 Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/8eb0a388 Branch: refs/heads/master Commit: 8eb0a3882c4e7ff5b2b7f9ab527277fc432c6b3c Parents: ae9e2fd Author: Mark PayneAuthored: Wed Jun 29 15:12:50 2016 -0400 Committer: Matt Gilman Committed: Fri Jul 1 14:21:02 2016 -0400 -- .../nifi/web/api/dto/ProcessorConfigDTO.java| 4 +- .../org/apache/nifi/controller/Template.java| 2 +- .../apache/nifi/controller/TemplateUtils.java | 40 ++- .../org/apache/nifi/web/api/dto/DtoFactory.java | 118 --- 4 files changed, 118 insertions(+), 46 deletions(-) -- http://git-wip-us.apache.org/repos/asf/nifi/blob/8eb0a388/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorConfigDTO.java -- diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorConfigDTO.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorConfigDTO.java index ec5df96..a9006c9 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorConfigDTO.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorConfigDTO.java @@ 
-237,7 +237,9 @@ public class ProcessorConfigDTO { * @return the names of all processor relationships that cause a flow file to be terminated if the relationship is not connected to anything */ @ApiModelProperty( -value = "The names of all relationships that cause a flow file to be terminated if the relationship is not connected elsewhere." +value = "The names of all relationships that cause a flow file to be terminated if the relationship is not connected elsewhere. This property differs " ++ "from the 'isAutoTerminate' property of the RelationshipDTO in that the RelationshipDTO is meant to depict the current configuration, whereas this " ++ "property can be set in a DTO when updating a Processor in order to change which Relationships should be auto-terminated." ) public Set getAutoTerminatedRelationships() { return autoTerminatedRelationships; http://git-wip-us.apache.org/repos/asf/nifi/blob/8eb0a388/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/Template.java -- diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/Template.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/Template.java index e50fe39..b330581 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/Template.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/Template.java @@ -130,7 +130,7 @@ public class Template implements Authorizable { for (final ProcessGroupDTO groupDto : snippet.getProcessGroups()) { final ProcessGroup group = processGroup.getProcessGroup(groupDto.getId()); if (group != null) { -authComponents.addAll(getAuthorizableComponents(processGroup)); +authComponents.addAll(getAuthorizableComponents(group)); } } 
http://git-wip-us.apache.org/repos/asf/nifi/blob/8eb0a388/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateUtils.java -- diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateUtils.java
[jira] [Commented] (NIFI-2150) Templates contain a great deal of unused information
[ https://issues.apache.org/jira/browse/NIFI-2150?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359410#comment-15359410 ] ASF GitHub Bot commented on NIFI-2150: -- Github user mcgilman commented on the issue: https://github.com/apache/nifi/pull/593 @markap14 Looks great! I made one additional change to populate snippet results off of the actual components instead of just the snippet contents (since the contents are now pruned). https://github.com/mcgilman/nifi/commit/1b8d231cedbf192fd1027a5a3913fd04b525cc0f > Templates contain a great deal of unused information > > > Key: NIFI-2150 > URL: https://issues.apache.org/jira/browse/NIFI-2150 > Project: Apache NiFi > Issue Type: Improvement > Components: Core Framework >Reporter: Mark Payne >Assignee: Mark Payne > Fix For: 1.0.0 > > > Templates currently contain a great deal of unused information, such as the > description of each property, the allowable values, the default values, etc. > These values are ignored when the template is instantiated, because the > Processor class itself drives these values. As a result, these should be > cleansed from templates. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Created] (NIFI-2165) PutCassandraQL is handling timestamps as strings
Jeff Oxenberg created NIFI-2165: --- Summary: PutCassandraQL is handling timestamps as strings Key: NIFI-2165 URL: https://issues.apache.org/jira/browse/NIFI-2165 Project: Apache NiFi Issue Type: Bug Components: Extensions Affects Versions: 0.6.1, 1.0.0 Environment: Ubuntu 14.04, openjdk version "1.8.0_91" Reporter: Jeff Oxenberg Priority: Minor Fix For: 1.0.0 PutCassandraQL fails when attempting to insert a (iso-8601-formatted) timestamp on both Nifi 0.6 and 1.0 due to handling timestamps as strings. Input: FlowFile Attribute Map Content Key: 'cql.args.1.type' Value: 'text' Key: 'cql.args.1.value' Value: 'temp3' Key: 'cql.args.2.type' Value: 'timestamp' Key: 'cql.args.2.value' Value: '2016-06-30T20:04:36Z' Key: 'cql.args.3.type' Value: 'float' Key: 'cql.args.3.value' Value: '6.7' Key: 'j.id' Value: 'temp3' Key: 'j.ts' Value: '2016-06-30T20:04:36Z' Key: 'j.value' Value: '6.7' -- INSERT INTO test.test2 (sensor, ts, value) VALUES(?,?,?) Output: com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found for requested operation: [timestamp <-> java.lang.String] Record does not get inserted into Cassandra Expected output: Record gets inserted into Cassandra -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Created] (MINIFI-44) Perform Release Management Functions for 0.0.1
Aldrin Piri created MINIFI-44: - Summary: Perform Release Management Functions for 0.0.1 Key: MINIFI-44 URL: https://issues.apache.org/jira/browse/MINIFI-44 Project: Apache NiFi MiNiFi Issue Type: Bug Reporter: Aldrin Piri Assignee: Aldrin Piri Fix For: 0.0.1 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-1930) ListenHTTP does not use SSLContextService protocol
[ https://issues.apache.org/jira/browse/NIFI-1930?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359331#comment-15359331 ] Adam Taft commented on NIFI-1930: - Just noticed there is also a "setIncludeProtocols" method, which might be easier to work with than the "setExcludeProtocols" approach. > ListenHTTP does not use SSLContextService protocol > -- > > Key: NIFI-1930 > URL: https://issues.apache.org/jira/browse/NIFI-1930 > Project: Apache NiFi > Issue Type: Bug >Affects Versions: 0.6.1 >Reporter: Mark Bean > > The ListenHTTP processor ignores the protocol property of the > SSLContextService. Even when the service specifies a specific protocol, > ListenHTTP will negotiate and honor alternate protocols. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-1930) ListenHTTP does not use SSLContextService protocol
[ https://issues.apache.org/jira/browse/NIFI-1930?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359327#comment-15359327 ] Adam Taft commented on NIFI-1930: - Some Jetty config references for the lazy: http://www.eclipse.org/jetty/documentation/current/configuring-ssl.html http://download.eclipse.org/jetty/stable-9/apidocs/org/eclipse/jetty/util/ssl/SslContextFactory.html > ListenHTTP does not use SSLContextService protocol > -- > > Key: NIFI-1930 > URL: https://issues.apache.org/jira/browse/NIFI-1930 > Project: Apache NiFi > Issue Type: Bug >Affects Versions: 0.6.1 >Reporter: Mark Bean > > The ListenHTTP processor ignores the protocol property of the > SSLContextService. Even when the service specifies a specific protocol, > ListenHTTP will negotiate and honor alternate protocols. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2039) Provide ability to get InputStream from FlowFile without using Process Session callback
[ https://issues.apache.org/jira/browse/NIFI-2039?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359328#comment-15359328 ] ASF GitHub Bot commented on NIFI-2039: -- GitHub user markap14 opened a pull request: https://github.com/apache/nifi/pull/601 NIFI-2039: Provide a new ProcessSession.read() method that provides an InputStream instead of using a callback You can merge this pull request into a Git repository by running: $ git pull https://github.com/markap14/nifi NIFI-2039 Alternatively you can review and apply these changes as the patch at: https://github.com/apache/nifi/pull/601.patch To close this pull request, make a commit to your master/trunk branch with (at least) the following in the commit message: This closes #601 commit e84817b56792b2bbcfae477f53694b3c4b96abd8 Author: Mark Payne Date: 2016-07-01T17:24:12Z NIFI-2039: Provide a new ProcessSession.read() method that provides an InputStream instead of using a callback > Provide ability to get InputStream from FlowFile without using Process > Session callback > --- > > Key: NIFI-2039 > URL: https://issues.apache.org/jira/browse/NIFI-2039 > Project: Apache NiFi > Issue Type: Improvement > Components: Core Framework >Reporter: Mark Payne >Assignee: Mark Payne > Fix For: 1.0.0 > > > Provide a mechanism to obtain an InputStream for a FlowFile directly from the > ProcessSession without having to provide an InputStreamCallback. > Currently, we often have processors that create an AtomicReference so that > the value can be set within a callback and then the value must be obtained > using a get() once the callback returns. This is very un-intuitive. > Additionally, we don't have the ability to return an InputStream to other > code that may be required to run in a callback. For instance, when using > Calcite, Calcite wants a callback, essentially, that provides an Enumerator > of data. We can't currently support this well because we can't create an > InputStream and return it. 
> We need to add the following method to ProcessSession: > InputStream read(FlowFile flowFile) throws ProcessException; > The InputStream that is returned must be tracked by ProcessSession such that > when the session is committed or rolled back, the stream is closed if it > hasn't been already. All of the logic that currently takes place in > ProcessSession.read() after the callback returns must be done on the closing > of the new InputStream that is returned. > The MockProcessSession should throw an Exception if commit() is called > without the InputStream being closed, ideally providing the stack trace of > where the stream was obtained and where commit() was called. This is to > ensure that the developer is being a good consumer, but the > StandardProcessSession doesn't really need to do this, as we can simply close > the InputStream so that it can no longer be consumed. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Comment Edited] (NIFI-1930) ListenHTTP does not use SSLContextService protocol
[ https://issues.apache.org/jira/browse/NIFI-1930?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359320#comment-15359320 ] Adam Taft edited comment on NIFI-1930 at 7/1/16 5:23 PM: - With regards to the TLS protocol(s) which ListenHTTP supports, the solution to this issue will need to be a configuration change made against Jetty. Specifically, the org.eclipse.jetty.util.ssl.SslContextFactory will need to have "excluded protocols" added. For example, let's says that ListenHTTP should be configured to only allow TLS1.1+. In order to achieve this, in Jetty you would need to exclude other protocols like so: {{sslContextFactory.addExcludeProtocols("SSL","SSLv2","SSLv2Hello","SSLv3","TLS","TLS1"}} A problem exists that the SSLContextService only allows a *single* protocol to be specified. This might work for client configurations, but generally for servers you want to negotiate the protocol. In this example, you'd want any TLS >= 1.1 to be supported. Therefore this issue might be difficult to resolve against the current behavior of the SSLContextService. Instead, it's possible that a "quick" change be made to allow weaker TLS protocols to be excluded. Possibly a temporary property called "minimumTLSVersion" or something like that could be added to ListentHTTP that would support the above use case. was (Author: taftster): With regards to the TLS protocol(s) which ListenHTTP supports, the solution to this issue will need to be a configuration change made against Jetty. Specifically, the org.eclipse.jetty.util.ssl.SslContextFactory will need to have "excluded protocols" added. For example, let's says that ListenHTTP should be configured to only allow TLS1.1+. In order to achieve this, in Jetty you would need to exclude other protocols like so: {{sslContextFactory.addExcludedProtocol("SSL","SSLv2","SSLv2Hello","SSLv3","TLS","TLS1"}} A problem exists that the SSLContextService only allows a *single* protocol to be specified. 
This might work for client configurations, but generally for servers you want to negotiate the protocol. In this example, you'd want any TLS >= 1.1 to be supported. Therefore this issue might be difficult to resolve against the current behavior of the SSLContextService. Instead, it's possible that a "quick" change be made to allow weaker TLS protocols to be excluded. Possibly a temporary property called "minimumTLSVersion" or something like that could be added to ListenHTTP that would support the above use case. > ListenHTTP does not use SSLContextService protocol > -- > > Key: NIFI-1930 > URL: https://issues.apache.org/jira/browse/NIFI-1930 > Project: Apache NiFi > Issue Type: Bug >Affects Versions: 0.6.1 >Reporter: Mark Bean > > The ListenHTTP processor ignores the protocol property of the > SSLContextService. Even when the service specifies a specific protocol, > ListenHTTP will negotiate and honor alternate protocols. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-700) GetHTTP, ListenHTTP, and PostHTTP should be migrated to use SSLContextService fully
[ https://issues.apache.org/jira/browse/NIFI-700?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359289#comment-15359289 ] Adam Taft commented on NIFI-700: Recommend that this be split into two different JIRA issues. Specifically, the difference between the fix for the Get/PostHTTP (client) processors vs. the ListenHTTP (server) process will be significant. In the client case, the solution will be made against Apache Commons. In the server case, the fix will be made against Jetty. It's probably best to track these changes separately. > GetHTTP, ListenHTTP, and PostHTTP should be migrated to use SSLContextService > fully > --- > > Key: NIFI-700 > URL: https://issues.apache.org/jira/browse/NIFI-700 > Project: Apache NiFi > Issue Type: Bug > Components: Extensions >Affects Versions: 0.1.0 >Reporter: Aldrin Piri >Assignee: Mark Payne > > These processors were created before SSLContextService was fully formed in > its current state and currently only extract the properties from the service, > creating their own SSLContext via the HTTPClient utility classes. Both > should derive their context from the controller service for the sake of > consistency. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Updated] (NIFI-2159) Fingerprint not detecting flow.xml differences
[ https://issues.apache.org/jira/browse/NIFI-2159?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Mark Payne updated NIFI-2159: - Fix Version/s: 1.0.0 > Fingerprint not detecting flow.xml differences > -- > > Key: NIFI-2159 > URL: https://issues.apache.org/jira/browse/NIFI-2159 > Project: Apache NiFi > Issue Type: Bug >Affects Versions: 0.6.1 >Reporter: Brandon DeVries >Priority: Critical > Fix For: 1.0.0, 0.7.0 > > > We have very occasionally observed situations where the flow.xml across a > cluster was able to become inconsistent, resulting in unexpected behavior. > While investigating, the following issue was found. While we are not 100% > that this problem is the one we were looking for, it is definitely an issue... > Within the FingerprintFactory, the text values from tags were > always being appended to the fingerprint as a default value of "NO_VALUE" > regardless of whether or not they actually contained a value. This can > manifest as two different flow.xml files being considered the same when they > have the same number of relationship tags, even though those tags contain > different values. 
Here is a suggested fix: > Change the following in FingerprintFactory.java\[1\]: > {code} > final List sortedRelationshipElems = sortElements(relationshipElems, > getConnectionRelationshipsComparator()); > for (final Element relationshipElem : sortedRelationshipElems) { > addConnectionRelationshipFingerprint(builder, relationshipElem); > } > {code} > To: > {code} > for (int i = 0; i < relationshipElems.getLength(); i++) { > builder.append(getValue(relationshipElems.item(i),"NO_VALUE")); > } > {code} > The following unit test will show that the value of the relationship is now > being used in the fingerprint: > {code} > @Test > public void testResourceValueInFingerprint() throws IOException{ >final String fp1 = > fingerprinter.createFingerprint(getResourceBytes("/nifi/fingerprint/flow1a.xml"),null); >assertEquals(3,StringUtils.countMatches(fp1,"success")); > } > {code} > Additionally, the tag isn't being included in the fingerprint either. > While the following solution has not been tested, it is possible that this > could be fixed by adding the following line to the addConnectionFingerprint() > method \[2\]: > {code} > appendFirstValue(builder, DomUtils.getChildNodesByTagName(connectionElem, > "name")); > {code} > \[1\] > https://github.com/apache/nifi/blob/270944ec692e12c221cdff202bdab56309dfcfd7/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/fingerprint/FingerprintFactory.java#L861-864 > \[2\] > https://github.com/apache/nifi/blob/270944ec692e12c221cdff202bdab56309dfcfd7/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/fingerprint/FingerprintFactory.java#L857 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Assigned] (NIFI-2164) ConsumeJMS should allow user to configure trade-off between 'best effort' and 'guaranteed receipt' of data
[ https://issues.apache.org/jira/browse/NIFI-2164?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Oleg Zhurakousky reassigned NIFI-2164: -- Assignee: Oleg Zhurakousky > ConsumeJMS should allow user to configure trade-off between 'best effort' and > 'guaranteed receipt' of data > -- > > Key: NIFI-2164 > URL: https://issues.apache.org/jira/browse/NIFI-2164 > Project: Apache NiFi > Issue Type: Improvement > Components: Extensions >Reporter: Mark Payne >Assignee: Oleg Zhurakousky > Fix For: 1.0.0 > > > Currently the ConsumeJMS Processor uses auto-acknowledge acknowledgement. > This is beneficial for many use cases but could result in data loss if NiFi > is shut down. We should expose a 'Delivery Guarantee' property that allows > the user to choose between 'Best Effort', which will provide better > performance or 'Guaranteed Receipt', which will guarantee that data has been > committed to NiFi's Content & FlowFile Repositories before acknowledging the > message. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Created] (NIFI-2164) ConsumeJMS should allow user to configure trade-off between 'best effort' and 'guaranteed receipt' of data
Mark Payne created NIFI-2164: Summary: ConsumeJMS should allow user to configure trade-off between 'best effort' and 'guaranteed receipt' of data Key: NIFI-2164 URL: https://issues.apache.org/jira/browse/NIFI-2164 Project: Apache NiFi Issue Type: Improvement Components: Extensions Reporter: Mark Payne Fix For: 1.0.0 Currently the ConsumeJMS Processor uses auto-acknowledge acknowledgement. This is beneficial for many use cases but could result in data loss if NiFi is shut down. We should expose a 'Delivery Guarantee' property that allows the user to choose between 'Best Effort', which will provide better performance or 'Guaranteed Receipt', which will guarantee that data has been committed to NiFi's Content & FlowFile Repositories before acknowledging the message. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2159) Fingerprint not detecting flow.xml differences
[ https://issues.apache.org/jira/browse/NIFI-2159?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359240#comment-15359240 ] Brandon DeVries commented on NIFI-2159: --- From the guy who found the issue: {quote} In hindsight the list of nodes will still need to be sorted based on text contents, as the order of relationships should not matter. My proposed solution did not take that into account. {quote} So, whoever takes on this ticket will need to check for that as well... > Fingerprint not detecting flow.xml differences > -- > > Key: NIFI-2159 > URL: https://issues.apache.org/jira/browse/NIFI-2159 > Project: Apache NiFi > Issue Type: Bug >Affects Versions: 0.6.1 >Reporter: Brandon DeVries >Priority: Critical > Fix For: 0.7.0 > > > We have very occasionally observed situations where the flow.xml across a > cluster was able to become inconsistent, resulting in unexpected behavior. > While investigating, the following issue was found. While we are not 100% > that this problem is the one we were looking for, it is definitely an issue... > Within the FingerprintFactory, the text values from tags were > always being appended to the fingerprint as a default value of "NO_VALUE" > regardless of whether or not they actually contained a value. This can > manifest as two different flow.xml files being considered the same when they > have the same number of relationship tags, even though those tags contain > different values. 
Here is a suggested fix: > Change the following in FingerprintFactory.java\[1\]: > {code} > final List sortedRelationshipElems = sortElements(relationshipElems, > getConnectionRelationshipsComparator()); > for (final Element relationshipElem : sortedRelationshipElems) { > addConnectionRelationshipFingerprint(builder, relationshipElem); > } > {code} > To: > {code} > for (int i = 0; i < relationshipElems.getLength(); i++) { > builder.append(getValue(relationshipElems.item(i),"NO_VALUE")); > } > {code} > The following unit test will show that the value of the relationship is now > being used in the fingerprint: > {code} > @Test > public void testResourceValueInFingerprint() throws IOException{ >final String fp1 = > fingerprinter.createFingerprint(getResourceBytes("/nifi/fingerprint/flow1a.xml"),null); >assertEquals(3,StringUtils.countMatches(fp1,"success")); > } > {code} > Additionally, the tag isn't being included in the fingerprint either. > While the following solution has not been tested, it is possible that this > could be fixed by adding the following line to the addConnectionFingerprint() > method \[2\]: > {code} > appendFirstValue(builder, DomUtils.getChildNodesByTagName(connectionElem, > "name")); > {code} > \[1\] > https://github.com/apache/nifi/blob/270944ec692e12c221cdff202bdab56309dfcfd7/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/fingerprint/FingerprintFactory.java#L861-864 > \[2\] > https://github.com/apache/nifi/blob/270944ec692e12c221cdff202bdab56309dfcfd7/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/fingerprint/FingerprintFactory.java#L857 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359228#comment-15359228 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69325929 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/pom.xml --- @@ -56,6 +56,11 @@ language governing permissions and limitations under the License. --> ${es.version} +com.squareup.okhttp +okhttp +2.7.1 --- End diff -- Turns out the API is very similar (mostly just switched from get/set to a Builder), so we're good to go :) > Add Elasticsearch processors that use the REST API > -- > > Key: NIFI-2068 > URL: https://issues.apache.org/jira/browse/NIFI-2068 > Project: Apache NiFi > Issue Type: Improvement >Reporter: Matt Burgess >Assignee: Matt Burgess > Fix For: 1.0.0 > > > The current Elasticsearch processors use the Transport Client, and as a > result there can be some compatibility issues between multiple versions of ES > clusters. The REST API is much more standard between versions, so it would be > nice to have ES processors that use the REST API, to enable things like > migration from an Elasticsearch cluster with an older version to a cluster > with a newer version. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359216#comment-15359216 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69324539 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359205#comment-15359205 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69323436 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractElasticsearchHttpProcessor.java --- @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Request; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.ssl.SSLContextService; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.map.ObjectMapper; + +import javax.net.ssl.SSLContext; +import java.io.IOException; +import java.io.InputStream; +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.net.URL; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +/** + * A base class for Elasticsearch processors that use the HTTP API + */ +public abstract class AbstractElasticsearchHttpProcessor extends AbstractElasticsearchProcessor { + +public static final PropertyDescriptor ES_URL = new PropertyDescriptor.Builder() +.name("elasticsearch-http-url") +.displayName("Elasticsearch URL") +.description("Elasticsearch URL which will be connected to, including scheme, host, port, path. 
The default port for the REST API is 9200.") +.required(true) +.addValidator(StandardValidators.URL_VALIDATOR) +.build(); + +public static final PropertyDescriptor PROXY_HOST = new PropertyDescriptor.Builder() +.name("elasticsearch-http-proxy-host") +.displayName("Proxy Host") +.description("The fully qualified hostname or IP address of the proxy server") +.required(false) +.addValidator(StandardValidators.NON_EMPTY_VALIDATOR) +.build(); + +public static final PropertyDescriptor PROXY_PORT = new PropertyDescriptor.Builder() +.name("elasticsearch-http-proxy-port") +.displayName("Proxy Port") +.description("The port of the proxy server") +.required(false) +.addValidator(StandardValidators.PORT_VALIDATOR) +.build(); + +public static final PropertyDescriptor CONNECT_TIMEOUT = new PropertyDescriptor.Builder() +.name("elasticsearch-http-connect-timeout") +.displayName("Connection Timeout") +.description("Max wait time for the connection to the Elasticsearch REST API.") +.required(true) +.defaultValue("5 secs") +.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) +.build(); + +public static final PropertyDescriptor RESPONSE_TIMEOUT = new PropertyDescriptor.Builder() +.name("elasticsearch-http-response-timeout") +.displayName("Response Timeout") +.description("Max wait time for a response from the Elasticsearch REST API.") +.required(true) +.defaultValue("15 secs") +.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) +.build(); + +private final AtomicReference okHttpClientAtomicReference = new
[jira] [Updated] (NIFI-2163) Nifi Service does not follow LSB Service Spec
[ https://issues.apache.org/jira/browse/NIFI-2163?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Edgardo Vega updated NIFI-2163: --- Description: Trying to use the latest off master with nifi.sh and nifi-env.sh and they do not follow the spec for services, which causes some configuration tools not to work as they use the return codes to determine if things are running. http://refspecs.linuxbase.org/LSB_3.1.0/LSB-Core-generic/LSB-Core-generic/iniscrptact.html was: Trying to use the lastest off maset with nifi.sh and nifi-env.sh and they do not follow the spec for services, whcih causes some configuration tools not to work as they use the return codes to determine if things are running. http://refspecs.linuxbase.org/LSB_3.1.0/LSB-Core-generic/LSB-Core-generic/iniscrptact.html > Nifi Service does not follow LSB Service Spec > > > Key: NIFI-2163 > URL: https://issues.apache.org/jira/browse/NIFI-2163 > Project: Apache NiFi > Issue Type: Bug > Components: Configuration >Affects Versions: 0.7.0 > Environment: Centos >Reporter: Edgardo Vega >Priority: Critical > > Trying to use the latest off master with nifi.sh and nifi-env.sh and they do > not follow the spec for services, which causes some configuration tools not > to work as they use the return codes to determine if things are running. > http://refspecs.linuxbase.org/LSB_3.1.0/LSB-Core-generic/LSB-Core-generic/iniscrptact.html -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359177#comment-15359177 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69320973 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/test/java/org/apache/nifi/processors/elasticsearch/TestFetchElasticsearchHttp.java --- @@ -0,0 +1,292 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.Call; +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Protocol; +import com.squareup.okhttp.Request; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.ssl.SSLContextService; +import org.apache.nifi.util.MockFlowFile; +import org.apache.nifi.util.TestRunner; +import org.apache.nifi.util.TestRunners; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; + +import static org.junit.Assert.assertNotNull; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TestFetchElasticsearchHttp { + +private InputStream docExample; +private TestRunner runner; + +@Before +public void setUp() throws IOException { +ClassLoader classloader = Thread.currentThread().getContextClassLoader(); +docExample = classloader.getResourceAsStream("DocumentExample.json"); --- End diff -- Haha fair enough, as long as it works it doesn't matter to me > Add Elasticsearch processors that use the REST API > -- > > Key: NIFI-2068 > URL: https://issues.apache.org/jira/browse/NIFI-2068 > Project: Apache NiFi > Issue Type: Improvement >Reporter: Matt Burgess >Assignee: Matt Burgess > Fix For: 1.0.0 > > > The current Elasticsearch processors use the Transport Client, and as a > result there can be some compatibility issues between multiple versions of ES > clusters. 
The REST API is much more standard between versions, so it would be > nice to have ES processors that use the REST API, to enable things like > migration from an Elasticsearch cluster with an older version to a cluster > with a newer version. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359176#comment-15359176 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69320905 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359172#comment-15359172 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69320695 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/pom.xml --- @@ -56,6 +56,11 @@ language governing permissions and limitations under the License. --> ${es.version} +com.squareup.okhttp +okhttp +2.7.1 --- End diff -- That's fair but in response to an issue found by a user on the mail list[1] invokeHttp will need to be updated anyway. So might as well code this on OkHttp v3 [1] https://issues.apache.org/jira/browse/NIFI-2162 > Add Elasticsearch processors that use the REST API > -- > > Key: NIFI-2068 > URL: https://issues.apache.org/jira/browse/NIFI-2068 > Project: Apache NiFi > Issue Type: Improvement >Reporter: Matt Burgess >Assignee: Matt Burgess > Fix For: 1.0.0 > > > The current Elasticsearch processors use the Transport Client, and as a > result there can be some compatibility issues between multiple versions of ES > clusters. The REST API is much more standard between versions, so it would be > nice to have ES processors that use the REST API, to enable things like > migration from an Elasticsearch cluster with an older version to a cluster > with a newer version. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359171#comment-15359171 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69320414 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/test/java/org/apache/nifi/processors/elasticsearch/TestFetchElasticsearchHttp.java --- @@ -0,0 +1,292 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.Call; +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Protocol; +import com.squareup.okhttp.Request; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.ssl.SSLContextService; +import org.apache.nifi.util.MockFlowFile; +import org.apache.nifi.util.TestRunner; +import org.apache.nifi.util.TestRunners; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; + +import static org.junit.Assert.assertNotNull; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TestFetchElasticsearchHttp { + +private InputStream docExample; +private TestRunner runner; + +@Before +public void setUp() throws IOException { +ClassLoader classloader = Thread.currentThread().getContextClassLoader(); +docExample = classloader.getResourceAsStream("DocumentExample.json"); --- End diff -- Not necessary. But historically when I have the content in the test I get review comments saying to put them in a file, and vice versa :P > Add Elasticsearch processors that use the REST API > -- > > Key: NIFI-2068 > URL: https://issues.apache.org/jira/browse/NIFI-2068 > Project: Apache NiFi > Issue Type: Improvement >Reporter: Matt Burgess >Assignee: Matt Burgess > Fix For: 1.0.0 > > > The current Elasticsearch processors use the Transport Client, and as a > result there can be some compatibility issues between multiple versions of ES > clusters. 
The REST API is much more standard between versions, so it would be > nice to have ES processors that use the REST API, to enable things like > migration from an Elasticsearch cluster with an older version to a cluster > with a newer version. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Created] (NIFI-2163) Nifi Service does not follow LSB Service Spec
Edgardo Vega created NIFI-2163: -- Summary: Nifi Service does not follow LSB Service Spec Key: NIFI-2163 URL: https://issues.apache.org/jira/browse/NIFI-2163 Project: Apache NiFi Issue Type: Bug Components: Configuration Affects Versions: 0.7.0 Environment: Centos Reporter: Edgardo Vega Priority: Critical Trying to use the latest off master with nifi.sh and nifi-env.sh and they do not follow the spec for services, which causes some configuration tools not to work as they use the return codes to determine if things are running. http://refspecs.linuxbase.org/LSB_3.1.0/LSB-Core-generic/LSB-Core-generic/iniscrptact.html -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359162#comment-15359162 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69319909 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359163#comment-15359163 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69319929 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/test/java/org/apache/nifi/processors/elasticsearch/TestFetchElasticsearchHttp.java --- @@ -0,0 +1,292 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.Call; +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Protocol; +import com.squareup.okhttp.Request; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.ssl.SSLContextService; +import org.apache.nifi.util.MockFlowFile; +import org.apache.nifi.util.TestRunner; +import org.apache.nifi.util.TestRunners; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; + +import static org.junit.Assert.assertNotNull; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TestFetchElasticsearchHttp { + +private InputStream docExample; +private TestRunner runner; + +@Before +public void setUp() throws IOException { +ClassLoader classloader = Thread.currentThread().getContextClassLoader(); +docExample = classloader.getResourceAsStream("DocumentExample.json"); --- End diff -- Is it necessary to have a document on disk? Just hesitant regarding OS issues. > Add Elasticsearch processors that use the REST API > -- > > Key: NIFI-2068 > URL: https://issues.apache.org/jira/browse/NIFI-2068 > Project: Apache NiFi > Issue Type: Improvement >Reporter: Matt Burgess >Assignee: Matt Burgess > Fix For: 1.0.0 > > > The current Elasticsearch processors use the Transport Client, and as a > result there can be some compatibility issues between multiple versions of ES > clusters. 
The REST API is much more standard between versions, so it would be > nice to have ES processors that use the REST API, to enable things like > migration from an Elasticsearch cluster with an older version to a cluster > with a newer version. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359161#comment-15359161 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69319708 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359155#comment-15359155 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69319591 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359154#comment-15359154 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69319479 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359152#comment-15359152 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69319085 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359149#comment-15359149 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69318837 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359150#comment-15359150 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69318859 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359148#comment-15359148 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69318800 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractElasticsearchProcessor.java --- @@ -17,129 +17,25 @@ package org.apache.nifi.processors.elasticsearch; import org.apache.nifi.components.PropertyDescriptor; -import org.apache.nifi.components.ValidationContext; -import org.apache.nifi.components.ValidationResult; -import org.apache.nifi.components.Validator; -import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.ssl.SSLContextService; -import org.apache.nifi.util.StringUtils; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; - -import java.io.File; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.net.InetSocketAddress; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLClassLoader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; - +/** + * A base class for all Elasticsearch processors + */ public abstract class AbstractElasticsearchProcessor extends 
AbstractProcessor { -/** - * This validator ensures the Elasticsearch hosts property is a valid list of hostname:port entries - */ -private static final Validator HOSTNAME_PORT_VALIDATOR = new Validator() { -@Override -public ValidationResult validate(final String subject, final String input, final ValidationContext context) { -final List esList = Arrays.asList(input.split(",")); -for (String hostnamePort : esList) { -String[] addresses = hostnamePort.split(":"); -// Protect against invalid input like http://127.0.0.1:9300 (URL scheme should not be there) -if (addresses.length != 2) { -return new ValidationResult.Builder().subject(subject).input(input).explanation( -"Must be in hostname:port form (no scheme such as http://;).valid(false).build(); -} -} -return new ValidationResult.Builder().subject(subject).input(input).explanation( -"Valid cluster definition").valid(true).build(); -} -}; - -protected static final PropertyDescriptor CLUSTER_NAME = new PropertyDescriptor.Builder() -.name("Cluster Name") -.description("Name of the ES cluster (for example, elasticsearch_brew). Defaults to 'elasticsearch'") -.required(true) -.addValidator(StandardValidators.NON_EMPTY_VALIDATOR) -.defaultValue("elasticsearch") -.build(); - -protected static final PropertyDescriptor HOSTS = new PropertyDescriptor.Builder() -.name("ElasticSearch Hosts") -.description("ElasticSearch Hosts, which should be comma separated and colon for hostname/port " -+ "host1:port,host2:port, For example testcluster:9300.") -.required(true) -.expressionLanguageSupported(false) -.addValidator(HOSTNAME_PORT_VALIDATOR) -.build(); - public static final PropertyDescriptor PROP_SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder() .name("SSL Context Service") .description("The SSL Context Service used to provide client certificate information for TLS/SSL " -+ "connections. This service only applies if the Shield plugin is available.") ++ "connections. 
This service only applies if the Elasticsearch endpoints have been protected by SSL.") --- End diff -- I've already changed that
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359146#comment-15359146 ] ASF GitHub Bot commented on NIFI-2068: -- Github user mattyb149 commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69318666 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/pom.xml --- @@ -56,6 +56,11 @@ language governing permissions and limitations under the License. --> ${es.version} +com.squareup.okhttp +okhttp +2.7.1 --- End diff -- Mostly to be able to borrow code from InvokeHttp, and in the hopes that we might have a single "common" NAR with oft-used libraries someday, rather than multiple versions of OkHttp (etc.) in various NARs. I will update the version (and the code that uses the API) to latest > Add Elasticsearch processors that use the REST API > -- > > Key: NIFI-2068 > URL: https://issues.apache.org/jira/browse/NIFI-2068 > Project: Apache NiFi > Issue Type: Improvement >Reporter: Matt Burgess >Assignee: Matt Burgess > Fix For: 1.0.0 > > > The current Elasticsearch processors use the Transport Client, and as a > result there can be some compatibility issues between multiple versions of ES > clusters. The REST API is much more standard between versions, so it would be > nice to have ES processors that use the REST API, to enable things like > migration from an Elasticsearch cluster with an older version to a cluster > with a newer version. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359141#comment-15359141 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69318008 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359137#comment-15359137 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69317662 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359133#comment-15359133 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69317330 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359132#comment-15359132 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69317281 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359131#comment-15359131 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69317035 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359127#comment-15359127 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69316350 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359125#comment-15359125 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69316019 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java --- @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.MediaType; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.RequestBody; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.io.IOUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.components.ValidationContext; +import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.expression.AttributeExpression; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.apache.nifi.util.StringUtils; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; + +import java.io.IOException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.trimToEmpty; + + +@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "insert", "update", "upsert", "delete", "write", "put", "http"}) +@CapabilityDescription("Writes the 
contents of a FlowFile to Elasticsearch, using the specified parameters such as " ++ "the index to insert into and the type of the document.") +public class PutElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be written to Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") +.build(); + +public static final PropertyDescriptor ID_ATTRIBUTE = new PropertyDescriptor.Builder() +.name("Identifier Attribute") +.description("The name of the FlowFile attribute containing the identifier for the document. If the Index Operation is \"index\", " ++ "this property may be left empty or
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359112#comment-15359112 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69314307 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359111#comment-15359111 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69314193 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359109#comment-15359109 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69313807 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-906) Docs do not display well on mobile/smaller res devices
[ https://issues.apache.org/jira/browse/NIFI-906?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359104#comment-15359104 ] ASF GitHub Bot commented on NIFI-906: - GitHub user ijokarumawak opened a pull request: https://github.com/apache/nifi/pull/600 NIFI-906: Make NiFi docs responsive - Switched from absolute layout to responsive layout using flex-box. - Added toggle switch to control component listing pane visibility. - Tested with MacBook and Android phone - Screenshots are attached with [NIFI-906](https://issues.apache.org/jira/browse/NIFI-906) I am not sure if it's rendered correctly at [Apache NiFi docs](http://nifi.apache.org/docs.html) page, since I don't know how to test. You can merge this pull request into a Git repository by running: $ git pull https://github.com/ijokarumawak/nifi nifi-906-flexbox Alternatively you can review and apply these changes as the patch at: https://github.com/apache/nifi/pull/600.patch To close this pull request, make a commit to your master/trunk branch with (at least) the following in the commit message: This closes #600 commit e12f05f58a6fae17d2a5267b6dd8aacef4528591 Author: Koji Kawamura Date: 2016-07-01T01:34:14Z NIFI-906: Make NiFi docs responsive - Switched from absolute layout to responsive layout using flex-box. - Added toggle switch to control component listing pane visibility. 
> Docs do not display well on mobile/smaller res devices > -- > > Key: NIFI-906 > URL: https://issues.apache.org/jira/browse/NIFI-906 > Project: Apache NiFi > Issue Type: Bug > Components: Documentation & Website > Environment: Mobile, Small res devices >Reporter: Joseph Witt >Assignee: Koji Kawamura > Attachments: pc-hide-list.png, pc.png, smartphone-hide-list.png, > smartphone.png > > > Tweet from @cyroxx > @apachenifi Unfortunately, the docs (http://nifi.apache.org/docs.html ) are > not mobile-friendly, makes it hard to read about this interesting project > https://twitter.com/cyroxx/status/637305973938483200 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359101#comment-15359101 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69313182 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Updated] (NIFI-906) Docs do not display well on mobile/smaller res devices
[ https://issues.apache.org/jira/browse/NIFI-906?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Koji Kawamura updated NIFI-906: --- Attachment: smartphone.png smartphone-hide-list.png pc.png pc-hide-list.png Finished making the docs to display well on mobile/smaller devices using flex-box. Please see attached images. > Docs do not display well on mobile/smaller res devices > -- > > Key: NIFI-906 > URL: https://issues.apache.org/jira/browse/NIFI-906 > Project: Apache NiFi > Issue Type: Bug > Components: Documentation & Website > Environment: Mobile, Small res devices >Reporter: Joseph Witt >Assignee: Koji Kawamura > Attachments: pc-hide-list.png, pc.png, smartphone-hide-list.png, > smartphone.png > > > Tweet from @cyroxx > @apachenifi Unfortunately, the docs (http://nifi.apache.org/docs.html ) are > not mobile-friendly, makes it hard to read about this interesting project > https://twitter.com/cyroxx/status/637305973938483200 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359093#comment-15359093 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69312668 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Updated] (NIFI-906) Docs do not display well on mobile/smaller res devices
[ https://issues.apache.org/jira/browse/NIFI-906?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Koji Kawamura updated NIFI-906: --- Attachment: (was: small-display-hide-list.png) > Docs do not display well on mobile/smaller res devices > -- > > Key: NIFI-906 > URL: https://issues.apache.org/jira/browse/NIFI-906 > Project: Apache NiFi > Issue Type: Bug > Components: Documentation & Website > Environment: Mobile, Small res devices >Reporter: Joseph Witt >Assignee: Koji Kawamura > > Tweet from @cyroxx > @apachenifi Unfortunately, the docs (http://nifi.apache.org/docs.html ) are > not mobile-friendly, makes it hard to read about this interesting project > https://twitter.com/cyroxx/status/637305973938483200 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Updated] (NIFI-906) Docs do not display well on mobile/smaller res devices
[ https://issues.apache.org/jira/browse/NIFI-906?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Koji Kawamura updated NIFI-906: --- Attachment: (was: large-display-hide-list.png) > Docs do not display well on mobile/smaller res devices > -- > > Key: NIFI-906 > URL: https://issues.apache.org/jira/browse/NIFI-906 > Project: Apache NiFi > Issue Type: Bug > Components: Documentation & Website > Environment: Mobile, Small res devices >Reporter: Joseph Witt >Assignee: Koji Kawamura > > Tweet from @cyroxx > @apachenifi Unfortunately, the docs (http://nifi.apache.org/docs.html ) are > not mobile-friendly, makes it hard to read about this interesting project > https://twitter.com/cyroxx/status/637305973938483200 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Updated] (NIFI-906) Docs do not display well on mobile/smaller res devices
[ https://issues.apache.org/jira/browse/NIFI-906?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Koji Kawamura updated NIFI-906: --- Attachment: (was: small-display.png) > Docs do not display well on mobile/smaller res devices > -- > > Key: NIFI-906 > URL: https://issues.apache.org/jira/browse/NIFI-906 > Project: Apache NiFi > Issue Type: Bug > Components: Documentation & Website > Environment: Mobile, Small res devices >Reporter: Joseph Witt >Assignee: Koji Kawamura > > Tweet from @cyroxx > @apachenifi Unfortunately, the docs (http://nifi.apache.org/docs.html ) are > not mobile-friendly, makes it hard to read about this interesting project > https://twitter.com/cyroxx/status/637305973938483200 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Updated] (NIFI-906) Docs do not display well on mobile/smaller res devices
[ https://issues.apache.org/jira/browse/NIFI-906?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Koji Kawamura updated NIFI-906: --- Attachment: (was: large-display.png) > Docs do not display well on mobile/smaller res devices > -- > > Key: NIFI-906 > URL: https://issues.apache.org/jira/browse/NIFI-906 > Project: Apache NiFi > Issue Type: Bug > Components: Documentation & Website > Environment: Mobile, Small res devices >Reporter: Joseph Witt >Assignee: Koji Kawamura > > Tweet from @cyroxx > @apachenifi Unfortunately, the docs (http://nifi.apache.org/docs.html ) are > not mobile-friendly, makes it hard to read about this interesting project > https://twitter.com/cyroxx/status/637305973938483200 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359089#comment-15359089 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69312240 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Assigned] (NIFI-906) Docs do not display well on mobile/smaller res devices
[ https://issues.apache.org/jira/browse/NIFI-906?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Koji Kawamura reassigned NIFI-906: -- Assignee: Koji Kawamura > Docs do not display well on mobile/smaller res devices > -- > > Key: NIFI-906 > URL: https://issues.apache.org/jira/browse/NIFI-906 > Project: Apache NiFi > Issue Type: Bug > Components: Documentation & Website > Environment: Mobile, Small res devices >Reporter: Joseph Witt >Assignee: Koji Kawamura > Attachments: large-display-hide-list.png, large-display.png, > small-display-hide-list.png, small-display.png > > > Tweet from @cyroxx > @apachenifi Unfortunately, the docs (http://nifi.apache.org/docs.html ) are > not mobile-friendly, makes it hard to read about this interesting project > https://twitter.com/cyroxx/status/637305973938483200 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Assigned] (NIFI-2160) Enabled ControllerServices disabled on restart
[ https://issues.apache.org/jira/browse/NIFI-2160?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Oleg Zhurakousky reassigned NIFI-2160: -- Assignee: Oleg Zhurakousky > Enabled ControllerServices disabled on restart > -- > > Key: NIFI-2160 > URL: https://issues.apache.org/jira/browse/NIFI-2160 > Project: Apache NiFi > Issue Type: Bug >Affects Versions: 0.7.0 >Reporter: Brandon DeVries >Assignee: Oleg Zhurakousky >Priority: Critical > Fix For: 0.7.0 > > > As a result of the fix for NIFI-2032, *previously enabled ControllerServices > become disabled after a restart* if they are not referenced by another > component. However, we use a custom domain specific language that can > reference a controller service from a query defined as a custom processor's > property. This means that we use a number of controller services that are > only used in this way (i.e. are never directly referred to by another > component). Upon restart, these are now disabled causing issues with our > flows. > I have not yet stepped through the new enableControllerServices() \[1\] > method to figure out exactly where the issue is coming from, but I wanted to > get the ticket out there and on the radar, as this breaks backwards > compatibility on a feature we heavily rely on. > \[1\] > https://github.com/apache/nifi/blob/0.x/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java#L301-336 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359083#comment-15359083 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69311424 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-2160) Enabled ControllerServices disabled on restart
[ https://issues.apache.org/jira/browse/NIFI-2160?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359081#comment-15359081 ] Brandon DeVries commented on NIFI-2160: --- My statements which you refer to in your first paragraph were simply for background (to preempt the question "if they aren't being referenced, why does it matter that they're disabled?"). If you want to argue that "it would be the responsibility of your custom DSL to enable the CS if it is disabled", that is a conversation that can be had. However, a feature that was available no longer is, thus breaking backwards compatibility on a non-major release. This is a (critical) problem. As to your second paragraph... yes, the bug is that a "CS (regardless of its relationship to anything) was in ENABLED state when NiFi was shut down and is DISABLED upon restart of NiFi". That could be the entire description of the ticket... the rest is just context. If you're going to take on the ticket, that's fantastic (and appreciated). Thanks. > Enabled ControllerServices disabled on restart > -- > > Key: NIFI-2160 > URL: https://issues.apache.org/jira/browse/NIFI-2160 > Project: Apache NiFi > Issue Type: Bug >Affects Versions: 0.7.0 >Reporter: Brandon DeVries >Priority: Critical > Fix For: 0.7.0 > > > As a result of the fix for NIFI-2032, *previously enabled ControllerServices > become disabled after a restart* if they are not referenced by another > component. However, we use a custom domain specific language that can > reference a controller service from a query defined as a custom processor's > property. This means that we use a number of controller services that are > only used in this way (i.e. are never directly referred to by another > component). Upon restart, these are now disabled causing issues with our > flows. 
> I have not yet stepped through the new enableControllerServices() \[1\] > method to figure out exactly where the issue is coming from, but I wanted to > get the ticket out there and on the radar, as this breaks backwards > compatibility on a feature we heavily rely on. > \[1\] > https://github.com/apache/nifi/blob/0.x/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java#L301-336 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359061#comment-15359061 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69309177 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Updated] (NIFI-2160) Enabled ControllerServices disabled on restart
[ https://issues.apache.org/jira/browse/NIFI-2160?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Brandon DeVries updated NIFI-2160: -- Description: As a result of the fix for NIFI-2032, *previously enabled ControllerServices become disabled after a restart* if they are not referenced by another component. However, we use a custom domain specific language that can reference a controller service from a query defined as a custom processor's property. This means that we use a number of controller services that are only used in this way (i.e. are never directly referred to by another component). Upon restart, these are now disabled causing issues with our flows. I have not yet stepped through the new enableControllerServices() \[1\] method to figure out exactly where the issue is coming from, but I wanted to get the ticket out there and on the radar, as this breaks backwards compatibility on a feature we heavily rely on. \[1\] https://github.com/apache/nifi/blob/0.x/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java#L301-336 was: As a result of the fix for NIFI-2032, previously enabled ControllerServices become disabled after a restart if they are not referenced by another component. However, we use a custom domain specific language that can reference a controller service from a query defined as a custom processor's property. This means that we use a number of controller services that are only used in this way (i.e. are never directly referred to by another component). Upon restart, these are now disabled causing issues with our flows. I have not yet stepped through the new enableControllerServices() \[1\] method to figure out exactly where the issue is coming from, but I wanted to get the ticket out there and on the radar, as this breaks backwards compatibility on a feature we heavily rely on. 
\[1\] https://github.com/apache/nifi/blob/0.x/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java#L301-336 > Enabled ControllerServices disabled on restart > -- > > Key: NIFI-2160 > URL: https://issues.apache.org/jira/browse/NIFI-2160 > Project: Apache NiFi > Issue Type: Bug >Affects Versions: 0.7.0 >Reporter: Brandon DeVries >Priority: Critical > Fix For: 0.7.0 > > > As a result of the fix for NIFI-2032, *previously enabled ControllerServices > become disabled after a restart* if they are not referenced by another > component. However, we use a custom domain specific langauge that can > reference a controller service from a query defined as a custom processor's > property. This means that we use a number of controller service that are > only used in this way (i.e. are never directly referred to by another > component). Upon restart, these are now disabled causing issues with our > flows. > I have not yet stepped through the new enableControllerServices() \[1\] > method to figure out exactly where the issue is coming from, but I wanted to > get the ticket out there and on the radar, as this breaks backwards > compatibility on a feature we heavily rely on. > \[1\] > https://github.com/apache/nifi/blob/0.x/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java#L301-336 -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359057#comment-15359057 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69308812 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java --- @@ -0,0 +1,293 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.elasticsearch; + +import com.squareup.okhttp.HttpUrl; +import com.squareup.okhttp.OkHttpClient; +import com.squareup.okhttp.Response; +import com.squareup.okhttp.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.apache.nifi.annotation.behavior.EventDriven; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.annotation.lifecycle.OnScheduled; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.stream.io.ByteArrayInputStream; +import org.codehaus.jackson.JsonNode; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) +@EventDriven +@SupportsBatching +@Tags({"elasticsearch", "fetch", "read", "get", "http"}) +@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the " ++ "identifier of the document to retrieve.") +@WritesAttributes({ +@WritesAttribute(attribute = "filename", description = "The 
filename attributes is set to the document identifier"), +@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"), +@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type") +}) +public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor { + +private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include"; + +public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") +.description("All FlowFiles that are read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") +.description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship").build(); + +public static final Relationship REL_RETRY = new Relationship.Builder().name("retry") +.description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may succeed") +.build(); + +public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found") +
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359026#comment-15359026 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69306232 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/pom.xml --- @@ -56,6 +56,11 @@ language governing permissions and limitations under the License. --> ${es.version} +com.squareup.okhttp +okhttp +2.7.1 --- End diff -- What's the motivation for using this version of OkHttp instead of the latest[1][2]? [1] https://github.com/square/okhttp/tree/parent-3.3.1 [2] https://mvnrepository.com/artifact/com.squareup.okhttp3/okhttp > Add Elasticsearch processors that use the REST API > -- > > Key: NIFI-2068 > URL: https://issues.apache.org/jira/browse/NIFI-2068 > Project: Apache NiFi > Issue Type: Improvement >Reporter: Matt Burgess >Assignee: Matt Burgess > Fix For: 1.0.0 > > > The current Elasticsearch processors use the Transport Client, and as a > result there can be some compatibility issues between multiple versions of ES > clusters. The REST API is much more standard between versions, so it would be > nice to have ES processors that use the REST API, to enable things like > migration from an Elasticsearch cluster with an older version to a cluster > with a newer version. -- This message was sent by Atlassian JIRA (v6.3.4#6332)
[jira] [Commented] (NIFI-2068) Add Elasticsearch processors that use the REST API
[ https://issues.apache.org/jira/browse/NIFI-2068?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=15359038#comment-15359038 ] ASF GitHub Bot commented on NIFI-2068: -- Github user JPercivall commented on a diff in the pull request: https://github.com/apache/nifi/pull/576#discussion_r69307262 --- Diff: nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractElasticsearchProcessor.java --- @@ -17,129 +17,25 @@ package org.apache.nifi.processors.elasticsearch; import org.apache.nifi.components.PropertyDescriptor; -import org.apache.nifi.components.ValidationContext; -import org.apache.nifi.components.ValidationResult; -import org.apache.nifi.components.Validator; -import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.ssl.SSLContextService; -import org.apache.nifi.util.StringUtils; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; - -import java.io.File; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.net.InetSocketAddress; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLClassLoader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; - +/** + * A base class for all Elasticsearch processors + */ public abstract class AbstractElasticsearchProcessor extends 
AbstractProcessor { -/** - * This validator ensures the Elasticsearch hosts property is a valid list of hostname:port entries - */ -private static final Validator HOSTNAME_PORT_VALIDATOR = new Validator() { -@Override -public ValidationResult validate(final String subject, final String input, final ValidationContext context) { -final List esList = Arrays.asList(input.split(",")); -for (String hostnamePort : esList) { -String[] addresses = hostnamePort.split(":"); -// Protect against invalid input like http://127.0.0.1:9300 (URL scheme should not be there) -if (addresses.length != 2) { -return new ValidationResult.Builder().subject(subject).input(input).explanation( -"Must be in hostname:port form (no scheme such as http://;).valid(false).build(); -} -} -return new ValidationResult.Builder().subject(subject).input(input).explanation( -"Valid cluster definition").valid(true).build(); -} -}; - -protected static final PropertyDescriptor CLUSTER_NAME = new PropertyDescriptor.Builder() -.name("Cluster Name") -.description("Name of the ES cluster (for example, elasticsearch_brew). Defaults to 'elasticsearch'") -.required(true) -.addValidator(StandardValidators.NON_EMPTY_VALIDATOR) -.defaultValue("elasticsearch") -.build(); - -protected static final PropertyDescriptor HOSTS = new PropertyDescriptor.Builder() -.name("ElasticSearch Hosts") -.description("ElasticSearch Hosts, which should be comma separated and colon for hostname/port " -+ "host1:port,host2:port, For example testcluster:9300.") -.required(true) -.expressionLanguageSupported(false) -.addValidator(HOSTNAME_PORT_VALIDATOR) -.build(); - public static final PropertyDescriptor PROP_SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder() .name("SSL Context Service") .description("The SSL Context Service used to provide client certificate information for TLS/SSL " -+ "connections. This service only applies if the Shield plugin is available.") ++ "connections. 
This service only applies if the Elasticsearch endpoints have been protected by SSLShield plugin is available.") --- End diff -- This sentence doesn't really