Repository: nifi
Updated Branches:
  refs/heads/master 2a90bd501 -> e748fd584


http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/NOTICE
----------------------------------------------------------------------
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/NOTICE
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/NOTICE
new file mode 100644
index 0000000..6fb6e51
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/NOTICE
@@ -0,0 +1,334 @@
+nifi-hbase_1_1_2-client-service-nar
+Copyright 2014-2015 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+
+******************
+Apache Software License v2
+******************
+
+ (ASLv2) Apache Commons CLI
+    The following NOTICE information applies:
+      Apache Commons CLI
+      Copyright 2001-2009 The Apache Software Foundation
+
+  (ASLv2) Apache Curator
+    The following NOTICE information applies:
+      Curator Framework
+      Copyright 2011-2014 The Apache Software Foundation
+
+      Curator Client
+      Copyright 2011-2014 The Apache Software Foundation
+
+      Curator Recipes
+      Copyright 2011-2014 The Apache Software Foundation
+
+  (ASLv2) Apache Directory Server
+    The following NOTICE information applies:
+      ApacheDS Protocol Kerberos Codec
+      Copyright 2003-2013 The Apache Software Foundation
+
+      ApacheDS I18n
+      Copyright 2003-2013 The Apache Software Foundation
+
+      Apache Directory API ASN.1 API
+      Copyright 2003-2013 The Apache Software Foundation
+
+      Apache Directory LDAP API Utilities
+      Copyright 2003-2013 The Apache Software Foundation
+
+  (ASLv2) Apache Commons Math
+    The following NOTICE information applies:
+      Apache Commons Math
+      Copyright 2001-2012 The Apache Software Foundation
+
+      This product includes software developed by
+      The Apache Software Foundation (http://www.apache.org/).
+
+      
===============================================================================
+
+      The BracketFinder (package 
org.apache.commons.math3.optimization.univariate)
+      and PowellOptimizer (package 
org.apache.commons.math3.optimization.general)
+      classes are based on the Python code in module "optimize.py" (version 
0.5)
+      developed by Travis E. Oliphant for the SciPy library 
(http://www.scipy.org/)
+      Copyright © 2003-2009 SciPy Developers.
+      
===============================================================================
+
+      The LinearConstraint, LinearObjectiveFunction, LinearOptimizer,
+      RelationShip, SimplexSolver and SimplexTableau classes in package
+      org.apache.commons.math3.optimization.linear include software developed 
by
+      Benjamin McCann (http://www.benmccann.com) and distributed with
+      the following copyright: Copyright 2009 Google Inc.
+      
===============================================================================
+
+      This product includes software developed by the
+      University of Chicago, as Operator of Argonne National
+      Laboratory.
+      The LevenbergMarquardtOptimizer class in package
+      org.apache.commons.math3.optimization.general includes software
+      translated from the lmder, lmpar and qrsolv Fortran routines
+      from the Minpack package
+      Minpack Copyright Notice (1999) University of Chicago.  All rights 
reserved
+      
===============================================================================
+
+      The GraggBulirschStoerIntegrator class in package
+      org.apache.commons.math3.ode.nonstiff includes software translated
+      from the odex Fortran routine developed by E. Hairer and G. Wanner.
+      Original source copyright:
+      Copyright (c) 2004, Ernst Hairer
+      
===============================================================================
+
+      The EigenDecompositionImpl class in package
+      org.apache.commons.math3.linear includes software translated
+      from some LAPACK Fortran routines.  Original source copyright:
+      Copyright (c) 1992-2008 The University of Tennessee.  All rights 
reserved.
+      
===============================================================================
+
+      The MersenneTwister class in package org.apache.commons.math3.random
+      includes software translated from the 2002-01-26 version of
+      the Mersenne-Twister generator written in C by Makoto Matsumoto and 
Takuji
+      Nishimura. Original source copyright:
+      Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,
+      All rights reserved
+      
===============================================================================
+
+      The LocalizedFormatsTest class in the unit tests is an adapted version of
+      the OrekitMessagesTest class from the orekit library distributed under 
the
+      terms of the Apache 2 licence. Original source copyright:
+      Copyright 2010 CS Systèmes d'Information
+      
===============================================================================
+
+      The HermiteInterpolator class and its corresponding test have been 
imported from
+      the orekit library distributed under the terms of the Apache 2 licence. 
Original
+      source copyright:
+      Copyright 2010-2012 CS Systèmes d'Information
+      
===============================================================================
+
+      The creation of the package "o.a.c.m.analysis.integration.gauss" was 
inspired
+      by an original code donated by Sébastien Brisard.
+      
===============================================================================
+
+  (ASLv2) Apache Jakarta HttpClient
+    The following NOTICE information applies:
+      Apache Jakarta HttpClient
+      Copyright 1999-2007 The Apache Software Foundation
+
+  (ASLv2) Apache Commons Codec
+    The following NOTICE information applies:
+      Apache Commons Codec
+      Copyright 2002-2014 The Apache Software Foundation
+
+      src/test/org/apache/commons/codec/language/DoubleMetaphoneTest.java
+      contains test data from http://aspell.net/test/orig/batch0.tab.
+      Copyright (C) 2002 Kevin Atkinson (kevina@gnu.org)
+
+      
===============================================================================
+
+      The content of package org.apache.commons.codec.language.bm has been 
translated
+      from the original php source code available at 
http://stevemorse.org/phoneticinfo.htm
+      with permission from the original authors.
+      Original source copyright:
+      Copyright (c) 2008 Alexander Beider & Stephen P. Morse.
+
+  (ASLv2) Apache Commons IO
+    The following NOTICE information applies:
+      Apache Commons IO
+      Copyright 2002-2012 The Apache Software Foundation
+
+  (ASLv2) Apache Commons Net
+    The following NOTICE information applies:
+      Apache Commons Net
+      Copyright 2001-2013 The Apache Software Foundation
+
+  (ASLv2) Apache Commons Collections
+    The following NOTICE information applies:
+      Apache Commons Collections
+      Copyright 2001-2008 The Apache Software Foundation
+
+  (ASLv2) Jettison
+    The following NOTICE information applies:
+         Copyright 2006 Envoi Solutions LLC
+
+  (ASLv2) Apache Commons Logging
+    The following NOTICE information applies:
+      Apache Commons Logging
+      Copyright 2003-2013 The Apache Software Foundation
+
+  (ASLv2) Apache Commons Lang
+    The following NOTICE information applies:
+      Apache Commons Lang
+      Copyright 2001-2011 The Apache Software Foundation
+
+  (ASLv2) Apache log4j
+    The following NOTICE information applies:
+      Apache log4j
+      Copyright 2007 The Apache Software Foundation
+
+  (ASLv2) Apache HttpComponents
+    The following NOTICE information applies:
+      Apache HttpClient
+      Copyright 1999-2015 The Apache Software Foundation
+
+      Apache HttpComponents HttpCore
+      Copyright 2005-2011 The Apache Software Foundation
+
+  (ASLv2) Apache Commons Configuration
+    The following NOTICE information applies:
+      Apache Commons Configuration
+      Copyright 2001-2008 The Apache Software Foundation
+
+  (ASLv2) Apache Jakarta Commons Digester
+    The following NOTICE information applies:
+      Apache Jakarta Commons Digester
+      Copyright 2001-2006 The Apache Software Foundation
+
+  (ASLv2) Apache Commons BeanUtils
+    The following NOTICE information applies:
+      Apache Commons BeanUtils
+      Copyright 2000-2008 The Apache Software Foundation
+
+  (ASLv2) Apache Avro
+    The following NOTICE information applies:
+      Apache Avro
+      Copyright 2009-2013 The Apache Software Foundation
+
+  (ASLv2) Snappy Java
+    The following NOTICE information applies:
+      This product includes software developed by Google
+       Snappy: http://code.google.com/p/snappy/ (New BSD License)
+
+      This product includes software developed by Apache
+       PureJavaCrc32C from apache-hadoop-common http://hadoop.apache.org/
+       (Apache 2.0 license)
+
+      This library containd statically linked libstdc++. This inclusion is 
allowed by
+      "GCC RUntime Library Exception"
+      http://gcc.gnu.org/onlinedocs/libstdc++/manual/license.html
+
+  (ASLv2) ApacheDS
+    The following NOTICE information applies:
+      ApacheDS
+      Copyright 2003-2013 The Apache Software Foundation
+
+  (ASLv2) Apache ZooKeeper
+    The following NOTICE information applies:
+      Apache ZooKeeper
+      Copyright 2009-2012 The Apache Software Foundation
+
+  (ASLv2) Apache Commons Compress
+    The following NOTICE information applies:
+      Apache Commons Compress
+      Copyright 2002-2014 The Apache Software Foundation
+
+      The files in the package org.apache.commons.compress.archivers.sevenz
+      were derived from the LZMA SDK, version 9.20 (C/ and CPP/7zip/),
+      which has been placed in the public domain:
+
+      "LZMA SDK is placed in the public domain." 
(http://www.7-zip.org/sdk.html)
+
+  (ASLv2) Apache Commons Daemon
+    The following NOTICE information applies:
+      Apache Commons Daemon
+      Copyright 1999-2013 The Apache Software Foundation
+
+  (ASLv2) The Netty Project
+    The following NOTICE information applies:
+      The Netty Project
+      Copyright 2011 The Netty Project
+
+  (ASLv2) Apache Xerces Java
+    The following NOTICE information applies:
+      Apache Xerces Java
+      Copyright 1999-2007 The Apache Software Foundation
+
+      This product includes software developed at
+      The Apache Software Foundation (http://www.apache.org/).
+
+      Portions of this software were originally based on the following:
+        - software copyright (c) 1999, IBM Corporation., http://www.ibm.com.
+        - software copyright (c) 1999, Sun Microsystems., http://www.sun.com.
+        - voluntary contributions made by Paul Eng on behalf of the
+          Apache Software Foundation that were originally developed at iClick, 
Inc.,
+          software copyright (c) 1999.
+
+  (ASLv2) Google Guice
+    The following NOTICE information applies:
+      Google Guice - Core Library
+      Copyright 2006-2011 Google, Inc.
+
+      Google Guice - Extensions - Servlet
+      Copyright 2006-2011 Google, Inc.
+
+  (ASLv2) HBase Common
+      The following NOTICE information applies:
+        This product includes portions of the Guava project v14, specifically
+        
'hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java'
+
+        Copyright (C) 2007 The Guava Authors
+
+        Licensed under the Apache License, Version 2.0
+
+  (ASLv2) HTrace Core
+    The following NOTICE information applies:
+      In addition, this product includes software dependencies. See
+      the accompanying LICENSE.txt for a listing of dependencies
+      that are NOT Apache licensed (with pointers to their licensing)
+
+      Apache HTrace includes an Apache Thrift connector to Zipkin. Zipkin
+      is a distributed tracing system that is Apache 2.0 Licensed.
+      Copyright 2012 Twitter, Inc.
+
+  (ASLv2) Jackson Core ASL
+      The following NOTICE information applies:
+        This product currently only contains code developed by authors
+        of specific components, as identified by the source code files;
+        if such notes are missing files have been created by
+        Tatu Saloranta.
+
+        For additional credits (generally to people who reported problems)
+        see CREDITS file.
+
+    (ASLv2) Jackson Mapper ASL
+      The following NOTICE information applies:
+        This product currently only contains code developed by authors
+        of specific components, as identified by the source code files;
+        if such notes are missing files have been created by
+        Tatu Saloranta.
+
+        For additional credits (generally to people who reported problems)
+        see CREDITS file.
+
+************************
+Common Development and Distribution License 1.1
+************************
+
+The following binary components are provided under the Common Development and 
Distribution License 1.1. See project link for details.
+
+    (CDDL 1.1) (GPL2 w/ CPE) jersey-server 
(com.sun.jersey:jersey-server:jar:1.19 - https://jersey.java.net/jersey-server/)
+    (CDDL 1.1) (GPL2 w/ CPE) jersey-client 
(com.sun.jersey:jersey-client:jar:1.19 - https://jersey.java.net/jersey-client/)
+    (CDDL 1.1) (GPL2 w/ CPE) jersey-core (com.sun.jersey:jersey-core:jar:1.19 
- https://jersey.java.net/jersey-core/)
+    (CDDL 1.1) (GPL2 w/ CPE) jersey-json (com.sun.jersey:jersey-json:jar:1.19 
- https://jersey.java.net/index.html)
+    (CDDL 1.1) (GPL2 w/ CPE) jersey-juice (com.sun.jersey:jersey-juice:jar:1.9 
- https://jersey.java.net/index.html)
+    (CDDL 1.1) (GPL2 w/ CPE) Old JAXB Runtime 
(com.sun.xml.bind:jaxb-impl:jar:2.2.3-1 - http://jaxb.java.net/)
+    (CDDL 1.1) (GPL2 w/ CPE) Java Architecture For XML Binding 
(javax.xml.bind:jaxb-api:jar:2.2.2 - https://jaxb.dev.java.net/)
+
+************************
+Common Development and Distribution License 1.0
+************************
+
+The following binary components are provided under the Common Development and 
Distribution License 1.0.  See project link for details.
+
+    (CDDL 1.0) JavaServlet(TM) Specification 
(javax.servlet:servlet-api:jar:2.5 - no url available)
+    (CDDL 1.0) (GPL3) Streaming API For XML 
(javax.xml.stream:stax-api:jar:1.0-2 - no url provided)
+    (CDDL 1.0) JavaBeans Activation Framework (JAF) 
(javax.activation:activation:jar:1.1 - 
http://java.sun.com/products/javabeans/jaf/index.jsp)
+    (CDDL 1.0) JavaServer Pages(TM) API (javax.servlet.jsp:jsp-api:jar:2.1 - 
http://jsp.java.net)
+    (CDDL 1.0) JSR311 API (javax.ws.rs:jsr311-api:jar:1.1.1 - 
https://jsr311.dev.java.net)
+
+*****************
+Public Domain
+*****************
+
+The following binary components are provided to the 'Public Domain'.  See 
project link for details.
+
+    (Public Domain) AOP Alliance 1.0 (http://aopalliance.sourceforge.net/)

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
----------------------------------------------------------------------
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
new file mode 100644
index 0000000..a90e0e3
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.nifi</groupId>
+        <artifactId>nifi-hbase_1_1_2-client-service-bundle</artifactId>
+        <version>0.4.0-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>nifi-hbase_1_1_2-client-service</artifactId>
+    <packaging>jar</packaging>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-hbase-client-service-api</artifactId>
+            <version>0.4.0-SNAPSHOT</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-api</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-processor-utils</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+            <version>3.4</version>
+        </dependency>
+
+        <!-- test dependencies -->
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-mock</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-simple</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientService.java
----------------------------------------------------------------------
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientService.java
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientService.java
new file mode 100644
index 0000000..9c300db
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientService.java
@@ -0,0 +1,207 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.hbase;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.ParseFilter;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnDisabled;
+import org.apache.nifi.annotation.lifecycle.OnEnabled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.controller.AbstractControllerService;
+import org.apache.nifi.controller.ConfigurationContext;
+import org.apache.nifi.controller.ControllerServiceInitializationContext;
+import org.apache.nifi.hbase.put.PutFlowFile;
+import org.apache.nifi.hbase.scan.Column;
+import org.apache.nifi.hbase.scan.ResultCell;
+import org.apache.nifi.hbase.scan.ResultHandler;
+import org.apache.nifi.reporting.InitializationException;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@Tags({ "hbase", "client"})
+@CapabilityDescription("Implementation of HBaseClientService for HBase 1.1.2.")
+public class HBase_1_1_2_ClientService extends AbstractControllerService 
implements HBaseClientService {
+
+    private volatile Connection connection;
+    private List<PropertyDescriptor> properties;
+
+    @Override
+    protected void init(ControllerServiceInitializationContext config) throws 
InitializationException {
+        List<PropertyDescriptor> props = new ArrayList<>();
+        props.add(HADOOP_CONF_FILES);
+        this.properties = Collections.unmodifiableList(props);
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return properties;
+    }
+
+    @OnEnabled
+    public void onEnabled(final ConfigurationContext context) throws 
InitializationException, IOException {
+        this.connection = createConnection(context);
+    }
+
+    protected Connection createConnection(final ConfigurationContext context) 
throws IOException {
+        final Configuration hbaseConfig = HBaseConfiguration.create();
+        for (final String configFile : 
context.getProperty(HADOOP_CONF_FILES).getValue().split(",")) {
+            hbaseConfig.addResource(new Path(configFile.trim()));
+        }
+        return ConnectionFactory.createConnection(hbaseConfig);
+    }
+
+    @OnDisabled
+    public void shutdown() {
+        if (connection != null) {
+            try {
+                connection.close();
+            } catch (final IOException ioe) {
+                getLogger().warn("Failed to close connection to HBase due to 
{}", new Object[]{ioe});
+            }
+        }
+    }
+
+    @Override
+    public void put(final String tableName, final Collection<PutFlowFile> 
puts) throws IOException {
+        try (final Table table = 
connection.getTable(TableName.valueOf(tableName))) {
+            // Create one Put per row....
+            final Map<String, Put> rowPuts = new HashMap<>();
+            for (final PutFlowFile putFlowFile : puts) {
+                Put put = rowPuts.get(putFlowFile.getRow());
+                if (put == null) {
+                    put = new 
Put(putFlowFile.getRow().getBytes(StandardCharsets.UTF_8));
+                    rowPuts.put(putFlowFile.getRow(), put);
+                }
+                
put.addColumn(putFlowFile.getColumnFamily().getBytes(StandardCharsets.UTF_8),
+                        
putFlowFile.getColumnQualifier().getBytes(StandardCharsets.UTF_8),
+                        putFlowFile.getBuffer());
+            }
+
+            table.put(new ArrayList<>(rowPuts.values()));
+        }
+    }
+
+    @Override
+    public void scan(final String tableName, final Collection<Column> columns, 
final String filterExpression, final long minTime, final ResultHandler handler)
+            throws IOException {
+
+        Filter filter = null;
+        if (!StringUtils.isBlank(filterExpression)) {
+            ParseFilter parseFilter = new ParseFilter();
+            filter = parseFilter.parseFilterString(filterExpression);
+        }
+
+        try (final Table table = 
connection.getTable(TableName.valueOf(tableName));
+             final ResultScanner scanner = getResults(table, columns, filter, 
minTime)) {
+
+            for (final Result result : scanner) {
+                final byte[] rowKey = result.getRow();
+                final Cell[] cells = result.rawCells();
+
+                if (cells == null) {
+                    continue;
+                }
+
+                // convert HBase cells to NiFi cells
+                final ResultCell[] resultCells = new ResultCell[cells.length];
+
+                for (int i=0; i < cells.length; i++) {
+                    final Cell cell = cells[i];
+
+                    final ResultCell resultCell = new ResultCell();
+                    resultCell.setRowArray(cell.getRowArray());
+                    resultCell.setRowOffset(cell.getRowOffset());
+                    resultCell.setRowLength(cell.getRowLength());
+
+                    resultCell.setFamilyArray(cell.getFamilyArray());
+                    resultCell.setFamilyOffset(cell.getFamilyOffset());
+                    resultCell.setFamilyLength(cell.getFamilyLength());
+
+                    resultCell.setQualifierArray(cell.getQualifierArray());
+                    resultCell.setQualifierOffset(cell.getQualifierOffset());
+                    resultCell.setQualifierLength(cell.getQualifierLength());
+
+                    resultCell.setTimestamp(cell.getTimestamp());
+                    resultCell.setTypeByte(cell.getTypeByte());
+                    resultCell.setSequenceId(cell.getSequenceId());
+
+                    resultCell.setValueArray(cell.getValueArray());
+                    resultCell.setValueOffset(cell.getValueOffset());
+                    resultCell.setValueLength(cell.getValueLength());
+
+                    resultCell.setTagsArray(cell.getTagsArray());
+                    resultCell.setTagsOffset(cell.getTagsOffset());
+                    resultCell.setTagsLength(cell.getTagsLength());
+
+                    resultCells[i] = resultCell;
+                }
+
+                // delegate to the handler
+                handler.handle(rowKey, resultCells);
+            }
+        }
+    }
+
+    // protected and extracted into separate method for testing
+    protected ResultScanner getResults(final Table table, final 
Collection<Column> columns, final Filter filter, final long minTime) throws 
IOException {
+        // Create a new scan. We will set the min timerange as the latest 
timestamp that
+        // we have seen so far. The minimum timestamp is inclusive, so we will 
get duplicates.
+        // We will record any cells that have the latest timestamp, so that 
when we scan again,
+        // we know to throw away those duplicates.
+        final Scan scan = new Scan();
+        scan.setTimeRange(minTime, Long.MAX_VALUE);
+
+        if (filter != null) {
+            scan.setFilter(filter);
+        }
+
+        if (columns != null) {
+            for (Column col : columns) {
+                if (col.getQualifier() == null) {
+                    scan.addFamily(col.getFamily());
+                } else {
+                    scan.addColumn(col.getFamily(), col.getQualifier());
+                }
+            }
+        }
+
+        return table.getScanner(scan);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService
----------------------------------------------------------------------
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService
new file mode 100644
index 0000000..258d50f
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService
@@ -0,0 +1,15 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+org.apache.nifi.hbase.HBase_1_1_2_ClientService
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientService.java
----------------------------------------------------------------------
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientService.java
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientService.java
new file mode 100644
index 0000000..71dd51b
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientService.java
@@ -0,0 +1,380 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.hbase;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.nifi.controller.ConfigurationContext;
+import org.apache.nifi.hbase.put.PutFlowFile;
+import org.apache.nifi.hbase.scan.Column;
+import org.apache.nifi.hbase.scan.ResultCell;
+import org.apache.nifi.hbase.scan.ResultHandler;
+import org.apache.nifi.reporting.InitializationException;
+import org.apache.nifi.util.TestRunner;
+import org.apache.nifi.util.TestRunners;
+import org.junit.Test;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+public class TestHBase_1_1_2_ClientService {
+
+    @Test
+    public void testSinglePut() throws InitializationException, IOException {
+        final String tableName = "nifi";
+        final String row = "row1";
+        final String columnFamily = "family1";
+        final String columnQualifier = "qualifier1";
+        final String content = "content1";
+
+        final PutFlowFile putFlowFile = new PutFlowFile(tableName, row, 
columnFamily, columnQualifier,
+                content.getBytes(StandardCharsets.UTF_8), null);
+
+        final TestRunner runner = 
TestRunners.newTestRunner(TestProcessor.class);
+
+        // Mock an HBase Table so we can verify the put operations later
+        final Table table = Mockito.mock(Table.class);
+        when(table.getName()).thenReturn(TableName.valueOf(tableName));
+
+        // create the controller service and link it to the test processor
+        final HBaseClientService service = configureHBaseClientService(runner, 
table);
+        runner.assertValid(service);
+
+        // try to put a single cell
+        final HBaseClientService hBaseClientService = 
runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
+                .asControllerService(HBaseClientService.class);
+
+        hBaseClientService.put(tableName, Arrays.asList(putFlowFile));
+
+        // verify only one call to put was made
+        ArgumentCaptor<List> capture = ArgumentCaptor.forClass(List.class);
+        verify(table, times(1)).put(capture.capture());
+
+        // verify only one put was in the list of puts
+        final List<Put> puts = capture.getValue();
+        assertEquals(1, puts.size());
+        verifyPut(row, columnFamily, columnQualifier, content, puts.get(0));
+    }
+
+    @Test
+    public void testMultiplePutsSameRow() throws IOException, 
InitializationException {
+        final String tableName = "nifi";
+        final String row = "row1";
+        final String columnFamily = "family1";
+        final String columnQualifier = "qualifier1";
+        final String content1 = "content1";
+        final String content2 = "content2";
+
+        final PutFlowFile putFlowFile1 = new PutFlowFile(tableName, row, 
columnFamily, columnQualifier,
+                content1.getBytes(StandardCharsets.UTF_8), null);
+
+        final PutFlowFile putFlowFile2 = new PutFlowFile(tableName, row, 
columnFamily, columnQualifier,
+                content2.getBytes(StandardCharsets.UTF_8), null);
+
+        final TestRunner runner = 
TestRunners.newTestRunner(TestProcessor.class);
+
+        // Mock an HBase Table so we can verify the put operations later
+        final Table table = Mockito.mock(Table.class);
+        when(table.getName()).thenReturn(TableName.valueOf(tableName));
+
+        // create the controller service and link it to the test processor
+        final HBaseClientService service = configureHBaseClientService(runner, 
table);
+        runner.assertValid(service);
+
+        // try to put multiple cells for the same row
+        final HBaseClientService hBaseClientService = 
runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
+                .asControllerService(HBaseClientService.class);
+
+        hBaseClientService.put(tableName, Arrays.asList(putFlowFile1, 
putFlowFile2));
+
+        // verify put was only called once
+        ArgumentCaptor<List> capture = ArgumentCaptor.forClass(List.class);
+        verify(table, times(1)).put(capture.capture());
+
+        // verify there was only one put in the list of puts
+        final List<Put> puts = capture.getValue();
+        assertEquals(1, puts.size());
+
+        // verify two cells were added to this one put operation
+        final NavigableMap<byte[], List<Cell>> familyCells = 
puts.get(0).getFamilyCellMap();
+        Map.Entry<byte[], List<Cell>> entry = familyCells.firstEntry();
+        assertEquals(2, entry.getValue().size());
+    }
+
+    @Test
+    public void testMultiplePutsDifferentRow() throws IOException, 
InitializationException {
+        final String tableName = "nifi";
+        final String row1 = "row1";
+        final String row2 = "row2";
+        final String columnFamily = "family1";
+        final String columnQualifier = "qualifier1";
+        final String content1 = "content1";
+        final String content2 = "content2";
+
+        final PutFlowFile putFlowFile1 = new PutFlowFile(tableName, row1, 
columnFamily, columnQualifier,
+                content1.getBytes(StandardCharsets.UTF_8), null);
+
+        final PutFlowFile putFlowFile2 = new PutFlowFile(tableName, row2, 
columnFamily, columnQualifier,
+                content2.getBytes(StandardCharsets.UTF_8), null);
+
+        final TestRunner runner = 
TestRunners.newTestRunner(TestProcessor.class);
+
+        // Mock an HBase Table so we can verify the put operations later
+        final Table table = Mockito.mock(Table.class);
+        when(table.getName()).thenReturn(TableName.valueOf(tableName));
+
+        // create the controller service and link it to the test processor
+        final HBaseClientService service = configureHBaseClientService(runner, 
table);
+        runner.assertValid(service);
+
+        // try to put multiple cells with different rows
+        final HBaseClientService hBaseClientService = 
runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
+                .asControllerService(HBaseClientService.class);
+
+        hBaseClientService.put(tableName, Arrays.asList(putFlowFile1, 
putFlowFile2));
+
+        // verify put was only called once
+        ArgumentCaptor<List> capture = ArgumentCaptor.forClass(List.class);
+        verify(table, times(1)).put(capture.capture());
+
+        // verify there were two puts in the list
+        final List<Put> puts = capture.getValue();
+        assertEquals(2, puts.size());
+    }
+
+    @Test
+    public void testScan() throws InitializationException, IOException {
+        final String tableName = "nifi";
+        final TestRunner runner = 
TestRunners.newTestRunner(TestProcessor.class);
+
+        // Mock an HBase Table so we can verify the put operations later
+        final Table table = Mockito.mock(Table.class);
+        when(table.getName()).thenReturn(TableName.valueOf(tableName));
+
+        // create the controller service and link it to the test processor
+        final MockHBaseClientService service = 
configureHBaseClientService(runner, table);
+        runner.assertValid(service);
+
+        // stage some results in the mock service...
+        final long now = System.currentTimeMillis();
+
+        final Map<String, String> cells = new HashMap<>();
+        cells.put("greeting", "hello");
+        cells.put("name", "nifi");
+
+        service.addResult("row0", cells, now - 2);
+        service.addResult("row1", cells, now - 1);
+        service.addResult("row2", cells, now - 1);
+        service.addResult("row3", cells, now);
+
+        // perform a scan and verify the four rows were returned
+        final CollectingResultHandler handler = new CollectingResultHandler();
+        final HBaseClientService hBaseClientService = 
runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
+                .asControllerService(HBaseClientService.class);
+
+        hBaseClientService.scan(tableName, new ArrayList<Column>(), null, now, 
handler);
+        assertEquals(4, handler.results.size());
+
+        // get row0 using the row id and verify it has 2 cells
+        final ResultCell[] results = handler.results.get("row0");
+        assertNotNull(results);
+        assertEquals(2, results.length);
+
+        verifyResultCell(results[0], "nifi", "greeting", "hello");
+        verifyResultCell(results[1], "nifi", "name", "nifi");
+    }
+
+    @Test
+    public void testScanWithValidFilter() throws InitializationException, 
IOException {
+        final String tableName = "nifi";
+        final TestRunner runner = 
TestRunners.newTestRunner(TestProcessor.class);
+
+        // Mock an HBase Table so we can verify the put operations later
+        final Table table = Mockito.mock(Table.class);
+        when(table.getName()).thenReturn(TableName.valueOf(tableName));
+
+        // create the controller service and link it to the test processor
+        final MockHBaseClientService service = 
configureHBaseClientService(runner, table);
+        runner.assertValid(service);
+
+        // perform a scan using a valid filter expression
+        final CollectingResultHandler handler = new CollectingResultHandler();
+        final HBaseClientService hBaseClientService = 
runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
+                .asControllerService(HBaseClientService.class);
+
+        // make sure we parse the filter expression without throwing an 
exception
+        final String filter = "PrefixFilter ('Row') AND PageFilter (1) AND 
FirstKeyOnlyFilter ()";
+        hBaseClientService.scan(tableName, new ArrayList<Column>(), filter, 
System.currentTimeMillis(), handler);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testScanWithInvalidFilter() throws InitializationException, 
IOException {
+        final String tableName = "nifi";
+        final TestRunner runner = 
TestRunners.newTestRunner(TestProcessor.class);
+
+        // Mock an HBase Table so we can verify the put operations later
+        final Table table = Mockito.mock(Table.class);
+        when(table.getName()).thenReturn(TableName.valueOf(tableName));
+
+        // create the controller service and link it to the test processor
+        final MockHBaseClientService service = 
configureHBaseClientService(runner, table);
+        runner.assertValid(service);
+
+        // perform a scan using an invalid filter expression
+        final CollectingResultHandler handler = new CollectingResultHandler();
+        final HBaseClientService hBaseClientService = 
runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
+                .asControllerService(HBaseClientService.class);
+
+        // this should throw IllegalArgumentException
+        final String filter = "this is not a filter";
+        hBaseClientService.scan(tableName, new ArrayList<Column>(), filter, 
System.currentTimeMillis(), handler);
+    }
+
+    private MockHBaseClientService configureHBaseClientService(final 
TestRunner runner, final Table table) throws InitializationException {
+        final MockHBaseClientService service = new 
MockHBaseClientService(table);
+        runner.addControllerService("hbaseClient", service);
+        runner.setProperty(service, 
HBase_1_1_2_ClientService.HADOOP_CONF_FILES, 
"src/test/resources/core-site.xml");
+        runner.enableControllerService(service);
+        runner.setProperty(TestProcessor.HBASE_CLIENT_SERVICE, "hbaseClient");
+        return service;
+    }
+
+    private void verifyResultCell(final ResultCell result, final String cf, 
final String cq, final String val) {
+        final String colFamily = new String(result.getFamilyArray(), 
result.getFamilyOffset(), result.getFamilyLength());
+        assertEquals(cf, colFamily);
+
+        final String colQualifier = new String(result.getQualifierArray(), 
result.getQualifierOffset(), result.getQualifierLength());
+        assertEquals(cq, colQualifier);
+
+        final String value = new String(result.getValueArray(), 
result.getValueOffset(), result.getValueLength());
+        assertEquals(val, value);
+    }
+
+    private void verifyPut(String row, String columnFamily, String 
columnQualifier, String content, Put put) {
+        assertEquals(row, new String(put.getRow()));
+
+        NavigableMap<byte [], List<Cell>> familyCells = put.getFamilyCellMap();
+        assertEquals(1, familyCells.size());
+
+        Map.Entry<byte[], List<Cell>> entry = familyCells.firstEntry();
+        assertEquals(columnFamily, new String(entry.getKey()));
+        assertEquals(1, entry.getValue().size());
+
+        Cell cell = entry.getValue().get(0);
+        assertEquals(columnQualifier, new String(cell.getQualifierArray(), 
cell.getQualifierOffset(), cell.getQualifierLength()));
+        assertEquals(content, new String(cell.getValueArray(), 
cell.getValueOffset(), cell.getValueLength()));
+    }
+
+    // Override methods to create a mock service that can return staged data
+    private class MockHBaseClientService extends HBase_1_1_2_ClientService {
+
+        private Table table;
+        private List<Result> results = new ArrayList<>();
+
+        public MockHBaseClientService(final Table table) {
+            this.table = table;
+        }
+
+        public void addResult(final String rowKey, final Map<String, String> 
cells, final long timestamp) {
+            final byte[] rowArray = rowKey.getBytes(StandardCharsets.UTF_8);
+
+            final Cell[] cellArray = new Cell[cells.size()];
+            int i = 0;
+            for (final Map.Entry<String, String> cellEntry : cells.entrySet()) 
{
+                final Cell cell = Mockito.mock(Cell.class);
+                when(cell.getRowArray()).thenReturn(rowArray);
+                when(cell.getRowOffset()).thenReturn(0);
+                when(cell.getRowLength()).thenReturn((short) rowArray.length);
+
+                final String cellValue = cellEntry.getValue();
+                final byte[] valueArray = 
cellValue.getBytes(StandardCharsets.UTF_8);
+                when(cell.getValueArray()).thenReturn(valueArray);
+                when(cell.getValueOffset()).thenReturn(0);
+                when(cell.getValueLength()).thenReturn(valueArray.length);
+
+                final byte[] familyArray = 
"nifi".getBytes(StandardCharsets.UTF_8);
+                when(cell.getFamilyArray()).thenReturn(familyArray);
+                when(cell.getFamilyOffset()).thenReturn(0);
+                when(cell.getFamilyLength()).thenReturn((byte) 
familyArray.length);
+
+                final String qualifier = cellEntry.getKey();
+                final byte[] qualifierArray = 
qualifier.getBytes(StandardCharsets.UTF_8);
+                when(cell.getQualifierArray()).thenReturn(qualifierArray);
+                when(cell.getQualifierOffset()).thenReturn(0);
+                
when(cell.getQualifierLength()).thenReturn(qualifierArray.length);
+
+                when(cell.getTimestamp()).thenReturn(timestamp);
+
+                cellArray[i++] = cell;
+            }
+
+            final Result result = Mockito.mock(Result.class);
+            when(result.getRow()).thenReturn(rowArray);
+            when(result.rawCells()).thenReturn(cellArray);
+            results.add(result);
+        }
+
+        @Override
+        protected ResultScanner getResults(Table table, Collection<Column> 
columns, Filter filter, long minTime) throws IOException {
+            final ResultScanner scanner = Mockito.mock(ResultScanner.class);
+            Mockito.when(scanner.iterator()).thenReturn(results.iterator());
+            return scanner;
+        }
+
+        @Override
+        protected Connection createConnection(ConfigurationContext context) 
throws IOException {
+            Connection connection = Mockito.mock(Connection.class);
+            
Mockito.when(connection.getTable(table.getName())).thenReturn(table);
+            return connection;
+        }
+    }
+
+    // handler that saves results for verification
+    private static final class CollectingResultHandler implements 
ResultHandler {
+
+        Map<String,ResultCell[]> results = new LinkedHashMap<>();
+
+        @Override
+        public void handle(byte[] row, ResultCell[] resultCells) {
+            final String rowStr = new String(row, StandardCharsets.UTF_8);
+            results.put(rowStr, resultCells);
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestProcessor.java
----------------------------------------------------------------------
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestProcessor.java
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestProcessor.java
new file mode 100644
index 0000000..44b7e8b
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestProcessor.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.hbase;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.hbase.HBaseClientService;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.exception.ProcessException;
+
+public class TestProcessor extends AbstractProcessor {
+
+    static final PropertyDescriptor HBASE_CLIENT_SERVICE = new 
PropertyDescriptor.Builder()
+            .name("HBase Client Service")
+            .description("HBaseClientService")
+            .identifiesControllerService(HBaseClientService.class)
+            .required(true)
+            .build();
+
+    @Override
+    public void onTrigger(ProcessContext context, ProcessSession session) 
throws ProcessException {
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        List<PropertyDescriptor> propDescs = new ArrayList<>();
+        propDescs.add(HBASE_CLIENT_SERVICE);
+        return propDescs;
+    }
+}

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site.xml
----------------------------------------------------------------------
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site.xml
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site.xml
new file mode 100644
index 0000000..d022099
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+      http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>fs.default.name</name>
+    <value>hdfs://hbase</value>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/pom.xml
----------------------------------------------------------------------
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/pom.xml
 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/pom.xml
new file mode 100644
index 0000000..8f900a3
--- /dev/null
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/pom.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.nifi</groupId>
+        <artifactId>nifi-standard-services</artifactId>
+        <version>0.4.0-SNAPSHOT</version>
+    </parent>
+
+    <groupId>org.apache.nifi</groupId>
+    <artifactId>nifi-hbase_1_1_2-client-service-bundle</artifactId>
+    <version>0.4.0-SNAPSHOT</version>
+    <packaging>pom</packaging>
+
+    <modules>
+        <module>nifi-hbase_1_1_2-client-service</module>
+        <module>nifi-hbase_1_1_2-client-service-nar</module>
+    </modules>
+
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>org.apache.hbase</groupId>
+                <artifactId>hbase-client</artifactId>
+                <version>1.1.2</version>
+            </dependency>
+            <!-- the top-level pom forces 18.0, but Hadoop 2.6 expects 12.0.1 
-->
+            <dependency>
+                <groupId>com.google.guava</groupId>
+                <artifactId>guava</artifactId>
+                <version>${hadoop.guava.version}</version>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+</project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/nifi-standard-services/nifi-standard-services-api-nar/pom.xml
----------------------------------------------------------------------
diff --git 
a/nifi-nar-bundles/nifi-standard-services/nifi-standard-services-api-nar/pom.xml
 
b/nifi-nar-bundles/nifi-standard-services/nifi-standard-services-api-nar/pom.xml
index eb2eb7e..cb51335 100644
--- 
a/nifi-nar-bundles/nifi-standard-services/nifi-standard-services-api-nar/pom.xml
+++ 
b/nifi-nar-bundles/nifi-standard-services/nifi-standard-services-api-nar/pom.xml
@@ -47,5 +47,10 @@
             <artifactId>nifi-dbcp-service-api</artifactId>
             <scope>compile</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-hbase-client-service-api</artifactId>
+            <scope>compile</scope>
+        </dependency>
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/nifi-standard-services/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-services/pom.xml 
b/nifi-nar-bundles/nifi-standard-services/pom.xml
index 17318f4..aeca522 100644
--- a/nifi-nar-bundles/nifi-standard-services/pom.xml
+++ b/nifi-nar-bundles/nifi-standard-services/pom.xml
@@ -26,12 +26,14 @@
         <module>nifi-distributed-cache-client-service-api</module>
         <module>nifi-distributed-cache-services-bundle</module>
         <module>nifi-load-distribution-service-api</module>
-       <module>nifi-http-context-map-api</module>
+           <module>nifi-http-context-map-api</module>
         <module>nifi-ssl-context-bundle</module>
         <module>nifi-ssl-context-service-api</module>
-       <module>nifi-http-context-map-bundle</module>
+           <module>nifi-http-context-map-bundle</module>
         <module>nifi-standard-services-api-nar</module>
         <module>nifi-dbcp-service-api</module>
         <module>nifi-dbcp-service-bundle</module>
+        <module>nifi-hbase-client-service-api</module>
+        <module>nifi-hbase_1_1_2-client-service-bundle</module>
     </modules>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/nifi-nar-bundles/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/pom.xml b/nifi-nar-bundles/pom.xml
index ce93109..510935d 100644
--- a/nifi-nar-bundles/pom.xml
+++ b/nifi-nar-bundles/pom.xml
@@ -42,6 +42,7 @@
         <module>nifi-language-translation-bundle</module>
         <module>nifi-mongodb-bundle</module>
         <module>nifi-flume-bundle</module>
+               <module>nifi-hbase-bundle</module>
         <module>nifi-ambari-bundle</module>
         <module>nifi-image-bundle</module>
         <module>nifi-avro-bundle</module>

http://git-wip-us.apache.org/repos/asf/nifi/blob/e748fd58/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d563408..e0744eb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -94,6 +94,7 @@
         <spring.security.version>3.2.7.RELEASE</spring.security.version>
         <jersey.version>1.19</jersey.version>
         <hadoop.version>2.6.2</hadoop.version>
+        <hadoop.guava.version>12.0.1</hadoop.guava.version>
         <yammer.metrics.version>2.2.0</yammer.metrics.version>
     </properties>
     <dependencyManagement>
@@ -914,6 +915,18 @@
             </dependency>
             <dependency>
                 <groupId>org.apache.nifi</groupId>
+                <artifactId>nifi-hbase_1_1_2-client-service-nar</artifactId>
+                <version>0.4.0-SNAPSHOT</version>
+                <type>nar</type>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.nifi</groupId>
+                <artifactId>nifi-hbase-nar</artifactId>
+                <version>0.4.0-SNAPSHOT</version>
+                <type>nar</type>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.nifi</groupId>
                 <artifactId>nifi-properties</artifactId>
                 <version>0.4.0-SNAPSHOT</version>
             </dependency>
@@ -959,6 +972,11 @@
                 <version>0.4.0-SNAPSHOT</version>
             </dependency>
             <dependency>
+                <groupId>org.apache.nifi</groupId>
+                <artifactId>nifi-hbase-client-service-api</artifactId>
+                <version>0.4.0-SNAPSHOT</version>
+            </dependency>
+            <dependency>
                 <groupId>com.jayway.jsonpath</groupId>
                 <artifactId>json-path</artifactId>
                 <version>2.0.0</version>

Reply via email to