Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 64c5cb649 -> aa9c7d6e4


PHOENIX-4489 HBase Connection leak in Phoenix MR Jobs
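
The leak fixed here comes from the split-generation path creating an HBase Connection via ConnectionFactory.createConnection(config) and never closing it, so every MR job submission left the connection and its ZooKeeper/RPC resources behind. The patch below obtains the connection through Phoenix's HBaseFactoryProvider and wraps it in a try-with-resources block so it is closed even if split calculation throws. The standalone sketch that follows shows the same try-with-resources pattern; the class name and the "MY_TABLE" table are hypothetical placeholders, and the actual Phoenix code derives the table from the query plan and goes through HBaseFactoryProvider.getHConnectionFactory() rather than calling ConnectionFactory directly.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.RegionLocator;

    public class ConnectionCloseSketch {
        public static void main(String[] args) throws Exception {
            Configuration config = HBaseConfiguration.create();

            // Leak-prone shape (what the old code did): the Connection is
            // created but never closed, so its thread pools and ZooKeeper
            // session outlive the job-submission call.
            //   Connection leaked = ConnectionFactory.createConnection(config);

            // Fixed shape: try-with-resources guarantees Connection.close()
            // runs even when the body throws.
            try (Connection connection = ConnectionFactory.createConnection(config)) {
                RegionLocator regionLocator =
                        connection.getRegionLocator(TableName.valueOf("MY_TABLE")); // hypothetical table
                System.out.println("Region count: " + regionLocator.getAllRegionLocations().size());
            }
        }
    }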


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/aa9c7d6e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/aa9c7d6e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/aa9c7d6e

Branch: refs/heads/4.x-HBase-1.1
Commit: aa9c7d6e4e6306e01b559bd7fc10e5d09a7172c5
Parents: 64c5cb6
Author: Karan Mehta <karanmeht...@gmail.com>
Authored: Tue Jan 23 16:07:24 2018 -0800
Committer: Karan Mehta <karanmeht...@gmail.com>
Committed: Tue Jan 23 23:19:39 2018 -0800

----------------------------------------------------------------------
 .../org/apache/phoenix/mapreduce/PhoenixInputFormat.java  | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/aa9c7d6e/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
index 2871809..9f16cc1 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
@@ -30,7 +30,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -48,6 +47,7 @@ import org.apache.phoenix.iterate.MapReduceParallelScanGrouper;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.mapreduce.util.ConnectionUtil;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
+import org.apache.phoenix.query.HBaseFactoryProvider;
 import org.apache.phoenix.query.KeyRange;
 import org.apache.phoenix.util.PhoenixRuntime;
 
@@ -95,13 +95,13 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
         Preconditions.checkNotNull(splits);
 
         // Get the RegionSizeCalculator
-        org.apache.hadoop.hbase.client.Connection connection = ConnectionFactory.createConnection(config);
+        try(org.apache.hadoop.hbase.client.Connection connection =
+                    HBaseFactoryProvider.getHConnectionFactory().createConnection(config)) {
         RegionLocator regionLocator = connection.getRegionLocator(TableName.valueOf(qplan
                 .getTableRef().getTable().getPhysicalName().toString()));
         RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(regionLocator, connection
                 .getAdmin());
 
-
         final List<InputSplit> psplits = Lists.newArrayListWithExpectedSize(splits.size());
         for (List<Scan> scans : qplan.getScans()) {
             // Get the region location
@@ -131,8 +131,7 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
 
                     psplits.add(new PhoenixInputSplit(Collections.singletonList(aScan), regionSize, regionLocation));
                 }
-            }
-            else {
+                } else {
                 if (LOG.isDebugEnabled()) {
                     LOG.debug("Scan count[" + scans.size() + "] : " + Bytes.toStringBinary(scans
                             .get(0).getStartRow()) + " ~ " + Bytes.toStringBinary(scans.get(scans
@@ -155,6 +154,7 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
         }
         return psplits;
     }
+    }
     
     /**
      * Returns the query plan associated with the select query.
