Jenkins build is back to normal : Phoenix | Master | Hadoop1 #265

2014-07-09 Thread Apache Jenkins Server
See 



Jenkins build is back to normal : Phoenix | 3.0 | Hadoop1 #129

2014-07-09 Thread Apache Jenkins Server
See 



Jenkins build is back to normal : Phoenix | 4.0 | Hadoop1 #197

2014-07-09 Thread Apache Jenkins Server
See 



git commit: Revert "PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or hbase cluster hasnt' started yet"

2014-07-09 Thread jeffreyz
Repository: phoenix
Updated Branches:
  refs/heads/4.0 46b445bef -> 0b5b14654


Revert "PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or 
hbase cluster hasnt' started yet"

This reverts commit 013e96c02b50f767e8e1e17d37c4cceef018d3eb.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0b5b1465
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0b5b1465
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0b5b1465

Branch: refs/heads/4.0
Commit: 0b5b14654cec20a987f514ccadb0ec05449a4793
Parents: 46b445b
Author: Jeffrey Zhong 
Authored: Wed Jul 9 19:19:36 2014 -0700
Committer: Jeffrey Zhong 
Committed: Wed Jul 9 19:19:36 2014 -0700

--
 bin/log4j.properties  | 4 +---
 .../org/apache/phoenix/query/ConnectionQueryServicesImpl.java | 7 ---
 2 files changed, 1 insertion(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0b5b1465/bin/log4j.properties
--
diff --git a/bin/log4j.properties b/bin/log4j.properties
index 47d45e8..30119bc 100644
--- a/bin/log4j.properties
+++ b/bin/log4j.properties
@@ -65,7 +65,5 @@ log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
 
 # Custom Logging levels
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=ERROR
+log4j.logger.org.apache.zookeeper=WARN
 log4j.logger.org.apache.hadoop.hbase.HBaseConfiguration=ERROR

http://git-wip-us.apache.org/repos/asf/phoenix/blob/0b5b1465/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index be0ac7a..a83a0c9 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -1457,8 +1457,6 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 
 SQLException sqlE = null;
 PhoenixConnection metaConnection = null;
-int origClientRetries = 
ConnectionQueryServicesImpl.this.config.getInt(
-HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
 try {
 openConnection();
 Properties scnProps = 
PropertiesUtil.deepCopy(props);
@@ -1466,9 +1464,6 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 PhoenixRuntime.CURRENT_SCN_ATTRIB,
 
Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
 scnProps.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
-// during initialization fast fail when connection 
has issues
-ConnectionQueryServicesImpl.this.config.setInt(
-HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
Math.min(2,origClientRetries));
 metaConnection = new PhoenixConnection(
 ConnectionQueryServicesImpl.this, url, 
scnProps, newEmptyMetaData());
 try {
@@ -1487,8 +1482,6 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 } catch (SQLException e) {
 sqlE = e;
 } finally {
-ConnectionQueryServicesImpl.this.config.setInt(
-HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
origClientRetries);
 try {
 if (metaConnection != null) 
metaConnection.close();
 } catch (SQLException e) {



git commit: Revert "PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or hbase cluster hasnt' started yet"

2014-07-09 Thread jeffreyz
Repository: phoenix
Updated Branches:
  refs/heads/3.0 e528d1663 -> 6085a4b30


Revert "PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or 
hbase cluster hasnt' started yet"

This reverts commit e528d1663cd5dd338916747e035e3de674e2a32b.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6085a4b3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6085a4b3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6085a4b3

Branch: refs/heads/3.0
Commit: 6085a4b30ba0702bdf1106136c4ec6364eae2e70
Parents: e528d16
Author: Jeffrey Zhong 
Authored: Wed Jul 9 19:20:41 2014 -0700
Committer: Jeffrey Zhong 
Committed: Wed Jul 9 19:20:41 2014 -0700

--
 bin/log4j.properties  | 4 +---
 .../org/apache/phoenix/query/ConnectionQueryServicesImpl.java | 7 ---
 2 files changed, 1 insertion(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6085a4b3/bin/log4j.properties
--
diff --git a/bin/log4j.properties b/bin/log4j.properties
index 47d45e8..30119bc 100644
--- a/bin/log4j.properties
+++ b/bin/log4j.properties
@@ -65,7 +65,5 @@ log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
 
 # Custom Logging levels
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=ERROR
+log4j.logger.org.apache.zookeeper=WARN
 log4j.logger.org.apache.hadoop.hbase.HBaseConfiguration=ERROR

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6085a4b3/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index dcfbae7..a9b9a56 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -1357,8 +1357,6 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 
 SQLException sqlE = null;
 PhoenixConnection metaConnection = null;
-int origClientRetries = 
ConnectionQueryServicesImpl.this.config.getInt(
-HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
 try {
 openConnection();
 Properties scnProps = 
PropertiesUtil.deepCopy(props);
@@ -1366,9 +1364,6 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 PhoenixRuntime.CURRENT_SCN_ATTRIB,
 
Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
 scnProps.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
-// during initialization fast fail when connection 
has issues
-ConnectionQueryServicesImpl.this.config.setInt(
-HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
Math.min(2,origClientRetries));
 metaConnection = new PhoenixConnection(
 ConnectionQueryServicesImpl.this, url, 
scnProps, newEmptyMetaData());
 try {
@@ -1387,8 +1382,6 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 } catch (SQLException e) {
 sqlE = e;
 } finally {
-ConnectionQueryServicesImpl.this.config.setInt(
-HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
origClientRetries);
 try {
 if (metaConnection != null) 
metaConnection.close();
 } catch (SQLException e) {



git commit: Revert "PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or hbase cluster hasnt' started yet"

2014-07-09 Thread jeffreyz
Repository: phoenix
Updated Branches:
  refs/heads/master 013e96c02 -> a33811cae


Revert "PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or 
hbase cluster hasnt' started yet"

This reverts commit 013e96c02b50f767e8e1e17d37c4cceef018d3eb.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a33811ca
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a33811ca
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a33811ca

Branch: refs/heads/master
Commit: a33811cae4d4a0174ed5c3c19502af1871cf732c
Parents: 013e96c
Author: Jeffrey Zhong 
Authored: Wed Jul 9 19:18:54 2014 -0700
Committer: Jeffrey Zhong 
Committed: Wed Jul 9 19:18:54 2014 -0700

--
 bin/log4j.properties  | 4 +---
 .../org/apache/phoenix/query/ConnectionQueryServicesImpl.java | 7 ---
 2 files changed, 1 insertion(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a33811ca/bin/log4j.properties
--
diff --git a/bin/log4j.properties b/bin/log4j.properties
index 47d45e8..30119bc 100644
--- a/bin/log4j.properties
+++ b/bin/log4j.properties
@@ -65,7 +65,5 @@ log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
 
 # Custom Logging levels
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=ERROR
+log4j.logger.org.apache.zookeeper=WARN
 log4j.logger.org.apache.hadoop.hbase.HBaseConfiguration=ERROR

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a33811ca/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 04e65e7..5eb5314 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -1457,8 +1457,6 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 
 SQLException sqlE = null;
 PhoenixConnection metaConnection = null;
-int origClientRetries = 
ConnectionQueryServicesImpl.this.config.getInt(
-HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
 try {
 openConnection();
 Properties scnProps = 
PropertiesUtil.deepCopy(props);
@@ -1466,9 +1464,6 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 PhoenixRuntime.CURRENT_SCN_ATTRIB,
 
Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
 scnProps.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
-// during initialization fast fail when connection 
has issues
-ConnectionQueryServicesImpl.this.config.setInt(
-HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
Math.min(2,origClientRetries));
 metaConnection = new PhoenixConnection(
 ConnectionQueryServicesImpl.this, url, 
scnProps, newEmptyMetaData());
 try {
@@ -1487,8 +1482,6 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 } catch (SQLException e) {
 sqlE = e;
 } finally {
-ConnectionQueryServicesImpl.this.config.setInt(
-HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
origClientRetries);
 try {
 if (metaConnection != null) 
metaConnection.close();
 } catch (SQLException e) {



Build failed in Jenkins: Phoenix | 3.0 | Hadoop1 #128

2014-07-09 Thread Apache Jenkins Server
See 

Changes:

[jeffreyz] PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or 
hbase cluster hasnt' started yet

--
[...truncated 368 lines...]
Running org.apache.phoenix.end2end.AlterTableIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.779 sec - in 
org.apache.phoenix.end2end.DynamicFamilyIT
Running org.apache.phoenix.end2end.ArithmeticQueryIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.988 sec - in 
org.apache.phoenix.end2end.TenantSpecificViewIndexSaltedIT
Running org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.891 sec - in 
org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
Running org.apache.phoenix.end2end.UpsertBigValuesIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.462 sec - in 
org.apache.phoenix.end2end.ArithmeticQueryIT
Running org.apache.phoenix.end2end.QueryPlanIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.807 sec - in 
org.apache.phoenix.end2end.QueryPlanIT
Running org.apache.phoenix.end2end.ViewIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.013 sec - in 
org.apache.phoenix.end2end.UpsertBigValuesIT
Running org.apache.phoenix.end2end.CSVCommonsLoaderIT
Tests run: 13, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 38.875 sec - 
in org.apache.phoenix.end2end.AlterTableIT
Running org.apache.phoenix.end2end.SortOrderFIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.924 sec - in 
org.apache.phoenix.end2end.ViewIT
Running org.apache.phoenix.end2end.LpadFunctionIT
Tests run: 13, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.466 sec - 
in org.apache.phoenix.end2end.CSVCommonsLoaderIT
Running org.apache.phoenix.end2end.TenantSpecificViewIndexIT
Tests run: 30, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.143 sec - 
in org.apache.phoenix.end2end.SortOrderFIT
Running org.apache.phoenix.end2end.QueryExecWithoutSCNIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.817 sec - in 
org.apache.phoenix.end2end.QueryExecWithoutSCNIT
Running org.apache.phoenix.end2end.SkipScanQueryIT
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.233 sec - 
in org.apache.phoenix.end2end.LpadFunctionIT
Running org.apache.phoenix.end2end.index.IndexMetadataIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.329 sec - in 
org.apache.phoenix.end2end.TenantSpecificViewIndexIT
Running org.apache.phoenix.end2end.index.ImmutableIndexIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.055 sec - in 
org.apache.phoenix.end2end.SkipScanQueryIT
Running org.apache.phoenix.end2end.index.SaltedIndexIT
Tests run: 60, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 130.884 sec - 
in org.apache.phoenix.end2end.HashJoinIT
Running org.apache.phoenix.end2end.index.DropViewIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.226 sec - in 
org.apache.phoenix.end2end.index.ImmutableIndexIT
Running org.apache.phoenix.end2end.index.MutableIndexIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.887 sec - in 
org.apache.phoenix.end2end.index.IndexMetadataIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.38 sec - in 
org.apache.phoenix.end2end.index.DropViewIT
Running org.apache.phoenix.end2end.UpsertSelectAutoCommitIT
Running org.apache.phoenix.end2end.TimezoneOffsetFunctionIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.359 sec - in 
org.apache.phoenix.end2end.TimezoneOffsetFunctionIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.943 sec - in 
org.apache.phoenix.end2end.index.SaltedIndexIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.915 sec - in 
org.apache.phoenix.end2end.UpsertSelectAutoCommitIT
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 115.688 sec - 
in org.apache.phoenix.end2end.index.MutableIndexIT

Results :

Tests run: 309, Failures: 0, Errors: 0, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.17:integration-test (ClientManagedTimeTests) 
@ phoenix-core ---
[INFO] Failsafe report directory: 

[INFO] parallel='none', perCoreThreadCount=true, threadCount=0, 
useUnlimitedThreads=false, threadCountSuites=0, threadCountClasses=0, 
threadCountMethods=0, parallelOptimized=true

---
 T E S T S
---

---
 T E S T S
---
Running org.apache.phoenix.end2end.IsNullIT
Running org.apache.phoenix.end2end.UpsertSelectIT
Running org.apache.phoenix.end2end.ScanQueryIT
Running org.apache.phoenix.end2end.

Build failed in Jenkins: Phoenix | 4.0 | Hadoop1 #196

2014-07-09 Thread Apache Jenkins Server
See 

Changes:

[jeffreyz] PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or 
hbase cluster hasnt' started yet

--
[...truncated 401 lines...]
Thu Jul 10 00:50:57 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:50:58 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:50:58 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:50:59 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:51:01 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:51:05 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:51:15 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:51:25 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:51:35 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:51:45 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:52:05 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:52:25 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:52:46 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:53:06 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:53:26 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:53:46 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:54:06 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:54:26 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:54:46 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:55:06 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:55:26 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:55:47 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:56:07 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:56:27 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:56:47 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@1aa5563, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection

Build failed in Jenkins: Phoenix | Master | Hadoop1 #264

2014-07-09 Thread Apache Jenkins Server
See 

Changes:

[jeffreyz] PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or 
hbase cluster hasnt' started yet

--
[...truncated 404 lines...]
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.438 sec - in 
org.apache.phoenix.end2end.GroupByCaseIT
Running org.apache.phoenix.end2end.PercentileIT
Tests run: 17, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.924 sec - in 
org.apache.phoenix.end2end.PercentileIT
Running org.apache.phoenix.end2end.ArrayIT
Tests run: 48, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 21.373 sec - 
in org.apache.phoenix.end2end.ArrayIT
Running org.apache.phoenix.end2end.DistinctCountIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.541 sec - in 
org.apache.phoenix.end2end.DistinctCountIT
Running org.apache.phoenix.end2end.CompareDecimalToLongIT
Tests run: 144, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 129.08 sec - 
in org.apache.phoenix.end2end.NotQueryIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.673 sec - in 
org.apache.phoenix.end2end.CompareDecimalToLongIT
Running org.apache.phoenix.end2end.ToCharFunctionIT
Running org.apache.phoenix.end2end.GroupByIT
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.843 sec - in 
org.apache.phoenix.end2end.ToCharFunctionIT
Running org.apache.phoenix.end2end.NativeHBaseTypesIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.433 sec - in 
org.apache.phoenix.end2end.NativeHBaseTypesIT
Running org.apache.phoenix.end2end.QueryIT
Tests run: 100, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 66.359 sec - 
in org.apache.phoenix.end2end.QueryIT
Running org.apache.phoenix.end2end.InMemoryOrderByIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 3.381 sec - in 
org.apache.phoenix.end2end.InMemoryOrderByIT
Running org.apache.phoenix.end2end.TopNIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.299 sec - in 
org.apache.phoenix.end2end.TopNIT
Tests run: 216, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 186.974 sec - 
in org.apache.phoenix.end2end.ClientTimeArithmeticQueryIT
Tests run: 152, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 147.993 sec - 
in org.apache.phoenix.end2end.GroupByIT
Tests run: 15, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 483.457 sec 
<<< FAILURE! - in org.apache.phoenix.end2end.QueryDatabaseMetaDataIT
testCreateViewOnExistingTable(org.apache.phoenix.end2end.QueryDatabaseMetaDataIT)
  Time elapsed: 450.176 sec  <<< ERROR!
org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after 
attempts=31, exceptions:
Thu Jul 10 00:47:10 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:47:10 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:47:11 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:47:11 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:47:12 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:47:14 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:47:18 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:47:28 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:47:38 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:47:48 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:47:58 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get master
Thu Jul 10 00:48:18 UTC 2014, 
org.apache.hadoop.hbase.client.RpcRetryingCaller@494546, 
org.apache.hadoop.hbase.MasterNotRunningException: Connection was closed while 
trying to get mas

Apache-Phoenix | 3.0 | Hadoop1 | Build Successful

2014-07-09 Thread Apache Jenkins Server
3.0 branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastCompletedBuild/testReport/

Changes
[jeffreyz] PHOENIX-1069: Improve CsvBulkLoadTool to build indexes when loading data.



git commit: PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or hbase cluster hasnt' started yet

2014-07-09 Thread jeffreyz
Repository: phoenix
Updated Branches:
  refs/heads/3.0 94644d5b7 -> e528d1663


PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or hbase 
cluster hasnt' started yet


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e528d166
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e528d166
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e528d166

Branch: refs/heads/3.0
Commit: e528d1663cd5dd338916747e035e3de674e2a32b
Parents: 94644d5
Author: Jeffrey Zhong 
Authored: Wed Jul 9 17:43:54 2014 -0700
Committer: Jeffrey Zhong 
Committed: Wed Jul 9 17:52:39 2014 -0700

--
 bin/log4j.properties  | 4 +++-
 .../org/apache/phoenix/query/ConnectionQueryServicesImpl.java | 7 +++
 2 files changed, 10 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e528d166/bin/log4j.properties
--
diff --git a/bin/log4j.properties b/bin/log4j.properties
index 30119bc..47d45e8 100644
--- a/bin/log4j.properties
+++ b/bin/log4j.properties
@@ -65,5 +65,7 @@ log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
 
 # Custom Logging levels
-log4j.logger.org.apache.zookeeper=WARN
+log4j.logger.org.apache.zookeeper=ERROR
+log4j.logger.org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper=ERROR
+log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=ERROR
 log4j.logger.org.apache.hadoop.hbase.HBaseConfiguration=ERROR

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e528d166/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index a9b9a56..dcfbae7 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -1357,6 +1357,8 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 
 SQLException sqlE = null;
 PhoenixConnection metaConnection = null;
+int origClientRetries = 
ConnectionQueryServicesImpl.this.config.getInt(
+HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
 try {
 openConnection();
 Properties scnProps = 
PropertiesUtil.deepCopy(props);
@@ -1364,6 +1366,9 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 PhoenixRuntime.CURRENT_SCN_ATTRIB,
 
Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
 scnProps.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
+// during initialization fast fail when connection 
has issues
+ConnectionQueryServicesImpl.this.config.setInt(
+HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
Math.min(2,origClientRetries));
 metaConnection = new PhoenixConnection(
 ConnectionQueryServicesImpl.this, url, 
scnProps, newEmptyMetaData());
 try {
@@ -1382,6 +1387,8 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 } catch (SQLException e) {
 sqlE = e;
 } finally {
+ConnectionQueryServicesImpl.this.config.setInt(
+HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
origClientRetries);
 try {
 if (metaConnection != null) 
metaConnection.close();
 } catch (SQLException e) {



Apache-Phoenix | 4.0 | Hadoop1 | Build Successful

2014-07-09 Thread Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastCompletedBuild/testReport/

Changes
[jeffreyz] PHOENIX-1069: Improve CsvBulkLoadTool to build indexes when loading data.



git commit: PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or hbase cluster hasnt' started yet

2014-07-09 Thread jeffreyz
Repository: phoenix
Updated Branches:
  refs/heads/4.0 8dfda14ae -> 46b445bef


PHOENIX-1072: Fast fail sqlline.py when pass wrong quorum string or hbase 
cluster hasnt' started yet


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/46b445be
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/46b445be
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/46b445be

Branch: refs/heads/4.0
Commit: 46b445befb43e6257bb085c01a0a26e30ee68366
Parents: 8dfda14
Author: Jeffrey Zhong 
Authored: Wed Jul 9 17:43:54 2014 -0700
Committer: Jeffrey Zhong 
Committed: Wed Jul 9 17:49:05 2014 -0700

--
 bin/log4j.properties  | 4 +++-
 .../org/apache/phoenix/query/ConnectionQueryServicesImpl.java | 7 +++
 2 files changed, 10 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/46b445be/bin/log4j.properties
--
diff --git a/bin/log4j.properties b/bin/log4j.properties
index 30119bc..47d45e8 100644
--- a/bin/log4j.properties
+++ b/bin/log4j.properties
@@ -65,5 +65,7 @@ log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
 
 # Custom Logging levels
-log4j.logger.org.apache.zookeeper=WARN
+log4j.logger.org.apache.zookeeper=ERROR
+log4j.logger.org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper=ERROR
+log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=ERROR
 log4j.logger.org.apache.hadoop.hbase.HBaseConfiguration=ERROR

http://git-wip-us.apache.org/repos/asf/phoenix/blob/46b445be/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index a83a0c9..be0ac7a 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -1457,6 +1457,8 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 
 SQLException sqlE = null;
 PhoenixConnection metaConnection = null;
+int origClientRetries = 
ConnectionQueryServicesImpl.this.config.getInt(
+HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
 try {
 openConnection();
 Properties scnProps = 
PropertiesUtil.deepCopy(props);
@@ -1464,6 +1466,9 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 PhoenixRuntime.CURRENT_SCN_ATTRIB,
 
Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
 scnProps.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
+// during initialization fast fail when connection 
has issues
+ConnectionQueryServicesImpl.this.config.setInt(
+HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
Math.min(2,origClientRetries));
 metaConnection = new PhoenixConnection(
 ConnectionQueryServicesImpl.this, url, 
scnProps, newEmptyMetaData());
 try {
@@ -1482,6 +1487,8 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 } catch (SQLException e) {
 sqlE = e;
 } finally {
+ConnectionQueryServicesImpl.this.config.setInt(
+HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
origClientRetries);
 try {
 if (metaConnection != null) 
metaConnection.close();
 } catch (SQLException e) {



git commit: PHOENIX-1072: Fast fail sqlline.py when passing wrong quorum string or hbase cluster hasn't started yet

2014-07-09 Thread jeffreyz
Repository: phoenix
Updated Branches:
  refs/heads/master 9bb0b01f6 -> 013e96c02


PHOENIX-1072: Fast fail sqlline.py when passing wrong quorum string or hbase 
cluster hasn't started yet


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/013e96c0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/013e96c0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/013e96c0

Branch: refs/heads/master
Commit: 013e96c02b50f767e8e1e17d37c4cceef018d3eb
Parents: 9bb0b01
Author: Jeffrey Zhong 
Authored: Wed Jul 9 17:43:54 2014 -0700
Committer: Jeffrey Zhong 
Committed: Wed Jul 9 17:43:54 2014 -0700

--
 bin/log4j.properties  | 4 +++-
 .../org/apache/phoenix/query/ConnectionQueryServicesImpl.java | 7 +++
 2 files changed, 10 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/013e96c0/bin/log4j.properties
--
diff --git a/bin/log4j.properties b/bin/log4j.properties
index 30119bc..47d45e8 100644
--- a/bin/log4j.properties
+++ b/bin/log4j.properties
@@ -65,5 +65,7 @@ log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
 
 # Custom Logging levels
-log4j.logger.org.apache.zookeeper=WARN
+log4j.logger.org.apache.zookeeper=ERROR
+log4j.logger.org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper=ERROR
+log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=ERROR
 log4j.logger.org.apache.hadoop.hbase.HBaseConfiguration=ERROR

http://git-wip-us.apache.org/repos/asf/phoenix/blob/013e96c0/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 5eb5314..04e65e7 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -1457,6 +1457,8 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 
 SQLException sqlE = null;
 PhoenixConnection metaConnection = null;
+int origClientRetries = 
ConnectionQueryServicesImpl.this.config.getInt(
+HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
 try {
 openConnection();
 Properties scnProps = 
PropertiesUtil.deepCopy(props);
@@ -1464,6 +1466,9 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 PhoenixRuntime.CURRENT_SCN_ATTRIB,
 
Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
 scnProps.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
+// during initialization fast fail when connection 
has issues
+ConnectionQueryServicesImpl.this.config.setInt(
+HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
Math.min(2,origClientRetries));
 metaConnection = new PhoenixConnection(
 ConnectionQueryServicesImpl.this, url, 
scnProps, newEmptyMetaData());
 try {
@@ -1482,6 +1487,8 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 } catch (SQLException e) {
 sqlE = e;
 } finally {
+ConnectionQueryServicesImpl.this.config.setInt(
+HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
origClientRetries);
 try {
 if (metaConnection != null) 
metaConnection.close();
 } catch (SQLException e) {



git commit: PHOENIX-1069: Improve CsvBulkLoadTool to build indexes when loading data.

2014-07-09 Thread jeffreyz
Repository: phoenix
Updated Branches:
  refs/heads/3.0 ec1b76ab2 -> 94644d5b7


PHOENIX-1069: Improve CsvBulkLoadTool to build indexes when loading data.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/94644d5b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/94644d5b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/94644d5b

Branch: refs/heads/3.0
Commit: 94644d5b7215551147c03adcae4721680b3ef407
Parents: ec1b76a
Author: Jeffrey Zhong 
Authored: Wed Jul 9 16:08:46 2014 -0700
Committer: Jeffrey Zhong 
Committed: Wed Jul 9 17:13:31 2014 -0700

--
 .../phoenix/mapreduce/CsvBulkLoadToolIT.java|  73 +++
 .../phoenix/mapreduce/CsvBulkLoadTool.java  | 207 ++-
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  16 +-
 3 files changed, 249 insertions(+), 47 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/94644d5b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
index 744b5d6..d4a80a2 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
@@ -26,6 +26,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.PrintWriter;
 import java.sql.Connection;
 import java.sql.DriverManager;
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.Statement;
 
@@ -143,4 +144,76 @@ public class CsvBulkLoadToolIT {
 rs.close();
 stmt.close();
 }
+
+@Test
+public void testImportWithIndex() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE TABLE3 (ID INTEGER NOT NULL PRIMARY KEY, " +
+"FIRST_NAME VARCHAR, LAST_NAME VARCHAR)");
+String ddl = "CREATE INDEX TABLE3_IDX ON TABLE3 "
++ " (FIRST_NAME ASC)"
++ " INCLUDE (LAST_NAME)";
+stmt.execute(ddl);
+
+FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input3.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,FirstName 1,LastName 1");
+printWriter.println("2,FirstName 2,LastName 2");
+printWriter.close();
+
+CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
+csvBulkLoadTool.setConf(hbaseTestUtil.getConfiguration());
+int exitCode = csvBulkLoadTool.run(new String[] {
+"--input", "/tmp/input3.csv",
+"--table", "table3",
+"--zookeeper", zkQuorum});
+assertEquals(0, exitCode);
+
+ResultSet rs = stmt.executeQuery("SELECT id, FIRST_NAME FROM TABLE3 
where first_name='FirstName 2'");
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals("FirstName 2", rs.getString(2));
+
+rs.close();
+stmt.close();
+}
+
+@Test
+public void testImportOneIndexTable() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE TABLE4 (ID INTEGER NOT NULL PRIMARY KEY, " +
+"FIRST_NAME VARCHAR, LAST_NAME VARCHAR)");
+String ddl = "CREATE INDEX TABLE4_IDX ON TABLE4 "
++ " (FIRST_NAME ASC)";
+stmt.execute(ddl);
+
+FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input4.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,FirstName 1,LastName 1");
+printWriter.println("2,FirstName 2,LastName 2");
+printWriter.close();
+
+CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
+csvBulkLoadTool.setConf(hbaseTestUtil.getConfiguration());
+int exitCode = csvBulkLoadTool.run(new String[] {
+"--input", "/tmp/input4.csv",
+"--table", "table4",
+"--index-table", "TABLE4_IDX",
+"--zookeeper", zkQuorum});
+assertEquals(0, exitCode);
+
+ResultSet rs = stmt.executeQuery("SELECT * FROM TABLE4");
+assertFalse(rs.next());
+rs = stmt.executeQuery("SELECT FIRST_NAME FROM TABLE4 where 
FIRST_NAME='FirstName 1'");
+assertTrue(rs.next());
+assertEquals("FirstName 1", rs.getString(1));
+
+rs.close();
+stmt.close();
+}
+
 }

http://git-wip-us.apache.org/repos/as

git commit: PHOENIX-1069: Improve CsvBulkLoadTool to build indexes when loading data.

2014-07-09 Thread jeffreyz
Repository: phoenix
Updated Branches:
  refs/heads/4.0 2625274eb -> 8dfda14ae


PHOENIX-1069: Improve CsvBulkLoadTool to build indexes when loading data.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8dfda14a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8dfda14a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8dfda14a

Branch: refs/heads/4.0
Commit: 8dfda14ae1e4db9d76d3a1cdd0c3a35c0afed503
Parents: 2625274
Author: Jeffrey Zhong 
Authored: Wed Jul 9 16:08:46 2014 -0700
Committer: Jeffrey Zhong 
Committed: Wed Jul 9 16:18:29 2014 -0700

--
 .../phoenix/mapreduce/CsvBulkLoadToolIT.java|  73 +++
 .../phoenix/mapreduce/CsvBulkLoadTool.java  | 207 ++-
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  18 +-
 3 files changed, 250 insertions(+), 48 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8dfda14a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
index 744b5d6..d4a80a2 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
@@ -26,6 +26,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.PrintWriter;
 import java.sql.Connection;
 import java.sql.DriverManager;
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.Statement;
 
@@ -143,4 +144,76 @@ public class CsvBulkLoadToolIT {
 rs.close();
 stmt.close();
 }
+
+@Test
+public void testImportWithIndex() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE TABLE3 (ID INTEGER NOT NULL PRIMARY KEY, " +
+"FIRST_NAME VARCHAR, LAST_NAME VARCHAR)");
+String ddl = "CREATE INDEX TABLE3_IDX ON TABLE3 "
++ " (FIRST_NAME ASC)"
++ " INCLUDE (LAST_NAME)";
+stmt.execute(ddl);
+
+FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input3.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,FirstName 1,LastName 1");
+printWriter.println("2,FirstName 2,LastName 2");
+printWriter.close();
+
+CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
+csvBulkLoadTool.setConf(hbaseTestUtil.getConfiguration());
+int exitCode = csvBulkLoadTool.run(new String[] {
+"--input", "/tmp/input3.csv",
+"--table", "table3",
+"--zookeeper", zkQuorum});
+assertEquals(0, exitCode);
+
+ResultSet rs = stmt.executeQuery("SELECT id, FIRST_NAME FROM TABLE3 
where first_name='FirstName 2'");
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals("FirstName 2", rs.getString(2));
+
+rs.close();
+stmt.close();
+}
+
+@Test
+public void testImportOneIndexTable() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE TABLE4 (ID INTEGER NOT NULL PRIMARY KEY, " +
+"FIRST_NAME VARCHAR, LAST_NAME VARCHAR)");
+String ddl = "CREATE INDEX TABLE4_IDX ON TABLE4 "
++ " (FIRST_NAME ASC)";
+stmt.execute(ddl);
+
+FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input4.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,FirstName 1,LastName 1");
+printWriter.println("2,FirstName 2,LastName 2");
+printWriter.close();
+
+CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
+csvBulkLoadTool.setConf(hbaseTestUtil.getConfiguration());
+int exitCode = csvBulkLoadTool.run(new String[] {
+"--input", "/tmp/input4.csv",
+"--table", "table4",
+"--index-table", "TABLE4_IDX",
+"--zookeeper", zkQuorum});
+assertEquals(0, exitCode);
+
+ResultSet rs = stmt.executeQuery("SELECT * FROM TABLE4");
+assertFalse(rs.next());
+rs = stmt.executeQuery("SELECT FIRST_NAME FROM TABLE4 where 
FIRST_NAME='FirstName 1'");
+assertTrue(rs.next());
+assertEquals("FirstName 1", rs.getString(1));
+
+rs.close();
+stmt.close();
+}
+
 }

http://git-wip-us.apache.org/repos/as

Apache-Phoenix | Master | Hadoop1 | Build Successful

2014-07-09 Thread Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-master-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-master-hadoop1/lastCompletedBuild/testReport/

Changes
[jeffreyz] PHOENIX-1069: Improve CsvBulkLoadTool to build indexes when loading data.



git commit: PHOENIX-1069: Improve CsvBulkLoadTool to build indexes when loading data.

2014-07-09 Thread jeffreyz
Repository: phoenix
Updated Branches:
  refs/heads/master 61c948b73 -> 9bb0b01f6


PHOENIX-1069: Improve CsvBulkLoadTool to build indexes when loading data.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9bb0b01f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9bb0b01f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9bb0b01f

Branch: refs/heads/master
Commit: 9bb0b01f68e5da104810c3f1e3adb04ec2ba491f
Parents: 61c948b
Author: Jeffrey Zhong 
Authored: Wed Jul 9 16:08:46 2014 -0700
Committer: Jeffrey Zhong 
Committed: Wed Jul 9 16:08:46 2014 -0700

--
 .../phoenix/mapreduce/CsvBulkLoadToolIT.java|  73 +++
 .../phoenix/mapreduce/CsvBulkLoadTool.java  | 207 ++-
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  18 +-
 3 files changed, 250 insertions(+), 48 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9bb0b01f/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
index 744b5d6..d4a80a2 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
@@ -26,6 +26,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.PrintWriter;
 import java.sql.Connection;
 import java.sql.DriverManager;
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.Statement;
 
@@ -143,4 +144,76 @@ public class CsvBulkLoadToolIT {
 rs.close();
 stmt.close();
 }
+
+@Test
+public void testImportWithIndex() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE TABLE3 (ID INTEGER NOT NULL PRIMARY KEY, " +
+"FIRST_NAME VARCHAR, LAST_NAME VARCHAR)");
+String ddl = "CREATE INDEX TABLE3_IDX ON TABLE3 "
++ " (FIRST_NAME ASC)"
++ " INCLUDE (LAST_NAME)";
+stmt.execute(ddl);
+
+FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input3.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,FirstName 1,LastName 1");
+printWriter.println("2,FirstName 2,LastName 2");
+printWriter.close();
+
+CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
+csvBulkLoadTool.setConf(hbaseTestUtil.getConfiguration());
+int exitCode = csvBulkLoadTool.run(new String[] {
+"--input", "/tmp/input3.csv",
+"--table", "table3",
+"--zookeeper", zkQuorum});
+assertEquals(0, exitCode);
+
+ResultSet rs = stmt.executeQuery("SELECT id, FIRST_NAME FROM TABLE3 
where first_name='FirstName 2'");
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals("FirstName 2", rs.getString(2));
+
+rs.close();
+stmt.close();
+}
+
+@Test
+public void testImportOneIndexTable() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE TABLE4 (ID INTEGER NOT NULL PRIMARY KEY, " +
+"FIRST_NAME VARCHAR, LAST_NAME VARCHAR)");
+String ddl = "CREATE INDEX TABLE4_IDX ON TABLE4 "
++ " (FIRST_NAME ASC)";
+stmt.execute(ddl);
+
+FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input4.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,FirstName 1,LastName 1");
+printWriter.println("2,FirstName 2,LastName 2");
+printWriter.close();
+
+CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
+csvBulkLoadTool.setConf(hbaseTestUtil.getConfiguration());
+int exitCode = csvBulkLoadTool.run(new String[] {
+"--input", "/tmp/input4.csv",
+"--table", "table4",
+"--index-table", "TABLE4_IDX",
+"--zookeeper", zkQuorum});
+assertEquals(0, exitCode);
+
+ResultSet rs = stmt.executeQuery("SELECT * FROM TABLE4");
+assertFalse(rs.next());
+rs = stmt.executeQuery("SELECT FIRST_NAME FROM TABLE4 where 
FIRST_NAME='FirstName 1'");
+assertTrue(rs.next());
+assertEquals("FirstName 1", rs.getString(1));
+
+rs.close();
+stmt.close();
+}
+
 }

http://git-wip-us.apache.org/re

svn commit: r1609131 - /phoenix/site/publish/array_type.html

2014-07-09 Thread ramkrishna
Author: ramkrishna
Date: Wed Jul  9 12:30:08 2014
New Revision: 1609131

URL: http://svn.apache.org/r1609131
Log:
Phoenix-1067 Add documentation for ANY/ALL support with arrays (Ram)

Modified:
phoenix/site/publish/array_type.html

Modified: phoenix/site/publish/array_type.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/array_type.html?rev=1609131&r1=1609130&r2=1609131&view=diff
==
--- phoenix/site/publish/array_type.html (original)
+++ phoenix/site/publish/array_type.html Wed Jul  9 12:30:08 2014
@@ -1,370 +1,385 @@
-
-
-
-
-
-   
-   
-   ARRAY Type | Apache Phoenix
-   
-   
-   
-
-   http://netdna.bootstrapcdn.com/bootswatch/2.3.2/united/bootstrap.min.css"; 
rel="stylesheet" />
-   http://netdna.bootstrapcdn.com/twitter-bootstrap/2.3.1/css/bootstrap-responsive.min.css";
 rel="stylesheet" />
-   
-   
-
-   http://yandex.st/highlightjs/7.5/styles/default.min.css"; rel="stylesheet" 
/>
-   
-   
-   
-   
-   
-   
-   
-   
-
-
-
-   
-
-   
-
-   
-   
-   
-   
-   
-   
-   
-   
-   
-   
-   
-   
-   About 
-   
-   Overview
-   New Features
-   Roadmap
-   Performance
-   Team
-   Contributing
-   Resources
-   Mailing Lists
-   Source Repository
-   Issue Tracking
-   Download
-   
-   http://www.apache.org/licenses/"; title="License" 
class="externalLink">License
-   http://www.apache.org/foundation/sponsorship.html"; title="Sponsorship" 
class="externalLink">Sponsorship
-   http://www.apache.org/foundation/thanks.html"; title="Thanks" 
class="externalLink">Thanks
-   http://www.apache.org/security/"; title="Security" 
class="externalLink">Security
-   
-   
-   
-   Using 
-   
-   F.A.Q.
-   Quick 
Start
-   Building
-   Tuning
-   Upgrade from 2.2
-   
-   Secondary 
Indexes
-   Joins
-   Views
-   Multi tenancy
-   Sequences
-   ARRAY type
-   Salted Tables
-

svn commit: r1609071 - /phoenix/site/source/src/site/markdown/array_type.md

2014-07-09 Thread ramkrishna
Author: ramkrishna
Date: Wed Jul  9 09:52:14 2014
New Revision: 1609071

URL: http://svn.apache.org/r1609071
Log:
Phoenix-1067 Add documentation for ANY/ALL support with arrays (Ram)

Modified:
phoenix/site/source/src/site/markdown/array_type.md

Modified: phoenix/site/source/src/site/markdown/array_type.md
URL: 
http://svn.apache.org/viewvc/phoenix/site/source/src/site/markdown/array_type.md?rev=1609071&r1=1609070&r2=1609071&view=diff
==
--- phoenix/site/source/src/site/markdown/array_type.md (original)
+++ phoenix/site/source/src/site/markdown/array_type.md Wed Jul  9 09:52:14 2014
@@ -46,10 +46,24 @@ The length of the array grows dynamicall
 
 Attempts to access an array element beyond the current length will evaluate to 
null.
 
+For searching in an array, built-in functions like ANY and ALL are provided.  
For example,
+
+SELECT region_name FROM regions WHERE '94030' = ANY(zip);
+SELECT region_name FROM regions WHERE '94030' = ALL(zip);
+
+The built-in function ANY checks whether any of the elements in the array satisfies 
the condition; it is equivalent to an OR condition:
+
+SELECT region_name FROM regions WHERE zip[1] = '94030' OR zip[2] = '94030' 
OR zip[3] = '94030';
+
+The built-in function ALL checks whether all the elements in the array satisfy 
the condition; it is equivalent to an AND condition:
+
+SELECT region_name FROM regions WHERE zip[1] = '94030' AND zip[2] = 
'94030' AND zip[3] = '94030';
+
+
+
 ###Limitations
 * Only one dimensional arrays are currently supported
 * For an array of fixed width types, null elements occurring in the middle of 
an array are not tracked.
 * The declaration of an array length at DDL time is not enforced currently, 
but maybe in the future. Note that it is persisted with the table metadata.
 * An array may only be used as the last column in a primary key constraint.
 * Partial update of an array is currently not possible. Instead, the array may 
be manipulated on the client-side and then upserted back in its entirety.
-* No support currently exists for searching in an array through the ALL or ANY 
built-in functions, but we welcome community contributions.