Apache-Phoenix | 4.x-HBase-1.1 | Build Successful
4.x-HBase-1.1 branch build status Successful Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-1.1 Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/lastSuccessfulBuild/artifact/ Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/lastCompletedBuild/testReport/ Changes Build times for last couple of runs. Latest build time is the rightmost | Legend blue: normal, red: test failure, gray: timeout
Apache-Phoenix | 4.x-HBase-1.1 | Build Successful
4.x-HBase-1.1 branch build status Successful Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-1.1 Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/lastSuccessfulBuild/artifact/ Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/lastCompletedBuild/testReport/ Changes Build times for last couple of runs. Latest build time is the rightmost | Legend blue: normal, red: test failure, gray: timeout
phoenix git commit: PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu)
Repository: phoenix Updated Branches: refs/heads/4.4-HBase-0.98 a52978e38 - aa288f8fe PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/aa288f8f Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/aa288f8f Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/aa288f8f Branch: refs/heads/4.4-HBase-0.98 Commit: aa288f8fe291438fd3c27bdbd0685102c7c0abc7 Parents: a52978e Author: Rajeshbabu Chintaguntla rajeshb...@apache.org Authored: Thu Jul 2 15:55:04 2015 +0530 Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org Committed: Thu Jul 2 15:55:04 2015 +0530 -- .../apache/phoenix/end2end/UserDefinedFunctionsIT.java | 12 ++-- phoenix-core/src/main/antlr3/PhoenixSQL.g | 3 ++- .../phoenix/coprocessor/MetaDataEndpointImpl.java | 9 + .../org/apache/phoenix/parse/FunctionParseNode.java | 9 +++-- .../main/java/org/apache/phoenix/parse/PFunction.java | 12 +--- .../java/org/apache/phoenix/schema/MetaDataClient.java | 6 +++--- 6 files changed, 28 insertions(+), 23 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/aa288f8f/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java index cee1c85..613231d 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java @@ -348,7 +348,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ } -tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar 
+tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); try { tenant2Conn.createStatement().execute(create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.UnknownClass' using jar @@ -424,7 +424,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.commit(); conn.createStatement().execute(create table t2(k integer primary key, k1 integer, lastname_reverse varchar)); conn.commit(); -stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); stmt.execute(create function myreverse3(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.+MY_REVERSE_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar1.jar+'); @@ -458,7 +458,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t4(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t4 values(1,1,'jock')); conn.commit(); -stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); ResultSet 
rs = stmt.executeQuery(select mysum(k,12) from t4); assertTrue(rs.next()); @@ -481,7 +481,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t9(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t9 values(1,1,'jock')); conn.commit(); -stmt.execute(create temporary function mysum9(INTEGER, INTEGER CONSTANT
phoenix git commit: PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu)
Repository: phoenix Updated Branches: refs/heads/4.4-HBase-1.0 3ff71e974 - 95edc578c PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/95edc578 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/95edc578 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/95edc578 Branch: refs/heads/4.4-HBase-1.0 Commit: 95edc578cb4038419be92879beb946af07180545 Parents: 3ff71e9 Author: Rajeshbabu Chintaguntla rajeshb...@apache.org Authored: Thu Jul 2 15:54:25 2015 +0530 Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org Committed: Thu Jul 2 15:54:25 2015 +0530 -- .../apache/phoenix/end2end/UserDefinedFunctionsIT.java | 12 ++-- phoenix-core/src/main/antlr3/PhoenixSQL.g | 3 ++- .../phoenix/coprocessor/MetaDataEndpointImpl.java | 9 + .../org/apache/phoenix/parse/FunctionParseNode.java | 9 +++-- .../main/java/org/apache/phoenix/parse/PFunction.java | 12 +--- .../java/org/apache/phoenix/schema/MetaDataClient.java | 6 +++--- 6 files changed, 28 insertions(+), 23 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/95edc578/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java index cee1c85..613231d 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java @@ -348,7 +348,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ } -tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar 
+tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); try { tenant2Conn.createStatement().execute(create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.UnknownClass' using jar @@ -424,7 +424,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.commit(); conn.createStatement().execute(create table t2(k integer primary key, k1 integer, lastname_reverse varchar)); conn.commit(); -stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); stmt.execute(create function myreverse3(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.+MY_REVERSE_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar1.jar+'); @@ -458,7 +458,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t4(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t4 values(1,1,'jock')); conn.commit(); -stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); ResultSet 
rs = stmt.executeQuery(select mysum(k,12) from t4); assertTrue(rs.next()); @@ -481,7 +481,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t9(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t9 values(1,1,'jock')); conn.commit(); -stmt.execute(create temporary function mysum9(INTEGER, INTEGER CONSTANT
phoenix git commit: PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu)
Repository: phoenix Updated Branches: refs/heads/master 54da7d1d6 - b2fb04b0c PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b2fb04b0 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b2fb04b0 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b2fb04b0 Branch: refs/heads/master Commit: b2fb04b0c2234c5b573642d39589ab9d36469723 Parents: 54da7d1 Author: Rajeshbabu Chintaguntla rajeshb...@apache.org Authored: Thu Jul 2 15:51:18 2015 +0530 Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org Committed: Thu Jul 2 15:51:18 2015 +0530 -- .../apache/phoenix/end2end/UserDefinedFunctionsIT.java | 12 ++-- phoenix-core/src/main/antlr3/PhoenixSQL.g | 3 ++- .../phoenix/coprocessor/MetaDataEndpointImpl.java | 9 + .../org/apache/phoenix/parse/FunctionParseNode.java | 9 +++-- .../main/java/org/apache/phoenix/parse/PFunction.java | 12 +--- .../java/org/apache/phoenix/schema/MetaDataClient.java | 6 +++--- 6 files changed, 28 insertions(+), 23 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2fb04b0/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java index cee1c85..613231d 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java @@ -348,7 +348,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ } -tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar 
+tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); try { tenant2Conn.createStatement().execute(create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.UnknownClass' using jar @@ -424,7 +424,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.commit(); conn.createStatement().execute(create table t2(k integer primary key, k1 integer, lastname_reverse varchar)); conn.commit(); -stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); stmt.execute(create function myreverse3(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.+MY_REVERSE_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar1.jar+'); @@ -458,7 +458,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t4(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t4 values(1,1,'jock')); conn.commit(); -stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); ResultSet 
rs = stmt.executeQuery(select mysum(k,12) from t4); assertTrue(rs.next()); @@ -481,7 +481,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t9(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t9 values(1,1,'jock')); conn.commit(); -stmt.execute(create temporary function mysum9(INTEGER, INTEGER CONSTANT defaultValue='10'
phoenix git commit: PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu)
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-1.0 329363f7d - 32aa19cc2 PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/32aa19cc Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/32aa19cc Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/32aa19cc Branch: refs/heads/4.x-HBase-1.0 Commit: 32aa19cc26d355316c829c02f95fd48e0183163d Parents: 329363f Author: Rajeshbabu Chintaguntla rajeshb...@apache.org Authored: Thu Jul 2 15:52:35 2015 +0530 Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org Committed: Thu Jul 2 15:52:35 2015 +0530 -- .../apache/phoenix/end2end/UserDefinedFunctionsIT.java | 12 ++-- phoenix-core/src/main/antlr3/PhoenixSQL.g | 3 ++- .../phoenix/coprocessor/MetaDataEndpointImpl.java | 9 + .../org/apache/phoenix/parse/FunctionParseNode.java | 9 +++-- .../main/java/org/apache/phoenix/parse/PFunction.java | 12 +--- .../java/org/apache/phoenix/schema/MetaDataClient.java | 6 +++--- 6 files changed, 28 insertions(+), 23 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/32aa19cc/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java index cee1c85..613231d 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java @@ -348,7 +348,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ } -tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar 
+tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); try { tenant2Conn.createStatement().execute(create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.UnknownClass' using jar @@ -424,7 +424,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.commit(); conn.createStatement().execute(create table t2(k integer primary key, k1 integer, lastname_reverse varchar)); conn.commit(); -stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); stmt.execute(create function myreverse3(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.+MY_REVERSE_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar1.jar+'); @@ -458,7 +458,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t4(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t4 values(1,1,'jock')); conn.commit(); -stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); ResultSet 
rs = stmt.executeQuery(select mysum(k,12) from t4); assertTrue(rs.next()); @@ -481,7 +481,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t9(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t9 values(1,1,'jock')); conn.commit(); -stmt.execute(create temporary function mysum9(INTEGER, INTEGER CONSTANT
phoenix git commit: PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu)
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-0.98 0a530565f - a7a55ab53 PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a7a55ab5 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a7a55ab5 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a7a55ab5 Branch: refs/heads/4.x-HBase-0.98 Commit: a7a55ab5354ef07a53e875e3812b50f8cf674594 Parents: 0a53056 Author: Rajeshbabu Chintaguntla rajeshb...@apache.org Authored: Thu Jul 2 15:53:12 2015 +0530 Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org Committed: Thu Jul 2 15:53:12 2015 +0530 -- .../apache/phoenix/end2end/UserDefinedFunctionsIT.java | 12 ++-- phoenix-core/src/main/antlr3/PhoenixSQL.g | 3 ++- .../phoenix/coprocessor/MetaDataEndpointImpl.java | 9 + .../org/apache/phoenix/parse/FunctionParseNode.java | 9 +++-- .../main/java/org/apache/phoenix/parse/PFunction.java | 12 +--- .../java/org/apache/phoenix/schema/MetaDataClient.java | 6 +++--- 6 files changed, 28 insertions(+), 23 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/a7a55ab5/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java index cee1c85..613231d 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java @@ -348,7 +348,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ } -tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar 
+tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); try { tenant2Conn.createStatement().execute(create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.UnknownClass' using jar @@ -424,7 +424,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.commit(); conn.createStatement().execute(create table t2(k integer primary key, k1 integer, lastname_reverse varchar)); conn.commit(); -stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); stmt.execute(create function myreverse3(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.+MY_REVERSE_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar1.jar+'); @@ -458,7 +458,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t4(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t4 values(1,1,'jock')); conn.commit(); -stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); ResultSet 
rs = stmt.executeQuery(select mysum(k,12) from t4); assertTrue(rs.next()); @@ -481,7 +481,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t9(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t9 values(1,1,'jock')); conn.commit(); -stmt.execute(create temporary function mysum9(INTEGER, INTEGER CONSTANT
[20/47] phoenix git commit: PHOENIX-2021 - Implement ARRAY_CAT built in function (Dumindu Buddhika)
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7385899d/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayConcatFunctionTest.java -- diff --git a/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayConcatFunctionTest.java b/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayConcatFunctionTest.java new file mode 100644 index 000..75d0827 --- /dev/null +++ b/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayConcatFunctionTest.java @@ -0,0 +1,584 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * License); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.phoenix.expression; + +import static org.junit.Assert.assertEquals; + +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.SQLException; +import java.util.List; + +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.phoenix.exception.DataExceedsCapacityException; +import org.apache.phoenix.expression.function.ArrayConcatFunction; +import org.apache.phoenix.schema.SortOrder; +import org.apache.phoenix.schema.TypeMismatchException; +import org.apache.phoenix.schema.types.*; +import org.junit.Test; + +import com.google.common.collect.Lists; + +public class ArrayConcatFunctionTest { + +private static void testExpression(LiteralExpression array1, LiteralExpression array2, PhoenixArray expected) +throws SQLException { +ListExpression expressions = Lists.newArrayList((Expression) array1); +expressions.add(array2); + +Expression arrayConcatFunction = new ArrayConcatFunction(expressions); +ImmutableBytesWritable ptr = new ImmutableBytesWritable(); +arrayConcatFunction.evaluate(null, ptr); +PhoenixArray result = (PhoenixArray) arrayConcatFunction.getDataType().toObject(ptr, expressions.get(0).getSortOrder(), array1.getMaxLength(), array1.getScale()); +assertEquals(expected, result); +} + +private static void test(PhoenixArray array1, PhoenixArray array2, PDataType array1DataType, Integer arr1MaxLen, Integer arr1Scale, PDataType array2DataType, Integer arr2MaxLen, Integer arr2Scale, PhoenixArray expected, SortOrder array1SortOrder, SortOrder array2SortOrder) throws SQLException { +LiteralExpression array1Literal, array2Literal; +array1Literal = LiteralExpression.newConstant(array1, array1DataType, arr1MaxLen, arr1Scale, array1SortOrder, Determinism.ALWAYS); +array2Literal = LiteralExpression.newConstant(array2, array2DataType, arr2MaxLen, arr2Scale, array2SortOrder, Determinism.ALWAYS); +testExpression(array1Literal, array2Literal, expected); +} + +@Test +public void testChar1() throws SQLException { 
+Object[] o1 = new Object[]{aa, bb}; +Object[] o2 = new Object[]{c, d}; +Object[] e = new Object[]{aa, bb, c, d}; +PDataType type = PCharArray.INSTANCE; +PDataType base = PChar.INSTANCE; + +PhoenixArray arr1 = new PhoenixArray(base, o1); +PhoenixArray arr2 = new PhoenixArray(base, o2); +PhoenixArray expected = new PhoenixArray(base, e); +test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.ASC, SortOrder.ASC); +test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.DESC, SortOrder.DESC); +test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.ASC, SortOrder.DESC); +test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.DESC, SortOrder.ASC); + +} + +@Test +public void testChar2() throws SQLException { +Object[] o1 = new Object[]{aa, bb}; +Object[] o2 = new Object[]{cc, dc, ee}; +Object[] e = new Object[]{aa, bb, cc, dc, ee}; +PDataType type = PCharArray.INSTANCE; +PDataType base = PChar.INSTANCE; + +PhoenixArray arr1 = new PhoenixArray(base, o1); +PhoenixArray arr2 = new PhoenixArray(base, o2); +PhoenixArray expected = new PhoenixArray(base, e); +test(arr1, arr2, type, 2, null, type, 2, null, expected, SortOrder.ASC, SortOrder.ASC); +test(arr1, arr2, type, 2, null, type, 2, null, expected, SortOrder.ASC, SortOrder.DESC); +
[10/47] phoenix git commit: PHOENIX-1935 org.apache.phoenix.end2end.ArithmeticQueryIT tests are failing (Alicia Ying Shu)
PHOENIX-1935 org.apache.phoenix.end2end.ArithmeticQueryIT tests are failing (Alicia Ying Shu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/80600488 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/80600488 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/80600488 Branch: refs/heads/calcite Commit: 80600488f50fd000d74155ee17abfaa19ec39c69 Parents: db7b575 Author: Nick Dimiduk ndimi...@apache.org Authored: Wed Jun 17 12:28:35 2015 -0700 Committer: Nick Dimiduk ndimi...@apache.org Committed: Wed Jun 17 12:28:35 2015 -0700 -- .../src/it/java/org/apache/phoenix/end2end/BaseViewIT.java | 2 ++ phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java | 3 +++ .../src/test/java/org/apache/phoenix/query/BaseTest.java| 5 - 3 files changed, 9 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/80600488/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java index b9d7180..3140077 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java @@ -98,6 +98,7 @@ public abstract class BaseViewIT extends BaseOwnClusterHBaseManagedTimeIT { assertEquals(1, rs.getInt(1)); assertEquals(121, rs.getInt(2)); assertFalse(rs.next()); +conn.close(); } protected void testUpdatableViewIndex(Integer saltBuckets) throws Exception { @@ -179,6 +180,7 @@ public abstract class BaseViewIT extends BaseOwnClusterHBaseManagedTimeIT { + CLIENT MERGE SORT, QueryUtil.getExplainPlan(rs)); } +conn.close(); } http://git-wip-us.apache.org/repos/asf/phoenix/blob/80600488/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java index 266438d..fb58a8f 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java @@ -92,8 +92,11 @@ public class ViewIT extends BaseViewIT { fail(); } catch (ReadOnlyTableException e) { +} finally { +conn.close(); } +conn = DriverManager.getConnection(getUrl()); int count = 0; ResultSet rs = conn.createStatement().executeQuery(SELECT k FROM v2); while (rs.next()) { http://git-wip-us.apache.org/repos/asf/phoenix/blob/80600488/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java -- diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java index fa78656..3f09518 100644 --- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java +++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java @@ -115,6 +115,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.IntegrationTestingUtility; +import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.coprocessor.RegionServerObserver; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; @@ -1634,7 +1635,9 @@ public abstract class BaseTest { for (HTableDescriptor table : tables) { String schemaName = SchemaUtil.getSchemaNameFromFullName(table.getName()); if (!QueryConstants.SYSTEM_SCHEMA_NAME.equals(schemaName)) { -admin.disableTable(table.getName()); +try{ +admin.disableTable(table.getName()); +} catch (TableNotEnabledException ignored){} admin.deleteTable(table.getName()); } }
[23/47] phoenix git commit: PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with InvocationTargetException(Rajeshbabu)
PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with InvocationTargetException(Rajeshbabu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/50f3a041 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/50f3a041 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/50f3a041 Branch: refs/heads/calcite Commit: 50f3a04126c4fea59dc9eb978cef1399892d9a4a Parents: b58a62a Author: Rajeshbabu Chintaguntla rajeshb...@apache.org Authored: Thu Jun 25 00:44:25 2015 +0530 Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org Committed: Thu Jun 25 00:44:25 2015 +0530 -- .../phoenix/end2end/UserDefinedFunctionsIT.java | 22 1 file changed, 18 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/50f3a041/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java index c6bd62f..cee1c85 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java @@ -36,8 +36,10 @@ import java.io.OutputStream; import java.sql.Connection; import java.sql.ResultSet; import java.sql.Statement; +import java.util.HashSet; import java.util.Map; import java.util.Properties; +import java.util.Set; import java.util.jar.Attributes; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; @@ -614,10 +616,22 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, 1.0); FileOutputStream jarFos = new FileOutputStream(jarPath); JarOutputStream jarOutputStream = new JarOutputStream(jarFos, manifest); -String pathToAdd =packageName.replace('.', File.separatorChar) -+ File.separator; 
-jarOutputStream.putNextEntry(new JarEntry(pathToAdd)); -jarOutputStream.closeEntry(); +String pathToAdd = packageName.replace('.', '/') + '/'; +String jarPathStr = new String(pathToAdd); +SetString pathsInJar = new HashSetString(); + +while (pathsInJar.add(jarPathStr)) { +int ix = jarPathStr.lastIndexOf('/', jarPathStr.length() - 2); +if (ix 0) { +break; +} +jarPathStr = jarPathStr.substring(0, ix); +} +for (String pathInJar : pathsInJar) { +jarOutputStream.putNextEntry(new JarEntry(pathInJar)); +jarOutputStream.closeEntry(); +} + jarOutputStream.putNextEntry(new JarEntry(pathToAdd + classFile.getName())); byte[] allBytes = new byte[(int) classFile.length()]; FileInputStream fis = new FileInputStream(classFile);
[15/47] phoenix git commit: PHOENIX-1504 Support adding column to a table that has views (Samarth Jain/Dave Hacker)
PHOENIX-1504 Support adding column to a table that has views (Samarth Jain/Dave Hacker) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e78eb6fa Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e78eb6fa Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e78eb6fa Branch: refs/heads/calcite Commit: e78eb6faceec40d8b09fbc7dde778b87fe54feef Parents: 2d70eff Author: Samarth samarth.j...@salesforce.com Authored: Thu Jun 18 15:37:37 2015 -0700 Committer: Samarth samarth.j...@salesforce.com Committed: Thu Jun 18 15:37:37 2015 -0700 -- .../apache/phoenix/end2end/AlterTableIT.java| 356 + .../end2end/TenantSpecificTablesDDLIT.java | 20 +- .../org/apache/phoenix/end2end/UpgradeIT.java | 332 .../coprocessor/MetaDataEndpointImpl.java | 262 +--- .../phoenix/coprocessor/MetaDataProtocol.java | 4 +- .../coprocessor/generated/PTableProtos.java | 103 - .../phoenix/jdbc/PhoenixDatabaseMetaData.java | 3 +- .../query/ConnectionQueryServicesImpl.java | 51 ++- .../apache/phoenix/query/QueryConstants.java| 30 +- .../apache/phoenix/schema/DelegateTable.java| 5 + .../apache/phoenix/schema/MetaDataClient.java | 37 +- .../java/org/apache/phoenix/schema/PTable.java | 1 + .../org/apache/phoenix/schema/PTableImpl.java | 40 +- .../java/org/apache/phoenix/util/ByteUtil.java | 10 +- .../org/apache/phoenix/util/UpgradeUtil.java| 395 ++- 15 files changed, 1495 insertions(+), 154 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/e78eb6fa/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java index 59698d6..61dd6a9 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java @@ -18,6 +18,7 @@ package org.apache.phoenix.end2end; 
import static org.apache.hadoop.hbase.HColumnDescriptor.DEFAULT_REPLICATION_SCOPE; +import static org.apache.phoenix.exception.SQLExceptionCode.CANNOT_MUTATE_TABLE; import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES; import static org.apache.phoenix.util.TestUtil.closeConnection; import static org.apache.phoenix.util.TestUtil.closeStatement; @@ -32,9 +33,11 @@ import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; +import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Properties; @@ -48,8 +51,10 @@ import org.apache.phoenix.exception.SQLExceptionCode; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData; import org.apache.phoenix.query.QueryConstants; +import org.apache.phoenix.schema.PColumn; import org.apache.phoenix.schema.PTable; import org.apache.phoenix.schema.PTableKey; +import org.apache.phoenix.schema.PTableType; import org.apache.phoenix.schema.TableNotFoundException; import org.apache.phoenix.util.IndexUtil; import org.apache.phoenix.util.PhoenixRuntime; @@ -59,6 +64,8 @@ import org.apache.phoenix.util.SchemaUtil; import org.junit.BeforeClass; import org.junit.Test; +import com.google.common.base.Objects; + /** * * A lot of tests in this class test HBase level properties. 
As a result, @@ -1988,4 +1995,353 @@ public class AlterTableIT extends BaseOwnClusterHBaseManagedTimeIT { conn.close(); } } + +@Test +public void testAddColumnToTableWithViews() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +try { +conn.createStatement().execute(CREATE TABLE IF NOT EXISTS TABLEWITHVIEW ( ++ ID char(1) NOT NULL, ++ COL1 integer NOT NULL, ++ COL2 bigint NOT NULL, ++ CONSTRAINT NAME_PK PRIMARY KEY (ID, COL1, COL2) ++ )); +assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, null, 0, 3, -1, ID, COL1, COL2); + +conn.createStatement().execute(CREATE VIEW VIEWOFTABLE ( VIEW_COL1 SMALLINT ) AS SELECT * FROM TABLEWITHVIEW); +assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, TABLEWITHVIEW, 0, 4, 3, ID, COL1, COL2, VIEW_COL1); + +conn.createStatement().execute(ALTER TABLE TABLEWITHVIEW ADD COL3 char(10)); +
[27/47] phoenix git commit: PHOENIX-2073 Two bytes character in LIKE expression is not allowed (Yuhao Bi)
PHOENIX-2073 Two bytes character in LIKE expression is not allowed (Yuhao Bi) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/11577dd7 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/11577dd7 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/11577dd7 Branch: refs/heads/calcite Commit: 11577dd7b2f722cff0ef410bed60ec1ef6b9c55c Parents: c1e5723 Author: Yuhao Bi byh0...@gmail.com Authored: Thu Jun 25 15:41:06 2015 +0800 Committer: Thomas D'Silva tdsi...@salesforce.com Committed: Fri Jun 26 11:02:00 2015 -0700 -- .../apache/phoenix/compile/WhereOptimizer.java| 3 ++- .../phoenix/compile/WhereOptimizerTest.java | 18 ++ 2 files changed, 20 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/11577dd7/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java index b7f04e0..0cbef11 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java @@ -65,6 +65,7 @@ import org.apache.phoenix.schema.tuple.Tuple; import org.apache.phoenix.schema.types.PChar; import org.apache.phoenix.schema.types.PDataType; import org.apache.phoenix.schema.types.PVarbinary; +import org.apache.phoenix.schema.types.PVarchar; import org.apache.phoenix.util.ByteUtil; import org.apache.phoenix.util.MetaDataUtil; import org.apache.phoenix.util.ScanUtil; @@ -952,7 +953,7 @@ public class WhereOptimizer { KeySlots childSlots = childParts.get(0); KeySlot childSlot = childSlots.iterator().next(); final String startsWith = node.getLiteralPrefix(); -byte[] key = PChar.INSTANCE.toBytes(startsWith, node.getChildren().get(0).getSortOrder()); +byte[] key = PVarchar.INSTANCE.toBytes(startsWith, 
node.getChildren().get(0).getSortOrder()); // If the expression is an equality expression against a fixed length column // and the key length doesn't match the column length, the expression can // never be true. http://git-wip-us.apache.org/repos/asf/phoenix/blob/11577dd7/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java -- diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java index adbd9a2..c1787ca 100644 --- a/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java +++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java @@ -688,6 +688,24 @@ public class WhereOptimizerTest extends BaseConnectionlessQueryTest { } @Test +public void testLikeExtractAllKeyExpression2() throws SQLException { +String tenantId = 001; +String keyPrefix = ä¸æ; +String query = select * from atable where organization_id = ? and entity_id LIKE ' + keyPrefix + %'; +ListObject binds = Arrays.ObjectasList(tenantId); +StatementContext context = compileStatement(query, binds); +Scan scan = context.getScan(); + +assertNull(scan.getFilter()); +byte[] startRow = ByteUtil.concat( + PVarchar.INSTANCE.toBytes(tenantId),StringUtil.padChar(PVarchar.INSTANCE.toBytes(keyPrefix),15)); +assertArrayEquals(startRow, scan.getStartRow()); +byte[] stopRow = ByteUtil.concat( + PVarchar.INSTANCE.toBytes(tenantId),StringUtil.padChar(ByteUtil.nextKey(PVarchar.INSTANCE.toBytes(keyPrefix)),15)); +assertArrayEquals(stopRow, scan.getStopRow()); +} + +@Test public void testLikeExtractAllAsEqKeyExpression() throws SQLException { String tenantId = 001; String keyPrefix = 002;
[47/47] phoenix git commit: Fix compilation errors after merge
Fix compilation errors after merge Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1327c726 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1327c726 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1327c726 Branch: refs/heads/calcite Commit: 1327c726aebf5940ef13d334193d56e0e562f2cc Parents: b586007 b2fb04b Author: maryannxue wei@intel.com Authored: Thu Jul 2 17:00:44 2015 -0400 Committer: maryannxue wei@intel.com Committed: Thu Jul 2 17:00:44 2015 -0400 -- .gitignore | 2 + bin/phoenix_utils.py| 10 +- phoenix-assembly/pom.xml| 4 + phoenix-assembly/src/build/server.xml | 1 + .../phoenix/end2end/AbsFunctionEnd2EndIT.java | 108 +++ .../apache/phoenix/end2end/AlterTableIT.java| 553 - .../phoenix/end2end/ArrayAppendFunctionIT.java | 17 - .../phoenix/end2end/ArrayConcatFunctionIT.java | 578 + .../phoenix/end2end/ArrayFillFunctionIT.java| 531 .../phoenix/end2end/ArraysWithNullsIT.java | 2 +- .../org/apache/phoenix/end2end/BaseViewIT.java | 2 + .../phoenix/end2end/CSVCommonsLoaderIT.java | 18 +- .../phoenix/end2end/CbrtFunctionEnd2EndIT.java | 143 .../phoenix/end2end/End2EndTestDriver.java | 19 +- .../phoenix/end2end/ExpFunctionEnd2EndIT.java | 128 +++ .../phoenix/end2end/LnLogFunctionEnd2EndIT.java | 143 .../phoenix/end2end/PhoenixMetricsIT.java | 147 .../phoenix/end2end/PowerFunctionEnd2EndIT.java | 144 .../end2end/QueryDatabaseMetaDataIT.java| 31 + .../phoenix/end2end/RowValueConstructorIT.java | 28 + .../end2end/TenantSpecificTablesDDLIT.java | 40 +- .../org/apache/phoenix/end2end/UpgradeIT.java | 332 .../phoenix/end2end/UserDefinedFunctionsIT.java | 34 +- .../java/org/apache/phoenix/end2end/ViewIT.java | 99 +++ .../apache/phoenix/execute/PartialCommitIT.java | 1 + .../phoenix/mapreduce/CsvBulkLoadToolIT.java| 19 + .../apache/phoenix/mapreduce/IndexToolIT.java | 47 +- .../phoenix/monitoring/PhoenixMetricsIT.java| 815 +++ .../org/apache/phoenix/rpc/UpdateCacheIT.java | 139 
phoenix-core/src/it/resources/hbase-default.xml | 36 - phoenix-core/src/it/resources/hbase-site.xml| 36 + phoenix-core/src/main/antlr3/PhoenixSQL.g | 3 +- .../apache/phoenix/cache/ServerCacheClient.java | 7 + .../rel/PhoenixToEnumerableConverter.java | 9 +- .../apache/phoenix/compile/DeleteCompiler.java | 50 +- .../MutatingParallelIteratorFactory.java| 51 +- .../apache/phoenix/compile/QueryCompiler.java | 2 +- .../org/apache/phoenix/compile/QueryPlan.java | 3 + .../phoenix/compile/StatementContext.java | 49 +- .../apache/phoenix/compile/TraceQueryPlan.java | 9 +- .../apache/phoenix/compile/UpsertCompiler.java | 80 +- .../apache/phoenix/compile/WhereOptimizer.java | 28 +- .../coprocessor/MetaDataEndpointImpl.java | 549 ++--- .../phoenix/coprocessor/MetaDataProtocol.java | 4 +- .../coprocessor/generated/PTableProtos.java | 103 ++- .../phoenix/exception/SQLExceptionCode.java | 2 - .../apache/phoenix/execute/AggregatePlan.java | 11 +- .../apache/phoenix/execute/BaseQueryPlan.java | 14 +- .../phoenix/execute/ClientAggregatePlan.java| 5 +- .../phoenix/execute/ClientProcessingPlan.java | 9 + .../apache/phoenix/execute/ClientScanPlan.java | 5 +- .../phoenix/execute/DegenerateQueryPlan.java| 3 +- .../apache/phoenix/execute/HashJoinPlan.java| 16 +- .../apache/phoenix/execute/MutationState.java | 290 --- .../org/apache/phoenix/execute/ScanPlan.java| 7 +- .../phoenix/execute/SortMergeJoinPlan.java | 13 +- .../phoenix/execute/TupleProjectionPlan.java| 11 +- .../org/apache/phoenix/execute/UnionPlan.java | 14 +- .../phoenix/expression/ExpressionType.java | 18 +- .../expression/function/AbsFunction.java| 66 ++ .../function/ArrayAppendFunction.java | 53 +- .../function/ArrayConcatFunction.java | 83 ++ .../expression/function/ArrayFillFunction.java | 79 ++ .../function/ArrayModifierFunction.java | 155 +++- .../function/ArrayPrependFunction.java | 54 +- .../expression/function/CbrtFunction.java | 55 ++ .../expression/function/ExpFunction.java| 55 ++ 
.../function/JavaMathOneArgumentFunction.java | 43 +- .../function/JavaMathTwoArgumentFunction.java | 69 ++ .../phoenix/expression/function/LnFunction.java | 55 ++ .../expression/function/LogFunction.java| 56 ++ .../expression/function/PowerFunction.java | 51 ++ .../expression/function/ScalarFunction.java | 4 +- .../expression/function/SqrtFunction.java | 8 +-
[33/47] phoenix git commit: PHOENIX-1819 Build a framework to capture and report phoenix client side request level metrics
PHOENIX-1819 Build a framework to capture and report phoenix client side request level metrics Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0f6595c0 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0f6595c0 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0f6595c0 Branch: refs/heads/calcite Commit: 0f6595c0c511a3f07c51cf92d1ced665556b7d4c Parents: 9c069bd Author: Samarth samarth.j...@salesforce.com Authored: Fri Jun 26 16:44:43 2015 -0700 Committer: Samarth samarth.j...@salesforce.com Committed: Fri Jun 26 16:44:43 2015 -0700 -- .../phoenix/end2end/PhoenixMetricsIT.java | 147 .../apache/phoenix/execute/PartialCommitIT.java | 1 + .../phoenix/monitoring/PhoenixMetricsIT.java| 815 +++ .../apache/phoenix/cache/ServerCacheClient.java | 7 + .../apache/phoenix/compile/DeleteCompiler.java | 50 +- .../MutatingParallelIteratorFactory.java| 51 +- .../phoenix/compile/StatementContext.java | 49 +- .../apache/phoenix/compile/UpsertCompiler.java | 80 +- .../apache/phoenix/execute/AggregatePlan.java | 8 +- .../apache/phoenix/execute/HashJoinPlan.java| 7 + .../apache/phoenix/execute/MutationState.java | 290 --- .../org/apache/phoenix/execute/UnionPlan.java | 8 +- .../phoenix/iterate/BaseResultIterators.java| 15 +- .../phoenix/iterate/ChunkedResultIterator.java | 21 +- .../iterate/ParallelIteratorFactory.java| 4 +- .../phoenix/iterate/ParallelIterators.java | 25 +- .../iterate/RoundRobinResultIterator.java | 4 +- .../phoenix/iterate/ScanningResultIterator.java | 38 +- .../apache/phoenix/iterate/SerialIterators.java | 23 +- .../phoenix/iterate/SpoolingResultIterator.java | 49 +- .../phoenix/iterate/TableResultIterator.java| 17 +- .../phoenix/iterate/UnionResultIterators.java | 70 +- .../apache/phoenix/jdbc/PhoenixConnection.java | 27 +- .../phoenix/jdbc/PhoenixDatabaseMetaData.java | 21 +- .../apache/phoenix/jdbc/PhoenixResultSet.java | 48 +- .../apache/phoenix/jdbc/PhoenixStatement.java | 
20 +- .../java/org/apache/phoenix/job/JobManager.java | 60 +- .../phoenix/mapreduce/CsvBulkLoadTool.java | 10 +- .../phoenix/mapreduce/PhoenixRecordReader.java | 12 +- .../phoenix/memory/GlobalMemoryManager.java | 5 - .../apache/phoenix/monitoring/AtomicMetric.java | 70 ++ .../phoenix/monitoring/CombinableMetric.java| 77 ++ .../monitoring/CombinableMetricImpl.java| 77 ++ .../org/apache/phoenix/monitoring/Counter.java | 85 -- .../phoenix/monitoring/GlobalClientMetrics.java | 117 +++ .../apache/phoenix/monitoring/GlobalMetric.java | 37 + .../phoenix/monitoring/GlobalMetricImpl.java| 74 ++ .../phoenix/monitoring/MemoryMetricsHolder.java | 43 + .../org/apache/phoenix/monitoring/Metric.java | 45 +- .../apache/phoenix/monitoring/MetricType.java | 55 ++ .../phoenix/monitoring/MetricsStopWatch.java| 59 ++ .../phoenix/monitoring/MutationMetricQueue.java | 131 +++ .../phoenix/monitoring/NonAtomicMetric.java | 71 ++ .../phoenix/monitoring/OverAllQueryMetrics.java | 121 +++ .../phoenix/monitoring/PhoenixMetrics.java | 118 --- .../phoenix/monitoring/ReadMetricQueue.java | 180 .../phoenix/monitoring/SizeStatistic.java | 78 -- .../monitoring/SpoolingMetricsHolder.java | 43 + .../monitoring/TaskExecutionMetricsHolder.java | 68 ++ .../phoenix/query/BaseQueryServicesImpl.java| 2 +- .../org/apache/phoenix/query/QueryServices.java | 3 +- .../phoenix/query/QueryServicesOptions.java | 25 +- .../phoenix/trace/PhoenixMetricsSink.java | 36 +- .../java/org/apache/phoenix/util/JDBCUtil.java | 6 +- .../org/apache/phoenix/util/PhoenixRuntime.java | 175 +++- .../iterate/SpoolingResultIteratorTest.java | 4 +- 56 files changed, 2930 insertions(+), 852 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/0f6595c0/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixMetricsIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixMetricsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixMetricsIT.java deleted file mode 100644 index 
edb4042..000 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixMetricsIT.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in
[28/47] phoenix git commit: PHOENIX-2056 Ensure PK column from base table is added to any indexes on views
PHOENIX-2056 Ensure PK column from base table is added to any indexes on views Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7918a3d9 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7918a3d9 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7918a3d9 Branch: refs/heads/calcite Commit: 7918a3d94d19f1d57f55b88834680760605e575c Parents: 11577dd Author: Samarth samarth.j...@salesforce.com Authored: Fri Jun 26 16:04:46 2015 -0700 Committer: Samarth samarth.j...@salesforce.com Committed: Fri Jun 26 16:04:46 2015 -0700 -- .../apache/phoenix/end2end/AlterTableIT.java| 184 ++- .../coprocessor/MetaDataEndpointImpl.java | 145 ++- .../java/org/apache/phoenix/util/ByteUtil.java | 10 +- 3 files changed, 319 insertions(+), 20 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/7918a3d9/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java index 61dd6a9..ae5f940 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java @@ -46,10 +46,12 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeepDeletedCells; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.phoenix.compile.QueryPlan; import org.apache.phoenix.coprocessor.MetaDataProtocol; import org.apache.phoenix.exception.SQLExceptionCode; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData; +import org.apache.phoenix.jdbc.PhoenixStatement; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.schema.PColumn; import org.apache.phoenix.schema.PTable; @@ -2303,13 +2305,23 @@ public class 
AlterTableIT extends BaseOwnClusterHBaseManagedTimeIT { String alterBaseTable = ALTER TABLE + baseTable + ADD NEW_PK varchar primary key ; globalConn.createStatement().execute(alterBaseTable); - + // verify that the new column new_pk is now part of the primary key for the entire hierarchy - assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), PK1, baseTable)); - assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), PK1, view1)); - assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), PK1, view2)); - assertTrue(checkColumnPartOfPk(tenant2Conn.unwrap(PhoenixConnection.class), PK1, view3)); - assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), PK1, view4)); + +globalConn.createStatement().execute(SELECT * FROM + baseTable); + assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), NEW_PK, baseTable)); + +tenant1Conn.createStatement().execute(SELECT * FROM + view1); + assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), NEW_PK, view1)); + +tenant1Conn.createStatement().execute(SELECT * FROM + view2); + assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), NEW_PK, view2)); + +tenant2Conn.createStatement().execute(SELECT * FROM + view3); + assertTrue(checkColumnPartOfPk(tenant2Conn.unwrap(PhoenixConnection.class), NEW_PK, view3)); + +globalConn.createStatement().execute(SELECT * FROM + view4); + assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), NEW_PK, view4)); } finally { if (tenant1Conn != null) { @@ -2344,4 +2356,164 @@ public class AlterTableIT extends BaseOwnClusterHBaseManagedTimeIT { tenantProps.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId); return DriverManager.getConnection(getUrl(), tenantProps); } + +@Test +public void testAddPKColumnToBaseTableWhoseViewsHaveIndices() throws Exception { +String baseTable = testAddPKColumnToBaseTableWhoseViewsHaveIndices; +String view1 = view1; 
+String view2 = view2; +String view3 = view3; +String tenant1 = tenant1; +String tenant2 = tenant2; +String view2Index = view2 + _idx; +String view3Index = view3 + _idx; +/* baseTable(mutli-tenant) + / \
[02/47] phoenix git commit: PHOENIX-1660 Implement missing math built-in functions ABS, POWER, LN, LOG, SQRT, CBRT, EXP (Shuxiong Ye)
PHOENIX-1660 Implement missing math built-in functions ABS, POWER, LN, LOG, SQRT, CBRT, EXP (Shuxiong Ye) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c2927dde Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c2927dde Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c2927dde Branch: refs/heads/calcite Commit: c2927ddec5ab954dd779516ed29b4b7fa4b011d9 Parents: d1934af Author: James Taylor jamestay...@apache.org Authored: Mon Jun 15 15:53:44 2015 -0700 Committer: James Taylor jamestay...@apache.org Committed: Mon Jun 15 15:53:44 2015 -0700 -- .../phoenix/end2end/AbsFunctionEnd2EndIT.java | 108 +++ .../phoenix/end2end/CbrtFunctionEnd2EndIT.java | 143 +++ .../phoenix/end2end/ExpFunctionEnd2EndIT.java | 128 + .../phoenix/end2end/LnLogFunctionEnd2EndIT.java | 143 +++ .../phoenix/end2end/PowerFunctionEnd2EndIT.java | 144 +++ .../phoenix/expression/ExpressionType.java | 14 +- .../expression/function/AbsFunction.java| 66 +++ .../expression/function/CbrtFunction.java | 55 ++ .../expression/function/ExpFunction.java| 55 ++ .../function/JavaMathOneArgumentFunction.java | 43 ++--- .../function/JavaMathTwoArgumentFunction.java | 69 +++ .../phoenix/expression/function/LnFunction.java | 55 ++ .../expression/function/LogFunction.java| 56 ++ .../expression/function/PowerFunction.java | 51 ++ .../expression/function/ScalarFunction.java | 4 +- .../expression/function/SqrtFunction.java | 8 +- .../apache/phoenix/schema/types/PDecimal.java | 11 ++ .../phoenix/schema/types/PNumericType.java | 8 + .../phoenix/schema/types/PRealNumber.java | 8 + .../phoenix/schema/types/PWholeNumber.java | 8 + .../phoenix/compile/QueryCompilerTest.java | 68 ++- .../phoenix/expression/AbsFunctionTest.java | 180 ++ .../phoenix/expression/CbrtFunctionTest.java| 127 + .../phoenix/expression/ExpFunctionTest.java | 150 +++ .../phoenix/expression/LnLogFunctionTest.java | 182 +++ 
.../phoenix/expression/PowerFunctionTest.java | 182 +++ 26 files changed, 2036 insertions(+), 30 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/c2927dde/phoenix-core/src/it/java/org/apache/phoenix/end2end/AbsFunctionEnd2EndIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AbsFunctionEnd2EndIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AbsFunctionEnd2EndIT.java new file mode 100644 index 000..0c6204c --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AbsFunctionEnd2EndIT.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * License); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.phoenix.end2end; + +import static org.apache.phoenix.util.TestUtil.closeStmtAndConn; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.math.BigDecimal; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; + +import org.apache.phoenix.expression.function.AbsFunction; +import org.junit.Before; +import org.junit.Test; + +/** + * End to end tests for {@link AbsFunction} + */ +public class AbsFunctionEnd2EndIT extends BaseHBaseManagedTimeIT { + +private static final String KEY = key; + +@Before +public void initTable() throws Exception { +Connection conn = null; +PreparedStatement stmt = null; +try { +conn = DriverManager.getConnection(getUrl()); +String ddl; +ddl = CREATE TABLE testSigned (k VARCHAR NOT NULL PRIMARY KEY, dec DECIMAL, doub DOUBLE, fl FLOAT, inte INTEGER, lon BIGINT, smalli SMALLINT, tinyi TINYINT); +conn.createStatement().execute(ddl); +conn.commit();
[25/47] phoenix git commit: PHOENIX-1975 Detect and use HBASE_HOME when set
PHOENIX-1975 Detect and use HBASE_HOME when set Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3e493398 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3e493398 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3e493398 Branch: refs/heads/calcite Commit: 3e49339828e64842891879a18248a91601740dd0 Parents: 3cf22a7 Author: Nick Dimiduk ndimi...@apache.org Authored: Wed Jun 24 13:59:00 2015 -0700 Committer: Nick Dimiduk ndimi...@apache.org Committed: Wed Jun 24 13:59:00 2015 -0700 -- bin/phoenix_utils.py | 10 +- 1 file changed, 9 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/3e493398/bin/phoenix_utils.py -- diff --git a/bin/phoenix_utils.py b/bin/phoenix_utils.py index 383e0e1..bfb4737 100755 --- a/bin/phoenix_utils.py +++ b/bin/phoenix_utils.py @@ -65,7 +65,15 @@ def setPath(): phoenix_class_path = os.getenv('PHOENIX_CLASS_PATH','') global hbase_conf_dir -hbase_conf_dir = os.getenv('HBASE_CONF_DIR', os.getenv('HBASE_CONF_PATH', '.')) +# if HBASE_CONF_DIR set explicitly, use that +hbase_conf_dir = os.getenv('HBASE_CONF_DIR', os.getenv('HBASE_CONF_PATH')) +if not hbase_conf_dir: +# else fall back to HBASE_HOME +if os.getenv('HBASE_HOME'): +hbase_conf_dir = os.path.join(os.getenv('HBASE_HOME'), conf) +else: +# default to pwd +hbase_conf_dir = '.' global hbase_conf_path # keep conf_path around for backward compatibility hbase_conf_path = hbase_conf_dir
[45/47] phoenix git commit: PHOENIX-2059 MR index build does not handle table with a schema name correctly
PHOENIX-2059 MR index build does not handle table with a schema name correctly Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/54da7d1d Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/54da7d1d Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/54da7d1d Branch: refs/heads/calcite Commit: 54da7d1d6b2ecd27c8c98211e84484029b6d39c2 Parents: 6a07d45 Author: Thomas D'Silva tdsi...@salesforce.com Authored: Mon Jun 22 17:45:58 2015 -0700 Committer: Thomas D'Silva tdsi...@salesforce.com Committed: Tue Jun 30 22:21:37 2015 -0700 -- .../apache/phoenix/mapreduce/IndexToolIT.java | 47 .../phoenix/mapreduce/index/IndexTool.java | 15 --- 2 files changed, 36 insertions(+), 26 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/54da7d1d/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/IndexToolIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/IndexToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/IndexToolIT.java index 6761275..5d11cf2 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/IndexToolIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/IndexToolIT.java @@ -42,6 +42,7 @@ import org.apache.phoenix.util.MetaDataUtil; import org.apache.phoenix.util.PhoenixRuntime; import org.apache.phoenix.util.PropertiesUtil; import org.apache.phoenix.util.QueryUtil; +import org.apache.phoenix.util.SchemaUtil; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -62,6 +63,7 @@ public class IndexToolIT { public static void setUp() throws Exception { hbaseTestUtil = new HBaseTestingUtility(); Configuration conf = hbaseTestUtil.getConfiguration(); +conf.setBoolean(hbase.defaults.for.version.skip, true); setUpConfigForMiniCluster(conf); hbaseTestUtil.startMiniCluster(); hbaseTestUtil.startMiniMapReduceCluster(); @@ -71,34 +73,35 @@ public class IndexToolIT { @Test public void 
testImmutableGlobalIndex() throws Exception { -testSecondaryIndex(DATA_TABLE1,true, false); +testSecondaryIndex(SCHEMA, DATA_TABLE1, true, false); } @Test public void testImmutableLocalIndex() throws Exception { -testSecondaryIndex(DATA_TABLE2,true, true); +testSecondaryIndex(SCHEMA, DATA_TABLE2, true, true); } @Test public void testMutableGlobalIndex() throws Exception { -testSecondaryIndex(DATA_TABLE3,false, false); +testSecondaryIndex(SCHEMA, DATA_TABLE3, false, false); } @Test public void testMutableLocalIndex() throws Exception { -testSecondaryIndex(DATA_TABLE4,false, true); +testSecondaryIndex(SCHEMA, DATA_TABLE4, false, true); } -public void testSecondaryIndex(final String dataTable , final boolean isImmutable , final boolean isLocal) throws Exception { +public void testSecondaryIndex(final String schemaName, final String dataTable, final boolean isImmutable , final boolean isLocal) throws Exception { + final String fullTableName = SchemaUtil.getTableName(schemaName, dataTable); final String indxTable = String.format(%s_%s,dataTable,INDX); Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES); Connection conn = DriverManager.getConnection(PhoenixRuntime.JDBC_PROTOCOL + PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + zkQuorum,props); Statement stmt = conn.createStatement(); try { -stmt.execute(String.format(CREATE TABLE %s (ID INTEGER NOT NULL PRIMARY KEY, NAME VARCHAR, ZIP INTEGER) %s,dataTable, (isImmutable ? IMMUTABLE_ROWS=true :))); -String upsertQuery = String.format(UPSERT INTO %s VALUES(?, ?, ?),dataTable); +stmt.execute(String.format(CREATE TABLE %s (ID INTEGER NOT NULL PRIMARY KEY, NAME VARCHAR, ZIP INTEGER) %s, fullTableName, (isImmutable ? 
IMMUTABLE_ROWS=true :))); +String upsertQuery = String.format(UPSERT INTO %s VALUES(?, ?, ?), fullTableName); PreparedStatement stmt1 = conn.prepareStatement(upsertQuery); int id = 1; @@ -107,15 +110,15 @@ public class IndexToolIT { upsertRow(stmt1, id++); conn.commit(); -stmt.execute(String.format(CREATE %s INDEX %s ON %s (UPPER(NAME)) ASYNC , (isLocal ? LOCAL : ), indxTable,dataTable)); +stmt.execute(String.format(CREATE %s INDEX %s ON %s (UPPER(NAME)) ASYNC , (isLocal ? LOCAL : ), indxTable, fullTableName)); //verify rows are fetched from data table. -String
[09/47] phoenix git commit: PHOENIX-1941 Phoenix tests are failing in linux env with missing class: StaticMapping (Alicia Ying Shu)
PHOENIX-1941 Phoenix tests are failing in linux env with missing class: StaticMapping (Alicia Ying Shu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/db7b5753 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/db7b5753 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/db7b5753 Branch: refs/heads/calcite Commit: db7b5753bfecaefd4fb32e7e9b9b5223787d4c62 Parents: 03a6ac0 Author: Nick Dimiduk ndimi...@apache.org Authored: Wed Jun 17 12:17:33 2015 -0700 Committer: Nick Dimiduk ndimi...@apache.org Committed: Wed Jun 17 12:17:33 2015 -0700 -- .../phoenix/end2end/End2EndTestDriver.java | 19 +++ 1 file changed, 15 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/db7b5753/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java index 26d18cf..743f729 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java @@ -21,6 +21,7 @@ package org.apache.phoenix.end2end; import java.io.IOException; import java.io.PrintStream; +import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -79,10 +80,20 @@ public class End2EndTestDriver extends AbstractHBaseTool { @Override public boolean isCandidateClass(Class? c) { -return testFilterRe.matcher(c.getName()).find() - // Our pattern will match the below NON-IntegrationTest. 
Rather than - // do exotic regex, just filter it out here - super.isCandidateClass(c); + Annotation[] annotations = c.getAnnotations(); + for (Annotation curAnnotation : annotations) { + if (curAnnotation.toString().contains(NeedsOwnMiniClusterTest)) { + /* Skip tests that aren't designed to run against a live cluster. + * For a live cluster, we cannot bring it up and down as required + * for these tests to run. + */ + return false; + } + } + return testFilterRe.matcher(c.getName()).find() + // Our pattern will match the below NON-IntegrationTest. Rather than + // do exotic regex, just filter it out here + super.isCandidateClass(c); } }
[19/47] phoenix git commit: PHOENIX-1920 - Pherf - Add support for mixed r/w workloads
PHOENIX-1920 - Pherf - Add support for mixed r/w workloads Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7175dcbc Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7175dcbc Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7175dcbc Branch: refs/heads/calcite Commit: 7175dcbc011dff48f6d041697ec84da98f80f729 Parents: 466eeb3 Author: cmarcel cmar...@salesforce.com Authored: Fri Jun 19 16:34:41 2015 -0700 Committer: cmarcel cmar...@salesforce.com Committed: Fri Jun 19 16:34:41 2015 -0700 -- .gitignore | 2 + phoenix-pherf/pom.xml | 10 +- .../org/apache/phoenix/pherf/DataIngestIT.java | 134 -- .../org/apache/phoenix/pherf/PherfMainIT.java | 36 ++ .../apache/phoenix/pherf/ResultBaseTestIT.java | 31 +- .../apache/phoenix/pherf/SchemaReaderIT.java| 17 +- .../java/org/apache/phoenix/pherf/Pherf.java| 179 +--- .../apache/phoenix/pherf/PherfConstants.java| 8 +- .../phoenix/pherf/configuration/DataModel.java | 10 - .../phoenix/pherf/configuration/Scenario.java | 12 +- .../pherf/configuration/WriteParams.java| 72 +++ .../pherf/configuration/XMLConfigParser.java| 25 +- .../phoenix/pherf/jmx/MonitorManager.java | 153 --- .../phoenix/pherf/loaddata/DataLoader.java | 332 -- .../pherf/result/DataLoadThreadTime.java| 87 ++-- .../pherf/result/DataLoadTimeSummary.java | 54 +-- .../phoenix/pherf/result/DataModelResult.java | 68 ++- .../phoenix/pherf/result/QueryResult.java | 17 +- .../phoenix/pherf/result/QuerySetResult.java| 40 +- .../org/apache/phoenix/pherf/result/Result.java | 11 +- .../phoenix/pherf/result/ResultHandler.java | 5 + .../phoenix/pherf/result/ResultManager.java | 19 +- .../apache/phoenix/pherf/result/ResultUtil.java | 119 +++-- .../phoenix/pherf/result/ResultValue.java | 4 +- .../apache/phoenix/pherf/result/RunTime.java| 179 .../phoenix/pherf/result/ScenarioResult.java| 44 +- .../apache/phoenix/pherf/result/ThreadTime.java | 34 +- .../phoenix/pherf/result/file/Extension.java| 3 
+- .../phoenix/pherf/result/file/Header.java | 11 +- .../pherf/result/impl/CSVResultHandler.java | 47 +- .../pherf/result/impl/ImageResultHandler.java | 58 +-- .../pherf/result/impl/XMLResultHandler.java | 36 +- .../phoenix/pherf/schema/SchemaReader.java | 2 +- .../apache/phoenix/pherf/util/PhoenixUtil.java | 64 ++- .../pherf/workload/MultiThreadedRunner.java | 153 +++ .../pherf/workload/MultithreadedDiffer.java | 131 +++--- .../pherf/workload/MultithreadedRunner.java | 170 --- .../phoenix/pherf/workload/QueryExecutor.java | 459 ++- .../phoenix/pherf/workload/QueryVerifier.java | 265 +-- .../apache/phoenix/pherf/workload/Workload.java | 10 + .../pherf/workload/WorkloadExecutor.java| 109 ++--- .../phoenix/pherf/workload/WriteWorkload.java | 403 .../scenario/prod_test_unsalted_scenario.xml| 35 ++ .../phoenix/pherf/ConfigurationParserTest.java | 102 +++-- .../org/apache/phoenix/pherf/ResultTest.java| 5 +- .../apache/phoenix/pherf/RuleGeneratorTest.java | 15 +- .../test/resources/scenario/test_scenario.xml | 58 ++- 47 files changed, 2171 insertions(+), 1667 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/7175dcbc/.gitignore -- diff --git a/.gitignore b/.gitignore index fc0e4af..b918d76 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,5 @@ target/ release/ RESULTS/ +CSV_EXPORT/ + http://git-wip-us.apache.org/repos/asf/phoenix/blob/7175dcbc/phoenix-pherf/pom.xml -- diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml index 1667c66..0facbde 100644 --- a/phoenix-pherf/pom.xml +++ b/phoenix-pherf/pom.xml @@ -16,7 +16,8 @@ ~ limitations under the License. 
-- -project xmlns=http://maven.apache.org/POM/4.0.0; xmlns:xsi=http://www.w3.org/2001/XMLSchema-instance; +project xmlns=http://maven.apache.org/POM/4.0.0; + xmlns:xsi=http://www.w3.org/2001/XMLSchema-instance; xsi:schemaLocation=http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd; modelVersion4.0.0/modelVersion parent @@ -30,7 +31,7 @@ namePhoenix - Pherf/name properties - top.dir${project.basedir}/../top.dir +top.dir${project.basedir}/../top.dir /properties profiles @@ -233,6 +234,11 @@ !-- Test Dependencies -- dependency +groupIdcom.jcabi/groupId +
[26/47] phoenix git commit: PHOENIX-2072 - (James Taylor) Fix Eclipse compiler errors in pherf module
PHOENIX-2072 - (James Taylor) Fix Eclipse compiler errors in pherf module Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c1e57235 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c1e57235 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c1e57235 Branch: refs/heads/calcite Commit: c1e57235b81af4fe847394f8288c9b679ee2d54f Parents: 3e49339 Author: cmarcel cmar...@salesforce.com Authored: Fri Jun 26 09:07:00 2015 -0700 Committer: cmarcel cmar...@salesforce.com Committed: Fri Jun 26 09:07:00 2015 -0700 -- .../pherf/workload/MultithreadedDiffer.java | 7 ++ .../apache/phoenix/pherf/RuleGeneratorTest.java | 26 ++-- 2 files changed, 20 insertions(+), 13 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/c1e57235/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultithreadedDiffer.java -- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultithreadedDiffer.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultithreadedDiffer.java index 1735754..91189e2 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultithreadedDiffer.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultithreadedDiffer.java @@ -21,20 +21,18 @@ package org.apache.phoenix.pherf.workload; import java.util.Calendar; import java.util.Date; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import org.apache.phoenix.pherf.PherfConstants; import org.apache.phoenix.pherf.configuration.Query; import org.apache.phoenix.pherf.result.RunTime; import org.apache.phoenix.pherf.result.ThreadTime; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class MultithreadedDiffer implements Runnable { private static final Logger logger = LoggerFactory.getLogger(MultiThreadedRunner.class); private Thread t; private Query query; private ThreadTime threadTime; -private String threadName; private long 
numberOfExecutions; private long executionDurationInMs; private QueryVerifier queryVerifier = new QueryVerifier(true); @@ -72,7 +70,6 @@ class MultithreadedDiffer implements Runnable { MultithreadedDiffer(String threadName, Query query, ThreadTime threadTime, long numberOfExecutions, long executionDurationInMs) { this.query = query; -this.threadName = threadName; this.threadTime = threadTime; this.numberOfExecutions = numberOfExecutions; this.executionDurationInMs = executionDurationInMs; http://git-wip-us.apache.org/repos/asf/phoenix/blob/c1e57235/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/RuleGeneratorTest.java -- diff --git a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/RuleGeneratorTest.java b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/RuleGeneratorTest.java index 92604d4..936eedb 100644 --- a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/RuleGeneratorTest.java +++ b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/RuleGeneratorTest.java @@ -18,22 +18,32 @@ package org.apache.phoenix.pherf; -import org.apache.phoenix.pherf.configuration.*; -import org.apache.phoenix.pherf.workload.WriteWorkload; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; + +import org.apache.phoenix.pherf.configuration.Column; +import org.apache.phoenix.pherf.configuration.DataModel; +import org.apache.phoenix.pherf.configuration.DataSequence; +import org.apache.phoenix.pherf.configuration.DataTypeMapping; +import org.apache.phoenix.pherf.configuration.Scenario; +import org.apache.phoenix.pherf.configuration.XMLConfigParser; import org.apache.phoenix.pherf.rules.DataValue; import org.apache.phoenix.pherf.rules.RulesApplier; -import org.apache.phoenix.pherf.util.PhoenixUtil; +import 
org.apache.phoenix.pherf.workload.WriteWorkload; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.junit.Test; -import java.util.*; - -import static org.junit.Assert.*; - public class RuleGeneratorTest { -private static PhoenixUtil util = PhoenixUtil.create(true); private static final String matcherScenario = PherfConstants.SCENARIO_ROOT_PATTERN + .xml; @Test
[05/47] phoenix git commit: minor changes based on jesses feedback
minor changes based on jesses feedback Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d1f7dede Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d1f7dede Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d1f7dede Branch: refs/heads/calcite Commit: d1f7dedeccbb0befce071cb87efd38290271039a Parents: a4aa780 Author: Prashant Kommireddi pkommire...@pkommireddi-ltm.internal.salesforce.com Authored: Mon Jun 15 16:18:47 2015 -0700 Committer: Eli Levine elilev...@apache.org Committed: Mon Jun 15 18:17:45 2015 -0700 -- .../src/main/java/org/apache/phoenix/pig/util/TypeUtil.java | 7 +++ 1 file changed, 3 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/d1f7dede/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java -- diff --git a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java index 6e32fb5..5820ec6 100644 --- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java +++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java @@ -76,9 +76,7 @@ public final class TypeUtil { private TypeUtil() {} /** - * A map of Phoenix to Pig data types. - * - * @return + * @return map of Phoenix to Pig data types. */ private static ImmutableMapPDataType, Byte init() { final ImmutableMap.BuilderPDataType, Byte builder = new BuilderPDataType, Byte(); @@ -160,7 +158,8 @@ public final class TypeUtil { /** * This method encodes a value with Phoenix data type. It begins with checking whether an object is BINARY and makes - * a call to {@link #castBytes(Object, PDataType)} to convery bytes to targetPhoenixType + * a call to {@link #castBytes(Object, PDataType)} to convert bytes to targetPhoenixType. It returns a {@link RuntimeException} + * when object can not be coerced. * * @param o * @param targetPhoenixType
[17/47] phoenix git commit: PHOENIX-1920 - Pherf - Add support for mixed r/w workloads
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7175dcbc/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryVerifier.java -- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryVerifier.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryVerifier.java index 78f18ca..c9333a0 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryVerifier.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryVerifier.java @@ -43,153 +43,160 @@ import difflib.DiffUtils; import difflib.Patch; public class QueryVerifier { - private PhoenixUtil pUtil = new PhoenixUtil(); - private static final Logger logger = LoggerFactory - .getLogger(QueryVerifier.class); - private boolean useTemporaryOutput; - private String directoryLocation; - - public QueryVerifier(boolean useTemporaryOutput) { - this.useTemporaryOutput = useTemporaryOutput; - this.directoryLocation = this.useTemporaryOutput ? - PherfConstants.EXPORT_TMP : PherfConstants.EXPORT_DIR; - - ensureBaseDirExists(); - } - - /*** -* Export query resultSet to CSV file -* @param query -* @throws Exception -*/ - public String exportCSV(Query query) throws Exception { - Connection conn = null; - PreparedStatement statement = null; - ResultSet rs = null; - String fileName = getFileName(query); - FileOutputStream fos = new FileOutputStream(fileName); - try { - conn = pUtil.getConnection(query.getTenantId()); - statement = conn.prepareStatement(query.getStatement()); - boolean isQuery = statement.execute(); - if (isQuery) { - rs = statement.executeQuery(); - int columnCount = rs.getMetaData().getColumnCount(); - while (rs.next()) { - for (int columnNum = 1; columnNum = columnCount; columnNum++) { - fos.write((rs.getString(columnNum) + PherfConstants.RESULT_FILE_DELIMETER).getBytes()); - } - fos.write(PherfConstants.NEW_LINE.getBytes()); - } - } else { - conn.commit(); - } - } catch (Exception e) { - e.printStackTrace(); - } 
finally { - if (rs != null) rs.close(); - if (statement != null) statement.close(); - if (conn != null) conn.close(); - fos.flush(); - fos.close(); - } - return fileName; - } - - /*** -* Do a diff between exported query results and temporary CSV file -* @param query -* @param newCSV -* @return -*/ - public boolean doDiff(Query query, String newCSV) { +private PhoenixUtil pUtil = PhoenixUtil.create(); +private static final Logger logger = LoggerFactory.getLogger(QueryVerifier.class); +private boolean useTemporaryOutput; +private String directoryLocation; + +public QueryVerifier(boolean useTemporaryOutput) { +this.useTemporaryOutput = useTemporaryOutput; +this.directoryLocation = +this.useTemporaryOutput ? PherfConstants.EXPORT_TMP : PherfConstants.EXPORT_DIR; + +ensureBaseDirExists(); +} + +/** + * Export query resultSet to CSV file + * + * @param query + * @throws Exception + */ +public String exportCSV(Query query) throws Exception { +Connection conn = null; +PreparedStatement statement = null; +ResultSet rs = null; +String fileName = getFileName(query); +FileOutputStream fos = new FileOutputStream(fileName); +try { +conn = pUtil.getConnection(query.getTenantId()); +statement = conn.prepareStatement(query.getStatement()); +boolean isQuery = statement.execute(); +if (isQuery) { +rs = statement.executeQuery(); +int columnCount = rs.getMetaData().getColumnCount(); +while (rs.next()) { +for (int columnNum = 1; columnNum = columnCount; columnNum++) { +fos.write((rs.getString(columnNum) + PherfConstants.RESULT_FILE_DELIMETER) +.getBytes()); +} +
[31/47] phoenix git commit: PHOENIX-1819 Build a framework to capture and report phoenix client side request level metrics
http://git-wip-us.apache.org/repos/asf/phoenix/blob/0f6595c0/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java index 5270277..bb4054b 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java @@ -57,6 +57,7 @@ import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData; import org.apache.phoenix.jdbc.PhoenixDriver; import org.apache.phoenix.job.JobManager; +import org.apache.phoenix.monitoring.GlobalClientMetrics; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.query.QueryServices; import org.apache.phoenix.query.QueryServicesOptions; @@ -255,12 +256,9 @@ public class CsvBulkLoadTool extends Configured implements Tool { } ListFutureBoolean runningJobs = new ArrayListFutureBoolean(); -boolean useInstrumentedPool = conn -.unwrap(PhoenixConnection.class) -.getQueryServices() -.getProps() -.getBoolean(QueryServices.METRICS_ENABLED, -QueryServicesOptions.DEFAULT_IS_METRICS_ENABLED); +boolean useInstrumentedPool = GlobalClientMetrics.isMetricsEnabled() +|| conn.unwrap(PhoenixConnection.class).isRequestLevelMetricsEnabled(); + ExecutorService executor = JobManager.createThreadPoolExec(Integer.MAX_VALUE, 5, 20, useInstrumentedPool); try{ http://git-wip-us.apache.org/repos/asf/phoenix/blob/0f6595c0/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordReader.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordReader.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordReader.java index eb6dc3d..b500a25 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordReader.java +++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordReader.java @@ -17,6 +17,8 @@ */ package org.apache.phoenix.mapreduce; +import static org.apache.phoenix.monitoring.MetricType.SCAN_BYTES; + import java.io.IOException; import java.sql.SQLException; import java.util.List; @@ -32,6 +34,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.db.DBWritable; import org.apache.hadoop.util.ReflectionUtils; import org.apache.phoenix.compile.QueryPlan; +import org.apache.phoenix.compile.StatementContext; import org.apache.phoenix.iterate.ConcatResultIterator; import org.apache.phoenix.iterate.LookAheadResultIterator; import org.apache.phoenix.iterate.PeekingResultIterator; @@ -40,6 +43,7 @@ import org.apache.phoenix.iterate.RoundRobinResultIterator; import org.apache.phoenix.iterate.SequenceResultIterator; import org.apache.phoenix.iterate.TableResultIterator; import org.apache.phoenix.jdbc.PhoenixResultSet; +import org.apache.phoenix.monitoring.ReadMetricQueue; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; @@ -100,8 +104,12 @@ public class PhoenixRecordReaderT extends DBWritable extends RecordReaderNull final ListScan scans = pSplit.getScans(); try { ListPeekingResultIterator iterators = Lists.newArrayListWithExpectedSize(scans.size()); +StatementContext ctx = queryPlan.getContext(); +ReadMetricQueue readMetrics = ctx.getReadMetricsQueue(); +String tableName = queryPlan.getTableRef().getTable().getPhysicalName().getString(); for (Scan scan : scans) { -final TableResultIterator tableResultIterator = new TableResultIterator(queryPlan.getContext(), queryPlan.getTableRef(), scan); +final TableResultIterator tableResultIterator = new TableResultIterator(queryPlan.getContext(), +queryPlan.getTableRef(), scan, readMetrics.allotMetric(SCAN_BYTES, tableName)); PeekingResultIterator peekingResultIterator = LookAheadResultIterator.wrap(tableResultIterator); 
iterators.add(peekingResultIterator); } @@ -112,7 +120,7 @@ public class PhoenixRecordReaderT extends DBWritable extends RecordReaderNull this.resultIterator = iterator; // Clone the row projector as it's not thread safe and would be used simultaneously by // multiple threads otherwise. -this.resultSet = new PhoenixResultSet(this.resultIterator,
[42/47] phoenix git commit: PHOENIX-2087 Ensure predictable column position during alter table
PHOENIX-2087 Ensure predictable column position during alter table Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/72a7356b Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/72a7356b Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/72a7356b Branch: refs/heads/calcite Commit: 72a7356bcade01990a59cfd5d72161f18ae909f3 Parents: a8a9d01 Author: James Taylor jtay...@salesforce.com Authored: Tue Jun 30 08:44:37 2015 -0700 Committer: James Taylor jtay...@salesforce.com Committed: Tue Jun 30 17:31:24 2015 -0700 -- .../apache/phoenix/end2end/AlterTableIT.java| 51 +++- .../coprocessor/MetaDataEndpointImpl.java | 5 +- .../apache/phoenix/schema/MetaDataClient.java | 9 +++- 3 files changed, 46 insertions(+), 19 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/72a7356b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java index cd46927..56bba9b 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java @@ -448,7 +448,7 @@ public class AlterTableIT extends BaseOwnClusterHBaseManagedTimeIT { conn.commit(); assertIndexExists(conn,true); -conn.createStatement().execute(ALTER TABLE + DATA_TABLE_FULL_NAME + ADD v3 VARCHAR, k2 DECIMAL PRIMARY KEY); +conn.createStatement().execute(ALTER TABLE + DATA_TABLE_FULL_NAME + ADD v3 VARCHAR, k2 DECIMAL PRIMARY KEY, k3 DECIMAL PRIMARY KEY); rs = conn.getMetaData().getPrimaryKeys(, SCHEMA_NAME, DATA_TABLE_NAME); assertTrue(rs.next()); assertEquals(K,rs.getString(COLUMN_NAME)); @@ -456,6 +456,10 @@ public class AlterTableIT extends BaseOwnClusterHBaseManagedTimeIT { assertTrue(rs.next()); assertEquals(K2,rs.getString(COLUMN_NAME)); assertEquals(2, rs.getShort(KEY_SEQ)); 
+assertTrue(rs.next()); +assertEquals(K3,rs.getString(COLUMN_NAME)); +assertEquals(3, rs.getShort(KEY_SEQ)); +assertFalse(rs.next()); rs = conn.getMetaData().getPrimaryKeys(, SCHEMA_NAME, INDEX_TABLE_NAME); assertTrue(rs.next()); @@ -467,6 +471,10 @@ public class AlterTableIT extends BaseOwnClusterHBaseManagedTimeIT { assertTrue(rs.next()); assertEquals(IndexUtil.INDEX_COLUMN_NAME_SEP + K2,rs.getString(COLUMN_NAME)); assertEquals(3, rs.getShort(KEY_SEQ)); +assertTrue(rs.next()); +assertEquals(IndexUtil.INDEX_COLUMN_NAME_SEP + K3,rs.getString(COLUMN_NAME)); +assertEquals(4, rs.getShort(KEY_SEQ)); +assertFalse(rs.next()); query = SELECT * FROM + DATA_TABLE_FULL_NAME; rs = conn.createStatement().executeQuery(query); @@ -478,19 +486,21 @@ public class AlterTableIT extends BaseOwnClusterHBaseManagedTimeIT { assertFalse(rs.next()); // load some data into the table -stmt = conn.prepareStatement(UPSERT INTO + DATA_TABLE_FULL_NAME + (K,K2,V1,V2) VALUES(?,?,?,?)); +stmt = conn.prepareStatement(UPSERT INTO + DATA_TABLE_FULL_NAME + (K,K2,V1,V2,K3) VALUES(?,?,?,?,?)); stmt.setString(1, b); stmt.setBigDecimal(2, BigDecimal.valueOf(2)); stmt.setString(3, y); stmt.setString(4, 2); +stmt.setBigDecimal(5, BigDecimal.valueOf(3)); stmt.execute(); conn.commit(); -query = SELECT k,k2 FROM + DATA_TABLE_FULL_NAME + WHERE v1='y'; +query = SELECT k,k2,k3 FROM + DATA_TABLE_FULL_NAME + WHERE v1='y'; rs = conn.createStatement().executeQuery(query); assertTrue(rs.next()); assertEquals(b,rs.getString(1)); assertEquals(BigDecimal.valueOf(2),rs.getBigDecimal(2)); +assertEquals(BigDecimal.valueOf(3),rs.getBigDecimal(3)); assertFalse(rs.next()); } @@ -2345,6 +2355,21 @@ public class AlterTableIT extends BaseOwnClusterHBaseManagedTimeIT { return false; } +private int getIndexOfPkColumn(PhoenixConnection conn, String columnName, String tableName) throws SQLException { +String normalizedTableName = SchemaUtil.normalizeIdentifier(tableName); +PTable table = conn.getMetaDataCache().getTable(new 
PTableKey(conn.getTenantId(), normalizedTableName)); +ListPColumn pkCols = table.getPKColumns(); +String normalizedColumnName = SchemaUtil.normalizeIdentifier(columnName); +int i = 0; +for (PColumn pkCol : pkCols) { +if
[46/47] phoenix git commit: PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu)
PHOENIX-2011 Default, min, and max values should not require quotes around it in create function(Rajeshbabu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b2fb04b0 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b2fb04b0 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b2fb04b0 Branch: refs/heads/calcite Commit: b2fb04b0c2234c5b573642d39589ab9d36469723 Parents: 54da7d1 Author: Rajeshbabu Chintaguntla rajeshb...@apache.org Authored: Thu Jul 2 15:51:18 2015 +0530 Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org Committed: Thu Jul 2 15:51:18 2015 +0530 -- .../apache/phoenix/end2end/UserDefinedFunctionsIT.java | 12 ++-- phoenix-core/src/main/antlr3/PhoenixSQL.g | 3 ++- .../phoenix/coprocessor/MetaDataEndpointImpl.java | 9 + .../org/apache/phoenix/parse/FunctionParseNode.java | 9 +++-- .../main/java/org/apache/phoenix/parse/PFunction.java | 12 +--- .../java/org/apache/phoenix/schema/MetaDataClient.java | 6 +++--- 6 files changed, 28 insertions(+), 23 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2fb04b0/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java index cee1c85..613231d 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java @@ -348,7 +348,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ } -tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +tenant2Conn.createStatement().execute(create function myfunction(INTEGER, INTEGER CONSTANT 
defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); try { tenant2Conn.createStatement().execute(create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.UnknownClass' using jar @@ -424,7 +424,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.commit(); conn.createStatement().execute(create table t2(k integer primary key, k1 integer, lastname_reverse varchar)); conn.commit(); -stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum3(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); stmt.execute(create function myreverse3(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.+MY_REVERSE_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar1.jar+'); @@ -458,7 +458,7 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t4(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t4 values(1,1,'jock')); conn.commit(); -stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar +stmt.execute(create function mysum(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar + '+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + /myjar2.jar+'); ResultSet rs = stmt.executeQuery(select mysum(k,12) from t4); assertTrue(rs.next()); @@ -481,7 +481,7 
@@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{ conn.createStatement().execute(create table t9(k integer primary key, k1 integer, lastname varchar)); stmt.execute(upsert into t9 values(1,1,'jock')); conn.commit(); -stmt.execute(create temporary function mysum9(INTEGER, INTEGER CONSTANT defaultValue='10' minvalue='1' maxvalue='15' ) returns INTEGER as
[35/47] phoenix git commit: PHOENIX-1659 PhoenixDatabaseMetaData.getColumns does not return REMARKS column
PHOENIX-1659 PhoenixDatabaseMetaData.getColumns does not return REMARKS column Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d02b3610 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d02b3610 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d02b3610 Branch: refs/heads/calcite Commit: d02b3610991616ab3920d40d719c9000601d8722 Parents: 38ae6b7 Author: Josh Mahonin jmaho...@interset.com Authored: Thu Jun 25 15:52:37 2015 -0400 Committer: Josh Mahonin jmaho...@interset.com Committed: Mon Jun 29 12:27:09 2015 -0400 -- .../end2end/QueryDatabaseMetaDataIT.java| 31 .../phoenix/jdbc/PhoenixDatabaseMetaData.java | 1 + 2 files changed, 32 insertions(+) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/d02b3610/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java index 61459a5..2fdccf6 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java @@ -1118,4 +1118,35 @@ public class QueryDatabaseMetaDataIT extends BaseClientManagedTimeIT { assertFalse(rs.next()); } +@Test +public void testRemarkColumn() throws SQLException { +long ts = nextTimestamp(); +Properties props = new Properties(); +props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5)); +Connection conn = DriverManager.getConnection(getUrl(), props); + +// Retrieve the database metadata +DatabaseMetaData dbmd = conn.getMetaData(); +ResultSet rs = dbmd.getColumns(null, null, null, null); +rs.next(); + +// Lookup column by name, this should return null but not throw an exception +String remarks = rs.getString(REMARKS); +assertNull(remarks); + +// Same as above, but lookup by 
position +remarks = rs.getString(12); +assertNull(remarks); + +// Iterate through metadata columns to find 'COLUMN_NAME' == 'REMARKS' +boolean foundRemarksColumn = false; +while(rs.next()) { +String colName = rs.getString(COLUMN_NAME); +if(PhoenixDatabaseMetaData.REMARKS.equals(colName)) { +foundRemarksColumn = true; +break; +} +} +assertTrue(Could not find REMARKS column, foundRemarksColumn); +} } http://git-wip-us.apache.org/repos/asf/phoenix/blob/d02b3610/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java index 2dd8af4..314af2e 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java @@ -439,6 +439,7 @@ public class PhoenixDatabaseMetaData implements DatabaseMetaData, org.apache.pho DECIMAL_DIGITS + , + NUM_PREC_RADIX + , + NULLABLE + , + +REMARKS + , + COLUMN_DEF + , + SQL_DATA_TYPE + , + SQL_DATETIME_SUB + , +
[18/47] phoenix git commit: PHOENIX-1920 - Pherf - Add support for mixed r/w workloads
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7175dcbc/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java -- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java index 523feb4..39d6a9c 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java @@ -33,17 +33,13 @@ public class ResultManager { private final ResultUtil util; private final PherfConstants.RunMode runMode; - public ResultManager(String fileNameSeed, PherfConstants.RunMode runMode) { -this(runMode, Arrays.asList( -new XMLResultHandler(fileNameSeed, ResultFileDetails.XML), +this(runMode, Arrays.asList(new XMLResultHandler(fileNameSeed, ResultFileDetails.XML), new ImageResultHandler(fileNameSeed, ResultFileDetails.IMAGE), - new CSVResultHandler( - fileNameSeed, - runMode == RunMode.PERFORMANCE ? ResultFileDetails.CSV_DETAILED_PERFORMANCE - : ResultFileDetails.CSV_DETAILED_FUNCTIONAL), -new CSVResultHandler(fileNameSeed, ResultFileDetails.CSV_AGGREGATE_PERFORMANCE) -)); +new CSVResultHandler(fileNameSeed, runMode == RunMode.PERFORMANCE ? +ResultFileDetails.CSV_DETAILED_PERFORMANCE : +ResultFileDetails.CSV_DETAILED_FUNCTIONAL), +new CSVResultHandler(fileNameSeed, ResultFileDetails.CSV_AGGREGATE_PERFORMANCE))); } public ResultManager(PherfConstants.RunMode runMode, ListResultHandler resultHandlers) { @@ -81,6 +77,7 @@ public class ResultManager { /** * Write a combined set of results for each result in the list. 
+ * * @param dataModelResults List{@link DataModelResult /} * @throws Exception */ @@ -89,7 +86,9 @@ public class ResultManager { CSVResultHandler detailsCSVWriter = null; try { -detailsCSVWriter = new CSVResultHandler(PherfConstants.COMBINED_FILE_NAME, ResultFileDetails.CSV_DETAILED_PERFORMANCE); +detailsCSVWriter = +new CSVResultHandler(PherfConstants.COMBINED_FILE_NAME, +ResultFileDetails.CSV_DETAILED_PERFORMANCE); for (DataModelResult dataModelResult : dataModelResults) { util.write(detailsCSVWriter, dataModelResult, runMode); } http://git-wip-us.apache.org/repos/asf/phoenix/blob/7175dcbc/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java -- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java index fd960d1..07dfa86 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java @@ -22,15 +22,16 @@ import org.apache.phoenix.pherf.PherfConstants; import org.apache.phoenix.pherf.PherfConstants.RunMode; import org.apache.phoenix.pherf.result.file.ResultFileDetails; import org.apache.phoenix.pherf.result.impl.CSVResultHandler; -import org.apache.phoenix.pherf.result.impl.ImageResultHandler; -import org.apache.phoenix.pherf.result.impl.XMLResultHandler; import org.apache.phoenix.pherf.util.PhoenixUtil; -import java.io.*; +import java.io.File; +import java.io.IOException; import java.text.Format; import java.text.SimpleDateFormat; -import java.util.*; +import java.util.ArrayList; +import java.util.Date; import java.util.List; +import java.util.Map; public class ResultUtil { @@ -54,7 +55,10 @@ public class ResultUtil { ListResultValue rowValues = new ArrayList(); rowValues.add(new ResultValue(PhoenixUtil.getZookeeper())); rowValues.addAll(writeThreadTime.getCsvRepresentation(this)); -Result result = new 
Result(ResultFileDetails.CSV_DETAILED_PERFORMANCE, ZK, + dataLoadThreadTime.getCsvTitle(), rowValues); +Result +result = +new Result(ResultFileDetails.CSV_DETAILED_PERFORMANCE, +ZK, + dataLoadThreadTime.getCsvTitle(), rowValues); writer.write(result); } } @@ -83,7 +87,10 @@ public class ResultUtil { ListResultValue
[37/47] phoenix git commit: PHOENIX-1659 PhoenixDatabaseMetaData.getColumns does not return REMARKS column
PHOENIX-1659 PhoenixDatabaseMetaData.getColumns does not return REMARKS column Followup commit to adjust the COLUMN_FAMILY_POSITION in QueryUtil as well. Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/83b8db4d Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/83b8db4d Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/83b8db4d Branch: refs/heads/calcite Commit: 83b8db4def81d9a7fc959de116edbaa1a265bf18 Parents: d604494 Author: Josh Mahonin jmaho...@interset.com Authored: Mon Jun 29 15:27:01 2015 -0400 Committer: Josh Mahonin jmaho...@interset.com Committed: Mon Jun 29 16:56:54 2015 -0400 -- phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/83b8db4d/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java index a2d4a91..bc2141c 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java @@ -59,7 +59,7 @@ public final class QueryUtil { /** * Column family name index within ResultSet resulting from {@link DatabaseMetaData#getColumns(String, String, String, String)} */ -public static final int COLUMN_FAMILY_POSITION = 24; +public static final int COLUMN_FAMILY_POSITION = 25; /** * Column name index within ResultSet resulting from {@link DatabaseMetaData#getColumns(String, String, String, String)}
[34/47] phoenix git commit: LP-1277 Support nulls in CHAR fields in CSV loader
LP-1277 Support nulls in CHAR fields in CSV loader Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/38ae6b75 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/38ae6b75 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/38ae6b75 Branch: refs/heads/calcite Commit: 38ae6b754a77fd967d601e89711349e8c2e22577 Parents: 0f6595c Author: Gabriel Reid gabri...@ngdata.com Authored: Thu Jun 25 21:36:51 2015 +0200 Committer: Gabriel Reid gabri...@ngdata.com Committed: Mon Jun 29 08:38:52 2015 +0200 -- .../phoenix/end2end/CSVCommonsLoaderIT.java | 18 +++--- .../org/apache/phoenix/schema/types/PChar.java| 3 --- .../phoenix/util/csv/CsvUpsertExecutor.java | 5 - 3 files changed, 15 insertions(+), 11 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/38ae6b75/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java index d07ed8d..c7287ea 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java @@ -46,9 +46,10 @@ import org.junit.Test; public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT { private static final String DATATYPE_TABLE = DATATYPE; -private static final String DATATYPES_CSV_VALUES = CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n -+ KEY1,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n -+ KEY2,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n; +private static final String DATATYPES_CSV_VALUES = CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n ++ 
KEY1,A,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n ++ KEY2,B,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n ++ KEY3,,\n; private static final String STOCK_TABLE = STOCK_SYMBOL; private static final String STOCK_TABLE_MULTI = STOCK_SYMBOL_MULTI; private static final String STOCK_CSV_VALUES = AAPL,APPLE Inc.\n @@ -480,7 +481,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT { String statements = CREATE TABLE IF NOT EXISTS + DATATYPE_TABLE + (CKEY VARCHAR NOT NULL PRIMARY KEY, -+ CVARCHAR VARCHAR, CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);; ++ CVARCHAR VARCHAR, CCHAR CHAR(10), CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);; conn = DriverManager.getConnection(getUrl()) .unwrap(PhoenixConnection.class); PhoenixRuntime.executeStatements(conn, @@ -493,7 +494,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT { // Compare Phoenix ResultSet with CSV file content PreparedStatement statement = conn -.prepareStatement(SELECT CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM +.prepareStatement(SELECT CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM + DATATYPE_TABLE); ResultSet phoenixResultSet = statement.executeQuery(); parser = new CSVParser(new StringReader(DATATYPES_CSV_VALUES), @@ -511,9 +512,12 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT { i++; } // special case for matching date, time values -assertEquals(DateUtil.parseTime(record.get(8)), +String timeFieldValue = record.get(9); +assertEquals(timeFieldValue.isEmpty() ? 
null : DateUtil.parseTime(record.get(9)), phoenixResultSet.getTime(CTIME)); -assertEquals(DateUtil.parseDate(record.get(9)), + +String dateField = record.get(10); +assertEquals(dateField.isEmpty() ? null :
[22/47] phoenix git commit: PHOENIX-2066 Existing client fails initialization due to upgrade atttempting to create column with no name (Lukas Lalinsky)
PHOENIX-2066 Existing client fails initialization due to upgrade atttempting to create column with no name (Lukas Lalinsky) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b58a62a5 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b58a62a5 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b58a62a5 Branch: refs/heads/calcite Commit: b58a62a5e43dcbb37695a0ebf7a20ced13e99503 Parents: 7385899 Author: James Taylor jtay...@salesforce.com Authored: Wed Jun 24 08:11:12 2015 -0700 Committer: James Taylor jtay...@salesforce.com Committed: Wed Jun 24 08:11:12 2015 -0700 -- .../phoenix/query/ConnectionQueryServicesImpl.java| 14 +- 1 file changed, 9 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/b58a62a5/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java index c5dde10..ddebf9f 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java @@ -1941,11 +1941,15 @@ public class ConnectionQueryServicesImpl extends DelegateQueryServices implement columnsToAdd += , + PhoenixDatabaseMetaData.INDEX_TYPE + + PUnsignedTinyint.INSTANCE.getSqlTypeName() + , + PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP + + PLong.INSTANCE.getSqlTypeName(); } -// Ugh..need to assign to another local variable to keep eclipse happy. -PhoenixConnection newMetaConnection = addColumnsIfNotExists(metaConnection, -PhoenixDatabaseMetaData.SYSTEM_CATALOG, - MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd); -metaConnection = newMetaConnection; + +// If we have some new columns from 4.1-4.3 to add, add them now. 
+if (!columnsToAdd.isEmpty()) { +// Ugh..need to assign to another local variable to keep eclipse happy. +PhoenixConnection newMetaConnection = addColumnsIfNotExists(metaConnection, + PhoenixDatabaseMetaData.SYSTEM_CATALOG, + MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd); +metaConnection = newMetaConnection; +} if (currentServerSideTableTimeStamp MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_5_0) { columnsToAdd = PhoenixDatabaseMetaData.BASE_COLUMN_COUNT +
[11/47] phoenix git commit: PHOENIX-2014 WHERE search condition ignored when also using row value constructor in view
PHOENIX-2014 WHERE search condition ignored when also using row value constructor in view Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/14d11b13 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/14d11b13 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/14d11b13 Branch: refs/heads/calcite Commit: 14d11b130ca0b3726e7724a1f4a9770bc1cb2453 Parents: 8060048 Author: James Taylor jamestay...@apache.org Authored: Wed Jun 17 16:58:51 2015 -0700 Committer: James Taylor jamestay...@apache.org Committed: Wed Jun 17 16:58:51 2015 -0700 -- .../phoenix/end2end/RowValueConstructorIT.java | 28 .../apache/phoenix/compile/WhereOptimizer.java | 25 ++--- .../phoenix/compile/WhereOptimizerTest.java | 20 ++ 3 files changed, 64 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/14d11b13/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java index 3859785..e227eb0 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java @@ -1395,4 +1395,32 @@ public class RowValueConstructorIT extends BaseClientManagedTimeIT { assertEquals(1, numRecords); } +@Test +public void testRVCInView() throws Exception { +Connection conn = nextConnection(getUrl()); +conn.createStatement().execute(CREATE TABLE TEST_TABLE.TEST1 (\n + +PK1 CHAR(3) NOT NULL, \n + +PK2 CHAR(3) NOT NULL,\n + +DATA1 CHAR(10)\n + +CONSTRAINT PK PRIMARY KEY (PK1, PK2))); +conn.close(); +conn = nextConnection(getUrl()); +conn.createStatement().execute(CREATE VIEW TEST_TABLE.FOO AS SELECT * FROM TEST_TABLE.TEST1 WHERE PK1 = 'FOO'); +conn.close(); +conn = nextConnection(getUrl()); 
+conn.createStatement().execute(UPSERT INTO TEST_TABLE.TEST1 VALUES('FOO','001','SOMEDATA')); +conn.createStatement().execute(UPSERT INTO TEST_TABLE.TEST1 VALUES('FOO','002','SOMEDATA')); +conn.createStatement().execute(UPSERT INTO TEST_TABLE.TEST1 VALUES('FOO','003','SOMEDATA')); +conn.createStatement().execute(UPSERT INTO TEST_TABLE.TEST1 VALUES('FOO','004','SOMEDATA')); +conn.createStatement().execute(UPSERT INTO TEST_TABLE.TEST1 VALUES('FOO','005','SOMEDATA')); +conn.commit(); +conn.close(); + +conn = nextConnection(getUrl()); +ResultSet rs = conn.createStatement().executeQuery(SELECT * FROM TEST_TABLE.FOO WHERE PK2 '004' AND (PK1,PK2) ('FOO','002') LIMIT 2); +assertTrue(rs.next()); +assertEquals(003, rs.getString(PK2)); +assertFalse(rs.next()); +conn.close(); +} } http://git-wip-us.apache.org/repos/asf/phoenix/blob/14d11b13/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java index a5aef02..b7f04e0 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java @@ -647,23 +647,30 @@ public class WhereOptimizer { if (childSlot == EMPTY_KEY_SLOTS) { return EMPTY_KEY_SLOTS; } -// FIXME: get rid of this min/max range BS now that a key range can span multiple columns +// FIXME: get rid of this special-cased min/max range now that a key range can span multiple columns if (childSlot.getMinMaxRange() != null) { // Only set if in initial pk position -// TODO: potentially use KeySlot.intersect here. However, we can't intersect the key ranges in the slot -// with our minMaxRange, since it spans columns and this would mess up our skip scan. +// TODO: fix intersectSlots so that it works with RVCs. 
We'd just need to fill in the leading parts +// of the key with the minMaxRange and then intersect the key parts that overlap. minMaxRange = minMaxRange.intersect(childSlot.getMinMaxRange()); for (KeySlot slot : childSlot) { if
[32/47] phoenix git commit: PHOENIX-1819 Build a framework to capture and report phoenix client side request level metrics
http://git-wip-us.apache.org/repos/asf/phoenix/blob/0f6595c0/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java index 857a952..57fa25a 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java @@ -17,6 +17,7 @@ */ package org.apache.phoenix.execute; +import static org.apache.phoenix.monitoring.TaskExecutionMetricsHolder.NO_OP_INSTANCE; import static org.apache.phoenix.util.LogUtil.addCustomAnnotations; import java.sql.SQLException; @@ -54,6 +55,7 @@ import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.job.JobManager.JobCallable; import org.apache.phoenix.join.HashCacheClient; import org.apache.phoenix.join.HashJoinInfo; +import org.apache.phoenix.monitoring.TaskExecutionMetricsHolder; import org.apache.phoenix.parse.FilterableStatement; import org.apache.phoenix.parse.ParseNode; import org.apache.phoenix.parse.SQLParser; @@ -140,6 +142,11 @@ public class HashJoinPlan extends DelegateQueryPlan { public Object getJobId() { return HashJoinPlan.this; } + +@Override +public TaskExecutionMetricsHolder getTaskExecutionMetric() { +return NO_OP_INSTANCE; +} })); } http://git-wip-us.apache.org/repos/asf/phoenix/blob/0f6595c0/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java index 99f41b2..af3bcf3 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java @@ -17,6 +17,10 @@ */ package org.apache.phoenix.execute; +import static 
org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_BATCH_SIZE; +import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_BYTES; +import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_COMMIT_TIME; + import java.io.IOException; import java.sql.SQLException; import java.util.Arrays; @@ -39,7 +43,11 @@ import org.apache.phoenix.index.IndexMaintainer; import org.apache.phoenix.index.IndexMetaDataCacheClient; import org.apache.phoenix.index.PhoenixIndexCodec; import org.apache.phoenix.jdbc.PhoenixConnection; -import org.apache.phoenix.monitoring.PhoenixMetrics; +import org.apache.phoenix.monitoring.GlobalClientMetrics; +import org.apache.phoenix.monitoring.MutationMetricQueue; +import org.apache.phoenix.monitoring.MutationMetricQueue.MutationMetric; +import org.apache.phoenix.monitoring.MutationMetricQueue.NoOpMutationMetricsQueue; +import org.apache.phoenix.monitoring.ReadMetricQueue; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.schema.IllegalDataException; import org.apache.phoenix.schema.MetaDataClient; @@ -65,9 +73,6 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.sun.istack.NotNull; -import static org.apache.phoenix.monitoring.PhoenixMetrics.SizeMetric.MUTATION_BYTES; -import static org.apache.phoenix.monitoring.PhoenixMetrics.SizeMetric.MUTATION_BATCH_SIZE; -import static org.apache.phoenix.monitoring.PhoenixMetrics.SizeMetric.MUTATION_COMMIT_TIME; /** * @@ -85,11 +90,17 @@ public class MutationState implements SQLCloseable { private final MapTableRef, MapImmutableBytesPtr,RowMutationState mutations; private long sizeOffset; private int numRows = 0; +private final MutationMetricQueue mutationMetricQueue; +private ReadMetricQueue readMetricQueue; -MutationState(long maxSize, PhoenixConnection connection, MapTableRef, MapImmutableBytesPtr,RowMutationState mutations) { +MutationState(long 
maxSize, PhoenixConnection connection, +MapTableRef, MapImmutableBytesPtr, RowMutationState mutations) { this.maxSize = maxSize; this.connection = connection; this.mutations = mutations; +boolean isMetricsEnabled = connection.isRequestLevelMetricsEnabled(); +this.mutationMetricQueue = isMetricsEnabled ? new MutationMetricQueue() +: NoOpMutationMetricsQueue.NO_OP_MUTATION_METRICS_QUEUE; } public MutationState(long maxSize, PhoenixConnection connection) { @@ -108,6 +119,12 @@ public class MutationState implements SQLCloseable { throwIfTooBig(); } +public static
[21/47] phoenix git commit: PHOENIX-2021 - Implement ARRAY_CAT built in function (Dumindu Buddhika)
PHOENIX-2021 - Implement ARRAY_CAT built in function (Dumindu Buddhika) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7385899d Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7385899d Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7385899d Branch: refs/heads/calcite Commit: 7385899d966e38cfc798fd509445db24653ad7de Parents: 7175dcb Author: ramkrishna ramkrishna.s.vasude...@gmail.com Authored: Sun Jun 21 22:05:13 2015 +0530 Committer: ramkrishna ramkrishna.s.vasude...@gmail.com Committed: Sun Jun 21 22:14:16 2015 +0530 -- .../phoenix/end2end/ArrayAppendFunctionIT.java | 17 - .../phoenix/end2end/ArrayConcatFunctionIT.java | 578 ++ .../phoenix/expression/ExpressionType.java | 4 +- .../function/ArrayAppendFunction.java | 53 +- .../function/ArrayConcatFunction.java | 83 +++ .../function/ArrayModifierFunction.java | 155 - .../function/ArrayPrependFunction.java | 54 +- .../phoenix/schema/types/PArrayDataType.java| 163 +- .../expression/ArrayConcatFunctionTest.java | 584 +++ 9 files changed, 1543 insertions(+), 148 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/7385899d/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayAppendFunctionIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayAppendFunctionIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayAppendFunctionIT.java index 1957b3a..cf45724 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayAppendFunctionIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayAppendFunctionIT.java @@ -497,23 +497,6 @@ public class ArrayAppendFunctionIT extends BaseHBaseManagedTimeIT { } @Test -public void testArrayAppendFunctionIntegerWithNull() throws Exception { -Connection conn = DriverManager.getConnection(getUrl()); -initTables(conn); - -ResultSet rs; -rs = conn.createStatement().executeQuery(SELECT ARRAY_APPEND(NULL,NULL) FROM 
regions WHERE region_name = 'SF Bay Area'); -assertTrue(rs.next()); - -Integer[] integers = new Integer[]{2345, 46345, 23234, 456}; - -Array array = conn.createArrayOf(INTEGER, integers); - -assertEquals(null, rs.getArray(1)); -assertFalse(rs.next()); -} - -@Test public void testArrayAppendFunctionVarcharWithNull() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); initTables(conn); http://git-wip-us.apache.org/repos/asf/phoenix/blob/7385899d/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayConcatFunctionIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayConcatFunctionIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayConcatFunctionIT.java new file mode 100644 index 000..247bfb7 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayConcatFunctionIT.java @@ -0,0 +1,578 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * License); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.phoenix.end2end; + +import org.apache.phoenix.schema.TypeMismatchException; +import org.junit.Test; + +import java.sql.*; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class ArrayConcatFunctionIT extends BaseHBaseManagedTimeIT { + +private void initTables(Connection conn) throws Exception { +String ddl = CREATE TABLE regions (region_name VARCHAR PRIMARY KEY,varchars VARCHAR[],integers INTEGER[],doubles DOUBLE[],bigints BIGINT[],chars CHAR(15)[],double1 DOUBLE,char1 CHAR(17),nullcheck INTEGER,chars2 CHAR(15)[]); +conn.createStatement().execute(ddl); +String dml = UPSERT INTO
[43/47] phoenix git commit: PHOENIX-2085 Include joda-time in phoenix server jar
PHOENIX-2085 Include joda-time in phoenix server jar Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d2392bea Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d2392bea Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d2392bea Branch: refs/heads/calcite Commit: d2392beae3318099685856ffd18825028f21a7d1 Parents: 72a7356 Author: Thomas D'Silva tdsi...@salesforce.com Authored: Mon Jun 29 13:21:30 2015 -0700 Committer: Thomas D'Silva tdsi...@salesforce.com Committed: Tue Jun 30 22:10:33 2015 -0700 -- phoenix-assembly/src/build/server.xml | 1 + 1 file changed, 1 insertion(+) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/d2392bea/phoenix-assembly/src/build/server.xml -- diff --git a/phoenix-assembly/src/build/server.xml b/phoenix-assembly/src/build/server.xml index 12d3d81..78a4b1f 100644 --- a/phoenix-assembly/src/build/server.xml +++ b/phoenix-assembly/src/build/server.xml @@ -38,6 +38,7 @@ includeorg.iq80.snappy:snappy/include includeorg.jruby.joni:joni/include includeorg.jruby.jcodings:jcodings/include + includejoda-time:joda-time/include /includes /dependencySet dependencySet
[06/47] phoenix git commit: PHOENIX-1981 : PhoenixHBase Load and Store Funcs should handle all Pig data types
PHOENIX-1981 : PhoenixHBase Load and Store Funcs should handle all Pig data types Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a4aa780c Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a4aa780c Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a4aa780c Branch: refs/heads/calcite Commit: a4aa780c78f73cf0ee5f7d5e7afefd7ab581097a Parents: 8a0dee7 Author: Prashant Kommireddi pkommire...@pkommireddi-ltm.internal.salesforce.com Authored: Mon May 18 19:48:30 2015 -0700 Committer: Eli Levine elilev...@apache.org Committed: Mon Jun 15 18:17:45 2015 -0700 -- .../src/main/java/org/apache/phoenix/pig/util/TypeUtil.java | 8 +++- .../test/java/org/apache/phoenix/pig/util/TypeUtilTest.java | 8 +++- 2 files changed, 6 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/a4aa780c/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java -- diff --git a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java index c8bc9d8..6e32fb5 100644 --- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java +++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java @@ -1,17 +1,15 @@ /* - * Copyright 2010 The Apache Software Foundation - * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file - *distributed with this work for additional information + * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the - * License); you maynot use this file except in compliance + * License); you may not use this file except in compliance * with the License. 
You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicablelaw or agreed to in writing, software + * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an AS IS BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and http://git-wip-us.apache.org/repos/asf/phoenix/blob/a4aa780c/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java -- diff --git a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java index 56167f6..0b44d2b 100644 --- a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java +++ b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java @@ -1,17 +1,15 @@ /* - * Copyright 2010 The Apache Software Foundation - * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file - *distributed with this work for additional information + * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the - * License); you maynot use this file except in compliance + * License); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicablelaw or agreed to in writing, software + * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an AS IS BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and
[04/47] phoenix git commit: PHOENIX-1981 : PhoenixHBase Load and Store Funcs should handle all Pig data types
PHOENIX-1981 : PhoenixHBase Load and Store Funcs should handle all Pig data types Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8076126a Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8076126a Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8076126a Branch: refs/heads/calcite Commit: 8076126a741a0cf2a5839b88904fa08bfdfb6cdb Parents: b61ef77 Author: Prashant Kommireddi pkommire...@pkommireddi-ltm.internal.salesforce.com Authored: Mon May 18 19:41:08 2015 -0700 Committer: Eli Levine elilev...@apache.org Committed: Mon Jun 15 18:17:44 2015 -0700 -- .../org/apache/phoenix/pig/util/TypeUtil.java | 415 +-- .../apache/phoenix/pig/util/TypeUtilTest.java | 52 +++ 2 files changed, 251 insertions(+), 216 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/8076126a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java -- diff --git a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java index bdee3a4..6549445 100644 --- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java +++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java @@ -1,19 +1,11 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE + * file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by + * applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. */ package org.apache.phoenix.pig.util; @@ -29,11 +21,11 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.phoenix.pig.writable.PhoenixPigDBWritable; import org.apache.phoenix.schema.types.PBinary; -import org.apache.phoenix.schema.types.PChar; -import org.apache.phoenix.schema.types.PDecimal; import org.apache.phoenix.schema.types.PBoolean; +import org.apache.phoenix.schema.types.PChar; import org.apache.phoenix.schema.types.PDataType; import org.apache.phoenix.schema.types.PDate; +import org.apache.phoenix.schema.types.PDecimal; import org.apache.phoenix.schema.types.PDouble; import org.apache.phoenix.schema.types.PFloat; import org.apache.phoenix.schema.types.PInteger; @@ -56,7 +48,6 @@ import org.apache.phoenix.schema.types.PVarchar; import org.apache.pig.PigException; import org.apache.pig.ResourceSchema.ResourceFieldSchema; import org.apache.pig.backend.hadoop.hbase.HBaseBinaryConverter; -import org.apache.pig.builtin.Utf8StorageConverter; import org.apache.pig.data.DataByteArray; import org.apache.pig.data.DataType; import 
org.apache.pig.data.Tuple; @@ -68,258 +59,250 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap.Builder; public final class TypeUtil { - + private static final Log LOG = LogFactory.getLog(TypeUtil.class); -private static final HBaseBinaryConverter binaryConverter = new HBaseBinaryConverter (); - private static final ImmutableMapPDataType,Byte phoenixTypeToPigDataType = init(); - - private TypeUtil(){ - } - - /** -* A map of Phoenix to Pig data types. -* @return -*/ - private static ImmutableMapPDataType, Byte init() { -final ImmutableMap.BuilderPDataType,Byte builder = new BuilderPDataType,Byte (); -
[13/47] phoenix git commit: PHOENIX-2049 Change ArraysWithNullsIT to be derived from BaseHBaseManagedTimeIT
PHOENIX-2049 Change ArraysWithNullsIT to be derived from BaseHBaseManagedTimeIT Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2d70eff6 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2d70eff6 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2d70eff6 Branch: refs/heads/calcite Commit: 2d70eff6594d0f46b10f2d9c4c8fa5d43d6ba5ab Parents: fb44f35 Author: James Taylor jamestay...@apache.org Authored: Wed Jun 17 17:09:33 2015 -0700 Committer: James Taylor jamestay...@apache.org Committed: Wed Jun 17 17:13:31 2015 -0700 -- .../src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/2d70eff6/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java index b034193..e95a386 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java @@ -26,7 +26,7 @@ import org.apache.phoenix.schema.types.PTimestamp; import org.apache.phoenix.schema.types.PhoenixArray; import org.junit.Test; -public class ArraysWithNullsIT extends BaseClientManagedTimeIT { +public class ArraysWithNullsIT extends BaseHBaseManagedTimeIT { @Test public void testArrayUpsertIntWithNulls() throws Exception {
[03/47] phoenix git commit: PHOENIX-2040 Mark spark/scala dependencies as 'provided' (Josh Mahonin)
PHOENIX-2040 Mark spark/scala dependencies as 'provided' (Josh Mahonin) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b61ef77e Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b61ef77e Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b61ef77e Branch: refs/heads/calcite Commit: b61ef77e509a024ccaf6c3ce74c385c31c5f534a Parents: c2927dd Author: Nick Dimiduk ndimi...@apache.org Authored: Mon Jun 15 16:16:03 2015 -0700 Committer: Nick Dimiduk ndimi...@apache.org Committed: Mon Jun 15 16:16:03 2015 -0700 -- phoenix-assembly/pom.xml | 4 phoenix-spark/pom.xml| 51 --- 2 files changed, 32 insertions(+), 23 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/b61ef77e/phoenix-assembly/pom.xml -- diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml index baf6738..51ff74d 100644 --- a/phoenix-assembly/pom.xml +++ b/phoenix-assembly/pom.xml @@ -152,6 +152,10 @@ /dependency dependency groupIdorg.apache.phoenix/groupId + artifactIdphoenix-spark/artifactId +/dependency +dependency + groupIdorg.apache.phoenix/groupId artifactIdphoenix-server/artifactId /dependency dependency http://git-wip-us.apache.org/repos/asf/phoenix/blob/b61ef77e/phoenix-spark/pom.xml -- diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml index 7086bb6..289801a 100644 --- a/phoenix-spark/pom.xml +++ b/phoenix-spark/pom.xml @@ -45,12 +45,7 @@ groupIdorg.apache.phoenix/groupId artifactIdphoenix-core/artifactId /dependency -dependency - groupIdorg.apache.phoenix/groupId - artifactIdphoenix-core/artifactId - classifiertests/classifier - scopetest/scope -/dependency + !-- Force import of Spark's servlet API for unit tests -- dependency groupIdjavax.servlet/groupId @@ -59,16 +54,38 @@ scopetest/scope /dependency +!-- Mark Spark / Scala as provided -- dependency - groupIdjunit/groupId - artifactIdjunit/artifactId + groupIdorg.scala-lang/groupId + artifactIdscala-library/artifactId + 
version${scala.version}/version + scopeprovided/scope +/dependency +dependency + groupIdorg.apache.spark/groupId + artifactIdspark-core_${scala.binary.version}/artifactId + version${spark.version}/version + scopeprovided/scope +/dependency +dependency + groupIdorg.apache.spark/groupId + artifactIdspark-sql_${scala.binary.version}/artifactId + version${spark.version}/version + scopeprovided/scope +/dependency + +!-- Test dependencies -- +dependency + groupIdorg.apache.phoenix/groupId + artifactIdphoenix-core/artifactId + classifiertests/classifier scopetest/scope /dependency dependency - groupIdorg.scala-lang/groupId - artifactIdscala-library/artifactId - version${scala.version}/version + groupIdjunit/groupId + artifactIdjunit/artifactId + scopetest/scope /dependency dependency @@ -86,18 +103,6 @@ /dependency dependency - groupIdorg.apache.spark/groupId - artifactIdspark-core_${scala.binary.version}/artifactId - version${spark.version}/version -/dependency - -dependency - groupIdorg.apache.spark/groupId - artifactIdspark-sql_${scala.binary.version}/artifactId - version${spark.version}/version -/dependency - -dependency groupIdorg.apache.hadoop/groupId artifactIdhadoop-client/artifactId version${hadoop-two.version}/version
[24/47] phoenix git commit: PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output directory suffix(Rajeshbabu)
PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output directory suffix(Rajeshbabu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3cf22a7d Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3cf22a7d Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3cf22a7d Branch: refs/heads/calcite Commit: 3cf22a7de4eaec6978763b6961d73aa9eaa07015 Parents: 50f3a04 Author: Rajeshbabu Chintaguntla rajeshb...@apache.org Authored: Thu Jun 25 01:16:51 2015 +0530 Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org Committed: Thu Jun 25 01:16:51 2015 +0530 -- .../phoenix/mapreduce/CsvBulkLoadToolIT.java | 19 +++ .../phoenix/mapreduce/CsvBulkLoadTool.java | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cf22a7d/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java index 392395d..6bcc221 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java @@ -39,6 +39,7 @@ import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest; import org.apache.phoenix.jdbc.PhoenixDriver; import org.apache.phoenix.util.DateUtil; import org.apache.phoenix.util.PhoenixRuntime; +import org.apache.phoenix.util.QueryUtil; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -206,6 +207,8 @@ public class CsvBulkLoadToolIT { String ddl = CREATE LOCAL INDEX TABLE6_IDX ON TABLE6 + (FIRST_NAME ASC); stmt.execute(ddl); +ddl = CREATE LOCAL INDEX TABLE6_IDX2 ON TABLE6 + (LAST_NAME ASC); +stmt.execute(ddl); FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration()); FSDataOutputStream outputStream = 
fs.create(new Path(/tmp/input3.csv)); @@ -228,6 +231,22 @@ public class CsvBulkLoadToolIT { assertEquals(FirstName 2, rs.getString(2)); rs.close(); +rs = +stmt.executeQuery(EXPLAIN SELECT id, FIRST_NAME FROM TABLE6 where first_name='FirstName 2'); +assertEquals( +CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 [-32768,'FirstName 2']\n ++ SERVER FILTER BY FIRST KEY ONLY, QueryUtil.getExplainPlan(rs)); +rs.close(); +rs = stmt.executeQuery(SELECT id, LAST_NAME FROM TABLE6 where last_name='LastName 2'); +assertTrue(rs.next()); +assertEquals(2, rs.getInt(1)); +assertEquals(LastName 2, rs.getString(2)); +rs.close(); +rs = +stmt.executeQuery(EXPLAIN SELECT id, LAST_NAME FROM TABLE6 where last_name='LastName 2'); +assertEquals( +CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 [-32767,'LastName 2']\n ++ SERVER FILTER BY FIRST KEY ONLY, QueryUtil.getExplainPlan(rs)); stmt.close(); } http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cf22a7d/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java index 9e27bac..5270277 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java @@ -265,7 +265,7 @@ public class CsvBulkLoadTool extends Configured implements Tool { JobManager.createThreadPoolExec(Integer.MAX_VALUE, 5, 20, useInstrumentedPool); try{ for (TargetTableRef table : tablesToBeLoaded) { - Path tablePath = new Path(outputPath, table.getPhysicalName()); + Path tablePath = new Path(outputPath, table.getLogicalName()); Configuration jobConf = new Configuration(conf); jobConf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, qualifiedTableName); if (qualifiedTableName.compareToIgnoreCase(table.getLogicalName()) != 0) {
[38/47] phoenix git commit: PHOENIX-2042 Windows need hadoop native libraries to run tests (Alicia Ying Shu)
PHOENIX-2042 Windows need hadoop native libraries to run tests (Alicia Ying Shu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bc2aef89 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bc2aef89 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bc2aef89 Branch: refs/heads/calcite Commit: bc2aef89423eee836f24a123860676e967caf079 Parents: 83b8db4 Author: Enis Soztutar e...@apache.org Authored: Mon Jun 29 18:16:35 2015 -0700 Committer: Enis Soztutar e...@apache.org Committed: Mon Jun 29 18:16:35 2015 -0700 -- pom.xml | 8 1 file changed, 4 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/bc2aef89/pom.xml -- diff --git a/pom.xml b/pom.xml index 707ea66..9fa6702 100644 --- a/pom.xml +++ b/pom.xml @@ -266,7 +266,7 @@ encodingUTF-8/encoding forkCount${numForkedIT}/forkCount reuseForkstrue/reuseForks - argLine-enableassertions -Xmx2000m -XX:MaxPermSize=128m -Djava.security.egd=file:/dev/./urandom/argLine + argLine-enableassertions -Xmx2000m -XX:MaxPermSize=128m -Djava.security.egd=file:/dev/./urandom -Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}/argLine redirectTestOutputToFile${test.output.tofile}/redirectTestOutputToFile testSourceDirectory${basedir}/src/it/java/testSourceDirectory groupsorg.apache.phoenix.end2end.ClientManagedTimeTest/groups @@ -282,7 +282,7 @@ encodingUTF-8/encoding forkCount${numForkedIT}/forkCount reuseForkstrue/reuseForks - argLine-enableassertions -Xmx2000m -XX:MaxPermSize=128m -Djava.security.egd=file:/dev/./urandom/argLine + argLine-enableassertions -Xmx2000m -XX:MaxPermSize=128m -Djava.security.egd=file:/dev/./urandom -Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}/argLine redirectTestOutputToFile${test.output.tofile}/redirectTestOutputToFile testSourceDirectory${basedir}/src/it/java/testSourceDirectory 
groupsorg.apache.phoenix.end2end.HBaseManagedTimeTest/groups @@ -298,7 +298,7 @@ encodingUTF-8/encoding forkCount${numForkedIT}/forkCount reuseForkstrue/reuseForks - argLine-enableassertions -Xmx2000m -XX:MaxPermSize=128m -Djava.security.egd=file:/dev/./urandom/argLine + argLine-enableassertions -Xmx2000m -XX:MaxPermSize=128m -Djava.security.egd=file:/dev/./urandom -Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}/argLine redirectTestOutputToFile${test.output.tofile}/redirectTestOutputToFile testSourceDirectory${basedir}/src/it/java/testSourceDirectory groupsorg.apache.phoenix.end2end.NeedsOwnMiniClusterTest/groups @@ -395,7 +395,7 @@ forkCount${numForkedUT}/forkCount reuseForkstrue/reuseForks argLine-enableassertions -Xmx2250m -XX:MaxPermSize=128m --Djava.security.egd=file:/dev/./urandom/argLine +-Djava.security.egd=file:/dev/./urandom -Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}/argLine redirectTestOutputToFile${test.output.tofile}/redirectTestOutputToFile /configuration /plugin
[39/47] phoenix git commit: PHOENIX-2060 - Implement ARRAY_FILL built in function (Dumindu Buddhika)
PHOENIX-2060 - Implement ARRAY_FILL built in function (Dumindu Buddhika) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c0ad8cf6 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c0ad8cf6 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c0ad8cf6 Branch: refs/heads/calcite Commit: c0ad8cf6772b59e0ee24d1a4e8bc935d35a26a13 Parents: bc2aef8 Author: ramkrishna ramkrishna.s.vasude...@gmail.com Authored: Tue Jun 30 22:26:53 2015 +0530 Committer: ramkrishna ramkrishna.s.vasude...@gmail.com Committed: Tue Jun 30 22:26:53 2015 +0530 -- .../main/java/org/apache/phoenix/expression/ExpressionType.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/c0ad8cf6/phoenix-core/src/main/java/org/apache/phoenix/expression/ExpressionType.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/ExpressionType.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/ExpressionType.java index 51f4089..ef14e6a 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/expression/ExpressionType.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/ExpressionType.java @@ -25,6 +25,7 @@ import org.apache.phoenix.expression.function.ArrayAnyComparisonExpression; import org.apache.phoenix.expression.function.ArrayAppendFunction; import org.apache.phoenix.expression.function.ArrayConcatFunction; import org.apache.phoenix.expression.function.ArrayElemRefExpression; +import org.apache.phoenix.expression.function.ArrayFillFunction; import org.apache.phoenix.expression.function.ArrayIndexFunction; import org.apache.phoenix.expression.function.ArrayLengthFunction; import org.apache.phoenix.expression.function.ArrayPrependFunction; @@ -247,7 +248,8 @@ public enum ExpressionType { LogFunction(LogFunction.class), ExpFunction(ExpFunction.class), PowerFunction(PowerFunction.class), 
-ArrayConcatFunction(ArrayConcatFunction.class) +ArrayConcatFunction(ArrayConcatFunction.class), +ArrayFillFunction(ArrayFillFunction.class) ; ExpressionType(Class? extends Expression clazz) {
[01/47] phoenix git commit: PHOENIX-1660 Implement missing math built-in functions ABS, POWER, LN, LOG, SQRT, CBRT, EXP (Shuxiong Ye)
Repository: phoenix Updated Branches: refs/heads/calcite b58600738 - 1327c726a http://git-wip-us.apache.org/repos/asf/phoenix/blob/c2927dde/phoenix-core/src/test/java/org/apache/phoenix/expression/ExpFunctionTest.java -- diff --git a/phoenix-core/src/test/java/org/apache/phoenix/expression/ExpFunctionTest.java b/phoenix-core/src/test/java/org/apache/phoenix/expression/ExpFunctionTest.java new file mode 100644 index 000..b7b95c2 --- /dev/null +++ b/phoenix-core/src/test/java/org/apache/phoenix/expression/ExpFunctionTest.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * License); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.phoenix.expression; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.math.BigDecimal; +import java.sql.SQLException; +import java.util.List; +import java.util.Random; + +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.phoenix.expression.function.ExpFunction; +import org.apache.phoenix.schema.SortOrder; +import org.apache.phoenix.schema.types.PDecimal; +import org.apache.phoenix.schema.types.PDouble; +import org.apache.phoenix.schema.types.PFloat; +import org.apache.phoenix.schema.types.PInteger; +import org.apache.phoenix.schema.types.PLong; +import org.apache.phoenix.schema.types.PNumericType; +import org.apache.phoenix.schema.types.PSmallint; +import org.apache.phoenix.schema.types.PTinyint; +import org.apache.phoenix.schema.types.PUnsignedDouble; +import org.apache.phoenix.schema.types.PUnsignedFloat; +import org.apache.phoenix.schema.types.PUnsignedInt; +import org.apache.phoenix.schema.types.PUnsignedLong; +import org.junit.Test; + +import com.google.common.collect.Lists; + +/** + * Unit tests for {@link ExpFunction} + */ +public class ExpFunctionTest { +private static final double ZERO = 1e-9; + +private static boolean twoDoubleEquals(double a, double b) { +if (Double.isNaN(a) ^ Double.isNaN(b)) return false; +if (Double.isNaN(a)) return true; +if (Double.isInfinite(a) ^ Double.isInfinite(b)) return false; +if (Double.isInfinite(a)) { +if ((a 0) ^ (b 0)) return false; +else return true; +} +if (Math.abs(a - b) = ZERO) { +return true; +} else { +return false; +} +} + +private static boolean testExpression(LiteralExpression literal, double expected) +throws SQLException { +ListExpression expressions = Lists.newArrayList((Expression) literal); +Expression sqrtFunction = new ExpFunction(expressions); +ImmutableBytesWritable ptr = new ImmutableBytesWritable(); +boolean ret = sqrtFunction.evaluate(null, ptr); +if (ret) { +Double result = +(Double) 
sqrtFunction.getDataType().toObject(ptr, sqrtFunction.getSortOrder()); +assertTrue(twoDoubleEquals(result.doubleValue(), expected)); +} +return ret; +} + +private static void test(Number value, PNumericType dataType, double expected) +throws SQLException { +LiteralExpression literal; +literal = LiteralExpression.newConstant(value, dataType, SortOrder.ASC); +boolean ret1 = testExpression(literal, expected); +literal = LiteralExpression.newConstant(value, dataType, SortOrder.DESC); +boolean ret2 = testExpression(literal, expected); +assertEquals(ret1, ret2); +} + +private static void testBatch(Number[] value, PNumericType dataType) throws SQLException { +double[] expected = new double[value.length]; +for (int i = 0; i expected.length; ++i) { +expected[i] = Math.exp(value[i].doubleValue()); +} +assertEquals(value.length, expected.length); +for (int i = 0; i value.length; ++i) { +test(value[i], dataType, expected[i]); +} +} + +@Test +public void testSqrtFunction() throws Exception { +Random random = new Random(); + +testBatch( +new BigDecimal[] { BigDecimal.valueOf(1.0), BigDecimal.valueOf(0.0), +
phoenix git commit: PHOENIX-2065 Throw TableNotFoundException when select all columns of one column family from the table with schema (Jun Ng)
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-0.98 a7a55ab53 - 47a24c8f2 PHOENIX-2065 Throw TableNotFoundException when select all columns of one column family from the table with schema (Jun Ng) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/47a24c8f Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/47a24c8f Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/47a24c8f Branch: refs/heads/4.x-HBase-0.98 Commit: 47a24c8f208df28adcc86771b844fa2fa6564c22 Parents: a7a55ab Author: Thomas D'Silva tdsi...@salesforce.com Authored: Thu Jul 2 21:37:10 2015 -0700 Committer: Thomas D'Silva tdsi...@salesforce.com Committed: Thu Jul 2 21:44:01 2015 -0700 -- .../apache/phoenix/compile/FromCompiler.java| 7 +++- .../phoenix/compile/QueryCompilerTest.java | 41 2 files changed, 47 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/47a24c8f/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java index 5fe0e6f..bc753c9 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java @@ -710,7 +710,12 @@ public class FromCompiler { if (theColumnFamilyRef != null) { return theColumnFamilyRef; } throw new TableNotFoundException(cfName); } else { -TableRef tableRef = resolveTable(null, tableName); +TableRef tableRef = null; +try { +tableRef = resolveTable(null, tableName); +} catch (TableNotFoundException e) { +return resolveColumnFamily(null, cfName); +} PColumnFamily columnFamily = tableRef.getTable().getColumnFamily(cfName); return new ColumnFamilyRef(tableRef, columnFamily); } 
http://git-wip-us.apache.org/repos/asf/phoenix/blob/47a24c8f/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java -- diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java index 79721df..559ce10 100644 --- a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java +++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java @@ -1859,4 +1859,45 @@ public class QueryCompilerTest extends BaseConnectionlessQueryTest { assertFalse(Expected plan to not use round robin iterator + query, plan.useRoundRobinIterator()); } } + +@Test +public void testSelectColumnsInOneFamily() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +Statement statement = conn.createStatement(); +try { +// create table with specified column family. +String create = CREATE TABLE t (k integer not null primary key, f1.v1 varchar, f1.v2 varchar, f2.v3 varchar, v4 varchar); +statement.execute(create); +// select columns in one family. +String query = SELECT f1.*, v4 FROM t; +ResultSetMetaData rsMeta = statement.executeQuery(query).getMetaData(); +assertEquals(V1, rsMeta.getColumnName(1)); +assertEquals(V2, rsMeta.getColumnName(2)); +assertEquals(V4, rsMeta.getColumnName(3)); +} finally { +statement.execute(DROP TABLE IF EXISTS t); +conn.close(); +} +} + +@Test +public void testSelectColumnsInOneFamilyWithSchema() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +Statement statement = conn.createStatement(); +try { +// create table with specified column family. +String create = CREATE TABLE s.t (k integer not null primary key, f1.v1 varchar, f1.v2 varchar, f2.v3 varchar, v4 varchar); +statement.execute(create); +// select columns in one family. 
+String query = SELECT f1.*, v4 FROM s.t; +ResultSetMetaData rsMeta = statement.executeQuery(query).getMetaData(); +assertEquals(V1, rsMeta.getColumnName(1)); +assertEquals(V2, rsMeta.getColumnName(2)); +assertEquals(V4, rsMeta.getColumnName(3)); +} finally { +statement.execute(DROP TABLE IF EXISTS s.t); +
phoenix git commit: PHOENIX-2065 Throw TableNotFoundException when select all columns of one column family from the table with schema (Jun Ng)
Repository: phoenix Updated Branches: refs/heads/4.4-HBase-1.0 95edc578c - 82c3ddd8f PHOENIX-2065 Throw TableNotFoundException when select all columns of one column family from the table with schema (Jun Ng) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/82c3ddd8 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/82c3ddd8 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/82c3ddd8 Branch: refs/heads/4.4-HBase-1.0 Commit: 82c3ddd8fbd182fbfd82cefa8011fc9450afd8f1 Parents: 95edc57 Author: Thomas D'Silva tdsi...@salesforce.com Authored: Thu Jul 2 21:37:10 2015 -0700 Committer: Thomas D'Silva tdsi...@salesforce.com Committed: Thu Jul 2 21:41:46 2015 -0700 -- .../apache/phoenix/compile/FromCompiler.java| 7 +++- .../phoenix/compile/QueryCompilerTest.java | 41 2 files changed, 47 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/82c3ddd8/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java index 5fe0e6f..bc753c9 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java @@ -710,7 +710,12 @@ public class FromCompiler { if (theColumnFamilyRef != null) { return theColumnFamilyRef; } throw new TableNotFoundException(cfName); } else { -TableRef tableRef = resolveTable(null, tableName); +TableRef tableRef = null; +try { +tableRef = resolveTable(null, tableName); +} catch (TableNotFoundException e) { +return resolveColumnFamily(null, cfName); +} PColumnFamily columnFamily = tableRef.getTable().getColumnFamily(cfName); return new ColumnFamilyRef(tableRef, columnFamily); } 
http://git-wip-us.apache.org/repos/asf/phoenix/blob/82c3ddd8/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java -- diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java index 7be8eae..cbbfb90 100644 --- a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java +++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java @@ -1793,4 +1793,45 @@ public class QueryCompilerTest extends BaseConnectionlessQueryTest { assertFalse(Expected plan to not use round robin iterator + query, plan.useRoundRobinIterator()); } } + +@Test +public void testSelectColumnsInOneFamily() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +Statement statement = conn.createStatement(); +try { +// create table with specified column family. +String create = CREATE TABLE t (k integer not null primary key, f1.v1 varchar, f1.v2 varchar, f2.v3 varchar, v4 varchar); +statement.execute(create); +// select columns in one family. +String query = SELECT f1.*, v4 FROM t; +ResultSetMetaData rsMeta = statement.executeQuery(query).getMetaData(); +assertEquals(V1, rsMeta.getColumnName(1)); +assertEquals(V2, rsMeta.getColumnName(2)); +assertEquals(V4, rsMeta.getColumnName(3)); +} finally { +statement.execute(DROP TABLE IF EXISTS t); +conn.close(); +} +} + +@Test +public void testSelectColumnsInOneFamilyWithSchema() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +Statement statement = conn.createStatement(); +try { +// create table with specified column family. +String create = CREATE TABLE s.t (k integer not null primary key, f1.v1 varchar, f1.v2 varchar, f2.v3 varchar, v4 varchar); +statement.execute(create); +// select columns in one family. 
+String query = SELECT f1.*, v4 FROM s.t; +ResultSetMetaData rsMeta = statement.executeQuery(query).getMetaData(); +assertEquals(V1, rsMeta.getColumnName(1)); +assertEquals(V2, rsMeta.getColumnName(2)); +assertEquals(V4, rsMeta.getColumnName(3)); +} finally { +statement.execute(DROP TABLE IF EXISTS s.t); +
phoenix git commit: PHOENIX-2065 Throw TableNotFoundException when select all columns of one column family from the table with schema (Jun Ng)
Repository: phoenix Updated Branches: refs/heads/4.4-HBase-1.1 2fba75c65 - 4db7aaf38 PHOENIX-2065 Throw TableNotFoundException when select all columns of one column family from the table with schema (Jun Ng) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/4db7aaf3 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/4db7aaf3 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/4db7aaf3 Branch: refs/heads/4.4-HBase-1.1 Commit: 4db7aaf38522a655027bcaf7e6bec10374629c63 Parents: 2fba75c Author: Thomas D'Silva tdsi...@salesforce.com Authored: Thu Jul 2 21:37:10 2015 -0700 Committer: Thomas D'Silva tdsi...@salesforce.com Committed: Thu Jul 2 21:43:22 2015 -0700 -- .../apache/phoenix/compile/FromCompiler.java| 7 +++- .../phoenix/compile/QueryCompilerTest.java | 41 2 files changed, 47 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/4db7aaf3/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java index 5fe0e6f..bc753c9 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java @@ -710,7 +710,12 @@ public class FromCompiler { if (theColumnFamilyRef != null) { return theColumnFamilyRef; } throw new TableNotFoundException(cfName); } else { -TableRef tableRef = resolveTable(null, tableName); +TableRef tableRef = null; +try { +tableRef = resolveTable(null, tableName); +} catch (TableNotFoundException e) { +return resolveColumnFamily(null, cfName); +} PColumnFamily columnFamily = tableRef.getTable().getColumnFamily(cfName); return new ColumnFamilyRef(tableRef, columnFamily); } 
http://git-wip-us.apache.org/repos/asf/phoenix/blob/4db7aaf3/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java -- diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java index 7be8eae..cbbfb90 100644 --- a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java +++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java @@ -1793,4 +1793,45 @@ public class QueryCompilerTest extends BaseConnectionlessQueryTest { assertFalse(Expected plan to not use round robin iterator + query, plan.useRoundRobinIterator()); } } + +@Test +public void testSelectColumnsInOneFamily() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +Statement statement = conn.createStatement(); +try { +// create table with specified column family. +String create = CREATE TABLE t (k integer not null primary key, f1.v1 varchar, f1.v2 varchar, f2.v3 varchar, v4 varchar); +statement.execute(create); +// select columns in one family. +String query = SELECT f1.*, v4 FROM t; +ResultSetMetaData rsMeta = statement.executeQuery(query).getMetaData(); +assertEquals(V1, rsMeta.getColumnName(1)); +assertEquals(V2, rsMeta.getColumnName(2)); +assertEquals(V4, rsMeta.getColumnName(3)); +} finally { +statement.execute(DROP TABLE IF EXISTS t); +conn.close(); +} +} + +@Test +public void testSelectColumnsInOneFamilyWithSchema() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +Statement statement = conn.createStatement(); +try { +// create table with specified column family. +String create = CREATE TABLE s.t (k integer not null primary key, f1.v1 varchar, f1.v2 varchar, f2.v3 varchar, v4 varchar); +statement.execute(create); +// select columns in one family. 
+String query = SELECT f1.*, v4 FROM s.t; +ResultSetMetaData rsMeta = statement.executeQuery(query).getMetaData(); +assertEquals(V1, rsMeta.getColumnName(1)); +assertEquals(V2, rsMeta.getColumnName(2)); +assertEquals(V4, rsMeta.getColumnName(3)); +} finally { +statement.execute(DROP TABLE IF EXISTS s.t); +
phoenix git commit: PHOENIX-2065 Throw TableNotFoundException when select all columns of one column family from the table with schema (Jun Ng)
Repository: phoenix Updated Branches: refs/heads/4.4-HBase-0.98 aa288f8fe - fca8fb250 PHOENIX-2065 Throw TableNotFoundException when select all columns of one column family from the table with schema (Jun Ng) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/fca8fb25 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/fca8fb25 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/fca8fb25 Branch: refs/heads/4.4-HBase-0.98 Commit: fca8fb2508da5d46cfd182cef14dc7ea9ddf0b01 Parents: aa288f8 Author: Thomas D'Silva tdsi...@salesforce.com Authored: Thu Jul 2 21:37:10 2015 -0700 Committer: Thomas D'Silva tdsi...@salesforce.com Committed: Thu Jul 2 21:41:20 2015 -0700 -- .../apache/phoenix/compile/FromCompiler.java| 7 +++- .../phoenix/compile/QueryCompilerTest.java | 41 2 files changed, 47 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/fca8fb25/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java index 5fe0e6f..bc753c9 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java @@ -710,7 +710,12 @@ public class FromCompiler { if (theColumnFamilyRef != null) { return theColumnFamilyRef; } throw new TableNotFoundException(cfName); } else { -TableRef tableRef = resolveTable(null, tableName); +TableRef tableRef = null; +try { +tableRef = resolveTable(null, tableName); +} catch (TableNotFoundException e) { +return resolveColumnFamily(null, cfName); +} PColumnFamily columnFamily = tableRef.getTable().getColumnFamily(cfName); return new ColumnFamilyRef(tableRef, columnFamily); } 
http://git-wip-us.apache.org/repos/asf/phoenix/blob/fca8fb25/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java -- diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java index 7be8eae..cbbfb90 100644 --- a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java +++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java @@ -1793,4 +1793,45 @@ public class QueryCompilerTest extends BaseConnectionlessQueryTest { assertFalse(Expected plan to not use round robin iterator + query, plan.useRoundRobinIterator()); } } + +@Test +public void testSelectColumnsInOneFamily() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +Statement statement = conn.createStatement(); +try { +// create table with specified column family. +String create = CREATE TABLE t (k integer not null primary key, f1.v1 varchar, f1.v2 varchar, f2.v3 varchar, v4 varchar); +statement.execute(create); +// select columns in one family. +String query = SELECT f1.*, v4 FROM t; +ResultSetMetaData rsMeta = statement.executeQuery(query).getMetaData(); +assertEquals(V1, rsMeta.getColumnName(1)); +assertEquals(V2, rsMeta.getColumnName(2)); +assertEquals(V4, rsMeta.getColumnName(3)); +} finally { +statement.execute(DROP TABLE IF EXISTS t); +conn.close(); +} +} + +@Test +public void testSelectColumnsInOneFamilyWithSchema() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +Statement statement = conn.createStatement(); +try { +// create table with specified column family. +String create = CREATE TABLE s.t (k integer not null primary key, f1.v1 varchar, f1.v2 varchar, f2.v3 varchar, v4 varchar); +statement.execute(create); +// select columns in one family. 
+String query = SELECT f1.*, v4 FROM s.t; +ResultSetMetaData rsMeta = statement.executeQuery(query).getMetaData(); +assertEquals(V1, rsMeta.getColumnName(1)); +assertEquals(V2, rsMeta.getColumnName(2)); +assertEquals(V4, rsMeta.getColumnName(3)); +} finally { +statement.execute(DROP TABLE IF EXISTS s.t); +
phoenix git commit: Revert unnecessary changes to existing Phoenix classes
Repository: phoenix Updated Branches: refs/heads/calcite 1327c726a - 57e1b485b Revert unnecessary changes to existing Phoenix classes Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/57e1b485 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/57e1b485 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/57e1b485 Branch: refs/heads/calcite Commit: 57e1b485b71f14349704f0d5eb91e97e256db179 Parents: 1327c72 Author: maryannxue wei@intel.com Authored: Thu Jul 2 18:26:54 2015 -0400 Committer: maryannxue wei@intel.com Committed: Thu Jul 2 18:26:54 2015 -0400 -- .../org/apache/phoenix/calcite/rel/PhoenixServerJoin.java | 2 +- .../org/apache/phoenix/calcite/rel/PhoenixTableScan.java| 4 ++-- .../phoenix/expression/ProjectedColumnExpression.java | 9 - .../main/java/org/apache/phoenix/parse/SelectStatement.java | 7 --- 4 files changed, 3 insertions(+), 19 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/57e1b485/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixServerJoin.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixServerJoin.java b/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixServerJoin.java index 328549b..e07f345 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixServerJoin.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixServerJoin.java @@ -134,7 +134,7 @@ public class PhoenixServerJoin extends PhoenixAbstractJoin { new int[] {leftTable.getColumns().size() - leftTable.getPKColumns().size()}, postFilterExpr, null); -return HashJoinPlan.create(SelectStatement.SELECT_STAR, leftPlan, hashJoinInfo, new HashJoinPlan.HashSubPlan[] {new HashJoinPlan.HashSubPlan(0, rightPlan, rightExprs, isSingleValueRhs, null, null)}); +return HashJoinPlan.create((SelectStatement) (leftPlan.getStatement()), leftPlan, hashJoinInfo, new HashJoinPlan.HashSubPlan[] {new 
HashJoinPlan.HashSubPlan(0, rightPlan, rightExprs, isSingleValueRhs, null, null)}); } } http://git-wip-us.apache.org/repos/asf/phoenix/blob/57e1b485/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixTableScan.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixTableScan.java b/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixTableScan.java index 429b73f..2e11c09 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixTableScan.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixTableScan.java @@ -90,7 +90,7 @@ public class PhoenixTableScan extends TableScan implements PhoenixRel { TableRef tableRef = new TableRef(CalciteUtils.createTempAlias(), pTable, HConstants.LATEST_TIMESTAMP, false); Implementor tmpImplementor = new PhoenixRelImplementorImpl(); tmpImplementor.setTableRef(tableRef); -SelectStatement select = SelectStatement.SELECT_STAR; +SelectStatement select = SelectStatement.SELECT_ONE; PhoenixStatement stmt = new PhoenixStatement(phoenixTable.pc); ColumnResolver resolver = FromCompiler.getResolver(tableRef); StatementContext context = new StatementContext(stmt, resolver, new Scan(), new SequenceManager(stmt)); @@ -156,7 +156,7 @@ public class PhoenixTableScan extends TableScan implements PhoenixRel { PhoenixStatement stmt = new PhoenixStatement(phoenixTable.pc); ColumnResolver resolver = FromCompiler.getResolver(tableRef); StatementContext context = new StatementContext(stmt, resolver, new Scan(), new SequenceManager(stmt)); -SelectStatement select = SelectStatement.SELECT_STAR; +SelectStatement select = SelectStatement.SELECT_ONE; if (filter != null) { Expression filterExpr = CalciteUtils.toExpression(filter, implementor); filterExpr = WhereOptimizer.pushKeyExpressionsToScan(context, select, filterExpr); 
http://git-wip-us.apache.org/repos/asf/phoenix/blob/57e1b485/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java index 7956612..97d1aff 100644 ---