hive git commit: HIVE-11775: Implement limit push down through union all in CBO (Pengcheng Xiong, reviewed by Laljo John Pullokkaran)

2015-12-18 Thread pxiong
Repository: hive
Updated Branches:
  refs/heads/branch-2.0 e4e91172b -> 422d58c25


HIVE-11775: Implement limit push down through union all in CBO (Pengcheng Xiong, reviewed by Laljo John Pullokkaran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/422d58c2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/422d58c2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/422d58c2

Branch: refs/heads/branch-2.0
Commit: 422d58c2538ba318d34d6e6b41460cdd16369d8b
Parents: e4e9117
Author: Pengcheng Xiong 
Authored: Fri Dec 18 23:44:40 2015 -0800
Committer: Pengcheng Xiong 
Committed: Fri Dec 18 23:45:41 2015 -0800

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   20 +-
 .../calcite/rules/HiveSortUnionReduceRule.java  |  109 ++
 .../hadoop/hive/ql/parse/CalcitePlanner.java|   10 +-
 .../clientpositive/cbo_SortUnionTransposeRule.q |  100 ++
 .../clientpositive/limit_join_transpose.q   |   16 +-
 .../clientpositive/tez_dynpart_hashjoin_3.q |6 +-
 .../cbo_SortUnionTransposeRule.q.out| 1196 ++
 7 files changed, 1432 insertions(+), 25 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/422d58c2/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 36e281a..5f1772b 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1307,16 +1307,16 @@ public class HiveConf extends Configuration {
 "we are increasing the number of files possibly by a big margin. So, we merge aggressively."),
 HIVEOPTCORRELATION("hive.optimize.correlation", false, "exploit intra-query correlations."),
 
-HIVE_OPTIMIZE_LIMIT_JOIN_TRANSPOSE("hive.optimize.limitjointranspose", false,
-"Whether to push a limit through left/right outer join. If the value is true and the size of the outer\n" +
-"input is reduced enough (as specified in hive.optimize.limitjointranspose.reduction), the limit is pushed\n" +
-"to the outer input; to remain semantically correct, the limit is kept on top of the join too."),
-HIVE_OPTIMIZE_LIMIT_JOIN_TRANSPOSE_REDUCTION_PERCENTAGE("hive.optimize.limitjointranspose.reductionpercentage", 1.0f,
-"When hive.optimize.limitjointranspose is true, this variable specifies the minimal reduction of the\n" +
-"size of the outer input of the join that we should get in order to apply the rule."),
-HIVE_OPTIMIZE_LIMIT_JOIN_TRANSPOSE_REDUCTION_TUPLES("hive.optimize.limitjointranspose.reductiontuples", (long) 0,
-"When hive.optimize.limitjointranspose is true, this variable specifies the minimal reduction in the\n" +
-"number of tuples of the outer input of the join that you should get in order to apply the rule."),
+HIVE_OPTIMIZE_LIMIT_TRANSPOSE("hive.optimize.limittranspose", false,
+"Whether to push a limit through left/right outer join or union. If the value is true and the size of the outer\n" +
+"input is reduced enough (as specified in hive.optimize.limittranspose.reduction), the limit is pushed\n" +
+"to the outer input or union; to remain semantically correct, the limit is kept on top of the join or the union too."),
+HIVE_OPTIMIZE_LIMIT_TRANSPOSE_REDUCTION_PERCENTAGE("hive.optimize.limittranspose.reductionpercentage", 1.0f,
+"When hive.optimize.limittranspose is true, this variable specifies the minimal reduction of the\n" +
+"size of the outer input of the join or input of the union that we should get in order to apply the rule."),
+HIVE_OPTIMIZE_LIMIT_TRANSPOSE_REDUCTION_TUPLES("hive.optimize.limittranspose.reductiontuples", (long) 0,
+"When hive.optimize.limittranspose is true, this variable specifies the minimal reduction in the\n" +
+"number of tuples of the outer input of the join or the input of the union that you should get in order to apply the rule."),
 
 HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME("hive.optimize.skewjoin.compiletime", false,
 "Whether to create a separate plan for skewed keys for the tables in the join.\n" +
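
For reference, a minimal hand-written sketch (not part of this commit) of toggling the renamed options through the HiveConf API; only the ConfVars names above are taken from the patch:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class LimitTransposeSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Enable limit push-down through outer joins and union all.
        conf.setBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_LIMIT_TRANSPOSE, true);
        // Only fire the rule when the input shrinks enough; see the
        // description strings above for the exact semantics.
        conf.setFloatVar(
            HiveConf.ConfVars.HIVE_OPTIMIZE_LIMIT_TRANSPOSE_REDUCTION_PERCENTAGE, 0.7f);
        conf.setLongVar(
            HiveConf.ConfVars.HIVE_OPTIMIZE_LIMIT_TRANSPOSE_REDUCTION_TUPLES, 1000L);
        System.out.println(
            conf.getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_LIMIT_TRANSPOSE));
      }
    }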

http://git-wip-us.apache.org/repos/asf/hive/blob/422d58c2/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveSortUnionReduceRule.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveSortUnionReduceRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveSortUnionReduceRule.java
new file mode 100644
index 000..0ec8bf1
--- /dev/null

hive git commit: HIVE-11775: Implement limit push down through union all in CBO (Pengcheng Xiong, reviewed by Laljo John Pullokkaran)

2015-12-18 Thread pxiong
Repository: hive
Updated Branches:
  refs/heads/master f1ecce036 -> 71536a2f8


HIVE-11775: Implement limit push down through union all in CBO (Pengcheng Xiong, reviewed by Laljo John Pullokkaran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/71536a2f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/71536a2f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/71536a2f

Branch: refs/heads/master
Commit: 71536a2f8295f61602e776e4e5773c7007a46b69
Parents: f1ecce0
Author: Pengcheng Xiong 
Authored: Fri Dec 18 23:44:40 2015 -0800
Committer: Pengcheng Xiong 
Committed: Fri Dec 18 23:44:40 2015 -0800

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   20 +-
 .../calcite/rules/HiveSortUnionReduceRule.java  |  109 ++
 .../hadoop/hive/ql/parse/CalcitePlanner.java|   10 +-
 .../clientpositive/cbo_SortUnionTransposeRule.q |  100 ++
 .../clientpositive/limit_join_transpose.q   |   16 +-
 .../clientpositive/tez_dynpart_hashjoin_3.q |6 +-
 .../cbo_SortUnionTransposeRule.q.out| 1196 ++
 7 files changed, 1432 insertions(+), 25 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/71536a2f/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 60ac0c0..9e8e2f5 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1312,16 +1312,16 @@ public class HiveConf extends Configuration {
 "we are increasing the number of files possibly by a big margin. So, we merge aggressively."),
 HIVEOPTCORRELATION("hive.optimize.correlation", false, "exploit intra-query correlations."),
 
-HIVE_OPTIMIZE_LIMIT_JOIN_TRANSPOSE("hive.optimize.limitjointranspose", false,
-"Whether to push a limit through left/right outer join. If the value is true and the size of the outer\n" +
-"input is reduced enough (as specified in hive.optimize.limitjointranspose.reduction), the limit is pushed\n" +
-"to the outer input; to remain semantically correct, the limit is kept on top of the join too."),
-HIVE_OPTIMIZE_LIMIT_JOIN_TRANSPOSE_REDUCTION_PERCENTAGE("hive.optimize.limitjointranspose.reductionpercentage", 1.0f,
-"When hive.optimize.limitjointranspose is true, this variable specifies the minimal reduction of the\n" +
-"size of the outer input of the join that we should get in order to apply the rule."),
-HIVE_OPTIMIZE_LIMIT_JOIN_TRANSPOSE_REDUCTION_TUPLES("hive.optimize.limitjointranspose.reductiontuples", (long) 0,
-"When hive.optimize.limitjointranspose is true, this variable specifies the minimal reduction in the\n" +
-"number of tuples of the outer input of the join that you should get in order to apply the rule."),
+HIVE_OPTIMIZE_LIMIT_TRANSPOSE("hive.optimize.limittranspose", false,
+"Whether to push a limit through left/right outer join or union. If the value is true and the size of the outer\n" +
+"input is reduced enough (as specified in hive.optimize.limittranspose.reduction), the limit is pushed\n" +
+"to the outer input or union; to remain semantically correct, the limit is kept on top of the join or the union too."),
+HIVE_OPTIMIZE_LIMIT_TRANSPOSE_REDUCTION_PERCENTAGE("hive.optimize.limittranspose.reductionpercentage", 1.0f,
+"When hive.optimize.limittranspose is true, this variable specifies the minimal reduction of the\n" +
+"size of the outer input of the join or input of the union that we should get in order to apply the rule."),
+HIVE_OPTIMIZE_LIMIT_TRANSPOSE_REDUCTION_TUPLES("hive.optimize.limittranspose.reductiontuples", (long) 0,
+"When hive.optimize.limittranspose is true, this variable specifies the minimal reduction in the\n" +
+"number of tuples of the outer input of the join or the input of the union that you should get in order to apply the rule."),
 
 HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME("hive.optimize.skewjoin.compiletime", false,
 "Whether to create a separate plan for skewed keys for the tables in the join.\n" +

http://git-wip-us.apache.org/repos/asf/hive/blob/71536a2f/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveSortUnionReduceRule.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveSortUnionReduceRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveSortUnionReduceRule.java
new file mode 100644
index 000..0ec8bf1
--- /dev/null
+
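
The body of HiveSortUnionReduceRule is cut off in the archive above. As a rough sketch of the technique the commit implements — copying a limit into each branch of a UNION ALL while keeping the original limit on top for correctness — a generic Calcite rule could look like this (class name and checks are illustrative, not the committed code):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.calcite.plan.RelOptRule;
    import org.apache.calcite.plan.RelOptRuleCall;
    import org.apache.calcite.rel.RelCollations;
    import org.apache.calcite.rel.RelNode;
    import org.apache.calcite.rel.core.Sort;
    import org.apache.calcite.rel.core.Union;

    public class SortUnionReduceRuleSketch extends RelOptRule {
      public SortUnionReduceRuleSketch() {
        super(operand(Sort.class, operand(Union.class, any())));
      }

      @Override
      public void onMatch(RelOptRuleCall call) {
        final Sort sort = call.rel(0);
        final Union union = call.rel(1);
        // Only a pure LIMIT over UNION ALL is safe to copy into each branch.
        if (!union.all || sort.fetch == null || sort.offset != null) {
          return;
        }
        final List<RelNode> newInputs = new ArrayList<>();
        for (RelNode input : union.getInputs()) {
          // Each branch only needs its first `fetch` rows; ordering is dropped.
          newInputs.add(sort.copy(
              sort.getTraitSet().replace(RelCollations.EMPTY), input,
              RelCollations.EMPTY, null, sort.fetch));
        }
        final Union newUnion = (Union) union.copy(union.getTraitSet(), newInputs, true);
        // The original sort/limit stays on top to keep the result correct.
        call.transformTo(sort.copy(sort.getTraitSet(), newUnion,
            sort.getCollation(), sort.offset, sort.fetch));
      }
    }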

hive git commit: HIVE-12685: Remove redundant hive-site.xml under common/src/test/resources/ (Wei Zheng, reviewed by Ashutosh Chauhan, Mohit Sabharwal)

2015-12-18 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 7eb14afc9 -> f1ecce036


HIVE-12685: Remove redundant hive-site.xml under common/src/test/resources/ (Wei Zheng, reviewed by Ashutosh Chauhan, Mohit Sabharwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f1ecce03
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f1ecce03
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f1ecce03

Branch: refs/heads/master
Commit: f1ecce036369098922393c8af03ee74d8bb60b44
Parents: 7eb14af
Author: Daniel Dai 
Authored: Fri Dec 18 17:51:40 2015 -0800
Committer: Daniel Dai 
Committed: Fri Dec 18 17:51:40 2015 -0800

--
 .../apache/hadoop/hive/conf/TestHiveConf.java   |  8 ++--
 common/src/test/resources/hive-site.xml | 42 
 data/conf/hive-site.xml | 17 
 3 files changed, 21 insertions(+), 46 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f1ecce03/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
--
diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
index cd472c7..365d500 100644
--- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
@@ -80,13 +80,13 @@ public class TestHiveConf {
 checkHiveConf(ConfVars.HIVESKEWJOINKEY.varname, "10");
 
 // ConfVar overridden in hive-site.xml
-checkHadoopConf(ConfVars.METASTORE_CONNECTION_DRIVER.varname, null);
-checkConfVar(ConfVars.METASTORE_CONNECTION_DRIVER, "org.apache.derby.jdbc.EmbeddedDriver");
-checkHiveConf(ConfVars.METASTORE_CONNECTION_DRIVER.varname, "hive-site.xml");
+checkHadoopConf(ConfVars.HIVETESTMODEDUMMYSTATAGGR.varname, null);
+checkConfVar(ConfVars.HIVETESTMODEDUMMYSTATAGGR, "");
+checkHiveConf(ConfVars.HIVETESTMODEDUMMYSTATAGGR.varname, "value2");
 
 // Property defined in hive-site.xml only
 checkHadoopConf("test.property1", null);
-checkHiveConf("test.property1", "hive-site.xml");
+checkHiveConf("test.property1", "value1");
 
 // Test HiveConf property variable substitution in hive-site.xml
 checkHiveConf("test.var.hiveconf.property", ConfVars.DEFAULTPARTITIONNAME.getDefaultValue());
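
A hand-written sketch (not from the patch) of what the retargeted assertions exercise, assuming data/conf/hive-site.xml is on the classpath:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class HiveSiteLookupSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Both values now come from data/conf/hive-site.xml (see below).
        System.out.println(conf.get("test.property1"));                  // value1
        System.out.println(conf.get("hive.test.dummystats.aggregator")); // value2
        // ${hive.exec.default.partition.name} is substituted when read.
        System.out.println(conf.get("test.var.hiveconf.property"));
      }
    }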

http://git-wip-us.apache.org/repos/asf/hive/blob/f1ecce03/common/src/test/resources/hive-site.xml
--
diff --git a/common/src/test/resources/hive-site.xml b/common/src/test/resources/hive-site.xml
deleted file mode 100644
index 0d5c834..000
--- a/common/src/test/resources/hive-site.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-
-
-
-
-
-
-
-
-<property>
-  <name>javax.jdo.option.ConnectionDriverName</name>
-  <value>hive-site.xml</value>
-  <description>Override ConfVar defined in HiveConf</description>
-</property>
-
-<property>
-  <name>test.property1</name>
-  <value>hive-site.xml</value>
-  <description>Test property defined in hive-site.xml only</description>
-</property>
-
-<property>
-  <name>test.var.hiveconf.property</name>
-  <value>${hive.exec.default.partition.name}</value>
-  <description>Test hiveconf property substitution</description>
-</property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/hive/blob/f1ecce03/data/conf/hive-site.xml
--
diff --git a/data/conf/hive-site.xml b/data/conf/hive-site.xml
index 84005b4..2a5f891 100644
--- a/data/conf/hive-site.xml
+++ b/data/conf/hive-site.xml
@@ -244,6 +244,23 @@
 </property>
 
 <property>
+  <name>test.var.hiveconf.property</name>
+  <value>${hive.exec.default.partition.name}</value>
+  <description>Test hiveconf property substitution</description>
+</property>
+
+<property>
+  <name>test.property1</name>
+  <value>value1</value>
+  <description>Test property defined in hive-site.xml only</description>
+</property>
+
+<property>
+  <name>hive.test.dummystats.aggregator</name>
+  <value>value2</value>
+</property>
+
+<property>
   <name>hive.fetch.task.conversion</name>
   <value>minimal</value>
 



hive git commit: HIVE-12697 Remove deprecated post option from webhcat test files (Aswathy Chellammal Sreekumar via Eugene Koifman)

2015-12-18 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-2.0 4da7ed913 -> e4e91172b


HIVE-12697 Remove deprecated post option from webhcat test files (Aswathy Chellammal Sreekumar via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e4e91172
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e4e91172
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e4e91172

Branch: refs/heads/branch-2.0
Commit: e4e91172ba0f234060a51261a4115fc2b406eab5
Parents: 4da7ed9
Author: Eugene Koifman 
Authored: Fri Dec 18 16:08:43 2015 -0800
Committer: Eugene Koifman 
Committed: Fri Dec 18 16:08:43 2015 -0800

--
 hcatalog/src/test/e2e/templeton/tests/ddl.conf  |  85 +++---
 .../src/test/e2e/templeton/tests/jobstatus.conf |  12 +-
 .../test/e2e/templeton/tests/jobsubmission.conf | 112 +--
 .../e2e/templeton/tests/jobsubmission2.conf |   4 +-
 .../tests/jobsubmission_streaming.conf  |  21 ++--
 .../templeton/tests/modifyConfiguration.conf|   8 +-
 6 files changed, 119 insertions(+), 123 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e4e91172/hcatalog/src/test/e2e/templeton/tests/ddl.conf
--
diff --git a/hcatalog/src/test/e2e/templeton/tests/ddl.conf b/hcatalog/src/test/e2e/templeton/tests/ddl.conf
index 23d3074..3f3d00e 100644
--- a/hcatalog/src/test/e2e/templeton/tests/ddl.conf
+++ b/hcatalog/src/test/e2e/templeton/tests/ddl.conf
@@ -42,9 +42,9 @@ $cfg =
 #drop table if exists
  'num' => 1,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=drop table if exists templetontest_tab2'],
+ 'post_options' => ['exec=drop table if exists templetontest_tab2'],
  'json_field_substr_match' => {'stderr' => 'OK'},
 
 },
@@ -52,63 +52,63 @@ $cfg =
 #create table
  'num' => 2,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
+ 'post_options' => ['exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
  'json_field_substr_match' => {'stderr' => 'OK', 'exitcode' => '^0$'}
},
{
 #show tables
  'num' => 3,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=show tables;'],
+ 'post_options' => ['exec=show tables;'],
  'json_field_substr_match' => {'stderr' => 'OK', 'stdout' => 'templetontest_tab2', 'exitcode' => '^0$'}
},
{
 #create table again, should fail
  'num' => 4,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
+ 'post_options' => ['exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
  'json_field_substr_match' => { 'exitcode' => '^1$'}
},
{
 #describe table
  'num' => 5,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=describe templetontest_tab2;'],
+ 'post_options' => ['exec=describe templetontest_tab2;'],
  'json_field_substr_match' => { 'stdout' => '.*i\s+int.*\n.*j.*bigint.*', 'exitcode' => '^0$', 'stderr' => 'OK'}
},
{
 #alter table - file format
  'num' => 6,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=alter table templetontest_tab2 SET FILEFORMAT rcfile;'],
+ 'post_options' => ['exec=alter table templetontest_tab2 SET FILEFORMAT rcfile;'],
  'json_field_substr_match' => { 'exitcode' => '^0$', 'stderr' => 'OK'}
},
{
 #alter table - add column
  'num' => 7,
  'method' => 'POST',
- 'url' => ':TEMPLETON_U
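
In effect the deprecated 'user.name' post option moves into the query string. A hand-rolled illustration of the new request shape (host, port, and user are placeholders, not from the patch):

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    public class TempletonDdlSketch {
      public static void main(String[] args) throws Exception {
        // user.name now travels as a query parameter, not a form field.
        URL url = new URL("http://localhost:50111/templeton/v1/ddl?user.name=hcat");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        String body = "exec=" + URLEncoder.encode("show tables;", "UTF-8");
        try (OutputStream os = conn.getOutputStream()) {
          os.write(body.getBytes(StandardCharsets.UTF_8));
        }
        System.out.println(conn.getResponseCode()); // the tests expect 200
      }
    }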

hive git commit: HIVE-12697 Remove deprecated post option from webhcat test files (Aswathy Chellammal Sreekumar via Eugene Koifman)

2015-12-18 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 ec50fc919 -> a5e49d56f


HIVE-12697 Remove deprecated post option from webhcat test files (Aswathy Chellammal Sreekumar via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a5e49d56
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a5e49d56
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a5e49d56

Branch: refs/heads/branch-1
Commit: a5e49d56fe7a9928eb35198cabd295bcc69ddb40
Parents: ec50fc9
Author: Eugene Koifman 
Authored: Fri Dec 18 16:05:34 2015 -0800
Committer: Eugene Koifman 
Committed: Fri Dec 18 16:05:34 2015 -0800

--
 hcatalog/src/test/e2e/templeton/tests/ddl.conf  |  85 +++---
 .../src/test/e2e/templeton/tests/jobstatus.conf |  12 +-
 .../test/e2e/templeton/tests/jobsubmission.conf | 112 +--
 .../e2e/templeton/tests/jobsubmission2.conf |   4 +-
 .../tests/jobsubmission_streaming.conf  |  21 ++--
 .../templeton/tests/modifyConfiguration.conf|   8 +-
 6 files changed, 119 insertions(+), 123 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a5e49d56/hcatalog/src/test/e2e/templeton/tests/ddl.conf
--
diff --git a/hcatalog/src/test/e2e/templeton/tests/ddl.conf b/hcatalog/src/test/e2e/templeton/tests/ddl.conf
index 23d3074..3f3d00e 100644
--- a/hcatalog/src/test/e2e/templeton/tests/ddl.conf
+++ b/hcatalog/src/test/e2e/templeton/tests/ddl.conf
@@ -42,9 +42,9 @@ $cfg =
 #drop table if exists
  'num' => 1,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=drop table if exists templetontest_tab2'],
+ 'post_options' => ['exec=drop table if exists templetontest_tab2'],
  'json_field_substr_match' => {'stderr' => 'OK'},
 
 },
@@ -52,63 +52,63 @@ $cfg =
 #create table
  'num' => 2,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
+ 'post_options' => ['exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
  'json_field_substr_match' => {'stderr' => 'OK', 'exitcode' => '^0$'}
},
{
 #show tables
  'num' => 3,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=show tables;'],
+ 'post_options' => ['exec=show tables;'],
  'json_field_substr_match' => {'stderr' => 'OK', 'stdout' => 'templetontest_tab2', 'exitcode' => '^0$'}
},
{
 #create table again, should fail
  'num' => 4,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
+ 'post_options' => ['exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
  'json_field_substr_match' => { 'exitcode' => '^1$'}
},
{
 #describe table
  'num' => 5,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=describe templetontest_tab2;'],
+ 'post_options' => ['exec=describe templetontest_tab2;'],
  'json_field_substr_match' => { 'stdout' => '.*i\s+int.*\n.*j.*bigint.*', 'exitcode' => '^0$', 'stderr' => 'OK'}
},
{
 #alter table - file format
  'num' => 6,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=alter table templetontest_tab2 SET FILEFORMAT rcfile;'],
+ 'post_options' => ['exec=alter table templetontest_tab2 SET FILEFORMAT rcfile;'],
  'json_field_substr_match' => { 'exitcode' => '^0$', 'stderr' => 'OK'}
},
{
 #alter table - add column
  'num' => 7,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/

hive git commit: HIVE-12697 Remove deprecated post option from webhcat test files (Aswathy Chellammal Sreekumar via Eugene Koifman)

2015-12-18 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master e84da8c9b -> 7eb14afc9


HIVE-12697 Remove deprecated post option from webhcat test files (Aswathy Chellammal Sreekumar via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7eb14afc
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7eb14afc
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7eb14afc

Branch: refs/heads/master
Commit: 7eb14afc93a697c91c900410b5ca5c5a5c598b45
Parents: e84da8c
Author: Eugene Koifman 
Authored: Fri Dec 18 16:03:50 2015 -0800
Committer: Eugene Koifman 
Committed: Fri Dec 18 16:03:50 2015 -0800

--
 hcatalog/src/test/e2e/templeton/tests/ddl.conf  |  85 +++---
 .../src/test/e2e/templeton/tests/jobstatus.conf |  12 +-
 .../test/e2e/templeton/tests/jobsubmission.conf | 112 +--
 .../e2e/templeton/tests/jobsubmission2.conf |   4 +-
 .../tests/jobsubmission_streaming.conf  |  21 ++--
 .../templeton/tests/modifyConfiguration.conf|   8 +-
 6 files changed, 119 insertions(+), 123 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7eb14afc/hcatalog/src/test/e2e/templeton/tests/ddl.conf
--
diff --git a/hcatalog/src/test/e2e/templeton/tests/ddl.conf b/hcatalog/src/test/e2e/templeton/tests/ddl.conf
index 23d3074..3f3d00e 100644
--- a/hcatalog/src/test/e2e/templeton/tests/ddl.conf
+++ b/hcatalog/src/test/e2e/templeton/tests/ddl.conf
@@ -42,9 +42,9 @@ $cfg =
 #drop table if exists
  'num' => 1,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=drop table if exists templetontest_tab2'],
+ 'post_options' => ['exec=drop table if exists templetontest_tab2'],
  'json_field_substr_match' => {'stderr' => 'OK'},
 
 },
@@ -52,63 +52,63 @@ $cfg =
 #create table
  'num' => 2,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
+ 'post_options' => ['exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
  'json_field_substr_match' => {'stderr' => 'OK', 'exitcode' => '^0$'}
},
{
 #show tables
  'num' => 3,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=show tables;'],
+ 'post_options' => ['exec=show tables;'],
  'json_field_substr_match' => {'stderr' => 'OK', 'stdout' => 'templetontest_tab2', 'exitcode' => '^0$'}
},
{
 #create table again, should fail
  'num' => 4,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
+ 'post_options' => ['exec=create table templetontest_tab2(i int, j bigint) STORED AS rcfile;'],
  'json_field_substr_match' => { 'exitcode' => '^1$'}
},
{
 #describe table
  'num' => 5,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=describe templetontest_tab2;'],
+ 'post_options' => ['exec=describe templetontest_tab2;'],
  'json_field_substr_match' => { 'stdout' => '.*i\s+int.*\n.*j.*bigint.*', 'exitcode' => '^0$', 'stderr' => 'OK'}
},
{
 #alter table - file format
  'num' => 6,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl?user.name=:UNAME:',
  'status_code' => 200,
- 'post_options' => ['user.name=:UNAME:','exec=alter table templetontest_tab2 SET FILEFORMAT rcfile;'],
+ 'post_options' => ['exec=alter table templetontest_tab2 SET FILEFORMAT rcfile;'],
  'json_field_substr_match' => { 'exitcode' => '^0$', 'stderr' => 'OK'}
},
{
 #alter table - add column
  'num' => 7,
  'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/temp

hive git commit: Revert "HIVE-12429: Switch default Hive authorization to SQLStandardAuth in 2.0", unintended commit

2015-12-18 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 5a67b0a64 -> e84da8c9b


Revert "HIVE-12429: Switch default Hive authorization to SQLStandardAuth in 
2.0", unintended commit

This reverts commit 95d22735d73381458354e0ca79a2cb607f8e2150.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e84da8c9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e84da8c9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e84da8c9

Branch: refs/heads/master
Commit: e84da8c9b324ae9e971da824845e46f51f844a76
Parents: 5a67b0a
Author: Daniel Dai 
Authored: Fri Dec 18 15:48:11 2015 -0800
Committer: Daniel Dai 
Committed: Fri Dec 18 15:48:32 2015 -0800

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   2 +-
 .../cli/SemanticAnalysis/HCatAuthUtil.java  |   5 +-
 .../cli/SemanticAnalysis/TestHCatAuthUtil.java  |   4 +-
 .../SQLStdHiveAuthorizationValidator.java   |  11 ++
 .../parse/authorization/TestPrivilegesV1.java   |  13 +-
 .../TestSQLStdHiveAccessControllerCLI.java  |  16 +-
 .../authorization_cli_auth_enable.q |   7 +
 .../clientnegative/authorization_fail_1.q   |   1 -
 .../clientnegative/authorization_fail_2.q   |   1 -
 .../clientnegative/authorization_fail_3.q   |   1 -
 .../clientnegative/authorization_fail_4.q   |   1 -
 .../clientnegative/authorization_fail_5.q   |   3 +-
 .../clientnegative/authorization_fail_6.q   |   1 -
 .../clientnegative/authorization_fail_7.q   |   3 +-
 .../authorization_fail_create_db.q  |   1 -
 .../clientnegative/authorization_fail_drop_db.q |   1 -
 .../authorization_invalid_priv_v1.q |   1 -
 .../queries/clientnegative/authorization_part.q |   3 +-
 .../authorization_public_create.q   |   1 -
 .../clientnegative/authorization_public_drop.q  |   1 -
 .../clientnegative/authorization_role_case.q|   1 -
 .../clientnegative/authorize_grant_public.q |   1 -
 .../clientnegative/authorize_revoke_public.q|   1 -
 .../clientnegative/exim_22_export_authfail.q|   1 -
 .../exim_23_import_exist_authfail.q |   1 -
 .../exim_24_import_part_authfail.q  |   1 -
 .../exim_25_import_nonexist_authfail.q  |   1 -
 .../clientnegative/join_nonexistent_part.q  |   5 +
 .../clientnegative/load_exist_part_authfail.q   |   1 -
 .../clientnegative/load_nonpart_authfail.q  |   1 -
 .../queries/clientnegative/load_part_authfail.q |   1 -
 .../alter_rename_partition_authorization.q  |   1 -
 .../queries/clientpositive/authorization_1.q|   4 +-
 .../queries/clientpositive/authorization_2.q|   4 +-
 .../queries/clientpositive/authorization_3.q|   2 -
 .../queries/clientpositive/authorization_4.q|   4 +-
 .../queries/clientpositive/authorization_5.q|   2 -
 .../queries/clientpositive/authorization_6.q|   2 -
 .../queries/clientpositive/authorization_7.q|   4 +-
 .../queries/clientpositive/authorization_8.q|   1 -
 .../queries/clientpositive/authorization_9.q|   1 -
 ...orization_default_create_table_owner_privs.q |   1 -
 .../clientpositive/authorization_explain.q  |   1 -
 .../authorization_show_role_principals_v1.q |   1 -
 .../clientpositive/exim_21_export_authsuccess.q |   1 -
 .../exim_22_import_exist_authsuccess.q  |   1 -
 .../exim_23_import_part_authsuccess.q   |   1 -
 .../exim_24_import_nonexist_authsuccess.q   |   1 -
 ql/src/test/queries/clientpositive/index_auth.q |   2 -
 ql/src/test/queries/clientpositive/keyword_1.q  |   4 +-
 .../load_exist_part_authsuccess.q   |   1 -
 .../clientpositive/load_nonpart_authsuccess.q   |   1 -
 .../clientpositive/load_part_authsuccess.q  |   1 -
 ql/src/test/queries/clientpositive/show_roles.q |   2 -
 .../authorization_cli_auth_enable.q.out |   1 +
 .../clientnegative/join_nonexistent_part.q.out  |   1 +
 .../clientpositive/authorization_9.q.out| 180 ---
 .../authorization_explain.q.java1.7.out |   3 +
 .../authorization_show_grant.q.out  |  60 ---
 59 files changed, 62 insertions(+), 318 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e84da8c9/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 96a3fb5..60ac0c0 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1646,7 +1646,7 @@ public class HiveConf extends Configuration {
 HIVE_AUTHORIZATION_ENABLED("hive.security.authorization.enabled", false,
 "enable or disable the Hive client authorization

[2/2] hive git commit: HIVE-12633 : LLAP: package included serde jars (Sergey Shelukhin/Gopal V, reviewed by Gopal V)

2015-12-18 Thread sershe
HIVE-12633 : LLAP: package included serde jars (Sergey Shelukhin/Gopal V, reviewed by Gopal V)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4da7ed91
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4da7ed91
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4da7ed91

Branch: refs/heads/branch-2.0
Commit: 4da7ed913918792bd212109d8c4f72678136d979
Parents: 1d5e9c9
Author: Sergey Shelukhin 
Authored: Fri Dec 18 15:36:19 2015 -0800
Committer: Sergey Shelukhin 
Committed: Fri Dec 18 15:36:35 2015 -0800

--
 .../hive/llap/cli/LlapOptionsProcessor.java | 16 +++-
 .../hadoop/hive/llap/cli/LlapServiceDriver.java | 43 +++-
 2 files changed, 55 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4da7ed91/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
--
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
index 8fd615c..58ef472 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
@@ -44,10 +44,11 @@ public class LlapOptionsProcessor {
 private final long cache;
 private final long size;
 private final long xmx;
+private final String jars;
 private final Properties conf;
 
 public LlapOptions(String name, int instances, String directory, int executors, long cache,
-long size, long xmx, @Nonnull Properties hiveconf) throws ParseException {
+long size, long xmx, String jars, @Nonnull Properties hiveconf) throws ParseException {
   if (instances <= 0) {
 throw new ParseException("Invalid configuration: " + instances
 + " (should be greater than 0)");
@@ -59,6 +60,7 @@ public class LlapOptionsProcessor {
   this.cache = cache;
   this.size = size;
   this.xmx = xmx;
+  this.jars = jars;
   this.conf = hiveconf;
 }
 
@@ -90,6 +92,10 @@ public class LlapOptionsProcessor {
   return xmx;
 }
 
+public String getAuxJars() {
+  return jars;
+}
+
 public Properties getConfig() {
   return conf;
 }
@@ -134,6 +140,10 @@ public class LlapOptionsProcessor {
 options.addOption(OptionBuilder.hasArg().withArgName("xmx").withLongOpt("xmx")
 .withDescription("working memory size").create('w'));
 
+options.addOption(OptionBuilder.hasArg().withArgName("auxjars").withLongOpt("auxjars")
+.withDescription("additional jars to package (by default, JSON and HBase SerDe jars"
++ " are packaged if available)").create('j'));
+
 // -hiveconf x=y
 options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("property=value")
 .withLongOpt("hiveconf").withDescription("Use value for given property").create());
@@ -156,6 +166,7 @@ public class LlapOptionsProcessor {
 
 int instances = Integer.parseInt(commandLine.getOptionValue("instances"));
 String directory = commandLine.getOptionValue("directory");
+String jars = commandLine.getOptionValue("auxjars");
 
 String name = commandLine.getOptionValue("name", null);
 
@@ -174,7 +185,8 @@ public class LlapOptionsProcessor {
 
 // loglevel, chaosmonkey & args are parsed by the python processor
 
-return new LlapOptions(name, instances, directory, executors, cache, size, xmx, hiveconf);
+return new LlapOptions(
+name, instances, directory, executors, cache, size, xmx, jars, hiveconf);
 
   }
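
A quick hand-written sketch of the new plumbing end to end; the argument values are made up, and it assumes the inner class is accessible as LlapOptionsProcessor.LlapOptions:

    import java.util.Properties;

    import org.apache.commons.cli.ParseException;
    import org.apache.hadoop.hive.llap.cli.LlapOptionsProcessor.LlapOptions;

    public class AuxJarsSketch {
      public static void main(String[] args) throws ParseException {
        // The new "jars" argument rides along with the existing sizing knobs.
        LlapOptions opts = new LlapOptions("llap0", 2, "/tmp/llap", 4,
            1024L, 2048L, 4096L, "/opt/serdes/json-serde.jar", new Properties());
        System.out.println(opts.getAuxJars()); // /opt/serdes/json-serde.jar
      }
    }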
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4da7ed91/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
--
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
index 08d573b..8e5377f 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.llap.cli;
 
 import java.io.OutputStreamWriter;
 import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -45,6 +47,9 @@ import com.google.common.base.Preconditions;
 public class LlapServiceDriver {
 
  protected static final Logger LOG = LoggerFactory.getLogger(LlapServiceDriver.class.getName());
+  private static final String[] DEFAULT_AUX_CLASSES = new String[] {
+"org.apache.hive.hcatalo

[1/2] hive git commit: HIVE-12633 : LLAP: package included serde jars (Sergey Shelukhin/Gopal V, reviewed by Gopal V)

2015-12-18 Thread sershe
Repository: hive
Updated Branches:
  refs/heads/branch-2.0 1d5e9c96c -> 4da7ed913
  refs/heads/master 7df62023f -> 5a67b0a64


HIVE-12633 : LLAP: package included serde jars (Sergey Shelukhin/Gopal V, reviewed by Gopal V)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5a67b0a6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5a67b0a6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5a67b0a6

Branch: refs/heads/master
Commit: 5a67b0a64bee7721330423a493a21e16fc2ec0b1
Parents: 7df6202
Author: Sergey Shelukhin 
Authored: Fri Dec 18 15:36:19 2015 -0800
Committer: Sergey Shelukhin 
Committed: Fri Dec 18 15:36:19 2015 -0800

--
 .../hive/llap/cli/LlapOptionsProcessor.java | 16 +++-
 .../hadoop/hive/llap/cli/LlapServiceDriver.java | 43 +++-
 2 files changed, 55 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5a67b0a6/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
--
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
index 8fd615c..58ef472 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
@@ -44,10 +44,11 @@ public class LlapOptionsProcessor {
 private final long cache;
 private final long size;
 private final long xmx;
+private final String jars;
 private final Properties conf;
 
 public LlapOptions(String name, int instances, String directory, int executors, long cache,
-long size, long xmx, @Nonnull Properties hiveconf) throws ParseException {
+long size, long xmx, String jars, @Nonnull Properties hiveconf) throws ParseException {
   if (instances <= 0) {
 throw new ParseException("Invalid configuration: " + instances
 + " (should be greater than 0)");
@@ -59,6 +60,7 @@ public class LlapOptionsProcessor {
   this.cache = cache;
   this.size = size;
   this.xmx = xmx;
+  this.jars = jars;
   this.conf = hiveconf;
 }
 
@@ -90,6 +92,10 @@ public class LlapOptionsProcessor {
   return xmx;
 }
 
+public String getAuxJars() {
+  return jars;
+}
+
 public Properties getConfig() {
   return conf;
 }
@@ -134,6 +140,10 @@ public class LlapOptionsProcessor {
 options.addOption(OptionBuilder.hasArg().withArgName("xmx").withLongOpt("xmx")
 .withDescription("working memory size").create('w'));
 
+options.addOption(OptionBuilder.hasArg().withArgName("auxjars").withLongOpt("auxjars")
+.withDescription("additional jars to package (by default, JSON and HBase SerDe jars"
++ " are packaged if available)").create('j'));
+
 // -hiveconf x=y
 options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("property=value")
 .withLongOpt("hiveconf").withDescription("Use value for given property").create());
@@ -156,6 +166,7 @@ public class LlapOptionsProcessor {
 
 int instances = Integer.parseInt(commandLine.getOptionValue("instances"));
 String directory = commandLine.getOptionValue("directory");
+String jars = commandLine.getOptionValue("auxjars");
 
 String name = commandLine.getOptionValue("name", null);
 
@@ -174,7 +185,8 @@ public class LlapOptionsProcessor {
 
 // loglevel, chaosmonkey & args are parsed by the python processor
 
-return new LlapOptions(name, instances, directory, executors, cache, size, xmx, hiveconf);
+return new LlapOptions(
+name, instances, directory, executors, cache, size, xmx, jars, hiveconf);
 
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/5a67b0a6/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
--
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
index 08d573b..8e5377f 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.llap.cli;
 
 import java.io.OutputStreamWriter;
 import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -45,6 +47,9 @@ import com.google.common.base.Preconditions;
 public class LlapServiceDriver {
 
  protected static final Logger LOG = LoggerFactory.getLogger(LlapServiceDriver.class.getName());

[5/7] hive git commit: HIVE-12075 : add analyze command to explicitly cache file metadata in HBase metastore (Sergey Shelukhin, reviewed by Alan Gates)

2015-12-18 Thread sershe
http://git-wip-us.apache.org/repos/asf/hive/blob/7df62023/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
--
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
index cea9000..62a2007 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
@@ -153,6 +153,7 @@ class ThriftHiveMetastoreIf : virtual public ::facebook::fb303::FacebookService
   virtual void get_file_metadata(GetFileMetadataResult& _return, const GetFileMetadataRequest& req) = 0;
   virtual void put_file_metadata(PutFileMetadataResult& _return, const PutFileMetadataRequest& req) = 0;
   virtual void clear_file_metadata(ClearFileMetadataResult& _return, const ClearFileMetadataRequest& req) = 0;
+  virtual void cache_file_metadata(CacheFileMetadataResult& _return, const CacheFileMetadataRequest& req) = 0;
 };
 
 class ThriftHiveMetastoreIfFactory : virtual public ::facebook::fb303::FacebookServiceIfFactory {
@@ -598,6 +599,9 @@ class ThriftHiveMetastoreNull : virtual public ThriftHiveMetastoreIf , virtual p
   void clear_file_metadata(ClearFileMetadataResult& /* _return */, const ClearFileMetadataRequest& /* req */) {
 return;
   }
+  void cache_file_metadata(CacheFileMetadataResult& /* _return */, const CacheFileMetadataRequest& /* req */) {
+return;
+  }
 };
 
 typedef struct _ThriftHiveMetastore_getMetaConf_args__isset {
@@ -17023,6 +17027,110 @@ class ThriftHiveMetastore_clear_file_metadata_presult {
 
 };
 
+typedef struct _ThriftHiveMetastore_cache_file_metadata_args__isset {
+  _ThriftHiveMetastore_cache_file_metadata_args__isset() : req(false) {}
+  bool req :1;
+} _ThriftHiveMetastore_cache_file_metadata_args__isset;
+
+class ThriftHiveMetastore_cache_file_metadata_args {
+ public:
+
+  ThriftHiveMetastore_cache_file_metadata_args(const ThriftHiveMetastore_cache_file_metadata_args&);
+  ThriftHiveMetastore_cache_file_metadata_args& operator=(const ThriftHiveMetastore_cache_file_metadata_args&);
+  ThriftHiveMetastore_cache_file_metadata_args() {
+  }
+
+  virtual ~ThriftHiveMetastore_cache_file_metadata_args() throw();
+  CacheFileMetadataRequest req;
+
+  _ThriftHiveMetastore_cache_file_metadata_args__isset __isset;
+
+  void __set_req(const CacheFileMetadataRequest& val);
+
+  bool operator == (const ThriftHiveMetastore_cache_file_metadata_args & rhs) const
+  {
+if (!(req == rhs.req))
+  return false;
+return true;
+  }
+  bool operator != (const ThriftHiveMetastore_cache_file_metadata_args &rhs) const {
+return !(*this == rhs);
+  }
+
+  bool operator < (const ThriftHiveMetastore_cache_file_metadata_args & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+};
+
+
+class ThriftHiveMetastore_cache_file_metadata_pargs {
+ public:
+
+
+  virtual ~ThriftHiveMetastore_cache_file_metadata_pargs() throw();
+  const CacheFileMetadataRequest* req;
+
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+};
+
+typedef struct _ThriftHiveMetastore_cache_file_metadata_result__isset {
+  _ThriftHiveMetastore_cache_file_metadata_result__isset() : success(false) {}
+  bool success :1;
+} _ThriftHiveMetastore_cache_file_metadata_result__isset;
+
+class ThriftHiveMetastore_cache_file_metadata_result {
+ public:
+
+  ThriftHiveMetastore_cache_file_metadata_result(const ThriftHiveMetastore_cache_file_metadata_result&);
+  ThriftHiveMetastore_cache_file_metadata_result& operator=(const ThriftHiveMetastore_cache_file_metadata_result&);
+  ThriftHiveMetastore_cache_file_metadata_result() {
+  }
+
+  virtual ~ThriftHiveMetastore_cache_file_metadata_result() throw();
+  CacheFileMetadataResult success;
+
+  _ThriftHiveMetastore_cache_file_metadata_result__isset __isset;
+
+  void __set_success(const CacheFileMetadataResult& val);
+
+  bool operator == (const ThriftHiveMetastore_cache_file_metadata_result & rhs) const
+  {
+if (!(success == rhs.success))
+  return false;
+return true;
+  }
+  bool operator != (const ThriftHiveMetastore_cache_file_metadata_result &rhs) const {
+return !(*this == rhs);
+  }
+
+  bool operator < (const ThriftHiveMetastore_cache_file_metadata_result & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+};
+
+typedef struct _ThriftHiveMetastore_cache_file_metadata_presult__isset {
+  _ThriftHiveMetastore_cache_file_metadata_presult__isset() : success(false) {}
+  bool success :1;
+} _ThriftHiveMetastore_cache_file_metadata_presult__isset;
+
+class ThriftHiveMetastore_cache_file_metadata_presult {
+ public:
+
+
+  virtual ~ThriftHiveMetastore_cache_file_metadata_presult() throw();
+  CacheFileMetadataResult* success;
+
+  _ThriftHiveMetastore_c

[7/7] hive git commit: HIVE-12075: Implement limit push down through union all in CBO — correction: HIVE-12075 : add analyze command to explicitly cache file metadata in HBase metastore (Sergey Shelukhin, reviewed by Alan Gates)

2015-12-18 Thread sershe
HIVE-12075 : add analyze command to explicitly cache file metadata in HBase metastore (Sergey Shelukhin, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7df62023
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7df62023
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7df62023

Branch: refs/heads/master
Commit: 7df62023f8a328046055486de46121fd16b7458a
Parents: 542eaf6
Author: Sergey Shelukhin 
Authored: Fri Dec 18 14:41:29 2015 -0800
Committer: Sergey Shelukhin 
Committed: Fri Dec 18 14:41:29 2015 -0800

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |5 +-
 .../test/resources/testconfiguration.properties |1 +
 metastore/if/hive_metastore.thrift  |   17 +-
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  | 2257 ++
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h|  126 +
 .../ThriftHiveMetastore_server.skeleton.cpp |5 +
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |  575 +++--
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |  125 +-
 .../metastore/api/CacheFileMetadataRequest.java |  702 ++
 .../metastore/api/CacheFileMetadataResult.java  |  386 +++
 .../metastore/api/FileMetadataExprType.java |4 -
 .../metastore/api/PutFileMetadataRequest.java   |  124 +-
 .../hive/metastore/api/ThriftHiveMetastore.java | 1478 +---
 .../gen-php/metastore/ThriftHiveMetastore.php   |  215 ++
 .../src/gen/thrift/gen-php/metastore/Types.php  |  242 ++
 .../hive_metastore/ThriftHiveMetastore-remote   |7 +
 .../hive_metastore/ThriftHiveMetastore.py   |  189 ++
 .../gen/thrift/gen-py/hive_metastore/ttypes.py  |  190 +-
 .../gen/thrift/gen-rb/hive_metastore_types.rb   |   48 +-
 .../gen/thrift/gen-rb/thrift_hive_metastore.rb  |   54 +
 .../hadoop/hive/metastore/FileFormatProxy.java  |   64 +
 .../hive/metastore/FileMetadataHandler.java |   84 +-
 .../hive/metastore/FileMetadataManager.java |  129 +
 .../hadoop/hive/metastore/HiveMetaStore.java|  140 +-
 .../hive/metastore/HiveMetaStoreClient.java |   17 +
 .../hadoop/hive/metastore/IMetaStoreClient.java |3 +
 .../hadoop/hive/metastore/MetaStoreUtils.java   |6 +-
 .../hadoop/hive/metastore/ObjectStore.java  |   17 +-
 .../metastore/PartitionExpressionProxy.java |   36 +-
 .../apache/hadoop/hive/metastore/RawStore.java  |7 +-
 .../filemeta/OrcFileMetadataHandler.java|   24 +-
 .../hive/metastore/hbase/HBaseReadWrite.java|   60 +-
 .../hadoop/hive/metastore/hbase/HBaseStore.java |   50 +-
 .../hive/metastore/hbase/MetadataStore.java |   52 +
 .../DummyRawStoreControlledCommit.java  |8 +-
 .../DummyRawStoreForJdoConnection.java  |8 +-
 .../MockPartitionExpressionForMetastore.java|9 +-
 .../hadoop/hive/metastore/TestObjectStore.java  |9 +-
 .../hadoop/hive/metastore/hbase/MockUtils.java  |   10 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   12 +
 .../hive/ql/io/orc/OrcFileFormatProxy.java  |   74 +
 .../apache/hadoop/hive/ql/metadata/Hive.java|   13 +
 .../ppr/PartitionExpressionForMetastore.java|   54 +-
 .../hive/ql/parse/AnalyzeCommandUtils.java  |   57 +
 .../ql/parse/ColumnStatsSemanticAnalyzer.java   |   36 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java  |   27 +-
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |1 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g|8 +-
 .../hive/ql/parse/SemanticAnalyzerFactory.java  |2 +
 .../hadoop/hive/ql/plan/CacheMetadataDesc.java  |   58 +
 .../org/apache/hadoop/hive/ql/plan/DDLWork.java |   14 +
 .../hadoop/hive/ql/plan/HiveOperation.java  |1 +
 .../queries/clientpositive/stats_filemetadata.q |   17 +
 .../clientpositive/tez/stats_filemetadata.q.out |   54 +
 .../gen-py/hive_service/ThriftHive-remote   |   21 +
 .../org/apache/hadoop/hive/io/HdfsUtils.java|   61 +
 56 files changed, 6440 insertions(+), 1553 deletions(-)
--
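
The parser changes above (HiveLexer.g, HiveParser.g, stats_filemetadata.q) add an ANALYZE variant for this. A hedged JDBC sketch follows; the statement text is inferred from those files, and the connection details and table name are placeholders:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class CacheMetadataSketch {
      public static void main(String[] args) throws Exception {
        try (Connection c = DriverManager.getConnection(
                 "jdbc:hive2://localhost:10000/default");
             Statement s = c.createStatement()) {
          // Explicitly cache file metadata for a table in the HBase metastore.
          s.execute("analyze table web_logs cache metadata");
        }
      }
    }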


http://git-wip-us.apache.org/repos/asf/hive/blob/7df62023/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 67c4213..96a3fb5 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -200,7 +200,8 @@ public class HiveConf extends Configuration {
   HiveConf.ConfVars.METASTORE_HBASE_AGGR_STATS_CACHE_ENTRIES,
   HiveConf.ConfVars.METASTORE_HBASE_AGGR_STATS_MEMORY_TTL,
   HiveConf.ConfVars.METASTORE_HBASE_AGGR_STATS_INVALIDATOR_FREQUENCY,
-  HiveConf.ConfVars.METASTORE_HBASE_AGGR_STATS_HBASE_TTL
+  HiveConf.ConfVars.METASTORE_HBASE_AGGR_STATS_HBAS

[3/7] hive git commit: HIVE-12075 : add analyze command to explicitly cache file metadata in HBase metastore (Sergey Shelukhin, reviewed by Alan Gates)

2015-12-18 Thread sershe
http://git-wip-us.apache.org/repos/asf/hive/blob/7df62023/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
--
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index 4690093..47b7afa 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -304,6 +304,8 @@ public class ThriftHiveMetastore {
 
 public ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req) throws org.apache.thrift.TException;
 
+public CacheFileMetadataResult cache_file_metadata(CacheFileMetadataRequest req) throws org.apache.thrift.TException;
+
   }
 
  public interface AsyncIface extends com.facebook.fb303.FacebookService.AsyncIface {
@@ -570,6 +572,8 @@ public class ThriftHiveMetastore {
 
 public void clear_file_metadata(ClearFileMetadataRequest req, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
 
+public void cache_file_metadata(CacheFileMetadataRequest req, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
   }
 
  public static class Client extends com.facebook.fb303.FacebookService.Client implements Iface {
@@ -4425,6 +4429,29 @@ public class ThriftHiveMetastore {
  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "clear_file_metadata failed: unknown result");
 }
 
+public CacheFileMetadataResult cache_file_metadata(CacheFileMetadataRequest req) throws org.apache.thrift.TException
+{
+  send_cache_file_metadata(req);
+  return recv_cache_file_metadata();
+}
+
+public void send_cache_file_metadata(CacheFileMetadataRequest req) throws org.apache.thrift.TException
+{
+  cache_file_metadata_args args = new cache_file_metadata_args();
+  args.setReq(req);
+  sendBase("cache_file_metadata", args);
+}
+
+public CacheFileMetadataResult recv_cache_file_metadata() throws org.apache.thrift.TException
+{
+  cache_file_metadata_result result = new cache_file_metadata_result();
+  receiveBase(result, "cache_file_metadata");
+  if (result.isSetSuccess()) {
+return result.success;
+  }
+  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "cache_file_metadata failed: unknown result");
+}
+
   }
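
Hedged usage of the new synchronous call; the request setters are inferred from Thrift's usual codegen conventions and hive_metastore.thrift (not shown in this excerpt), and 'protocol' stands in for an open metastore connection:

    import org.apache.hadoop.hive.metastore.api.CacheFileMetadataRequest;
    import org.apache.hadoop.hive.metastore.api.CacheFileMetadataResult;
    import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
    import org.apache.thrift.protocol.TProtocol;

    public class CacheFileMetadataClientSketch {
      static CacheFileMetadataResult cacheTable(TProtocol protocol) throws Exception {
        ThriftHiveMetastore.Client client = new ThriftHiveMetastore.Client(protocol);
        CacheFileMetadataRequest req = new CacheFileMetadataRequest();
        req.setDbName("default");   // field names assumed from thrift conventions
        req.setTblName("web_logs"); // placeholder table
        return client.cache_file_metadata(req);
      }
    }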
  public static class AsyncClient extends com.facebook.fb303.FacebookService.AsyncClient implements AsyncIface {
 public static class Factory implements org.apache.thrift.async.TAsyncClientFactory {
@@ -9097,6 +9124,38 @@ public class ThriftHiveMetastore {
   }
 }
 
+public void cache_file_metadata(CacheFileMetadataRequest req, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+  checkReady();
+  cache_file_metadata_call method_call = new cache_file_metadata_call(req, resultHandler, this, ___protocolFactory, ___transport);
+  this.___currentMethod = method_call;
+  ___manager.call(method_call);
+}
+
+public static class cache_file_metadata_call extends org.apache.thrift.async.TAsyncMethodCall {
+  private CacheFileMetadataRequest req;
+  public cache_file_metadata_call(CacheFileMetadataRequest req, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+super(client, protocolFactory, transport, resultHandler, false);
+this.req = req;
+  }
+
+  public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("cache_file_metadata", org.apache.thrift.protocol.TMessageType.CALL, 0));
+cache_file_metadata_args args = new cache_file_metadata_args();
+args.setReq(req);
+args.write(prot);
+prot.writeMessageEnd();
+  }
+
+  public CacheFileMetadataResult getResult() throws org.apache.thrift.TException {
+if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+  throw new IllegalStateException("Method call not finished!");
+}
+org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+org.apache.thrift.protocol.TProtocol prot = 

[1/7] hive git commit: HIVE-12075 : add analyze command to explicitly cache file metadata in HBase metastore (Sergey Shelukhin, reviewed by Alan Gates)

2015-12-18 Thread sershe
Repository: hive
Updated Branches:
  refs/heads/master 542eaf6bc -> 7df62023f


http://git-wip-us.apache.org/repos/asf/hive/blob/7df62023/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
--
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
index 9089d1c..1157033 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.metastore;
 
-import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -25,6 +24,7 @@ import java.util.List;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.InvalidInputException;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
@@ -71,12 +71,17 @@ public class TestObjectStore {
 }
 
 @Override
+public FileMetadataExprType getMetadataType(String inputFormat) {
+  return null;
+}
+
+@Override
 public SearchArgument createSarg(byte[] expr) {
   return null;
 }
 
 @Override
-public ByteBuffer applySargToFileMetadata(SearchArgument sarg, ByteBuffer byteBuffer) {
+public FileFormatProxy getFileFormatProxy(FileMetadataExprType type) {
   return null;
 }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/7df62023/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/MockUtils.java
--
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/MockUtils.java b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/MockUtils.java
index 983129a..784648a 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/MockUtils.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/MockUtils.java
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hive.metastore.hbase;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
@@ -29,7 +31,9 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.FileFormatProxy;
 import org.apache.hadoop.hive.metastore.PartitionExpressionProxy;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -73,10 +77,14 @@ public class MockUtils {
 }
 
 @Override
-public ByteBuffer applySargToFileMetadata(SearchArgument sarg, ByteBuffer 
byteBuffer) {
+public FileMetadataExprType getMetadataType(String inputFormat) {
   return null;
 }
 
+@Override
+public FileFormatProxy getFileFormatProxy(FileMetadataExprType type) {
+  return null;
+}
   }
 
   static HBaseStore init(Configuration conf, HTableInterface htable,

http://git-wip-us.apache.org/repos/asf/hive/blob/7df62023/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 290f489..30cae88 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -104,6 +104,7 @@ import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
+import org.apache.hadoop.hive.ql.plan.CacheMetadataDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
@@ -518,6 +519,11 @@ public class DDLTask extends Task implements 
Serializable {
   if (alterTableExchangePartition != null) {
 return exchangeTablePartition(db, alterTableExchangePartition);
   }
+
+  CacheMetadataDesc cacheMetadataDesc = work.getCacheMetadataDesc();
+  if (cacheMetadataDesc != null) {
+return ca

[2/7] hive git commit: HIVE-12075 : add analyze command to explicitly cache file metadata in HBase metastore (Sergey Shelukhin, reviewed by Alan Gates)

2015-12-18 Thread sershe
http://git-wip-us.apache.org/repos/asf/hive/blob/7df62023/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
--
diff --git a/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php 
b/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
index 34c2205..75f4a47 100644
--- a/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
+++ b/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
@@ -1051,6 +1051,11 @@ interface ThriftHiveMetastoreIf extends 
\FacebookServiceIf {
* @return \metastore\ClearFileMetadataResult
*/
   public function clear_file_metadata(\metastore\ClearFileMetadataRequest 
$req);
+  /**
+   * @param \metastore\CacheFileMetadataRequest $req
+   * @return \metastore\CacheFileMetadataResult
+   */
+  public function cache_file_metadata(\metastore\CacheFileMetadataRequest 
$req);
 }
 
 class ThriftHiveMetastoreClient extends \FacebookServiceClient implements 
\metastore\ThriftHiveMetastoreIf {
@@ -8559,6 +8564,57 @@ class ThriftHiveMetastoreClient extends 
\FacebookServiceClient implements \metas
 throw new \Exception("clear_file_metadata failed: unknown result");
   }
 
+  public function cache_file_metadata(\metastore\CacheFileMetadataRequest $req)
+  {
+$this->send_cache_file_metadata($req);
+return $this->recv_cache_file_metadata();
+  }
+
+  public function send_cache_file_metadata(\metastore\CacheFileMetadataRequest 
$req)
+  {
+$args = new \metastore\ThriftHiveMetastore_cache_file_metadata_args();
+$args->req = $req;
+$bin_accel = ($this->output_ instanceof TBinaryProtocolAccelerated) && 
function_exists('thrift_protocol_write_binary');
+if ($bin_accel)
+{
+  thrift_protocol_write_binary($this->output_, 'cache_file_metadata', 
TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite());
+}
+else
+{
+  $this->output_->writeMessageBegin('cache_file_metadata', 
TMessageType::CALL, $this->seqid_);
+  $args->write($this->output_);
+  $this->output_->writeMessageEnd();
+  $this->output_->getTransport()->flush();
+}
+  }
+
+  public function recv_cache_file_metadata()
+  {
+$bin_accel = ($this->input_ instanceof TBinaryProtocolAccelerated) && 
function_exists('thrift_protocol_read_binary');
+if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, 
'\metastore\ThriftHiveMetastore_cache_file_metadata_result', 
$this->input_->isStrictRead());
+else
+{
+  $rseqid = 0;
+  $fname = null;
+  $mtype = 0;
+
+  $this->input_->readMessageBegin($fname, $mtype, $rseqid);
+  if ($mtype == TMessageType::EXCEPTION) {
+$x = new TApplicationException();
+$x->read($this->input_);
+$this->input_->readMessageEnd();
+throw $x;
+  }
+  $result = new 
\metastore\ThriftHiveMetastore_cache_file_metadata_result();
+  $result->read($this->input_);
+  $this->input_->readMessageEnd();
+}
+if ($result->success !== null) {
+  return $result->success;
+}
+throw new \Exception("cache_file_metadata failed: unknown result");
+  }
+
 }
 
 // HELPER FUNCTIONS AND STRUCTURES
@@ -39493,4 +39549,163 @@ class ThriftHiveMetastore_clear_file_metadata_result {
 
 }
 
+class ThriftHiveMetastore_cache_file_metadata_args {
+  static $_TSPEC;
+
+  /**
+   * @var \metastore\CacheFileMetadataRequest
+   */
+  public $req = null;
+
+  public function __construct($vals=null) {
+if (!isset(self::$_TSPEC)) {
+  self::$_TSPEC = array(
+1 => array(
+  'var' => 'req',
+  'type' => TType::STRUCT,
+  'class' => '\metastore\CacheFileMetadataRequest',
+  ),
+);
+}
+if (is_array($vals)) {
+  if (isset($vals['req'])) {
+$this->req = $vals['req'];
+  }
+}
+  }
+
+  public function getName() {
+return 'ThriftHiveMetastore_cache_file_metadata_args';
+  }
+
+  public function read($input)
+  {
+$xfer = 0;
+$fname = null;
+$ftype = 0;
+$fid = 0;
+$xfer += $input->readStructBegin($fname);
+while (true)
+{
+  $xfer += $input->readFieldBegin($fname, $ftype, $fid);
+  if ($ftype == TType::STOP) {
+break;
+  }
+  switch ($fid)
+  {
+case 1:
+  if ($ftype == TType::STRUCT) {
+$this->req = new \metastore\CacheFileMetadataRequest();
+$xfer += $this->req->read($input);
+  } else {
+$xfer += $input->skip($ftype);
+  }
+  break;
+default:
+  $xfer += $input->skip($ftype);
+  break;
+  }
+  $xfer += $input->readFieldEnd();
+}
+$xfer += $input->readStructEnd();
+return $xfer;
+  }
+
+  public function write($output) {
+$xfer = 0;
+$xfer += 
$output->writeStructBegin('ThriftHiveMetastore_cache_file_metadata_args');
+if ($this->req !== null) {
+  if (!is_object($thi

[6/7] hive git commit: HIVE-12075 : add analyze command to explicitly cache file metadata in HBase metastore (Sergey Shelukhin, reviewed by Alan Gates)

2015-12-18 Thread sershe
http://git-wip-us.apache.org/repos/asf/hive/blob/7df62023/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
--
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp 
b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
index 0443f80..22bea87 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
@@ -1240,14 +1240,14 @@ uint32_t 
ThriftHiveMetastore_get_databases_result::read(::apache::thrift::protoc
 if (ftype == ::apache::thrift::protocol::T_LIST) {
   {
 this->success.clear();
-uint32_t _size716;
-::apache::thrift::protocol::TType _etype719;
-xfer += iprot->readListBegin(_etype719, _size716);
-this->success.resize(_size716);
-uint32_t _i720;
-for (_i720 = 0; _i720 < _size716; ++_i720)
+uint32_t _size721;
+::apache::thrift::protocol::TType _etype724;
+xfer += iprot->readListBegin(_etype724, _size721);
+this->success.resize(_size721);
+uint32_t _i725;
+for (_i725 = 0; _i725 < _size721; ++_i725)
 {
-  xfer += iprot->readString(this->success[_i720]);
+  xfer += iprot->readString(this->success[_i725]);
 }
 xfer += iprot->readListEnd();
   }
@@ -1286,10 +1286,10 @@ uint32_t 
ThriftHiveMetastore_get_databases_result::write(::apache::thrift::proto
 xfer += oprot->writeFieldBegin("success", 
::apache::thrift::protocol::T_LIST, 0);
 {
   xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, 
static_cast(this->success.size()));
-  std::vector ::const_iterator _iter721;
-  for (_iter721 = this->success.begin(); _iter721 != this->success.end(); 
++_iter721)
+  std::vector ::const_iterator _iter726;
+  for (_iter726 = this->success.begin(); _iter726 != this->success.end(); 
++_iter726)
   {
-xfer += oprot->writeString((*_iter721));
+xfer += oprot->writeString((*_iter726));
   }
   xfer += oprot->writeListEnd();
 }
@@ -1334,14 +1334,14 @@ uint32_t 
ThriftHiveMetastore_get_databases_presult::read(::apache::thrift::proto
 if (ftype == ::apache::thrift::protocol::T_LIST) {
   {
 (*(this->success)).clear();
-uint32_t _size722;
-::apache::thrift::protocol::TType _etype725;
-xfer += iprot->readListBegin(_etype725, _size722);
-(*(this->success)).resize(_size722);
-uint32_t _i726;
-for (_i726 = 0; _i726 < _size722; ++_i726)
+uint32_t _size727;
+::apache::thrift::protocol::TType _etype730;
+xfer += iprot->readListBegin(_etype730, _size727);
+(*(this->success)).resize(_size727);
+uint32_t _i731;
+for (_i731 = 0; _i731 < _size727; ++_i731)
 {
-  xfer += iprot->readString((*(this->success))[_i726]);
+  xfer += iprot->readString((*(this->success))[_i731]);
 }
 xfer += iprot->readListEnd();
   }
@@ -1458,14 +1458,14 @@ uint32_t 
ThriftHiveMetastore_get_all_databases_result::read(::apache::thrift::pr
 if (ftype == ::apache::thrift::protocol::T_LIST) {
   {
 this->success.clear();
-uint32_t _size727;
-::apache::thrift::protocol::TType _etype730;
-xfer += iprot->readListBegin(_etype730, _size727);
-this->success.resize(_size727);
-uint32_t _i731;
-for (_i731 = 0; _i731 < _size727; ++_i731)
+uint32_t _size732;
+::apache::thrift::protocol::TType _etype735;
+xfer += iprot->readListBegin(_etype735, _size732);
+this->success.resize(_size732);
+uint32_t _i736;
+for (_i736 = 0; _i736 < _size732; ++_i736)
 {
-  xfer += iprot->readString(this->success[_i731]);
+  xfer += iprot->readString(this->success[_i736]);
 }
 xfer += iprot->readListEnd();
   }
@@ -1504,10 +1504,10 @@ uint32_t 
ThriftHiveMetastore_get_all_databases_result::write(::apache::thrift::p
 xfer += oprot->writeFieldBegin("success", 
::apache::thrift::protocol::T_LIST, 0);
 {
   xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, 
static_cast(this->success.size()));
-  std::vector ::const_iterator _iter732;
-  for (_iter732 = this->success.begin(); _iter732 != this->success.end(); 
++_iter732)
+  std::vector ::const_iterator _iter737;
+  for (_iter737 = this->success.begin(); _iter737 != this->success.end(); 
++_iter737)
   {
-xfer += oprot->writeString((*_iter732));
+xfer += oprot->writeString((*_iter737));
   }
   xfer += oprot->writeListEnd();
 }
@@ -155

[4/7] hive git commit: HIVE-12075 : add analyze command to explicitly cache file metadata in HBase metastore (Sergey Shelukhin, reviewed by Alan Gates)

2015-12-18 Thread sershe
http://git-wip-us.apache.org/repos/asf/hive/blob/7df62023/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
--
diff --git 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
index a5fef3d..1b7bbf0 100644
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
@@ -40,6 +40,7 @@ public class PutFileMetadataRequest implements 
org.apache.thrift.TBase, SchemeFactory> schemes = 
new HashMap, SchemeFactory>();
   static {
@@ -49,11 +50,17 @@ public class PutFileMetadataRequest implements 
org.apache.thrift.TBase fileIds; // required
   private List metadata; // required
+  private FileMetadataExprType type; // optional
 
   /** The set of fields this struct contains, along with convenience methods 
for finding and manipulating them. */
   public enum _Fields implements org.apache.thrift.TFieldIdEnum {
 FILE_IDS((short)1, "fileIds"),
-METADATA((short)2, "metadata");
+METADATA((short)2, "metadata"),
+/**
+ *
+ * @see FileMetadataExprType
+ */
+TYPE((short)3, "type");
 
 private static final Map byName = new HashMap();
 
@@ -72,6 +79,8 @@ public class PutFileMetadataRequest implements 
org.apache.thrift.TBase 
metaDataMap;
   static {
 Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new 
EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -121,6 +131,8 @@ public class PutFileMetadataRequest implements 
org.apache.thrift.TBase __this__metadata = new 
ArrayList(other.metadata);
   this.metadata = __this__metadata;
 }
+if (other.isSetType()) {
+  this.type = other.type;
+}
   }
 
   public PutFileMetadataRequest deepCopy() {
@@ -159,6 +174,7 @@ public class PutFileMetadataRequest implements 
org.apache.thrift.TBase

hive git commit: HIVE-12708: Hive on Spark doesn't work with Kerberized HBase [Spark Branch] (reviewed by Szehon)

2015-12-18 Thread xuefu
Repository: hive
Updated Branches:
  refs/heads/spark 9af0b27bd -> a116e96b7


HIVE-12708: Hive on Spark doesn't work with Kerberized HBase [Spark Branch] 
(reviewed by Szehon)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a116e96b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a116e96b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a116e96b

Branch: refs/heads/spark
Commit: a116e96b75998b5e8632c46678cd94c551fba78a
Parents: 9af0b27
Author: Xuefu Zhang 
Authored: Fri Dec 18 14:37:03 2015 -0800
Committer: Xuefu Zhang 
Committed: Fri Dec 18 14:37:03 2015 -0800

--
 .../hive/ql/exec/spark/HiveSparkClientFactory.java   | 11 +++
 1 file changed, 11 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a116e96b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
index ec0fdea..9b2dce3 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
@@ -30,6 +30,7 @@ import org.apache.commons.compress.utils.CharsetNames;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.io.HiveKey;
@@ -67,6 +68,7 @@ public class HiveSparkClientFactory {
 
   public static Map<String, String> initiateSparkConf(HiveConf hiveConf) {
 Map<String, String> sparkConf = new HashMap<String, String>();
+HBaseConfiguration.addHbaseResources(hiveConf);
 
 // set default spark configurations.
 sparkConf.put("spark.master", SPARK_DEFAULT_MASTER);
@@ -139,7 +141,16 @@ public class HiveSparkClientFactory {
 if (value != null && !value.isEmpty()) {
   sparkConf.put("spark.hadoop." + propertyName, value);
 }
+  } else if (propertyName.startsWith("hbase")) {
+// Add HBase related configuration to Spark because in security mode, 
Spark needs it
+// to generate hbase delegation token for Spark. This is a temp 
solution to deal with
+// Spark problem.
+String value = hiveConf.get(propertyName);
+sparkConf.put("spark.hadoop." + propertyName, value);
+LOG.info(String.format(
+  "load HBase configuration (%s -> %s).", propertyName, value));
   }
+
   if (RpcConfiguration.HIVE_SPARK_RSC_CONFIGS.contains(propertyName)) {
 String value = RpcConfiguration.getValue(hiveConf, propertyName);
 sparkConf.put(propertyName, value);
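
The effect of the new hbase branch above, as a small round trip (the property
name is a real HBase key; the value is invented for illustration):

    HiveConf hiveConf = new HiveConf();
    hiveConf.set("hbase.zookeeper.quorum", "zk1,zk2,zk3");
    Map<String, String> sparkConf = HiveSparkClientFactory.initiateSparkConf(hiveConf);
    // every hbase.* key is mirrored under the spark.hadoop. prefix so Spark
    // can generate an HBase delegation token when Kerberos is enabled
    assert "zk1,zk2,zk3".equals(sparkConf.get("spark.hadoop.hbase.zookeeper.quorum"));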



hive git commit: HIVE-12658: Task rejection by an llap daemon spams the log with RejectedExecutionExceptions (Prasanth Jayachandran reviewed by Siddharth Seth)

2015-12-18 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/master 949640919 -> 542eaf6bc


HIVE-12658: Task rejection by an llap daemon spams the log with 
RejectedExecutionExceptions (Prasanth Jayachandran reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/542eaf6b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/542eaf6b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/542eaf6b

Branch: refs/heads/master
Commit: 542eaf6bc5f833b505bf0fd2fde4bff840aacac6
Parents: 9496409
Author: Prasanth Jayachandran 
Authored: Fri Dec 18 13:45:02 2015 -0600
Committer: Prasanth Jayachandran 
Committed: Fri Dec 18 16:01:24 2015 -0600

--
 .../daemon/rpc/LlapDaemonProtocolProtos.java| 233 +--
 .../hive/llap/daemon/ContainerRunner.java   |  13 +-
 .../llap/daemon/impl/ContainerRunnerImpl.java   |  42 +++-
 .../hive/llap/daemon/impl/LlapDaemon.java   |  22 +-
 .../impl/LlapDaemonProtocolServerImpl.java  |  12 +-
 .../hadoop/hive/llap/daemon/impl/Scheduler.java |  11 +-
 .../llap/daemon/impl/TaskExecutorService.java   |  28 ++-
 .../llap/tezplugins/LlapTaskCommunicator.java   |  41 ++--
 .../src/protobuf/LlapDaemonProtocol.proto   |   7 +
 .../impl/TestLlapDaemonProtocolServerImpl.java  |  19 +-
 .../daemon/impl/TestTaskExecutorService.java|  31 +--
 11 files changed, 355 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/542eaf6b/llap-server/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
--
diff --git 
a/llap-server/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
 
b/llap-server/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
index af009b8..d2180e5 100644
--- 
a/llap-server/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
+++ 
b/llap-server/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
@@ -90,6 +90,97 @@ public final class LlapDaemonProtocolProtos {
 // @@protoc_insertion_point(enum_scope:SourceStateProto)
   }
 
+  /**
+   * Protobuf enum {@code SubmissionStateProto}
+   */
+  public enum SubmissionStateProto
+  implements com.google.protobuf.ProtocolMessageEnum {
+/**
+ * ACCEPTED = 1;
+ */
+ACCEPTED(0, 1),
+/**
+ * REJECTED = 2;
+ */
+REJECTED(1, 2),
+/**
+ * EVICTED_OTHER = 3;
+ */
+EVICTED_OTHER(2, 3),
+;
+
+/**
+ * ACCEPTED = 1;
+ */
+public static final int ACCEPTED_VALUE = 1;
+/**
+ * REJECTED = 2;
+ */
+public static final int REJECTED_VALUE = 2;
+/**
+ * EVICTED_OTHER = 3;
+ */
+public static final int EVICTED_OTHER_VALUE = 3;
+
+
+public final int getNumber() { return value; }
+
+public static SubmissionStateProto valueOf(int value) {
+  switch (value) {
+case 1: return ACCEPTED;
+case 2: return REJECTED;
+case 3: return EVICTED_OTHER;
+default: return null;
+  }
+}
+
+public static 
com.google.protobuf.Internal.EnumLiteMap
+internalGetValueMap() {
+  return internalValueMap;
+}
+private static 
com.google.protobuf.Internal.EnumLiteMap
+internalValueMap =
+  new com.google.protobuf.Internal.EnumLiteMap() 
{
+public SubmissionStateProto findValueByNumber(int number) {
+  return SubmissionStateProto.valueOf(number);
+}
+  };
+
+public final com.google.protobuf.Descriptors.EnumValueDescriptor
+getValueDescriptor() {
+  return getDescriptor().getValues().get(index);
+}
+public final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptorForType() {
+  return getDescriptor();
+}
+public static final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptor() {
+  return 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.getDescriptor().getEnumTypes().get(1);
+}
+
+private static final SubmissionStateProto[] VALUES = values();
+
+public static SubmissionStateProto valueOf(
+com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+  if (desc.getType() != getDescriptor()) {
+throw new java.lang.IllegalArgumentException(
+  "EnumValueDescriptor is not for this type.");
+  }
+  return VALUES[desc.getIndex()];
+}
+
+private final int index;
+private final int value;
+
+private SubmissionStateProto(int index, int value) {
+  this.index = index;
+  this.value = value;
+}
+
+// @@protoc_insertion_point(enum_scope:SubmissionState
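
The point of the new enum: a daemon can now answer "rejected" as an ordinary
response value instead of throwing RejectedExecutionException back through the
RPC layer, which is what was spamming the logs. A hypothetical caller-side
sketch (the response accessor and the helper methods are invented for
illustration, not taken from this patch):

    SubmissionStateProto state = response.getSubmissionState();
    switch (state) {
      case ACCEPTED:
        break;                        // task is queued or running on the daemon
      case REJECTED:
        resubmitElsewhere(taskSpec);  // back off and retry, no stack trace logged
        break;
      case EVICTED_OTHER:
        handleEviction(taskSpec);     // handle preemption of an accepted task
        break;
    }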

hive git commit: HIVE-12658: Task rejection by an llap daemon spams the log with RejectedExecutionExceptions (Prasanth Jayachandran reviewed by Siddharth Seth)

2015-12-18 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/branch-2.0 a17c95e04 -> 1d5e9c96c


HIVE-12658: Task rejection by an llap daemon spams the log with 
RejectedExecutionExceptions (Prasanth Jayachandran reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1d5e9c96
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1d5e9c96
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1d5e9c96

Branch: refs/heads/branch-2.0
Commit: 1d5e9c96ced95f2c897f83937fd65cfa06bd312d
Parents: a17c95e
Author: Prasanth Jayachandran 
Authored: Fri Dec 18 13:45:02 2015 -0600
Committer: Prasanth Jayachandran 
Committed: Fri Dec 18 16:00:31 2015 -0600

--
 .../daemon/rpc/LlapDaemonProtocolProtos.java| 233 +--
 .../hive/llap/daemon/ContainerRunner.java   |  13 +-
 .../llap/daemon/impl/ContainerRunnerImpl.java   |  42 +++-
 .../hive/llap/daemon/impl/LlapDaemon.java   |  22 +-
 .../impl/LlapDaemonProtocolServerImpl.java  |  12 +-
 .../hadoop/hive/llap/daemon/impl/Scheduler.java |  11 +-
 .../llap/daemon/impl/TaskExecutorService.java   |  28 ++-
 .../llap/tezplugins/LlapTaskCommunicator.java   |  41 ++--
 .../src/protobuf/LlapDaemonProtocol.proto   |   7 +
 .../impl/TestLlapDaemonProtocolServerImpl.java  |  19 +-
 .../daemon/impl/TestTaskExecutorService.java|  31 +--
 11 files changed, 355 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1d5e9c96/llap-server/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
--
diff --git 
a/llap-server/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
 
b/llap-server/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
index af009b8..d2180e5 100644
--- 
a/llap-server/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
+++ 
b/llap-server/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
@@ -90,6 +90,97 @@ public final class LlapDaemonProtocolProtos {
 // @@protoc_insertion_point(enum_scope:SourceStateProto)
   }
 
+  /**
+   * Protobuf enum {@code SubmissionStateProto}
+   */
+  public enum SubmissionStateProto
+  implements com.google.protobuf.ProtocolMessageEnum {
+/**
+ * ACCEPTED = 1;
+ */
+ACCEPTED(0, 1),
+/**
+ * REJECTED = 2;
+ */
+REJECTED(1, 2),
+/**
+ * EVICTED_OTHER = 3;
+ */
+EVICTED_OTHER(2, 3),
+;
+
+/**
+ * ACCEPTED = 1;
+ */
+public static final int ACCEPTED_VALUE = 1;
+/**
+ * REJECTED = 2;
+ */
+public static final int REJECTED_VALUE = 2;
+/**
+ * EVICTED_OTHER = 3;
+ */
+public static final int EVICTED_OTHER_VALUE = 3;
+
+
+public final int getNumber() { return value; }
+
+public static SubmissionStateProto valueOf(int value) {
+  switch (value) {
+case 1: return ACCEPTED;
+case 2: return REJECTED;
+case 3: return EVICTED_OTHER;
+default: return null;
+  }
+}
+
+public static 
com.google.protobuf.Internal.EnumLiteMap
+internalGetValueMap() {
+  return internalValueMap;
+}
+private static 
com.google.protobuf.Internal.EnumLiteMap
+internalValueMap =
+  new com.google.protobuf.Internal.EnumLiteMap() 
{
+public SubmissionStateProto findValueByNumber(int number) {
+  return SubmissionStateProto.valueOf(number);
+}
+  };
+
+public final com.google.protobuf.Descriptors.EnumValueDescriptor
+getValueDescriptor() {
+  return getDescriptor().getValues().get(index);
+}
+public final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptorForType() {
+  return getDescriptor();
+}
+public static final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptor() {
+  return 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.getDescriptor().getEnumTypes().get(1);
+}
+
+private static final SubmissionStateProto[] VALUES = values();
+
+public static SubmissionStateProto valueOf(
+com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+  if (desc.getType() != getDescriptor()) {
+throw new java.lang.IllegalArgumentException(
+  "EnumValueDescriptor is not for this type.");
+  }
+  return VALUES[desc.getIndex()];
+}
+
+private final int index;
+private final int value;
+
+private SubmissionStateProto(int index, int value) {
+  this.index = index;
+  this.value = value;
+}
+
+// @@protoc_insertion_point(enum_scope:Submiss

hive git commit: HIVE-12666: PCRExprProcFactory.GenericFuncExprProcessor.process() aggressively removes dynamic partition pruner generated synthetic join predicates (Hari Subramaniyan, reviewed by Laljo John Pullokkaran)

2015-12-18 Thread harisankar
Repository: hive
Updated Branches:
  refs/heads/branch-2.0 a73816a13 -> a17c95e04


HIVE-12666: PCRExprProcFactory.GenericFuncExprProcessor.process() aggressively 
removes dynamic partition pruner generated synthetic join predicates (Hari 
Subramaniyan, reviewed by Laljo John Pullokkaran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a17c95e0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a17c95e0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a17c95e0

Branch: refs/heads/branch-2.0
Commit: a17c95e048b069028622a508d68d853c37c31787
Parents: a73816a
Author: Hari Subramaniyan 
Authored: Tue Dec 15 11:31:26 2015 -0800
Committer: Hari Subramaniyan 
Committed: Fri Dec 18 12:48:27 2015 -0800

--
 .../DynamicPartitionPruningOptimization.java| 73 +---
 .../ql/optimizer/pcr/PcrExprProcFactory.java| 31 ++---
 .../llap/dynamic_partition_pruning.q.out| 10 +--
 .../vectorized_dynamic_partition_pruning.q.out  | 10 +--
 .../tez/dynamic_partition_pruning.q.out | 10 +--
 .../results/clientpositive/tez/mergejoin.q.out  |  2 -
 .../vectorized_dynamic_partition_pruning.q.out  | 10 +--
 7 files changed, 64 insertions(+), 82 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a17c95e0/ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java
index c2749a8..292d375 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java
@@ -65,7 +65,6 @@ import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
-import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 
 /**
  * This optimization looks for expressions of the kind "x IN (RS[n])". If such
@@ -166,7 +165,8 @@ public class DynamicPartitionPruningOptimization implements 
NodeProcessor {
   procCtx.getClass().getName());
 }
 
-final FilterOperator filter = (FilterOperator) nd;
+FilterOperator filter = (FilterOperator) nd;
+FilterDesc desc = filter.getConf();
 
 TableScanOperator ts = null;
 
@@ -183,19 +183,15 @@ public class DynamicPartitionPruningOptimization 
implements NodeProcessor {
   ts = (TableScanOperator) filter.getParentOperators().get(0);
 }
 
-final ExprNodeDesc filterPredicate = filter.getConf().getPredicate(),
-tsPredicate = ts != null ? ts.getConf().getFilterExpr() : null,
-predicateToUse = (tsPredicate == null) ? filterPredicate : tsPredicate;
-
 if (LOG.isDebugEnabled()) {
   LOG.debug("Parent: " + filter.getParentOperators().get(0));
-  LOG.debug("Filter: " + predicateToUse.getExprString());
+  LOG.debug("Filter: " + desc.getPredicateString());
   LOG.debug("TableScan: " + ts);
 }
 
 // collect the dynamic pruning conditions
 removerContext.dynLists.clear();
-walkExprTree(predicateToUse, removerContext);
+walkExprTree(desc.getPredicate(), removerContext);
 
 for (DynamicListContext ctx : removerContext) {
   String column = extractColName(ctx.parent);
@@ -228,53 +224,50 @@ public class DynamicPartitionPruningOptimization 
implements NodeProcessor {
   }
 
   // we always remove the condition by replacing it with "true"
-  if (predicateToUse == filterPredicate) {
-removeDppExpr(filter.getConf(), ctx);
+  ExprNodeDesc constNode = new 
ExprNodeConstantDesc(ctx.parent.getTypeInfo(), true);
+  if (ctx.grandParent == null) {
+desc.setPredicate(constNode);
   } else {
-removeDppExpr(ts.getConf(), ctx);
+int i = ctx.grandParent.getChildren().indexOf(ctx.parent);
+ctx.grandParent.getChildren().remove(i);
+ctx.grandParent.getChildren().add(i, constNode);
   }
 }
 
-// Clean up the other predicate too.
-if (predicateToUse == tsPredicate) {
-  removerContext = new DynamicPartitionPrunerContext();
-  removerContext.dynLists.clear();
-  walkExprTree(filterPredicate, removerContext);
-  for (DynamicListContext ctx : removerContext) {
-removeDppExpr(filter.getConf(), ctx);
-  }
-}
+// if we pushed the predicate into the table scan we need to remove the
+// synthetic conditions there.
+cleanTableScanFilters(ts);
 
 return false;
   }
 
+  private void cleanTableScanFilters(TableScanOperator ts) t
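
The replacement step above, distilled: the synthetic predicate is swapped for
a literal TRUE at the same child index, so the enclosing AND/OR keeps its
arity and no other part of the expression tree moves. Assuming the children
list supports positional set(), the remove(i)/add(i, ...) pair is equivalent
to:

    int i = ctx.grandParent.getChildren().indexOf(ctx.parent);
    ctx.grandParent.getChildren().set(i,
        new ExprNodeConstantDesc(ctx.parent.getTypeInfo(), true));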

[2/3] hive git commit: HIVE-12675 : PerfLogger should log performance metrics at debug level (Hari Subramaniyan, reviewed by Laljo John Pullokkaran)

2015-12-18 Thread harisankar
HIVE-12675 : PerfLogger should log performance metrics at debug level (Hari 
Subramaniyan, reviewed by Laljo John Pullokkaran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9a3a45a9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9a3a45a9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9a3a45a9

Branch: refs/heads/branch-2.0
Commit: 9a3a45a9a7b717219dd402f9f588214b12fb6198
Parents: 592179c
Author: Hari Subramaniyan 
Authored: Thu Dec 17 14:12:44 2015 -0800
Committer: Hari Subramaniyan 
Committed: Fri Dec 18 12:45:30 2015 -0800

--
 .../apache/hadoop/hive/ql/log/PerfLogger.java   | 55 +++-
 1 file changed, 30 insertions(+), 25 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/9a3a45a9/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java 
b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
index f6eb64b..98ebd50 100644
--- a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
+++ b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
@@ -117,10 +117,12 @@ public class PerfLogger {
* @param method method or ID that identifies this perf log element.
*/
   public void PerfLogBegin(String callerName, String method) {
-long startTime = System.currentTimeMillis();
-LOG.info("<PERFLOG method=" + method + " from=" + callerName + ">");
-startTimes.put(method, new Long(startTime));
-beginMetrics(method);
+if (LOG.isDebugEnabled()) {
+  long startTime = System.currentTimeMillis();
+  LOG.debug("<PERFLOG method=" + method + " from=" + callerName + ">");
+  startTimes.put(method, new Long(startTime));
+  beginMetrics(method);
+}
   }
   /**
* Call this function in correspondence of PerfLogBegin to mark the end of 
the measurement.
@@ -139,31 +141,34 @@ public class PerfLogger {
* @return long duration  the difference between now and startTime, or -1 if 
startTime is null
*/
   public long PerfLogEnd(String callerName, String method, String 
additionalInfo) {
-Long startTime = startTimes.get(method);
-long endTime = System.currentTimeMillis();
-long duration = -1;
+if (LOG.isDebugEnabled()) {
+  Long startTime = startTimes.get(method);
+  long endTime = System.currentTimeMillis();
+  long duration = -1;
 
-endTimes.put(method, new Long(endTime));
+  endTimes.put(method, new Long(endTime));
 
-StringBuilder sb = new StringBuilder("</PERFLOG method=").append(method);
-LOG.info(sb.toString());
+  StringBuilder sb = new StringBuilder("</PERFLOG method=").append(method);
+  LOG.debug(sb.toString());
 
-endMetrics(method);
+  endMetrics(method);
 
-return duration;
+  return duration;
+}
+return -1;
   }
 
   public Long getStartTime(String method) {
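
One behavioral consequence worth noting: with the guards in place, PerfLogEnd
returns -1 whenever debug logging is disabled, so callers that consume the
duration must treat it as optional. Caller-side sketch (CLASS_NAME is an
illustrative constant):

    perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.COMPILE);
    // ... do the timed work ...
    long ms = perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.COMPILE, null);
    if (ms >= 0) {
      // a real duration is only reported when LOG.isDebugEnabled() was true
    }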



[1/3] hive git commit: HIVE-12526 : PerfLogger for hive compiler and optimizer (Hari Subramaniyan, reviewed by Jesus Camacho Rodriguez)

2015-12-18 Thread harisankar
Repository: hive
Updated Branches:
  refs/heads/branch-2.0 1420e65f5 -> a73816a13


HIVE-12526 : PerfLogger for hive compiler and optimizer (Hari Subramaniyan, 
reviewed by Jesus Camacho Rodriguez)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/592179c4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/592179c4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/592179c4

Branch: refs/heads/branch-2.0
Commit: 592179c40cc1953ea7ac8a8468c6a5470ffdd4cd
Parents: 178a6bf
Author: Hari Subramaniyan 
Authored: Mon Dec 14 17:18:34 2015 -0800
Committer: Hari Subramaniyan 
Committed: Fri Dec 18 12:45:07 2015 -0800

--
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |  1 +
 .../ql/optimizer/BucketMapJoinOptimizer.java|  3 +-
 .../BucketingSortingReduceSinkOptimizer.java|  2 +-
 .../hadoop/hive/ql/optimizer/ColumnPruner.java  |  2 +-
 .../hive/ql/optimizer/ConstantPropagate.java|  2 +-
 .../optimizer/FixedBucketPruningOptimizer.java  |  2 +-
 .../hive/ql/optimizer/GlobalLimitOptimizer.java |  2 +-
 .../hive/ql/optimizer/GroupByOptimizer.java |  2 +-
 .../ql/optimizer/IdentityProjectRemover.java|  2 +-
 .../hadoop/hive/ql/optimizer/JoinReorder.java   |  2 +-
 .../ql/optimizer/LimitPushdownOptimizer.java|  2 +-
 .../hive/ql/optimizer/MapJoinProcessor.java |  2 +-
 .../ql/optimizer/NonBlockingOpDeDupProc.java|  2 +-
 .../hadoop/hive/ql/optimizer/Optimizer.java |  4 +-
 .../ql/optimizer/PartitionColumnsSeparator.java |  2 +-
 .../hive/ql/optimizer/PointLookupOptimizer.java |  2 +-
 .../hadoop/hive/ql/optimizer/SamplePruner.java  |  2 +-
 .../ql/optimizer/SimpleFetchAggregation.java|  2 +-
 .../hive/ql/optimizer/SimpleFetchOptimizer.java |  2 +-
 .../hive/ql/optimizer/SkewJoinOptimizer.java|  2 +-
 .../optimizer/SortedDynPartitionOptimizer.java  |  2 +-
 .../SortedMergeBucketMapJoinOptimizer.java  |  2 +-
 .../hive/ql/optimizer/StatsOptimizer.java   |  2 +-
 .../hadoop/hive/ql/optimizer/Transform.java | 20 ++-
 .../translator/HiveOpConverterPostProc.java |  2 +-
 .../correlation/CorrelationOptimizer.java   |  2 +-
 .../correlation/ReduceSinkDeDuplication.java|  2 +-
 .../ql/optimizer/index/RewriteGBUsingIndex.java |  2 +-
 .../hive/ql/optimizer/lineage/Generator.java|  2 +-
 .../ListBucketingPruner.java|  2 +-
 .../annotation/AnnotateWithOpTraits.java|  2 +-
 .../pcr/PartitionConditionRemover.java  |  2 +-
 .../hive/ql/optimizer/ppr/PartitionPruner.java  |  2 +-
 .../annotation/AnnotateWithStatistics.java  |  2 +-
 .../ql/optimizer/unionproc/UnionProcessor.java  |  2 +-
 .../hadoop/hive/ql/parse/CalcitePlanner.java| 57 +++-
 .../hadoop/hive/ql/parse/TezCompiler.java   |  6 ++-
 .../hadoop/hive/ql/ppd/PredicatePushDown.java   |  2 +-
 .../ql/ppd/PredicateTransitivePropagate.java|  2 +-
 .../hive/ql/ppd/SyntheticJoinPredicate.java |  2 +-
 40 files changed, 118 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/592179c4/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java 
b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
index 1ef636c..f6eb64b 100644
--- a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
+++ b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
@@ -39,6 +39,7 @@ public class PerfLogger {
   public static final String COMPILE = "compile";
   public static final String PARSE = "parse";
   public static final String ANALYZE = "semanticAnalyze";
+  public static final String OPTIMIZER = "optimizer";
   public static final String DO_AUTHORIZATION = "doAuthorization";
   public static final String DRIVER_EXECUTE = "Driver.execute";
   public static final String INPUT_SUMMARY = "getInputSummary";

http://git-wip-us.apache.org/repos/asf/hive/blob/592179c4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
index 750427a..a649fdf 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
 /**
  * this transformation does bucket map join optimization.
  */
-public class BucketMapJoinOptimizer implements Transform {
+public class BucketMapJoinOptimizer extends Transform {
 
   private static fi
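
The switch of Transform from an interface to an abstract class, together with
the new OPTIMIZER key, points at the instrumentation pattern this commit
applies around each rewrite rule. A sketch of the presumed shape (the loop and
names are illustrative, not lifted from the patch):

    for (Transform t : transformations) {
      String ruleKey = PerfLogger.OPTIMIZER + ":" + t.getClass().getSimpleName();
      perfLogger.PerfLogBegin(CLASS_NAME, ruleKey);
      pctx = t.transform(pctx);   // each optimizer rule is timed individually
      perfLogger.PerfLogEnd(CLASS_NAME, ruleKey, null);
    }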

[3/3] hive git commit: Merge branch 'branch-2.0' of https://git-wip-us.apache.org/repos/asf/hive into branch-2.0

2015-12-18 Thread harisankar
Merge branch 'branch-2.0' of https://git-wip-us.apache.org/repos/asf/hive into 
branch-2.0


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a73816a1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a73816a1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a73816a1

Branch: refs/heads/branch-2.0
Commit: a73816a137563b6700341048e7e8804544e4c009
Parents: 9a3a45a 1420e65
Author: Hari Subramaniyan 
Authored: Fri Dec 18 12:46:12 2015 -0800
Committer: Hari Subramaniyan 
Committed: Fri Dec 18 12:46:12 2015 -0800

--
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 31 ++--
 .../authorization/AuthorizationUtils.java   | 50 +++-
 .../DefaultHiveAuthorizationTranslator.java | 81 
 .../plugin/HiveAuthorizationTranslator.java | 46 +++
 .../authorization/plugin/HiveAuthorizer.java| 26 ---
 .../plugin/HiveAuthorizerImpl.java  | 26 +++
 .../authorization/plugin/HiveV1Authorizer.java  | 18 +
 7 files changed, 188 insertions(+), 90 deletions(-)
--




hive git commit: HIVE-12698 : Remove exposure to internal privilege and principal classes in HiveAuthorizer (Thejas Nair, reviewed by Ferdinand Xu)

2015-12-18 Thread thejas
Repository: hive
Updated Branches:
  refs/heads/branch-1 3fe412417 -> ec50fc919


HIVE-12698 : Remove exposure to internal privilege and principal classes in 
HiveAuthorizer (Thejas Nair, reviewed by Ferdinand Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ec50fc91
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ec50fc91
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ec50fc91

Branch: refs/heads/branch-1
Commit: ec50fc919ef815848994bd9e0b0c84deb971772e
Parents: 3fe4124
Author: Thejas Nair 
Authored: Fri Dec 18 11:49:28 2015 -0800
Committer: Thejas Nair 
Committed: Fri Dec 18 12:15:10 2015 -0800

--
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 31 ++--
 .../authorization/AuthorizationUtils.java   | 50 +++-
 .../DefaultHiveAuthorizationTranslator.java | 81 
 .../plugin/HiveAuthorizationTranslator.java | 46 +++
 .../authorization/plugin/HiveAuthorizer.java| 26 ---
 .../plugin/HiveAuthorizerImpl.java  | 26 +++
 .../authorization/plugin/HiveV1Authorizer.java  | 18 +
 7 files changed, 188 insertions(+), 90 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ec50fc91/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index c3693de..2a64da3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -49,6 +49,7 @@ import java.util.TreeMap;
 import java.util.TreeSet;
 
 import com.google.common.collect.Iterables;
+
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -182,7 +183,10 @@ import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import 
org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
@@ -236,6 +240,7 @@ public class DDLTask extends Task implements 
Serializable {
   private static String INTERMEDIATE_EXTRACTED_DIR_SUFFIX;
 
   private MetaDataFormatter formatter;
+  private final HiveAuthorizationTranslator defaultAuthorizationTranslator = 
new DefaultHiveAuthorizationTranslator();
 
   @Override
   public boolean requireLock() {
@@ -641,8 +646,8 @@ public class DDLTask extends Task implements 
Serializable {
   grantorPrinc = new HivePrincipal(grantOrRevokeRoleDDL.getGrantor(),
   
AuthorizationUtils.getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
 }
-List<HivePrincipal> principals =
-authorizer.getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc());
+List<HivePrincipal> principals = AuthorizationUtils.getHivePrincipals(
+grantOrRevokeRoleDDL.getPrincipalDesc(), 
getAuthorizationTranslator(authorizer));
 List<String> roles = grantOrRevokeRoleDDL.getRoles();
 
 boolean grantOption = grantOrRevokeRoleDDL.isGrantOption();
@@ -654,13 +659,22 @@ public class DDLTask extends Task implements 
Serializable {
 return 0;
   }
 
+  private HiveAuthorizationTranslator 
getAuthorizationTranslator(HiveAuthorizer authorizer)
+  throws HiveAuthzPluginException {
+if (authorizer.getHiveAuthorizationTranslator() == null) {
+  return defaultAuthorizationTranslator;
+} else {
+  return 
(HiveAuthorizationTranslator)authorizer.getHiveAuthorizationTranslator();
+}
+  }
+
   private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
 
 HiveAuthorizer authorizer = getSessionAuthorizer();
 try {
   List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges(
-  
AuthorizationUtils.getHivePrincipal(showGrantDesc.getPrincipalDesc()),
-  authorizer.getHivePrivilegeObject(showGrantDesc.getHiveObj()));
+  
getAuthorizationTranslator(authorizer).getHivePrincipal(showGrantDesc.getPrincipalDesc()),
+  
getAuthorizationTranslator(authorizer).getHivePrivilegeObject(showGrantDesc.getHiveObj()));
   boolean testMode = conf.getBoolVar(HiveCo
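
The fallback above means a plugin opts in to custom translation simply by
returning something non-null. A hypothetical override inside a HiveAuthorizer
implementation (MyTranslator is invented for illustration; the loose Object
return type matches the cast in DDLTask above):

    @Override
    public Object getHiveAuthorizationTranslator() {
      // returning null would make DDLTask fall back to
      // DefaultHiveAuthorizationTranslator
      return new MyTranslator();  // must implement HiveAuthorizationTranslator
    }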

hive git commit: HIVE-11935: Race condition in HiveMetaStoreClient: isCompatibleWith and close (Daniel Dai, Reviewed by Alan Gates)

2015-12-18 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 0d9689657 -> 3fe412417


HIVE-11935: Race condition in HiveMetaStoreClient: isCompatibleWith and close 
(Daniel Dai, Reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3fe41241
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3fe41241
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3fe41241

Branch: refs/heads/branch-1
Commit: 3fe4124178b8c5a413b07b400c9b5df6360f6d29
Parents: 0d96896
Author: Daniel Dai 
Authored: Fri Dec 18 12:06:16 2015 -0800
Committer: Daniel Dai 
Committed: Fri Dec 18 12:06:57 2015 -0800

--
 .../org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3fe41241/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index e1ab1d5..6ee48c6 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -287,13 +287,16 @@ public class HiveMetaStoreClient implements 
IMetaStoreClient {
 
   @Override
   public boolean isCompatibleWith(HiveConf conf) {
-if (currentMetaVars == null) {
+// Make a copy of currentMetaVars, there is a race condition that
+   // currentMetaVars might be changed during the execution of the method
+Map<String, String> currentMetaVarsCopy = currentMetaVars;
+if (currentMetaVarsCopy == null) {
   return false; // recreate
 }
 boolean compatible = true;
 for (ConfVars oneVar : HiveConf.metaVars) {
   // Since metaVars are all of different types, use string for comparison
-  String oldVar = currentMetaVars.get(oneVar.varname);
+  String oldVar = currentMetaVarsCopy.get(oneVar.varname);
   String newVar = conf.get(oneVar.varname, "");
   if (oldVar == null ||
   (oneVar.isCaseSensitive() ? !oldVar.equals(newVar) : 
!oldVar.equalsIgnoreCase(newVar))) {
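
The fix is the classic read-once-into-a-local idiom: take one snapshot of the
mutable field so later dereferences cannot trip over a concurrent close() that
nulls it out. Distilled (generics written out by hand):

    Map<String, String> snapshot = this.currentMetaVars;  // single read of the field
    if (snapshot == null) {
      return false;                 // client already closed; caller should recreate
    }
    String oldVar = snapshot.get(oneVar.varname);         // local can never become null

Note this removes the NullPointerException path rather than fully fencing the
race; without a volatile field, visibility of the most recent value is still
not guaranteed.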



[3/3] hive git commit: HIVE-11935: Race condition in HiveMetaStoreClient: isCompatibleWith and close (Daniel Dai, Reviewed by Alan Gates)

2015-12-18 Thread daijy
HIVE-11935: Race condition in HiveMetaStoreClient: isCompatibleWith and close 
(Daniel Dai, Reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/94964091
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/94964091
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/94964091

Branch: refs/heads/master
Commit: 949640919e47bf5c729c51ba396424f7101ff43b
Parents: 27a14d5
Author: Daniel Dai 
Authored: Fri Dec 18 12:06:16 2015 -0800
Committer: Daniel Dai 
Committed: Fri Dec 18 12:06:16 2015 -0800

--
 .../org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/94964091/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index c5e7a5f..178796d 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -309,13 +309,16 @@ public class HiveMetaStoreClient implements 
IMetaStoreClient {
 
   @Override
   public boolean isCompatibleWith(HiveConf conf) {
-if (currentMetaVars == null) {
+// Make a copy of currentMetaVars, there is a race condition that
+   // currentMetaVars might be changed during the execution of the method
+Map<String, String> currentMetaVarsCopy = currentMetaVars;
+if (currentMetaVarsCopy == null) {
   return false; // recreate
 }
 boolean compatible = true;
 for (ConfVars oneVar : HiveConf.metaVars) {
   // Since metaVars are all of different types, use string for comparison
-  String oldVar = currentMetaVars.get(oneVar.varname);
+  String oldVar = currentMetaVarsCopy.get(oneVar.varname);
   String newVar = conf.get(oneVar.varname, "");
   if (oldVar == null ||
   (oneVar.isCaseSensitive() ? !oldVar.equals(newVar) : 
!oldVar.equalsIgnoreCase(newVar))) {



[2/3] hive git commit: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/hive

2015-12-18 Thread daijy
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/hive


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/27a14d5a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/27a14d5a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/27a14d5a

Branch: refs/heads/master
Commit: 27a14d5a4cf00b34bdfd9c556c71b49fa2e58fff
Parents: 95d2273 1199754
Author: Daniel Dai 
Authored: Fri Dec 18 12:04:17 2015 -0800
Committer: Daniel Dai 
Committed: Fri Dec 18 12:04:17 2015 -0800

--
 .../predicate/AccumuloPredicateHandler.java | 4 +-
 .../predicate/TestAccumuloPredicateHandler.java |36 +-
 common/pom.xml  | 5 +
 .../hadoop/hive/common/DiskRangeInfo.java   |59 -
 .../common/metrics/common/MetricsConstant.java  | 8 +
 .../metrics/metrics2/CodahaleMetrics.java   | 7 +
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   125 +-
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |56 +-
 .../apache/hive/common/util/BloomFilter.java|   309 -
 .../org/apache/hive/common/util/Murmur3.java|   335 -
 .../java/org/apache/hive/http/HttpServer.java   |47 +
 .../hive/common/metrics/MetricsTestUtils.java   |13 +-
 .../apache/hadoop/hive/conf/TestHiveConf.java   |14 +
 .../apache/hive/common/util/TestMurmur3.java|   224 -
 conf/ivysettings.xml|12 +-
 data/conf/hive-site.xml | 5 +
 data/conf/llap/hive-site.xml| 5 +
 data/conf/llap/llap-daemon-site.xml | 5 +
 data/conf/perf-reg/hive-site.xml|   285 +
 data/conf/perf-reg/tez-site.xml | 6 +
 .../metastore_export/csv/TABLE_PARAMS.txt   |   102 +
 .../metastore_export/csv/TAB_COL_STATS.txt  |   259 +
 data/scripts/q_perf_test_init.sql   |   616 +
 .../hive/hbase/HiveHBaseTableInputFormat.java   |11 +-
 .../test/results/positive/hbase_queries.q.out   |38 +-
 .../src/test/templates/TestHBaseCliDriver.vm|63 +-
 .../templates/TestHBaseNegativeCliDriver.vm |64 +-
 .../hive/hcatalog/mapreduce/SpecialCases.java   | 2 +-
 .../hive/hcatalog/api/HCatClientHMSImpl.java|26 +-
 itests/custom-udfs/pom.xml  |62 +
 itests/custom-udfs/udf-classloader-udf1/pom.xml |43 +
 .../src/main/java/hive/it/custom/udfs/UDF1.java |58 +
 itests/custom-udfs/udf-classloader-udf2/pom.xml |43 +
 .../src/main/java/hive/it/custom/udfs/UDF2.java |60 +
 itests/custom-udfs/udf-classloader-util/pom.xml |35 +
 .../src/main/java/hive/it/custom/udfs/Util.java |25 +
 .../hive/thrift/TestHadoopAuthBridge23.java | 4 -
 .../hive/metastore/TestMetaStoreMetrics.java|   152 +-
 .../hbase/TestHBaseMetastoreMetrics.java|   128 +
 .../hadoop/hive/ql/TestLocationQueries.java | 2 +-
 .../plugin/TestHiveAuthorizerShowFilters.java   |36 +-
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   | 8 +
 .../hive/jdbc/miniHS2/TestHs2Metrics.java   |42 +-
 itests/pom.xml  | 1 +
 itests/qtest/pom.xml|21 +-
 .../test/resources/testconfiguration.properties | 1 +
 .../hadoop/hive/accumulo/AccumuloQTestUtil.java | 2 +-
 .../hadoop/hive/hbase/HBaseQTestUtil.java   | 2 +-
 .../hadoop/hive/hbase/HBaseTestSetup.java   | 9 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java|   337 +-
 .../org/apache/hive/jdbc/HiveConnection.java| 9 +-
 .../org/apache/hive/jdbc/HiveStatement.java |43 +-
 jdbc/src/java/org/apache/hive/jdbc/Utils.java   | 2 +
 .../hadoop/hive/llap/io/api/LlapIoProxy.java|78 -
 .../hadoop/hive/llap/io/api/LlapProxy.java  |   111 +
 .../hive/llap/registry/ServiceInstance.java | 7 +
 .../registry/impl/LlapFixedRegistryImpl.java| 7 +
 .../registry/impl/LlapYarnRegistryImpl.java |30 +-
 .../hive/llap/security/LlapTokenIdentifier.java |82 +
 .../hive/llap/security/LlapTokenProvider.java   |27 +
 .../daemon/rpc/LlapDaemonProtocolProtos.java|  1059 +-
 .../hadoop/hive/llap/cache/BuddyAllocator.java  | 1 -
 .../llap/cache/LowLevelCacheMemoryManager.java  |41 +-
 .../daemon/LlapDaemonProtocolBlockingPB.java| 6 +
 .../LlapManagementProtocolBlockingPB.java   |24 +
 .../hive/llap/daemon/impl/LlapDaemon.java   |41 +-
 .../impl/LlapDaemonProtocolClientImpl.java  | 1 -
 .../impl/LlapDaemonProtocolServerImpl.java  |   155 +-
 .../impl/LlapManagementProtocolClientImpl.java  |82 +
 .../hive/llap/daemon/impl/QueryFileCleaner.java |96 -
 .../hive/llap/daemon/impl/QueryTracker.java |   114 +-
 .../daemon/services/impl/LlapWebServices.java   |33 +-
 .../llap/io/decode/OrcEncodedDataConsumer.java  | 4 +-
 .../llap/io/encoded/OrcEncodedDataR

[1/3] hive git commit: HIVE-12429: Switch default Hive authorization to SQLStandardAuth in 2.0

2015-12-18 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 1199754cc -> 949640919


HIVE-12429: Switch default Hive authorization to SQLStandardAuth in 2.0


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/95d22735
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/95d22735
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/95d22735

Branch: refs/heads/master
Commit: 95d22735d73381458354e0ca79a2cb607f8e2150
Parents: 7a1f14c
Author: Daniel Dai 
Authored: Sat Dec 5 21:44:52 2015 -0800
Committer: Daniel Dai 
Committed: Sat Dec 5 21:45:32 2015 -0800

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   2 +-
 .../cli/SemanticAnalysis/HCatAuthUtil.java  |   5 +-
 .../cli/SemanticAnalysis/TestHCatAuthUtil.java  |   4 +-
 .../SQLStdHiveAuthorizationValidator.java   |  11 --
 .../parse/authorization/TestPrivilegesV1.java   |  13 +-
 .../TestSQLStdHiveAccessControllerCLI.java  |  16 +-
 .../authorization_cli_auth_enable.q |   7 -
 .../clientnegative/authorization_fail_1.q   |   1 +
 .../clientnegative/authorization_fail_2.q   |   1 +
 .../clientnegative/authorization_fail_3.q   |   1 +
 .../clientnegative/authorization_fail_4.q   |   1 +
 .../clientnegative/authorization_fail_5.q   |   3 +-
 .../clientnegative/authorization_fail_6.q   |   1 +
 .../clientnegative/authorization_fail_7.q   |   3 +-
 .../authorization_fail_create_db.q  |   1 +
 .../clientnegative/authorization_fail_drop_db.q |   1 +
 .../authorization_invalid_priv_v1.q |   1 +
 .../queries/clientnegative/authorization_part.q |   3 +-
 .../authorization_public_create.q   |   1 +
 .../clientnegative/authorization_public_drop.q  |   1 +
 .../clientnegative/authorization_role_case.q|   1 +
 .../clientnegative/authorize_grant_public.q |   1 +
 .../clientnegative/authorize_revoke_public.q|   1 +
 .../clientnegative/exim_22_export_authfail.q|   1 +
 .../exim_23_import_exist_authfail.q |   1 +
 .../exim_24_import_part_authfail.q  |   1 +
 .../exim_25_import_nonexist_authfail.q  |   1 +
 .../clientnegative/join_nonexistent_part.q  |   5 -
 .../clientnegative/load_exist_part_authfail.q   |   1 +
 .../clientnegative/load_nonpart_authfail.q  |   1 +
 .../queries/clientnegative/load_part_authfail.q |   1 +
 .../alter_rename_partition_authorization.q  |   1 +
 .../queries/clientpositive/authorization_1.q|   4 +-
 .../queries/clientpositive/authorization_2.q|   4 +-
 .../queries/clientpositive/authorization_3.q|   2 +
 .../queries/clientpositive/authorization_4.q|   4 +-
 .../queries/clientpositive/authorization_5.q|   2 +
 .../queries/clientpositive/authorization_6.q|   2 +
 .../queries/clientpositive/authorization_7.q|   4 +-
 .../queries/clientpositive/authorization_8.q|   1 +
 .../queries/clientpositive/authorization_9.q|   1 +
 ...orization_default_create_table_owner_privs.q |   1 +
 .../clientpositive/authorization_explain.q  |   1 +
 .../authorization_show_role_principals_v1.q |   1 +
 .../clientpositive/exim_21_export_authsuccess.q |   1 +
 .../exim_22_import_exist_authsuccess.q  |   1 +
 .../exim_23_import_part_authsuccess.q   |   1 +
 .../exim_24_import_nonexist_authsuccess.q   |   1 +
 ql/src/test/queries/clientpositive/index_auth.q |   2 +
 ql/src/test/queries/clientpositive/keyword_1.q  |   4 +-
 .../load_exist_part_authsuccess.q   |   1 +
 .../clientpositive/load_nonpart_authsuccess.q   |   1 +
 .../clientpositive/load_part_authsuccess.q  |   1 +
 ql/src/test/queries/clientpositive/show_roles.q |   2 +
 .../authorization_cli_auth_enable.q.out |   1 -
 .../clientnegative/join_nonexistent_part.q.out  |   1 -
 .../clientpositive/authorization_9.q.out        | 180 +++
 .../authorization_explain.q.java1.7.out |   3 -
 .../authorization_show_grant.q.out  |  60 +++
 59 files changed, 318 insertions(+), 62 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/95d22735/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index d52f994..a563f2e 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1641,7 +1641,7 @@ public class HiveConf extends Configuration {
     HIVE_AUTHORIZATION_ENABLED("hive.security.authorization.enabled", false,
         "enable or disable the Hive client authorization"),
     HIVE_AUTHORIZATION_MANAGER("hive.security.authorization.manager",
-        "org.apac

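The hunk above flips the default value of hive.security.authorization.manager; the archived diff is cut off before the full new value is visible. As a minimal sketch, assuming the new default is the SQL Standard authorizer factory that HIVE-12429 targets, the setting can be read or pinned programmatically through HiveConf:

import org.apache.hadoop.hive.conf.HiveConf;

public class AuthManagerCheck {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Read the effective authorization manager; after this commit it is
    // assumed to default to the SQL Standard authorizer factory.
    String mgr = conf.getVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
    System.out.println("hive.security.authorization.manager = " + mgr);
    // Pin the value explicitly; the class name below is an assumption, since
    // the diff above is truncated before the new default is shown.
    conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
        "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdConfOnlyAuthorizerFactory");
  }
}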
hive git commit: HIVE-12698: Remove exposure to internal privilege and principal classes in HiveAuthorizer (Thejas Nair, reviewed by Ferdinand Xu)

2015-12-18 Thread thejas
Repository: hive
Updated Branches:
  refs/heads/master 1907977cf -> 1199754cc


HIVE-12698: Remove exposure to internal privilege and principal classes in HiveAuthorizer (Thejas Nair, reviewed by Ferdinand Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1199754c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1199754c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1199754c

Branch: refs/heads/master
Commit: 1199754cc030e12bf73f54e937f552326ac96f2f
Parents: 1907977
Author: Thejas Nair 
Authored: Fri Dec 18 11:49:28 2015 -0800
Committer: Thejas Nair 
Committed: Fri Dec 18 11:50:26 2015 -0800

--
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 31 ++--
 .../authorization/AuthorizationUtils.java   | 50 +++-
 .../DefaultHiveAuthorizationTranslator.java | 81 
 .../plugin/HiveAuthorizationTranslator.java | 46 +++
 .../authorization/plugin/HiveAuthorizer.java| 26 ---
 .../plugin/HiveAuthorizerImpl.java  | 26 +++
 .../authorization/plugin/HiveV1Authorizer.java  | 18 +
 7 files changed, 188 insertions(+), 90 deletions(-)
--
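Per the diffstat, the commit adds a new public hook, HiveAuthorizationTranslator, plus a default implementation. Its exact shape is not shown in this excerpt; the following is a hedged sketch inferred from the two call sites in the DDLTask diff below, with String stand-ins where the real interface works on Hive's descriptor and plugin types:

// Hedged sketch only: method names match the call sites in the diff below;
// parameter and return types here are stand-ins, not the committed API.
interface TranslatorShapeSketch {
  // Inferred from getAuthorizationTranslator(authorizer).getHivePrincipal(...)
  String getHivePrincipal(String principalDesc);
  // Inferred from getAuthorizationTranslator(authorizer).getHivePrivilegeObject(...)
  String getHivePrivilegeObject(String privilegeObjectDesc);
}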


http://git-wip-us.apache.org/repos/asf/hive/blob/1199754c/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 4fb6c00..290f489 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import com.google.common.collect.Iterables;
+
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -153,7 +154,10 @@ import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
@@ -237,6 +241,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
   private static String INTERMEDIATE_EXTRACTED_DIR_SUFFIX;
 
   private MetaDataFormatter formatter;
+  private final HiveAuthorizationTranslator defaultAuthorizationTranslator = new DefaultHiveAuthorizationTranslator();
 
   @Override
   public boolean requireLock() {
@@ -661,8 +666,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       grantorPrinc = new HivePrincipal(grantOrRevokeRoleDDL.getGrantor(),
           AuthorizationUtils.getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
     }
-    List<HivePrincipal> principals =
-        authorizer.getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc());
+    List<HivePrincipal> principals = AuthorizationUtils.getHivePrincipals(
+        grantOrRevokeRoleDDL.getPrincipalDesc(), getAuthorizationTranslator(authorizer));
     List<String> roles = grantOrRevokeRoleDDL.getRoles();
 
     boolean grantOption = grantOrRevokeRoleDDL.isGrantOption();
@@ -674,13 +679,22 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     return 0;
   }
 
+  private HiveAuthorizationTranslator getAuthorizationTranslator(HiveAuthorizer authorizer)
+      throws HiveAuthzPluginException {
+    if (authorizer.getHiveAuthorizationTranslator() == null) {
+      return defaultAuthorizationTranslator;
+    } else {
+      return (HiveAuthorizationTranslator)authorizer.getHiveAuthorizationTranslator();
+    }
+  }
+
   private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
 
     HiveAuthorizer authorizer = getSessionAuthorizer();
     try {
       List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges(
-          AuthorizationUtils.getHivePrincipal(showGrantDesc.getPrincipalDesc()),
-          authorizer.getHivePrivilegeObject(showGrantDesc.getHiveObj()));
+          getAuthorizationTranslator(authorizer).getHivePrincipal(showGrantDesc.getPrincipalDesc()),
+          getAuthorizationTranslator(authorizer).getHivePrivilegeObject(showGrantDesc.getHiveObj()));
      boolean testMode = conf.getBoolVar(HiveConf.Conf
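The archived diff is cut off above, but the net effect of its hunks is clear: DDLTask resolves principals and privilege objects through a translator, falling back to DefaultHiveAuthorizationTranslator when the authorizer supplies none. A minimal, self-contained sketch of that fallback shape, using stand-in types rather than the real Hive classes:

final class TranslatorFallbackDemo {
  interface Translator {
    String translate(String desc);
  }

  // Stand-in for DefaultHiveAuthorizationTranslator.
  static final Translator DEFAULT = desc -> "default:" + desc;

  // Mirrors DDLTask.getAuthorizationTranslator above: prefer the
  // plugin-supplied translator, otherwise use the built-in default.
  static Translator pick(Translator fromPlugin) {
    return (fromPlugin == null) ? DEFAULT : fromPlugin;
  }

  public static void main(String[] args) {
    System.out.println(pick(null).translate("hive_user"));               // default:hive_user
    System.out.println(pick(d -> "custom:" + d).translate("hive_user")); // custom:hive_user
  }
}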

hive git commit: HIVE-12698: Remove exposure to internal privilege and principal classes in HiveAuthorizer (Thejas Nair, reviewed by Ferdinand Xu)

2015-12-18 Thread thejas
Repository: hive
Updated Branches:
  refs/heads/branch-2.0 178a6bf3a -> 1420e65f5


HIVE-12698: Remove exposure to internal privilege and principal classes in HiveAuthorizer (Thejas Nair, reviewed by Ferdinand Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1420e65f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1420e65f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1420e65f

Branch: refs/heads/branch-2.0
Commit: 1420e65f5fea62109c8ff5485ddae65a4606feed
Parents: 178a6bf
Author: Thejas Nair 
Authored: Fri Dec 18 11:49:28 2015 -0800
Committer: Thejas Nair 
Committed: Fri Dec 18 11:49:28 2015 -0800

--
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 31 ++--
 .../authorization/AuthorizationUtils.java   | 50 +++-
 .../DefaultHiveAuthorizationTranslator.java | 81 
 .../plugin/HiveAuthorizationTranslator.java | 46 +++
 .../authorization/plugin/HiveAuthorizer.java| 26 ---
 .../plugin/HiveAuthorizerImpl.java  | 26 +++
 .../authorization/plugin/HiveV1Authorizer.java  | 18 +
 7 files changed, 188 insertions(+), 90 deletions(-)
--
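The diff below is the same change as commit 1199754c on master above, cherry-picked to branch-2.0. One hedged note on usage: the cast inside getAuthorizationTranslator suggests the new HiveAuthorizer hook is declared with a loose return type, so a plugin opts in simply by returning a non-null translator. A sketch, in which everything except the hook's name is hypothetical:

public class CustomAuthorizerSketch {
  // Stand-in object; a real plugin would return an instance implementing
  // HiveAuthorizationTranslator so the cast in DDLTask succeeds.
  private final Object translator = new Object();

  public Object getHiveAuthorizationTranslator() {
    return translator; // returning null selects DefaultHiveAuthorizationTranslator
  }
}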


http://git-wip-us.apache.org/repos/asf/hive/blob/1420e65f/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index ea12fe1..f4b688a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import com.google.common.collect.Iterables;
+
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -153,7 +154,10 @@ import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
@@ -237,6 +241,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
   private static String INTERMEDIATE_EXTRACTED_DIR_SUFFIX;
 
   private MetaDataFormatter formatter;
+  private final HiveAuthorizationTranslator defaultAuthorizationTranslator = new DefaultHiveAuthorizationTranslator();
 
   @Override
   public boolean requireLock() {
@@ -661,8 +666,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       grantorPrinc = new HivePrincipal(grantOrRevokeRoleDDL.getGrantor(),
           AuthorizationUtils.getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
     }
-    List<HivePrincipal> principals =
-        authorizer.getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc());
+    List<HivePrincipal> principals = AuthorizationUtils.getHivePrincipals(
+        grantOrRevokeRoleDDL.getPrincipalDesc(), getAuthorizationTranslator(authorizer));
     List<String> roles = grantOrRevokeRoleDDL.getRoles();
 
     boolean grantOption = grantOrRevokeRoleDDL.isGrantOption();
@@ -674,13 +679,22 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     return 0;
   }
 
+  private HiveAuthorizationTranslator getAuthorizationTranslator(HiveAuthorizer authorizer)
+      throws HiveAuthzPluginException {
+    if (authorizer.getHiveAuthorizationTranslator() == null) {
+      return defaultAuthorizationTranslator;
+    } else {
+      return (HiveAuthorizationTranslator)authorizer.getHiveAuthorizationTranslator();
+    }
+  }
+
   private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
 
     HiveAuthorizer authorizer = getSessionAuthorizer();
     try {
       List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges(
-          AuthorizationUtils.getHivePrincipal(showGrantDesc.getPrincipalDesc()),
-          authorizer.getHivePrivilegeObject(showGrantDesc.getHiveObj()));
+          getAuthorizationTranslator(authorizer).getHivePrincipal(showGrantDesc.getPrincipalDesc()),
+          getAuthorizationTranslator(authorizer).getHivePrivilegeObject(showGrantDesc.getHiveObj()));
   boolean testMode = conf.getBoolVar(HiveC