hive git commit: HIVE-12282: beeline - update command printing in verbose mode (Daniel Dai, reviewed by Thejas Nair, Lefty Leverenz)
Repository: hive Updated Branches: refs/heads/master 99a043a05 -> 63dc1fa61 HIVE-12282: beeline - update command printing in verbose mode (Daniel Dai, reviewed by Thejas Nair, Lefty Leverenz) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/63dc1fa6 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/63dc1fa6 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/63dc1fa6 Branch: refs/heads/master Commit: 63dc1fa61d071b64664c5b7dfb700b9c18bcca50 Parents: 99a043a Author: Daniel Dai Authored: Wed Oct 28 21:24:42 2015 -0700 Committer: Daniel Dai Committed: Wed Oct 28 21:24:42 2015 -0700 -- .../java/org/apache/hive/beeline/BeeLine.java | 22 ++-- .../hive/beeline/TestBeelineArgParsing.java | 18 +++- 2 files changed, 33 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/63dc1fa6/beeline/src/java/org/apache/hive/beeline/BeeLine.java -- diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java index 4e04997..377703f 100644 --- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -151,6 +151,7 @@ public class BeeLine implements Closeable { private static final String HIVE_VAR_PREFIX = "--hivevar"; private static final String HIVE_CONF_PREFIX = "--hiveconf"; + static final String PASSWD_MASK = "[passwd stripped]"; private final Map formats = map(new Object[] { "vertical", new VerticalOutputFormat(this), @@ -768,12 +769,9 @@ public class BeeLine implements Closeable { */ if (url != null) { - String com = "!connect " - + url + " " - + (user == null || user.length() == 0 ? "''" : user) + " " - + (pass == null || pass.length() == 0 ? "''" : pass) + " " - + (driver == null ? "" : driver); - debug("issuing: " + com); + String com = constructCmd(url, user, pass, driver, false); + String comForDebug = constructCmd(url, user, pass, driver, true); + debug("issuing: " + comForDebug); dispatch(com); } @@ -796,6 +794,18 @@ public class BeeLine implements Closeable { return code; } + private String constructCmd(String url, String user, String pass, String driver, boolean stripPasswd) { +String com = "!connect " ++ url + " " ++ (user == null || user.length() == 0 ? "''" : user) + " "; +if (stripPasswd) { + com += PASSWD_MASK + " "; +} else { + com += (pass == null || pass.length() == 0 ? "''" : pass) + " "; +} +com += (driver == null ? "" : driver); +return com; + } /** * Obtains a password from the passed file path. 
*/ http://git-wip-us.apache.org/repos/asf/hive/blob/63dc1fa6/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java -- diff --git a/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java b/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java index 06d6ffe..80c6e06 100644 --- a/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java +++ b/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java @@ -23,9 +23,11 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; - import java.io.File; import java.io.FileOutputStream; +import java.io.PrintStream; +import java.nio.file.Files; +import java.nio.file.Paths; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -244,4 +246,18 @@ public class TestBeelineArgParsing { Assert.assertEquals(bl.findLocalDriver(connectionString).getClass().getName(), driverClazzName); } } + + @Test + public void testBeelinePasswordMask() throws Exception { +TestBeeline bl = new TestBeeline(); +File errFile = File.createTempFile("test", "tmp"); +bl.setErrorStream(new PrintStream(new FileOutputStream(errFile))); +String args[] = +new String[] { "-u", "url", "-n", "name", "-p", "password", "-d", "driver", +"--autoCommit=true", "--verbose", "--truncateTable" }; +bl.initArgs(args); +bl.close(); +String errContents = new String(Files.readAllBytes(Paths.get(errFile.toString()))); +Assert.assertTrue(errContents.contains(BeeLine.PASSWD_MASK)); + } }
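The change above routes a masked copy of the !connect command to the debug log while dispatching the real one. As a self-contained illustration, here is a minimal sketch of that construction pattern; PASSWD_MASK and the constructCmd body mirror the diff, while the surrounding class and main method are illustrative only:

// Minimal sketch of the command-construction pattern from the patch above.
// Only constructCmd and PASSWD_MASK come from the diff; the rest is scaffolding.
public class ConnectCmdSketch {
  static final String PASSWD_MASK = "[passwd stripped]";

  static String constructCmd(String url, String user, String pass, String driver,
      boolean stripPasswd) {
    String com = "!connect " + url + " "
        + (user == null || user.length() == 0 ? "''" : user) + " ";
    if (stripPasswd) {
      com += PASSWD_MASK + " ";  // verbose/debug output: never echo the password
    } else {
      com += (pass == null || pass.length() == 0 ? "''" : pass) + " ";
    }
    com += (driver == null ? "" : driver);
    return com;
  }

  public static void main(String[] args) {
    // The command actually dispatched keeps the real password ...
    System.out.println(constructCmd("jdbc:hive2://host:10000", "hive", "secret", null, false));
    // ... while the copy printed in verbose mode masks it.
    System.out.println(constructCmd("jdbc:hive2://host:10000", "hive", "secret", null, true));
  }
}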
hive git commit: HIVE-12245: Support column comments for an HBase backed table (Chaoyu Tang, reviewed by Jimmy Xiang)
Repository: hive Updated Branches: refs/heads/master 53fc31931 -> 99a043a05 HIVE-12245: Support column comments for an HBase backed table (Chaoyu Tang, reviewed by Jimmy Xiang) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/99a043a0 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/99a043a0 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/99a043a0 Branch: refs/heads/master Commit: 99a043a05b4d823589e403de9779cf3a4b881ca3 Parents: 53fc319 Author: ctang Authored: Wed Oct 28 22:46:55 2015 -0400 Committer: ctang Committed: Wed Oct 28 22:46:55 2015 -0400 -- .../hive/hbase/HBaseLazyObjectFactory.java | 28 +++ .../apache/hadoop/hive/hbase/HBaseSerDe.java| 5 +-- .../src/test/queries/positive/hbase_queries.q | 4 ++- .../results/positive/external_table_ppd.q.out | 16 - .../positive/hbase_binary_storage_queries.q.out | 32 - .../test/results/positive/hbase_queries.q.out | 37 +++- .../test/results/positive/hbase_timestamp.q.out | 6 ++-- .../positive/hbase_timestamp_format.q.out | 12 +++ 8 files changed, 93 insertions(+), 47 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/99a043a0/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java -- diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java index cb9f9d3..841e8ba 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java @@ -19,7 +19,10 @@ package org.apache.hadoop.hive.hbase; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.Properties; import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory; import org.apache.hadoop.hive.serde2.SerDeException; @@ -53,4 +56,29 @@ public class HBaseLazyObjectFactory { serdeParams.getColumnNames(), columnObjectInspectors, null, serdeParams.getSeparators()[0], serdeParams, ObjectInspectorOptions.JAVA); } + + public static ObjectInspector createLazyHBaseStructInspector(HBaseSerDeParameters hSerdeParams, + Properties tbl) + throws SerDeException { +List columnTypes = hSerdeParams.getColumnTypes(); +ArrayList columnObjectInspectors = new ArrayList( +columnTypes.size()); +for (int i = 0; i < columnTypes.size(); i++) { + if (i == hSerdeParams.getKeyIndex()) { +columnObjectInspectors.add(hSerdeParams.getKeyFactory() +.createKeyObjectInspector(columnTypes.get(i))); + } else { +columnObjectInspectors.add(hSerdeParams.getValueFactories().get(i) +.createValueObjectInspector(columnTypes.get(i))); + } +} +List structFieldComments = tbl.getProperty("columns.comments") == null ? 
+new ArrayList(Collections.nCopies(columnTypes.size(), "")) +: Arrays.asList(tbl.getProperty("columns.comments").split("\0", columnTypes.size())); + +return LazyObjectInspectorFactory.getLazySimpleStructObjectInspector( +hSerdeParams.getColumnNames(), columnObjectInspectors, structFieldComments, +hSerdeParams.getSerdeParams().getSeparators()[0], +hSerdeParams.getSerdeParams(), ObjectInspectorOptions.JAVA); + } } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hive/blob/99a043a0/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java -- diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java index 41d6302..466aabe 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java @@ -125,10 +125,7 @@ public class HBaseSerDe extends AbstractSerDe { serdeParams = new HBaseSerDeParameters(conf, tbl, getClass().getName()); cachedObjectInspector = -HBaseLazyObjectFactory -.createLazyHBaseStructInspector(serdeParams.getSerdeParams(), -serdeParams.getKeyIndex(), serdeParams.getKeyFactory(), -serdeParams.getValueFactories()); +HBaseLazyObjectFactory.createLazyHBaseStructInspector(serdeParams, tbl); cachedHBaseRow = new LazyHBaseRow( (LazySimpleStructObjectInspector) cachedObjectInspector, serdeParams); http://git-wip-us.apache.org/repos/asf/hive/blob/99a043a0/hbase-handler/src/test/queries/positive/hbase_
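The relevant mechanism in the patch above is that per-column comments reach the SerDe as a single NUL-separated table property, defaulting to empty strings when the property is absent. A standalone sketch of that parsing step, assuming only the "columns.comments" property name and the '\0' separator from the diff (the class and method names here are hypothetical):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;

public class ColumnCommentsSketch {
  // Mirrors the parsing in HBaseLazyObjectFactory above: comments live in the
  // "columns.comments" table property, separated by '\0', one per column.
  static List<String> columnComments(Properties tbl, int numColumns) {
    String prop = tbl.getProperty("columns.comments");
    if (prop == null) {
      // No comments at all: one empty comment per column.
      return new ArrayList<String>(Collections.nCopies(numColumns, ""));
    }
    // A positive split limit keeps trailing empty comments instead of dropping them.
    return Arrays.asList(prop.split("\0", numColumns));
  }

  public static void main(String[] args) {
    Properties tbl = new Properties();
    tbl.setProperty("columns.comments", "row key\0first name\0");
    System.out.println(columnComments(tbl, 3)); // prints [row key, first name, ]
  }
}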
hive git commit: HIVE-12276 Fix messages in InvalidTable (Eugene Koifman, reviewed by Jason Dere)
Repository: hive Updated Branches: refs/heads/master 01580af2e -> 53fc31931 HIVE-12276 Fix messages in InvalidTable (Eugene Koifman, reviewed by Jason Dere) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/53fc3193 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/53fc3193 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/53fc3193 Branch: refs/heads/master Commit: 53fc3193194a742429170a7c5a0a809ab3c5341f Parents: 01580af Author: Eugene Koifman Authored: Wed Oct 28 16:00:46 2015 -0700 Committer: Eugene Koifman Committed: Wed Oct 28 16:00:46 2015 -0700 -- .../java/org/apache/hive/hcatalog/streaming/InvalidTable.java| 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/53fc3193/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java -- diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java index 98ef688..d61dfbb 100644 --- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java +++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java @@ -29,10 +29,10 @@ public class InvalidTable extends StreamingException { } public InvalidTable(String db, String table, String msg) { -super(msg); +super(makeMsg(db, table) + ": " + msg, null); } public InvalidTable(String db, String table, Exception inner) { -super(inner.getMessage(), inner); +super(makeMsg(db, table) + ": " + inner.getMessage(), inner); } }
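The fix bakes the identifying context (database and table) into the exception message instead of relying on callers to supply it, and keeps the original cause for stack traces. A minimal sketch of the pattern; the makeMsg body shown here is a plausible reconstruction, not the verbatim Hive helper:

// Sketch of the message pattern adopted in InvalidTable above. The helper body
// is an assumption; only the "context prefix + detail, keep the cause" shape is from the diff.
public class InvalidTableSketch extends Exception {
  private static String makeMsg(String db, String table) {
    return "Invalid table db:" + db + ", table:" + table;
  }

  public InvalidTableSketch(String db, String table, String msg) {
    super(makeMsg(db, table) + ": " + msg, null);
  }

  public InvalidTableSketch(String db, String table, Exception inner) {
    // Preserve both the readable context and the original cause.
    super(makeMsg(db, table) + ": " + inner.getMessage(), inner);
  }
}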
hive git commit: HIVE-12276 Fix messages in InvalidTable (Eugene Koifman, reviewed by Jason Dere)
Repository: hive Updated Branches: refs/heads/branch-1 e075acd5a -> fdfd2cea6 HIVE-12276 Fix messages in InvalidTable (Eugene Koifman, reviewed by Jason Dere) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fdfd2cea Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fdfd2cea Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fdfd2cea Branch: refs/heads/branch-1 Commit: fdfd2cea6bc0e57441f515083624e4b768dc9274 Parents: e075acd Author: Eugene Koifman Authored: Wed Oct 28 15:35:33 2015 -0700 Committer: Eugene Koifman Committed: Wed Oct 28 15:35:33 2015 -0700 -- .../java/org/apache/hive/hcatalog/streaming/InvalidTable.java| 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/fdfd2cea/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java -- diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java index 98ef688..d61dfbb 100644 --- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java +++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java @@ -29,10 +29,10 @@ public class InvalidTable extends StreamingException { } public InvalidTable(String db, String table, String msg) { -super(msg); +super(makeMsg(db, table) + ": " + msg, null); } public InvalidTable(String db, String table, Exception inner) { -super(inner.getMessage(), inner); +super(makeMsg(db, table) + ": " + inner.getMessage(), inner); } }
[1/2] hive git commit: HIVE-12284: Merge master to Spark branch 10/28/2015 [Spark Branch] update some test result (Reviewed by Chao)
Repository: hive Updated Branches: refs/heads/spark c9073aadc -> fd1192914 http://git-wip-us.apache.org/repos/asf/hive/blob/fd119291/ql/src/test/results/clientpositive/spark/vector_outer_join5.q.out -- diff --git a/ql/src/test/results/clientpositive/spark/vector_outer_join5.q.out b/ql/src/test/results/clientpositive/spark/vector_outer_join5.q.out index 2c7cd5b..2b13dc6 100644 --- a/ql/src/test/results/clientpositive/spark/vector_outer_join5.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_outer_join5.q.out @@ -90,18 +90,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: st - Statistics: Num rows: 100 Data size: 372 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 100 Data size: 380 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: _col0 -Statistics: Num rows: 100 Data size: 372 Basic stats: COMPLETE Column stats: NONE +Statistics: Num rows: 100 Data size: 380 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator keys: 0 _col0 (type: tinyint) 1 _col0 (type: tinyint) +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Stage: Stage-1 Spark @@ -113,11 +113,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: s - Statistics: Num rows: 6058 Data size: 2018 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6058 Data size: 2027 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: _col0 -Statistics: Num rows: 6058 Data size: 2018 Basic stats: COMPLETE Column stats: NONE +Statistics: Num rows: 6058 Data size: 2027 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -126,7 +126,7 @@ STAGE PLANS: 1 _col0 (type: tinyint) input vertices: 1 Map 3 - Statistics: Num rows: 6663 Data size: 2219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6663 Data size: 2229 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -136,10 +136,11 @@ STAGE PLANS: sort order: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Reducer 2 +Execution mode: vectorized Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -153,7 +154,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -Execution mode: vectorized Stage: Stage-0 Fetch Operator @@ -208,11 +208,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: sm - Statistics: Num rows: 100 Data size: 372 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 100 Data size: 380 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: _col0 -Statistics: Num rows: 100 Data size: 372 Basic stats: COMPLETE Column stats: NONE +Statistics: Num rows: 100 Data size: 380 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator filter predicates: 0 {(_col1 = 2)} @@ -220,9 +220,9 @@ STAGE PLANS: keys: 0 _col0 (type: tinyint) 1 _col0 (type: tinyint) +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Stage: Stage-1 Sp
[2/2] hive git commit: HIVE-12284: Merge master to Spark branch 10/28/2015 [Spark Branch] update some test result (Reviewed by Chao)
HIVE-12284: Merge master to Spark branch 10/28/2015 [Spark Branch] update some test result (Reviewed by Chao) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fd119291 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fd119291 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fd119291 Branch: refs/heads/spark Commit: fd119291482f5fa75a97dda0bf4282b6bd73a970 Parents: c9073aa Author: Xuefu Zhang Authored: Wed Oct 28 13:53:20 2015 -0700 Committer: Xuefu Zhang Committed: Wed Oct 28 13:53:20 2015 -0700 -- .../spark/vector_inner_join.q.out | 36 ++-- .../spark/vector_outer_join0.q.out | 8 +- .../spark/vector_outer_join1.q.out | 56 +++--- .../spark/vector_outer_join2.q.out | 24 +-- .../spark/vector_outer_join3.q.out | 72 .../spark/vector_outer_join4.q.out | 56 +++--- .../spark/vector_outer_join5.q.out | 176 +-- 7 files changed, 214 insertions(+), 214 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/fd119291/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out -- diff --git a/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out b/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out index bf7090b..e63e1f1 100644 --- a/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out @@ -60,9 +60,9 @@ STAGE PLANS: keys: 0 c (type: int) 1 a (type: int) +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Stage: Stage-1 Spark @@ -97,9 +97,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Stage: Stage-0 Fetch Operator @@ -155,9 +155,9 @@ STAGE PLANS: keys: 0 _col0 (type: int) 1 _col0 (type: int) +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Stage: Stage-1 Spark @@ -192,9 +192,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Stage: Stage-0 Fetch Operator @@ -277,9 +277,9 @@ STAGE PLANS: keys: 0 c (type: int) 1 a (type: int) +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Stage: Stage-1 Spark @@ -314,9 +314,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Stage: Stage-0 Fetch Operator @@ -363,9 +363,9 @@ STAGE PLANS: keys: 0 c (type: int) 1 a (type: int) +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Stage: Stage-1 Spark @@ -400,9 +400,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +Execution mode: vectorized Local Work: Map Reduce Local Work -Execution mode: vectorized Stage: Stage-0
[2/2] hive git commit: HIVE-11564 HBaseSchemaTool should be able to list objects (gates reviewed by Daniel Dai)
HIVE-11564 HBaseSchemaTool should be able to list objects (gates reviewed by Daniel Dai) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/01580af2 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/01580af2 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/01580af2 Branch: refs/heads/master Commit: 01580af2e2401d0512ec95d52c55aeb61c116039 Parents: baf32e2 Author: Alan Gates Authored: Wed Oct 28 12:38:16 2015 -0700 Committer: Alan Gates Committed: Wed Oct 28 12:38:16 2015 -0700 -- .../metastore/hbase/TestHBaseSchemaTool.java| 584 .../metastore/hbase/TestHBaseSchemaTool2.java | 61 ++ .../hive/metastore/hbase/HBaseReadWrite.java| 698 +++ .../hive/metastore/hbase/HBaseSchemaTool.java | 282 .../hadoop/hive/metastore/hbase/HBaseUtils.java | 103 ++- 5 files changed, 1431 insertions(+), 297 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/01580af2/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseSchemaTool.java -- diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseSchemaTool.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseSchemaTool.java new file mode 100644 index 000..d3b8615 --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseSchemaTool.java @@ -0,0 +1,584 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.hadoop.hive.metastore.hbase; + +import org.apache.commons.lang.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.metastore.api.ColumnStatistics; +import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData; +import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc; +import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.Function; +import org.apache.hadoop.hive.metastore.api.FunctionType; +import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; +import org.apache.hadoop.hive.metastore.api.HiveObjectRef; +import org.apache.hadoop.hive.metastore.api.HiveObjectType; +import org.apache.hadoop.hive.metastore.api.LongColumnStatsData; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; +import org.apache.hadoop.hive.metastore.api.Role; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.StringColumnStatsData; +import org.apache.hadoop.hive.metastore.api.Table; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.PrintStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class TestHBaseSchemaTool extends HBaseIntegrationTests { + private static final Log LOG = LogFactory.getLog(TestHBaseSchemaTool.class.getName()); + private String lsep = System.getProperty("line.separator"); + + @BeforeClass + public static void startup() throws Exception { +HBaseIntegrationTests.startMiniCluster(); + } + + @AfterClass + public static void shutdown() throws Exception { +HBaseIntegrationTests.shutdownMiniCluster(); + } + + @Before + public void setup() throws IOException { +setupHBaseStore(); + } + + @Test + public void listTables() throws Exception { +ByteArrayOutputStream outStr = new ByteArrayOutputStream(); +PrintStream out = new PrintStream(outSt
[1/2] hive git commit: HIVE-11564 HBaseSchemaTool should be able to list objects (gates reviewed by Daniel Dai)
Repository: hive Updated Branches: refs/heads/master baf32e2e2 -> 01580af2e http://git-wip-us.apache.org/repos/asf/hive/blob/01580af2/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java -- diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java index f4f30d7..f4df2e2 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hive.metastore.hbase; import com.google.common.collect.Lists; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; - import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -74,10 +73,8 @@ import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; -import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; -import java.util.Deque; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -847,15 +844,67 @@ class HBaseUtils { } /** + * Deserialize a partition key when you know nothing about it. That is, you do not know what + * dbname, tablename it came from. + * @param key the key fetched from HBase + * @param callback A reference to the calling HBaseReadWrite object. This has to be done as a + * callback because we have to first deserialize the database name and table + * name, and then fetch the table information, and then we will know how to + * desierliaze the rest of the key. + * @return a list that includes the dbname, tablename, and partition values + * @throws IOException + */ + static List deserializePartitionKey(byte[] key, HBaseReadWrite callback) + throws IOException { +List keyParts = +desierliazeDbNameTableNameFromPartitionKey(key, callback.getConf()); +Table table = callback.getTable(keyParts.get(0), keyParts.get(1)); +keyParts.addAll(deserializePartitionKey(table.getPartitionKeys(), key, callback.getConf())); +return keyParts; + } + + /** * Deserialize a partition. This version should be used when the partition key is not already - * known (eg a scan). + * known and the database and table name are not known either (eg a full scan). Because the +* dbname and tablename (and thus the partition columns) are not known a priori this version +* has to go fetch the table after it figures out which table. If you already have the table +* object you should use +* {@link #deserializePartition(String,String,List,byte[],byte[],Configuration)} * @param key the key fetched from HBase * @param serialized the value fetched from HBase + * @param callback A reference to the calling HBaseReadWrite object. This has to be done as a + * callback because we have to first deserialize the database name and table + * name, and then fetch the table information, and then we will know how to + * desierliaze the rest of the key. 
* @return A struct that contains the partition plus parts of the storage descriptor */ - static StorageDescriptorParts deserializePartition(String dbName, String tableName, List partitions, - byte[] key, byte[] serialized, Configuration conf) throws InvalidProtocolBufferException { -List keys = deserializePartitionKey(partitions, key, conf); + static StorageDescriptorParts deserializePartition(byte[] key, byte[] serialized, + HBaseReadWrite callback) + throws IOException { +List dbNameTableName = +desierliazeDbNameTableNameFromPartitionKey(key, callback.getConf()); +Table table = callback.getTable(dbNameTableName.get(0), dbNameTableName.get(1)); +List keys = deserializePartitionKey(table.getPartitionKeys(), key, callback.getConf()); +return deserializePartition(dbNameTableName.get(0), dbNameTableName.get(1), keys, serialized); + } + + /** + * Deserialize a partition. This version should be used when you know the dbname and tablename + * but not the partition values. + * @param dbName database this partition is in + * @param tableName table this partition is in + * @param partitions schemas for the partition columns of this table + * @param key key fetched from HBase + * @param serialized serialized version of the partition + * @param conf configuration file + * @return + * @throws InvalidProtocolBufferException + */ + static StorageDescriptorParts deserializePartition(String dbName, String tableName, + List p
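The javadoc above describes a two-phase decode: first recover the database and table name from the key, then fetch the table through the callback so the remaining partition values can be decoded against its schema. A generic sketch of that shape, using slash-separated string keys and a hypothetical stand-in for the HBaseReadWrite callback (the real keys are binary and typed):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

// Generic sketch of the callback-driven decode in HBaseUtils above. All types
// here are hypothetical stand-ins; only the two-phase shape comes from the javadoc.
public class PartitionKeySketch {
  interface SchemaLookup {
    // Plays the role of callback.getTable(db, table).getPartitionKeys()
    List<String> partitionColumns(String db, String table) throws IOException;
  }

  // Phase 1: the key prefix always encodes dbName and tableName.
  static String[] dbAndTable(String key) {
    String[] parts = key.split("/", 3);
    return new String[] { parts[0], parts[1] };
  }

  // Phase 2: once the table is fetched we know how many partition values
  // follow (and, in the real code, their types), so the rest can be decoded.
  static List<String> decode(String key, SchemaLookup lookup) throws IOException {
    String[] dbTable = dbAndTable(key);
    List<String> partCols = lookup.partitionColumns(dbTable[0], dbTable[1]);
    List<String> result = new ArrayList<>();
    result.add(dbTable[0]);
    result.add(dbTable[1]);
    String[] values = key.split("/");
    for (int i = 0; i < partCols.size(); i++) {
      result.add(values[2 + i]); // one serialized value per partition column
    }
    return result;
  }
}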
hive git commit: HIVE-12278: Skip logging lineage for explain queries (Jimmy, reviewed by Chaoyu)
Repository: hive Updated Branches: refs/heads/branch-1 40b2525eb -> e075acd5a HIVE-12278: Skip logging lineage for explain queries (Jimmy, reviewed by Chaoyu) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e075acd5 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e075acd5 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e075acd5 Branch: refs/heads/branch-1 Commit: e075acd5a5c071def350f94e2a9a73287b5f3973 Parents: 40b2525 Author: Jimmy Xiang Authored: Tue Oct 27 11:57:29 2015 -0700 Committer: Jimmy Xiang Committed: Wed Oct 28 10:59:43 2015 -0700 -- ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/e075acd5/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java index beded59..303bfdf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java @@ -134,7 +134,8 @@ public class LineageLogger implements ExecuteWithHookContext { Index index = hookContext.getIndex(); SessionState ss = SessionState.get(); if (ss != null && index != null -&& OPERATION_NAMES.contains(plan.getOperationName())) { +&& OPERATION_NAMES.contains(plan.getOperationName()) +&& !plan.isExplain()) { try { StringBuilderWriter out = new StringBuilderWriter(1024); JsonWriter writer = new JsonWriter(out);
hive git commit: HIVE-12278: Skip logging lineage for explain queries (Jimmy, reviewed by Chaoyu)
Repository: hive Updated Branches: refs/heads/master 553374447 -> baf32e2e2 HIVE-12278: Skip logging lineage for explain queries (Jimmy, reviewed by Chaoyu) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/baf32e2e Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/baf32e2e Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/baf32e2e Branch: refs/heads/master Commit: baf32e2e23552fddbc8f650b4dd820304750aced Parents: 5533744 Author: Jimmy Xiang Authored: Tue Oct 27 11:57:29 2015 -0700 Committer: Jimmy Xiang Committed: Wed Oct 28 10:55:38 2015 -0700 -- ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/baf32e2e/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java index 64220f2..1146cae 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java @@ -134,7 +134,8 @@ public class LineageLogger implements ExecuteWithHookContext { Index index = hookContext.getIndex(); SessionState ss = SessionState.get(); if (ss != null && index != null -&& OPERATION_NAMES.contains(plan.getOperationName())) { +&& OPERATION_NAMES.contains(plan.getOperationName()) +&& !plan.isExplain()) { try { StringBuilderWriter out = new StringBuilderWriter(1024); JsonWriter writer = new JsonWriter(out);
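The one-line guard is the whole fix: the hook already filtered on operation name and now also skips EXPLAIN plans, which would otherwise emit lineage for queries that write nothing. A stripped-down sketch of the guard logic; QueryPlanInfo and the operation-name values are hypothetical stand-ins for Hive's QueryPlan and the hook's real HiveOperation-based set:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Sketch of the guard added to LineageLogger above. The real hook also checks
// SessionState and the lineage Index; the set contents here are placeholders.
public class LineageGuardSketch {
  private static final Set<String> OPERATION_NAMES =
      new HashSet<>(Arrays.asList("QUERY", "CREATETABLE_AS_SELECT"));

  static class QueryPlanInfo {
    String operationName;
    boolean explain;
  }

  static boolean shouldLogLineage(QueryPlanInfo plan) {
    // Same shape as the patched condition: right operation type AND not an EXPLAIN.
    return OPERATION_NAMES.contains(plan.operationName) && !plan.explain;
  }
}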
[12/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java -- diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java index d0e7ac6..91a9cb1 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java @@ -23,8 +23,8 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hive.hcatalog.templeton.JsonBuilder; @@ -35,7 +35,7 @@ import org.apache.hive.hcatalog.templeton.JsonBuilder; */ public class JobState { - private static final Log LOG = LogFactory.getLog(JobState.class); + private static final Logger LOG = LoggerFactory.getLogger(JobState.class); private String id; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobStateTracker.java -- diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobStateTracker.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobStateTracker.java index 41fd82f..9a7e093 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobStateTracker.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobStateTracker.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.curator.framework.CuratorFramework; import org.apache.hadoop.conf.Configuration; import org.apache.zookeeper.CreateMode; @@ -56,7 +56,7 @@ public class JobStateTracker { private String jobid; // The logger - private static final Log LOG = LogFactory.getLog(JobStateTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(JobStateTracker.class); /** * Constructor for a new node -- takes the jobid of an existing job http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java -- diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java index a5ff67e..41ddb9c 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java @@ -18,8 +18,8 @@ */ package org.apache.hive.hcatalog.templeton.tool; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -73,7 +73,7 @@ public class LaunchMapper extends Mapper * This class currently sends everything to stderr, but it should 
probably use Log4J - * it will end up in 'syslog' of this Map task. For example, look for KeepAlive heartbeat msgs. */ - private static final Log LOG = LogFactory.getLog(LaunchMapper.class); + private static final Logger LOG = LoggerFactory.getLogger(LaunchMapper.class); /** * When a Pig job is submitted and it uses HCat, WebHCat may be configured to ship hive tar * to the target node. Pig on the target node needs some env vars configured. @@ -481,7 +481,7 @@ public class LaunchMapper extends Mapper try { state.close(); } catch (IOException e) { - LOG.warn(e); + LOG.warn("Caught exception while closing job state ", e); } } } http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LogRetriever.java -- diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LogRetriever.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LogRetriever.java index
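The recurring change in this patch series is mechanical, but one detail is easy to miss: unlike commons-logging, slf4j's Logger has no warn(Throwable) overload, which is why the bare LOG.warn(e) calls above each gained a message string. A minimal sketch of the before/after pattern:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch of the HIVE-12237 migration: swap commons-logging for the slf4j facade.
public class Slf4jMigrationSketch {
  // Before: private static final Log LOG = LogFactory.getLog(JobState.class);
  private static final Logger LOG = LoggerFactory.getLogger(Slf4jMigrationSketch.class);

  void close(java.io.Closeable state) {
    try {
      state.close();
    } catch (java.io.IOException e) {
      // Before: LOG.warn(e);  -- does not compile against slf4j, so a message is added
      LOG.warn("Caught exception while closing job state ", e);
    }
    // slf4j also brings parameterized logging, avoiding eager string concatenation:
    LOG.debug("closed {} at {}", state, System.currentTimeMillis());
  }
}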
[13/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java -- diff --git a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java index 6ffaf94..5d475f4 100644 --- a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java +++ b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java @@ -23,13 +23,14 @@ import com.google.common.base.Stopwatch; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; import org.apache.hadoop.util.Daemon; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.lang.management.GarbageCollectorMXBean; import java.lang.management.ManagementFactory; @@ -41,7 +42,7 @@ import java.util.Set; * Based on the JvmPauseMonitor from Hadoop. */ public class JvmPauseMonitor { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( JvmPauseMonitor.class); /** The target sleep time */ @@ -164,8 +165,8 @@ public class JvmPauseMonitor { return "count=" + gcCount + " time=" + gcTimeMillis + "ms"; } -private long gcCount; -private long gcTimeMillis; +private final long gcCount; +private final long gcTimeMillis; } private class Monitor implements Runnable { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/common/src/java/org/apache/hadoop/hive/common/LogUtils.java -- diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java index 3ca5c0f..3be8733 100644 --- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java @@ -21,11 +21,11 @@ package org.apache.hadoop.hive.common; import java.io.File; import java.net.URL; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.logging.log4j.core.config.Configurator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utilities common to logging operations. @@ -34,7 +34,7 @@ public class LogUtils { private static final String HIVE_L4J = "hive-log4j2.xml"; private static final String HIVE_EXEC_L4J = "hive-exec-log4j2.xml"; - private static final Log l4j = LogFactory.getLog(LogUtils.class); + private static final Logger l4j = LoggerFactory.getLogger(LogUtils.class); @SuppressWarnings("serial") public static class LogInitializationException extends Exception { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java -- diff --git a/common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java b/common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java deleted file mode 100644 index 35a45d1..000 --- a/common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.common; - -import java.util.Stack; - -import org.apache.log4j.NDC; - -//TODO: cloned from TEZ-2003; replace when that's in a release. -public abstract class RunnableWithNdc implements Runnable { - private final Stack ndcStack; - - public RunnableWithNdc() { -ndcStack = NDC.cloneStack(); - } - - @Override - public final void run() { -NDC.inherit(ndcStack); -try { - runInternal(); -} fina
[02/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java -- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java index 39c0571..2ba0b29 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.*; import org.apache.hadoop.hive.common.ValidTxnList; @@ -60,7 +60,7 @@ import java.util.concurrent.atomic.AtomicBoolean; */ public abstract class CompactorTest { static final private String CLASS_NAME = CompactorTest.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); protected CompactionTxnHandler txnHandler; protected IMetaStoreClient ms; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java -- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java index 0db732c..bca5002 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.txn.compactor; import org.junit.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.*; @@ -37,7 +37,7 @@ import java.util.concurrent.atomic.AtomicBoolean; */ public class TestCleaner extends CompactorTest { - static final private Log LOG = LogFactory.getLog(TestCleaner.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TestCleaner.class.getName()); public TestCleaner() throws Exception { super(); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java -- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java index 0b0b1da..e9b4154 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.txn.compactor; import org.junit.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.*; import org.apache.hadoop.hive.metastore.txn.TxnDbUtil; @@ -38,7 +38,7 @@ import java.util.concurrent.TimeUnit; */ public class TestInitiator extends CompactorTest { static final private String CLASS_NAME = TestInitiator.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); 
public TestInitiator() throws Exception { super(); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java -- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java index 245e839..fe1d0d3 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.*; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.*; @@ -46,7 +46,7 @@ import java.util.Map; */ public class TestWorker extends CompactorTest { static final private String CLASS_NAME = TestWorker.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); +
[09/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java index c5d8aea..7fc3226 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java @@ -21,8 +21,8 @@ import java.lang.management.ManagementFactory; import java.lang.management.MemoryMXBean; import java.text.NumberFormat; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; @@ -31,7 +31,7 @@ import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; * for HashTableSinkOperator. */ public class MapJoinMemoryExhaustionHandler { - private static final Log LOG = LogFactory.getLog(MapJoinMemoryExhaustionHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(MapJoinMemoryExhaustionHandler.class); public final MemoryMXBean memoryMXBean; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java index bed7d63..5cbf764 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java @@ -29,11 +29,10 @@ import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Properties; -import java.util.Set; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.FileStatus; @@ -84,15 +83,12 @@ import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.Counters; -import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; -import org.apache.hadoop.mapred.Partitioner; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.security.UserGroupInformation; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.Appender; import org.apache.logging.log4j.core.appender.FileAppender; import org.apache.logging.log4j.core.appender.RollingFileAppender; @@ -115,7 +111,7 @@ public class ExecDriver extends Task implements Serializable, Hadoop public static MemoryMXBean memoryMXBean; protected HadoopJobExecHelper jobExecHelper; - protected static transient final Log LOG = LogFactory.getLog(ExecDriver.class); + protected static transient final Logger LOG = LoggerFactory.getLogger(ExecDriver.class); private RunningJob rj; @@ -473,7 +469,7 @@ public class ExecDriver extends Task implements Serializable, Hadoop jobID = rj.getID().toString(); } } catch (Exception e) { - LOG.warn(e); + 
LOG.warn("Failed while cleaning up ", e); } finally { HadoopJobExecHelper.runningJobs.remove(rj); } @@ -695,7 +691,7 @@ public class ExecDriver extends Task implements Serializable, Hadoop if (noLog) { // If started from main(), and noLog is on, we should not output // any logs. To turn the log on, please set -Dtest.silent=false - Logger logger = org.apache.logging.log4j.LogManager.getRootLogger(); + org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger(); NullAppender appender = NullAppender.createNullAppender(); appender.addToLogger(logger.getName(), Level.ERROR); appender.start(); @@ -703,7 +699,7 @@ public class ExecDriver extends Task implements Serializable, Hadoop setupChildLog4j(conf); } -Log LOG = LogFactory.getLog(ExecDriver.class.getName()); +Logger LOG = LoggerFactory.getLogger(ExecDriver.class.getName()); LogHelper console = new LogHelper(LOG, isSilent); if (planFileName == null) { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java -
[11/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java -- diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java index cf3cc78..784c631 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java @@ -22,8 +22,8 @@ import com.google.common.annotations.VisibleForTesting; import com.google.protobuf.BlockingService; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos; @@ -45,7 +45,7 @@ import org.apache.hadoop.hive.llap.daemon.LlapDaemonProtocolBlockingPB; public class LlapDaemonProtocolServerImpl extends AbstractService implements LlapDaemonProtocolBlockingPB { - private static final Log LOG = LogFactory.getLog(LlapDaemonProtocolServerImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(LlapDaemonProtocolServerImpl.class); private final int numHandlers; private final ContainerRunner containerRunner; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java -- diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java index 5c95086..3b38597 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java @@ -29,7 +29,6 @@ import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.hive.common.CallableWithNdc; import org.apache.hadoop.hive.llap.daemon.FragmentCompletionHandler; import org.apache.hadoop.hive.llap.daemon.HistoryLogger; import org.apache.hadoop.hive.llap.daemon.KilledTaskHandler; @@ -47,6 +46,7 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; +import org.apache.tez.common.CallableWithNdc; import org.apache.tez.common.TezCommonUtils; import org.apache.tez.common.security.JobTokenIdentifier; import org.apache.tez.common.security.TokenCache; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java -- diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java index 57aa1e7..621a6a6 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java @@ -31,11 +31,12 @@ 
import org.apache.hadoop.hive.llap.daemon.registry.ServiceRegistry; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class LlapFixedRegistryImpl implements ServiceRegistry { - private static final Logger LOG = Logger.getLogger(LlapFixedRegistryImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(LlapFixedRegistryImpl.class); @InterfaceAudience.Private // This is primarily for testing to avoid the host lookup @@ -219,4 +220,4 @@ public class LlapFixedRegistryImpl implements ServiceRegistry { public String toString() { return String.format("FixedRegistry hosts=%s", StringUtils.join(",", this.hosts)); } -} \ No newline at end of file +} http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java -- diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry
[05/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java index c4a40bf..2a415d5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java @@ -47,7 +47,6 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.SqlFunctionConverter; import org.apache.hadoop.hive.ql.parse.SemanticException; -import com.esotericsoftware.minlog.Log; import com.google.common.collect.ImmutableList; public final class HiveJoinAddNotNullRule extends RelOptRule { @@ -92,7 +91,6 @@ public final class HiveJoinAddNotNullRule extends RelOptRule { try { joinPredInfo = HiveCalciteUtil.JoinPredicateInfo.constructJoinPredicateInfo(join); } catch (CalciteSemanticException e) { - Log.trace("Failed to add is not null filter on join ", e); return; } @@ -183,7 +181,7 @@ public final class HiveJoinAddNotNullRule extends RelOptRule { } return newConditions; } - + private static Map splitCondition(RexNode condition) { Map newConditions = new HashMap(); if (condition.getKind() == SqlKind.AND) { @@ -196,10 +194,10 @@ public final class HiveJoinAddNotNullRule extends RelOptRule { } return newConditions; } - + private static RelNode createHiveFilterConjunctiveCondition(FilterFactory filterFactory, RexBuilder rexBuilder, RelNode input, Collection conditions) { final RexNode newCondition = RexUtil.composeConjunction(rexBuilder, conditions, false); return filterFactory.createFilter(input, newCondition); } -} \ No newline at end of file +} http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinToMultiJoinRule.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinToMultiJoinRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinToMultiJoinRule.java index 35dbda9..a8b16cc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinToMultiJoinRule.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinToMultiJoinRule.java @@ -35,8 +35,8 @@ import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil.JoinPredicateInfo; @@ -59,7 +59,7 @@ public class HiveJoinToMultiJoinRule extends RelOptRule { private final ProjectFactory projectFactory; - private static transient final Log LOG = LogFactory.getLog(HiveJoinToMultiJoinRule.class); + private static transient final Logger LOG = LoggerFactory.getLogger(HiveJoinToMultiJoinRule.class); //~ Constructors --- http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java -- diff 
--git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java index 5824127..82d9600 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java @@ -18,10 +18,8 @@ package org.apache.hadoop.hive.ql.optimizer.calcite.rules; import java.util.ArrayList; -import java.util.Collection; import java.util.EnumSet; import java.util.List; -import java.util.Map.Entry; import java.util.Set; import org.apache.calcite.plan.RelOptPredicateList; @@ -40,8 +38,8 @@ import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.sql.SqlKind; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactor
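The mechanical pattern repeated across the files in this patch is the same: the commons-logging imports and factory call are swapped for their slf4j equivalents. A minimal before/after sketch (the class name here is hypothetical):

    // Before (commons-logging, removed by HIVE-12237):
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //   private static final Log LOG = LogFactory.getLog(SomeRule.class);

    // After (slf4j facade):
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public final class SomeRule {
      private static final Logger LOG = LoggerFactory.getLogger(SomeRule.class);
    }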
[08/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java index c5539ff..0d84340 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java @@ -26,8 +26,8 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.session.SessionState; @@ -43,7 +43,7 @@ import org.apache.hadoop.security.UserGroupInformation; */ public class TezSessionPoolManager { - private static final Log LOG = LogFactory.getLog(TezSessionPoolManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TezSessionPoolManager.class); private BlockingQueue defaultQueuePool; private Semaphore llapQueue; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java index 58be1dc..07f26be 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java @@ -42,8 +42,6 @@ import javax.security.auth.login.LoginException; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.io.FilenameUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -69,13 +67,15 @@ import org.apache.tez.serviceplugins.api.ContainerLauncherDescriptor; import org.apache.tez.serviceplugins.api.ServicePluginsDescriptor; import org.apache.tez.serviceplugins.api.TaskCommunicatorDescriptor; import org.apache.tez.serviceplugins.api.TaskSchedulerDescriptor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Holds session state related to Tez */ public class TezSessionState { - private static final Log LOG = LogFactory.getLog(TezSessionState.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TezSessionState.class.getName()); private static final String TEZ_DIR = "_tez_session_dir"; public static final String LLAP_SERVICE = "LLAP"; private static final String LLAP_SCHEDULER = "org.apache.tez.dag.app.rm.LlapTaskSchedulerService"; @@ -188,7 +188,7 @@ public class TezSessionState { this.conf = conf; this.queueName = conf.get("tez.queue.name"); this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS); - + final boolean llapMode = "llap".equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_MODE)); UserGroupInformation ugi = Utils.getUGI(); @@ -401,7 +401,7 @@ public class TezSessionState { /** * Close a tez session. Will cleanup any tez/am related resources. After closing a session no * further DAGs can be executed against it. - * + * * @param keepTmpDir * whether or not to remove the scratch dir at the same time. 
* @throws Exception http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValueInputMerger.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValueInputMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValueInputMerger.java index c8e9606..698fa7f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValueInputMerger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValueInputMerger.java @@ -25,8 +25,8 @@ import java.util.List; import java.util.Map; import java.util.PriorityQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -47,7 +47,7 @@ import org.apache.tez.runtime.library.api.KeyValueReader; @SuppressWarnings("deprecation") public class KeyValueInputMerger extends KeyValueReader { - public static final Log l4j = Lo
[04/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java index eeccc4b..d41253f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java @@ -27,8 +27,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; @@ -74,7 +74,7 @@ import org.apache.hadoop.hive.shims.ShimLoader; public class MapReduceCompiler extends TaskCompiler { - protected final Log LOG = LogFactory.getLog(MapReduceCompiler.class); + protected final Logger LOG = LoggerFactory.getLogger(MapReduceCompiler.class); public MapReduceCompiler() { } http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java index 1739fd2..a17696a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -44,7 +44,7 @@ import org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer; * of the user performing the drop */ public class MetaDataExportListener extends MetaStorePreEventListener { - public static final Log LOG = LogFactory.getLog(MetaDataExportListener.class); + public static final Logger LOG = LoggerFactory.getLogger(MetaDataExportListener.class); /** Configure the export listener */ public MetaDataExportListener(Configuration config) { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java index e0cd398..2370ec0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java @@ -31,8 +31,8 @@ import java.util.Stack; import org.antlr.runtime.CommonToken; import org.antlr.runtime.tree.TreeWizard; import org.antlr.runtime.tree.TreeWizard.ContextVisitor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -102,7 +102,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; public class PTFTranslator { - private static final Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.parse"); + private static final Logger LOG = 
LoggerFactory.getLogger("org.apache.hadoop.hive.ql.parse"); HiveConf hCfg; LeadLagInfo llInfo; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java index debd5ac..c33bb66 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java @@ -29,8 +29,8 @@ import org.antlr.runtime.TokenStream; import org.antlr.runtime.tree.CommonTree; import org.antlr.runtime.tree.CommonTreeAdaptor; import org.antlr.runtime.tree.TreeAdaptor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.Context; /** @@ -39,7 +39,7 @@ import org.apache.hadoop.hive.ql.Context; */ public class ParseDriver { - private static final Log LOG = LogFactory.getLog("hive.ql.parse.ParseDriver"); + private static final Logger LOG
[10/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java -- diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java index c465c84..91abb80 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java @@ -26,8 +26,8 @@ import java.sql.SQLTransactionRollbackException; import java.sql.Statement; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.ShimLoader; @@ -37,7 +37,7 @@ import org.apache.hadoop.hive.shims.ShimLoader; */ public final class TxnDbUtil { - static final private Log LOG = LogFactory.getLog(TxnDbUtil.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TxnDbUtil.class.getName()); private static final String TXN_MANAGER = "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager"; private static int deadlockCnt = 0; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java -- diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java index ca485fa..5c5e6ff 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java @@ -22,8 +22,8 @@ import com.jolbox.bonecp.BoneCPDataSource; import org.apache.commons.dbcp.ConnectionFactory; import org.apache.commons.dbcp.DriverManagerConnectionFactory; import org.apache.commons.dbcp.PoolableConnectionFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.commons.dbcp.PoolingDataSource; import org.apache.commons.pool.ObjectPool; @@ -82,7 +82,7 @@ public class TxnHandler { static final private int ALLOWED_REPEATED_DEADLOCKS = 10; static final private int TIMED_OUT_TXN_ABORT_BATCH_SIZE = 100; - static final private Log LOG = LogFactory.getLog(TxnHandler.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TxnHandler.class.getName()); static private DataSource connPool; static private boolean doRetryOnConnPool = false; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java -- diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java index 00bbad7..2eb8354 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.metastore; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreInitContext; 
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java -- diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java index 7e46523..9acf9d7 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java @@ -33,8 +33,8 @@ import java.util.Set; import org.apache.commons.lang.ClassUtils; import org.apache.commons.lang.builder.EqualsBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; @@ -42,7 +42,7 @@ import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.thrift.TException; class VerifyingObjectStore extends ObjectStore { - priva
[03/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java index 65c3b6b..ab3d3cf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java @@ -20,14 +20,14 @@ package org.apache.hadoop.hive.ql.stats; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.mapred.Reporter; public class CounterStatsPublisher implements StatsPublisher { - private static final Log LOG = LogFactory.getLog(CounterStatsPublisher.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CounterStatsPublisher.class.getName()); private Reporter reporter; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java index 053fa18..a53fcc0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.stats; import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.StatsSetupConst.StatDB; import org.apache.hadoop.hive.conf.HiveConf; @@ -38,7 +38,7 @@ import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_STATS_KEY_PREFI */ public final class StatsFactory { - static final private Log LOG = LogFactory.getLog(StatsFactory.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StatsFactory.class.getName()); private Class publisherImplementation; private Class aggregatorImplementation; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java index cc8c9e8..e1f8ebc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java @@ -22,8 +22,8 @@ import com.google.common.base.Joiner; import com.google.common.collect.Lists; import com.google.common.math.LongMath; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -98,7 +98,7 @@ import java.util.Set; public class StatsUtils { - private static final Log LOG = LogFactory.getLog(StatsUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StatsUtils.class.getName()); /** http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java index f5303ae..5c5fafa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java @@ -24,8 +24,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -38,7 +38,7 @@ import org.apache.hadoop.hive.ql.stats.StatsCollectionContext; import com.esotericsoftware.kryo.io.Input; public class FSStatsAggregator implements StatsAggregator { - private final Log LOG = LogFactory.getLog(this.getClass().getName()); + private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); private List>> statsList; private Map> statsMap; private FileSystem fs; @@ -69,7 +69,7 @@ public class FSStatsAggregator implements
[01/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
Repository: hive Updated Branches: refs/heads/master 34ba81ae7 -> 553374447 http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java -- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java index 1f3806e..56434a7 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hive.serde2.lazybinary.fast; import java.io.EOFException; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.serde2.fast.DeserializeRead; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -57,7 +57,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; * called. */ public class LazyBinaryDeserializeRead implements DeserializeRead { - public static final Log LOG = LogFactory.getLog(LazyBinaryDeserializeRead.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazyBinaryDeserializeRead.class.getName()); private PrimitiveTypeInfo[] primitiveTypeInfos; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java -- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java index 253b514..ebe4181 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.sql.Date; import java.sql.Timestamp; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -46,7 +46,7 @@ import org.apache.hive.common.util.DateUtils; * This is an alternative way to serialize than what is provided by LazyBinarySerDe. 
*/ public class LazyBinarySerializeWrite implements SerializeWrite { - public static final Log LOG = LogFactory.getLog(LazyBinarySerializeWrite.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazyBinarySerializeWrite.class.getName()); private Output output; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java -- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java index 09e9108..56597a2 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java @@ -27,8 +27,8 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -89,7 +89,7 @@ import org.apache.hadoop.util.StringUtils; */ public final class ObjectInspectorUtils { - protected final static Log LOG = LogFactory.getLog(ObjectInspectorUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ObjectInspectorUtils.class.getName()); /** * This enum controls how we copy primitive objects. http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java -- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java index 87a072c..227e8a9 100644 --- a/ser
[14/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
HIVE-12237 : Use slf4j as logging facade Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/55337444 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/55337444 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/55337444 Branch: refs/heads/master Commit: 55337444784e2b211725ac64e5ae08eb507a6467 Parents: 34ba81a Author: Ashutosh Chauhan Authored: Wed Oct 28 08:33:16 2015 -0700 Committer: Ashutosh Chauhan Committed: Wed Oct 28 08:34:06 2015 -0700 -- accumulo-handler/pom.xml| 4 - .../hadoop/hive/accumulo/LazyAccumuloRow.java | 5 +- .../org/apache/hadoop/hive/accumulo/Utils.java | 5 +- .../hive/accumulo/columns/ColumnMapper.java | 5 +- .../accumulo/columns/ColumnMappingFactory.java | 5 +- .../columns/HiveAccumuloColumnMapping.java | 5 +- .../hive/accumulo/mr/HiveAccumuloSplit.java | 5 +- .../predicate/AccumuloPredicateHandler.java | 5 +- .../predicate/PrimitiveComparisonFilter.java| 5 +- .../hive/accumulo/predicate/PushdownTuple.java | 5 +- .../predicate/compare/StringCompare.java| 3 - .../accumulo/serde/AccumuloRowSerializer.java | 5 +- .../accumulo/serde/AccumuloSerDeParameters.java | 5 +- .../serde/CompositeAccumuloRowIdFactory.java| 5 +- .../predicate/TestAccumuloPredicateHandler.java | 3 - .../serde/DelimitedAccumuloRowIdFactory.java| 5 +- .../serde/FirstCharAccumuloCompositeRowId.java | 5 +- .../hive/accumulo/serde/TestAccumuloSerDe.java | 3 - beeline/pom.xml | 5 - .../apache/hive/beeline/ClassNameCompleter.java | 6 +- .../org/apache/hive/beeline/SQLCompleter.java | 6 +- .../apache/hive/beeline/util/QFileClient.java | 8 +- .../hive/beeline/TestBeelineArgParsing.java | 6 +- .../apache/hive/beeline/cli/TestHiveCli.java| 6 +- cli/pom.xml | 5 - .../org/apache/hadoop/hive/cli/CliDriver.java | 13 +- .../hadoop/hive/cli/OptionsProcessor.java | 6 +- common/pom.xml | 10 -- .../hadoop/hive/common/CallableWithNdc.java | 44 -- .../hadoop/hive/common/CompressionUtils.java| 22 +-- .../apache/hadoop/hive/common/FileUtils.java| 8 +- .../hadoop/hive/common/JvmPauseMonitor.java | 11 +- .../org/apache/hadoop/hive/common/LogUtils.java | 6 +- .../hadoop/hive/common/RunnableWithNdc.java | 43 -- .../apache/hadoop/hive/common/ServerUtils.java | 6 +- .../common/jsonexplain/tez/TezJsonParser.java | 8 +- .../metrics/metrics2/CodahaleMetrics.java | 17 ++- .../org/apache/hadoop/hive/conf/HiveConf.java | 8 +- .../hadoop/hive/conf/SystemVariables.java | 6 +- .../hadoop/hive/conf/VariableSubstitution.java | 8 +- .../apache/hadoop/hive/ql/log/PerfLogger.java | 8 +- .../java/org/apache/hive/common/HiveCompat.java | 6 +- .../hive/common/util/FixedSizedObjectPool.java | 6 +- .../hive/common/util/HiveStringUtils.java | 2 +- .../apache/hive/common/util/HiveTestUtils.java | 8 +- .../hive/common/util/HiveVersionInfo.java | 6 +- .../hive/common/util/ShutdownHookManager.java | 6 +- .../common/util/TestFixedSizedObjectPool.java | 9 +- contrib/pom.xml | 5 - .../genericudf/example/GenericUDFDBOutput.java | 8 +- .../hive/contrib/serde2/MultiDelimitSerDe.java | 5 +- .../hadoop/hive/contrib/serde2/RegexSerDe.java | 6 +- .../hive/contrib/serde2/TypedBytesSerDe.java| 6 +- .../contrib/serde2/s3/S3LogDeserializer.java| 6 +- hbase-handler/pom.xml | 5 - .../AbstractHBaseKeyPredicateDecomposer.java| 8 +- .../hive/hbase/CompositeHBaseKeyFactory.java| 8 +- .../apache/hadoop/hive/hbase/HBaseSerDe.java| 6 +- .../hadoop/hive/hbase/HBaseSerDeHelper.java | 8 +- .../hadoop/hive/hbase/HBaseStorageHandler.java | 6 +- .../HBaseTableSnapshotInputFormatUtil.java | 6 +- 
.../hive/hbase/HiveHBaseTableInputFormat.java | 6 +- .../hive/hbase/HiveHBaseTableOutputFormat.java | 6 +- .../hive/hbase/HiveHFileOutputFormat.java | 6 +- .../org/apache/hive/hcatalog/cli/HCatCli.java | 8 +- .../mapreduce/HCatBaseOutputFormat.java | 2 - .../hive/hcatalog/mapreduce/SpecialCases.java | 6 +- .../listener/DbNotificationListener.java| 6 +- .../messaging/json/JSONMessageFactory.java | 6 +- .../streaming/AbstractRecordWriter.java | 6 +- .../streaming/DelimitedInputWriter.java | 6 +- .../hive/hcatalog/streaming/HiveEndPoint.java | 6 +- .../streaming/StreamingIntegrationTester.java | 6 +- .../hcatalog/api/repl/CommandTestUtils.java | 6 +- .../api/repl/commands/TestCommands.java | 6 +- .../hive/hcatalog/templeton/AppConfig.java
[06/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java index 7d7e7c0..20e1147 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.lockmgr; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData; import org.apache.hadoop.hive.ql.metadata.*; @@ -33,7 +33,7 @@ import java.util.concurrent.locks.ReentrantLock; */ public class EmbeddedLockManager implements HiveLockManager { - private static final Log LOG = LogFactory.getLog("EmbeddedHiveLockManager"); + private static final Logger LOG = LoggerFactory.getLogger("EmbeddedHiveLockManager"); private final Node root = new Node(); @@ -46,41 +46,50 @@ public class EmbeddedLockManager implements HiveLockManager { public EmbeddedLockManager() { } + @Override public void setContext(HiveLockManagerCtx ctx) throws LockException { this.ctx = ctx; refresh(); } + @Override public HiveLock lock(HiveLockObject key, HiveLockMode mode, boolean keepAlive) throws LockException { return lock(key, mode, numRetriesForLock, sleepTime); } + @Override public List lock(List objs, boolean keepAlive) throws LockException { return lock(objs, numRetriesForLock, sleepTime); } + @Override public void unlock(HiveLock hiveLock) throws LockException { unlock(hiveLock, numRetriesForUnLock, sleepTime); } + @Override public void releaseLocks(List hiveLocks) { releaseLocks(hiveLocks, numRetriesForUnLock, sleepTime); } + @Override public List getLocks(boolean verifyTablePartitions, boolean fetchData) throws LockException { return getLocks(verifyTablePartitions, fetchData, ctx.getConf()); } + @Override public List getLocks(HiveLockObject key, boolean verifyTablePartitions, boolean fetchData) throws LockException { return getLocks(key, verifyTablePartitions, fetchData, ctx.getConf()); } + @Override public void prepareRetry() { } + @Override public void refresh() { HiveConf conf = ctx.getConf(); sleepTime = conf.getTimeVar( @@ -149,6 +158,7 @@ public class EmbeddedLockManager implements HiveLockManager { private void sortLocks(List objs) { Collections.sort(objs, new Comparator() { + @Override public int compare(HiveLockObj o1, HiveLockObj o2) { int cmp = o1.getName().compareTo(o2.getName()); if (cmp == 0) { @@ -186,7 +196,7 @@ public class EmbeddedLockManager implements HiveLockManager { try { unlock(locked, numRetriesForUnLock, sleepTime); } catch (LockException e) { -LOG.info(e); +LOG.info("Failed to unlock ", e); } } } @@ -242,6 +252,7 @@ public class EmbeddedLockManager implements HiveLockManager { } } + @Override public void close() { root.lock.lock(); try { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java index fbf2a01..6482f3b 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.lockmgr.zookeeper; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.retry.ExponentialBackoffRetry; @@ -31,7 +31,7 @@ import org.apache.hadoop.hive.ql.util.ZooKeeperHiveHelper; public class CuratorFrameworkSingleton { private static HiveConf conf = null; private static CuratorFramework sharedClient = null; - static final Log LOG = LogFactory.getLog("CuratorFrameworkSingleton"); + static final Logger LOG = LoggerFactory.getLogger("CuratorFrameworkSingleton"); static { // Add shutdown hook. Runtime.getRuntime().addShutdow
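Note the change from LOG.info(e) to LOG.info("Failed to unlock ", e) in EmbeddedLockManager above: commons-logging's Log.info(Object) accepted a bare Throwable, but slf4j's Logger has no such overload, so the exception must travel with a message string (a trailing Throwable argument still gets its stack trace logged). A small illustration, assuming an arbitrary caught exception:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LockLogging {
      private static final Logger LOG = LoggerFactory.getLogger(LockLogging.class);

      void onUnlockFailure(Exception e) {
        // LOG.info(e);                   // compiled under commons-logging; no slf4j overload
        LOG.info("Failed to unlock ", e);               // message + Throwable: trace is logged
        LOG.info("Failed to unlock {}", "lockName", e); // {} placeholders avoid concatenation
      }
    }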
[07/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java index b7b6f90..a1e35cb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java @@ -28,8 +28,8 @@ import java.util.Random; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.exec.Task; @@ -51,7 +51,7 @@ public class HiveHistoryImpl implements HiveHistory{ String histFileName; // History file name - private static final Log LOG = LogFactory.getLog("hive.ql.exec.HiveHistoryImpl"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.exec.HiveHistoryImpl"); private static final Random randGen = new Random(); @@ -355,7 +355,7 @@ public class HiveHistoryImpl implements HiveHistory{ @Override public void closeStream() { -IOUtils.cleanup(LOG, histStream); +IOUtils.closeStream(histStream); } @Override http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java index 1b357de..616f2d6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.history.HiveHistory.Keys; import org.apache.hadoop.hive.ql.history.HiveHistory.Listener; import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo; @@ -38,7 +38,7 @@ public class HiveHistoryViewer implements Listener { String historyFile; String sessionId; - private static final Log LOG = LogFactory.getLog(HiveHistoryViewer.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveHistoryViewer.class); // Job Hash Map private final HashMap jobInfoMap = new HashMap(); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java index 2caa7ae..5610fab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java @@ -22,8 +22,8 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryPlan; @@ -46,7 +46,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; */ public class ATSHook implements 
ExecuteWithHookContext { - private static final Log LOG = LogFactory.getLog(ATSHook.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ATSHook.class.getName()); private static final Object LOCK = new Object(); private static ExecutorService executor; private static TimelineClient timelineClient; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java index 9988c79..64220f2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java @@ -31,8 +31,8 @@ import java.util.Set; import org.apache.commons.collections.SetUtils; import org.apache.commons.io.output.StringBuilderWriter; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.L
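The closeStream change in HiveHistoryImpl above is a knock-on effect of the same migration: Hadoop's IOUtils.cleanup(Log, Closeable...) takes a commons-logging Log, which the class no longer holds, so the code switches to IOUtils.closeStream, which closes quietly and drops any exception raised by close(). A minimal sketch, assuming any Closeable stream:

    import java.io.Closeable;
    import org.apache.hadoop.io.IOUtils;

    class HistoryStreamCloser {
      void close(Closeable histStream) {
        // Closes quietly; failures from close() are swallowed rather than logged.
        IOUtils.closeStream(histStream);
      }
    }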
[1/2] hive git commit: HIVE-12268: Context leaks deleteOnExit paths (Jimmy, reviewed by Szehon)
Repository: hive Updated Branches: refs/heads/master efbb0418a -> 34ba81ae7 HIVE-12268: Context leaks deleteOnExit paths (Jimmy, reviewed by Szehon) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/34ba81ae Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/34ba81ae Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/34ba81ae Branch: refs/heads/master Commit: 34ba81ae774bc4e13e7b3f8c1454490fb8d02704 Parents: 9c18fce Author: Jimmy Xiang Authored: Mon Oct 26 16:24:48 2015 -0700 Committer: Jimmy Xiang Committed: Wed Oct 28 07:55:21 2015 -0700 -- ql/src/java/org/apache/hadoop/hive/ql/Context.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/34ba81ae/ql/src/java/org/apache/hadoop/hive/ql/Context.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java index ca0d487..1499a91 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java @@ -347,7 +347,9 @@ public class Context { for (Map.Entry entry : fsScratchDirs.entrySet()) { try { Path p = entry.getValue(); -p.getFileSystem(conf).delete(p, true); +FileSystem fs = p.getFileSystem(conf); +fs.delete(p, true); +fs.cancelDeleteOnExit(p); } catch (Exception e) { LOG.warn("Error Removing Scratch: " + StringUtils.stringifyException(e));
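The leak fixed here lives in Hadoop's FileSystem: deleteOnExit(Path) records the path in a per-FileSystem set that is only drained when the filesystem closes, so deleting a scratch directory by hand without cancelling the registration leaves a stale entry behind for the life of the process. A minimal sketch of the corrected pattern, assuming the path was previously registered with deleteOnExit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    class ScratchDirCleanup {
      void remove(Path p, Configuration conf) throws Exception {
        FileSystem fs = p.getFileSystem(conf);
        fs.delete(p, true);         // remove the scratch dir now
        fs.cancelDeleteOnExit(p);   // drop the registration so the Path is not retained
      }
    }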
[2/2] hive git commit: HIVE-12265: Generate lineage info only if requested (Jimmy, reviewed by Chao)
HIVE-12265: Generate lineage info only if requested (Jimmy, reviewed by Chao) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9c18fce4 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9c18fce4 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9c18fce4 Branch: refs/heads/master Commit: 9c18fce4bfd60288250a90797ce35d042bd91ec7 Parents: efbb041 Author: Jimmy Xiang Authored: Mon Oct 26 10:20:25 2015 -0700 Committer: Jimmy Xiang Committed: Wed Oct 28 07:55:21 2015 -0700 -- ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 9 + .../org/apache/hadoop/hive/ql/optimizer/Optimizer.java | 13 - 2 files changed, 17 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/9c18fce4/ql/src/java/org/apache/hadoop/hive/ql/Driver.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 3a3fcf1..44b247f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -298,9 +298,7 @@ public class Driver implements CommandProcessor { } public Driver() { -conf = (SessionState.get() != null) ? SessionState.get().getConf() : null; -isParallelEnabled = (conf != null) -&& HiveConf.getBoolVar(conf, ConfVars.HIVE_SERVER2_PARALLEL_COMPILATION); +this((SessionState.get() != null) ? SessionState.get().getConf() : null); } /** @@ -1085,6 +1083,10 @@ public class Driver implements CommandProcessor { * while keeping the result around. */ private void releaseResources() { +if (SessionState.get() != null) { + SessionState.get().getLineageState().clear(); +} + if (plan != null) { fetchTask = plan.getFetchTask(); if (fetchTask != null) { @@ -1713,7 +1715,6 @@ public class Driver implements CommandProcessor { if (SessionState.get() != null) { try { -SessionState.get().getLineageState().clear(); SessionState.get().getHiveHistory().logPlanProgress(plan); } catch (Exception e) { // ignore http://git-wip-us.apache.org/repos/asf/hive/blob/9c18fce4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java index 439f616..25c9618 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.optimizer; import java.util.ArrayList; import java.util.List; +import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -41,6 +42,10 @@ import org.apache.hadoop.hive.ql.ppd.PredicatePushDown; import org.apache.hadoop.hive.ql.ppd.PredicateTransitivePropagate; import org.apache.hadoop.hive.ql.ppd.SyntheticJoinPredicate; +import com.google.common.base.Splitter; +import com.google.common.base.Strings; +import com.google.common.collect.Sets; + /** * Implementation of the optimizer. */ @@ -67,7 +72,13 @@ public class Optimizer { transformations.add(new HiveOpConverterPostProc()); // Add the transformation that computes the lineage information. 
-transformations.add(new Generator()); +Set<String> postExecHooks = Sets.newHashSet( + Splitter.on(",").trimResults().omitEmptyStrings().split( +Strings.nullToEmpty(HiveConf.getVar(hiveConf, HiveConf.ConfVars.POSTEXECHOOKS)))); +if (postExecHooks.contains("org.apache.hadoop.hive.ql.hooks.PostExecutePrinter") +|| postExecHooks.contains("org.apache.hadoop.hive.ql.hooks.LineageLogger")) { + transformations.add(new Generator()); +} // Try to transform OR predicates in Filter into simpler IN clauses first if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEPOINTLOOKUPOPTIMIZER)) {
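The gating logic parses the configured post-execution hooks with Guava and only adds the lineage Generator transformation when one of the two hooks that actually consume lineage is present. A standalone sketch of the same parsing, fed a hypothetical config value:

    import java.util.Set;
    import com.google.common.base.Splitter;
    import com.google.common.base.Strings;
    import com.google.common.collect.Sets;

    class PostExecHooks {
      // Splits on commas, trims whitespace, drops empty entries; null-safe via nullToEmpty.
      static Set<String> parse(String conf) {
        return Sets.newHashSet(
            Splitter.on(",").trimResults().omitEmptyStrings().split(Strings.nullToEmpty(conf)));
      }

      public static void main(String[] args) {
        Set<String> hooks = parse(" org.apache.hadoop.hive.ql.hooks.LineageLogger , ");
        boolean lineageNeeded =
            hooks.contains("org.apache.hadoop.hive.ql.hooks.PostExecutePrinter")
                || hooks.contains("org.apache.hadoop.hive.ql.hooks.LineageLogger");
        System.out.println(lineageNeeded); // prints: true
      }
    }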
hive git commit: HIVE-11306: Add a bloom-1 filter to reduce Hybrid MapJoin spills (Wei Zheng, reviewed by Gopal V)
Repository: hive Updated Branches: refs/heads/master 3e21a6d44 -> efbb0418a HIVE-11306: Add a bloom-1 filter to reduce Hybrid MapJoin spills (Wei Zheng, reviewed by Gopal V) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/efbb0418 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/efbb0418 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/efbb0418 Branch: refs/heads/master Commit: efbb0418ab9dcbba27390456905f42f73af6069d Parents: 3e21a6d Author: Gopal V Authored: Wed Oct 28 01:12:05 2015 -0700 Committer: Gopal V Committed: Wed Oct 28 01:12:05 2015 -0700 -- .../hadoop/hive/ql/exec/MapJoinOperator.java| 10 - .../persistence/HybridHashTableContainer.java | 47 +--- 2 files changed, 49 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/efbb0418/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java index 4be5383..b67ead7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java @@ -383,6 +383,10 @@ public class MapJoinOperator extends AbstractMapJoinOperator implem joinResult = adaptor.setFromOther(firstSetKey); } MapJoinRowContainer rowContainer = adaptor.getCurrentRows(); + if (joinResult != JoinUtil.JoinResult.MATCH) { +assert (rowContainer == null || !rowContainer.hasRows()) : +"Expecting an empty result set for no match"; + } if (rowContainer != null && unwrapContainer[pos] != null) { Object[] currentKey = firstSetKey.getCurrentKey(); rowContainer = unwrapContainer[pos].setInternal(rowContainer, currentKey); @@ -392,10 +396,12 @@ public class MapJoinOperator extends AbstractMapJoinOperator implem if (!noOuterJoin) { // For Hybrid Grace Hash Join, during the 1st round processing, // we only keep the LEFT side if the row is not spilled - if (!conf.isHybridHashJoin() || hybridMapJoinLeftover - || (!hybridMapJoinLeftover && joinResult != JoinUtil.JoinResult.SPILL)) { + if (!conf.isHybridHashJoin() || hybridMapJoinLeftover || + (joinResult != JoinUtil.JoinResult.SPILL && !bigTableRowSpilled)) { joinNeeded = true; storage[pos] = dummyObjVectors[pos]; + } else { +joinNeeded = false; } } else { storage[pos] = emptyList; http://git-wip-us.apache.org/repos/asf/hive/blob/efbb0418/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java index 52c02ae..625ba39 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java @@ -35,6 +35,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.JoinUtil.JoinResult; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer.KeyValueHelper; import org.apache.hadoop.hive.ql.exec.vector.VectorHashKeyWrapper; @@ -55,6 +56,7 @@ import org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryStruct import 
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; +import org.apache.hive.common.util.BloomFilter; import com.esotericsoftware.kryo.Kryo; @@ -90,6 +92,18 @@ public class HybridHashTableContainer private boolean[] sortableSortOrders; private MapJoinBytesTableContainer.KeyValueHelper writeHelper; private MapJoinBytesTableContainer.DirectKeyValueWriter directWriteHelper; + /* + * this is not a real bloom filter, but is a cheap version of the 1-memory + * access bloom filters + * + * In several cases, we'll have map-join spills because the value columns are + * a few hundred columns of Text each, while there are very few keys in total + * (a few thousand). + * + * This is
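The "cheap version of the 1-memory access bloom filters" described in the comment is a blocked Bloom filter: each key hashes to a single machine word, and a couple of bits within that word are set or probed, so a membership test costs one memory access instead of k scattered reads. A self-contained sketch of the idea (not Hive's actual implementation, whose hashing and sizing differ):

    /** Bloom-1 sketch: each key maps to one long; two bits inside it are set/probed. */
    class Bloom1Filter {
      private final long[] words;
      private final int mask;                  // requires words.length to be a power of two

      Bloom1Filter(int numWordsPowerOfTwo) {
        words = new long[numWordsPowerOfTwo];
        mask = numWordsPowerOfTwo - 1;
      }

      void add(long hash) {
        int w = (int) (hash >>> 32) & mask;    // high bits choose the word
        words[w] |= (1L << (hash & 63)) | (1L << ((hash >>> 6) & 63)); // low bits set two bits
      }

      boolean mayContain(long hash) {          // false positives possible, false negatives not
        long probe = (1L << (hash & 63)) | (1L << ((hash >>> 6) & 63));
        return (words[(int) (hash >>> 32) & mask] & probe) == probe;   // single word read
      }
    }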
[52/55] [abbrv] hive git commit: HIVE-12227 : LLAP: better column vector object pools (Sergey Shelukhin, reviewed by Gopal V)
HIVE-12227 : LLAP: better column vector object pools (Sergey Shelukhin, reviewed by Gopal V) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f2ede0e7 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f2ede0e7 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f2ede0e7 Branch: refs/heads/spark Commit: f2ede0e728c9740d7f7c37bbc342ada7df11b1aa Parents: e5b5303 Author: Sergey Shelukhin Authored: Tue Oct 27 18:26:03 2015 -0700 Committer: Sergey Shelukhin Committed: Tue Oct 27 18:26:03 2015 -0700 -- .../apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/f2ede0e7/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java -- diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java index 23c2c51..b81e97d 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java @@ -42,9 +42,8 @@ public abstract class EncodedDataConsumer downstreamConsumer; private Callable readCallable; private final LlapDaemonQueueMetrics queueMetrics; - // TODO: if we were using Exchanger, pool would not be necessary here - it would be 1/N items - private final static int CVB_POOL_SIZE = 8; // Note that the pool is per EDC - within EDC, CVBs are expected to have the same schema. + private final static int CVB_POOL_SIZE = 128; protected final FixedSizedObjectPool cvbPool; public EncodedDataConsumer(Consumer consumer, final int colCount, @@ -59,6 +58,7 @@ public abstract class EncodedDataConsumer
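For context, FixedSizedObjectPool caps how many ColumnVectorBatch objects each consumer caches for reuse; the commit raises that cap from 8 to 128 so more batches can circulate without fresh allocation. A simplified stand-in for such a pool (Hive's own class is lock-free and differs in detail):

    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.function.Supplier;

    /** Simplified fixed-capacity pool: take() reuses when possible, offer() recycles. */
    class SimpleFixedPool<T> {
      private final ArrayBlockingQueue<T> pool;
      private final Supplier<T> factory;

      SimpleFixedPool(int capacity, Supplier<T> factory) {
        this.pool = new ArrayBlockingQueue<>(capacity);
        this.factory = factory;
      }

      T take() {
        T obj = pool.poll();                        // reuse a pooled object if available
        return obj != null ? obj : factory.get();   // otherwise allocate fresh
      }

      void offer(T obj) {
        pool.offer(obj);                            // silently dropped once the pool is full
      }
    }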
[37/55] [abbrv] hive git commit: HIVE-12260: Fix TestColumnStatistics and TestJsonFileDump test failures in master (Prasanth Jayachandran reviewed by Thejas Nair)
HIVE-12260: Fix TestColumnStatistics and TestJsonFileDump test failures in master (Prasanth Jayachandran reviewed by Thejas Nair) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0808741c Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0808741c Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0808741c Branch: refs/heads/spark Commit: 0808741c69d3ec6739e47bd10f89695c28d460c0 Parents: 383d1cc Author: Prasanth Jayachandran Authored: Mon Oct 26 11:45:10 2015 -0500 Committer: Prasanth Jayachandran Committed: Mon Oct 26 11:45:10 2015 -0500 -- ql/src/test/resources/orc-file-dump.json| 184 +++ ql/src/test/resources/orc-file-has-null.out | 80 +- 2 files changed, 132 insertions(+), 132 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/0808741c/ql/src/test/resources/orc-file-dump.json -- diff --git a/ql/src/test/resources/orc-file-dump.json b/ql/src/test/resources/orc-file-dump.json index 14cf962..646dfe5 100644 --- a/ql/src/test/resources/orc-file-dump.json +++ b/ql/src/test/resources/orc-file-dump.json @@ -4,7 +4,7 @@ "writerVersion": "HIVE_4243", "numberOfRows": 21000, "compression": "ZLIB", - "compressionBufferSize": 1, + "compressionBufferSize": 4096, "schemaString": "struct", "schema": [ { @@ -254,8 +254,8 @@ "stripeNumber": 1, "stripeInformation": { "offset": 3, -"indexLength": 863, -"dataLength": 63749, +"indexLength": 970, +"dataLength": 63770, "footerLength": 90, "rowCount": 5000 }, @@ -270,60 +270,60 @@ "columnId": 1, "section": "ROW_INDEX", "startOffset": 20, - "length": 165 + "length": 167 }, { "columnId": 2, "section": "ROW_INDEX", - "startOffset": 185, - "length": 174 + "startOffset": 187, + "length": 171 }, { "columnId": 3, "section": "ROW_INDEX", - "startOffset": 359, + "startOffset": 358, "length": 103 }, { "columnId": 3, "section": "BLOOM_FILTER", - "startOffset": 462, - "length": 404 + "startOffset": 461, + "length": 512 }, { "columnId": 1, "section": "DATA", - "startOffset": 866, - "length": 20029 + "startOffset": 973, + "length": 20035 }, { "columnId": 2, "section": "DATA", - "startOffset": 20895, - "length": 40035 + "startOffset": 21008, + "length": 40050 }, { "columnId": 3, "section": "PRESENT", - "startOffset": 60930, + "startOffset": 61058, "length": 17 }, { "columnId": 3, "section": "DATA", - "startOffset": 60947, + "startOffset": 61075, "length": 3510 }, { "columnId": 3, "section": "LENGTH", - "startOffset": 64457, + "startOffset": 64585, "length": 25 }, { "columnId": 3, "section": "DICTIONARY_DATA", - "startOffset": 64482, + "startOffset": 64610, "length": 133 } ], @@ -494,77 +494,77 @@ { "stripeNumber": 2, "stripeInformation": { -"offset": 64705, -"indexLength": 854, -"dataLength": 63742, -"footerLength": 90, +"offset": 64833, +"indexLength": 961, +"dataLength": 63763, +"footerLength": 88, "rowCount": 5000 }, "streams": [ { "columnId": 0, "section": "ROW_INDEX", - "startOffset": 64705, + "startOffset": 64833, "length": 17 }, { "columnId": 1, "section": "ROW_INDEX", - "startOffset": 64722, - "length": 164 + "startOffset": 64850, + "length": 166 }, { "columnId": 2, "section": "ROW_INDEX", - "startOffset": 64886, - "length": 169 + "startOffset": 65016, + "length": 166 }, { "columnId": 3, "section": "ROW_INDEX", - "startOffset": 65055, + "startOffset": 65182, "length": 100 }, { "columnId": 3, "section": "BLOOM_FILTER", - "startOffset": 65155, - "length": 404 + "startOffset": 65282, + "length"
[45/55] [abbrv] hive git commit: HIVE-12259: Command containing semicolon is broken in Beeline (Chaoyu Tang, reviewed by Ferdinand Xu)
HIVE-12259: Command containing semicolon is broken in Beeline (Chaoyu Tang, reviewed by Ferdinand Xu) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ccdd1740 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ccdd1740 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ccdd1740 Branch: refs/heads/spark Commit: ccdd1740a582ed49bcfba0d940e60438e1c7cb08 Parents: 86346fb Author: ctang Authored: Tue Oct 27 08:16:24 2015 -0400 Committer: ctang Committed: Tue Oct 27 08:16:24 2015 -0400 -- beeline/src/java/org/apache/hive/beeline/BeeLine.java| 4 ++-- .../java/org/apache/hive/beeline/TestBeeLineWithArgs.java| 8 2 files changed, 10 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/ccdd1740/beeline/src/java/org/apache/hive/beeline/BeeLine.java -- diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java index 69e9418..4e04997 100644 --- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -1108,8 +1108,8 @@ public class BeeLine implements Closeable { } if (isBeeLine) { - if (line.startsWith(COMMAND_PREFIX) && !line.contains(";")) { -// handle the case "!cmd" for beeline + if (line.startsWith(COMMAND_PREFIX)) { +// handle SQLLine command in beeline which starts with ! and does not end with ; return execCommandWithPrefix(line); } else { return commands.sql(line, getOpts().getEntireLineAsCommand()); http://git-wip-us.apache.org/repos/asf/hive/blob/ccdd1740/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java -- diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java index 0465ef3..7cc0acf 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java @@ -752,4 +752,12 @@ public class TestBeeLineWithArgs { final String EXPECTED_PATTERN = "Stage-1 map ="; testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList); } + + @Test + public void testConnectionUrlWithSemiColon() throws Throwable{ +List argList = getBaseArgs(miniHS2.getJdbcURL("default", "sess_var_list?var1=value1")); +final String SCRIPT_TEXT = "set var1"; +final String EXPECTED_PATTERN = "var1=value1"; +testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList); + } }
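The practical effect: a SQLLine command whose argument legitimately contains a semicolon, e.g. !connect jdbc:hive2://host:10000/default;sess_var_list?var1=value1 (host and variables hypothetical), is now dispatched by its ! prefix instead of being misrouted to the SQL handler just because it contains a semicolon, which is the case the new testConnectionUrlWithSemiColon covers.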
[48/55] [abbrv] hive git commit: HIVE-11489 : Jenkins PreCommit-HIVE-SPARK-Build fails with TestCliDriver.initializationError (Szehon, reviewed by Sergio Pena)
HIVE-11489 : Jenkins PreCommit-HIVE-SPARK-Build fails with TestCliDriver.initializationError (Szehon, reviewed by Sergio Pena) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6df90903 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6df90903 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6df90903 Branch: refs/heads/spark Commit: 6df909035c474bd2634481f8f2f9ab626ec79b8a Parents: d5e8544 Author: Szehon Ho Authored: Tue Oct 27 14:03:24 2015 -0700 Committer: Szehon Ho Committed: Tue Oct 27 14:03:24 2015 -0700 -- testutils/ptest2/src/main/resources/batch-exec.vm | 2 ++ 1 file changed, 2 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hive/blob/6df90903/testutils/ptest2/src/main/resources/batch-exec.vm -- diff --git a/testutils/ptest2/src/main/resources/batch-exec.vm b/testutils/ptest2/src/main/resources/batch-exec.vm index da3e0ac..c155851 100644 --- a/testutils/ptest2/src/main/resources/batch-exec.vm +++ b/testutils/ptest2/src/main/resources/batch-exec.vm @@ -62,6 +62,8 @@ then testModule=./ fi pushd $testModule + #clean to force regeneration of class files (maven sometimes skips generation) + mvn clean -Dmaven.repo.local=$localDir/$instanceName/maven $mavenArgs timeout 2h mvn -B test -Dmaven.repo.local=$localDir/$instanceName/maven \ $mavenArgs $mavenTestArgs $testArguments 1>$logDir/maven-test.txt 2>&1
[42/55] [abbrv] hive git commit: HIVE-11378 Remove hadoop-1 support from master branch (gates, reviewed by Ashutosh Chauhan and Sergey Shelukhin)
http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/hive-unit/pom.xml -- diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml index 5295840..326d646 100644 --- a/itests/hive-unit/pom.xml +++ b/itests/hive-unit/pom.xml @@ -64,7 +64,23 @@ hive-hcatalog-streaming ${project.version} - + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + + + org.apache.tez + tez-tests + ${tez.version} + test-jar + + + commons-logging + commons-logging + ${commons-logging.version} + @@ -122,6 +138,119 @@ ${mockito-all.version} test + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + tests + test + + + org.apache.hadoop + hadoop-mapreduce-client-jobclient + ${hadoop.version} + tests + test + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop.version} + test + + + org.apache.hbase + hbase-server + ${hbase.version} + test + + + org.apache.hbase + hbase-server + ${hbase.version} + test-jar + test + + + org.apache.hbase + hbase-hadoop-compat + ${hbase.version} + test-jar + test + + + org.apache.hbase + hbase-hadoop2-compat + ${hbase.version} + test-jar + test + + + org.apache.hadoop + hadoop-minicluster + test + + + com.sun.jersey + jersey-servlet + test + + + org.apache.hadoop + hadoop-archives + ${hadoop.version} + test + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + tests + test + + + org.apache.hadoop + hadoop-mapreduce-client-hs + ${hadoop.version} + test + + + org.apache.tez + tez-api + ${tez.version} + test + + + org.apache.tez + tez-runtime-library + ${tez.version} + test + + + org.apache.tez + tez-mapreduce + ${tez.version} + test + + + org.apache.tez + tez-dag + ${tez.version} + test + + + org.apache.hadoop + hadoop-yarn-server-tests + ${hadoop.version} + test + tests + + + org.apache.hadoop + hadoop-yarn-client + ${hadoop.version} + test + @@ -171,233 +300,6 @@ - - hadoop-1 - - - -org.apache.maven.plugins -maven-compiler-plugin -2.3.2 - - -**/metastore/hbase/** - - - - - - - - org.apache.hadoop - hadoop-core - ${hadoop-20S.version} - - - org.apache.hadoop - hadoop-test - ${hadoop-20S.version} - test - - - org.apache.hbase - hbase-common - ${hbase.hadoop1.version} - test - - - org.apache.hbase - hbase-common - ${hbase.hadoop1.version} - test - tests - - - org.apache.hbase - hbase-hadoop-compat - ${hbase.hadoop1.version} - test - - - org.apache.hbase - hbase-hadoop-compat - ${hbase.hadoop1.version} - test - tests - - - org.apache.hbase - hbase-hadoop1-compat - ${hbase.hadoop1.version} - test - - - org.apache.hbase - hbase-hadoop1-compat - ${hbase.hadoop1.version} - test - tests - - - org.apache.hbase - hbase-server - ${hbase.hadoop1.version} - test - - - org.apache.hbase - hbase-server - ${hbase.hadoop1.version} - tests - test - - - org.apache.hadoop - hadoop-tools - ${hadoop-20S.version} - test - - - com.sun.jersey - jersey-servlet - test - - - - - hadoop-2 - - - org.apache.hadoop - hadoop-common - ${hadoop-23.version} - - - org.apache.hadoop - hadoop-hdfs - ${hadoop-23.version} - tests - test - - - org.apache.hadoop - hadoop-mapreduce-client-jobclient - ${hadoop-23.version} - tests - test - - - org.apache.hadoop - hadoop-mapreduce-client-core - ${hadoop-23.version} - test -
[46/55] [abbrv] hive git commit: HIVE-11973: IN operator fails when the column type is DATE (Yongzhi via Jimmy)
HIVE-11973: IN operator fails when the column type is DATE (Yongzhi via Jimmy) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/601a4812 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/601a4812 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/601a4812 Branch: refs/heads/spark Commit: 601a48122992337ceca8e4042948b3a16334fa24 Parents: ccdd174 Author: Jimmy Xiang Authored: Tue Oct 27 09:09:55 2015 -0700 Committer: Jimmy Xiang Committed: Tue Oct 27 09:27:46 2015 -0700 -- .../hadoop/hive/ql/exec/FunctionRegistry.java | 6 ++ .../test/queries/clientpositive/selectindate.q | 9 +++ .../results/clientpositive/selectindate.q.out | 70 3 files changed, 85 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hive/blob/601a4812/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java index d0e6122..9316600 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java @@ -784,6 +784,12 @@ public final class FunctionRegistry { return PrimitiveCategory.STRING; } +if (pgA == PrimitiveGrouping.DATE_GROUP && pgB == PrimitiveGrouping.STRING_GROUP) { + return PrimitiveCategory.STRING; +} +if (pgB == PrimitiveGrouping.DATE_GROUP && pgA == PrimitiveGrouping.STRING_GROUP) { + return PrimitiveCategory.STRING; +} Integer ai = numericTypes.get(pcA); Integer bi = numericTypes.get(pcB); if (ai == null || bi == null) { http://git-wip-us.apache.org/repos/asf/hive/blob/601a4812/ql/src/test/queries/clientpositive/selectindate.q -- diff --git a/ql/src/test/queries/clientpositive/selectindate.q b/ql/src/test/queries/clientpositive/selectindate.q new file mode 100644 index 000..00f6a5a --- /dev/null +++ b/ql/src/test/queries/clientpositive/selectindate.q @@ -0,0 +1,9 @@ +drop table if exists datetest; +create table datetest(dValue date, iValue int); +insert into datetest values('2000-03-22', 1); +insert into datetest values('2001-03-22', 2); +insert into datetest values('2002-03-22', 3); +insert into datetest values('2003-03-22', 4); +SELECT * FROM datetest WHERE dValue IN ('2000-03-22','2001-03-22'); +drop table datetest; + http://git-wip-us.apache.org/repos/asf/hive/blob/601a4812/ql/src/test/results/clientpositive/selectindate.q.out -- diff --git a/ql/src/test/results/clientpositive/selectindate.q.out b/ql/src/test/results/clientpositive/selectindate.q.out new file mode 100644 index 000..defebe3 --- /dev/null +++ b/ql/src/test/results/clientpositive/selectindate.q.out @@ -0,0 +1,70 @@ +PREHOOK: query: drop table if exists datetest +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists datetest +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table datetest(dValue date, iValue int) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@datetest +POSTHOOK: query: create table datetest(dValue date, iValue int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@datetest +PREHOOK: query: insert into datetest values('2000-03-22', 1) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@datetest +POSTHOOK: query: insert into datetest values('2000-03-22', 1) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@datetest +POSTHOOK: Lineage: 
datetest.dvalue EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: datetest.ivalue EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +PREHOOK: query: insert into datetest values('2001-03-22', 2) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__2 +PREHOOK: Output: default@datetest +POSTHOOK: query: insert into datetest values('2001-03-22', 2) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__2 +POSTHOOK: Output: default@datetest +POSTHOOK: Lineage: datetest.dvalue EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: datetest.ivalue EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +PREHOOK: query: insert into datetest values('2002-03-22', 3)
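The FunctionRegistry hunk is the heart of the fix: a comparison between a DATE-group value and a STRING-group value now resolves to STRING in both argument orders, which is what lets dValue IN ('2000-03-22', ...) compile. A reduced sketch of the symmetric rule, using simplified enums in place of Hive's PrimitiveGrouping/PrimitiveCategory types:

public class CommonCategorySketch {
  enum Group { DATE_GROUP, STRING_GROUP }
  enum Category { STRING }

  // Mirrors the two added if-blocks: the rule must hold for (a, b) and (b, a),
  // since either side of IN/= may be the date column.
  static Category commonForComparison(Group a, Group b) {
    if (a == Group.DATE_GROUP && b == Group.STRING_GROUP) {
      return Category.STRING;
    }
    if (b == Group.DATE_GROUP && a == Group.STRING_GROUP) {
      return Category.STRING;
    }
    return null; // other pairings fall through to the numeric logic
  }

  public static void main(String[] args) {
    System.out.println(commonForComparison(Group.DATE_GROUP, Group.STRING_GROUP)); // STRING
    System.out.println(commonForComparison(Group.STRING_GROUP, Group.DATE_GROUP)); // STRING
  }
}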
[08/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/TCLIService.cpp -- diff --git a/service/src/gen/thrift/gen-cpp/TCLIService.cpp b/service/src/gen/thrift/gen-cpp/TCLIService.cpp index 7dae2f8..7e2c028 100644 --- a/service/src/gen/thrift/gen-cpp/TCLIService.cpp +++ b/service/src/gen/thrift/gen-cpp/TCLIService.cpp @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -15,6 +15,7 @@ TCLIService_OpenSession_args::~TCLIService_OpenSession_args() throw() { uint32_t TCLIService_OpenSession_args::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -55,7 +56,7 @@ uint32_t TCLIService_OpenSession_args::read(::apache::thrift::protocol::TProtoco uint32_t TCLIService_OpenSession_args::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("TCLIService_OpenSession_args"); xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1); @@ -64,7 +65,6 @@ uint32_t TCLIService_OpenSession_args::write(::apache::thrift::protocol::TProtoc xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -75,7 +75,7 @@ TCLIService_OpenSession_pargs::~TCLIService_OpenSession_pargs() throw() { uint32_t TCLIService_OpenSession_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("TCLIService_OpenSession_pargs"); xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1); @@ -84,7 +84,6 @@ uint32_t TCLIService_OpenSession_pargs::write(::apache::thrift::protocol::TProto xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -95,6 +94,7 @@ TCLIService_OpenSession_result::~TCLIService_OpenSession_result() throw() { uint32_t TCLIService_OpenSession_result::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -156,6 +156,7 @@ TCLIService_OpenSession_presult::~TCLIService_OpenSession_presult() throw() { uint32_t TCLIService_OpenSession_presult::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -201,6 +202,7 @@ TCLIService_CloseSession_args::~TCLIService_CloseSession_args() throw() { uint32_t TCLIService_CloseSession_args::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -241,7 +243,7 @@ uint32_t TCLIService_CloseSession_args::read(::apache::thrift::protocol::TProtoc uint32_t TCLIService_CloseSession_args::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker 
tracker(*oprot); xfer += oprot->writeStructBegin("TCLIService_CloseSession_args"); xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1); @@ -250,7 +252,6 @@ uint32_t TCLIService_CloseSession_args::write(::apache::thrift::protocol::TProto xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -261,7 +262,7 @@ TCLIService_CloseSession_pargs::~TCLIService_CloseSession_pargs() throw() { uint32_t TCLIService_CloseSession_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("TCLIService_CloseSession_pargs"); xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1); @@ -270,7 +271,6 @@ uint32_t TCLIService_CloseSession_pargs::write(::apache::thrift::protocol::TProt xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -281,6 +281,7 @@ TCLIService_CloseSession_result::~TCLIService_CloseSession_result(
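The mechanical change running through all of these generated files: Thrift 0.9.3 replaces the paired incrementRecursionDepth()/decrementRecursionDepth() calls with scope-bound TInputRecursionTracker/TOutputRecursionTracker objects, so the depth counter unwinds even when read()/write() exits early via an exception. The nearest Java analogue is try-with-resources; a rough illustration only, where DepthTracker is hypothetical and not a Thrift class:

public class RecursionGuardSketch {
  static int depth = 0;
  static final int MAX_DEPTH = 64;

  // Scope-bound guard in the spirit of the C++ trackers above.
  static class DepthTracker implements AutoCloseable {
    DepthTracker() {
      if (++depth > MAX_DEPTH) {
        throw new IllegalStateException("recursion limit exceeded");
      }
    }
    @Override public void close() { --depth; } // runs on every exit path
  }

  static void readStruct(int nested) {
    try (DepthTracker tracker = new DepthTracker()) {
      if (nested > 0) {
        readStruct(nested - 1); // depth unwinds even if this throws
      }
    }
  }

  public static void main(String[] args) {
    readStruct(10);
    System.out.println("depth after read: " + depth); // prints 0
  }
}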
[30/55] [abbrv] hive git commit: HIVE-11540 - Too many delta files during Compaction - OOM (Eugene Koifman, reviewed by Alan Gates)
HIVE-11540 - Too many delta files during Compaction - OOM (Eugene Koifman, reviewed by Alan Gates) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e3ef96f2 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e3ef96f2 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e3ef96f2 Branch: refs/heads/spark Commit: e3ef96f2b83ffa932dd59fc3df79dff8747309ba Parents: 24ec6be Author: Eugene Koifman Authored: Sat Oct 24 18:44:05 2015 -0700 Committer: Eugene Koifman Committed: Sat Oct 24 18:44:05 2015 -0700 -- .../org/apache/hadoop/hive/conf/HiveConf.java | 2 + .../org/apache/hadoop/hive/ql/io/AcidUtils.java | 15 ++- .../hive/ql/txn/compactor/CompactorMR.java | 96 ++- .../hadoop/hive/ql/txn/compactor/Worker.java| 6 +- .../hive/ql/txn/compactor/CompactorTest.java| 4 + .../hive/ql/txn/compactor/TestWorker.java | 120 +-- 6 files changed, 201 insertions(+), 42 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/e3ef96f2/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java -- diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index f065048..dc79415 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1551,6 +1551,8 @@ public class HiveConf extends Configuration { HIVE_COMPACTOR_DELTA_PCT_THRESHOLD("hive.compactor.delta.pct.threshold", 0.1f, "Percentage (fractional) size of the delta files relative to the base that will trigger\n" + "a major compaction. (1.0 = 100%, so the default 0.1 = 10%.)"), +COMPACTOR_MAX_NUM_DELTA("hive.compactor.max.num.delta", 500, "Maximum number of delta files that " + + "the compactor will attempt to handle in a single job."), HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD("hive.compactor.abortedtxn.threshold", 1000, "Number of aborted transactions involving a given table or partition that will trigger\n" + http://git-wip-us.apache.org/repos/asf/hive/blob/e3ef96f2/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java index 30db513..e8d070c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java @@ -132,6 +132,9 @@ public class AcidUtils { return deltaSubdir(min, max) + "_" + String.format(STATEMENT_DIGITS, statementId); } + public static String baseDir(long txnId) { +return BASE_PREFIX + String.format(DELTA_DIGITS, txnId); + } /** * Create a filename for a bucket file. * @param directory the partition directory @@ -221,14 +224,16 @@ public class AcidUtils { Path getBaseDirectory(); /** - * Get the list of original files. + * Get the list of original files. Not {@code null}. * @return the list of original files (eg. 00_0) */ List getOriginalFiles(); /** * Get the list of base and delta directories that are valid and not - * obsolete. + * obsolete. Not {@code null}. List must be sorted in a specific way. + * See {@link org.apache.hadoop.hive.ql.io.AcidUtils.ParsedDelta#compareTo(org.apache.hadoop.hive.ql.io.AcidUtils.ParsedDelta)} + * for details. * @return the minimal list of current directories */ List getCurrentDirectories(); @@ -237,7 +242,7 @@ public class AcidUtils { * Get the list of obsolete directories. 
After filtering out bases and * deltas that are not selected by the valid transaction list, return the * list of original files, bases, and deltas that have been replaced by - * more up to date ones. + * more up to date ones. Not {@code null}. */ List getObsolete(); } @@ -284,6 +289,7 @@ public class AcidUtils { * happens in a different process; thus it's possible to have bases/deltas with * overlapping txnId boundaries. The sort order helps figure out the "best" set of files * to use to get data. + * This sorts "wider" delta before "narrower" i.e. delta_5_20 sorts before delta_5_10 (and delta_11_20) */ @Override public int compareTo(ParsedDelta parsedDelta) { @@ -499,6 +505,9 @@ public class AcidUtils { } Collections.sort(working); +//so now, 'working' should be sorted like delta_5_20 delta_5_10 delta_11_20 delta_51_60 for example +//and we want to end up with the best set containing all relevant data: delta_5_20 delta_51_60, +
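The new comments in AcidUtils pin down the ordering the compactor now relies on: deltas sort by minimum transaction id ascending, and for equal minimums the wider delta (larger maximum) sorts first, so delta_5_20 precedes both delta_5_10 and delta_11_20. A self-contained sketch of that comparator, using a simplified Delta class in place of AcidUtils.ParsedDelta:

import java.util.Arrays;
import java.util.List;

public class DeltaOrderSketch {
  static class Delta implements Comparable<Delta> {
    final long minTxn, maxTxn;
    Delta(long min, long max) { minTxn = min; maxTxn = max; }
    @Override public int compareTo(Delta o) {
      if (minTxn != o.minTxn) {
        return Long.compare(minTxn, o.minTxn); // earlier start first
      }
      return Long.compare(o.maxTxn, maxTxn);   // wider delta first
    }
    @Override public String toString() { return "delta_" + minTxn + "_" + maxTxn; }
  }

  public static void main(String[] args) {
    List<Delta> deltas = Arrays.asList(
        new Delta(5, 10), new Delta(51, 60), new Delta(11, 20), new Delta(5, 20));
    deltas.sort(null); // natural ordering
    // Matches the comment in the patch: delta_5_20 delta_5_10 delta_11_20 delta_51_60
    System.out.println(deltas);
  }
}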
[24/55] [abbrv] hive git commit: HIVE-12253 : revert HIVE-12061 (Sergey Shelukhin, reviewed by Prasanth Jayachandran)
HIVE-12253 : revert HIVE-12061 (Sergey Shelukhin, reviewed by Prasanth Jayachandran) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3e0d87f8 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3e0d87f8 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3e0d87f8 Branch: refs/heads/spark Commit: 3e0d87f8114508916c4268bf3317ba3da5523def Parents: 2653537 Author: Sergey Shelukhin Authored: Fri Oct 23 16:42:16 2015 -0700 Committer: Sergey Shelukhin Committed: Fri Oct 23 16:49:14 2015 -0700 -- metastore/if/hive_metastore.thrift |8 +- .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp | 1744 +- .../gen/thrift/gen-cpp/hive_metastore_types.cpp | 402 ++-- .../gen/thrift/gen-cpp/hive_metastore_types.h | 20 +- .../metastore/api/FileMetadataExprType.java | 42 - .../api/GetFileMetadataByExprRequest.java | 126 +- .../src/gen/thrift/gen-php/metastore/Types.php | 30 - .../gen/thrift/gen-py/hive_metastore/ttypes.py | 26 +- .../gen/thrift/gen-rb/hive_metastore_types.rb | 13 +- .../hive/metastore/FileMetadataHandler.java | 30 - .../hadoop/hive/metastore/HiveMetaStore.java| 15 +- .../hadoop/hive/metastore/ObjectStore.java |3 +- .../apache/hadoop/hive/metastore/RawStore.java |8 +- .../filemeta/OrcFileMetadataHandler.java| 63 - .../hive/metastore/hbase/HBaseReadWrite.java|2 +- .../hadoop/hive/metastore/hbase/HBaseStore.java | 41 +- .../DummyRawStoreControlledCommit.java |3 +- .../DummyRawStoreForJdoConnection.java |3 +- 18 files changed, 1095 insertions(+), 1484 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/if/hive_metastore.thrift -- diff --git a/metastore/if/hive_metastore.thrift b/metastore/if/hive_metastore.thrift index 3e30f56..751cebe 100755 --- a/metastore/if/hive_metastore.thrift +++ b/metastore/if/hive_metastore.thrift @@ -725,17 +725,11 @@ struct GetFileMetadataByExprResult { 2: required bool isSupported } -enum FileMetadataExprType { - ORC_SARG = 1 -} - - // Request type for get_file_metadata_by_expr struct GetFileMetadataByExprRequest { 1: required list fileIds, 2: required binary expr, - 3: optional bool doGetFooters, - 4: optional FileMetadataExprType type + 3: optional bool doGetFooters } // Return type for get_file_metadata
[04/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/ThriftHive.cpp -- diff --git a/service/src/gen/thrift/gen-cpp/ThriftHive.cpp b/service/src/gen/thrift/gen-cpp/ThriftHive.cpp index 8935c04..a5448f0 100644 --- a/service/src/gen/thrift/gen-cpp/ThriftHive.cpp +++ b/service/src/gen/thrift/gen-cpp/ThriftHive.cpp @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -15,6 +15,7 @@ ThriftHive_execute_args::~ThriftHive_execute_args() throw() { uint32_t ThriftHive_execute_args::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -55,7 +56,7 @@ uint32_t ThriftHive_execute_args::read(::apache::thrift::protocol::TProtocol* ip uint32_t ThriftHive_execute_args::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("ThriftHive_execute_args"); xfer += oprot->writeFieldBegin("query", ::apache::thrift::protocol::T_STRING, 1); @@ -64,7 +65,6 @@ uint32_t ThriftHive_execute_args::write(::apache::thrift::protocol::TProtocol* o xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -75,7 +75,7 @@ ThriftHive_execute_pargs::~ThriftHive_execute_pargs() throw() { uint32_t ThriftHive_execute_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("ThriftHive_execute_pargs"); xfer += oprot->writeFieldBegin("query", ::apache::thrift::protocol::T_STRING, 1); @@ -84,7 +84,6 @@ uint32_t ThriftHive_execute_pargs::write(::apache::thrift::protocol::TProtocol* xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -95,6 +94,7 @@ ThriftHive_execute_result::~ThriftHive_execute_result() throw() { uint32_t ThriftHive_execute_result::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -156,6 +156,7 @@ ThriftHive_execute_presult::~ThriftHive_execute_presult() throw() { uint32_t ThriftHive_execute_presult::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -201,6 +202,7 @@ ThriftHive_fetchOne_args::~ThriftHive_fetchOne_args() throw() { uint32_t ThriftHive_fetchOne_args::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -228,12 +230,11 @@ uint32_t ThriftHive_fetchOne_args::read(::apache::thrift::protocol::TProtocol* i uint32_t ThriftHive_fetchOne_args::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("ThriftHive_fetchOne_args"); xfer += 
oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -244,12 +245,11 @@ ThriftHive_fetchOne_pargs::~ThriftHive_fetchOne_pargs() throw() { uint32_t ThriftHive_fetchOne_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("ThriftHive_fetchOne_pargs"); xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -260,6 +260,7 @@ ThriftHive_fetchOne_result::~ThriftHive_fetchOne_result() throw() { uint32_t ThriftHive_fetchOne_result::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -333,6 +334,7 @@ ThriftHive_fetchOne_presult::~ThriftHive_fetchOne_presult() throw() { uint32_t ThriftHive_fetchOne_presult::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker trac
[55/55] [abbrv] hive git commit: HIVE-12284: Merge branch 'master' into spark
HIVE-12284: Merge branch 'master' into spark Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c9073aad Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c9073aad Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c9073aad Branch: refs/heads/spark Commit: c9073aadc00b01372b85522e777acaea997d5936 Parents: 51f257a 3e21a6d Author: Xuefu Zhang Authored: Wed Oct 28 05:08:53 2015 -0700 Committer: Xuefu Zhang Committed: Wed Oct 28 05:08:53 2015 -0700 -- LICENSE |34 +- NOTICE | 3 + accumulo-handler/pom.xml|61 +- .../hive/accumulo/HiveAccumuloHelper.java |55 +- .../mr/HiveAccumuloTableOutputFormat.java |50 +- .../hive/accumulo/TestHiveAccumuloHelper.java |69 +- .../mr/TestHiveAccumuloTableOutputFormat.java |86 +- .../apache/hadoop/hive/ant/GenVectorCode.java |61 +- .../apache/hadoop/hive/ant/QTestGenTask.java|13 + beeline/pom.xml |44 +- .../java/org/apache/hive/beeline/BeeLine.java | 304 +- .../org/apache/hive/beeline/BeeLineOpts.java|52 +- .../hive/beeline/ClientCommandHookFactory.java |85 + .../org/apache/hive/beeline/ClientHook.java |33 + .../java/org/apache/hive/beeline/Commands.java | 492 +- .../apache/hive/beeline/DatabaseConnection.java | 4 +- .../apache/hive/beeline/HiveSchemaHelper.java | 4 +- .../org/apache/hive/beeline/HiveSchemaTool.java |22 +- .../hive/beeline/cli/CliOptionsProcessor.java | 104 + .../org/apache/hive/beeline/cli/HiveCli.java|37 + .../hive/beeline/TestBeelineArgParsing.java | 1 + .../beeline/TestClientCommandHookFactory.java |32 + .../apache/hive/beeline/cli/TestHiveCli.java| 289 + beeline/src/test/resources/hive-site.xml|37 + bin/beeline |10 + bin/ext/cli.cmd |39 +- bin/ext/cli.sh |35 +- bin/ext/hbaseimport.cmd |35 + bin/ext/hbaseimport.sh |27 + bin/ext/hbaseschematool.sh |27 + bin/ext/llap.sh |49 + bin/ext/util/execHiveCmd.sh |21 +- bin/hive| 8 +- cli/pom.xml |63 +- .../org/apache/hadoop/hive/cli/CliDriver.java |33 +- common/pom.xml |41 +- .../hadoop/hive/common/CallableWithNdc.java |44 + .../hadoop/hive/common/CompressionUtils.java| 131 + .../apache/hadoop/hive/common/DiskRange.java|99 - .../hadoop/hive/common/DiskRangeInfo.java |59 + .../hadoop/hive/common/DiskRangeList.java | 205 - .../apache/hadoop/hive/common/FileUtils.java|38 + .../apache/hadoop/hive/common/ObjectPair.java | 5 + .../hadoop/hive/common/RunnableWithNdc.java |43 + .../hadoop/hive/common/StatsSetupConst.java |16 - .../common/metrics/common/MetricsConstant.java |22 +- .../org/apache/hadoop/hive/conf/HiveConf.java | 253 +- .../hadoop/hive/conf/HiveVariableSource.java|24 + .../hadoop/hive/conf/VariableSubstitution.java |70 + .../apache/hadoop/hive/ql/log/PerfLogger.java | 196 + .../apache/hive/common/util/BloomFilter.java|20 +- .../hive/common/util/FixedSizedObjectPool.java | 315 + .../apache/hive/common/util/ReflectionUtil.java | 2 +- .../hive/common/util/ShutdownHookManager.java |52 +- common/src/main/resources/hive-log4j.properties |88 + common/src/main/resources/hive-log4j2.xml | 4 +- .../hive/common/type/TestHiveDecimal.java |12 +- .../apache/hadoop/hive/conf/TestHiveConf.java |25 + .../hive/conf/TestVariableSubstitution.java |63 + .../common/util/TestFixedSizedObjectPool.java | 246 + .../common/util/TestShutdownHookManager.java|22 +- contrib/pom.xml |44 +- .../hive/contrib/serde2/MultiDelimitSerDe.java | 1 + data/conf/hive-log4j2.xml |27 + data/conf/hive-site.xml |45 +- data/conf/llap/hive-site.xml| 266 + data/conf/llap/llap-daemon-site.xml |57 + data/conf/llap/tez-site.xml | 6 + data/conf/spark/standalone/hive-site.xml| 6 
- data/conf/spark/yarn-client/hive-site.xml | 6 - data/conf/tez/hive-site.xml |26 +- data/conf/tez/tez-site.xml | 6 + data/files/decimal_1_1.txt |30 + data/files/escape_crlf.txt | 2 +
[43/55] [abbrv] hive git commit: HIVE-11378 Remove hadoop-1 support from master branch (gates, reviewed by Ashutosh Chauhan and Sergey Shelukhin)
HIVE-11378 Remove hadoop-1 support from master branch (gates, reviewed by Ashutosh Chauhan and Sergey Shelukhin) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f9517efd Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f9517efd Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f9517efd Branch: refs/heads/spark Commit: f9517efd2dc70fd88944fa8879feea0ddc11d08b Parents: d84e393 Author: Alan Gates Authored: Mon Oct 26 14:59:01 2015 -0700 Committer: Alan Gates Committed: Mon Oct 26 14:59:01 2015 -0700 -- accumulo-handler/pom.xml| 61 +- beeline/pom.xml | 44 +- cli/pom.xml | 63 +- common/pom.xml | 41 +- contrib/pom.xml | 44 +- hbase-handler/pom.xml | 246 +++ hcatalog/core/pom.xml | 194 ++--- hcatalog/hcatalog-pig-adapter/pom.xml | 160 ++-- .../hcatalog/pig/TestHCatLoaderEncryption.java | 61 +- hcatalog/pom.xml| 65 +- hcatalog/server-extensions/pom.xml | 29 +- hcatalog/streaming/pom.xml | 42 +- hcatalog/webhcat/java-client/pom.xml| 39 +- hcatalog/webhcat/svr/pom.xml| 60 +- hplsql/pom.xml | 31 +- hwi/pom.xml | 61 +- itests/custom-serde/pom.xml | 31 +- itests/hcatalog-unit/pom.xml| 389 -- itests/hive-jmh/pom.xml | 38 +- itests/hive-minikdc/pom.xml | 181 ++--- itests/hive-unit-hadoop2/pom.xml| 12 +- .../hive/thrift/TestHadoopAuthBridge23.java | 27 +- itests/hive-unit/pom.xml| 358 - itests/pom.xml | 9 +- itests/qtest-accumulo/pom.xml | 404 -- itests/qtest-spark/pom.xml | 38 +- itests/qtest/pom.xml| 554 ++ itests/test-serde/pom.xml | 32 +- itests/util/pom.xml | 163 ++-- jdbc/pom.xml| 29 +- llap-client/pom.xml | 90 +-- llap-server/pom.xml | 146 ++-- metastore/pom.xml | 67 +- pom.xml | 228 +++--- ql/pom.xml | 154 ++-- serde/pom.xml | 90 +-- service/pom.xml | 45 +- shims/0.20S/pom.xml | 63 -- .../hadoop/hive/shims/Hadoop20SShims.java | 734 --- .../apache/hadoop/hive/shims/Jetty20SShims.java | 53 -- .../apache/hadoop/mapred/WebHCatJTShim20S.java | 123 shims/0.23/pom.xml | 26 +- shims/aggregator/pom.xml| 6 - shims/common/pom.xml| 4 +- .../apache/hadoop/hive/shims/ShimLoader.java| 17 +- .../hive/thrift/HadoopThriftAuthBridge.java | 11 +- shims/pom.xml | 1 - shims/scheduler/pom.xml | 14 +- storage-api/pom.xml | 31 +- 49 files changed, 1583 insertions(+), 3826 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/accumulo-handler/pom.xml -- diff --git a/accumulo-handler/pom.xml b/accumulo-handler/pom.xml index 4e3a087..a330e94 100644 --- a/accumulo-handler/pom.xml +++ b/accumulo-handler/pom.xml @@ -91,6 +91,24 @@ slf4j-api + org.apache.hadoop + hadoop-client + ${hadoop.version} + true + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + true + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop.version} + true + + junit junit test @@ -102,49 +120,6 @@ - - - hadoop-1 - - - org.apache.hadoop - hadoop-client - ${hadoop-20S.version} - true - - - org.apache.hadoop - hadoop-core - ${hadoop-20S.version} - true - - - - - hadoop-2 - - - org.apache.hadoop - hadoop-client - ${hadoop-23.version} - true - - - org.apache.hadoop - hadoop-common - ${hadoop-23.version} - true -
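With the hadoop-1 profiles and the Hadoop20SShims/Jetty20SShims sources deleted, version dispatch in ShimLoader collapses to a single supported line. A hedged reduction of what the selection logic amounts to after this change; the method below is illustrative, not the exact ShimLoader code:

public class ShimSelectSketch {
  // Only the Hadoop 2 ("0.23") shim line survives HIVE-11378.
  static String majorVersion(String hadoopVersion) {
    String[] parts = hadoopVersion.split("\\.");
    int major = Integer.parseInt(parts[0]);
    int minor = Integer.parseInt(parts[1]);
    if (major == 2 || (major == 0 && minor >= 23)) {
      return "0.23";
    }
    throw new IllegalArgumentException(
        "Unsupported Hadoop version " + hadoopVersion + " (Hadoop 1 support removed)");
  }

  public static void main(String[] args) {
    System.out.println(majorVersion("2.6.0")); // 0.23
    try {
      majorVersion("1.2.1");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }
  }
}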
[06/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp -- diff --git a/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp b/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp index 742cfdc..b852379 100644 --- a/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp +++ b/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -280,11 +280,9 @@ void TTypeQualifierValue::__set_stringValue(const std::string& val) { __isset.stringValue = true; } -const char* TTypeQualifierValue::ascii_fingerprint = "A7801670116150C65ACA43E6F679BA79"; -const uint8_t TTypeQualifierValue::binary_fingerprint[16] = {0xA7,0x80,0x16,0x70,0x11,0x61,0x50,0xC6,0x5A,0xCA,0x43,0xE6,0xF6,0x79,0xBA,0x79}; - uint32_t TTypeQualifierValue::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -333,7 +331,7 @@ uint32_t TTypeQualifierValue::read(::apache::thrift::protocol::TProtocol* iprot) uint32_t TTypeQualifierValue::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("TTypeQualifierValue"); if (this->__isset.i32Value) { @@ -348,7 +346,6 @@ uint32_t TTypeQualifierValue::write(::apache::thrift::protocol::TProtocol* oprot } xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -370,13 +367,12 @@ TTypeQualifierValue& TTypeQualifierValue::operator=(const TTypeQualifierValue& o __isset = other1.__isset; return *this; } -std::ostream& operator<<(std::ostream& out, const TTypeQualifierValue& obj) { - using apache::thrift::to_string; +void TTypeQualifierValue::printTo(std::ostream& out) const { + using ::apache::thrift::to_string; out << "TTypeQualifierValue("; - out << "i32Value="; (obj.__isset.i32Value ? (out << to_string(obj.i32Value)) : (out << "")); - out << ", " << "stringValue="; (obj.__isset.stringValue ? (out << to_string(obj.stringValue)) : (out << "")); + out << "i32Value="; (__isset.i32Value ? (out << to_string(i32Value)) : (out << "")); + out << ", " << "stringValue="; (__isset.stringValue ? 
(out << to_string(stringValue)) : (out << "")); out << ")"; - return out; } @@ -388,11 +384,9 @@ void TTypeQualifiers::__set_qualifiers(const std::mapqualifiers = val; } -const char* TTypeQualifiers::ascii_fingerprint = "6C72981CFA989214285648FA8C196C47"; -const uint8_t TTypeQualifiers::binary_fingerprint[16] = {0x6C,0x72,0x98,0x1C,0xFA,0x98,0x92,0x14,0x28,0x56,0x48,0xFA,0x8C,0x19,0x6C,0x47}; - uint32_t TTypeQualifiers::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -451,7 +445,7 @@ uint32_t TTypeQualifiers::read(::apache::thrift::protocol::TProtocol* iprot) { uint32_t TTypeQualifiers::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("TTypeQualifiers"); xfer += oprot->writeFieldBegin("qualifiers", ::apache::thrift::protocol::T_MAP, 1); @@ -469,7 +463,6 @@ uint32_t TTypeQualifiers::write(::apache::thrift::protocol::TProtocol* oprot) co xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -485,12 +478,11 @@ TTypeQualifiers& TTypeQualifiers::operator=(const TTypeQualifiers& other11) { qualifiers = other11.qualifiers; return *this; } -std::ostream& operator<<(std::ostream& out, const TTypeQualifiers& obj) { - using apache::thrift::to_string; +void TTypeQualifiers::printTo(std::ostream& out) const { + using ::apache::thrift::to_string; out << "TTypeQualifiers("; - out << "qualifiers=" << to_string(obj.qualifiers); + out << "qualifiers=" << to_string(qualifiers); out << ")"; - return out; } @@ -507,11 +499,9 @@ void TPrimitiveTypeEntry::__set_typeQualifiers(const TTypeQualifiers& val) { __isset.typeQualifiers = true; } -const char* TPrimitiveTypeEntry::ascii_fingerprint = "755674F6A5C8EB47868686AE386FBC1C"; -const uint8_t TPrimitiveTypeEntry::binary_fingerprint[16] = {0x75,0x56,0x74,0xF6,0xA5,0xC8,0xEB,0x47,0x86,0x86,0x86,0xAE,0x38,0x6F,0xBC,0x1C}; - uint32_t TPrimitiveTypeEntry::read(::apache::thrift::protocol::
[49/55] [abbrv] hive git commit: HIVE-12061 : add file type support to file metadata by expr call (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp -- diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp index cb0ee7a..5fd4a90 100644 --- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp +++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp @@ -151,6 +151,14 @@ const char* _kResourceTypeNames[] = { }; const std::map _ResourceType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(3, _kResourceTypeValues, _kResourceTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL)); +int _kFileMetadataExprTypeValues[] = { + FileMetadataExprType::ORC_SARG +}; +const char* _kFileMetadataExprTypeNames[] = { + "ORC_SARG" +}; +const std::map _FileMetadataExprType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(1, _kFileMetadataExprTypeValues, _kFileMetadataExprTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL)); + Version::~Version() throw() { } @@ -14262,6 +14270,11 @@ void GetFileMetadataByExprRequest::__set_doGetFooters(const bool val) { __isset.doGetFooters = true; } +void GetFileMetadataByExprRequest::__set_type(const FileMetadataExprType::type val) { + this->type = val; +__isset.type = true; +} + uint32_t GetFileMetadataByExprRequest::read(::apache::thrift::protocol::TProtocol* iprot) { apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); @@ -14321,6 +14334,16 @@ uint32_t GetFileMetadataByExprRequest::read(::apache::thrift::protocol::TProtoco xfer += iprot->skip(ftype); } break; + case 4: +if (ftype == ::apache::thrift::protocol::T_I32) { + int32_t ecast626; + xfer += iprot->readI32(ecast626); + this->type = (FileMetadataExprType::type)ecast626; + this->__isset.type = true; +} else { + xfer += iprot->skip(ftype); +} +break; default: xfer += iprot->skip(ftype); break; @@ -14345,10 +14368,10 @@ uint32_t GetFileMetadataByExprRequest::write(::apache::thrift::protocol::TProtoc xfer += oprot->writeFieldBegin("fileIds", ::apache::thrift::protocol::T_LIST, 1); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast(this->fileIds.size())); -std::vector ::const_iterator _iter626; -for (_iter626 = this->fileIds.begin(); _iter626 != this->fileIds.end(); ++_iter626) +std::vector ::const_iterator _iter627; +for (_iter627 = this->fileIds.begin(); _iter627 != this->fileIds.end(); ++_iter627) { - xfer += oprot->writeI64((*_iter626)); + xfer += oprot->writeI64((*_iter627)); } xfer += oprot->writeListEnd(); } @@ -14363,6 +14386,11 @@ uint32_t GetFileMetadataByExprRequest::write(::apache::thrift::protocol::TProtoc xfer += oprot->writeBool(this->doGetFooters); xfer += oprot->writeFieldEnd(); } + if (this->__isset.type) { +xfer += oprot->writeFieldBegin("type", ::apache::thrift::protocol::T_I32, 4); +xfer += oprot->writeI32((int32_t)this->type); +xfer += oprot->writeFieldEnd(); + } xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); return xfer; @@ -14373,20 +14401,23 @@ void swap(GetFileMetadataByExprRequest &a, GetFileMetadataByExprRequest &b) { swap(a.fileIds, b.fileIds); swap(a.expr, b.expr); swap(a.doGetFooters, b.doGetFooters); + swap(a.type, b.type); swap(a.__isset, b.__isset); } -GetFileMetadataByExprRequest::GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest& other627) { - fileIds = other627.fileIds; - expr = other627.expr; - doGetFooters = other627.doGetFooters; - __isset = other627.__isset; -} -GetFileMetadataByExprRequest& GetFileMetadataByExprRequest::operator=(const 
GetFileMetadataByExprRequest& other628) { +GetFileMetadataByExprRequest::GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest& other628) { fileIds = other628.fileIds; expr = other628.expr; doGetFooters = other628.doGetFooters; + type = other628.type; __isset = other628.__isset; +} +GetFileMetadataByExprRequest& GetFileMetadataByExprRequest::operator=(const GetFileMetadataByExprRequest& other629) { + fileIds = other629.fileIds; + expr = other629.expr; + doGetFooters = other629.doGetFooters; + type = other629.type; + __isset = other629.__isset; return *this; } void GetFileMetadataByExprRequest::printTo(std::ostream& out) const { @@ -14395,6 +14426,7 @@ void GetFileMetadataByExprRequest::printTo(std::ostream& out) const { out << "fileIds=" << to_string(fileIds); out << ", " << "expr=" << to_string(expr); out << ", " << "doGetFooters="; (__isset.doGetFooters ? (out << to_string(doGetFooters)) : (out << "")); + out << ", " << "type="; (__isset.type ? (out << to_string(type)) : (out << "")); out << ")"; } @@ -14438,17 +14470,17
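The wire-level effect of HIVE-12061 is visible above: GetFileMetadataByExprRequest gains optional field 4, type, so a caller can declare which expression kind the binary expr payload encodes (currently only ORC_SARG). A sketch of populating the request from the generated Java bean; the setter names assume the usual Thrift bean conventions rather than being verified against this exact revision:

import java.nio.ByteBuffer;
import java.util.Arrays;

import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
import org.apache.hadoop.hive.metastore.api.GetFileMetadataByExprRequest;

public class FileMetadataRequestSketch {
  public static void main(String[] args) {
    GetFileMetadataByExprRequest req = new GetFileMetadataByExprRequest();
    req.setFileIds(Arrays.asList(101L, 102L));          // field 1: required list of file ids
    req.setExpr(ByteBuffer.wrap(new byte[] { 0x00 })); // field 2: serialized expression bytes
    req.setDoGetFooters(true);                          // field 3: optional
    req.setType(FileMetadataExprType.ORC_SARG);         // field 4: added by HIVE-12061
    System.out.println(req);
  }
}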
[32/55] [abbrv] hive git commit: HIVE-12234: Beeline quit tries to connect again if no existing connections (Ferdinand Xu, reviewed by Szehon Ho)
HIVE-12234: Beeline quit tries to connect again if no existing connections(Ferdinand Xu, reviewed by Szehon Ho) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9ea51d12 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9ea51d12 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9ea51d12 Branch: refs/heads/spark Commit: 9ea51d122bcab987a5eca4e8244b1d121b9d8041 Parents: 48a1e1f Author: Ferdinand Xu Authored: Mon Oct 26 00:48:49 2015 -0400 Committer: Ferdinand Xu Committed: Mon Oct 26 00:48:49 2015 -0400 -- beeline/src/java/org/apache/hive/beeline/Commands.java | 6 +++--- .../src/java/org/apache/hive/beeline/DatabaseConnection.java | 4 +++- 2 files changed, 6 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/9ea51d12/beeline/src/java/org/apache/hive/beeline/Commands.java -- diff --git a/beeline/src/java/org/apache/hive/beeline/Commands.java b/beeline/src/java/org/apache/hive/beeline/Commands.java index 44dd18b..745f694 100644 --- a/beeline/src/java/org/apache/hive/beeline/Commands.java +++ b/beeline/src/java/org/apache/hive/beeline/Commands.java @@ -1226,11 +1226,11 @@ public class Commands { return false; } try { - if (beeLine.getDatabaseConnection().getConnection() != null - && !(beeLine.getDatabaseConnection().getConnection().isClosed())) { + if (beeLine.getDatabaseConnection().getCurrentConnection() != null + && !(beeLine.getDatabaseConnection().getCurrentConnection().isClosed())) { int index = beeLine.getDatabaseConnections().getIndex(); beeLine.info(beeLine.loc("closing", index, beeLine.getDatabaseConnection())); -beeLine.getDatabaseConnection().getConnection().close(); +beeLine.getDatabaseConnection().getCurrentConnection().close(); } else { beeLine.info(beeLine.loc("already-closed")); } http://git-wip-us.apache.org/repos/asf/hive/blob/9ea51d12/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java -- diff --git a/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java b/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java index e349a49..791fec6 100644 --- a/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java +++ b/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java @@ -201,7 +201,6 @@ class DatabaseConnection { return null; } - public Connection getConnection() throws SQLException { if (connection != null) { return connection; @@ -210,6 +209,9 @@ class DatabaseConnection { return connection; } + public Connection getCurrentConnection() { +return connection; + } public void reconnect() throws Exception { close();
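The root cause here is that DatabaseConnection.getConnection() is lazy: if no connection exists, it opens one, so the close/quit path's question "is there something to close?" could itself dial out. The new getCurrentConnection() only reports state. A minimal sketch of the lazy-versus-peek pair, with connect() as a hypothetical stand-in for the real JDBC setup:

import java.sql.Connection;
import java.sql.SQLException;

class ConnectionHolderSketch {
  private Connection connection;

  // Lazy accessor: creates a connection on demand -- wrong tool for shutdown checks.
  public Connection getConnection() throws SQLException {
    if (connection != null) {
      return connection;
    }
    connection = connect();
    return connection;
  }

  // Peek accessor: no side effects -- what the close/quit path now uses.
  public Connection getCurrentConnection() {
    return connection;
  }

  private Connection connect() throws SQLException {
    throw new SQLException("no server in this sketch");
  }

  public static void main(String[] args) {
    ConnectionHolderSketch holder = new ConnectionHolderSketch();
    // Safe during quit: never triggers a connect attempt.
    System.out.println("current connection: " + holder.getCurrentConnection()); // null
  }
}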
[41/55] [abbrv] hive git commit: HIVE-11378 Remove hadoop-1 support from master branch (gates, reviewed by Ashutosh Chauhan and Sergey Shelukhin)
http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/pom.xml -- diff --git a/pom.xml b/pom.xml index a400f78..3b3303c 100644 --- a/pom.xml +++ b/pom.xml @@ -123,11 +123,9 @@ 3.1.0 14.0.1 2.4.4 -1.2.1 -2.6.0 +2.6.0 ${basedir}/${hive.path.to.root}/testutils/hadoop -0.98.9-hadoop1 -1.1.1 +1.1.1 4.4 4.4 @@ -236,7 +234,6 @@ - @@ -599,6 +596,87 @@ xercesImpl ${xerces.version} + +org.apache.hadoop +hadoop-client +${hadoop.version} + + +org.apache.hadoop +hadoop-common +${hadoop.version} + + +org.apache.httpcomponents +httpcore + + +org.apache.httpcomponents +httpclient + + + + +org.apache.hadoop +hadoop-hdfs +${hadoop.version} + + +org.apache.hadoop +hadoop-mapreduce-client-jobclient +${hadoop.version} + + +org.apache.hadoop +hadoop-mapreduce-client-core +${hadoop.version} + + +org.apache.hadoop +hadoop-minikdc +${hadoop.version} + + +org.apache.hbase +hbase-common +${hbase.version} + + +org.apache.hbase +hbase-hadoop-compat +${hbase.version} + + +org.apache.hbase +hbase-hadoop2-compat +${hbase.version} + + +org.apache.hbase +hbase-server +${hbase.version} + + +org.apache.hadoop +hadoop-minicluster +${hadoop.version} + + +org.scala-lang +scala-library +${scala.version} + + +org.apache.spark +spark-core_${scala.binary.version} +${spark.version} + + +org.apache.hadoop +hadoop-core + + + @@ -1061,146 +1139,6 @@ - - - - hadoop-1 - - - -org.apache.hadoop -hadoop-client -${hadoop-20S.version} - - -org.apache.hadoop -hadoop-core -${hadoop-20S.version} - - -org.apache.hadoop -hadoop-test -${hadoop-20S.version} - - -org.apache.hadoop -hadoop-tools -${hadoop-20S.version} - - -org.apache.hbase -hbase-common -${hbase.hadoop1.version} - - -org.apache.hbase -hbase-hadoop-compat -${hbase.hadoop1.version} - - -org.apache.hbase -hbase-hadoop1-compat -${hbase.hadoop1.version} - - -org.apache.hbase -hbase-server -${hbase.hadoop1.version} - - - - - - hadoop-2 - -llap-server - - - - -org.apache.hadoop -hadoop-client -${hadoop-23.version} - - -org.apache.hadoop -hadoop-common -${hadoop-23.version} - - -org.apache.httpcomponents -httpcore - - -org.apache.httpcomponents -httpclient - - - - -org.apache.hadoop -hadoop-hdfs -${hadoop-23.version} - - -org.apache.hadoop -hadoop-mapreduce-client-jobclient -${hadoop-23.version} - - -org.apache.hadoop -hadoop-mapreduce-client-core -${hadoop-23.version} - - -org.apache.hadoop -hadoop-minikdc -${hadoop-23.version} - - -org.apache.hbase -hbase-common -${hbase.hadoop2.version} - - -org.apache.hbase -hbase-hadoop-compat -${hbase.hadoop2.version} - - -org.apache.hbase -hbase-hadoop2-compat -${hbase.hadoop2.version} - - -org.apache.hbase -hbase-server -${hbase.hadoop2.version} - - -org.apache.hadoop -hadoop-minicluster -${hadoop-23.version} - - -org.scala-lang -scala-library -${scala.version} - - -org.apache.sp
[47/55] [abbrv] hive git commit: HIVE-12250 Zookeeper connection leaks in Hive's HBaseHandler (Naveen, reviewed by Aihua and Szehon)
HIVE-12250 Zookeeper connection leaks in Hive's HBaseHandler (Naveen, reviewed by Aihua and Szehon) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d5e8544e Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d5e8544e Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d5e8544e Branch: refs/heads/spark Commit: d5e8544e7106ba0879b176c3524e369833bd844b Parents: 601a481 Author: Szehon Ho Authored: Tue Oct 27 11:09:07 2015 -0700 Committer: Szehon Ho Committed: Tue Oct 27 11:09:07 2015 -0700 -- .../hive/hbase/HiveHBaseTableInputFormat.java | 105 ++- .../hive/hbase/HiveHBaseTableOutputFormat.java | 9 ++ 2 files changed, 64 insertions(+), 50 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/d5e8544e/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java -- diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java index 8e72759..5f4a1e4 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java @@ -107,6 +107,7 @@ public class HiveHBaseTableInputFormat extends TableInputFormatBase try { recordReader.initialize(tableSplit, tac); } catch (InterruptedException e) { + closeTable(); // Free up the HTable connections throw new IOException("Failed to initialize RecordReader", e); } @@ -445,65 +446,69 @@ public class HiveHBaseTableInputFormat extends TableInputFormatBase String hbaseColumnsMapping = jobConf.get(HBaseSerDe.HBASE_COLUMNS_MAPPING); boolean doColumnRegexMatching = jobConf.getBoolean(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING, true); -if (hbaseColumnsMapping == null) { - throw new IOException(HBaseSerDe.HBASE_COLUMNS_MAPPING + " required for HBase Table."); -} - -ColumnMappings columnMappings = null; try { - columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping, doColumnRegexMatching); -} catch (SerDeException e) { - throw new IOException(e); -} + if (hbaseColumnsMapping == null) { +throw new IOException(HBaseSerDe.HBASE_COLUMNS_MAPPING + " required for HBase Table."); + } -int iKey = columnMappings.getKeyIndex(); -int iTimestamp = columnMappings.getTimestampIndex(); -ColumnMapping keyMapping = columnMappings.getKeyMapping(); - -// Take filter pushdown into account while calculating splits; this -// allows us to prune off regions immediately. Note that although -// the Javadoc for the superclass getSplits says that it returns one -// split per region, the implementation actually takes the scan -// definition into account and excludes regions which don't satisfy -// the start/stop row conditions (HBASE-1829). -Scan scan = createFilterScan(jobConf, iKey, iTimestamp, -HiveHBaseInputFormatUtil.getStorageFormatOfKey(keyMapping.mappingSpec, -jobConf.get(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE, "string"))); - -// The list of families that have been added to the scan -List addedFamilies = new ArrayList(); - -// REVIEW: are we supposed to be applying the getReadColumnIDs -// same as in getRecordReader? 
-for (ColumnMapping colMap : columnMappings) { - if (colMap.hbaseRowKey || colMap.hbaseTimestamp) { -continue; + ColumnMappings columnMappings = null; + try { +columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping, doColumnRegexMatching); + } catch (SerDeException e) { +throw new IOException(e); } - if (colMap.qualifierName == null) { -scan.addFamily(colMap.familyNameBytes); -addedFamilies.add(colMap.familyName); - } else { -if(!addedFamilies.contains(colMap.familyName)){ - // add the column only if the family has not already been added - scan.addColumn(colMap.familyNameBytes, colMap.qualifierNameBytes); + int iKey = columnMappings.getKeyIndex(); + int iTimestamp = columnMappings.getTimestampIndex(); + ColumnMapping keyMapping = columnMappings.getKeyMapping(); + + // Take filter pushdown into account while calculating splits; this + // allows us to prune off regions immediately. Note that although + // the Javadoc for the superclass getSplits says that it returns one + // split per region, the implementation actually takes the scan + // definition into account and excludes regions which don't satisfy + // the start/stop row conditions (HBASE-1829). +
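The leak pattern being fixed: an HTable, and the ZooKeeper session behind it, stays open when reader initialization or split calculation throws partway through. The repair is the close-on-failure discipline visible in the catch block above ("Free up the HTable connections"), together with the try block newly wrapped around the body of getSplits. A generic sketch of the idiom, with Table standing in for any connection-holding resource:

import java.io.Closeable;
import java.io.IOException;

public class CloseOnFailureSketch {
  static class Table implements Closeable {
    @Override public void close() { System.out.println("table closed, ZK session released"); }
    void initialize() throws IOException { throw new IOException("region lookup failed"); }
  }

  static void openAndInitialize() throws IOException {
    Table table = new Table(); // holds a connection from this point on
    try {
      table.initialize();
    } catch (IOException e) {
      table.close(); // without this, the connection outlives the failed call
      throw new IOException("Failed to initialize", e);
    }
  }

  public static void main(String[] args) {
    try {
      openAndInitialize();
    } catch (IOException e) {
      System.out.println("propagated: " + e.getMessage());
    }
  }
}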
[28/55] [abbrv] hive git commit: HIVE-12239 : Constants in hive.common.metrics.common.MetricsConstant are not final (Aleksei Statkevich via Ashutosh Chauhan)
HIVE-12239 : Constants in hive.common.metrics.common.MetricsConstant are not final (Aleksei Statkevich via Ashutosh Chauhan) Signed-off-by: Ashutosh Chauhan Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a6da5d15 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a6da5d15 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a6da5d15 Branch: refs/heads/spark Commit: a6da5d15b162aa39ac7cb82f206ad47a59f0dd3e Parents: 03c62d0 Author: Aleksei Statkevich Authored: Thu Oct 22 21:18:00 2015 -0800 Committer: Ashutosh Chauhan Committed: Sat Oct 24 14:50:45 2015 -0700 -- .../common/metrics/common/MetricsConstant.java | 26 ++-- 1 file changed, 13 insertions(+), 13 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/a6da5d15/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java -- diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java index 88a3c29..a5aa995 100644 --- a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java @@ -22,20 +22,20 @@ package org.apache.hadoop.hive.common.metrics.common; */ public class MetricsConstant { - public static String JVM_PAUSE_INFO = "jvm.pause.info-threshold"; - public static String JVM_PAUSE_WARN = "jvm.pause.warn-threshold"; - public static String JVM_EXTRA_SLEEP = "jvm.pause.extraSleepTime"; + public static final String JVM_PAUSE_INFO = "jvm.pause.info-threshold"; + public static final String JVM_PAUSE_WARN = "jvm.pause.warn-threshold"; + public static final String JVM_EXTRA_SLEEP = "jvm.pause.extraSleepTime"; - public static String OPEN_CONNECTIONS = "open_connections"; - public static String OPEN_OPERATIONS = "open_operations"; + public static final String OPEN_CONNECTIONS = "open_connections"; + public static final String OPEN_OPERATIONS = "open_operations"; - public static String JDO_ACTIVE_TRANSACTIONS = "active_jdo_transactions"; - public static String JDO_ROLLBACK_TRANSACTIONS = "rollbacked_jdo_transactions"; - public static String JDO_COMMIT_TRANSACTIONS = "committed_jdo_transactions"; - public static String JDO_OPEN_TRANSACTIONS = "opened_jdo_transactions"; + public static final String JDO_ACTIVE_TRANSACTIONS = "active_jdo_transactions"; + public static final String JDO_ROLLBACK_TRANSACTIONS = "rollbacked_jdo_transactions"; + public static final String JDO_COMMIT_TRANSACTIONS = "committed_jdo_transactions"; + public static final String JDO_OPEN_TRANSACTIONS = "opened_jdo_transactions"; - public static String METASTORE_HIVE_LOCKS = "metastore_hive_locks"; - public static String ZOOKEEPER_HIVE_SHAREDLOCKS = "zookeeper_hive_sharedlocks"; - public static String ZOOKEEPER_HIVE_EXCLUSIVELOCKS = "zookeeper_hive_exclusivelocks"; - public static String ZOOKEEPER_HIVE_SEMISHAREDLOCKS = "zookeeper_hive_semisharedlocks"; + public static final String METASTORE_HIVE_LOCKS = "metastore_hive_locks"; + public static final String ZOOKEEPER_HIVE_SHAREDLOCKS = "zookeeper_hive_sharedlocks"; + public static final String ZOOKEEPER_HIVE_EXCLUSIVELOCKS = "zookeeper_hive_exclusivelocks"; + public static final String ZOOKEEPER_HIVE_SEMISHAREDLOCKS = "zookeeper_hive_semisharedlocks"; }
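Small but not cosmetic: a public static String that is not final is a mutable global -- any class on the classpath can reassign a metric name at runtime -- and it is not a compile-time constant the compiler can fold. A two-line illustration of the hazard the patch closes off:

public class MutableConstantSketch {
  public static String OPEN_CONNECTIONS = "open_connections"; // pre-HIVE-12239 shape

  public static void main(String[] args) {
    OPEN_CONNECTIONS = "oops"; // legal without 'final'; a compile error with it
    System.out.println(OPEN_CONNECTIONS);
  }
}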
[10/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp -- diff --git a/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp b/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp index a1fb60b..e92d776 100644 --- a/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp +++ b/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -150,11 +150,9 @@ void Adjacency::__set_adjacencyType(const AdjacencyType::type val) { this->adjacencyType = val; } -const char* Adjacency::ascii_fingerprint = "BC4F8C394677A1003AA9F56ED26D8204"; -const uint8_t Adjacency::binary_fingerprint[16] = {0xBC,0x4F,0x8C,0x39,0x46,0x77,0xA1,0x00,0x3A,0xA9,0xF5,0x6E,0xD2,0x6D,0x82,0x04}; - uint32_t Adjacency::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -225,7 +223,7 @@ uint32_t Adjacency::read(::apache::thrift::protocol::TProtocol* iprot) { uint32_t Adjacency::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("Adjacency"); xfer += oprot->writeFieldBegin("node", ::apache::thrift::protocol::T_STRING, 1); @@ -250,7 +248,6 @@ uint32_t Adjacency::write(::apache::thrift::protocol::TProtocol* oprot) const { xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -275,14 +272,13 @@ Adjacency& Adjacency::operator=(const Adjacency& other8) { __isset = other8.__isset; return *this; } -std::ostream& operator<<(std::ostream& out, const Adjacency& obj) { - using apache::thrift::to_string; +void Adjacency::printTo(std::ostream& out) const { + using ::apache::thrift::to_string; out << "Adjacency("; - out << "node=" << to_string(obj.node); - out << ", " << "children=" << to_string(obj.children); - out << ", " << "adjacencyType=" << to_string(obj.adjacencyType); + out << "node=" << to_string(node); + out << ", " << "children=" << to_string(children); + out << ", " << "adjacencyType=" << to_string(adjacencyType); out << ")"; - return out; } @@ -302,11 +298,9 @@ void Graph::__set_adjacencyList(const std::vector & val) { this->adjacencyList = val; } -const char* Graph::ascii_fingerprint = "1F7FB604B3EF8F7AFB5DEAD15F2FC0B5"; -const uint8_t Graph::binary_fingerprint[16] = {0x1F,0x7F,0xB6,0x04,0xB3,0xEF,0x8F,0x7A,0xFB,0x5D,0xEA,0xD1,0x5F,0x2F,0xC0,0xB5}; - uint32_t Graph::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -389,7 +383,7 @@ uint32_t Graph::read(::apache::thrift::protocol::TProtocol* iprot) { uint32_t Graph::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("Graph"); xfer += oprot->writeFieldBegin("nodeType", ::apache::thrift::protocol::T_I32, 1); @@ -422,7 +416,6 @@ uint32_t Graph::write(::apache::thrift::protocol::TProtocol* oprot) const { xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - 
oprot->decrementRecursionDepth(); return xfer; } @@ -447,14 +440,13 @@ Graph& Graph::operator=(const Graph& other23) { __isset = other23.__isset; return *this; } -std::ostream& operator<<(std::ostream& out, const Graph& obj) { - using apache::thrift::to_string; +void Graph::printTo(std::ostream& out) const { + using ::apache::thrift::to_string; out << "Graph("; - out << "nodeType=" << to_string(obj.nodeType); - out << ", " << "roots=" << to_string(obj.roots); - out << ", " << "adjacencyList=" << to_string(obj.adjacencyList); + out << "nodeType=" << to_string(nodeType); + out << ", " << "roots=" << to_string(roots); + out << ", " << "adjacencyList=" << to_string(adjacencyList); out << ")"; - return out; } @@ -486,11 +478,9 @@ void Operator::__set_started(const bool val) { this->started = val; } -const char* Operator::ascii_fingerprint = "30917C758A752485AF223B697479DE6C"; -const uint8_t Operator::binary_fingerprint[16] = {0x30,0x91,0x7C,0x75,0x8A,0x75,0x24,0x85,0xAF,0x22,0x3B,0x69,0x74,0x79,0xDE,0x6C}; - uint32_t Operator::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::th
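The mechanical churn in these regenerated C++ files hides one behavioral fix: Thrift 0.9.3 replaces the manual incrementRecursionDepth()/decrementRecursionDepth() pairs with scope-bound TInputRecursionTracker/TOutputRecursionTracker objects, so the depth counter is released on every exit path, including early returns and thrown exceptions. A minimal Java analogue of that RAII idea using try-with-resources; every name below is hypothetical and illustrative, not part of the generated code:

```java
// Illustrative sketch only: a Java analogue of Thrift 0.9.3's scope-bound
// recursion trackers. Names here are hypothetical, not Hive/Thrift APIs.
import java.util.concurrent.atomic.AtomicInteger;

public class RecursionTrackerDemo {
  static final int MAX_DEPTH = 64;
  static final AtomicInteger depth = new AtomicInteger();

  // AutoCloseable guard: the counter is decremented on every exit path,
  // which is what the C++ tracker objects guarantee via their destructors.
  static class Tracker implements AutoCloseable {
    Tracker() {
      if (depth.incrementAndGet() > MAX_DEPTH) {
        depth.decrementAndGet();
        throw new IllegalStateException("recursion limit exceeded");
      }
    }
    @Override public void close() { depth.decrementAndGet(); }
  }

  static int readStruct(int remaining) {
    try (Tracker t = new Tracker()) {     // replaces manual increment/decrement
      if (remaining == 0) {
        return depth.get();               // early return no longer leaks depth
      }
      return readStruct(remaining - 1);   // nested struct read
    }
  }

  public static void main(String[] args) {
    System.out.println("max depth seen: " + readStruct(10));
    System.out.println("depth after return: " + depth.get()); // prints 0
  }
}
```

With the manual pattern, any return taken between the increment and the decrement leaked a depth slot and would eventually trip the recursion limit on a long-lived protocol object; the guard object makes that leak impossible by construction.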
[26/55] [abbrv] hive git commit: HIVE-12039 : Fix TestSSL#testSSLVersion (Vaibhav Gumashta via Ashutosh Chauhan)
HIVE-12039 : Fix TestSSL#testSSLVersion (Vaibhav Gumashta via Ashutosh Chauhan) Signed-off-by: Ashutosh Chauhan Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e8f71f4d Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e8f71f4d Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e8f71f4d Branch: refs/heads/spark Commit: e8f71f4d2bfcf6534fbbf3413a8b3c80698fcc46 Parents: ee2d318 Author: Vaibhav Gumashta Authored: Thu Oct 8 12:43:00 2015 -0800 Committer: Ashutosh Chauhan Committed: Sat Oct 24 14:41:06 2015 -0700 -- .../test/java/org/apache/hive/jdbc/TestSSL.java | 44 +--- 1 file changed, 20 insertions(+), 24 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/e8f71f4d/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java -- diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java index 28a3777..b66ffda 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java @@ -55,15 +55,15 @@ public class TestSSL { private static final String HS2_HTTP_MODE = "http"; private static final String HS2_HTTP_ENDPOINT = "cliservice"; private static final String HS2_BINARY_AUTH_MODE = "NONE"; - private static final String HS2_HTTP_AUTH_MODE = "NOSASL"; private MiniHS2 miniHS2 = null; private static HiveConf conf = new HiveConf(); private Connection hs2Conn = null; private String dataFileDir = conf.get("test.data.files"); private Map confOverlay; - private final String SSL_CONN_PARAMS = ";ssl=true;sslTrustStore=" + URLEncoder.encode(dataFileDir + File.separator + - TRUST_STORE_NAME) + ";trustStorePassword=" + KEY_STORE_PASSWORD; + private final String SSL_CONN_PARAMS = ";ssl=true;sslTrustStore=" + + URLEncoder.encode(dataFileDir + File.separator + TRUST_STORE_NAME) + ";trustStorePassword=" + + KEY_STORE_PASSWORD; @BeforeClass public static void beforeTest() throws Exception { @@ -111,9 +111,10 @@ public class TestSSL { */ @Test public void testSSLVersion() throws Exception { -Assume.assumeTrue(execCommand("which openssl") == 0); // we need openssl -Assume.assumeTrue(System.getProperty("os.name").toLowerCase() - .contains("linux")); // we depend on linux openssl exit codes +// we need openssl +Assume.assumeTrue(execCommand("which openssl") == 0); +// we depend on linux openssl exit codes + Assume.assumeTrue(System.getProperty("os.name").toLowerCase().contains("linux")); setSslConfOverlay(confOverlay); // Test in binary mode @@ -122,16 +123,15 @@ public class TestSSL { miniHS2.start(confOverlay); // make SSL connection -hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" + -dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" + -KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar"); +hs2Conn = +DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" + dataFileDir ++ File.separator + TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD, +System.getProperty("user.name"), "bar"); hs2Conn.close(); -Assert.assertEquals("Expected exit code of 1", 1, - execCommand("openssl s_client -connect " + miniHS2.getHost() + ":" + miniHS2.getBinaryPort() - + " -ssl2 < /dev/null")); -Assert.assertEquals("Expected exit code of 1", 1, - execCommand("openssl s_client -connect " + miniHS2.getHost() + ":" + miniHS2.getBinaryPort() - + " -ssl3 < /dev/null")); 
+Assert.assertEquals("Expected exit code of 1", 1, execCommand("openssl s_client -connect " ++ miniHS2.getHost() + ":" + miniHS2.getBinaryPort() + " -ssl2 < /dev/null")); +Assert.assertEquals("Expected exit code of 1", 1, execCommand("openssl s_client -connect " ++ miniHS2.getHost() + ":" + miniHS2.getBinaryPort() + " -ssl3 < /dev/null")); miniHS2.stop(); // Test in http mode @@ -139,12 +139,10 @@ public class TestSSL { miniHS2.start(confOverlay); // make SSL connection try { - hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + - ";ssl=true;sslTrustStore=" + dataFileDir + File.separator + - TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD + - "?hive.server2.transport.mode=" + HS2_HTTP_MODE + - ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT, - System.getProperty("user.name"), "bar"); + hs2Conn = + DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl
[29/55] [abbrv] hive git commit: HIVE-11688 : OrcRawRecordMerger does not close primary reader if not fully consumed (Sudheesh Katkam via Prasanth J)
HIVE-11688 : OrcRawRecordMerger does not close primary reader if not fully consumed (Sudheesh Katkam via Prasanth J) Signed-off-by: Ashutosh Chauhan Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/24ec6bed Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/24ec6bed Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/24ec6bed Branch: refs/heads/spark Commit: 24ec6bedaaf683ab7dd4fc28801f3283b5a1744f Parents: a6da5d1 Author: Sudheesh Katkam Authored: Fri Aug 28 15:48:00 2015 -0800 Committer: Ashutosh Chauhan Committed: Sat Oct 24 18:10:45 2015 -0700 -- .../java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java | 3 +++ 1 file changed, 3 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hive/blob/24ec6bed/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java index 58b85ef..fb5110d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java @@ -660,6 +660,9 @@ public class OrcRawRecordMerger implements AcidInputFormat.RawReader{ @Override public void close() throws IOException { +if (primary != null) { + primary.recordReader.close(); +} for(ReaderPair pair: readers.values()) { pair.recordReader.close(); }
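The three added lines are the whole fix: close() previously iterated only over the secondary ReaderPair map, so a merger abandoned before end-of-stream never closed its primary ORC reader. A simplified sketch of the corrected shape, with java.io.Closeable standing in for the reader types:

```java
// Sketch of the bug shape HIVE-11688 fixes: a close() that only closes the
// secondary readers leaks the primary one when the merger is abandoned
// before being fully consumed. Types are simplified stand-ins.
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class RecordMerger implements Closeable {
  private final Closeable primary;                 // may be null for empty input
  private final List<Closeable> readers = new ArrayList<>();

  RecordMerger(Closeable primary, List<Closeable> others) {
    this.primary = primary;
    readers.addAll(others);
  }

  @Override
  public void close() throws IOException {
    // The fix: close the primary reader too, guarding against null.
    if (primary != null) {
      primary.close();
    }
    for (Closeable pair : readers) {
      pair.close();
    }
  }

  public static void main(String[] args) throws IOException {
    Closeable primary = () -> System.out.println("primary closed");
    RecordMerger m = new RecordMerger(primary,
        Collections.singletonList((Closeable) () -> System.out.println("delta closed")));
    m.close(); // both readers closed even though nothing was read
  }
}
```

Callers that may stop reading early should still route through close(), for example via try-with-resources; that is exactly the path this patch makes leak-free.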
[54/55] [abbrv] hive git commit: HIVE-12284: Merge branch 'master' into spark
http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java -- http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/itests/src/test/resources/testconfiguration.properties -- diff --cc itests/src/test/resources/testconfiguration.properties index 72dbcec,13efc58..e927955 --- a/itests/src/test/resources/testconfiguration.properties +++ b/itests/src/test/resources/testconfiguration.properties @@@ -1167,20 -1227,7 +1227,18 @@@ miniSparkOnYarn.query.files=auto_sortme load_fs2.q,\ load_hdfs_file_with_space_in_the_name.q,\ optrstat_groupby.q,\ + orc_merge1.q,\ + orc_merge2.q,\ + orc_merge3.q,\ + orc_merge4.q,\ + orc_merge5.q,\ + orc_merge6.q,\ + orc_merge7.q,\ + orc_merge8.q,\ + orc_merge9.q,\ + orc_merge_incompat1.q,\ + orc_merge_incompat2.q,\ parallel_orderby.q,\ - ql_rewrite_gbtoidx.q,\ - ql_rewrite_gbtoidx_cbo_1.q,\ quotedid_smb.q,\ reduce_deduplicate.q,\ remote_script.q,\ @@@ -1194,15 -1240,14 +1251,21 @@@ stats_counter_partitioned.q,\ temp_table_external.q,\ truncate_column_buckets.q,\ - uber_reduce.q + uber_reduce.q,\ + vector_inner_join.q,\ + vector_outer_join0.q,\ + vector_outer_join1.q,\ + vector_outer_join2.q,\ + vector_outer_join3.q,\ + vector_outer_join4.q,\ + vector_outer_join5.q + # These tests are removed from miniSparkOnYarn.query.files + # ql_rewrite_gbtoidx.q,\ + # ql_rewrite_gbtoidx_cbo_1.q,\ + # smb_mapjoin_8.q,\ + + spark.query.negative.files=groupby2_map_skew_multi_distinct.q,\ groupby2_multi_distinct.q,\ groupby3_map_skew_multi_distinct.q,\ http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/pom.xml -- diff --cc pom.xml index 0cd4238,3b3303c..0d9c9a3 --- a/pom.xml +++ b/pom.xml @@@ -159,9 -158,9 +158,9 @@@ 1.0.1 1.7.5 4.0.4 - 0.5.2 + 0.8.1-alpha 2.2.0 -1.4.0 +1.5.0 2.10 2.10.4 1.1 @@@ -222,9 -222,18 +222,8 @@@ false - - spark-1.3 - https://s3-us-west-1.amazonaws.com/hive-spark/maven2/spark_2.10-1.3-rc1/ - - true - - - false - - - http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java -- diff --cc ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java index d2c5245,085ad9e..2ab9c2d --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java @@@ -24,11 -23,8 +24,9 @@@ import java.util.List import java.util.Map; import java.util.Set; - import com.google.common.base.Preconditions; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.ql.io.merge.MergeFileMapper; http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java -- http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java --
[34/55] [abbrv] hive git commit: HIVE-12261 : schematool version info exit status should depend on compatibility, not equality (Thejas Nair, reviewed by Sushanth Sowmyan)
HIVE-12261 : schematool version info exit status should depend on compatibility, not equality (Thejas Nair, reviewed by Sushanth Sowmyan) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1f9556d8 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1f9556d8 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1f9556d8 Branch: refs/heads/spark Commit: 1f9556d843ae5bfaf0c701f4f7428dfabc127e7c Parents: f415ce9 Author: Thejas Nair Authored: Sun Oct 25 23:47:57 2015 -0700 Committer: Thejas Nair Committed: Sun Oct 25 23:47:57 2015 -0700 -- .../org/apache/hive/beeline/HiveSchemaTool.java | 12 +++--- .../hive/metastore/MetaStoreSchemaInfo.java | 44 2 files changed, 50 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/1f9556d8/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java -- diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java index 9e72a3a..6f0ba07 100644 --- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java +++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java @@ -129,7 +129,7 @@ public class HiveSchemaTool { String dbVersion = getMetaStoreSchemaVersion(metastoreConn); System.out.println("Hive distribution version:\t " + hiveVersion); System.out.println("Metastore schema version:\t " + dbVersion); -assertSameVersion(hiveVersion, dbVersion); +assertCompatibleVersion(hiveVersion, dbVersion); } @@ -185,15 +185,15 @@ public class HiveSchemaTool { String newSchemaVersion = getMetaStoreSchemaVersion( getConnectionToMetastore(false)); // verify that the new version is added to schema -assertSameVersion(MetaStoreSchemaInfo.getHiveSchemaVersion(), newSchemaVersion); +assertCompatibleVersion(MetaStoreSchemaInfo.getHiveSchemaVersion(), newSchemaVersion); } - private void assertSameVersion(String hiveSchemaVersion, String dbSchemaVersion) + private void assertCompatibleVersion(String hiveSchemaVersion, String dbSchemaVersion) throws HiveMetaException { -if (!hiveSchemaVersion.equalsIgnoreCase(dbSchemaVersion)) { - throw new HiveMetaException("Expected schema version " + hiveSchemaVersion - + ", found version " + dbSchemaVersion); +if (!MetaStoreSchemaInfo.isVersionCompatible(hiveSchemaVersion, dbSchemaVersion)) { + throw new HiveMetaException("Metastore schema version is not compatible. 
Hive Version: " + + hiveSchemaVersion + ", Database Schema Version: " + dbSchemaVersion); } } http://git-wip-us.apache.org/repos/asf/hive/blob/1f9556d8/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java -- diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java index d72267d..98798e8 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java @@ -151,6 +151,10 @@ public class MetaStoreSchemaInfo { public static String getHiveSchemaVersion() { String hiveVersion = HiveVersionInfo.getShortVersion(); +return getEquivalentVersion(hiveVersion); + } + + private static String getEquivalentVersion(String hiveVersion) { // if there is an equivalent version, return that, else return this version String equivalentVersion = EQUIVALENT_VERSIONS.get(hiveVersion); if (equivalentVersion != null) { @@ -160,4 +164,44 @@ public class MetaStoreSchemaInfo { } } + /** + * A dbVersion is compatible with hive version if it is greater or equal to + * the hive version. This is result of the db schema upgrade design principles + * followed in hive project. + * + * @param hiveVersion + * version of hive software + * @param dbVersion + * version of metastore rdbms schema + * @return true if versions are compatible + */ + public static boolean isVersionCompatible(String hiveVersion, String dbVersion) { +hiveVersion = getEquivalentVersion(hiveVersion); +dbVersion = getEquivalentVersion(dbVersion); +if (hiveVersion.equals(dbVersion)) { + return true; +} +String[] hiveVerParts = hiveVersion.split("\\."); +String[] dbVerParts = dbVersion.split("\\."); +if (hiveVerParts.length != 3 || dbVerParts.length != 3) { + // these are non standard version numbers. can't perform the + // comparison on these, so assume that they are incompatible + return
[39/55] [abbrv] hive git commit: HIVE-9013 : Hive set command exposes metastore db password (Binglin Chang, reviewed by Thejas Nair, Sushanth Sowmyan)
HIVE-9013 : Hive set command exposes metastore db password (Binglin Chang, reviewed by Thejas Nair, Sushanth Sowmyan) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a91e1471 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a91e1471 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a91e1471 Branch: refs/heads/spark Commit: a91e1471cfdd8a8da9bfafc1cb60efd16afbe847 Parents: 8f96446 Author: Sushanth Sowmyan Authored: Mon Oct 26 11:43:02 2015 -0700 Committer: Sushanth Sowmyan Committed: Mon Oct 26 11:43:54 2015 -0700 -- .../org/apache/hadoop/hive/conf/HiveConf.java | 33 .../apache/hadoop/hive/conf/TestHiveConf.java | 25 + .../apache/hadoop/hive/ql/exec/Utilities.java | 13 - .../hadoop/hive/ql/exec/mr/ExecDriver.java | 2 +- .../hadoop/hive/ql/exec/tez/DagUtils.java | 2 +- .../hive/ql/exec/tez/TezSessionState.java | 2 +- .../hadoop/hive/ql/processors/SetProcessor.java | 12 - .../hive/ql/processors/TestSetProcessor.java| 54 8 files changed, 125 insertions(+), 18 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/a91e1471/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java -- diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index dc79415..643326a 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -45,11 +45,13 @@ import java.io.PrintStream; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -75,6 +77,7 @@ public class HiveConf extends Configuration { private static final Map vars = new HashMap(); private static final Map metaConfs = new HashMap(); private final List restrictList = new ArrayList(); + private final Set hiddenSet = new HashSet(); private Pattern modWhiteListPattern = null; private volatile boolean isSparkConfigUpdated = false; @@ -2095,6 +2098,9 @@ public class HiveConf extends Configuration { HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list", "hive.security.authenticator.manager,hive.security.authorization.manager,hive.users.in.admin.role", "Comma separated list of configuration options which are immutable at runtime"), +HIVE_CONF_HIDDEN_LIST("hive.conf.hidden.list", +METASTOREPWD.varname + "," + HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname, +"Comma separated list of configuration options which should not be read by normal user like passwords"), // If this is set all move tasks at the end of a multi-insert query will only begin once all // outputs are ready @@ -2630,6 +2636,10 @@ public class HiveConf extends Configuration { } } + public boolean isHiddenConfig(String name) { +return hiddenSet.contains(name); + } + /** * check whether spark related property is updated, which includes spark configurations, * RSC configurations and yarn configuration in Spark on YARN mode. 
@@ -2979,6 +2989,7 @@ public class HiveConf extends Configuration { // setup list of conf vars that are not allowed to change runtime setupRestrictList(); +setupHiddenSet(); } @@ -3298,6 +3309,28 @@ public class HiveConf extends Configuration { } restrictList.add(ConfVars.HIVE_IN_TEST.varname); restrictList.add(ConfVars.HIVE_CONF_RESTRICTED_LIST.varname); +restrictList.add(ConfVars.HIVE_CONF_HIDDEN_LIST.varname); + } + + private void setupHiddenSet() { +String hiddenListStr = this.getVar(ConfVars.HIVE_CONF_HIDDEN_LIST); +hiddenSet.clear(); +if (hiddenListStr != null) { + for (String entry : hiddenListStr.split(",")) { +hiddenSet.add(entry.trim()); + } +} + } + + /** + * Strips hidden config entries from configuration + */ + public void stripHiddenConfigurations(Configuration conf) { +for (String name : hiddenSet) { + if (conf.get(name) != null) { +conf.set(name, ""); + } +} } public static boolean isLoadMetastoreConfig() { http://git-wip-us.apache.org/repos/asf/hive/blob/a91e1471/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java -- diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java b/common/src/test/org/apache/hadoop/hive/conf/TestHi
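Beyond the set command itself, this commit teaches HiveConf to blank sensitive values generically: hive.conf.hidden.list is parsed into a set, and stripHiddenConfigurations() overwrites any matching key before a configuration is exposed or shipped to another process. A self-contained sketch of that masking pattern, with a plain Map standing in for Hadoop's Configuration:

```java
// Self-contained sketch of the masking pattern HIVE-9013 introduces: a
// comma-separated "hidden list" is parsed into a set, and matching keys are
// blanked before the configuration is shown. Map stands in for Configuration.
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class HiddenConfDemo {
  static Set<String> parseHiddenList(String hiddenListStr) {
    Set<String> hidden = new HashSet<>();
    if (hiddenListStr != null) {
      for (String entry : hiddenListStr.split(",")) {
        hidden.add(entry.trim());
      }
    }
    return hidden;
  }

  // Mirrors stripHiddenConfigurations: overwrite rather than remove, so the
  // key still appears but carries an empty value.
  static void stripHidden(Map<String, String> conf, Set<String> hidden) {
    for (String name : hidden) {
      if (conf.get(name) != null) {
        conf.put(name, "");
      }
    }
  }

  public static void main(String[] args) {
    Map<String, String> conf = new HashMap<>();
    conf.put("javax.jdo.option.ConnectionPassword", "secret");
    conf.put("hive.exec.parallel", "true");
    stripHidden(conf, parseHiddenList(
        "javax.jdo.option.ConnectionPassword,hive.server2.keystore.password"));
    System.out.println(conf); // password blanked, other settings intact
  }
}
```

Note that the hidden list itself is appended to the restricted list, so a session cannot simply unhide the passwords at runtime.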
[36/55] [abbrv] hive git commit: HIVE-12246: Orc FileDump fails with Missing CLI jar (Prasanth Jayachandran reviewed by Ferdinand Xu)
HIVE-12246: Orc FileDump fails with Missing CLI jar (Prasanth Jayachandran reviewed by Ferdinand Xu) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/383d1ccc Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/383d1ccc Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/383d1ccc Branch: refs/heads/spark Commit: 383d1ccc8343168189e8aca97b7e247bdb000cd3 Parents: 2a0ea58 Author: Prasanth Jayachandran Authored: Mon Oct 26 11:37:27 2015 -0500 Committer: Prasanth Jayachandran Committed: Mon Oct 26 11:37:27 2015 -0500 -- bin/ext/util/execHiveCmd.sh | 19 --- 1 file changed, 16 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/383d1ccc/bin/ext/util/execHiveCmd.sh -- diff --git a/bin/ext/util/execHiveCmd.sh b/bin/ext/util/execHiveCmd.sh index e46ec3c..9a06ce0 100644 --- a/bin/ext/util/execHiveCmd.sh +++ b/bin/ext/util/execHiveCmd.sh @@ -13,15 +13,28 @@ # See the License for the specific language governing permissions and # limitations under the License. +CLI_JAR="hive-cli-*.jar" +BEELINE_JAR="hive-beeline-*.jar" + execHiveCmd () { CLASS=$1; shift; - JAR=$1 - shift; + + # if jar is not passed as parameter use corresponding cli jar + if [ "$1" == "$CLI_JAR" ] || [ "$1" == "$BEELINE_JAR" ]; then +JAR="$1" +shift; + else +if [ "$USE_DEPRECATED_CLI" == "true" ]; then + JAR="$CLI_JAR" +else + JAR="$BEELINE_JAR" +fi + fi # cli specific code if [ ! -f ${HIVE_LIB}/$JAR ]; then -echo "Missing Hive CLI Jar" +echo "Missing $JAR Jar" exit 3; fi
[18/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp -- diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp index f61ac7d..f982bf2 100644 --- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp +++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h -- diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h index c95fec1..ae14bd1 100644 --- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h +++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated
[40/55] [abbrv] hive git commit: HIVE-11523: org.apache.hadoop.hive.ql.io.orc.FileDump should handle errors (Prasanth Jayachandran reviewed by Ashutosh Chauhan)
HIVE-11523: org.apache.hadoop.hive.ql.io.orc.FileDump should handle errors (Prasanth Jayachandran reviewed by Ashutosh Chauhan) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d84e393e Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d84e393e Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d84e393e Branch: refs/heads/spark Commit: d84e393ed66829fe0c8cc87254fef2a329b96163 Parents: a91e147 Author: Prasanth Jayachandran Authored: Mon Oct 26 14:18:36 2015 -0500 Committer: Prasanth Jayachandran Committed: Mon Oct 26 14:18:36 2015 -0500 -- .../apache/hadoop/hive/ql/io/orc/FileDump.java | 193 + .../hadoop/hive/ql/io/orc/JsonFileDump.java | 210 ++- .../hadoop/hive/ql/io/orc/TestFileDump.java | 50 - 3 files changed, 213 insertions(+), 240 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/d84e393e/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java index a1c5058..9c6538f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java @@ -136,9 +136,16 @@ public final class FileDump { private static void printData(List files, Configuration conf) throws IOException, JSONException { for (String file : files) { - printJsonData(conf, file); - if (files.size() > 1) { -System.out.println(Strings.repeat("=", 80) + "\n"); + try { +printJsonData(conf, file); +if (files.size() > 1) { + System.out.println(Strings.repeat("=", 80) + "\n"); +} + } catch (Exception e) { +System.err.println("Unable to dump data for file: " + file); +e.printStackTrace(); +System.err.println(Strings.repeat("=", 80) + "\n"); +continue; } } } @@ -146,103 +153,111 @@ public final class FileDump { private static void printMetaData(List files, Configuration conf, List rowIndexCols, boolean printTimeZone) throws IOException { for (String filename : files) { - System.out.println("Structure for " + filename); - Path path = new Path(filename); - Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf)); - System.out.println("File Version: " + reader.getFileVersion().getName() + - " with " + reader.getWriterVersion()); - RecordReaderImpl rows = (RecordReaderImpl) reader.rows(); - System.out.println("Rows: " + reader.getNumberOfRows()); - System.out.println("Compression: " + reader.getCompression()); - if (reader.getCompression() != CompressionKind.NONE) { -System.out.println("Compression size: " + reader.getCompressionSize()); - } - System.out.println("Type: " + reader.getObjectInspector().getTypeName()); - System.out.println("\nStripe Statistics:"); - List stripeStats = reader.getStripeStatistics(); - for (int n = 0; n < stripeStats.size(); n++) { -System.out.println(" Stripe " + (n + 1) + ":"); -StripeStatistics ss = stripeStats.get(n); -for (int i = 0; i < ss.getColumnStatistics().length; ++i) { - System.out.println("Column " + i + ": " + - ss.getColumnStatistics()[i].toString()); + try { +Path path = new Path(filename); +Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf)); +System.out.println("Structure for " + filename); +System.out.println("File Version: " + reader.getFileVersion().getName() + +" with " + reader.getWriterVersion()); +RecordReaderImpl rows = (RecordReaderImpl) reader.rows(); +System.out.println("Rows: " + reader.getNumberOfRows()); +System.out.println("Compression: " + 
reader.getCompression()); +if (reader.getCompression() != CompressionKind.NONE) { + System.out.println("Compression size: " + reader.getCompressionSize()); } - } - ColumnStatistics[] stats = reader.getStatistics(); - int colCount = stats.length; - System.out.println("\nFile Statistics:"); - for (int i = 0; i < stats.length; ++i) { -System.out.println(" Column " + i + ": " + stats[i].toString()); - } - System.out.println("\nStripes:"); - int stripeIx = -1; - for (StripeInformation stripe : reader.getStripes()) { -++stripeIx; -long stripeStart = stripe.getOffset(); -OrcProto.StripeFooter footer = rows.readStripeFooter(stripe); -if (printTimeZone) { - String tz = footer.getWriterTimezone(); - if (tz == null ||
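The restructuring above wraps each file's dump in its own try/catch, so one corrupt or unreadable ORC file is reported to stderr and skipped rather than aborting the whole run. A minimal sketch of that per-file isolation; the file names and the induced failure are fabricated for illustration:

```java
// Sketch of the per-file error isolation HIVE-11523 adds to FileDump: each
// file is processed inside its own try/catch so a bad file is reported and
// skipped instead of killing the entire dump.
import java.util.Arrays;
import java.util.List;

public class PerFileDumpDemo {
  static void dumpOneFile(String file) throws Exception {
    if (file.contains("corrupt")) {
      throw new IllegalStateException("Malformed ORC file " + file);
    }
    System.out.println("Structure for " + file);
  }

  public static void main(String[] args) {
    List<String> files = Arrays.asList("a.orc", "corrupt.orc", "b.orc");
    for (String file : files) {
      try {
        dumpOneFile(file);
      } catch (Exception e) {
        // Report and continue with the next file, as the patched FileDump does.
        System.err.println("Unable to dump data for file: " + file);
        e.printStackTrace();
      }
    }
  }
}
```

Opening the Reader was also moved inside the try block, which matters: a truncated footer fails at open time, before any per-file output is printed.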
[54/55] [abbrv] hive git commit: HIVE-11497: Make sure --orcfiledump utility includes OrcRecordUpdater.AcidStats (Prasanth Jayachandran reviewed by Eugene Koifman)
HIVE-11497: Make sure --orcfiledump utility includes OrcRecordUpdate.AcidStats (Prasanth Jayachandran reviewed by Eugene Koifman) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3e21a6d4 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3e21a6d4 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3e21a6d4 Branch: refs/heads/spark Commit: 3e21a6d44971feb91ab26ec6dbf8ee207683ada1 Parents: f2ede0e Author: Prasanth Jayachandran Authored: Tue Oct 27 23:44:51 2015 -0500 Committer: Prasanth Jayachandran Committed: Tue Oct 27 23:44:51 2015 -0500 -- .../apache/hadoop/hive/ql/io/orc/FileDump.java | 4 +++ .../hadoop/hive/ql/io/orc/JsonFileDump.java | 6 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java | 34 +--- ql/src/test/resources/orc-file-dump.json| 3 +- 4 files changed, 35 insertions(+), 12 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/3e21a6d4/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java index 9c6538f..0e9667a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java @@ -249,6 +249,10 @@ public final class FileDump { System.out.println("\nFile length: " + fileLen + " bytes"); System.out.println("Padding length: " + paddedBytes + " bytes"); System.out.println("Padding ratio: " + format.format(percentPadding) + "%"); +OrcRecordUpdater.AcidStats acidStats = OrcRecordUpdater.parseAcidStats(reader); +if (acidStats != null) { + System.out.println("ACID stats:" + acidStats); +} rows.close(); if (files.size() > 1) { System.out.println(Strings.repeat("=", 80) + "\n"); http://git-wip-us.apache.org/repos/asf/hive/blob/3e21a6d4/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java index 02e01b4..7f673dc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java @@ -167,6 +167,12 @@ public class JsonFileDump { writer.key("fileLength").value(fileLen); writer.key("paddingLength").value(paddedBytes); writer.key("paddingRatio").value(percentPadding); +OrcRecordUpdater.AcidStats acidStats = OrcRecordUpdater.parseAcidStats(reader); +if (acidStats != null) { + writer.key("numInserts").value(acidStats.inserts); + writer.key("numDeletes").value(acidStats.deletes); + writer.key("numUpdates").value(acidStats.updates); +} writer.key("status").value("OK"); rows.close(); http://git-wip-us.apache.org/repos/asf/hive/blob/3e21a6d4/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java index 2220b8e..01374a7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java @@ -45,7 +45,6 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; /** * A RecordUpdater where the files are stored as ORC. 
@@ -128,6 +127,15 @@ public class OrcRecordUpdater implements RecordUpdater { builder.append(deletes); return builder.toString(); } + +@Override +public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append(" inserts: ").append(inserts); + builder.append(" updates: ").append(updates); + builder.append(" deletes: ").append(deletes); + return builder.toString(); +} } static Path getSideFile(Path main) { @@ -448,17 +456,21 @@ public class OrcRecordUpdater implements RecordUpdater { * {@link KeyIndexBuilder} creates these */ static AcidStats parseAcidStats(Reader reader) { -String statsSerialized; -try { - ByteBuffer val = -reader.getMetadataValue(OrcRecordUpdater.ACID_STATS) - .duplicate(); - statsSerialized = utf8Decoder.decode(val).toString(); -} catch (CharacterCodingException e) { - throw new Ill
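Alongside the new toString(), parseAcidStats() now returns null when the ACID-stats metadata entry is absent instead of failing, and both dump paths print the section only when stats exist. A self-contained sketch of that optional-metadata pattern; the key name and the comma-separated wire format below are assumptions standing in for the real ORC metadata API:

```java
// Sketch of the defensive-parse pattern in HIVE-11497: the ACID stats blob
// is optional file metadata, so the parser returns null when the entry is
// absent and callers print the section only if stats exist. The key name
// and serialization below are assumed stand-ins, not the ORC API.
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

public class AcidStatsDemo {
  static class AcidStats {
    long inserts, updates, deletes;
    @Override public String toString() {
      return " inserts: " + inserts + " updates: " + updates + " deletes: " + deletes;
    }
  }

  // Returns null when the metadata entry does not exist (non-ACID files).
  static AcidStats parseAcidStats(Map<String, byte[]> fileMetadata) {
    byte[] val = fileMetadata.get("hive.acid.stats"); // assumed key name
    if (val == null) {
      return null;
    }
    String[] parts = new String(val, StandardCharsets.UTF_8).split(",");
    AcidStats stats = new AcidStats();
    stats.inserts = Long.parseLong(parts[0]);
    stats.updates = Long.parseLong(parts[1]);
    stats.deletes = Long.parseLong(parts[2]);
    return stats;
  }

  public static void main(String[] args) {
    Map<String, byte[]> meta = new HashMap<>();
    meta.put("hive.acid.stats", "12,3,1".getBytes(StandardCharsets.UTF_8));
    AcidStats acidStats = parseAcidStats(meta);
    if (acidStats != null) {              // mirrors the FileDump call site
      System.out.println("ACID stats:" + acidStats);
    }
  }
}
```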
[20/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp -- diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp index bc84e20..a82c363 100644 --- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp +++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -15,6 +15,7 @@ ThriftHiveMetastore_getMetaConf_args::~ThriftHiveMetastore_getMetaConf_args() th uint32_t ThriftHiveMetastore_getMetaConf_args::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -55,7 +56,7 @@ uint32_t ThriftHiveMetastore_getMetaConf_args::read(::apache::thrift::protocol:: uint32_t ThriftHiveMetastore_getMetaConf_args::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("ThriftHiveMetastore_getMetaConf_args"); xfer += oprot->writeFieldBegin("key", ::apache::thrift::protocol::T_STRING, 1); @@ -64,7 +65,6 @@ uint32_t ThriftHiveMetastore_getMetaConf_args::write(::apache::thrift::protocol: xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -75,7 +75,7 @@ ThriftHiveMetastore_getMetaConf_pargs::~ThriftHiveMetastore_getMetaConf_pargs() uint32_t ThriftHiveMetastore_getMetaConf_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("ThriftHiveMetastore_getMetaConf_pargs"); xfer += oprot->writeFieldBegin("key", ::apache::thrift::protocol::T_STRING, 1); @@ -84,7 +84,6 @@ uint32_t ThriftHiveMetastore_getMetaConf_pargs::write(::apache::thrift::protocol xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -95,6 +94,7 @@ ThriftHiveMetastore_getMetaConf_result::~ThriftHiveMetastore_getMetaConf_result( uint32_t ThriftHiveMetastore_getMetaConf_result::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -168,6 +168,7 @@ ThriftHiveMetastore_getMetaConf_presult::~ThriftHiveMetastore_getMetaConf_presul uint32_t ThriftHiveMetastore_getMetaConf_presult::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -221,6 +222,7 @@ ThriftHiveMetastore_setMetaConf_args::~ThriftHiveMetastore_setMetaConf_args() th uint32_t ThriftHiveMetastore_setMetaConf_args::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -269,7 +271,7 @@ uint32_t ThriftHiveMetastore_setMetaConf_args::read(::apache::thrift::protocol:: uint32_t 
ThriftHiveMetastore_setMetaConf_args::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("ThriftHiveMetastore_setMetaConf_args"); xfer += oprot->writeFieldBegin("key", ::apache::thrift::protocol::T_STRING, 1); @@ -282,7 +284,6 @@ uint32_t ThriftHiveMetastore_setMetaConf_args::write(::apache::thrift::protocol: xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -293,7 +294,7 @@ ThriftHiveMetastore_setMetaConf_pargs::~ThriftHiveMetastore_setMetaConf_pargs() uint32_t ThriftHiveMetastore_setMetaConf_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("ThriftHiveMetastore_setMetaConf_pargs"); xfer += oprot->writeFieldBegin("key", ::apache::thrift::protocol::T_STRING, 1); @@ -306,7 +307,6 @@ uint32_t ThriftHiveMetastore_setMetaConf_pargs::write(::apache::thrift::protocol xfer += oprot->writeFieldStop(); xfe
[14/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java -- diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java index 82dd057..f15d7a9 100644 --- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java +++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class HeartbeatTxnRangeRequest implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HeartbeatTxnRangeRequest"); @@ -229,10 +229,10 @@ public class HeartbeatTxnRangeRequest implements org.apache.thrift.TBasehttp://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java -- diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java index feb550a..b00fb9c 100644 --- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java +++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class HeartbeatTxnRangeResponse implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HeartbeatTxnRangeResponse"); http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java -- diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java index 30665de..3b818ab 100644 --- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java +++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * 
Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class HiveObjectPrivilege implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HiveObjectPrivilege"); http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java -- diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java index 8a06eb1..d7dd8ec 100644 --- a/metastore/src/gen/thrift/gen-javabean/o
[17/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp -- diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp index a0bb4c0..5fd4a90 100644 --- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp +++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -172,11 +172,9 @@ void Version::__set_comments(const std::string& val) { this->comments = val; } -const char* Version::ascii_fingerprint = "07A9615F837F7D0A952B595DD3020972"; -const uint8_t Version::binary_fingerprint[16] = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72}; - uint32_t Version::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -225,7 +223,7 @@ uint32_t Version::read(::apache::thrift::protocol::TProtocol* iprot) { uint32_t Version::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("Version"); xfer += oprot->writeFieldBegin("version", ::apache::thrift::protocol::T_STRING, 1); @@ -238,7 +236,6 @@ uint32_t Version::write(::apache::thrift::protocol::TProtocol* oprot) const { xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -260,13 +257,12 @@ Version& Version::operator=(const Version& other1) { __isset = other1.__isset; return *this; } -std::ostream& operator<<(std::ostream& out, const Version& obj) { - using apache::thrift::to_string; +void Version::printTo(std::ostream& out) const { + using ::apache::thrift::to_string; out << "Version("; - out << "version=" << to_string(obj.version); - out << ", " << "comments=" << to_string(obj.comments); + out << "version=" << to_string(version); + out << ", " << "comments=" << to_string(comments); out << ")"; - return out; } @@ -286,11 +282,9 @@ void FieldSchema::__set_comment(const std::string& val) { this->comment = val; } -const char* FieldSchema::ascii_fingerprint = "AB879940BD15B6B25691265F7384B271"; -const uint8_t FieldSchema::binary_fingerprint[16] = {0xAB,0x87,0x99,0x40,0xBD,0x15,0xB6,0xB2,0x56,0x91,0x26,0x5F,0x73,0x84,0xB2,0x71}; - uint32_t FieldSchema::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -347,7 +341,7 @@ uint32_t FieldSchema::read(::apache::thrift::protocol::TProtocol* iprot) { uint32_t FieldSchema::write(::apache::thrift::protocol::TProtocol* oprot) const { uint32_t xfer = 0; - oprot->incrementRecursionDepth(); + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); xfer += oprot->writeStructBegin("FieldSchema"); xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1); @@ -364,7 +358,6 @@ uint32_t FieldSchema::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); - oprot->decrementRecursionDepth(); return xfer; } @@ -389,14 +382,13 @@ FieldSchema& 
FieldSchema::operator=(const FieldSchema& other3) { __isset = other3.__isset; return *this; } -std::ostream& operator<<(std::ostream& out, const FieldSchema& obj) { - using apache::thrift::to_string; +void FieldSchema::printTo(std::ostream& out) const { + using ::apache::thrift::to_string; out << "FieldSchema("; - out << "name=" << to_string(obj.name); - out << ", " << "type=" << to_string(obj.type); - out << ", " << "comment=" << to_string(obj.comment); + out << "name=" << to_string(name); + out << ", " << "type=" << to_string(type); + out << ", " << "comment=" << to_string(comment); out << ")"; - return out; } @@ -423,11 +415,9 @@ void Type::__set_fields(const std::vector & val) { __isset.fields = true; } -const char* Type::ascii_fingerprint = "20DF02DE523C27F7066C7BD4D9120842"; -const uint8_t Type::binary_fingerprint[16] = {0x20,0xDF,0x02,0xDE,0x52,0x3C,0x27,0xF7,0x06,0x6C,0x7B,0xD4,0xD9,0x12,0x08,0x42}; - uint32_t Type::read(::apache::thrift::protocol::TProtocol* iprot) { + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); uint32_t xfer = 0; std::string fname; ::apache::thrift::protocol::TType ftype; @@ -504,7 +494,7 @@ uint32_t Type::read(::apache::thrift:
[44/55] [abbrv] hive git commit: HIVE-10807 : Invalidate basic stats for insert queries if autogather=false (Ashutosh Chauhan via Gopal V)
HIVE-10807 : Invalidate basic stats for insert queries if autogather=false (Ashutosh Chauhan via Gopal V) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/86346fb1 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/86346fb1 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/86346fb1 Branch: refs/heads/spark Commit: 86346fb150f0358e40b6435077eccda3e07d17e2 Parents: f9517ef Author: Ashutosh Chauhan Authored: Mon Oct 26 17:45:59 2015 -0700 Committer: Ashutosh Chauhan Committed: Mon Oct 26 17:45:59 2015 -0700 -- .../apache/hadoop/hive/ql/QueryProperties.java | 10 -- .../apache/hadoop/hive/ql/metadata/Hive.java| 13 +- .../hive/ql/optimizer/GenMapRedUtils.java | 3 +- .../hive/ql/optimizer/StatsOptimizer.java | 38 +++-- .../hadoop/hive/ql/parse/QBParseInfo.java | 9 -- .../hadoop/hive/ql/parse/SemanticAnalyzer.java | 6 - .../test/queries/clientpositive/insert_into1.q | 10 +- .../test/queries/clientpositive/insert_into2.q | 8 + .../clientpositive/bucket_map_join_1.q.out | 4 - .../clientpositive/bucket_map_join_2.q.out | 4 - .../encryption_insert_partition_dynamic.q.out | 4 - .../encryption_join_unencrypted_tbl.q.out | 4 - .../results/clientpositive/insert_into1.q.out | 151 +++ .../results/clientpositive/insert_into2.q.out | 69 + .../spark/bucket_map_join_1.q.out | 8 - .../spark/bucket_map_join_2.q.out | 8 - .../clientpositive/spark/insert_into1.q.out | 116 ++ .../clientpositive/spark/insert_into2.q.out | 75 + .../results/clientpositive/spark/stats3.q.out | 2 - ql/src/test/results/clientpositive/stats3.q.out | 2 - .../clientpositive/tez/insert_into1.q.out | 120 +++ .../clientpositive/tez/insert_into2.q.out | 75 + 22 files changed, 661 insertions(+), 78 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java index e8f7fba..3bc9432 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java @@ -39,7 +39,6 @@ public class QueryProperties { boolean noScanAnalyzeCommand; boolean analyzeRewrite; boolean ctas; - boolean insertToTable; int outerQueryLimit; boolean hasJoin = false; @@ -115,14 +114,6 @@ public class QueryProperties { this.ctas = ctas; } - public boolean isInsertToTable() { -return insertToTable; - } - - public void setInsertToTable(boolean insertToTable) { -this.insertToTable = insertToTable; - } - public int getOuterQueryLimit() { return outerQueryLimit; } @@ -276,7 +267,6 @@ public class QueryProperties { noScanAnalyzeCommand = false; analyzeRewrite = false; ctas = false; -insertToTable = false; outerQueryLimit = -1; hasJoin = false; http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index c64d8d1..a2dea67 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -1464,6 +1464,7 @@ public class Hive { newTPart = getPartition(tbl, partSpec, true, newPartPath.toString(), inheritTableSpecs, newFiles); + // recreate the partition if it existed before if (isSkewedStoreAsSubdir) { org.apache.hadoop.hive.metastore.api.Partition newCreatedTpart = 
newTPart.getTPartition(); @@ -1474,12 +1475,18 @@ public class Hive { /* Add list bucketing location mappings. */ skewedInfo.setSkewedColValueLocationMaps(skewedColValueLocationMaps); newCreatedTpart.getSd().setSkewedInfo(skewedInfo); +if(!this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { + newTPart.getParameters().put(StatsSetupConst.COLUMN_STATS_ACCURATE, "false"); +} alterPartition(tbl.getDbName(), tbl.getTableName(), new Partition(tbl, newCreatedTpart)); newTPart = getPartition(tbl, partSpec, true, newPartPath.toString(), inheritTableSpecs, newFiles); return new Partition(tbl, newCreatedTpart); } - + if(!this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { +newTPart.getParameters().
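The thread running through these hunks: an insert that runs with hive.stats.autogather disabled leaves previously gathered basic stats stale, so the load path now stamps COLUMN_STATS_ACCURATE=false on the written table or partition, and StatsOptimizer stops answering queries from those numbers. A sketch of that invalidation rule, with a Map standing in for the metastore object's parameters:

```java
// Sketch of the invalidation rule HIVE-10807 applies on load: when
// hive.stats.autogather is off, the written table/partition is marked
// COLUMN_STATS_ACCURATE=false so stale basic stats can no longer be used
// to short-circuit queries. Map stands in for Table/Partition parameters.
import java.util.HashMap;
import java.util.Map;

public class StatsInvalidation {
  static final String COLUMN_STATS_ACCURATE = "COLUMN_STATS_ACCURATE";

  static void afterInsert(Map<String, String> params, boolean statsAutogather) {
    if (!statsAutogather) {
      // The insert changed the data but no stats task will run, so the
      // stored stats are now untrustworthy; flag them as inaccurate.
      params.put(COLUMN_STATS_ACCURATE, "false");
    }
  }

  public static void main(String[] args) {
    Map<String, String> partParams = new HashMap<>();
    partParams.put(COLUMN_STATS_ACCURATE, "true");
    partParams.put("numRows", "1000"); // stale after the insert
    afterInsert(partParams, /* hive.stats.autogather= */ false);
    System.out.println(partParams); // COLUMN_STATS_ACCURATE=false
  }
}
```

This is also why the patch drops QueryProperties.insertToTable: the decision now hangs off the autogather setting at load time rather than off a query-level flag.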
[09/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/testthrift_types.h -- diff --git a/serde/src/gen/thrift/gen-cpp/testthrift_types.h b/serde/src/gen/thrift/gen-cpp/testthrift_types.h index 8c57e48..a59346a 100644 --- a/serde/src/gen/thrift/gen-cpp/testthrift_types.h +++ b/serde/src/gen/thrift/gen-cpp/testthrift_types.h @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -31,9 +31,6 @@ typedef struct _InnerStruct__isset { class InnerStruct { public: - static const char* ascii_fingerprint; // = "E86CACEB22240450EDCBEFC3A83970E4"; - static const uint8_t binary_fingerprint[16]; // = {0xE8,0x6C,0xAC,0xEB,0x22,0x24,0x04,0x50,0xED,0xCB,0xEF,0xC3,0xA8,0x39,0x70,0xE4}; - InnerStruct(const InnerStruct&); InnerStruct& operator=(const InnerStruct&); InnerStruct() : field0(0) { @@ -61,11 +58,17 @@ class InnerStruct { uint32_t read(::apache::thrift::protocol::TProtocol* iprot); uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; - friend std::ostream& operator<<(std::ostream& out, const InnerStruct& obj); + virtual void printTo(std::ostream& out) const; }; void swap(InnerStruct &a, InnerStruct &b); +inline std::ostream& operator<<(std::ostream& out, const InnerStruct& obj) +{ + obj.printTo(out); + return out; +} + typedef struct _ThriftTestObj__isset { _ThriftTestObj__isset() : field1(false), field2(false), field3(false) {} bool field1 :1; @@ -76,9 +79,6 @@ typedef struct _ThriftTestObj__isset { class ThriftTestObj { public: - static const char* ascii_fingerprint; // = "2BA5D8DAACFBBE6599779830A6185706"; - static const uint8_t binary_fingerprint[16]; // = {0x2B,0xA5,0xD8,0xDA,0xAC,0xFB,0xBE,0x65,0x99,0x77,0x98,0x30,0xA6,0x18,0x57,0x06}; - ThriftTestObj(const ThriftTestObj&); ThriftTestObj& operator=(const ThriftTestObj&); ThriftTestObj() : field1(0), field2() { @@ -116,11 +116,17 @@ class ThriftTestObj { uint32_t read(::apache::thrift::protocol::TProtocol* iprot); uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; - friend std::ostream& operator<<(std::ostream& out, const ThriftTestObj& obj); + virtual void printTo(std::ostream& out) const; }; void swap(ThriftTestObj &a, ThriftTestObj &b); +inline std::ostream& operator<<(std::ostream& out, const ThriftTestObj& obj) +{ + obj.printTo(out); + return out; +} + #endif http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java -- diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java index 7902849..8b3eeb7 100644 --- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java +++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java -- diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java index 3fdd0d9..01a84fe 100644 --- 
a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java +++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class InnerStruct implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InnerStruct"); @@ -185,7 +185,7 @@ public class InnerStruct implements org.apache.thrift.TBasehttp://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java -
[22/55] [abbrv] hive git commit: HIVE-12253 : revert HIVE-12061 (Sergey Shelukhin, reviewed by Prasanth Jayachandran)
http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp -- diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp index 5fd4a90..cb0ee7a 100644 --- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp +++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp @@ -151,14 +151,6 @@ const char* _kResourceTypeNames[] = { }; const std::map _ResourceType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(3, _kResourceTypeValues, _kResourceTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL)); -int _kFileMetadataExprTypeValues[] = { - FileMetadataExprType::ORC_SARG -}; -const char* _kFileMetadataExprTypeNames[] = { - "ORC_SARG" -}; -const std::map _FileMetadataExprType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(1, _kFileMetadataExprTypeValues, _kFileMetadataExprTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL)); - Version::~Version() throw() { } @@ -14270,11 +14262,6 @@ void GetFileMetadataByExprRequest::__set_doGetFooters(const bool val) { __isset.doGetFooters = true; } -void GetFileMetadataByExprRequest::__set_type(const FileMetadataExprType::type val) { - this->type = val; -__isset.type = true; -} - uint32_t GetFileMetadataByExprRequest::read(::apache::thrift::protocol::TProtocol* iprot) { apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); @@ -14334,16 +14321,6 @@ uint32_t GetFileMetadataByExprRequest::read(::apache::thrift::protocol::TProtoco xfer += iprot->skip(ftype); } break; - case 4: -if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast626; - xfer += iprot->readI32(ecast626); - this->type = (FileMetadataExprType::type)ecast626; - this->__isset.type = true; -} else { - xfer += iprot->skip(ftype); -} -break; default: xfer += iprot->skip(ftype); break; @@ -14368,10 +14345,10 @@ uint32_t GetFileMetadataByExprRequest::write(::apache::thrift::protocol::TProtoc xfer += oprot->writeFieldBegin("fileIds", ::apache::thrift::protocol::T_LIST, 1); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast(this->fileIds.size())); -std::vector ::const_iterator _iter627; -for (_iter627 = this->fileIds.begin(); _iter627 != this->fileIds.end(); ++_iter627) +std::vector ::const_iterator _iter626; +for (_iter626 = this->fileIds.begin(); _iter626 != this->fileIds.end(); ++_iter626) { - xfer += oprot->writeI64((*_iter627)); + xfer += oprot->writeI64((*_iter626)); } xfer += oprot->writeListEnd(); } @@ -14386,11 +14363,6 @@ uint32_t GetFileMetadataByExprRequest::write(::apache::thrift::protocol::TProtoc xfer += oprot->writeBool(this->doGetFooters); xfer += oprot->writeFieldEnd(); } - if (this->__isset.type) { -xfer += oprot->writeFieldBegin("type", ::apache::thrift::protocol::T_I32, 4); -xfer += oprot->writeI32((int32_t)this->type); -xfer += oprot->writeFieldEnd(); - } xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); return xfer; @@ -14401,23 +14373,20 @@ void swap(GetFileMetadataByExprRequest &a, GetFileMetadataByExprRequest &b) { swap(a.fileIds, b.fileIds); swap(a.expr, b.expr); swap(a.doGetFooters, b.doGetFooters); - swap(a.type, b.type); swap(a.__isset, b.__isset); } -GetFileMetadataByExprRequest::GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest& other628) { +GetFileMetadataByExprRequest::GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest& other627) { + fileIds = other627.fileIds; + expr = other627.expr; + doGetFooters = other627.doGetFooters; + 
__isset = other627.__isset; +} +GetFileMetadataByExprRequest& GetFileMetadataByExprRequest::operator=(const GetFileMetadataByExprRequest& other628) { fileIds = other628.fileIds; expr = other628.expr; doGetFooters = other628.doGetFooters; - type = other628.type; __isset = other628.__isset; -} -GetFileMetadataByExprRequest& GetFileMetadataByExprRequest::operator=(const GetFileMetadataByExprRequest& other629) { - fileIds = other629.fileIds; - expr = other629.expr; - doGetFooters = other629.doGetFooters; - type = other629.type; - __isset = other629.__isset; return *this; } void GetFileMetadataByExprRequest::printTo(std::ostream& out) const { @@ -14426,7 +14395,6 @@ void GetFileMetadataByExprRequest::printTo(std::ostream& out) const { out << "fileIds=" << to_string(fileIds); out << ", " << "expr=" << to_string(expr); out << ", " << "doGetFooters="; (__isset.doGetFooters ? (out << to_string(doGetFooters)) : (out << "")); - out << ", " << "type="; (__isset.type ? (out << to_string(type)) : (out << "")); out << ")"; } @@ -14470,17 +14438,17
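The reverted hunks above show the guard Thrift generates around an optional field: the writer only emits the field when its __isset flag has been set. A minimal Java sketch of that same guard pattern, using a hypothetical FileRequest class rather than the generated struct:

import java.io.DataOutputStream;
import java.io.IOException;

// Sketch of the optional-field guard visible in the reverted C++ code:
// an "isset" flag decides whether the field is serialized at all.
public class FileRequest {
  private int type;          // optional field (stand-in for the expr-type enum)
  private boolean typeIsSet; // mirrors Thrift's __isset.type

  public void setType(int val) {
    this.type = val;
    this.typeIsSet = true;   // setting the value also marks it present
  }

  public void write(DataOutputStream out) throws IOException {
    // required fields would be written unconditionally here ...
    if (typeIsSet) {         // matches: if (this->__isset.type) { ... }
      out.writeByte(4);      // field id 4, as in the reverted struct
      out.writeInt(type);
    }
    out.writeByte(0);        // field-stop marker
  }
}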
[02/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationHandle.java -- diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationHandle.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationHandle.java index 75a3c89..d1d86fe 100644 --- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationHandle.java +++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationHandle.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class TOperationHandle implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TOperationHandle"); @@ -333,10 +333,10 @@ public class TOperationHandle implements org.apache.thrift.TBasehttp://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationState.java -- diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationState.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationState.java index 2dfde1c..fe029ef 100644 --- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationState.java +++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationState.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationType.java -- diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationType.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationType.java index 6927740..f105259 100644 --- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationType.java +++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationType.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java -- diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java index 706c6ba..38a5be3 100644 --- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java +++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java @@ -1,5 +1,5 @@ /** - * 
Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class TPrimitiveTypeEntry implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TPrimitiveTypeEntry"); http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TProtocolVersion.java -- diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TProtocolVersion.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/
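The visible change in every regenerated file is the @Generated annotation losing its date attribute. With the date present, every regeneration rewrites every file even when the IDL is unchanged, which churns checked-in generated code. A minimal sketch of the two forms (javax.annotation.Generated):

import javax.annotation.Generated;

// Dated form (0.9.2 output): the date changes on every regeneration,
// producing spurious diffs in checked-in generated code.
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
class DatedExample {}

// Undated form (0.9.3 output): regenerating unchanged IDL yields no diff.
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
class UndatedExample {}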
[38/55] [abbrv] hive git commit: HIVE-12213 Investigating the test failure TestHCatClient.testTableSchemaPropagation (Aleksei Statkevich via Aihua Xu)
HIVE-12213 Investigating the test failure TestHCatClient.testTableSchemaPropagation (Aleksei Statkevich via Aihua Xu) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8f964465 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8f964465 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8f964465 Branch: refs/heads/spark Commit: 8f9644658fd141fd5ea7395d3e97a093f98870bb Parents: 0808741 Author: Aihua Xu Authored: Mon Oct 26 12:49:11 2015 -0400 Committer: aihuaxu Committed: Mon Oct 26 13:03:48 2015 -0400 -- .../hive/hcatalog/api/TestHCatClient.java | 39 +--- .../hadoop/hive/metastore/MetaStoreUtils.java | 10 +++-- 2 files changed, 24 insertions(+), 25 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/8f964465/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java -- diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java index 8992d552c..891322a 100644 --- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java +++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java @@ -90,7 +90,6 @@ public class TestHCatClient { private static HiveConf replicationTargetHCatConf; private static SecurityManager securityManager; private static boolean useExternalMS = false; - private static boolean useExternalMSForReplication = false; public static class RunMS implements Runnable { @@ -1056,14 +1055,14 @@ public class TestHCatClient { HCatTable targetTable = targetMetaStore.deserializeTable(tableStringRep); assertEquals("Table after deserialization should have been identical to sourceTable.", - sourceTable.diff(targetTable), HCatTable.NO_DIFF); + HCatTable.NO_DIFF, sourceTable.diff(targetTable)); // Create table on Target. targetMetaStore.createTable(HCatCreateTableDesc.create(targetTable).build()); // Verify that the created table is identical to sourceTable. targetTable = targetMetaStore.getTable(dbName, tableName); assertEquals("Table after deserialization should have been identical to sourceTable.", - sourceTable.diff(targetTable), HCatTable.NO_DIFF); + HCatTable.NO_DIFF, sourceTable.diff(targetTable)); // Modify sourceTable. List newColumnSchema = new ArrayList(columnSchema); @@ -1098,7 +1097,7 @@ public class TestHCatClient { targetTable = targetMetaStore.getTable(dbName, tableName); assertEquals("After propagating schema changes, source and target tables should have been equivalent.", - targetTable.diff(sourceTable), HCatTable.NO_DIFF); + HCatTable.NO_DIFF, targetTable.diff(sourceTable)); } catch (Exception unexpected) { @@ -1157,14 +1156,14 @@ public class TestHCatClient { sourceMetaStore.addPartition(HCatAddPartitionDesc.create(sourcePartition_1).build()); assertEquals("Unexpected number of partitions. ", - sourceMetaStore.getPartitions(dbName, tableName).size(), 1); + 1, sourceMetaStore.getPartitions(dbName, tableName).size()); // Verify that partition_1 was added correctly, and properties were inherited from the HCatTable. 
HCatPartition addedPartition_1 = sourceMetaStore.getPartition(dbName, tableName, partitionSpec_1); - assertEquals("Column schema doesn't match.", addedPartition_1.getColumns(), sourceTable.getCols()); - assertEquals("InputFormat doesn't match.", addedPartition_1.getInputFormat(), sourceTable.getInputFileFormat()); - assertEquals("OutputFormat doesn't match.", addedPartition_1.getOutputFormat(), sourceTable.getOutputFileFormat()); - assertEquals("SerDe doesn't match.", addedPartition_1.getSerDe(), sourceTable.getSerdeLib()); - assertEquals("SerDe params don't match.", addedPartition_1.getSerdeParams(), sourceTable.getSerdeParams()); + assertEquals("Column schema doesn't match.", sourceTable.getCols(), addedPartition_1.getColumns()); + assertEquals("InputFormat doesn't match.", sourceTable.getInputFileFormat(), addedPartition_1.getInputFormat()); + assertEquals("OutputFormat doesn't match.", sourceTable.getOutputFileFormat(), addedPartition_1.getOutputFormat()); + assertEquals("SerDe doesn't match.", sourceTable.getSerdeLib(), addedPartition_1.getSerDe()); + assertEquals("SerDe params don't match.", sourceTable.getSerdeParams(), addedPartition_1.getSerdeParams()); // Replicate table definition. @@ -1177,8 +117
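Every assertion fixed above has its first two arguments swapped into JUnit's documented order, assertEquals(expected, actual). The swap never changes whether a test passes, but a failure message labels the two values, so reversed arguments mislabel which value was expected. A small self-contained illustration with hypothetical values:

import static org.junit.Assert.assertEquals;
import org.junit.Test;

public class ArgumentOrderTest {
  @Test
  public void expectedComesFirst() {
    int actualPartitionCount = computePartitions();
    // Expected value first, value under test second: on failure JUnit
    // prints "expected:<1> but was:<...>", which is only truthful in
    // this argument order.
    assertEquals("Unexpected number of partitions.", 1, actualPartitionCount);
  }

  private int computePartitions() {
    return 1; // stand-in for the metastore call in the real test
  }
}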
[27/55] [abbrv] hive git commit: HIVE-12059 : Clean up reference to deprecated constants in AvroSerdeUtils (Aaron Dossett via Ashutosh Chauhan)
HIVE-12059 : Clean up reference to deprecated constants in AvroSerdeUtils (Aaron Dossett via Ashutosh Chauhan) Signed-off-by: Ashutosh Chauhan Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/03c62d0d Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/03c62d0d Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/03c62d0d Branch: refs/heads/spark Commit: 03c62d0da2c4d71fbd144d2636807d38a008820c Parents: e8f71f4 Author: Aaron Dossett Authored: Wed Oct 7 13:59:00 2015 -0800 Committer: Ashutosh Chauhan Committed: Sat Oct 24 14:44:39 2015 -0700 -- .../hadoop/hive/hbase/HBaseSerDeHelper.java | 21 --- .../hadoop/hive/hbase/HBaseSerDeParameters.java | 20 +++--- .../hbase/struct/AvroHBaseValueFactory.java | 3 ++- .../hadoop/hive/hbase/TestHBaseSerDe.java | 12 - .../ql/io/avro/AvroGenericRecordReader.java | 5 ++-- .../hadoop/hive/serde2/avro/TestAvroSerde.java | 28 +--- .../hive/serde2/avro/TestAvroSerdeUtils.java| 18 ++--- 7 files changed, 52 insertions(+), 55 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/03c62d0d/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java -- diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java index 3bcc5c0..20362e5 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.avro.AvroObjectInspectorGenerator; -import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils; +import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties; import org.apache.hadoop.hive.serde2.lazy.LazyFactory; import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase; import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector; @@ -215,16 +215,17 @@ public class HBaseSerDeHelper { // for avro type, the serialization class parameter is optional schemaLiteral = tbl.getProperty(colMap.familyName + "." + colMap.qualifierPrefix + "." - + AvroSerdeUtils.SCHEMA_LITERAL); + + AvroTableProperties.SCHEMA_LITERAL.getPropName()); schemaUrl = tbl.getProperty(colMap.familyName + "." + colMap.qualifierPrefix + "." - + AvroSerdeUtils.SCHEMA_URL); + + AvroTableProperties.SCHEMA_URL.getPropName()); if (schemaLiteral == null && schemaUrl == null) { // either schema literal, schema url or serialization class must // be provided throw new SerDeException("For an avro schema, either " -+ AvroSerdeUtils.SCHEMA_LITERAL + ", " + AvroSerdeUtils.SCHEMA_URL + " or " ++ AvroTableProperties.SCHEMA_LITERAL.getPropName() + ", " ++ AvroTableProperties.SCHEMA_URL.getPropName() + " or " + serdeConstants.SERIALIZATION_CLASS + " property must be set."); } @@ -254,13 +255,13 @@ public class HBaseSerDeHelper { if (serType.equalsIgnoreCase(AVRO_SERIALIZATION_TYPE)) { // for avro type, the serialization class parameter is optional schemaLiteral = - tbl.getProperty(colMap.familyName + "." + AvroSerdeUtils.SCHEMA_LITERAL); - schemaUrl = tbl.getProperty(colMap.familyName + "." + AvroSerdeUtils.SCHEMA_URL); + tbl.getProperty(colMap.familyName + "." + AvroTableProperties.SCHEMA_LITERAL.getPropName()); + schemaUrl = tbl.getProperty(colMap.familyName + "." 
+ AvroTableProperties.SCHEMA_URL.getPropName()); if (schemaLiteral == null && schemaUrl == null) { // either schema literal or serialization class must be provided throw new SerDeException("For an avro schema, either " -+ AvroSerdeUtils.SCHEMA_LITERAL + " property or " ++ AvroTableProperties.SCHEMA_LITERAL.getPropName() + " property or " + serdeConstants.SERIALIZATION_CLASS + " property must be set."); } @@ -315,16 +316,16 @@ public class HBaseSerDeHelper { // for avro type, the serialization class parameter is optional schemaLite
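The cleanup pattern throughout: the deprecated string constants on AvroSerdeUtils give way to the AvroTableProperties enum, with getPropName() supplying the property key. A hedged sketch of the per-column-family lookup the helper performs, assuming the conventional "avro.schema.literal" key; the table values here are invented:

import java.util.Properties;
import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;

public class AvroPropLookup {
  // Builds "<family>.<qualifierPrefix>.<avro property>" keys, mirroring
  // the lookups in HBaseSerDeHelper after the cleanup.
  static String schemaLiteral(Properties tbl, String family, String prefix) {
    return tbl.getProperty(
        family + "." + prefix + "." + AvroTableProperties.SCHEMA_LITERAL.getPropName());
  }

  public static void main(String[] args) {
    Properties tbl = new Properties();
    tbl.setProperty("cf.q.avro.schema.literal", "{\"type\":\"record\"}"); // invented value
    System.out.println(schemaLiteral(tbl, "cf", "q"));
  }
}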
[50/55] [abbrv] hive git commit: HIVE-12061 : add file type support to file metadata by expr call (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp -- diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp index 44aa22e..a82c363 100644 --- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp +++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp @@ -1240,14 +1240,14 @@ uint32_t ThriftHiveMetastore_get_databases_result::read(::apache::thrift::protoc if (ftype == ::apache::thrift::protocol::T_LIST) { { this->success.clear(); -uint32_t _size713; -::apache::thrift::protocol::TType _etype716; -xfer += iprot->readListBegin(_etype716, _size713); -this->success.resize(_size713); -uint32_t _i717; -for (_i717 = 0; _i717 < _size713; ++_i717) +uint32_t _size714; +::apache::thrift::protocol::TType _etype717; +xfer += iprot->readListBegin(_etype717, _size714); +this->success.resize(_size714); +uint32_t _i718; +for (_i718 = 0; _i718 < _size714; ++_i718) { - xfer += iprot->readString(this->success[_i717]); + xfer += iprot->readString(this->success[_i718]); } xfer += iprot->readListEnd(); } @@ -1286,10 +1286,10 @@ uint32_t ThriftHiveMetastore_get_databases_result::write(::apache::thrift::proto xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast(this->success.size())); - std::vector ::const_iterator _iter718; - for (_iter718 = this->success.begin(); _iter718 != this->success.end(); ++_iter718) + std::vector ::const_iterator _iter719; + for (_iter719 = this->success.begin(); _iter719 != this->success.end(); ++_iter719) { -xfer += oprot->writeString((*_iter718)); +xfer += oprot->writeString((*_iter719)); } xfer += oprot->writeListEnd(); } @@ -1334,14 +1334,14 @@ uint32_t ThriftHiveMetastore_get_databases_presult::read(::apache::thrift::proto if (ftype == ::apache::thrift::protocol::T_LIST) { { (*(this->success)).clear(); -uint32_t _size719; -::apache::thrift::protocol::TType _etype722; -xfer += iprot->readListBegin(_etype722, _size719); -(*(this->success)).resize(_size719); -uint32_t _i723; -for (_i723 = 0; _i723 < _size719; ++_i723) +uint32_t _size720; +::apache::thrift::protocol::TType _etype723; +xfer += iprot->readListBegin(_etype723, _size720); +(*(this->success)).resize(_size720); +uint32_t _i724; +for (_i724 = 0; _i724 < _size720; ++_i724) { - xfer += iprot->readString((*(this->success))[_i723]); + xfer += iprot->readString((*(this->success))[_i724]); } xfer += iprot->readListEnd(); } @@ -1458,14 +1458,14 @@ uint32_t ThriftHiveMetastore_get_all_databases_result::read(::apache::thrift::pr if (ftype == ::apache::thrift::protocol::T_LIST) { { this->success.clear(); -uint32_t _size724; -::apache::thrift::protocol::TType _etype727; -xfer += iprot->readListBegin(_etype727, _size724); -this->success.resize(_size724); -uint32_t _i728; -for (_i728 = 0; _i728 < _size724; ++_i728) +uint32_t _size725; +::apache::thrift::protocol::TType _etype728; +xfer += iprot->readListBegin(_etype728, _size725); +this->success.resize(_size725); +uint32_t _i729; +for (_i729 = 0; _i729 < _size725; ++_i729) { - xfer += iprot->readString(this->success[_i728]); + xfer += iprot->readString(this->success[_i729]); } xfer += iprot->readListEnd(); } @@ -1504,10 +1504,10 @@ uint32_t ThriftHiveMetastore_get_all_databases_result::write(::apache::thrift::p xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0); { xfer += 
oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast(this->success.size())); - std::vector ::const_iterator _iter729; - for (_iter729 = this->success.begin(); _iter729 != this->success.end(); ++_iter729) + std::vector ::const_iterator _iter730; + for (_iter730 = this->success.begin(); _iter730 != this->success.end(); ++_iter730) { -xfer += oprot->writeString((*_iter729)); +xfer += oprot->writeString((*_iter730)); } xfer += oprot->writeListEnd(); } @@ -155
[31/55] [abbrv] hive git commit: HIVE-12262: Session log dir cannot be created in some cases (Daniel Dai, reviewed by Thejas Nair)
HIVE-12262: Session log dir cannot be created in some cases (Daniel Dai, reviewed by Thejas Nair) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/48a1e1f7 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/48a1e1f7 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/48a1e1f7 Branch: refs/heads/spark Commit: 48a1e1f7b051e6ca2a5bbb4a55eda16f5349d369 Parents: e3ef96f Author: Daniel Dai Authored: Sun Oct 25 09:46:27 2015 -0700 Committer: Daniel Dai Committed: Sun Oct 25 09:48:00 2015 -0700 -- .../hive/service/cli/session/HiveSessionImpl.java | 12 1 file changed, 12 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hive/blob/48a1e1f7/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java -- diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index a600309..3eaab9a 100644 --- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -241,6 +241,18 @@ public class HiveSessionImpl implements HiveSession { @Override public void setOperationLogSessionDir(File operationLogRootDir) { +if (!operationLogRootDir.exists()) { + LOG.warn("The operation log root directory is removed, recreating:" + + operationLogRootDir.getAbsolutePath()); + if (!operationLogRootDir.mkdirs()) { +LOG.warn("Unable to create operation log root directory: " + +operationLogRootDir.getAbsolutePath()); + } +} +if (!operationLogRootDir.canWrite()) { + LOG.warn("The operation log root directory is not writable: " + + operationLogRootDir.getAbsolutePath()); +} sessionLogDir = new File(operationLogRootDir, sessionHandle.getHandleIdentifier().toString()); isOperationLogEnabled = true; if (!sessionLogDir.exists()) {
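The fix is defensive directory handling: if the configured log root has been removed out from under the server, recreate it before deriving the per-session directory, and warn rather than fail when it cannot be created or written. The same pattern in isolation, assuming nothing from Hive:

import java.io.File;

public class LogDirGuard {
  // Ensure a root directory exists and is writable before creating
  // children under it; warn instead of throwing, as the session code does.
  static File sessionDir(File root, String sessionId) {
    if (!root.exists() && !root.mkdirs()) {
      System.err.println("Unable to create log root: " + root.getAbsolutePath());
    }
    if (!root.canWrite()) {
      System.err.println("Log root is not writable: " + root.getAbsolutePath());
    }
    return new File(root, sessionId);
  }

  public static void main(String[] args) {
    System.out.println(sessionDir(new File("/tmp/hive-op-logs"), "session-1234"));
  }
}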
[19/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h -- diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h index 49e6143..c8f16a7 100644 --- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h +++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -8,11 +8,17 @@ #define ThriftHiveMetastore_H #include +#include #include "hive_metastore_types.h" #include "FacebookService.h" namespace Apache { namespace Hadoop { namespace Hive { +#ifdef _WIN32 + #pragma warning( push ) + #pragma warning (disable : 4250 ) //inheriting methods via dominance +#endif + class ThriftHiveMetastoreIf : virtual public ::facebook::fb303::FacebookServiceIf { public: virtual ~ThriftHiveMetastoreIf() {} @@ -594,9 +600,6 @@ typedef struct _ThriftHiveMetastore_getMetaConf_args__isset { class ThriftHiveMetastore_getMetaConf_args { public: - static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1"; - static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1}; - ThriftHiveMetastore_getMetaConf_args(const ThriftHiveMetastore_getMetaConf_args&); ThriftHiveMetastore_getMetaConf_args& operator=(const ThriftHiveMetastore_getMetaConf_args&); ThriftHiveMetastore_getMetaConf_args() : key() { @@ -624,23 +627,18 @@ class ThriftHiveMetastore_getMetaConf_args { uint32_t read(::apache::thrift::protocol::TProtocol* iprot); uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; - friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_getMetaConf_args& obj); }; class ThriftHiveMetastore_getMetaConf_pargs { public: - static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1"; - static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1}; - virtual ~ThriftHiveMetastore_getMetaConf_pargs() throw(); const std::string* key; uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; - friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_getMetaConf_pargs& obj); }; typedef struct _ThriftHiveMetastore_getMetaConf_result__isset { @@ -652,9 +650,6 @@ typedef struct _ThriftHiveMetastore_getMetaConf_result__isset { class ThriftHiveMetastore_getMetaConf_result { public: - static const char* ascii_fingerprint; // = "FB3D66E547AE5D7F8EB046D752ECF5C1"; - static const uint8_t binary_fingerprint[16]; // = {0xFB,0x3D,0x66,0xE5,0x47,0xAE,0x5D,0x7F,0x8E,0xB0,0x46,0xD7,0x52,0xEC,0xF5,0xC1}; - ThriftHiveMetastore_getMetaConf_result(const ThriftHiveMetastore_getMetaConf_result&); ThriftHiveMetastore_getMetaConf_result& operator=(const ThriftHiveMetastore_getMetaConf_result&); ThriftHiveMetastore_getMetaConf_result() : success() { @@ -687,7 +682,6 @@ class ThriftHiveMetastore_getMetaConf_result { uint32_t read(::apache::thrift::protocol::TProtocol* iprot); uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; - friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_getMetaConf_result& obj); }; typedef struct _ThriftHiveMetastore_getMetaConf_presult__isset { @@ -699,9 +693,6 @@ typedef struct _ThriftHiveMetastore_getMetaConf_presult__isset { class 
ThriftHiveMetastore_getMetaConf_presult { public: - static const char* ascii_fingerprint; // = "FB3D66E547AE5D7F8EB046D752ECF5C1"; - static const uint8_t binary_fingerprint[16]; // = {0xFB,0x3D,0x66,0xE5,0x47,0xAE,0x5D,0x7F,0x8E,0xB0,0x46,0xD7,0x52,0xEC,0xF5,0xC1}; - virtual ~ThriftHiveMetastore_getMetaConf_presult() throw(); std::string* success; @@ -711,7 +702,6 @@ class ThriftHiveMetastore_getMetaConf_presult { uint32_t read(::apache::thrift::protocol::TProtocol* iprot); - friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_getMetaConf_presult& obj); }; typedef struct _ThriftHiveMetastore_setMetaConf_args__isset { @@ -723,9 +713,6 @@ typedef struct _ThriftHiveMetastore_setMetaConf_args__isset { class ThriftHiveMetastore_setMetaConf_args { public: - static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972"; - static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72}; - ThriftHiveMetastore_setMetaConf_args(const ThriftHiveMetastore_setMetaConf_args&); ThriftHiveMetastore_setMetaConf_args& operator=(const ThriftHiveMetastore_setMetaConf_args&); ThriftHiveMeta
[51/55] [abbrv] hive git commit: HIVE-12061 : add file type support to file metadata by expr call (Sergey Shelukhin, reviewed by Alan Gates)
HIVE-12061 : add file type support to file metadata by expr call (Sergey Shelukhin, reviewed by Alan Gates) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e5b53032 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e5b53032 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e5b53032 Branch: refs/heads/spark Commit: e5b53032beaafb95c798a152e724acf0e38dc094 Parents: 6df9090 Author: Sergey Shelukhin Authored: Tue Oct 27 14:32:13 2015 -0700 Committer: Sergey Shelukhin Committed: Tue Oct 27 14:32:13 2015 -0700 -- metastore/if/hive_metastore.thrift |8 +- .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp | 1744 +- .../gen/thrift/gen-cpp/hive_metastore_types.cpp | 402 ++-- .../gen/thrift/gen-cpp/hive_metastore_types.h | 20 +- .../metastore/api/FileMetadataExprType.java | 42 + .../api/GetFileMetadataByExprRequest.java | 126 +- .../src/gen/thrift/gen-php/metastore/Types.php | 30 + .../gen/thrift/gen-py/hive_metastore/ttypes.py | 26 +- .../gen/thrift/gen-rb/hive_metastore_types.rb | 13 +- .../hive/metastore/FileMetadataHandler.java | 30 + .../hadoop/hive/metastore/HiveMetaStore.java| 15 +- .../hadoop/hive/metastore/ObjectStore.java |3 +- .../apache/hadoop/hive/metastore/RawStore.java |8 +- .../filemeta/OrcFileMetadataHandler.java| 63 + .../hive/metastore/hbase/HBaseReadWrite.java|2 +- .../hadoop/hive/metastore/hbase/HBaseStore.java | 41 +- .../DummyRawStoreControlledCommit.java |3 +- .../DummyRawStoreForJdoConnection.java |3 +- 18 files changed, 1484 insertions(+), 1095 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/if/hive_metastore.thrift -- diff --git a/metastore/if/hive_metastore.thrift b/metastore/if/hive_metastore.thrift index 751cebe..3e30f56 100755 --- a/metastore/if/hive_metastore.thrift +++ b/metastore/if/hive_metastore.thrift @@ -725,11 +725,17 @@ struct GetFileMetadataByExprResult { 2: required bool isSupported } +enum FileMetadataExprType { + ORC_SARG = 1 +} + + // Request type for get_file_metadata_by_expr struct GetFileMetadataByExprRequest { 1: required list fileIds, 2: required binary expr, - 3: optional bool doGetFooters + 3: optional bool doGetFooters, + 4: optional FileMetadataExprType type } // Return type for get_file_metadata
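On the Java side of the generated API, the new optional field follows the standard Thrift bean conventions, so a caller would populate the request roughly as below. This is a sketch: the set<Field> names follow Thrift's usual generated-setter convention and are not quoted in this commit.

import java.nio.ByteBuffer;
import java.util.Arrays;
import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
import org.apache.hadoop.hive.metastore.api.GetFileMetadataByExprRequest;

public class FileMetadataRequestExample {
  public static GetFileMetadataByExprRequest build(byte[] sargExpr) {
    GetFileMetadataByExprRequest req = new GetFileMetadataByExprRequest();
    req.setFileIds(Arrays.asList(1L, 2L, 3L));   // required: files to probe
    req.setExpr(ByteBuffer.wrap(sargExpr));      // required: serialized expression
    req.setDoGetFooters(true);                   // optional: also return footers
    req.setType(FileMetadataExprType.ORC_SARG);  // the optional field this patch adds
    return req;
  }
}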
[33/55] [abbrv] hive git commit: HIVE-12189: The list in pushdownPreds of ppd.ExprWalkerInfo should not be allowed to grow very large (Yongzhi Chen, reviewed by Chao Sun)
HIVE-12189: The list in pushdownPreds of ppd.ExprWalkerInfo should not be allowed to grow very large (Yongzhi Chen, reviewed by Chao Sun) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f415ce95 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f415ce95 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f415ce95 Branch: refs/heads/spark Commit: f415ce9567661dcdc4b68b49ccb6ee4962a330e4 Parents: 9ea51d1 Author: Yongzhi Chen Authored: Sun Oct 25 23:24:31 2015 -0700 Committer: Chao Sun Committed: Sun Oct 25 23:24:31 2015 -0700 -- .../hadoop/hive/ql/ppd/ExprWalkerInfo.java | 23 ++-- 1 file changed, 21 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/f415ce95/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java index e4b768e..fca671c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java @@ -149,7 +149,13 @@ public class ExprWalkerInfo implements NodeProcessorCtx { } public void addFinalCandidate(String alias, ExprNodeDesc expr) { -getPushdownPreds(alias).add(expr); +List predicates = getPushdownPreds(alias); +for (ExprNodeDesc curPred: predicates) { + if (curPred.isSame(expr)) { +return; + } +} +predicates.add(expr); } /** @@ -159,7 +165,20 @@ public class ExprWalkerInfo implements NodeProcessorCtx { * @param pushDowns */ public void addPushDowns(String alias, List pushDowns) { -getPushdownPreds(alias).addAll(pushDowns); +List predicates = getPushdownPreds(alias); +boolean isNew; +for (ExprNodeDesc newPred: pushDowns) { + isNew = true; + for (ExprNodeDesc curPred: predicates) { +if (curPred.isSame(newPred)) { + isNew = false; + break; +} + } + if (isNew) { +predicates.add(newPred); + } +} } /**
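Both patched methods apply the same guard: scan the existing predicate list with isSame() and append only expressions not already present, which bounds list growth at the cost of a linear scan per insert. The idea in isolation, with equals() standing in for Hive's isSame():

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DedupList {
  // Append newItems to target, skipping items already present.
  static <T> void addAllDistinct(List<T> target, List<T> newItems) {
    for (T candidate : newItems) {
      boolean isNew = true;
      for (T existing : target) {
        if (existing.equals(candidate)) { // ExprNodeDesc.isSame() in the real code
          isNew = false;
          break;
        }
      }
      if (isNew) {
        target.add(candidate);
      }
    }
  }

  public static void main(String[] args) {
    List<String> preds = new ArrayList<String>(Arrays.asList("a > 1", "b = 2"));
    addAllDistinct(preds, Arrays.asList("a > 1", "c < 3"));
    System.out.println(preds); // prints [a > 1, b = 2, c < 3]
  }
}

A hash-based set would avoid the quadratic scan, but only if isSame() agreed with hashCode(), which the patch does not assume.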
[21/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/26535378 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/26535378 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/26535378 Branch: refs/heads/spark Commit: 26535378f9ece1543d1a7200f6514a85a7f8090e Parents: e9cdea9 Author: Sergey Shelukhin Authored: Fri Oct 23 14:29:23 2015 -0700 Committer: Sergey Shelukhin Committed: Fri Oct 23 14:31:04 2015 -0700 -- .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp | 12960 - .../gen/thrift/gen-cpp/ThriftHiveMetastore.h| 2479 +--- .../thrift/gen-cpp/hive_metastore_constants.cpp | 2 +- .../thrift/gen-cpp/hive_metastore_constants.h | 2 +- .../gen/thrift/gen-cpp/hive_metastore_types.cpp | 2269 ++- .../gen/thrift/gen-cpp/hive_metastore_types.h | 1332 +- .../hive/metastore/api/AbortTxnRequest.java | 6 +- .../metastore/api/AddDynamicPartitions.java | 6 +- .../metastore/api/AddPartitionsRequest.java | 8 +- .../hive/metastore/api/AddPartitionsResult.java | 4 +- .../hadoop/hive/metastore/api/AggrStats.java| 6 +- .../metastore/api/AlreadyExistsException.java | 4 +- .../metastore/api/BinaryColumnStatsData.java|10 +- .../metastore/api/BooleanColumnStatsData.java |10 +- .../hive/metastore/api/CheckLockRequest.java| 6 +- .../metastore/api/ClearFileMetadataRequest.java | 4 +- .../metastore/api/ClearFileMetadataResult.java | 4 +- .../hive/metastore/api/ColumnStatistics.java| 4 +- .../metastore/api/ColumnStatisticsData.java | 2 +- .../metastore/api/ColumnStatisticsDesc.java | 8 +- .../hive/metastore/api/ColumnStatisticsObj.java | 4 +- .../hive/metastore/api/CommitTxnRequest.java| 6 +- .../hive/metastore/api/CompactionRequest.java | 4 +- .../hive/metastore/api/CompactionType.java | 2 +- .../api/ConfigValSecurityException.java | 4 +- .../api/CurrentNotificationEventId.java | 6 +- .../hadoop/hive/metastore/api/Database.java | 4 +- .../apache/hadoop/hive/metastore/api/Date.java | 6 +- .../hive/metastore/api/DateColumnStatsData.java | 8 +- .../hadoop/hive/metastore/api/Decimal.java | 6 +- .../metastore/api/DecimalColumnStatsData.java | 8 +- .../metastore/api/DoubleColumnStatsData.java|12 +- .../hive/metastore/api/DropPartitionsExpr.java | 6 +- .../metastore/api/DropPartitionsRequest.java|12 +- .../metastore/api/DropPartitionsResult.java | 4 +- .../hive/metastore/api/EnvironmentContext.java | 4 +- .../hive/metastore/api/EventRequestType.java| 2 +- .../hadoop/hive/metastore/api/FieldSchema.java | 4 +- .../metastore/api/FileMetadataExprType.java | 2 +- .../hive/metastore/api/FireEventRequest.java| 6 +- .../metastore/api/FireEventRequestData.java | 2 +- .../hive/metastore/api/FireEventResponse.java | 4 +- .../hadoop/hive/metastore/api/Function.java | 6 +- .../hadoop/hive/metastore/api/FunctionType.java | 2 +- .../metastore/api/GetAllFunctionsResponse.java | 4 +- .../api/GetFileMetadataByExprRequest.java | 6 +- .../api/GetFileMetadataByExprResult.java| 6 +- .../metastore/api/GetFileMetadataRequest.java | 4 +- .../metastore/api/GetFileMetadataResult.java| 6 +- .../metastore/api/GetOpenTxnsInfoResponse.java | 6 +- .../hive/metastore/api/GetOpenTxnsResponse.java | 6 +- .../api/GetPrincipalsInRoleRequest.java | 4 +- .../api/GetPrincipalsInRoleResponse.java| 4 +- .../api/GetRoleGrantsForPrincipalRequest.java | 4 +- .../api/GetRoleGrantsForPrincipalResponse.java | 4 +- .../api/GrantRevokePrivilegeRequest.java| 6 +- 
.../api/GrantRevokePrivilegeResponse.java | 6 +- .../metastore/api/GrantRevokeRoleRequest.java | 6 +- .../metastore/api/GrantRevokeRoleResponse.java | 6 +- .../hive/metastore/api/GrantRevokeType.java | 2 +- .../hive/metastore/api/HeartbeatRequest.java| 8 +- .../metastore/api/HeartbeatTxnRangeRequest.java | 8 +- .../api/HeartbeatTxnRangeResponse.java | 4 +- .../hive/metastore/api/HiveObjectPrivilege.java | 4 +- .../hive/metastore/api/HiveObjectRef.java | 4 +- .../hive/metastore/api/HiveObjectType.java | 2 +- .../apache/hadoop/hive/metastore/api/Index.java |10 +- .../api/IndexAlreadyExistsException.java| 4 +- .../metastore/api/InsertEventRequestData.java | 4 +- .../metastore/api/InvalidInputException.java| 4 +- .../metastore/api/InvalidObjectException.java | 4 +- .../api/InvalidOperationExceptio
[25/55] [abbrv] hive git commit: HIVE-11755: Incorrect method called with Kerberos enabled in AccumuloStorageHandler (Josh Elser via Brock Noland)
HIVE-11755 :Incorrect method called with Kerberos enabled in AccumuloStorageHandler (Josh Elser via Brock Noland) Signed-off-by: Ashutosh Chauhan Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ee2d3189 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ee2d3189 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ee2d3189 Branch: refs/heads/spark Commit: ee2d3189ff9a7e8bd604b2e036d53632a9b4e616 Parents: 3e0d87f Author: Josh Elser Authored: Tue Sep 8 14:46:00 2015 -0800 Committer: Ashutosh Chauhan Committed: Sat Oct 24 14:30:14 2015 -0700 -- .../hive/accumulo/HiveAccumuloHelper.java | 55 ++--- .../mr/HiveAccumuloTableOutputFormat.java | 50 .../hive/accumulo/TestHiveAccumuloHelper.java | 69 +++- .../mr/TestHiveAccumuloTableOutputFormat.java | 86 +++- 4 files changed, 229 insertions(+), 31 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/ee2d3189/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java -- diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java index dfc5d03..71b8b77 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.accumulo; import static com.google.common.base.Preconditions.checkNotNull; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Collection; @@ -238,22 +239,56 @@ public class HiveAccumuloHelper { public void setZooKeeperInstance(JobConf jobConf, Class inputOrOutputFormatClass, String zookeepers, String instanceName, boolean useSasl) throws IOException { try { - Class clientConfigClass = JavaUtils.loadClass(CLIENT_CONFIGURATION_CLASS_NAME); - - // get the ClientConfiguration - Object clientConfig = getClientConfiguration(zookeepers, instanceName, useSasl); - - // AccumuloOutputFormat.setZooKeeperInstance(JobConf, ClientConfiguration) or - // AccumuloInputFormat.setZooKeeperInstance(JobConf, ClientConfiguration) - Method setZooKeeperMethod = inputOrOutputFormatClass.getMethod( - SET_ZOOKEEPER_INSTANCE_METHOD_NAME, JobConf.class, clientConfigClass); - setZooKeeperMethod.invoke(null, jobConf, clientConfig); + setZooKeeperInstanceWithReflection(jobConf, inputOrOutputFormatClass, zookeepers, + instanceName, useSasl); +} catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + if (null != cause && cause instanceof IllegalStateException) { +throw (IllegalStateException) cause; + } + throw new IOException("Failed to invoke setZooKeeperInstance method", e); +} catch (IllegalStateException e) { + // re-throw the ISE so the caller can work around the silly impl that throws this in the + // first place. 
+ throw e; } catch (Exception e) { throw new IOException("Failed to invoke setZooKeeperInstance method", e); } } /** + * Wrap the setZooKeeperInstance reflected-call into its own method for testing + * + * @param jobConf + * The JobConf + * @param inputOrOutputFormatClass + * The InputFormat or OutputFormat class + * @param zookeepers + * ZooKeeper hosts + * @param instanceName + * Accumulo instance name + * @param useSasl + * Is SASL enabled + * @throws IOException + * When invocation of the method fails + */ + void setZooKeeperInstanceWithReflection(JobConf jobConf, Class inputOrOutputFormatClass, String + zookeepers, String instanceName, boolean useSasl) throws IOException, ClassNotFoundException, + NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, + InvocationTargetException { +Class clientConfigClass = JavaUtils.loadClass(CLIENT_CONFIGURATION_CLASS_NAME); + +// get the ClientConfiguration +Object clientConfig = getClientConfiguration(zookeepers, instanceName, useSasl); + +// AccumuloOutputFormat.setZooKeeperInstance(JobConf, ClientConfiguration) or +// AccumuloInputFormat.setZooKeeperInstance(JobConf, ClientConfiguration) +Method setZooKeeperMethod = inputOrOutputFormatClass.getMethod( +SET_ZOOKEEPER_INSTANCE_METHOD_NAME, JobConf.class, clientConfigClass); +setZooKeeperMethod.invoke(null, jobConf, clientConfig); + } + + /** * Wrapper around Con
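The heart of the patch: a reflective call surfaces the callee's exception wrapped in InvocationTargetException, so the wrapper unwraps the cause and re-throws IllegalStateException directly, letting callers work around Accumulo's already-configured state. A self-contained sketch of that unwrap-and-rethrow pattern (hypothetical method lookup, not the Accumulo signature):

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

public class ReflectiveCall {
  // Invoke a static method reflectively, surfacing IllegalStateException
  // thrown by the target instead of burying it in the reflection wrapper.
  static void invokeStatic(Class<?> cls, String name, Object arg) throws IOException {
    try {
      Method m = cls.getMethod(name, Object.class);
      m.invoke(null, arg);
    } catch (InvocationTargetException e) {
      Throwable cause = e.getCause();
      if (cause instanceof IllegalStateException) {
        throw (IllegalStateException) cause; // let callers handle it directly
      }
      throw new IOException("Failed to invoke " + name, e);
    } catch (Exception e) {
      throw new IOException("Failed to invoke " + name, e);
    }
  }
}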
[16/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h -- diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h index a4eb625..53ab272 100644 --- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h +++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -390,9 +390,6 @@ typedef struct _Version__isset { class Version { public: - static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972"; - static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72}; - Version(const Version&); Version& operator=(const Version&); Version() : version(), comments() { @@ -425,11 +422,17 @@ class Version { uint32_t read(::apache::thrift::protocol::TProtocol* iprot); uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; - friend std::ostream& operator<<(std::ostream& out, const Version& obj); + virtual void printTo(std::ostream& out) const; }; void swap(Version &a, Version &b); +inline std::ostream& operator<<(std::ostream& out, const Version& obj) +{ + obj.printTo(out); + return out; +} + typedef struct _FieldSchema__isset { _FieldSchema__isset() : name(false), type(false), comment(false) {} bool name :1; @@ -440,9 +443,6 @@ typedef struct _FieldSchema__isset { class FieldSchema { public: - static const char* ascii_fingerprint; // = "AB879940BD15B6B25691265F7384B271"; - static const uint8_t binary_fingerprint[16]; // = {0xAB,0x87,0x99,0x40,0xBD,0x15,0xB6,0xB2,0x56,0x91,0x26,0x5F,0x73,0x84,0xB2,0x71}; - FieldSchema(const FieldSchema&); FieldSchema& operator=(const FieldSchema&); FieldSchema() : name(), type(), comment() { @@ -480,11 +480,17 @@ class FieldSchema { uint32_t read(::apache::thrift::protocol::TProtocol* iprot); uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; - friend std::ostream& operator<<(std::ostream& out, const FieldSchema& obj); + virtual void printTo(std::ostream& out) const; }; void swap(FieldSchema &a, FieldSchema &b); +inline std::ostream& operator<<(std::ostream& out, const FieldSchema& obj) +{ + obj.printTo(out); + return out; +} + typedef struct _Type__isset { _Type__isset() : name(false), type1(false), type2(false), fields(false) {} bool name :1; @@ -496,9 +502,6 @@ typedef struct _Type__isset { class Type { public: - static const char* ascii_fingerprint; // = "20DF02DE523C27F7066C7BD4D9120842"; - static const uint8_t binary_fingerprint[16]; // = {0x20,0xDF,0x02,0xDE,0x52,0x3C,0x27,0xF7,0x06,0x6C,0x7B,0xD4,0xD9,0x12,0x08,0x42}; - Type(const Type&); Type& operator=(const Type&); Type() : name(), type1(), type2() { @@ -547,11 +550,17 @@ class Type { uint32_t read(::apache::thrift::protocol::TProtocol* iprot); uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; - friend std::ostream& operator<<(std::ostream& out, const Type& obj); + virtual void printTo(std::ostream& out) const; }; void swap(Type &a, Type &b); +inline std::ostream& operator<<(std::ostream& out, const Type& obj) +{ + obj.printTo(out); + return out; +} + typedef struct _HiveObjectRef__isset { _HiveObjectRef__isset() : objectType(false), dbName(false), objectName(false), partValues(false), columnName(false) {} bool objectType :1; @@ -564,9 +573,6 @@ typedef 
struct _HiveObjectRef__isset { class HiveObjectRef { public: - static const char* ascii_fingerprint; // = "205CD8311CF3AA9EC161BAEF8D7C933C"; - static const uint8_t binary_fingerprint[16]; // = {0x20,0x5C,0xD8,0x31,0x1C,0xF3,0xAA,0x9E,0xC1,0x61,0xBA,0xEF,0x8D,0x7C,0x93,0x3C}; - HiveObjectRef(const HiveObjectRef&); HiveObjectRef& operator=(const HiveObjectRef&); HiveObjectRef() : objectType((HiveObjectType::type)0), dbName(), objectName(), columnName() { @@ -614,11 +620,17 @@ class HiveObjectRef { uint32_t read(::apache::thrift::protocol::TProtocol* iprot); uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; - friend std::ostream& operator<<(std::ostream& out, const HiveObjectRef& obj); + virtual void printTo(std::ostream& out) const; }; void swap(HiveObjectRef &a, HiveObjectRef &b); +inline std::ostream& operator<<(std::ostream& out, const HiveObjectRef& obj) +{ + obj.printTo(out); + return out; +} + typedef struct _PrivilegeGrantInfo__isset { _PrivilegeGrantInfo__isset() : privilege(false), createTime(false), grantor(false), grantorType(false), grantOption(false) {} bool privilege :1; @@ -631,9 +643,6 @@ typedef struct _PrivilegeGrantInfo__isset { class Pr
[15/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnRequest.java -- diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnRequest.java index dcfbbe0..8e3a0ae 100644 --- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnRequest.java +++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnRequest.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class AbortTxnRequest implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AbortTxnRequest"); @@ -185,7 +185,7 @@ public class AbortTxnRequest implements org.apache.thrift.TBasehttp://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java -- diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java index ffef8a6..bb6e584 100644 --- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java +++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class AddDynamicPartitions implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AddDynamicPartitions"); @@ -334,7 +334,7 @@ public class AddDynamicPartitions implements org.apache.thrift.TBasehttp://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java -- diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java index 05f83d3..083d340 100644 --- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java +++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.2) + * Autogenerated by 
Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class AddPartitionsRequest implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AddPartitionsRequest"); @@ -390,10 +390,10 @@ public class AddPartitionsRequest implements org.apache.thrift.TBasehttp://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java -- diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPart
[11/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py -- diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py b/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py index f86fda9..d1c07a5 100644 --- a/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py +++ b/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py @@ -1,5 +1,5 @@ # -# Autogenerated by Thrift Compiler (0.9.2) +# Autogenerated by Thrift Compiler (0.9.3) # # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING # http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py -- diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py index 56ce527..8940dff 100644 --- a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py +++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py @@ -1,5 +1,5 @@ # -# Autogenerated by Thrift Compiler (0.9.2) +# Autogenerated by Thrift Compiler (0.9.3) # # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING # @@ -253,12 +253,12 @@ class Version: break if fid == 1: if ftype == TType.STRING: - self.version = iprot.readString(); + self.version = iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: - self.comments = iprot.readString(); + self.comments = iprot.readString() else: iprot.skip(ftype) else: @@ -334,17 +334,17 @@ class FieldSchema: break if fid == 1: if ftype == TType.STRING: - self.name = iprot.readString(); + self.name = iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: - self.type = iprot.readString(); + self.type = iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: - self.comment = iprot.readString(); + self.comment = iprot.readString() else: iprot.skip(ftype) else: @@ -428,17 +428,17 @@ class Type: break if fid == 1: if ftype == TType.STRING: - self.name = iprot.readString(); + self.name = iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: - self.type1 = iprot.readString(); + self.type1 = iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: - self.type2 = iprot.readString(); + self.type2 = iprot.readString() else: iprot.skip(ftype) elif fid == 4: @@ -544,17 +544,17 @@ class HiveObjectRef: break if fid == 1: if ftype == TType.I32: - self.objectType = iprot.readI32(); + self.objectType = iprot.readI32() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: - self.dbName = iprot.readString(); + self.dbName = iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: - self.objectName = iprot.readString(); + self.objectName = iprot.readString() else: iprot.skip(ftype) elif fid == 4: @@ -562,14 +562,14 @@ class HiveObjectRef: self.partValues = [] (_etype10, _size7) = iprot.readListBegin() for _i11 in xrange(_size7): -_elem12 = iprot.readString(); +_elem12 = iprot.readString() self.partValues.append(_elem12) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 5: if ftype == TType.STRING: - self.columnName = iprot.readString(); + self.columnName = iprot.readString() else: iprot.skip(ftype) else: @@ -669,27 +669,27 @@ class PrivilegeGrantInfo: break if fid == 1: if ftype == TType.STRING: - self.privilege = iprot.readString(); + self.privilege = iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.I32: - self.createTime 
= iprot.readI32(); + self.createTime = iprot.readI32() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: - self.grantor = iprot.readString(); + self.grantor = iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: - self.grantorType = iprot
[35/55] [abbrv] hive git commit: HIVE-12261 - adding more comments
HIVE-12261 - adding more comments Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2a0ea58b Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2a0ea58b Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2a0ea58b Branch: refs/heads/spark Commit: 2a0ea58b18b864b556b37870b0189d13537df5ce Parents: 1f9556d Author: Thejas Nair Authored: Sun Oct 25 23:51:16 2015 -0700 Committer: Thejas Nair Committed: Sun Oct 25 23:51:16 2015 -0700 -- .../org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java | 5 - 1 file changed, 4 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/2a0ea58b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java -- diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java index 98798e8..7c7f7ce 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java @@ -167,7 +167,10 @@ public class MetaStoreSchemaInfo { /** * A dbVersion is compatible with hive version if it is greater or equal to * the hive version. This is result of the db schema upgrade design principles - * followed in hive project. + * followed in hive project. The state where db schema version is ahead of + * hive software version is often seen when a 'rolling upgrade' or + * 'rolling downgrade' is happening. This is a state where hive is functional + * and returning non zero status for it is misleading. * * @param hiveVersion * version of hive software
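The expanded comment documents the compatibility rule itself: a metastore db schema version greater than or equal to the hive software version is compatible, because a rolling upgrade or downgrade legitimately puts the schema ahead of the binaries. A toy comparison illustrating the rule for dotted numeric versions (the real check lives in MetaStoreSchemaInfo):

public class SchemaCompat {
  // dbVersion is compatible when dbVersion >= hiveVersion.
  static boolean isCompatible(String hiveVersion, String dbVersion) {
    String[] h = hiveVersion.split("\\.");
    String[] d = dbVersion.split("\\.");
    for (int i = 0; i < Math.max(h.length, d.length); i++) {
      int hv = i < h.length ? Integer.parseInt(h[i]) : 0;
      int dv = i < d.length ? Integer.parseInt(d[i]) : 0;
      if (dv != hv) {
        return dv > hv;
      }
    }
    return true; // equal versions are compatible
  }

  public static void main(String[] args) {
    System.out.println(isCompatible("1.2.0", "2.0.0")); // true: schema ahead (rolling upgrade)
    System.out.println(isCompatible("2.0.0", "1.2.0")); // false: schema behind the binaries
  }
}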
[07/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/TCLIService.h
--
diff --git a/service/src/gen/thrift/gen-cpp/TCLIService.h b/service/src/gen/thrift/gen-cpp/TCLIService.h
index 29a9f4a..2ea80c7 100644
--- a/service/src/gen/thrift/gen-cpp/TCLIService.h
+++ b/service/src/gen/thrift/gen-cpp/TCLIService.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
@@ -8,10 +8,16 @@
 #define TCLIService_H

 #include
+#include
 #include "TCLIService_types.h"

 namespace apache { namespace hive { namespace service { namespace cli { namespace thrift {

+#ifdef _WIN32
+  #pragma warning( push )
+  #pragma warning (disable : 4250 ) //inheriting methods via dominance
+#endif
+
 class TCLIServiceIf {
  public:
  virtual ~TCLIServiceIf() {}
@@ -130,9 +136,6 @@ typedef struct _TCLIService_OpenSession_args__isset {
 class TCLIService_OpenSession_args {
  public:
- static const char* ascii_fingerprint; // = "657FF0677838A57698AD9D58A923940A";
- static const uint8_t binary_fingerprint[16]; // = {0x65,0x7F,0xF0,0x67,0x78,0x38,0xA5,0x76,0x98,0xAD,0x9D,0x58,0xA9,0x23,0x94,0x0A};
-
 TCLIService_OpenSession_args(const TCLIService_OpenSession_args&);
 TCLIService_OpenSession_args& operator=(const TCLIService_OpenSession_args&);
 TCLIService_OpenSession_args() {
@@ -160,23 +163,18 @@ class TCLIService_OpenSession_args {
 uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
- friend std::ostream& operator<<(std::ostream& out, const TCLIService_OpenSession_args& obj);
 };

 class TCLIService_OpenSession_pargs {
  public:
- static const char* ascii_fingerprint; // = "657FF0677838A57698AD9D58A923940A";
- static const uint8_t binary_fingerprint[16]; // = {0x65,0x7F,0xF0,0x67,0x78,0x38,0xA5,0x76,0x98,0xAD,0x9D,0x58,0xA9,0x23,0x94,0x0A};
-
 virtual ~TCLIService_OpenSession_pargs() throw();
 const TOpenSessionReq* req;

 uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
- friend std::ostream& operator<<(std::ostream& out, const TCLIService_OpenSession_pargs& obj);
 };

 typedef struct _TCLIService_OpenSession_result__isset {
@@ -187,9 +185,6 @@ typedef struct _TCLIService_OpenSession_result__isset {
 class TCLIService_OpenSession_result {
  public:
- static const char* ascii_fingerprint; // = "C55268D57D6DC6A256619A7DB419699E";
- static const uint8_t binary_fingerprint[16]; // = {0xC5,0x52,0x68,0xD5,0x7D,0x6D,0xC6,0xA2,0x56,0x61,0x9A,0x7D,0xB4,0x19,0x69,0x9E};
-
 TCLIService_OpenSession_result(const TCLIService_OpenSession_result&);
 TCLIService_OpenSession_result& operator=(const TCLIService_OpenSession_result&);
 TCLIService_OpenSession_result() {
@@ -217,7 +212,6 @@ class TCLIService_OpenSession_result {
 uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
- friend std::ostream& operator<<(std::ostream& out, const TCLIService_OpenSession_result& obj);
 };

 typedef struct _TCLIService_OpenSession_presult__isset {
@@ -228,9 +222,6 @@ typedef struct _TCLIService_OpenSession_presult__isset {
 class TCLIService_OpenSession_presult {
  public:
- static const char* ascii_fingerprint; // = "C55268D57D6DC6A256619A7DB419699E";
- static const uint8_t binary_fingerprint[16]; // = {0xC5,0x52,0x68,0xD5,0x7D,0x6D,0xC6,0xA2,0x56,0x61,0x9A,0x7D,0xB4,0x19,0x69,0x9E};
-
 virtual ~TCLIService_OpenSession_presult() throw();
 TOpenSessionResp* success;
@@ -239,7 +230,6 @@ class TCLIService_OpenSession_presult {
 uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
- friend std::ostream& operator<<(std::ostream& out, const TCLIService_OpenSession_presult& obj);
 };

 typedef struct _TCLIService_CloseSession_args__isset {
@@ -250,9 +240,6 @@ typedef struct _TCLIService_CloseSession_args__isset {
 class TCLIService_CloseSession_args {
  public:
- static const char* ascii_fingerprint; // = "FD7076C37D193E2A343D9691B59D94EC";
- static const uint8_t binary_fingerprint[16]; // = {0xFD,0x70,0x76,0xC3,0x7D,0x19,0x3E,0x2A,0x34,0x3D,0x96,0x91,0xB5,0x9D,0x94,0xEC};
-
 TCLIService_CloseSession_args(const TCLIService_CloseSession_args&);
 TCLIService_CloseSession_args& operator=(const TCLIService_CloseSession_args&);
 TCLIService_CloseSession_args() {
@@ -280,23 +267,18 @@ class TCLIService_CloseSession_args {
 uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
- friend std::ostream& operator<<(std::ostream& out, const TCLIService_CloseSession_args& obj);
 };

 class TCLIService_CloseSession_pargs {
  public:
- static const char* ascii_fingerprint; //
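
The new #ifdef _WIN32 block above is the standard MSVC idiom for warning C4250 ("inherits via dominance"), which the generated service classes trigger by design: several bases share a virtual interface base, and the implementing base's override is selected "via dominance". A minimal sketch of the situation and the push/disable pattern (the type names If, Impl, Extra, Both are illustrative, not from the generated header; the matching #pragma warning(pop) presumably sits at the end of the generated file, outside this hunk):

    #include <iostream>

    #ifdef _WIN32
      #pragma warning( push )
      #pragma warning( disable : 4250 )  // inheriting methods via dominance
    #endif

    struct If {
      virtual ~If() {}
      virtual void ping() = 0;
    };
    struct Impl : virtual If {                 // provides the implementation
      void ping() override { std::cout << "ping\n"; }
    };
    struct Extra : virtual If {};              // extends the interface, overrides nothing
    struct Both : Impl, Extra {};              // Impl::ping() wins "via dominance" -> C4250 on MSVC

    #ifdef _WIN32
      #pragma warning( pop )                   // restore the caller's warning state
    #endif

    int main() {
      Both b;
      b.ping();  // the dominant override runs; the warning is noise, hence the pragma
    }
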
[23/55] [abbrv] hive git commit: HIVE-12253 : revert HIVE-12061 (Sergey Shelukhin, reviewed by Prasanth Jayachandran)
http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
--
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
index a82c363..44aa22e 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
@@ -1240,14 +1240,14 @@ uint32_t ThriftHiveMetastore_get_databases_result::read(::apache::thrift::protoc
 if (ftype == ::apache::thrift::protocol::T_LIST) {
 {
 this->success.clear();
-uint32_t _size714;
-::apache::thrift::protocol::TType _etype717;
-xfer += iprot->readListBegin(_etype717, _size714);
-this->success.resize(_size714);
-uint32_t _i718;
-for (_i718 = 0; _i718 < _size714; ++_i718)
+uint32_t _size713;
+::apache::thrift::protocol::TType _etype716;
+xfer += iprot->readListBegin(_etype716, _size713);
+this->success.resize(_size713);
+uint32_t _i717;
+for (_i717 = 0; _i717 < _size713; ++_i717)
 {
- xfer += iprot->readString(this->success[_i718]);
+ xfer += iprot->readString(this->success[_i717]);
 }
 xfer += iprot->readListEnd();
 }
@@ -1286,10 +1286,10 @@ uint32_t ThriftHiveMetastore_get_databases_result::write(::apache::thrift::proto
 xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
 {
 xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast(this->success.size()));
- std::vector ::const_iterator _iter719;
- for (_iter719 = this->success.begin(); _iter719 != this->success.end(); ++_iter719)
+ std::vector ::const_iterator _iter718;
+ for (_iter718 = this->success.begin(); _iter718 != this->success.end(); ++_iter718)
 {
-xfer += oprot->writeString((*_iter719));
+xfer += oprot->writeString((*_iter718));
 }
 xfer += oprot->writeListEnd();
 }
@@ -1334,14 +1334,14 @@ uint32_t ThriftHiveMetastore_get_databases_presult::read(::apache::thrift::proto
 if (ftype == ::apache::thrift::protocol::T_LIST) {
 {
 (*(this->success)).clear();
-uint32_t _size720;
-::apache::thrift::protocol::TType _etype723;
-xfer += iprot->readListBegin(_etype723, _size720);
-(*(this->success)).resize(_size720);
-uint32_t _i724;
-for (_i724 = 0; _i724 < _size720; ++_i724)
+uint32_t _size719;
+::apache::thrift::protocol::TType _etype722;
+xfer += iprot->readListBegin(_etype722, _size719);
+(*(this->success)).resize(_size719);
+uint32_t _i723;
+for (_i723 = 0; _i723 < _size719; ++_i723)
 {
- xfer += iprot->readString((*(this->success))[_i724]);
+ xfer += iprot->readString((*(this->success))[_i723]);
 }
 xfer += iprot->readListEnd();
 }
@@ -1458,14 +1458,14 @@ uint32_t ThriftHiveMetastore_get_all_databases_result::read(::apache::thrift::pr
 if (ftype == ::apache::thrift::protocol::T_LIST) {
 {
 this->success.clear();
-uint32_t _size725;
-::apache::thrift::protocol::TType _etype728;
-xfer += iprot->readListBegin(_etype728, _size725);
-this->success.resize(_size725);
-uint32_t _i729;
-for (_i729 = 0; _i729 < _size725; ++_i729)
+uint32_t _size724;
+::apache::thrift::protocol::TType _etype727;
+xfer += iprot->readListBegin(_etype727, _size724);
+this->success.resize(_size724);
+uint32_t _i728;
+for (_i728 = 0; _i728 < _size724; ++_i728)
 {
- xfer += iprot->readString(this->success[_i729]);
+ xfer += iprot->readString(this->success[_i728]);
 }
 xfer += iprot->readListEnd();
 }
@@ -1504,10 +1504,10 @@ uint32_t ThriftHiveMetastore_get_all_databases_result::write(::apache::thrift::p
 xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
 {
 xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast(this->success.size()));
- std::vector ::const_iterator _iter730;
- for (_iter730 = this->success.begin(); _iter730 != this->success.end(); ++_iter730)
+ std::vector ::const_iterator _iter729;
+ for (_iter729 = this->success.begin(); _iter729 != this->success.end(); ++_iter729)
 {
-xfer += oprot->writeString((*_iter730));
+xfer += oprot->writeString((*_iter729));
 }
 xfer += oprot->writeListEnd();
 }
@@ -155
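
Every change in this diff is a mechanical shift of the generated temporaries (_size714 becomes _size713, and so on): regenerating the file after the HIVE-12061 revert renumbers the counters that the Thrift compiler assigns sequentially across the whole file, while the serialization logic is untouched. The shape of that generated list-deserialization loop, pulled out as a standalone sketch rather than the verbatim generated member (readStringList is an illustrative name; assumes the Thrift C++ runtime headers):

    #include <string>
    #include <vector>
    #include <thrift/protocol/TProtocol.h>

    // Sketch of the generated pattern for reading a list<string> field.
    uint32_t readStringList(::apache::thrift::protocol::TProtocol* iprot,
                            std::vector<std::string>& out) {
      uint32_t xfer = 0;
      out.clear();
      uint32_t size;
      ::apache::thrift::protocol::TType etype;    // element type tag on the wire
      xfer += iprot->readListBegin(etype, size);
      out.resize(size);
      for (uint32_t i = 0; i < size; ++i) {
        xfer += iprot->readString(out[i]);        // deserialize each element in place
      }
      xfer += iprot->readListEnd();
      return xfer;                                // bytes consumed, as in the generated code
    }
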
[12/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
--
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
index 8354d38..59c7b94 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -8,6 +8,7 @@
 from thrift.Thrift import TType, TMessageType, TException, TApplicationException
 import fb303.FacebookService
+import logging
 from ttypes import *
 from thrift.Thrift import TProcessor
 from thrift.transport import TTransport
@@ -1091,7 +1092,7 @@ class Client(fb303.FacebookService.Client, Iface):
 return result.success
 if result.o1 is not None:
 raise result.o1
-raise TApplicationException(TApplicationException.MISSING_RESULT, "getMetaConf failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "getMetaConf failed: unknown result")

 def setMetaConf(self, key, value):
 """
@@ -1194,7 +1195,7 @@ class Client(fb303.FacebookService.Client, Iface):
 raise result.o1
 if result.o2 is not None:
 raise result.o2
-raise TApplicationException(TApplicationException.MISSING_RESULT, "get_database failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "get_database failed: unknown result")

 def drop_database(self, name, deleteData, cascade):
 """
@@ -1266,7 +1267,7 @@ class Client(fb303.FacebookService.Client, Iface):
 return result.success
 if result.o1 is not None:
 raise result.o1
-raise TApplicationException(TApplicationException.MISSING_RESULT, "get_databases failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "get_databases failed: unknown result")

 def get_all_databases(self):
 self.send_get_all_databases()
@@ -1294,7 +1295,7 @@ class Client(fb303.FacebookService.Client, Iface):
 return result.success
 if result.o1 is not None:
 raise result.o1
-raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_databases failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_databases failed: unknown result")

 def alter_database(self, dbname, db):
 """
@@ -1364,7 +1365,7 @@ class Client(fb303.FacebookService.Client, Iface):
 raise result.o1
 if result.o2 is not None:
 raise result.o2
-raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type failed: unknown result")

 def create_type(self, type):
 """
@@ -1401,7 +1402,7 @@ class Client(fb303.FacebookService.Client, Iface):
 raise result.o2
 if result.o3 is not None:
 raise result.o3
-raise TApplicationException(TApplicationException.MISSING_RESULT, "create_type failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "create_type failed: unknown result")

 def drop_type(self, type):
 """
@@ -1436,7 +1437,7 @@ class Client(fb303.FacebookService.Client, Iface):
 raise result.o1
 if result.o2 is not None:
 raise result.o2
-raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_type failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_type failed: unknown result")

 def get_type_all(self, name):
 """
@@ -1469,7 +1470,7 @@ class Client(fb303.FacebookService.Client, Iface):
 return result.success
 if result.o2 is not None:
 raise result.o2
-raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type_all failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type_all failed: unknown result")

 def get_fields(self, db_name, table_name):
 """
@@ -1508,7 +1509,7 @@ class Client(fb303.FacebookService.Client, Iface):
 raise result.o2
 if result.o3 is not None:
 raise result.o3
-raise TApplicationException(TApplicationException.MISSING_RESULT, "get_fields failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "get_fields failed: unknown result")

 def get_fields_with_environment_context(self, db_name, table_name, environment_context):
 """
@@ -1549,7 +1550,7 @@ class Client(fb303.FacebookService.Client, Iface):
 raise result.o2
 if result.o3 is not None:
 ra
[03/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/JobTrackerState.java
--
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/JobTrackerState.java b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/JobTrackerState.java
index ab48bf7..46c71ee 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/JobTrackerState.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/JobTrackerState.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
--
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
index 6235efd..934a8a5 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ThriftHive {

 public interface Iface extends org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface {
@@ -2951,7 +2951,7 @@ public class ThriftHive {
 public Object getFieldValue(_Fields field) {
 switch (field) {
 case NUM_ROWS:
-return Integer.valueOf(getNumRows());
+return getNumRows();
 }
 throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
--
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
index c6ee2c5..0fb33c7 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TArrayTypeEntry implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable {
 private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TArrayTypeEntry");
@@ -185,7 +185,7 @@ public class TArrayTypeEntry implements org.apache.thrift.TBase

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
--
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
index c58a260..788bc89 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TBinaryColumn implements org.apache.thrift
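
Two generator changes recur through these Java files: @Generated loses its date attribute (the "undated annotations" of the commit subject, so regenerating the sources no longer rewrites every file just to refresh a timestamp), and getFieldValue returns the primitive directly, letting autoboxing replace the explicit Integer.valueOf(...). In miniature (NumRowsHolder is an illustrative class, not one of the generated ones):

    import javax.annotation.Generated;

    // Undated: regeneration no longer produces a spurious one-line diff per file
    // just because the generation date changed.
    @Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
    public class NumRowsHolder {
      private int numRows;

      public int getNumRows() { return numRows; }

      public Object getFieldValue(String field) {
        if ("NUM_ROWS".equals(field)) {
          return getNumRows();  // autoboxed to Integer; 0.9.2 emitted Integer.valueOf(getNumRows())
        }
        throw new IllegalStateException();
      }
    }
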
[13/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
--
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
index 4046e95..b9b7f3c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ShowLocksResponse implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable {
 private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShowLocksResponse");

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponseElement.java
--
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponseElement.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponseElement.java
index e0597a6..037a383 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponseElement.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponseElement.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ShowLocksResponseElement implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable {
 private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShowLocksResponseElement");
@@ -640,7 +640,7 @@ public class ShowLocksResponseElement implements org.apache.thrift.TBase

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
--
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
index 4b4ee50..c32f50c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class SkewedInfo implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable {
 private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SkewedInfo");

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
--
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
index eb95e42..938f06b 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apa
[05/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/TCLIService_types.h
--
diff --git a/service/src/gen/thrift/gen-cpp/TCLIService_types.h b/service/src/gen/thrift/gen-cpp/TCLIService_types.h
index 4536b41..b078c99 100644
--- a/service/src/gen/thrift/gen-cpp/TCLIService_types.h
+++ b/service/src/gen/thrift/gen-cpp/TCLIService_types.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
@@ -338,9 +338,6 @@ typedef struct _TTypeQualifierValue__isset {
 class TTypeQualifierValue {
  public:
- static const char* ascii_fingerprint; // = "A7801670116150C65ACA43E6F679BA79";
- static const uint8_t binary_fingerprint[16]; // = {0xA7,0x80,0x16,0x70,0x11,0x61,0x50,0xC6,0x5A,0xCA,0x43,0xE6,0xF6,0x79,0xBA,0x79};
-
 TTypeQualifierValue(const TTypeQualifierValue&);
 TTypeQualifierValue& operator=(const TTypeQualifierValue&);
 TTypeQualifierValue() : i32Value(0), stringValue() {
@@ -377,18 +374,21 @@ class TTypeQualifierValue {
 uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
- friend std::ostream& operator<<(std::ostream& out, const TTypeQualifierValue& obj);
+ virtual void printTo(std::ostream& out) const;
 };

 void swap(TTypeQualifierValue &a, TTypeQualifierValue &b);

+inline std::ostream& operator<<(std::ostream& out, const TTypeQualifierValue& obj)
+{
+ obj.printTo(out);
+ return out;
+}
+
 class TTypeQualifiers {
  public:
- static const char* ascii_fingerprint; // = "6C72981CFA989214285648FA8C196C47";
- static const uint8_t binary_fingerprint[16]; // = {0x6C,0x72,0x98,0x1C,0xFA,0x98,0x92,0x14,0x28,0x56,0x48,0xFA,0x8C,0x19,0x6C,0x47};
-
 TTypeQualifiers(const TTypeQualifiers&);
 TTypeQualifiers& operator=(const TTypeQualifiers&);
 TTypeQualifiers() {
@@ -414,11 +414,17 @@ class TTypeQualifiers {
 uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
- friend std::ostream& operator<<(std::ostream& out, const TTypeQualifiers& obj);
+ virtual void printTo(std::ostream& out) const;
 };

 void swap(TTypeQualifiers &a, TTypeQualifiers &b);

+inline std::ostream& operator<<(std::ostream& out, const TTypeQualifiers& obj)
+{
+ obj.printTo(out);
+ return out;
+}
+
 typedef struct _TPrimitiveTypeEntry__isset {
 _TPrimitiveTypeEntry__isset() : typeQualifiers(false) {}
 bool typeQualifiers :1;
@@ -427,9 +433,6 @@ typedef struct _TPrimitiveTypeEntry__isset {
 class TPrimitiveTypeEntry {
  public:
- static const char* ascii_fingerprint; // = "755674F6A5C8EB47868686AE386FBC1C";
- static const uint8_t binary_fingerprint[16]; // = {0x75,0x56,0x74,0xF6,0xA5,0xC8,0xEB,0x47,0x86,0x86,0x86,0xAE,0x38,0x6F,0xBC,0x1C};
-
 TPrimitiveTypeEntry(const TPrimitiveTypeEntry&);
 TPrimitiveTypeEntry& operator=(const TPrimitiveTypeEntry&);
 TPrimitiveTypeEntry() : type((TTypeId::type)0) {
@@ -464,18 +467,21 @@ class TPrimitiveTypeEntry {
 uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
- friend std::ostream& operator<<(std::ostream& out, const TPrimitiveTypeEntry& obj);
+ virtual void printTo(std::ostream& out) const;
 };

 void swap(TPrimitiveTypeEntry &a, TPrimitiveTypeEntry &b);

+inline std::ostream& operator<<(std::ostream& out, const TPrimitiveTypeEntry& obj)
+{
+ obj.printTo(out);
+ return out;
+}
+
 class TArrayTypeEntry {
  public:
- static const char* ascii_fingerprint; // = "E86CACEB22240450EDCBEFC3A83970E4";
- static const uint8_t binary_fingerprint[16]; // = {0xE8,0x6C,0xAC,0xEB,0x22,0x24,0x04,0x50,0xED,0xCB,0xEF,0xC3,0xA8,0x39,0x70,0xE4};
-
 TArrayTypeEntry(const TArrayTypeEntry&);
 TArrayTypeEntry& operator=(const TArrayTypeEntry&);
 TArrayTypeEntry() : objectTypePtr(0) {
@@ -501,18 +507,21 @@ class TArrayTypeEntry {
 uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
- friend std::ostream& operator<<(std::ostream& out, const TArrayTypeEntry& obj);
+ virtual void printTo(std::ostream& out) const;
 };

 void swap(TArrayTypeEntry &a, TArrayTypeEntry &b);

+inline std::ostream& operator<<(std::ostream& out, const TArrayTypeEntry& obj)
+{
+ obj.printTo(out);
+ return out;
+}
+
 class TMapTypeEntry {
  public:
- static const char* ascii_fingerprint; // = "989D1F1AE8D148D5E2119FFEC4BBBEE3";
- static const uint8_t binary_fingerprint[16]; // = {0x98,0x9D,0x1F,0x1A,0xE8,0xD1,0x48,0xD5,0xE2,0x11,0x9F,0xFE,0xC4,0xBB,0xBE,0xE3};
-
 TMapTypeEntry(const TMapTypeEntry&);
 TMapTypeEntry& operator=(const TMapTypeEntry&);
 TMapTypeEntry() : keyTypePtr(0), valueTypePtr(
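
This header shows the main shape change in the 0.9.3 C++ output: the per-struct friend std::ostream& operator<< (previously defined in the .cpp) is replaced by a virtual printTo(std::ostream&) member plus a free inline operator<< that delegates to it, so structs print correctly through the virtual call even when handled via a base reference. The pattern, reduced to one illustrative struct (TExample is a stand-in, not from the real header):

    #include <cstdint>
    #include <iostream>
    #include <ostream>

    class TExample {
     public:
      int32_t i32Value;
      explicit TExample(int32_t v) : i32Value(v) {}
      // 0.9.3: a virtual member does the formatting...
      virtual void printTo(std::ostream& out) const {
        out << "TExample(i32Value=" << i32Value << ")";
      }
      virtual ~TExample() {}
    };

    // ...and a free inline operator<< just delegates, replacing the per-struct
    // friend declaration that 0.9.2 emitted alongside the fingerprint members.
    inline std::ostream& operator<<(std::ostream& out, const TExample& obj) {
      obj.printTo(out);
      return out;
    }

    int main() {
      TExample e(42);
      std::cout << e << "\n";  // prints TExample(i32Value=42)
    }
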
[01/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)
Repository: hive
Updated Branches:
  refs/heads/spark 51f257af0 -> c9073aadc

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-py/hive_service/ThriftHive.py
--
diff --git a/service/src/gen/thrift/gen-py/hive_service/ThriftHive.py b/service/src/gen/thrift/gen-py/hive_service/ThriftHive.py
index 177531d..978c2a3 100644
--- a/service/src/gen/thrift/gen-py/hive_service/ThriftHive.py
+++ b/service/src/gen/thrift/gen-py/hive_service/ThriftHive.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -8,6 +8,7 @@
 from thrift.Thrift import TType, TMessageType, TException, TApplicationException
 import hive_metastore.ThriftHiveMetastore
+import logging
 from ttypes import *
 from thrift.Thrift import TProcessor
 from thrift.transport import TTransport
@@ -116,7 +117,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
 return result.success
 if result.ex is not None:
 raise result.ex
-raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchOne failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchOne failed: unknown result")

 def fetchN(self, numRows):
 """
@@ -149,7 +150,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
 return result.success
 if result.ex is not None:
 raise result.ex
-raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchN failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchN failed: unknown result")

 def fetchAll(self):
 self.send_fetchAll()
@@ -177,7 +178,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
 return result.success
 if result.ex is not None:
 raise result.ex
-raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchAll failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchAll failed: unknown result")

 def getSchema(self):
 self.send_getSchema()
@@ -205,7 +206,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
 return result.success
 if result.ex is not None:
 raise result.ex
-raise TApplicationException(TApplicationException.MISSING_RESULT, "getSchema failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "getSchema failed: unknown result")

 def getThriftSchema(self):
 self.send_getThriftSchema()
@@ -233,7 +234,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
 return result.success
 if result.ex is not None:
 raise result.ex
-raise TApplicationException(TApplicationException.MISSING_RESULT, "getThriftSchema failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "getThriftSchema failed: unknown result")

 def getClusterStatus(self):
 self.send_getClusterStatus()
@@ -261,7 +262,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
 return result.success
 if result.ex is not None:
 raise result.ex
-raise TApplicationException(TApplicationException.MISSING_RESULT, "getClusterStatus failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "getClusterStatus failed: unknown result")

 def getQueryPlan(self):
 self.send_getQueryPlan()
@@ -289,7 +290,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
 return result.success
 if result.ex is not None:
 raise result.ex
-raise TApplicationException(TApplicationException.MISSING_RESULT, "getQueryPlan failed: unknown result");
+raise TApplicationException(TApplicationException.MISSING_RESULT, "getQueryPlan failed: unknown result")

 def clean(self):
 self.send_clean()
@@ -351,9 +352,17 @@ class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor)
 result = execute_result()
 try:
 self._handler.execute(args.query)
-except HiveServerException, ex:
+ msg_type = TMessageType.REPLY
+except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+ raise
+except HiveServerException as ex:
+ msg_type = TMessageType.REPLY
 result.ex = ex
-oprot.writeMessageBegin("execute", TMessageType.REPLY, seqid)
+except Exception as ex:
+ msg_type = TMessageType.EXCEPTION
+ logging.exception(ex)
+ result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+oprot.writeMessageBegin("execute", msg_type, seqid)
 result.write(oprot)
 oprot.writeMessageEnd()
 oprot.trans.flush()
@@ -365,9 +374,17 @@ class Processor(hive_metastore.ThriftHiveM
hive git commit: HIVE-12283: Fix test failures after HIVE-11844 [Spark Branch] (Rui via Xuefu)
Repository: hive
Updated Branches:
  refs/heads/spark 84363196b -> 51f257af0

HIVE-12283: Fix test failures after HIVE-11844 [Spark Branch] (Rui via Xuefu)

Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/51f257af
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/51f257af
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/51f257af

Branch: refs/heads/spark
Commit: 51f257af0d881bfb7978603e55066db6b4e7af05
Parents: 8436319
Author: Xuefu Zhang
Authored: Wed Oct 28 05:08:07 2015 -0700
Committer: Xuefu Zhang
Committed: Wed Oct 28 05:08:07 2015 -0700
--
 .../spark/vector_inner_join.q.out | 12 ++--
 .../spark/vector_outer_join2.q.out | 20
 2 files changed, 14 insertions(+), 18 deletions(-)
--

http://git-wip-us.apache.org/repos/asf/hive/blob/51f257af/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out
--
diff --git a/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out b/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out
index d1b775f..bf7090b 100644
--- a/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out
@@ -140,17 +140,17 @@ STAGE PLANS:
 alias: t1
 Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
-predicate: a is not null (type: boolean)
-Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+predicate: (a > 2) (type: boolean)
+Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: a (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
-Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
 Spark HashTable Sink Operator
 keys:
 0 _col0 (type: int)
@@ -184,10 +184,10 @@ STAGE PLANS:
 outputColumnNames: _col0
 input vertices:
 1 Map 2
-Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/51f257af/ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out
--
diff --git a/ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out b/ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out
index 0015708..38051fd 100644
--- a/ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out
@@ -262,19 +262,15 @@ STAGE PLANS:
 input vertices:
 1 Map 4
 Statistics: Num rows: 24 Data size: 6117 Basic stats: COMPLETE Column stats: NONE
-Select Operator
- expressions: _col1 (type: bigint)
- outputColumnNames: _col0
- Statistics: Num rows: 24 Data size: 6117 Basic stats: COMPLETE Column stats: NONE
- Group By Operator
-aggregations: count(), sum(_col0)
-mode: hash
-outputColumnNames: _col0, _col1
+Group