[hive] branch master updated: HIVE-25443 : Arrow SerDe Cannot serialize/deserialize complex data types When there are more than 1024 values (#2581) (Syed Shameerur Rahman reviewed by Zoltan Haindrich)

2021-11-17 Thread kgyrtkirk
This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 8e693d1  HIVE-25443 : Arrow SerDe Cannot serialize/deserialize complex 
data types When there are more than 1024 values (#2581) (Syed Shameerur Rahman 
reviewed by Zoltan Haindrich)
8e693d1 is described below

commit 8e693d1b36e1ff0aacd802d16e1a3d0ec72ef04b
Author: Syed Shameerur Rahman 
AuthorDate: Thu Nov 18 12:59:50 2021 +0530

HIVE-25443 : Arrow SerDe Cannot serialize/deserialize complex data types 
When there are more than 1024 values (#2581) (Syed Shameerur Rahman reviewed by 
Zoltan Haindrich)
---
 .../hive/ql/io/arrow/ArrowColumnarBatchSerDe.java  |  4 +-
 .../hadoop/hive/ql/io/arrow/Deserializer.java  |  3 ++
 .../ql/io/arrow/TestArrowColumnarBatchSerDe.java   | 43 ++
 3 files changed, 48 insertions(+), 2 deletions(-)

diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/io/arrow/ArrowColumnarBatchSerDe.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/arrow/ArrowColumnarBatchSerDe.java
index fdef3b8..ceb794f 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/io/arrow/ArrowColumnarBatchSerDe.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/io/arrow/ArrowColumnarBatchSerDe.java
@@ -210,9 +210,9 @@ public class ArrowColumnarBatchSerDe extends AbstractSerDe {
   static ListColumnVector toStructListVector(MapColumnVector mapVector) {
 final StructColumnVector structVector;
 final ListColumnVector structListVector;
-structVector = new StructColumnVector();
+structVector = new StructColumnVector(mapVector.childCount);
 structVector.fields = new ColumnVector[] {mapVector.keys, 
mapVector.values};
-structListVector = new ListColumnVector();
+structListVector = new ListColumnVector(mapVector.childCount, null);
 structListVector.child = structVector;
 structListVector.childCount = mapVector.childCount;
 structListVector.isRepeating = mapVector.isRepeating;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/arrow/Deserializer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/arrow/Deserializer.java
index ac4d237..ce8488f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/arrow/Deserializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/arrow/Deserializer.java
@@ -391,6 +391,7 @@ class Deserializer {
 
   private void readList(FieldVector arrowVector, ListColumnVector hiveVector, 
ListTypeInfo typeInfo) {
 final int size = arrowVector.getValueCount();
+hiveVector.ensureSize(size, false);
 final ArrowBuf offsets = arrowVector.getOffsetBuffer();
 final int OFFSET_WIDTH = 4;
 
@@ -412,6 +413,7 @@ class Deserializer {
 
   private void readMap(FieldVector arrowVector, MapColumnVector hiveVector, 
MapTypeInfo typeInfo) {
 final int size = arrowVector.getValueCount();
+hiveVector.ensureSize(size, false);
 final ListTypeInfo mapStructListTypeInfo = toStructListTypeInfo(typeInfo);
 final ListColumnVector mapStructListVector = 
toStructListVector(hiveVector);
 final StructColumnVector mapStructVector = (StructColumnVector) 
mapStructListVector.child;
@@ -430,6 +432,7 @@ class Deserializer {
 
   private void readStruct(FieldVector arrowVector, StructColumnVector 
hiveVector, StructTypeInfo typeInfo) {
 final int size = arrowVector.getValueCount();
+hiveVector.ensureSize(size, false);
 final List<TypeInfo> fieldTypeInfos = typeInfo.getAllStructFieldTypeInfos();
 final int fieldSize = arrowVector.getChildrenFromFields().size();
 for (int i = 0; i < fieldSize; i++) {
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
 
b/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
index d803063..a4b296b 100644
--- 
a/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
+++ 
b/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.io.arrow;
 
+import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_ARROW_BATCH_SIZE;
 import com.google.common.base.Joiner;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -157,6 +158,7 @@ public class TestArrowColumnarBatchSerDe {
   @Before
   public void setUp() {
 conf = new Configuration();
+conf.setInt(HIVE_ARROW_BATCH_SIZE.varname, 1025);
   }
 
   private static ByteWritable byteW(int value) {
@@ -1024,4 +1026,45 @@ public class TestArrowColumnarBatchSerDe {
 initAndSerializeAndDeserialize(schema, toList(DECIMAL_ROWS));
   }
 
+  @Test
+  public void testListBooleanWithMoreThan1024Values() throws SerDeException {
+    String[][] schema = {
+        {"boolean_list", "array<boolean>"},
+    };
+
+Object[][] rows = new Object[1025][1];
+for (int i = 0; i < 1025; i++) {
+  rows[i][0] = 
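
For context on the HIVE-25443 fix above: Hive's column vectors are allocated for the
default batch size of 1024 rows, so once an Arrow batch carries more values than that,
the complex-type vectors (list/map/struct children) must be grown before they are
written to; that is what the added ensureSize() calls and the childCount-based
constructors do. A minimal standalone sketch of that resize pattern, using only the
hive-storage-api vector classes (illustration only, not code from the commit):

    import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;

    public class EnsureSizeSketch {
      public static void main(String[] args) {
        // A ListColumnVector starts out sized for the default batch of 1024 rows.
        ListColumnVector list = new ListColumnVector(1024, new LongColumnVector(1024));

        int incoming = 1025; // one row more than the vector was sized for
        // Without this call, writing offsets/lengths for row 1024 would overflow
        // the pre-allocated arrays (ArrayIndexOutOfBoundsException).
        list.ensureSize(incoming, false); // false: old contents need not be kept

        for (int row = 0; row < incoming; row++) {
          list.offsets[row] = row;
          list.lengths[row] = 1;
        }
        System.out.println("wrote " + incoming + " list entries");
      }
    }

The new test above drives exactly this path by raising HIVE_ARROW_BATCH_SIZE to 1025
and serializing/deserializing 1025 rows.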

[hive] branch master updated: HIVE-25095: Beeline/hive -e command can't deal with query with trailing quote (#2526) (Robbie Zhang reviewed by Zoltan Haindrich)

2021-11-17 Thread kgyrtkirk
This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 651b0ef  HIVE-25095: Beeline/hive -e command can't deal with query 
with trailing quote (#2526) (Robbie Zhang reviewed by Zoltan Haindrich)
651b0ef is described below

commit 651b0ef7f1e4c6b0e4696ccd75e032ab965006bc
Author: Robbie Zhang 
AuthorDate: Thu Nov 18 18:22:07 2021 +1100

HIVE-25095: Beeline/hive -e command can't deal with query with trailing 
quote (#2526) (Robbie Zhang reviewed by Zoltan Haindrich)

Co-authored-by: Robbie Zhang 
---
 beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java | 4 +++-
 beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java   | 6 ++
 pom.xml | 2 +-
 3 files changed, 10 insertions(+), 2 deletions(-)

diff --git 
a/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java 
b/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
index 26e118a..26a892f 100644
--- a/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
+++ b/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
@@ -193,11 +193,13 @@ public class TestBeelineArgParsing {
   public void testQueryScripts() throws Exception {
 TestBeeline bl = new TestBeeline();
 String args[] = new String[] {"-u", "url", "-n", "name",
-  "-p", "password", "-d", "driver", "-e", "select1", "-e", "select2"};
+  "-p", "password", "-d", "driver", "-e", "select1", "-e", "select2",
+  "-e", "select \"hive\""};
 Assert.assertEquals(0, bl.initArgs(args));
 Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
 Assert.assertTrue(bl.queries.contains("select1"));
 Assert.assertTrue(bl.queries.contains("select2"));
+Assert.assertTrue(bl.queries.contains("select \"hive\""));
   }
 
   /**
diff --git a/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java 
b/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java
index 1e68405..5ea4d11 100644
--- a/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java
+++ b/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java
@@ -168,6 +168,12 @@ public class TestHiveCli {
   }
 
   @Test
+  public void testSqlFromCmdWithEmbeddedQuotes() {
+verifyCMD(null, "hive", out,
+new String[] { "-e", "select \"hive\"" }, ERRNO_OK, true);
+  }
+
+  @Test
   public void testInvalidOptions() {
 verifyCMD(null,
 "The '-e' and '-f' options cannot be specified simultaneously", err,
diff --git a/pom.xml b/pom.xml
index c5542e4..3f28653 100644
--- a/pom.xml
+++ b/pom.xml
@@ -118,7 +118,7 @@
 5.2.4
 3.2.0-release
 5.2.4
-1.2
+1.4
 1.15
 3.2.2
 4.1
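
For context on HIVE-25095: the functional change is the commons-cli bump from 1.2 to
1.4 in pom.xml; the new Beeline and Hive CLI tests then assert that a query ending in
a double quote survives "-e" parsing. A minimal commons-cli sketch of that behavior
(Beeline's own option handling layers more logic on top of commons-cli, so treat this
only as an approximation of the underlying parser change):

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.GnuParser;
    import org.apache.commons.cli.Options;

    public class TrailingQuoteCheck {
      public static void main(String[] args) throws Exception {
        Options options = new Options();
        options.addOption("e", true, "query to execute");

        // The same style of argument the new tests above pass to Beeline/CLI.
        String[] argv = {"-e", "select \"hive\""};
        CommandLine cmd = new GnuParser().parse(options, argv);

        // With commons-cli 1.4 the embedded/trailing quote survives parsing;
        // 1.2 stripped the unmatched trailing quote, which is the HIVE-25095 bug.
        System.out.println(cmd.getOptionValue("e"));
      }
    }

GnuParser is deprecated in newer commons-cli releases but still present in 1.4.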


[hive] branch master updated: disable unstable tests

2021-11-17 Thread kgyrtkirk
This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 2cdfa8d  disable unstable tests
2cdfa8d is described below

commit 2cdfa8d3e6241c28803d804a8042f784c032d061
Author: Zoltan Haindrich 
AuthorDate: Wed Nov 17 17:37:45 2021 +

disable unstable tests
---
 .../apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java  | 2 ++
 .../org/apache/hadoop/hive/ql/txn/compactor/TestCompactionMetrics.java  | 1 +
 ql/src/test/queries/clientpositive/replication_metrics_ingest.q | 1 +
 3 files changed, 4 insertions(+)

diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java
index cca89b9..e0b389e 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java
@@ -254,6 +254,7 @@ public class TestScheduledReplicationScenarios extends 
BaseReplicationScenariosA
   }
 
   @Test
+  @Ignore("HIVE-25720")
   public void testCompleteFailoverWithReverseBootstrap() throws Throwable {
 String withClause = "'" + HiveConf.ConfVars.HIVE_IN_TEST + "' = 'true','"
 + HiveConf.ConfVars.REPL_RETAIN_PREV_DUMP_DIR + "'='true'" ;
@@ -400,6 +401,7 @@ public class TestScheduledReplicationScenarios extends 
BaseReplicationScenariosA
   }
 
   @Test
+  @Ignore("HIVE-25720")
   public void testSetPolicyId() throws Throwable {
 String withClause =
 " WITH('" + HiveConf.ConfVars.HIVE_IN_TEST + "' = 'true'" + ",'"
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCompactionMetrics.java
 
b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCompactionMetrics.java
index 51fe394..13c97fb 100644
--- 
a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCompactionMetrics.java
+++ 
b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCompactionMetrics.java
@@ -178,6 +178,7 @@ public class TestCompactionMetrics  extends CompactorTest {
   }
 
   @Test
+  @org.junit.Ignore("HIVE-25716")
   public void testOldestReadyForCleaningAge() throws Exception {
 conf.setIntVar(HiveConf.ConfVars.COMPACTOR_MAX_NUM_DELTA, 1);
 
diff --git a/ql/src/test/queries/clientpositive/replication_metrics_ingest.q 
b/ql/src/test/queries/clientpositive/replication_metrics_ingest.q
index a710b00..a03aa86 100644
--- a/ql/src/test/queries/clientpositive/replication_metrics_ingest.q
+++ b/ql/src/test/queries/clientpositive/replication_metrics_ingest.q
@@ -1,3 +1,4 @@
+--! qt:disabled:HIVE-25719
 --! qt:authorizer
 --! qt:scheduledqueryservice
 --! qt:sysdb


[hive] branch master updated: HIVE-25670: Avoid getTable() calls for foreign key tables not used in… (#2763) (Steve Carlin reviewed by Zoltan Haindrich)

2021-11-17 Thread kgyrtkirk
This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 47ba530  HIVE-25670: Avoid getTable() calls for foreign key tables not 
used in… (#2763) (Steve Carlin reviewed by Zoltan Haindrich)
47ba530 is described below

commit 47ba530464c6941b4fc0f2882fd09ce1683808d6
Author: scarlin-cloudera <55709772+scarlin-cloud...@users.noreply.github.com>
AuthorDate: Wed Nov 17 08:23:42 2021 -0800

HIVE-25670: Avoid getTable() calls for foreign key tables not used in… 
(#2763) (Steve Carlin reviewed by Zoltan Haindrich)

RelOptHiveTable currently fetches the Table information for all
referential constraint tables. However, it only needs to fetch the tables
that are used in the query.
---
 .../ql/metadata/HiveMaterializedViewsRegistry.java |  5 +-
 .../hive/ql/optimizer/calcite/RelOptHiveTable.java | 47 +++--
 .../hadoop/hive/ql/parse/CalcitePlanner.java   |  5 +-
 .../apache/hadoop/hive/ql/parse/ParseContext.java  |  6 +-
 .../hadoop/hive/ql/parse/ParsedQueryTables.java| 29 +
 .../apache/hadoop/hive/ql/parse/QueryTables.java   | 76 ++
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java | 10 +--
 7 files changed, 142 insertions(+), 36 deletions(-)

diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
index 9af4d0a..fba0a02 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
@@ -66,6 +66,7 @@ import 
org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter;
 import org.apache.hadoop.hive.ql.parse.CBOPlan;
 import org.apache.hadoop.hive.ql.parse.CalcitePlanner;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
+import org.apache.hadoop.hive.ql.parse.QueryTables;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -459,7 +460,7 @@ public final class HiveMaterializedViewsRegistry {
   // for materialized views.
   RelOptHiveTable optTable = new RelOptHiveTable(null, 
cluster.getTypeFactory(), fullyQualifiedTabName,
   rowType, viewTable, nonPartitionColumns, partitionColumns, new 
ArrayList<>(),
-  conf, null, new HashMap<>(), new HashMap<>(), new HashMap<>(), new 
AtomicInteger());
+  conf, null, new QueryTables(true), new HashMap<>(), new HashMap<>(), 
new AtomicInteger());
   DruidTable druidTable = new DruidTable(new DruidSchema(address, address, 
false),
   dataSource, RelDataTypeImpl.proto(rowType), metrics, 
DruidTable.DEFAULT_TIMESTAMP_COLUMN,
   intervals, null, null);
@@ -474,7 +475,7 @@ public final class HiveMaterializedViewsRegistry {
   // for materialized views.
   RelOptHiveTable optTable = new RelOptHiveTable(null, 
cluster.getTypeFactory(), fullyQualifiedTabName,
   rowType, viewTable, nonPartitionColumns, partitionColumns, new 
ArrayList<>(),
-  conf, null, new HashMap<>(), new HashMap<>(), new HashMap<>(), new 
AtomicInteger());
+  conf, null, new QueryTables(true), new HashMap<>(), new HashMap<>(), 
new AtomicInteger());
   tableRel = new HiveTableScan(cluster, 
cluster.traitSetOf(HiveRelNode.CONVENTION), optTable,
   viewTable.getTableName(), null, false, false);
 }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
index 8f9b78c..385fe9a 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
@@ -67,6 +67,7 @@ import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import 
org.apache.hadoop.hive.ql.optimizer.calcite.translator.ExprNodeConverter;
 import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
 import org.apache.hadoop.hive.ql.parse.ColumnStatsList;
+import org.apache.hadoop.hive.ql.parse.ParsedQueryTables;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.plan.ColStatistics;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -82,6 +83,7 @@ import org.slf4j.LoggerFactory;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 
 public class RelOptHiveTable implements RelOptTable {
 
@@ -102,11 +104,12 @@ public class RelOptHiveTable implements RelOptTable {
   private final int   noOfNonVirtualCols;
   private final 
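
The RelOptHiveTable diff above is cut off by the archive, but the idea HIVE-25670
implements is simple: resolve a table's metadata only when the query actually
references it, so foreign-key targets that never appear in the query never trigger a
metastore getTable() call. A hypothetical sketch of that lazy-lookup shape (names are
made up; this is not Hive's QueryTables API):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    public class LazyTableLookup<T> {
      private final Map<String, T> resolved = new HashMap<>();
      private final Function<String, T> fetcher; // e.g. a metastore lookup

      public LazyTableLookup(Function<String, T> fetcher) {
        this.fetcher = fetcher;
      }

      public T get(String tableName) {
        // Only the first real use of a table pays the fetch cost.
        return resolved.computeIfAbsent(tableName, fetcher);
      }

      public static void main(String[] args) {
        LazyTableLookup<String> tables = new LazyTableLookup<>(name -> {
          System.out.println("fetching metadata for " + name);
          return "Table(" + name + ")";
        });
        // A foreign key may point at "dim_customer", but if the query never touches
        // it, no fetch happens; only tables actually used are resolved.
        System.out.println(tables.get("fact_sales"));
      }
    }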

[hive] branch master updated: HIVE-25690: Fix column reorder detection for Iceberg schema evolution (Marton Bod, reviewed by Adam Szita)

2021-11-17 Thread mbod
This is an automated email from the ASF dual-hosted git repository.

mbod pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 7c3899c  HIVE-25690: Fix column reorder detection for Iceberg schema 
evolution (Marton Bod, reviewed by Adam Szita)
7c3899c is described below

commit 7c3899c51d87a31bcb49fc10750f0a23b67b4041
Author: Marton Bod 
AuthorDate: Wed Nov 17 14:19:10 2021 +0100

HIVE-25690: Fix column reorder detection for Iceberg schema evolution 
(Marton Bod, reviewed by Adam Szita)
---
 .../org/apache/iceberg/hive/HiveSchemaUtil.java| 58 +-
 .../iceberg/mr/hive/HiveIcebergMetaHook.java   |  5 +-
 .../mr/hive/TestHiveIcebergSchemaEvolution.java| 48 ++
 .../src/test/queries/positive/llap_iceberg_read.q  | 16 +++---
 .../queries/positive/vectorized_iceberg_read.q |  4 +-
 .../results/positive/llap/llap_iceberg_read.q.out  | 32 ++--
 .../positive/llap/vectorized_iceberg_read.q.out|  6 +--
 .../results/positive/vectorized_iceberg_read.q.out |  6 +--
 8 files changed, 128 insertions(+), 47 deletions(-)

diff --git 
a/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveSchemaUtil.java
 
b/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveSchemaUtil.java
index 57123e1..cc9ad46 100644
--- 
a/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveSchemaUtil.java
+++ 
b/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveSchemaUtil.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.iceberg.PartitionSpec;
 import org.apache.iceberg.Schema;
+import org.apache.iceberg.relocated.com.google.common.collect.Maps;
 import org.apache.iceberg.types.Type;
 import org.apache.iceberg.types.Types;
 import org.apache.iceberg.util.Pair;
@@ -178,28 +179,61 @@ public final class HiveSchemaUtil {
   }
 
   /**
-   * Compares a list of columns to another list, by name, to find an out of order column.
-   * It iterates through updated one by one, and compares the name of the column to the name of the column in the old
-   * list, in the same position. It returns the first mismatch it finds in updated, if any.
+   * Compares two lists of columns to each other to find the (singular) column that was moved. This works ideally for
+   * identifying the column that was moved by an ALTER TABLE ... CHANGE COLUMN command.
    *
-   * @param updated The list of the columns after some updates have taken place
+   * Note: This method is only suitable for finding a single reordered column.
+   * Consequently, this method is NOT suitable for handling scenarios where multiple column reorders are possible at the
+   * same time, such as ALTER TABLE ... REPLACE COLUMNS commands.
+   *
+   * @param updated The list of the columns after some updates have taken place (if any)
    * @param old The list of the original columns
    * @param renameMapping A map of name aliases for the updated columns (e.g. if a column rename occurred)
-   * @return A pair consisting of the first out of order column name, and its preceding column name (if any).
+   * @return A pair consisting of the reordered column's name, and its preceding column's name (if any).
    * Returns a null in case there are no out of order columns.
    */
-  public static Pair<String, Optional<String>> getFirstOutOfOrderColumn(List<FieldSchema> updated,
+  public static Pair<String, Optional<String>> getReorderedColumn(List<FieldSchema> updated,
                                                                   List<FieldSchema> old,
                                                                   Map<String, String> renameMapping) {
-    for (int i = 0; i < updated.size() && i < old.size(); ++i) {
+    // first collect the updated index for each column
+    Map<String, Integer> nameToNewIndex = Maps.newHashMap();
+    for (int i = 0; i < updated.size(); ++i) {
       String updatedCol = renameMapping.getOrDefault(updated.get(i).getName(), updated.get(i).getName());
-      String oldCol = old.get(i).getName();
-      if (!oldCol.equals(updatedCol)) {
-        Optional<String> previousCol = i > 0 ? Optional.of(updated.get(i - 1).getName()) : Optional.empty();
-        return Pair.of(updatedCol, previousCol);
+      nameToNewIndex.put(updatedCol, i);
+    }
+
+    // find the column which has the highest index difference between its position in the old vs the updated list
+    String reorderedColName = null;
+    int maxIndexDiff = 0;
+    for (int oldIndex = 0; oldIndex < old.size(); ++oldIndex) {
+      String oldName = old.get(oldIndex).getName();
+      Integer newIndex = nameToNewIndex.get(oldName);
+      if (newIndex != null) {
+        int indexDiff = Math.abs(newIndex - oldIndex);
+        if (maxIndexDiff < indexDiff) {
+          maxIndexDiff = indexDiff;
+          reorderedColName = oldName;
+        }
+      }
+    }
+
+
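
The HiveSchemaUtil diff above is also truncated before the method returns. The
approach it describes can be sketched on plain column names, ignoring the rename
mapping; the final step (pairing the moved column with the column it now follows)
mirrors the javadoc's contract rather than the cut-off original code:

    import java.util.AbstractMap.SimpleEntry;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Optional;

    public class ReorderedColumnSketch {

      // Assumes exactly one column was moved (the ALTER TABLE ... CHANGE COLUMN case).
      static SimpleEntry<String, Optional<String>> reorderedColumn(
          List<String> updated, List<String> old) {
        // 1. Record each column's position in the updated list.
        Map<String, Integer> nameToNewIndex = new HashMap<>();
        for (int i = 0; i < updated.size(); i++) {
          nameToNewIndex.put(updated.get(i), i);
        }
        // 2. The moved column is the one whose position changed the most.
        String moved = null;
        int maxDiff = 0;
        for (int oldIndex = 0; oldIndex < old.size(); oldIndex++) {
          Integer newIndex = nameToNewIndex.get(old.get(oldIndex));
          if (newIndex != null && Math.abs(newIndex - oldIndex) > maxDiff) {
            maxDiff = Math.abs(newIndex - oldIndex);
            moved = old.get(oldIndex);
          }
        }
        if (moved == null) {
          return null; // no reorder detected
        }
        // 3. Report the column it now directly follows (empty if it moved to the front).
        int newIndex = nameToNewIndex.get(moved);
        Optional<String> previous =
            newIndex > 0 ? Optional.of(updated.get(newIndex - 1)) : Optional.empty();
        return new SimpleEntry<>(moved, previous);
      }

      public static void main(String[] args) {
        // b was moved after d: (a, b, c, d) -> (a, c, d, b); prints b=Optional[d]
        System.out.println(reorderedColumn(List.of("a", "c", "d", "b"),
                                           List.of("a", "b", "c", "d")));
      }
    }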

[hive] branch master updated: HIVE-25701: Declare JDBC drivers as runtime & optional dependencies (Stamatis Zampetakis, reviewed by Zoltan Haindrich)

2021-11-17 Thread zabetak
This is an automated email from the ASF dual-hosted git repository.

zabetak pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 34158d8  HIVE-25701: Declare JDBC drivers as runtime & optional 
dependencies (Stamatis Zampetakis, reviewed by Zoltan Haindrich)
34158d8 is described below

commit 34158d8531725ab778a6c6112391370b44669bd8
Author: Stamatis Zampetakis 
AuthorDate: Mon Nov 15 14:13:43 2021 +0100

HIVE-25701: Declare JDBC drivers as runtime & optional dependencies 
(Stamatis Zampetakis, reviewed by Zoltan Haindrich)

1. Use the dependency management section to set the version, scope, and
optionality of each JDBC driver.
2. Gather declarations of JDBC drivers together.
3. Remove download-maven-plugin.
4. Add explicit dependencies to drivers when necessary.
5. Remove the redundant itest.jdbc.jars property and update the test
instructions.

Declaring drivers as runtime & optional dependencies has the following
advantages:
* Eliminates the risk of writing code that needs JDBC driver classes in
order to compile and could potentially violate AL2.
* Unifies the declaration of JDBC drivers, making it easier to add or
remove one when necessary.
* Removes the need for download-maven-plugin and similar workarounds to
avoid licensing problems.
* Simplifies the execution of tests using these drivers, since they are
now added to the runtime classpath automatically by Maven.
* Projects that depend on Hive will not inherit any JDBC driver by
default.

Closes #2790
---
 Jenkinsfile|  2 +-
 beeline/pom.xml|  2 -
 itests/qtest-iceberg/pom.xml   |  7 ---
 itests/qtest/pom.xml   | 34 ++--
 pom.xml| 52 +++---
 .../hadoop/hive/ql/lockmgr/ITestDbTxnManager.java  |  2 +-
 standalone-metastore/DEV-README| 24 +++--
 standalone-metastore/metastore-common/pom.xml  |  1 -
 standalone-metastore/metastore-server/pom.xml  | 30 ---
 standalone-metastore/pom.xml   | 62 +++---
 10 files changed, 112 insertions(+), 104 deletions(-)

diff --git a/Jenkinsfile b/Jenkinsfile
index b3127c9..f7ccca4 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -272,7 +272,7 @@ time docker rm -f dev_$dbType || true
   stage('verify') {
 try {
   sh """#!/bin/bash -e
-mvn verify -DskipITests=false -Dit.test=ITest${dbType.capitalize()} -Dtest=nosuch -pl standalone-metastore/metastore-server -Dmaven.test.failure.ignore -B -Ditest.jdbc.jars=`find /apps/lib/ -type f | paste -s -d:`
+mvn verify -DskipITests=false -Dit.test=ITest${dbType.capitalize()} -Dtest=nosuch -pl standalone-metastore/metastore-server -Dmaven.test.failure.ignore -B
 """
 } finally {
   junit '**/TEST-*.xml'
diff --git a/beeline/pom.xml b/beeline/pom.xml
index 368d865..5617b4e 100644
--- a/beeline/pom.xml
+++ b/beeline/pom.xml
@@ -193,8 +193,6 @@
 
   org.postgresql
   postgresql
-  ${postgres.version}
-  test
 
   
 
diff --git a/itests/qtest-iceberg/pom.xml b/itests/qtest-iceberg/pom.xml
index e70aab4..839c714 100644
--- a/itests/qtest-iceberg/pom.xml
+++ b/itests/qtest-iceberg/pom.xml
@@ -382,18 +382,12 @@
 slf4j-api
 
 
-
 org.mariadb.jdbc
 mariadb-java-client
-${mariadb.version}
-test
 
 
 org.postgresql
 postgresql
-${postgres.version}
-test
 
 
 
@@ -446,7 +440,6 @@
 
 
${test.conf.dir}
 
${basedir}/${hive.path.to.root}/conf
-
${itest.jdbc.jars}
 
 
 
diff --git a/itests/qtest/pom.xml b/itests/qtest/pom.xml
index f772015..b86fffa 100644
--- a/itests/qtest/pom.xml
+++ b/itests/qtest/pom.xml
@@ -404,31 +404,26 @@
   org.slf4j
   slf4j-api
 
+
+
+  com.microsoft.sqlserver
+  mssql-jdbc
+
 
-  
   org.mariadb.jdbc
   mariadb-java-client
-  ${mariadb.version}
-  test
 
 
   org.postgresql
   postgresql
-  ${postgres.version}
-  test
 
 
   mysql
   mysql-connector-java
-  ${mysql.version}
-  test
 
 
   com.oracle.database.jdbc
   ojdbc8
-  ${oracle.version}
-  test
 
   
   
@@ -490,23 +485,6 @@
   
 
   
-com.googlecode.maven-download-plugin
-download-maven-plugin
-1.3.0
-
-  
-generate-sources
-
-  wget
-
-
- 
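
A note on why HIVE-25701 can move every JDBC driver to runtime scope: code written
against the java.sql interfaces does not need a driver jar on the compile classpath,
and since JDBC 4 DriverManager discovers drivers through the ServiceLoader mechanism
at run time. A small illustration (the URL and credentials below are invented):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class RuntimeScopedDriverExample {
      public static void main(String[] args) throws Exception {
        // Compiles with no driver present; the PostgreSQL jar is only needed
        // on the classpath when this actually runs.
        String url = "jdbc:postgresql://localhost:5432/metastore_db"; // hypothetical
        try (Connection conn = DriverManager.getConnection(url, "hive", "hive");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT 1")) {
          while (rs.next()) {
            System.out.println(rs.getInt(1));
          }
        }
      }
    }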

[hive] branch master updated: HIVE-25692: ExceptionHandler may mask checked exceptions (#2782) (Zoltan Haindrich reviewed by Zhihua Deng and Krisztian Kasa)

2021-11-17 Thread kgyrtkirk
This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 8f2dc79  HIVE-25692: ExceptionHandler may mask checked exceptions 
(#2782) (Zoltan Haindrich reviewed by Zhihua Deng and Krisztian Kasa)
8f2dc79 is described below

commit 8f2dc79e7f9d6355234fd2694caa33cbb5d6733c
Author: Zoltan Haindrich 
AuthorDate: Wed Nov 17 09:58:38 2021 +0100

HIVE-25692: ExceptionHandler may mask checked exceptions (#2782) (Zoltan 
Haindrich reviewed by Zhihua Deng and Krisztian Kasa)
---
 .../hadoop/hive/metastore/ExceptionHandler.java| 33 --
 .../apache/hadoop/hive/metastore/HMSHandler.java   | 10 +--
 .../hive/metastore/TestExceptionHandler.java   |  2 --
 3 files changed, 31 insertions(+), 14 deletions(-)

diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ExceptionHandler.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ExceptionHandler.java
index 8bffa97..ffac608 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ExceptionHandler.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ExceptionHandler.java
@@ -44,8 +44,7 @@ public final class ExceptionHandler {
   /**
    * Throws if the input exception is the instance of the input class
    */
-  public <T extends Exception> ExceptionHandler
-  throwIfInstance(Class<T> t) throws T {
+  public <T extends Exception> ExceptionHandler throwIfInstance(Class<T> t) throws T {
     if (t.isInstance(e)) {
       throw t.cast(e);
     }
@@ -55,13 +54,29 @@
   /**
    * Throws if the input exception is the instance of the one in the input classes
    */
-  public <T extends Exception> ExceptionHandler
-  throwIfInstance(Class<T> ...te) throws T {
-    if (te != null) {
-      for (Class<T> t : te) {
-        throwIfInstance(t);
-      }
-    }
+  public <T1 extends Exception, T2 extends Exception>
+  ExceptionHandler throwIfInstance(
+      Class<T1> te1,
+      Class<T2> te2) throws T1, T2 {
+    throwIfInstance(te1);
+    throwIfInstance(te2);
+    return this;
+  }
+
+  /**
+   * Throws if the input exception is the instance of the one in the input classes
+   */
+  public <T1 extends Exception, T2 extends Exception, T3 extends Exception>
+  ExceptionHandler throwIfInstance(
+      Class<T1> te1,
+      Class<T2> te2,
+      Class<T3> te3) throws T1, T2, T3 {
+    throwIfInstance(te1);
+    throwIfInstance(te2);
+    throwIfInstance(te3);
     return this;
   }
 
diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
index c2b166b..a2211a4 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
@@ -3245,7 +3245,7 @@ public class HMSHandler extends FacebookBase implements 
IHMSHandler {
   }
 
   private void drop_table_with_environment_context(final String dbname, final 
String name, final boolean deleteData,
-  final EnvironmentContext envContext, boolean dropPartitions) throws 
MetaException {
+  final EnvironmentContext envContext, boolean dropPartitions) throws 
MetaException, NoSuchObjectException {
 String[] parsedDbName = parseDbName(dbname, conf);
 startTableFunction("drop_table", parsedDbName[CAT_NAME], 
parsedDbName[DB_NAME], name);
 
@@ -5190,8 +5190,12 @@ public class HMSHandler extends FacebookBase implements 
IHMSHandler {
 
 @Override
 public boolean equals(Object o) {
-  if (this == o) return true;
-  if (o == null || getClass() != o.getClass()) return false;
+  if (this == o) {
+return true;
+  }
+  if (o == null || getClass() != o.getClass()) {
+return false;
+  }
   PathAndDepth that = (PathAndDepth) o;
   return depth == that.depth && Objects.equals(path, that.path);
 }
diff --git 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestExceptionHandler.java
 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestExceptionHandler.java
index 9e79e40..435c085 100644
--- 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestExceptionHandler.java
+++ 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestExceptionHandler.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.metastore;
 
 import java.io.IOException;
-
 import org.junit.Test;
 
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -120,5 +119,4 @@ public class TestExceptionHandler {
   assertTrue(e.getMessage().equals(ix.getMessage()));
 }
   }
-
 }
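
Why the varargs method removed above could mask checked exceptions: a signature of the
form throwIfInstance(Class<T>... classes) throws T has a single type variable, so a
call passing two unrelated exception classes makes the compiler infer their common
supertype (usually plain Exception), and the specific checked exceptions vanish from
the caller's view. The overloads added in HIVE-25692 keep one type variable per
argument. A minimal standalone sketch of the same pattern (made-up class, not Hive's
ExceptionHandler):

    public class ThrowIfInstanceSketch {
      private final Exception e;

      ThrowIfInstanceSketch(Exception e) {
        this.e = e;
      }

      // Varargs version: one T for all arguments, so a call with IOException.class
      // and InterruptedException.class infers T = Exception and callers only see
      // "throws Exception".
      @SafeVarargs
      final <T extends Exception> ThrowIfInstanceSketch rethrowAnyOf(
          Class<? extends T>... classes) throws T {
        for (Class<? extends T> c : classes) {
          if (c.isInstance(e)) {
            throw c.cast(e);
          }
        }
        return this;
      }

      // Two-argument overload: both checked types stay visible in the throws clause.
      final <T1 extends Exception, T2 extends Exception> ThrowIfInstanceSketch rethrowAnyOf(
          Class<T1> te1, Class<T2> te2) throws T1, T2 {
        if (te1.isInstance(e)) {
          throw te1.cast(e);
        }
        if (te2.isInstance(e)) {
          throw te2.cast(e);
        }
        return this;
      }

      public static void main(String[] args) {
        try {
          new ThrowIfInstanceSketch(new java.io.IOException("boom"))
              .rethrowAnyOf(java.io.IOException.class, InterruptedException.class);
        } catch (java.io.IOException | InterruptedException ex) {
          // Both specific checked exceptions must be handled here.
          System.out.println("caught: " + ex);
        }
      }
    }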


[hive] branch master updated: disable unstable tests

2021-11-17 Thread kgyrtkirk
This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new dbec774  disable unstable tests
dbec774 is described below

commit dbec7744f04a9389e1bb168e85b5e8f73d357011
Author: Zoltan Haindrich 
AuthorDate: Wed Nov 17 08:02:17 2021 +

disable unstable tests
---
 contrib/src/test/queries/clientpositive/url_hook.q   | 1 +
 .../apache/hadoop/hive/llap/tezplugins/TestLlapTaskSchedulerService.java | 1 +
 2 files changed, 2 insertions(+)

diff --git a/contrib/src/test/queries/clientpositive/url_hook.q 
b/contrib/src/test/queries/clientpositive/url_hook.q
index b8f4c9f..cc95729 100644
--- a/contrib/src/test/queries/clientpositive/url_hook.q
+++ b/contrib/src/test/queries/clientpositive/url_hook.q
@@ -1,3 +1,4 @@
+--! qt:disabled:HIVE-25712
 --! qt:dataset:src
 add jar 
${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 SHOW TABLES 'src';
diff --git 
a/llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/TestLlapTaskSchedulerService.java
 
b/llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/TestLlapTaskSchedulerService.java
index 4c13c73..3a7e6d0 100644
--- 
a/llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/TestLlapTaskSchedulerService.java
+++ 
b/llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/TestLlapTaskSchedulerService.java
@@ -603,6 +603,7 @@ public class TestLlapTaskSchedulerService {
 
 
   @Test(timeout = 1)
+  @org.junit.Ignore("HIVE-25713")
   public void testPreemption() throws InterruptedException, IOException {
 
 Priority priority1 = Priority.newInstance(1);