[2/3] hive git commit: HIVE-11253. Move SearchArgument and VectorizedRowBatch classes to storage-api. (omalley reviewed by prasanthj)
http://git-wip-us.apache.org/repos/asf/hive/blob/9ae70cb4/serde/src/java/org/apache/hadoop/hive/ql/io/sarg/PredicateLeaf.java -- diff --git a/serde/src/java/org/apache/hadoop/hive/ql/io/sarg/PredicateLeaf.java b/serde/src/java/org/apache/hadoop/hive/ql/io/sarg/PredicateLeaf.java deleted file mode 100644 index 3a92565..000 --- a/serde/src/java/org/apache/hadoop/hive/ql/io/sarg/PredicateLeaf.java +++ /dev/null @@ -1,104 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.io.sarg; - -import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; - -import java.sql.Date; -import java.sql.Timestamp; -import java.util.List; - -/** - * The primitive predicates that form a SearchArgument. - */ -public interface PredicateLeaf { - - /** - * The possible operators for predicates. To get the opposites, construct - * an expression with a not operator. - */ - public static enum Operator { -EQUALS, -NULL_SAFE_EQUALS, -LESS_THAN, -LESS_THAN_EQUALS, -IN, -BETWEEN, -IS_NULL - } - - /** - * The possible types for sargs. 
- */ - public static enum Type { -INTEGER(Integer.class), // all of the integer types except long -LONG(Long.class), -FLOAT(Double.class), // float and double -STRING(String.class), // string, char, varchar -DATE(Date.class), -DECIMAL(HiveDecimalWritable.class), -TIMESTAMP(Timestamp.class), -BOOLEAN(Boolean.class); - -private final Class cls; -Type(Class cls) { - this.cls = cls; -} - -/** - * For all SARG leaves, the values must be the matching class. - * @return the value class - */ -public Class getValueClass() { - return cls; -} - } - - /** - * Get the operator for the leaf. - */ - public Operator getOperator(); - - /** - * Get the type of the column and literal by the file format. - */ - public Type getType(); - - /** - * Get the simple column name. - * @return the column name - */ - public String getColumnName(); - - /** - * Get the literal half of the predicate leaf. Adapt the original type for what orc needs - * - * @return an Integer, Long, Double, or String - */ - public Object getLiteral(); - - /** - * For operators with multiple literals (IN and BETWEEN), get the literals. - * - * @return the list of literals (Integer, Longs, Doubles, or Strings) - * - */ - public ListObject getLiteralList(); - -} http://git-wip-us.apache.org/repos/asf/hive/blob/9ae70cb4/serde/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgument.java -- diff --git a/serde/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgument.java b/serde/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgument.java deleted file mode 100644 index bc0d503..000 --- a/serde/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgument.java +++ /dev/null @@ -1,298 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.io.sarg; - -import java.util.List; - -/** - * Primary interface for a href=http://en.wikipedia.org/wiki/Sargable; - * SearchArgument/a, which are the subset of predicates - * that can be pushed down to the RecordReader. Each SearchArgument consists - * of a series of SearchClauses that must each be true for the row to be - *
[3/3] hive git commit: HIVE-11253. Move SearchArgument and VectorizedRowBatch classes to storage-api. (omalley reviewed by prasanthj)
HIVE-11253. Move SearchArgument and VectorizedRowBatch classes to storage-api. (omalley reviewed by prasanthj) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9ae70cb4 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9ae70cb4 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9ae70cb4 Branch: refs/heads/master Commit: 9ae70cb4d11dae6cea45c29b0e87dc5da1ec Parents: 70b56e3 Author: Owen O'Malley omal...@apache.org Authored: Wed Jul 29 09:36:08 2015 -0700 Committer: Owen O'Malley omal...@apache.org Committed: Wed Jul 29 09:36:08 2015 -0700 -- common/pom.xml | 5 + .../hadoop/hive/common/type/HiveDecimal.java| 312 - pom.xml | 1 + .../hive/ql/exec/vector/BytesColumnVector.java | 322 - .../hive/ql/exec/vector/ColumnVector.java | 174 - .../ql/exec/vector/DecimalColumnVector.java | 106 --- .../hive/ql/exec/vector/DoubleColumnVector.java | 143 .../hive/ql/exec/vector/LongColumnVector.java | 189 - .../hive/ql/exec/vector/VectorizedRowBatch.java | 186 - .../hive/ql/io/sarg/SearchArgumentFactory.java | 39 -- .../hive/ql/io/sarg/SearchArgumentImpl.java | 697 --- .../hive/ql/io/orc/TestInputOutputFormat.java | 17 +- .../hive/ql/io/sarg/TestSearchArgumentImpl.java | 3 +- .../hadoop/hive/ql/io/sarg/ExpressionTree.java | 157 - .../hadoop/hive/ql/io/sarg/PredicateLeaf.java | 104 --- .../hadoop/hive/ql/io/sarg/SearchArgument.java | 298 .../hive/serde2/io/HiveDecimalWritable.java | 174 - storage-api/pom.xml | 85 +++ .../hadoop/hive/common/type/HiveDecimal.java| 312 + .../hive/ql/exec/vector/BytesColumnVector.java | 322 + .../hive/ql/exec/vector/ColumnVector.java | 173 + .../ql/exec/vector/DecimalColumnVector.java | 106 +++ .../hive/ql/exec/vector/DoubleColumnVector.java | 143 .../hive/ql/exec/vector/LongColumnVector.java | 189 + .../hive/ql/exec/vector/VectorizedRowBatch.java | 186 + .../hadoop/hive/ql/io/sarg/ExpressionTree.java | 156 + .../hadoop/hive/ql/io/sarg/PredicateLeaf.java | 104 +++ 
.../hadoop/hive/ql/io/sarg/SearchArgument.java | 287 .../hive/ql/io/sarg/SearchArgumentFactory.java | 28 + .../hive/ql/io/sarg/SearchArgumentImpl.java | 687 ++ .../hive/serde2/io/HiveDecimalWritable.java | 174 + 31 files changed, 2974 insertions(+), 2905 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/9ae70cb4/common/pom.xml -- diff --git a/common/pom.xml b/common/pom.xml index aedf7ba..a7997e2 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -39,6 +39,11 @@ artifactIdhive-shims/artifactId version${project.version}/version /dependency +dependency + groupIdorg.apache.hive/groupId + artifactIdhive-storage-api/artifactId + version${project.version}/version +/dependency !-- inter-project -- dependency groupIdcommons-cli/groupId http://git-wip-us.apache.org/repos/asf/hive/blob/9ae70cb4/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java -- diff --git a/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java b/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java deleted file mode 100644 index 7d7fb28..000 --- a/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java +++ /dev/null @@ -1,312 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.common.type; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.math.RoundingMode; - -/** - * - * HiveDecimal. Simple wrapper for BigDecimal. Adds fixed max precision and non scientific string - * representation - * - */ -public class HiveDecimal implements
[1/3] hive git commit: HIVE-11253. Move SearchArgument and VectorizedRowBatch classes to storage-api. (omalley reviewed by prasanthj)
Repository: hive Updated Branches: refs/heads/master 70b56e372 - 9ae70cb4d http://git-wip-us.apache.org/repos/asf/hive/blob/9ae70cb4/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java -- diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java new file mode 100644 index 000..d27ac16 --- /dev/null +++ b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java @@ -0,0 +1,687 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * License); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.io.sarg; + +import java.sql.Timestamp; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Deque; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * The implementation of SearchArguments. 
+ */ +final class SearchArgumentImpl implements SearchArgument { + public static final Log LOG = LogFactory.getLog(SearchArgumentImpl.class); + + static final class PredicateLeafImpl implements PredicateLeaf { +private final Operator operator; +private final Type type; +private final String columnName; +private final Object literal; +private final ListObject literalList; + +// Used by kryo +@SuppressWarnings(unused) +PredicateLeafImpl() { + operator = null; + type = null; + columnName = null; + literal = null; + literalList = null; +} + +PredicateLeafImpl(Operator operator, + Type type, + String columnName, + Object literal, + ListObject literalList) { + this.operator = operator; + this.type = type; + this.columnName = columnName; + this.literal = literal; + if (literal != null) { +if (literal.getClass() != type.getValueClass()) { + throw new IllegalArgumentException(Wrong value class + + literal.getClass().getName() + for + type + . + operator + + leaf); +} + } + this.literalList = literalList; + if (literalList != null) { +Class valueCls = type.getValueClass(); +for(Object lit: literalList) { + if (lit != null lit.getClass() != valueCls) { +throw new IllegalArgumentException(Wrong value class item + +lit.getClass().getName() + for + type + . 
+ operator + + leaf); + } +} + } +} + +@Override +public Operator getOperator() { + return operator; +} + +@Override +public Type getType(){ + return type; +} + +@Override +public String getColumnName() { + return columnName; +} + +@Override +public Object getLiteral() { + // To get around a kryo 2.22 bug while deserialize a Timestamp into Date + // (https://github.com/EsotericSoftware/kryo/issues/88) + // When we see a Date, convert back into Timestamp + if (literal instanceof java.util.Date) { +return new Timestamp(((java.util.Date)literal).getTime()); + } + return literal; +} + +@Override +public ListObject getLiteralList() { + return literalList; +} + +@Override +public String toString() { + StringBuilder buffer = new StringBuilder(); + buffer.append('('); + buffer.append(operator); + buffer.append(' '); + buffer.append(columnName); + if (literal != null) { +buffer.append(' '); +buffer.append(literal); + } else if (literalList != null) { +for(Object lit: literalList) { + buffer.append(' '); + buffer.append(lit == null ? null : lit.toString()); +} + } + buffer.append(')'); + return buffer.toString(); +} + +private static boolean isEqual(Object left, Object right) { + + return left == right || + (left != null right != null
hive git commit: HIVE-11330: Add early termination for recursion in StatsRulesProcFactory.evaluateExpression (Prasanth Jayachandran, reviewed by Hari Subramaniyan)
Repository: hive Updated Branches: refs/heads/branch-1 5105bbd46 -> d5f81a423 HIVE-11330: Add early termination for recursion in StatsRulesProcFactory.evaluateExpression (Prasanth Jayachandran, reviewed by Hari Subramaniyan) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d5f81a42 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d5f81a42 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d5f81a42 Branch: refs/heads/branch-1 Commit: d5f81a423d33710df61a3f573c7cdbacce4bffd9 Parents: 5105bbd Author: Hari Subramaniyan harisan...@apache.org Authored: Wed Jul 29 14:57:31 2015 -0700 Committer: Hari Subramaniyan harisan...@apache.org Committed: Wed Jul 29 14:59:07 2015 -0700 -- .../hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java | 3 +++ 1 file changed, 3 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hive/blob/d5f81a42/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java index 376d42c..1663b88 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java @@ -301,6 +301,9 @@ public class StatsRulesProcFactory { long newNumRows = 0; Statistics andStats = null; + if (stats.getNumRows() <= 1 || stats.getDataSize() <= 0) +return 1; + if (pred instanceof ExprNodeGenericFuncDesc) { ExprNodeGenericFuncDesc genFunc = (ExprNodeGenericFuncDesc) pred; GenericUDF udf = genFunc.getGenericUDF();
hive git commit: HIVE-11296 - Merge from master to spark branch [Spark Branch] (Chao Sun, reviewed by Xuefu Zhang)
Repository: hive Updated Branches: refs/heads/spark fb7ba8bde - 89736c8b5 HIVE-11296 - Merge from master to spark branch [Spark Branch] (Chao Sun, reviewed by Xuefu Zhang) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/89736c8b Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/89736c8b Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/89736c8b Branch: refs/heads/spark Commit: 89736c8b50bb7ec90acd3bb3c1baa5d3d08cee58 Parents: fb7ba8b Author: Chao Sun sunc...@apache.org Authored: Wed Jul 29 15:21:29 2015 -0700 Committer: Chao Sun sunc...@apache.org Committed: Wed Jul 29 15:21:29 2015 -0700 -- .../spark/dynamic_rdd_cache.q.out | 48 .../clientpositive/spark/load_dyn_part14.q.out | 30 +--- .../spark/vector_count_distinct.q.out | 1 + 3 files changed, 31 insertions(+), 48 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/89736c8b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out -- diff --git a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out index 61563e4..7045855 100644 --- a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out +++ b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out @@ -859,19 +859,23 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE -Filter Operator - predicate: (CASE (_col5) WHEN (0) THEN (0) ELSE ((_col4 / _col5)) END 1) (type: boolean) +Select Operator + expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: double), _col5 (type: double) + outputColumnNames: _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE - Select Operator -expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: double), CASE 
(_col5) WHEN (0) THEN (null) ELSE ((_col4 / _col5)) END (type: double) -outputColumnNames: _col1, _col2, _col3, _col5, _col6 + Filter Operator +predicate: (CASE (_col5) WHEN (0) THEN (0) ELSE ((_col4 / _col5)) END 1) (type: boolean) Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE -Reduce Output Operator - key expressions: _col2 (type: int), _col1 (type: int) - sort order: ++ - Map-reduce partition columns: _col2 (type: int), _col1 (type: int) +Select Operator + expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: double), CASE (_col5) WHEN (0) THEN (null) ELSE ((_col4 / _col5)) END (type: double) + outputColumnNames: _col1, _col2, _col3, _col5, _col6 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE - value expressions: _col3 (type: int), _col5 (type: double), _col6 (type: double) + Reduce Output Operator +key expressions: _col2 (type: int), _col1 (type: int) +sort order: ++ +Map-reduce partition columns: _col2 (type: int), _col1 (type: int) +Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE +value expressions: _col3 (type: int), _col5 (type: double), _col6 (type: double) Reducer 4 Reduce Operator Tree: Join Operator @@ -909,19 +913,23 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE -Filter Operator - predicate: (CASE (_col5) WHEN (0) THEN (0) ELSE ((_col4 / _col5)) END 1) (type: boolean) +Select Operator + expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: double), _col5 (type: double) + outputColumnNames: _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE - Select Operator -expressions: _col1 (type: int), _col2 (type: int),
hive git commit: HIVE-11143: Tests udf_from_utc_timestamp.q/udf_to_utc_timestamp.q do not work with updated Java timezone information (Jason Dere, reviewed by Alex Pivovarov)
Repository: hive Updated Branches: refs/heads/branch-1 ff22441db - 3cc23a616 HIVE-11143: Tests udf_from_utc_timestamp.q/udf_to_utc_timestamp.q do not work with updated Java timezone information (Jason Dere, reviewed by Alex Pivovarov) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3cc23a61 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3cc23a61 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3cc23a61 Branch: refs/heads/branch-1 Commit: 3cc23a616ca531fb8942ad25fb584e4170e89812 Parents: ff22441 Author: Jason Dere jd...@hortonworks.com Authored: Wed Jul 29 14:26:16 2015 -0700 Committer: Jason Dere jd...@hortonworks.com Committed: Wed Jul 29 14:27:31 2015 -0700 -- .../clientpositive/udf_from_utc_timestamp.q | 30 - .../clientpositive/udf_to_utc_timestamp.q | 30 - .../clientpositive/udf_from_utc_timestamp.q.out | 66 ++-- .../clientpositive/udf_to_utc_timestamp.q.out | 66 ++-- 4 files changed, 96 insertions(+), 96 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/3cc23a61/ql/src/test/queries/clientpositive/udf_from_utc_timestamp.q -- diff --git a/ql/src/test/queries/clientpositive/udf_from_utc_timestamp.q b/ql/src/test/queries/clientpositive/udf_from_utc_timestamp.q index de98507..b113eec 100644 --- a/ql/src/test/queries/clientpositive/udf_from_utc_timestamp.q +++ b/ql/src/test/queries/clientpositive/udf_from_utc_timestamp.q @@ -1,24 +1,24 @@ DESCRIBE FUNCTION from_utc_timestamp; DESC FUNCTION EXTENDED from_utc_timestamp; -explain select from_utc_timestamp('2015-02-11 10:30:00', 'PST'); +explain select from_utc_timestamp('2012-02-11 10:30:00', 'PST'); select -from_utc_timestamp('2015-02-11 04:30:00', 'PST'), -from_utc_timestamp('2015-02-11 04:30:00', 'Europe/Moscow'), -from_utc_timestamp('2015-02-11 04:30:00', 'GMT+8'), -from_utc_timestamp('2015-02-11 04:30:00', 'GMT'), -from_utc_timestamp('2015-02-11 04:30:00', ''), -from_utc_timestamp('2015-02-11 04:30:00', '---'), 
+from_utc_timestamp('2012-02-11 04:30:00', 'PST'), +from_utc_timestamp('2012-02-11 04:30:00', 'Europe/Moscow'), +from_utc_timestamp('2012-02-11 04:30:00', 'GMT+8'), +from_utc_timestamp('2012-02-11 04:30:00', 'GMT'), +from_utc_timestamp('2012-02-11 04:30:00', ''), +from_utc_timestamp('2012-02-11 04:30:00', '---'), from_utc_timestamp(cast(null as string), 'PST'), -from_utc_timestamp('2015-02-11 04:30:00', cast(null as string)); +from_utc_timestamp('2012-02-11 04:30:00', cast(null as string)); select -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), 'PST'), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), 'Europe/Moscow'), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), 'GMT+8'), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), 'GMT'), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), ''), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), '---'), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), 'PST'), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), 'Europe/Moscow'), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), 'GMT+8'), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), 'GMT'), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), ''), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), '---'), from_utc_timestamp(cast(null as timestamp), 'PST'), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), cast(null as string)); \ No newline at end of file +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), cast(null as string)); http://git-wip-us.apache.org/repos/asf/hive/blob/3cc23a61/ql/src/test/queries/clientpositive/udf_to_utc_timestamp.q -- diff --git a/ql/src/test/queries/clientpositive/udf_to_utc_timestamp.q b/ql/src/test/queries/clientpositive/udf_to_utc_timestamp.q index fe0b647..cca6d7d 100644 --- a/ql/src/test/queries/clientpositive/udf_to_utc_timestamp.q +++ 
b/ql/src/test/queries/clientpositive/udf_to_utc_timestamp.q @@ -1,24 +1,24 @@ DESCRIBE FUNCTION to_utc_timestamp; DESC FUNCTION EXTENDED to_utc_timestamp; -explain select to_utc_timestamp('2015-02-11 10:30:00', 'PST'); +explain select to_utc_timestamp('2012-02-11 10:30:00', 'PST'); select -to_utc_timestamp('2015-02-10 20:30:00', 'PST'), -to_utc_timestamp('2015-02-11 08:30:00', 'Europe/Moscow'), -to_utc_timestamp('2015-02-11 12:30:00', 'GMT+8'), -to_utc_timestamp('2015-02-11 04:30:00', 'GMT'), -to_utc_timestamp('2015-02-11 04:30:00', ''), -to_utc_timestamp('2015-02-11 04:30:00', '---'), +to_utc_timestamp('2012-02-10 20:30:00', 'PST'), +to_utc_timestamp('2012-02-11
hive git commit: HIVE-11404: branch-1 does not compile (Jason Dere)
Repository: hive Updated Branches: refs/heads/branch-1 3cc23a616 -> 5105bbd46 HIVE-11404: branch-1 does not compile (Jason Dere) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5105bbd4 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5105bbd4 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5105bbd4 Branch: refs/heads/branch-1 Commit: 5105bbd46e5ed27a9ceb54b394d53bf57972aedb Parents: 3cc23a6 Author: Jason Dere jd...@hortonworks.com Authored: Wed Jul 29 14:49:34 2015 -0700 Committer: Jason Dere jd...@hortonworks.com Committed: Wed Jul 29 14:49:34 2015 -0700 -- .../org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java| 1 + 1 file changed, 1 insertion(+) -- http://git-wip-us.apache.org/repos/asf/hive/blob/5105bbd4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java index 8b20452..d252e6e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorFactory; import org.apache.hadoop.hive.ql.exec.RowSchema; import org.apache.hadoop.hive.ql.exec.SelectOperator; +import org.apache.hadoop.hive.ql.exec.UnionOperator; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.parse.ParseContext;
hive git commit: HIVE-11143: Tests udf_from_utc_timestamp.q/udf_to_utc_timestamp.q do not work with updated Java timezone information (Jason Dere, reviewed by Alex Pivovarov)
Repository: hive Updated Branches: refs/heads/master 9ae70cb4d - f312d17ef HIVE-11143: Tests udf_from_utc_timestamp.q/udf_to_utc_timestamp.q do not work with updated Java timezone information (Jason Dere, reviewed by Alex Pivovarov) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f312d17e Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f312d17e Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f312d17e Branch: refs/heads/master Commit: f312d17ef19d79a3b67792a67afd8435f90681ef Parents: 9ae70cb Author: Jason Dere jd...@hortonworks.com Authored: Wed Jul 29 14:26:16 2015 -0700 Committer: Jason Dere jd...@hortonworks.com Committed: Wed Jul 29 14:26:16 2015 -0700 -- .../clientpositive/udf_from_utc_timestamp.q | 30 - .../clientpositive/udf_to_utc_timestamp.q | 30 - .../clientpositive/udf_from_utc_timestamp.q.out | 66 ++-- .../clientpositive/udf_to_utc_timestamp.q.out | 66 ++-- 4 files changed, 96 insertions(+), 96 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/f312d17e/ql/src/test/queries/clientpositive/udf_from_utc_timestamp.q -- diff --git a/ql/src/test/queries/clientpositive/udf_from_utc_timestamp.q b/ql/src/test/queries/clientpositive/udf_from_utc_timestamp.q index de98507..b113eec 100644 --- a/ql/src/test/queries/clientpositive/udf_from_utc_timestamp.q +++ b/ql/src/test/queries/clientpositive/udf_from_utc_timestamp.q @@ -1,24 +1,24 @@ DESCRIBE FUNCTION from_utc_timestamp; DESC FUNCTION EXTENDED from_utc_timestamp; -explain select from_utc_timestamp('2015-02-11 10:30:00', 'PST'); +explain select from_utc_timestamp('2012-02-11 10:30:00', 'PST'); select -from_utc_timestamp('2015-02-11 04:30:00', 'PST'), -from_utc_timestamp('2015-02-11 04:30:00', 'Europe/Moscow'), -from_utc_timestamp('2015-02-11 04:30:00', 'GMT+8'), -from_utc_timestamp('2015-02-11 04:30:00', 'GMT'), -from_utc_timestamp('2015-02-11 04:30:00', ''), -from_utc_timestamp('2015-02-11 04:30:00', '---'), 
+from_utc_timestamp('2012-02-11 04:30:00', 'PST'), +from_utc_timestamp('2012-02-11 04:30:00', 'Europe/Moscow'), +from_utc_timestamp('2012-02-11 04:30:00', 'GMT+8'), +from_utc_timestamp('2012-02-11 04:30:00', 'GMT'), +from_utc_timestamp('2012-02-11 04:30:00', ''), +from_utc_timestamp('2012-02-11 04:30:00', '---'), from_utc_timestamp(cast(null as string), 'PST'), -from_utc_timestamp('2015-02-11 04:30:00', cast(null as string)); +from_utc_timestamp('2012-02-11 04:30:00', cast(null as string)); select -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), 'PST'), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), 'Europe/Moscow'), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), 'GMT+8'), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), 'GMT'), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), ''), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), '---'), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), 'PST'), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), 'Europe/Moscow'), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), 'GMT+8'), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), 'GMT'), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), ''), +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), '---'), from_utc_timestamp(cast(null as timestamp), 'PST'), -from_utc_timestamp(cast('2015-02-11 04:30:00' as timestamp), cast(null as string)); \ No newline at end of file +from_utc_timestamp(cast('2012-02-11 04:30:00' as timestamp), cast(null as string)); http://git-wip-us.apache.org/repos/asf/hive/blob/f312d17e/ql/src/test/queries/clientpositive/udf_to_utc_timestamp.q -- diff --git a/ql/src/test/queries/clientpositive/udf_to_utc_timestamp.q b/ql/src/test/queries/clientpositive/udf_to_utc_timestamp.q index fe0b647..cca6d7d 100644 --- a/ql/src/test/queries/clientpositive/udf_to_utc_timestamp.q +++ 
b/ql/src/test/queries/clientpositive/udf_to_utc_timestamp.q @@ -1,24 +1,24 @@ DESCRIBE FUNCTION to_utc_timestamp; DESC FUNCTION EXTENDED to_utc_timestamp; -explain select to_utc_timestamp('2015-02-11 10:30:00', 'PST'); +explain select to_utc_timestamp('2012-02-11 10:30:00', 'PST'); select -to_utc_timestamp('2015-02-10 20:30:00', 'PST'), -to_utc_timestamp('2015-02-11 08:30:00', 'Europe/Moscow'), -to_utc_timestamp('2015-02-11 12:30:00', 'GMT+8'), -to_utc_timestamp('2015-02-11 04:30:00', 'GMT'), -to_utc_timestamp('2015-02-11 04:30:00', ''), -to_utc_timestamp('2015-02-11 04:30:00', '---'), +to_utc_timestamp('2012-02-10 20:30:00', 'PST'), +to_utc_timestamp('2012-02-11
hive git commit: HIVE-11330: Add early termination for recursion in StatsRulesProcFactory.evaluateExpression (Prasanth Jayachandran, reviewed by Hari Subramaniyan)
Repository: hive Updated Branches: refs/heads/master f312d17ef -> 4ee17e53c HIVE-11330: Add early termination for recursion in StatsRulesProcFactory.evaluateExpression (Prasanth Jayachandran, reviewed by Hari Subramaniyan) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4ee17e53 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4ee17e53 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4ee17e53 Branch: refs/heads/master Commit: 4ee17e53c194501d8eaca07c3039379b56d86d26 Parents: f312d17 Author: Hari Subramaniyan harisan...@apache.org Authored: Wed Jul 29 14:57:31 2015 -0700 Committer: Hari Subramaniyan harisan...@apache.org Committed: Wed Jul 29 14:57:31 2015 -0700 -- .../hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java | 3 +++ 1 file changed, 3 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hive/blob/4ee17e53/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java index 376d42c..1663b88 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java @@ -301,6 +301,9 @@ public class StatsRulesProcFactory { long newNumRows = 0; Statistics andStats = null; + if (stats.getNumRows() <= 1 || stats.getDataSize() <= 0) +return 1; + if (pred instanceof ExprNodeGenericFuncDesc) { ExprNodeGenericFuncDesc genFunc = (ExprNodeGenericFuncDesc) pred; GenericUDF udf = genFunc.getGenericUDF();
hive git commit: HIVE-11214: Insert into ACID table switches vectorization off (Matt McCline, reviewed by Eugene Koifman)
Repository: hive Updated Branches: refs/heads/master 4ee17e53c - 0ec5185a2 HIVE-11214: Insert into ACID table switches vectorization off (Matt McCline, reviewed by Eugene Koifman) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0ec5185a Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0ec5185a Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0ec5185a Branch: refs/heads/master Commit: 0ec5185a28177a04cf4f59a38b7c93da3b6e4565 Parents: 4ee17e5 Author: Matt McCline mmccl...@hortonworks.com Authored: Wed Jul 29 15:03:20 2015 -0700 Committer: Matt McCline mmccl...@hortonworks.com Committed: Wed Jul 29 15:03:52 2015 -0700 -- .../test/resources/testconfiguration.properties | 1 + .../hadoop/hive/ql/parse/SemanticAnalyzer.java | 4 --- .../test/queries/clientpositive/vector_acid3.q | 17 +++ .../clientpositive/tez/vector_acid3.q.out | 31 .../results/clientpositive/vector_acid3.q.out | 31 5 files changed, 80 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/0ec5185a/itests/src/test/resources/testconfiguration.properties -- diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties index fbde465..aabf6fc 100644 --- a/itests/src/test/resources/testconfiguration.properties +++ b/itests/src/test/resources/testconfiguration.properties @@ -186,6 +186,7 @@ minitez.query.files.shared=alter_merge_2_orc.q,\ update_where_non_partitioned.q,\ update_where_partitioned.q,\ update_two_cols.q,\ + vector_acid3.q,\ vector_aggregate_9.q,\ vector_between_in.q,\ vector_binary_join_groupby.q,\ http://git-wip-us.apache.org/repos/asf/hive/blob/0ec5185a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index ad4efef..f05407d 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -6678,10 +6678,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { LOG.debug("Couldn't find table " + tableName + " in insertIntoTable"); throw new SemanticException(ErrorMsg.NO_INSERT_OVERWRITE_WITH_ACID.getMsg()); } -if (conf.getBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED)) { - LOG.info("Turning off vectorization for acid write operation"); - conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, false); -} LOG.info("Modifying config values for ACID write"); conf.setBoolVar(ConfVars.HIVEOPTREDUCEDEDUPLICATION, true); conf.setIntVar(ConfVars.HIVEOPTREDUCEDEDUPLICATIONMINREDUCER, 1); http://git-wip-us.apache.org/repos/asf/hive/blob/0ec5185a/ql/src/test/queries/clientpositive/vector_acid3.q -- diff --git a/ql/src/test/queries/clientpositive/vector_acid3.q b/ql/src/test/queries/clientpositive/vector_acid3.q new file mode 100644 index 000..d4313f4 --- /dev/null +++ b/ql/src/test/queries/clientpositive/vector_acid3.q @@ -0,0 +1,17 @@ +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.exec.dynamic.partition=true; +set hive.vectorized.execution.enabled=true; + +drop table if exists testacid1; + +create table testacid1(id int) clustered by (id) into 2 buckets stored as orc tblproperties("transactional"="true"); + +insert into table testacid1 values (1),(2),(3),(4); + +set hive.compute.query.using.stats=false; + +set hive.vectorized.execution.enabled; + +select count(1) from testacid1; http://git-wip-us.apache.org/repos/asf/hive/blob/0ec5185a/ql/src/test/results/clientpositive/tez/vector_acid3.q.out -- diff --git a/ql/src/test/results/clientpositive/tez/vector_acid3.q.out b/ql/src/test/results/clientpositive/tez/vector_acid3.q.out new file mode 100644 index 000..4299c73 --- /dev/null +++
b/ql/src/test/results/clientpositive/tez/vector_acid3.q.out @@ -0,0 +1,31 @@ +PREHOOK: query: drop table if exists testacid1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists testacid1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table testacid1(id int) clustered by (id) into 2 buckets stored as orc tblproperties(transactional=true) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@testacid1 +POSTHOOK: query: create table
hive git commit: HIVE-11390: CBO (Calcite Return Path): Fix table alias propagation for windowing (Jesus Camacho Rodriguez, reviewed by Pengcheng Xiong)
Repository: hive Updated Branches: refs/heads/master cd2b49970 - 70b56e372 HIVE-11390: CBO (Calcite Return Path): Fix table alias propagation for windowing (Jesus Camacho Rodriguez, reviewed by Pengcheng Xiong) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/70b56e37 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/70b56e37 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/70b56e37 Branch: refs/heads/master Commit: 70b56e372d1bf792a5b13c217e0004785a84a10f Parents: cd2b499 Author: Jesus Camacho Rodriguez jcama...@apache.org Authored: Tue Jul 28 20:22:26 2015 +0200 Committer: Jesus Camacho Rodriguez jcama...@apache.org Committed: Wed Jul 29 11:52:54 2015 +0200 -- .../hive/ql/optimizer/calcite/translator/HiveOpConverter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/70b56e37/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java index c54a601..169c351 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java @@ -605,7 +605,7 @@ public class HiveOpConverter { WindowingComponentizer groups = new WindowingComponentizer(wSpec); RowResolver rr = new RowResolver(); for (ColumnInfo ci : input.getSchema().getSignature()) { - rr.put(ci.getTabAlias(), ci.getInternalName(), ci); + rr.put(inputOpAf.tabAlias, ci.getInternalName(), ci); } while (groups.hasNext()) {
hive git commit: HIVE-11214: Insert into ACID table switches vectorization off (Matt McCline, reviewed by Eugene Koifman)
Repository: hive Updated Branches: refs/heads/branch-1 d5f81a423 - fb57bd134 HIVE-11214: Insert into ACID table switches vectorization off (Matt McCline, reviewed by Eugene Koifman) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fb57bd13 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fb57bd13 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fb57bd13 Branch: refs/heads/branch-1 Commit: fb57bd13452f676e603279dd75645526c17cc2e5 Parents: d5f81a4 Author: Matt McCline mmccl...@hortonworks.com Authored: Wed Jul 29 16:23:33 2015 -0700 Committer: Matt McCline mmccl...@hortonworks.com Committed: Wed Jul 29 16:23:33 2015 -0700 -- .../test/resources/testconfiguration.properties | 1 + .../hadoop/hive/ql/parse/SemanticAnalyzer.java | 4 --- .../test/queries/clientpositive/vector_acid3.q | 17 +++ .../clientpositive/tez/vector_acid3.q.out | 31 .../results/clientpositive/vector_acid3.q.out | 31 5 files changed, 80 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/fb57bd13/itests/src/test/resources/testconfiguration.properties -- diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties index 75c1a5c..98fd87b 100644 --- a/itests/src/test/resources/testconfiguration.properties +++ b/itests/src/test/resources/testconfiguration.properties @@ -186,6 +186,7 @@ minitez.query.files.shared=alter_merge_2_orc.q,\ update_where_non_partitioned.q,\ update_where_partitioned.q,\ update_two_cols.q,\ + vector_acid3.q,\ vector_aggregate_9.q,\ vector_between_in.q,\ vector_binary_join_groupby.q,\ http://git-wip-us.apache.org/repos/asf/hive/blob/fb57bd13/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 2ae6309..b4878bf 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -6690,10 +6690,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { LOG.debug("Couldn't find table " + tableName + " in insertIntoTable"); throw new SemanticException(ErrorMsg.NO_INSERT_OVERWRITE_WITH_ACID.getMsg()); } -if (conf.getBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED)) { - LOG.info("Turning off vectorization for acid write operation"); - conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, false); -} LOG.info("Modifying config values for ACID write"); conf.setBoolVar(ConfVars.HIVEOPTREDUCEDEDUPLICATION, true); conf.setIntVar(ConfVars.HIVEOPTREDUCEDEDUPLICATIONMINREDUCER, 1); http://git-wip-us.apache.org/repos/asf/hive/blob/fb57bd13/ql/src/test/queries/clientpositive/vector_acid3.q -- diff --git a/ql/src/test/queries/clientpositive/vector_acid3.q b/ql/src/test/queries/clientpositive/vector_acid3.q new file mode 100644 index 000..d4313f4 --- /dev/null +++ b/ql/src/test/queries/clientpositive/vector_acid3.q @@ -0,0 +1,17 @@ +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.exec.dynamic.partition=true; +set hive.vectorized.execution.enabled=true; + +drop table if exists testacid1; + +create table testacid1(id int) clustered by (id) into 2 buckets stored as orc tblproperties("transactional"="true"); + +insert into table testacid1 values (1),(2),(3),(4); + +set hive.compute.query.using.stats=false; + +set hive.vectorized.execution.enabled; + +select count(1) from testacid1; http://git-wip-us.apache.org/repos/asf/hive/blob/fb57bd13/ql/src/test/results/clientpositive/tez/vector_acid3.q.out -- diff --git a/ql/src/test/results/clientpositive/tez/vector_acid3.q.out b/ql/src/test/results/clientpositive/tez/vector_acid3.q.out new file mode 100644 index 000..4299c73 --- /dev/null +++
b/ql/src/test/results/clientpositive/tez/vector_acid3.q.out @@ -0,0 +1,31 @@ +PREHOOK: query: drop table if exists testacid1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists testacid1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table testacid1(id int) clustered by (id) into 2 buckets stored as orc tblproperties(transactional=true) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@testacid1 +POSTHOOK: query: create