This is an automated email from the ASF dual-hosted git repository.

zabetak pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit a537000af730e994a37b0f60b37007e3caf8f1c7
Author: mlorek <michal.lo...@gmail.com>
AuthorDate: Tue Jan 30 14:28:23 2024 +0000

    HIVE-28046: Use serdeConstants fields instead of string literals in hive-exec module (Michal Lorek reviewed by Stamatis Zampetakis)
    
    The procedure used to replace the literals with constants is outlined below:
    1. Generate a sed script file using serdeConstants.java for the possible replacements:
    grep "public static final java.lang.String" ./serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java | sed 's/.\+java\.lang\.String//' | sed 's/ //g' | sed 's/\(.*\)=\(.*\);/s#\2#serdeConstants.\1#/' | sed 's/\./\\./g' > sedrepscript
    
    2. Apply the script file to all Java files in the production code:
    find ql/src/java -name "*.java" -exec sed -i -f sedrepscript {} \;
    
    3. Manually review the changed files, adding imports where necessary and reverting irrelevant changes (an illustrative rule and rewrite are sketched below).
    
    The change also uses serdeConstants fields instead of string literals for 'columns' and 'columns.types'.
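    
    For illustration, applied to the declaration of STRING_TYPE_NAME in
    serdeConstants.java,
    
        public static final java.lang.String STRING_TYPE_NAME = "string";
    
    the four sed stages of step 1 emit the rule
    
        s#"string"#serdeConstants\.STRING_TYPE_NAME#
    
    which step 2 then applies, rewriting a call such as
    columnType.equalsIgnoreCase("string") into
    columnType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME).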
    
    Co-authored-by: Stamatis Zampetakis <zabe...@gmail.com>
    
    Closes apache/hive#5072
---
 .../hadoop/hive/ql/exec/ColumnStatsUpdateTask.java | 27 ++++---
 .../apache/hadoop/hive/ql/exec/DDLPlanUtils.java   |  3 +-
 .../ql/exec/vector/VectorExpressionDescriptor.java | 24 +++---
 .../hive/ql/exec/vector/VectorizationContext.java  | 86 +++++++++++-----------
 .../hive/ql/exec/vector/VectorizedBatchUtil.java   |  3 +-
 .../exec/vector/expressions/CastDateToBoolean.java |  3 +-
 .../vector/expressions/CastDateToTimestamp.java    |  3 +-
 .../vector/expressions/CastDoubleToTimestamp.java  |  3 +-
 .../vector/expressions/CastTimestampToBoolean.java |  3 +-
 .../vector/expressions/CastTimestampToDouble.java  |  3 +-
 .../vector/expressions/CastTimestampToLong.java    |  3 +-
 .../expressions/DateColSubtractDateColumn.java     |  5 +-
 .../expressions/DateColSubtractDateScalar.java     |  5 +-
 .../expressions/DateScalarSubtractDateColumn.java  |  5 +-
 .../IfExprDoubleColumnDoubleColumn.java            |  5 +-
 .../hive/ql/exec/vector/ptf/VectorPTFOperator.java | 11 +--
 .../org/apache/hadoop/hive/ql/io/IOConstants.java  |  2 +-
 .../hadoop/hive/ql/io/RCFileOutputFormat.java      |  3 +-
 .../org/apache/hadoop/hive/ql/io/orc/OrcSerde.java |  4 +-
 .../org/apache/hadoop/hive/ql/io/orc/OrcUnion.java |  5 +-
 .../parquet/convert/HiveCollectionConverter.java   |  4 +-
 .../hive/ql/io/parquet/serde/ParquetHiveSerDe.java |  2 +-
 .../hive/ql/io/sarg/ConvertAstToSearchArg.java     |  3 +-
 .../hive/ql/optimizer/SimpleFetchOptimizer.java    |  3 +-
 .../calcite/translator/SqlFunctionConverter.java   | 28 +++----
 .../hive/ql/optimizer/physical/Vectorizer.java     | 55 +++++++-------
 .../hadoop/hive/ql/parse/BaseSemanticAnalyzer.java |  4 +-
 .../hive/ql/parse/rewrite/MergeRewriter.java       |  3 +-
 .../hive/ql/parse/type/TypeCheckProcFactory.java   |  8 +-
 .../apache/hadoop/hive/ql/plan/PartitionDesc.java  |  2 +-
 .../org/apache/hadoop/hive/ql/plan/PlanUtils.java  |  9 ++-
 .../hadoop/hive/ql/processors/DfsProcessor.java    |  3 +-
 .../ql/processors/LlapCacheResourceProcessor.java  |  5 +-
 .../processors/LlapClusterResourceProcessor.java   | 13 ++--
 .../hive/ql/udf/esri/serde/BaseJsonSerDe.java      |  3 +-
 .../hive/ql/udf/generic/GenericUDFBetween.java     |  3 +-
 .../hive/ql/udf/ptf/ValueBoundaryScanner.java      | 25 ++++---
 37 files changed, 207 insertions(+), 172 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
index 8b6c8d6b1bd..c4ae676d7a9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.slf4j.Logger;
@@ -101,9 +102,11 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
 
     ColumnStatisticsData statsData = new ColumnStatisticsData();
 
-    if (columnType.equalsIgnoreCase("long") || 
columnType.equalsIgnoreCase("tinyint")
-        || columnType.equalsIgnoreCase("smallint") || 
columnType.equalsIgnoreCase("int")
-        || columnType.equalsIgnoreCase("bigint")) {
+    if (columnType.equalsIgnoreCase("long")
+        || columnType.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME)
+        || columnType.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME)
+        || columnType.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME)
+        || columnType.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME)) {
       LongColumnStatsDataInspector longStats = new LongColumnStatsDataInspector();
       longStats.setNumNullsIsSet(false);
       longStats.setNumDVsIsSet(false);
@@ -127,7 +130,8 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
       }
       statsData.setLongStats(longStats);
       statsObj.setStatsData(statsData);
-    } else if (columnType.equalsIgnoreCase("double") || 
columnType.equalsIgnoreCase("float")) {
+    } else if (columnType.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME)
+            || columnType.equalsIgnoreCase(serdeConstants.FLOAT_TYPE_NAME)) {
       DoubleColumnStatsDataInspector doubleStats = new DoubleColumnStatsDataInspector();
       doubleStats.setNumNullsIsSet(false);
       doubleStats.setNumDVsIsSet(false);
@@ -151,8 +155,9 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
       }
       statsData.setDoubleStats(doubleStats);
       statsObj.setStatsData(statsData);
-    } else if (columnType.equalsIgnoreCase("string") || 
columnType.toLowerCase().startsWith("char")
-              || columnType.toLowerCase().startsWith("varchar")) { 
//char(x),varchar(x) types
+    } else if (columnType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)
+            || 
columnType.toLowerCase().startsWith(serdeConstants.CHAR_TYPE_NAME)
+            || 
columnType.toLowerCase().startsWith(serdeConstants.VARCHAR_TYPE_NAME)) { 
//char(x),varchar(x) types
       StringColumnStatsDataInspector stringStats = new 
StringColumnStatsDataInspector();
       stringStats.setMaxColLenIsSet(false);
       stringStats.setAvgColLenIsSet(false);
@@ -176,7 +181,7 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
       }
       statsData.setStringStats(stringStats);
       statsObj.setStatsData(statsData);
-    } else if (columnType.equalsIgnoreCase("boolean")) {
+    } else if (columnType.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME)) {
       BooleanColumnStatsData booleanStats = new BooleanColumnStatsData();
       booleanStats.setNumNullsIsSet(false);
       booleanStats.setNumTruesIsSet(false);
@@ -197,7 +202,7 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
       }
       statsData.setBooleanStats(booleanStats);
       statsObj.setStatsData(statsData);
-    } else if (columnType.equalsIgnoreCase("binary")) {
+    } else if (columnType.equalsIgnoreCase(serdeConstants.BINARY_TYPE_NAME)) {
       BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
       binaryStats.setNumNullsIsSet(false);
       binaryStats.setAvgColLenIsSet(false);
@@ -218,7 +223,7 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
       }
       statsData.setBinaryStats(binaryStats);
       statsObj.setStatsData(statsData);
-    } else if (columnType.toLowerCase().startsWith("decimal")) { 
//decimal(a,b) type
+    } else if 
(columnType.toLowerCase().startsWith(serdeConstants.DECIMAL_TYPE_NAME)) { 
//decimal(a,b) type
       DecimalColumnStatsDataInspector decimalStats = new 
DecimalColumnStatsDataInspector();
       decimalStats.setNumNullsIsSet(false);
       decimalStats.setNumDVsIsSet(false);
@@ -246,7 +251,7 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
       }
       statsData.setDecimalStats(decimalStats);
       statsObj.setStatsData(statsData);
-    } else if (columnType.equalsIgnoreCase("date")) {
+    } else if (columnType.equalsIgnoreCase(serdeConstants.DATE_TYPE_NAME)) {
       DateColumnStatsDataInspector dateStats = new DateColumnStatsDataInspector();
       Map<String, String> mapProp = work.getMapProp();
       for (Entry<String, String> entry : mapProp.entrySet()) {
@@ -268,7 +273,7 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
       }
       statsData.setDateStats(dateStats);
       statsObj.setStatsData(statsData);
-    } else if (columnType.equalsIgnoreCase("timestamp")) {
+    } else if (columnType.equalsIgnoreCase(serdeConstants.TIMESTAMP_TYPE_NAME)) {
       TimestampColumnStatsDataInspector timestampStats = new TimestampColumnStatsDataInspector();
       Map<String, String> mapProp = work.getMapProp();
       for (Entry<String, String> entry : mapProp.entrySet()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java
index 9e226c24e92..d3163bee95b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java
@@ -89,6 +89,7 @@ import java.util.stream.Collectors;
 
 import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
 import static org.apache.hadoop.hive.ql.metadata.HiveUtils.unparseIdentifier;
+import static org.apache.hadoop.hive.serde.serdeConstants.UNION_TYPE_NAME;
 
 public class DDLPlanUtils {
   private static final String EXTERNAL = "external";
@@ -964,7 +965,7 @@ public class DDLPlanUtils {
           String unionElementType = formatType(unionElementTypeInfo);
           unionFormattedType.append(unionElementType);
         }
-        return "uniontype<" + unionFormattedType.toString() + ">";
+        return UNION_TYPE_NAME + "<" + unionFormattedType.toString() + ">";
       default:
         throw new RuntimeException("Unknown type: " + typeInfo.getCategory());
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
index ff8f31ae90b..726d562480d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
@@ -105,28 +105,28 @@ public class VectorExpressionDescriptor {
 
     public static ArgumentType fromHiveTypeName(String hiveTypeName) {
       String lower = hiveTypeName.toLowerCase();
-      if (lower.equals("tinyint") ||
-          lower.equals("smallint") ||
-          lower.equals("int") ||
-          lower.equals("bigint") ||
-          lower.equals("boolean") ||
+      if (lower.equals(serdeConstants.TINYINT_TYPE_NAME) ||
+          lower.equals(serdeConstants.SMALLINT_TYPE_NAME) ||
+          lower.equals(serdeConstants.INT_TYPE_NAME) ||
+          lower.equals(serdeConstants.BIGINT_TYPE_NAME) ||
+          lower.equals(serdeConstants.BOOLEAN_TYPE_NAME) ||
           lower.equals("long")) {
         return INT_FAMILY;
-      } else if (lower.equals("double") || lower.equals("float")) {
+      } else if (lower.equals(serdeConstants.DOUBLE_TYPE_NAME) || lower.equals(serdeConstants.FLOAT_TYPE_NAME)) {
         return FLOAT_FAMILY;
-      } else if (lower.equals("string")) {
+      } else if (lower.equals(serdeConstants.STRING_TYPE_NAME)) {
         return STRING;
       } else if (VectorizationContext.charTypePattern.matcher(lower).matches()) {
         return CHAR;
       } else if (VectorizationContext.varcharTypePattern.matcher(lower).matches()) {
         return VARCHAR;
-      } else if (lower.equals("binary")) {
+      } else if (lower.equals(serdeConstants.BINARY_TYPE_NAME)) {
         return BINARY;
       } else if (VectorizationContext.decimalTypePattern.matcher(lower).matches()) {
         return DECIMAL;
-      } else if (lower.equals("timestamp")) {
+      } else if (lower.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
         return TIMESTAMP;
-      } else if (lower.equals("date")) {
+      } else if (lower.equals(serdeConstants.DATE_TYPE_NAME)) {
         return DATE;
       } else if (lower.equals(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME)) {
         return INTERVAL_YEAR_MONTH;
@@ -138,7 +138,7 @@ public class VectorExpressionDescriptor {
         return LIST;
       } else if (VectorizationContext.mapTypePattern.matcher(lower).matches()) {
         return MAP;
-      } else if (lower.equals("void")) {
+      } else if (lower.equals(serdeConstants.VOID_TYPE_NAME)) {
         return VOID;
       } else {
         return NONE;
@@ -149,7 +149,7 @@ public class VectorExpressionDescriptor {
       if (inType.equalsIgnoreCase("long")) {
         // A synonym in some places in the code...
         return INT_FAMILY;
-      } else if (inType.equalsIgnoreCase("double")) {
+      } else if (inType.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME)) {
         // A synonym in some places in the code...
         return FLOAT_FAMILY;
       } else if (VectorizationContext.decimalTypePattern.matcher(inType).matches()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 42a0f2ea609..a3a24aa9029 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -512,9 +512,9 @@ import com.google.common.annotations.VisibleForTesting;
       typeName = VectorizationContext.mapTypeNameSynonyms(typeName);
 
       // Make CHAR and VARCHAR type info parsable.
-      if (typeName.equals("char")) {
+      if (typeName.equals(serdeConstants.CHAR_TYPE_NAME)) {
         typeName = "char(" + HiveChar.MAX_CHAR_LENGTH + ")";
-      } else if (typeName.equals("varchar")) {
+      } else if (typeName.equals(serdeConstants.VARCHAR_TYPE_NAME)) {
         typeName = "varchar(" + HiveVarchar.MAX_VARCHAR_LENGTH + ")";
       }
 
@@ -1475,9 +1475,9 @@ import com.google.common.annotations.VisibleForTesting;
       if ((gudf instanceof GenericUDFToString
                    || gudf instanceof GenericUDFToChar
                    || gudf instanceof GenericUDFToVarchar) &&
-               (arg0Type(expr).equals("timestamp")
-                   || arg0Type(expr).equals("double")
-                   || arg0Type(expr).equals("float"))) {
+               (arg0Type(expr).equals(serdeConstants.TIMESTAMP_TYPE_NAME)
+                   || arg0Type(expr).equals(serdeConstants.DOUBLE_TYPE_NAME)
+                   || arg0Type(expr).equals(serdeConstants.FLOAT_TYPE_NAME))) {
       return true;
     } else if (gudf instanceof GenericUDFBetween && (mode == VectorExpressionDescriptor.Mode.PROJECTION)) {
       return true;
@@ -1623,7 +1623,7 @@ import com.google.common.annotations.VisibleForTesting;
     }
 
     // Boolean is special case.
-    if (typeName.equalsIgnoreCase("boolean")) {
+    if (typeName.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME)) {
       if (mode == VectorExpressionDescriptor.Mode.FILTER) {
         if ((Boolean) constantValue) {
           return new FilterConstantBooleanVectorExpression(1);
@@ -3248,7 +3248,7 @@ import com.google.common.annotations.VisibleForTesting;
          isFloatFamily(inputType) ||
          decimalTypePattern.matcher(inputType).matches() ||
          isStringFamily(inputType) ||
-         inputType.equals("timestamp")) {
+         inputType.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
       return returnDecimalType;
     }
     return null;
@@ -3289,10 +3289,10 @@ import com.google.common.annotations.VisibleForTesting;
       }
       return createVectorExpression(CastLongToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
-    } else if (inputType.equals("float")) {
+    } else if (inputType.equals(serdeConstants.FLOAT_TYPE_NAME)) {
       return createVectorExpression(CastFloatToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
-    } else if (inputType.equals("double")) {
+    } else if (inputType.equals(serdeConstants.DOUBLE_TYPE_NAME)) {
       return createVectorExpression(CastDoubleToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
     } else if (decimalTypePattern.matcher(inputType).matches()) {
@@ -3329,7 +3329,7 @@ import com.google.common.annotations.VisibleForTesting;
               DataTypePhysicalVariation.NONE;
       return createVectorExpression(CastStringToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, dataTypePhysicalVariation);
-    } else if (inputType.equals("timestamp")) {
+    } else if (inputType.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
       return createVectorExpression(CastTimestampToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
     }
@@ -3353,17 +3353,17 @@ import com.google.common.annotations.VisibleForTesting;
           return null;
         }
     }
-    if (inputType.equals("boolean")) {
+    if (inputType.equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
       // Boolean must come before the integer family. It's a special case.
       return createVectorExpression(CastBooleanToStringViaLongToString.class, childExpr,
           VectorExpressionDescriptor.Mode.PROJECTION, returnType, DataTypePhysicalVariation.NONE);
     } else if (isIntFamily(inputType)) {
       return createVectorExpression(CastLongToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
-    } else if (inputType.equals("float")) {
+    } else if (inputType.equals(serdeConstants.FLOAT_TYPE_NAME)) {
       return createVectorExpression(CastFloatToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
-    } else if (inputType.equals("double")) {
+    } else if (inputType.equals(serdeConstants.DOUBLE_TYPE_NAME)) {
       return createVectorExpression(CastDoubleToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
     } else if (isDecimalFamily(inputType)) {
@@ -3394,17 +3394,17 @@ import com.google.common.annotations.VisibleForTesting;
       // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
       return null;
     }
-    if (inputType.equals("boolean")) {
+    if (inputType.equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
       // Boolean must come before the integer family. It's a special case.
       return createVectorExpression(CastBooleanToCharViaLongToChar.class, childExpr,
           VectorExpressionDescriptor.Mode.PROJECTION, returnType, DataTypePhysicalVariation.NONE);
     } else if (isIntFamily(inputType)) {
       return createVectorExpression(CastLongToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
-    } else if (inputType.equals("float")) {
+    } else if (inputType.equals(serdeConstants.FLOAT_TYPE_NAME)) {
       return createVectorExpression(CastFloatToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
-    } else if (inputType.equals("double")) {
+    } else if (inputType.equals(serdeConstants.DOUBLE_TYPE_NAME)) {
       return createVectorExpression(CastDoubleToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
     } else if (isDecimalFamily(inputType)) {
@@ -3432,17 +3432,17 @@ import com.google.common.annotations.VisibleForTesting;
       // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
       return null;
     }
-    if (inputType.equals("boolean")) {
+    if (inputType.equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
       // Boolean must come before the integer family. It's a special case.
       return createVectorExpression(CastBooleanToVarCharViaLongToVarChar.class, childExpr,
           VectorExpressionDescriptor.Mode.PROJECTION, returnType, DataTypePhysicalVariation.NONE);
     } else if (isIntFamily(inputType)) {
       return createVectorExpression(CastLongToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
-    } else if (inputType.equals("float")) {
+    } else if (inputType.equals(serdeConstants.FLOAT_TYPE_NAME)) {
       return createVectorExpression(CastFloatToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
-    } else if (inputType.equals("double")) {
+    } else if (inputType.equals(serdeConstants.DOUBLE_TYPE_NAME)) {
       return createVectorExpression(CastDoubleToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
     } else if (isDecimalFamily(inputType)) {
@@ -3470,7 +3470,7 @@ import com.google.common.annotations.VisibleForTesting;
       // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
       return null;
     }
-    if (inputType.equalsIgnoreCase("string") || 
varcharTypePattern.matcher(inputType).matches()) {
+    if (inputType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME) || 
varcharTypePattern.matcher(inputType).matches()) {
 
       // STRING and VARCHAR types require no conversion, so use a no-op.
       return getIdentityExpression(childExpr);
@@ -3500,7 +3500,7 @@ import com.google.common.annotations.VisibleForTesting;
         return createVectorExpression(CastLongToDouble.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
             returnType, DataTypePhysicalVariation.NONE);
       }
-    } else if (inputType.equals("timestamp")) {
+    } else if (inputType.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
       return createVectorExpression(CastTimestampToDouble.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION,
           returnType, DataTypePhysicalVariation.NONE);
     } else if (isFloatFamily(inputType)) {
@@ -3749,7 +3749,7 @@ import com.google.common.annotations.VisibleForTesting;
       } else {
         cl = FilterDoubleColumnNotBetween.class;
       }
-    } else if (colType.equals("string") && !notKeywordPresent) {
+    } else if (colType.equals(serdeConstants.STRING_TYPE_NAME) && !notKeywordPresent) {
       if (mode == VectorExpressionDescriptor.Mode.PROJECTION) {
         cl = StringColumnBetween.class;
       } else {
@@ -3757,7 +3757,7 @@ import com.google.common.annotations.VisibleForTesting;
             FilterStringColumnBetweenDynamicValue.class :
             FilterStringColumnBetween.class);
       }
-    } else if (colType.equals("string") && notKeywordPresent) {
+    } else if (colType.equals(serdeConstants.STRING_TYPE_NAME) && notKeywordPresent) {
       if (mode == VectorExpressionDescriptor.Mode.PROJECTION) {
         cl = StringColumnNotBetween.class;
       } else {
@@ -3791,7 +3791,7 @@ import com.google.common.annotations.VisibleForTesting;
       } else {
         cl = FilterCharColumnNotBetween.class;
       }
-    } else if (colType.equals("timestamp") && !notKeywordPresent) {
+    } else if (colType.equals(serdeConstants.TIMESTAMP_TYPE_NAME) && !notKeywordPresent) {
       if (mode == VectorExpressionDescriptor.Mode.PROJECTION) {
         cl = TimestampColumnBetween.class;
       } else {
@@ -3799,7 +3799,7 @@ import com.google.common.annotations.VisibleForTesting;
             FilterTimestampColumnBetweenDynamicValue.class :
             FilterTimestampColumnBetween.class);
       }
-    } else if (colType.equals("timestamp") && notKeywordPresent) {
+    } else if (colType.equals(serdeConstants.TIMESTAMP_TYPE_NAME) && notKeywordPresent) {
       if (mode == VectorExpressionDescriptor.Mode.PROJECTION) {
         cl = TimestampColumnNotBetween.class;
       } else {
@@ -4266,43 +4266,43 @@ import com.google.common.annotations.VisibleForTesting;
   }
 
   public static boolean isStringFamily(String resultType) {
-    return resultType.equalsIgnoreCase("string") || 
charVarcharTypePattern.matcher(resultType).matches() ||
+    return resultType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME) || 
charVarcharTypePattern.matcher(resultType).matches() ||
            resultType.equalsIgnoreCase("string_family");
   }
 
   public static boolean isDatetimeFamily(String resultType) {
-    return resultType.equalsIgnoreCase("timestamp") || 
resultType.equalsIgnoreCase("date");
+    return resultType.equalsIgnoreCase(serdeConstants.TIMESTAMP_TYPE_NAME) || 
resultType.equalsIgnoreCase(serdeConstants.DATE_TYPE_NAME);
   }
 
   public static boolean isTimestampFamily(String resultType) {
-    return resultType.equalsIgnoreCase("timestamp");
+    return resultType.equalsIgnoreCase(serdeConstants.TIMESTAMP_TYPE_NAME);
   }
 
   public static boolean isDateFamily(String resultType) {
-    return resultType.equalsIgnoreCase("date");
+    return resultType.equalsIgnoreCase(serdeConstants.DATE_TYPE_NAME);
   }
 
   @SuppressWarnings("unused") public static boolean 
isIntervalYearMonthFamily(String resultType) {
-    return resultType.equalsIgnoreCase("interval_year_month");
+    return 
resultType.equalsIgnoreCase(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
   }
 
   @SuppressWarnings("unused") public static boolean 
isIntervalDayTimeFamily(String resultType) {
-    return resultType.equalsIgnoreCase("interval_day_time");
+    return 
resultType.equalsIgnoreCase(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
   }
 
   // return true if this is any kind of float
   public static boolean isFloatFamily(String resultType) {
-    return resultType.equalsIgnoreCase("double")
-        || resultType.equalsIgnoreCase("float");
+    return resultType.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME)
+        || resultType.equalsIgnoreCase(serdeConstants.FLOAT_TYPE_NAME);
   }
 
   // Return true if this data type is handled in the output vector as an integer.
   public static boolean isIntFamily(String resultType) {
-    return resultType.equalsIgnoreCase("tinyint")
-        || resultType.equalsIgnoreCase("smallint")
-        || resultType.equalsIgnoreCase("int")
-        || resultType.equalsIgnoreCase("bigint")
-        || resultType.equalsIgnoreCase("boolean")
+    return resultType.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME)
+        || resultType.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME)
+        || resultType.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME)
+        || resultType.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME)
+        || resultType.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME)
         || resultType.equalsIgnoreCase("long");
   }
 
@@ -4319,7 +4319,7 @@ import com.google.common.annotations.VisibleForTesting;
       return ((HiveChar) constDesc.getValue()).getStrippedValue().getBytes(StandardCharsets.UTF_8);
     } else if (varcharTypePattern.matcher(typeString).matches()) {
       return ((HiveVarchar) constDesc.getValue()).getValue().getBytes(StandardCharsets.UTF_8);
-    } else if (typeString.equalsIgnoreCase("boolean")) {
+    } else if (typeString.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME)) {
       if (constDesc.getValue() == null) {
         return null;
       }else{
@@ -4495,9 +4495,9 @@ import com.google.common.annotations.VisibleForTesting;
     typeName = typeName.toLowerCase();
     switch (typeName) {
     case "long":
-      return "bigint";
+      return serdeConstants.BIGINT_TYPE_NAME;
     case "string_family":
-      return "string";
+      return serdeConstants.STRING_TYPE_NAME;
     default:
       return typeName;
     }
@@ -4582,10 +4582,10 @@ import com.google.common.annotations.VisibleForTesting;
       String typeName;
       if (vectorTypeName.equalsIgnoreCase("bytes")) {
         // Use hive type name.
-        typeName = "string";
+        typeName = serdeConstants.STRING_TYPE_NAME;
       } else if (vectorTypeName.equalsIgnoreCase("long")) {
         // Use hive type name.
-        typeName = "bigint";
+        typeName = serdeConstants.BIGINT_TYPE_NAME;
       } else {
         typeName = vectorTypeName;
       }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index 89289b36d60..cfa846e6877 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -116,7 +117,7 @@ public class VectorizedBatchUtil {
     typeName = typeName.toLowerCase();
 
     // Allow undecorated CHAR and VARCHAR to support scratch column type names.
-    if (typeName.equals("char") || typeName.equals("varchar")) {
+    if (typeName.equals(serdeConstants.CHAR_TYPE_NAME) || typeName.equals(serdeConstants.VARCHAR_TYPE_NAME)) {
       return new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
     }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToBoolean.java
index 117e8140777..5eddc296a29 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToBoolean.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde.serdeConstants;
 
 /*
  * Comment from BooleanWritable evaluate(DateWritable d)
@@ -54,7 +55,7 @@ public class CastDateToBoolean extends NullVectorExpression {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("date"))
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DATE_TYPE_NAME))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
index ec8b9662fa1..9bd62dd7ce2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 public class CastDateToTimestamp extends VectorExpression {
@@ -145,7 +146,7 @@ public class CastDateToTimestamp extends VectorExpression {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("date"))
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DATE_TYPE_NAME))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
index 8f6db5cdff1..9baabefdfd8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.TimestampUtils;
+import org.apache.hadoop.hive.serde.serdeConstants;
 
 public class CastDoubleToTimestamp extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -155,7 +156,7 @@ public class CastDoubleToTimestamp extends VectorExpression {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("double"))
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DOUBLE_TYPE_NAME))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
index d77eb94bd77..a70fd4f355c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.MathExpr;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.serdeConstants;
 
 public class CastTimestampToBoolean extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -121,7 +122,7 @@ public class CastTimestampToBoolean extends VectorExpression {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.TIMESTAMP_TYPE_NAME))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
index 72a7b32cefe..0b0397fac8c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
@@ -22,6 +22,7 @@ import java.util.Arrays;
 
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.serdeConstants;
 
 public class CastTimestampToDouble extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -142,7 +143,7 @@ public class CastTimestampToDouble extends VectorExpression {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.TIMESTAMP_TYPE_NAME))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
index 7fca34ba3b4..5677c68bc8e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.MathExpr;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 
@@ -183,7 +184,7 @@ public class CastTimestampToLong extends VectorExpression {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.TIMESTAMP_TYPE_NAME))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
index d8162c5236b..620e95929c0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 // A type date (LongColumnVector storing epoch days) minus a type date produces a
@@ -159,8 +160,8 @@ public class DateColSubtractDateColumn extends VectorExpression {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("date"),
-            VectorExpressionDescriptor.ArgumentType.getType("date"))
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DATE_TYPE_NAME),
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DATE_TYPE_NAME))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN,
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
index 845c3946c5c..81c5dea5f3c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 // A type date (LongColumnVector storing epoch days) minus a type date produces a
@@ -170,8 +171,8 @@ public class DateColSubtractDateScalar extends VectorExpression {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("date"),
-            VectorExpressionDescriptor.ArgumentType.getType("date"))
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DATE_TYPE_NAME),
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DATE_TYPE_NAME))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN,
             VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
index 36aa6b92ed3..53aadc77eaf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
@@ -24,6 +24,7 @@ import java.util.Arrays;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 // A type date (LongColumnVector storing epoch days) minus a type date produces a
@@ -153,8 +154,8 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("date"),
-            VectorExpressionDescriptor.ArgumentType.getType("date"))
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DATE_TYPE_NAME),
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DATE_TYPE_NAME))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.SCALAR,
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprDoubleColumnDoubleColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprDoubleColumnDoubleColumn.java
index f72d9e7bdc3..6e0436c8f99 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprDoubleColumnDoubleColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprDoubleColumnDoubleColumn.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.serdeConstants;
 
 /**
  * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
@@ -159,8 +160,8 @@ public class IfExprDoubleColumnDoubleColumn extends VectorExpression {
         .setNumArguments(3)
         .setArgumentTypes(
             VectorExpressionDescriptor.ArgumentType.getType("long"),
-            VectorExpressionDescriptor.ArgumentType.getType("double"),
-            VectorExpressionDescriptor.ArgumentType.getType("double"))
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DOUBLE_TYPE_NAME),
+            VectorExpressionDescriptor.ArgumentType.getType(serdeConstants.DOUBLE_TYPE_NAME))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN,
             VectorExpressionDescriptor.InputExpressionType.COLUMN,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java
index 1bdfee594c0..e2bd65590fb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.hive.ql.plan.VectorDesc;
 import org.apache.hadoop.hive.ql.plan.VectorPTFDesc;
 import org.apache.hadoop.hive.ql.plan.VectorPTFInfo;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -568,15 +569,15 @@ public class VectorPTFOperator extends Operator<PTFDesc>
   private static TypeInfo columnVectorTypeToTypeInfo(Type type) {
     switch (type) {
     case DOUBLE:
-      return TypeInfoUtils.getTypeInfoFromTypeString("double");
+      return TypeInfoUtils.getTypeInfoFromTypeString(serdeConstants.DOUBLE_TYPE_NAME);
     case BYTES:
-      return TypeInfoUtils.getTypeInfoFromTypeString("string");
+      return TypeInfoUtils.getTypeInfoFromTypeString(serdeConstants.STRING_TYPE_NAME);
     case DECIMAL:
-      return TypeInfoUtils.getTypeInfoFromTypeString("decimal");
+      return TypeInfoUtils.getTypeInfoFromTypeString(serdeConstants.DECIMAL_TYPE_NAME);
     case TIMESTAMP:
-      return TypeInfoUtils.getTypeInfoFromTypeString("timestamp");
+      return TypeInfoUtils.getTypeInfoFromTypeString(serdeConstants.TIMESTAMP_TYPE_NAME);
     case LONG:
-      return TypeInfoUtils.getTypeInfoFromTypeString("int");
+      return TypeInfoUtils.getTypeInfoFromTypeString(serdeConstants.INT_TYPE_NAME);
     default:
       throw new RuntimeException("Cannot convert column vector type: '" + type 
+ "' to TypeInfo");
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java b/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java
index 2be864e752e..49f310d930a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java
@@ -39,7 +39,7 @@ public final class IOConstants {
 
   /**
    * The desired TABLE column names and types for input format schema evolution.
-   * This is different than COLUMNS and COLUMNS_TYPES, which are based on individual partition
+   * This is different from COLUMNS and COLUMNS_TYPES, which are based on individual partition
    * metadata.
    *
    * Virtual columns and partition columns are not included
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java
index 37aeba0a82a..21c96cb86d7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -124,7 +125,7 @@ public class RCFileOutputFormat extends
       boolean isCompressed, Properties tableProperties, Progressable progress) throws IOException {
 
     String[] cols = null;
-    String columns = tableProperties.getProperty("columns");
+    String columns = tableProperties.getProperty(serdeConstants.LIST_COLUMNS);
     if (columns == null || columns.trim().equals("")) {
       cols = new String[0];
     } else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
index 4e92edee013..3553a6a04d1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
@@ -97,7 +97,7 @@ public class OrcSerde extends AbstractSerDe implements SchemaInference {
   }
 
   /**
-   * NOTE: if "columns.types" is missing, all columns will be of String type.
+   * NOTE: if {@link serdeConstants#LIST_COLUMN_TYPES} is missing, all columns will be of String type.
    */
   @Override
   protected List<TypeInfo> parseColumnTypes() {
@@ -163,7 +163,7 @@ public class OrcSerde extends AbstractSerDe implements SchemaInference {
   }
 
   private String convertPrimitiveType(TypeDescription fieldType) {
-    if (fieldType.getCategory().getName().equals("timestamp with local time 
zone")) {
+    if 
(fieldType.getCategory().getName().equals(serdeConstants.TIMESTAMPLOCALTZ_TYPE_NAME))
 {
       throw new IllegalArgumentException("Unhandled ORC type " + 
fieldType.getCategory().getName());
     }
     return fieldType.toString();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUnion.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUnion.java
index b87e00d0070..733d8f261a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUnion.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUnion.java
@@ -27,6 +27,8 @@ import org.apache.orc.OrcProto;
 import java.util.ArrayList;
 import java.util.List;
 
+import static org.apache.hadoop.hive.serde.serdeConstants.UNION_TYPE_NAME;
+
 /**
  * An in-memory representation of a union type.
  */
@@ -119,7 +121,8 @@ final class OrcUnion implements UnionObject {
 
     @Override
     public String getTypeName() {
-      StringBuilder builder = new StringBuilder("uniontype<");
+      StringBuilder builder = new StringBuilder(UNION_TYPE_NAME);
+      builder.append("<");
       boolean first = true;
       for(ObjectInspector child: children) {
         if (first) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
index 537213091fb..4c54f11c657 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
@@ -31,6 +31,8 @@ import org.apache.parquet.io.api.Converter;
 import org.apache.parquet.schema.GroupType;
 import org.apache.parquet.schema.Type;
 
+import static org.apache.hadoop.hive.serde.serdeConstants.LIST_TYPE_NAME;
+
 public class HiveCollectionConverter extends HiveGroupConverter {
   private final GroupType collectionType;
   private final ConverterParent parent;
@@ -184,7 +186,7 @@ public class HiveCollectionConverter extends HiveGroupConverter {
     if (repeatedType.isPrimitive() ||
         (repeatedType.asGroupType().getFieldCount() != 1)) {
       return true;
-    } else if (repeatedType.getName().equals("array")) {
+    } else if (repeatedType.getName().equals(LIST_TYPE_NAME)) {
       return true; // existing avro data
     } else if (repeatedType.getName().equals(parentName + "_tuple")) {
       return true; // existing thrift data
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
index a3a123ee00c..db882d8ba82 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
@@ -422,7 +422,7 @@ public class ParquetHiveSerDe extends AbstractSerDe implements SchemaInference {
       MessageType msg = metadata.getSchema();
       List<FieldSchema> schema = new ArrayList<>();
       String inferBinaryAsStringValue = conf.get(HiveConf.ConfVars.HIVE_PARQUET_INFER_BINARY_AS.varname);
-      boolean inferBinaryAsString = "string".equalsIgnoreCase(inferBinaryAsStringValue);
+      boolean inferBinaryAsString = serdeConstants.STRING_TYPE_NAME.equalsIgnoreCase(inferBinaryAsStringValue);
 
       for (Type field: msg.getFields()) {
         FieldSchema fieldSchema = convertParquetTypeToFieldSchema(field, inferBinaryAsString);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
index de6f88b932c..5bbe6b3f619 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -439,7 +440,7 @@ public class ConvertAstToSearchArg {
       // if it is a reference to a boolean column, covert it to a truth test.
       if (expression instanceof ExprNodeColumnDesc) {
         ExprNodeColumnDesc columnDesc = (ExprNodeColumnDesc) expression;
-        if (columnDesc.getTypeString().equals("boolean")) {
+        if (columnDesc.getTypeString().equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
           builder.equals(columnDesc.getColumn(), PredicateLeaf.Type.BOOLEAN,
               true);
           return;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
index fb1f6a1c795..a4e236b4c33 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.ql.stats.StatsUtils;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
@@ -231,7 +232,7 @@ public class SimpleFetchOptimizer extends Transform {
       if (op instanceof FilterOperator) {
         ExprNodeDesc predicate = ((FilterOperator) op).getConf().getPredicate();
         if (predicate instanceof ExprNodeConstantDesc
-                && "boolean".equals(predicate.getTypeInfo().getTypeName())) {
+                && 
serdeConstants.BOOLEAN_TYPE_NAME.equals(predicate.getTypeInfo().getTypeName())) 
{
           continue;
         } else if (PartitionPruner.onlyContainsPartnCols(table, predicate)) {
           continue;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java
index 9fdbfa9a38a..dcaf70e2a50 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java
@@ -179,38 +179,38 @@ public class SqlFunctionConverter {
       TypeInfo castType = TypeConverter.convert(dt);
 
       if (castType.equals(TypeInfoFactory.byteTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("tinyint");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.TINYINT_TYPE_NAME);
       } else if (castType instanceof CharTypeInfo) {
-        castUDF = handleCastForParameterizedType(castType, 
FunctionRegistry.getFunctionInfo("char"));
+        castUDF = handleCastForParameterizedType(castType, 
FunctionRegistry.getFunctionInfo(serdeConstants.CHAR_TYPE_NAME));
       } else if (castType instanceof VarcharTypeInfo) {
         castUDF = handleCastForParameterizedType(castType,
-            FunctionRegistry.getFunctionInfo("varchar"));
+            
FunctionRegistry.getFunctionInfo(serdeConstants.VARCHAR_TYPE_NAME));
       } else if (castType.equals(TypeInfoFactory.stringTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("string");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.STRING_TYPE_NAME);
       } else if (castType.equals(TypeInfoFactory.booleanTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("boolean");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.BOOLEAN_TYPE_NAME);
       } else if (castType.equals(TypeInfoFactory.shortTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("smallint");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.SMALLINT_TYPE_NAME);
       } else if (castType.equals(TypeInfoFactory.intTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("int");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.INT_TYPE_NAME);
       } else if (castType.equals(TypeInfoFactory.longTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("bigint");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.BIGINT_TYPE_NAME);
       } else if (castType.equals(TypeInfoFactory.floatTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("float");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.FLOAT_TYPE_NAME);
       } else if (castType.equals(TypeInfoFactory.doubleTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("double");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.DOUBLE_TYPE_NAME);
       } else if (castType.equals(TypeInfoFactory.timestampTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("timestamp");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.TIMESTAMP_TYPE_NAME);
       } else if (castType instanceof TimestampLocalTZTypeInfo) {
         castUDF = handleCastForParameterizedType(castType,
             
FunctionRegistry.getFunctionInfo(serdeConstants.TIMESTAMPLOCALTZ_TYPE_NAME));
       } else if (castType.equals(TypeInfoFactory.dateTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("date");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.DATE_TYPE_NAME);
       } else if (castType instanceof DecimalTypeInfo) {
         castUDF = handleCastForParameterizedType(castType,
-            FunctionRegistry.getFunctionInfo("decimal"));
+            
FunctionRegistry.getFunctionInfo(serdeConstants.DECIMAL_TYPE_NAME));
       } else if (castType.equals(TypeInfoFactory.binaryTypeInfo)) {
-        castUDF = FunctionRegistry.getFunctionInfo("binary");
+        castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.BINARY_TYPE_NAME);
       } else if (castType.equals(TypeInfoFactory.intervalDayTimeTypeInfo)) {
         castUDF = 
FunctionRegistry.getFunctionInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
       } else if (castType.equals(TypeInfoFactory.intervalYearMonthTypeInfo)) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index a33e5627b4b..d231d988f45 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -259,33 +259,34 @@ public class Vectorizer implements PhysicalPlanResolver {
 
   static {
     StringBuilder patternBuilder = new StringBuilder();
-    patternBuilder.append("int");
-    patternBuilder.append("|smallint");
-    patternBuilder.append("|tinyint");
-    patternBuilder.append("|bigint");
-    patternBuilder.append("|integer");
-    patternBuilder.append("|long");
-    patternBuilder.append("|short");
-    patternBuilder.append("|timestamp");
-    patternBuilder.append("|" + serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
-    patternBuilder.append("|" + serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
-    patternBuilder.append("|boolean");
-    patternBuilder.append("|binary");
-    patternBuilder.append("|string");
-    patternBuilder.append("|byte");
-    patternBuilder.append("|float");
-    patternBuilder.append("|double");
-    patternBuilder.append("|date");
-    patternBuilder.append("|void");
-
-    // Decimal types can be specified with different precision and scales e.g. decimal(10,5),
-    // as opposed to other data types which can be represented by constant strings.
-    // The regex matches only the "decimal" prefix of the type.
-    patternBuilder.append("|decimal.*");
+    patternBuilder.append(serdeConstants.INT_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.SMALLINT_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.TINYINT_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.BIGINT_TYPE_NAME);
+    patternBuilder.append("|").append("integer");
+    patternBuilder.append("|").append("long");
+    patternBuilder.append("|").append("short");
+    patternBuilder.append("|").append(serdeConstants.TIMESTAMP_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.BOOLEAN_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.BINARY_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.STRING_TYPE_NAME);
+    patternBuilder.append("|").append("byte");
+    patternBuilder.append("|").append(serdeConstants.FLOAT_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.DOUBLE_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.DATE_TYPE_NAME);
+    patternBuilder.append("|").append(serdeConstants.VOID_TYPE_NAME);
+
+    /** Decimal types can be specified with different precision and scales e.g. decimal(10,5),
+     * as opposed to other data types which can be represented by constant strings.
+     * The regex matches only the {@link serdeConstants#DECIMAL_TYPE_NAME} prefix of the type.
+     */
+    patternBuilder.append("|").append(serdeConstants.DECIMAL_TYPE_NAME).append(".*");
 
     // CHAR and VARCHAR types can be specified with maximum length.
-    patternBuilder.append("|char.*");
-    patternBuilder.append("|varchar.*");
+    patternBuilder.append("|").append(serdeConstants.CHAR_TYPE_NAME).append(".*");
+    patternBuilder.append("|").append(serdeConstants.VARCHAR_TYPE_NAME).append(".*");
 
     supportedDataTypesPattern = Pattern.compile(patternBuilder.toString());
   }
@@ -3255,7 +3256,7 @@ public class Vectorizer implements PhysicalPlanResolver {
     type = type.toLowerCase();
     boolean result = supportedDataTypesPattern.matcher(type).matches();
     if (result && !allowVoidProjection &&
-        mode == VectorExpressionDescriptor.Mode.PROJECTION && type.equals("void")) {
+        mode == VectorExpressionDescriptor.Mode.PROJECTION && type.equals(serdeConstants.VOID_TYPE_NAME)) {
       return false;
     }
 
@@ -3283,7 +3284,7 @@ public class Vectorizer implements PhysicalPlanResolver {
     type = type.toLowerCase();
     boolean result = supportedDataTypesPattern.matcher(type).matches();
     if (result && !allowVoidProjection &&
-        mode == VectorExpressionDescriptor.Mode.PROJECTION && type.equals("void")) {
+        mode == VectorExpressionDescriptor.Mode.PROJECTION && type.equals(serdeConstants.VOID_TYPE_NAME)) {
       return "Vectorizing data type void not supported when mode = PROJECTION";
     }
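
[As an aside on the pattern built in the static block above: a minimal standalone sketch, not part of the patch, showing why the ".*" suffixes matter; the literal type names are the values serdeConstants assigns to the corresponding fields:]

    import java.util.regex.Pattern;

    public class TypePatternSketch {
      public static void main(String[] args) {
        // Plain names must match exactly; decimal/char/varchar carry ".*"
        // so parameterized forms like decimal(10,5) or varchar(15) match too.
        Pattern p = Pattern.compile("int|bigint|boolean|decimal.*|char.*|varchar.*");
        System.out.println(p.matcher("bigint").matches());        // true
        System.out.println(p.matcher("decimal(10,5)").matches()); // true
        System.out.println(p.matcher("map<int,int>").matches());  // false
      }
    }
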
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 54b6587ba99..9a406fed65d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -1954,8 +1954,8 @@ public abstract class BaseSemanticAnalyzer {
     prop.setProperty(serdeConstants.SERIALIZATION_FORMAT, Integer.toString(Utilities.tabCode));
     prop.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, " ");
     String[] colTypes = schema.split("#");
-    prop.setProperty("columns", colTypes[0]);
-    prop.setProperty("columns.types", colTypes[1]);
+    prop.setProperty(serdeConstants.LIST_COLUMNS, colTypes[0]);
+    prop.setProperty(serdeConstants.LIST_COLUMN_TYPES, colTypes[1]);
     prop.setProperty(serdeConstants.SERIALIZATION_LIB, LazySimpleSerDe.class.getName());
     prop.setProperty(hive_metastoreConstants.TABLE_BUCKETING_VERSION, "-1");
     FetchWork fetch =
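
[For context on the hunk above: serdeConstants.LIST_COLUMNS and serdeConstants.LIST_COLUMN_TYPES hold the property keys "columns" and "columns.types" that the patch replaces. A minimal standalone sketch of the names#types split, with a hypothetical schema string and the key values inlined to stay self-contained:]

    import java.util.Properties;

    public class SchemaPropsSketch {
      public static void main(String[] args) {
        String schema = "key,value#string,int";   // hypothetical "names#types" schema
        String[] colTypes = schema.split("#");

        Properties prop = new Properties();
        prop.setProperty("columns", colTypes[0]);        // serdeConstants.LIST_COLUMNS
        prop.setProperty("columns.types", colTypes[1]);  // serdeConstants.LIST_COLUMN_TYPES
        System.out.println(prop);
      }
    }
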
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/rewrite/MergeRewriter.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/rewrite/MergeRewriter.java
index 64152f419b6..d97f71a641f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/rewrite/MergeRewriter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/rewrite/MergeRewriter.java
@@ -41,6 +41,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.function.UnaryOperator;
+import org.apache.hadoop.hive.serde.serdeConstants;
 
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
@@ -141,7 +142,7 @@ public class MergeRewriter implements Rewriter<MergeStatement>, MergeStatement.D
         Table table = db.newTable(tableName);
         table.setSerializationLib(format.getSerde());
         List<FieldSchema> fields = new ArrayList<>();
-        fields.add(new FieldSchema("val", "int", null));
+        fields.add(new FieldSchema("val", serdeConstants.INT_TYPE_NAME, null));
         table.setFields(fields);
         table.setDataLocation(Warehouse.getDnsPath(new Path(SessionState.get().getTempTableSpace(),
             tableName), conf));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java
index f8b50ca6c29..1eb1d17297f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java
@@ -1043,8 +1043,8 @@ public class TypeCheckProcFactory<T> {
           // flatten OR
           List<T> childrenList = new ArrayList<>(children.size());
           for (T child : children) {
-            if (TypeInfoFactory.getPrimitiveTypeInfo("void").equals(exprFactory.getTypeInfo(child))) {
-              child = exprFactory.setTypeInfo(child, TypeInfoFactory.getPrimitiveTypeInfo("boolean"));
+            if (TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.VOID_TYPE_NAME).equals(exprFactory.getTypeInfo(child))) {
+              child = exprFactory.setTypeInfo(child, TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
             }
             if (exprFactory.isORFuncCallExpr(child)) {
               childrenList.addAll(exprFactory.getExprChildren(child));
@@ -1057,8 +1057,8 @@ public class TypeCheckProcFactory<T> {
           // flatten AND
           List<T> childrenList = new ArrayList<>(children.size());
           for (T child : children) {
-            if (TypeInfoFactory.getPrimitiveTypeInfo("void").equals(exprFactory.getTypeInfo(child))) {
-              child = exprFactory.setTypeInfo(child, TypeInfoFactory.getPrimitiveTypeInfo("boolean"));
+            if (TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.VOID_TYPE_NAME).equals(exprFactory.getTypeInfo(child))) {
+              child = exprFactory.setTypeInfo(child, TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
             }
             if (exprFactory.isANDFuncCallExpr(child)) {
               childrenList.addAll(exprFactory.getExprChildren(child));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
index 5358e100660..0dcfe72d7f5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
@@ -217,7 +217,7 @@ public class PartitionDesc implements Serializable, Cloneable {
   }
 
   public void setProperties(final Properties properties) {
-    properties.remove("columns.comments");
+    properties.remove(serdeConstants.LIST_COLUMN_COMMENTS);
     if (properties instanceof CopyOnFirstWriteProperties) {
       this.properties = properties;
     } else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index 188ec71a57e..0a6c9c26eec 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -90,6 +90,7 @@ import org.apache.hadoop.mapred.TextInputFormat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_COMMENTS;
 /**
  * PlanUtils.
  *
@@ -543,9 +544,9 @@ public final class PlanUtils {
               serdeConstants.SERIALIZATION_LIB, BinarySortableSerDe.class.getName()));
     } else {
       return new TableDesc(SequenceFileInputFormat.class,
-          SequenceFileOutputFormat.class, Utilities.makeProperties("columns",
+          SequenceFileOutputFormat.class, Utilities.makeProperties(serdeConstants.LIST_COLUMNS,
               MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
-              "columns.types", MetaStoreUtils
+              serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
               .getColumnTypesFromFieldSchema(fieldSchemas),
               serdeConstants.ESCAPE_CHAR, "\\",
               serdeConstants.SERIALIZATION_LIB,LazyBinarySerDe.class.getName()));
@@ -1238,11 +1239,11 @@ public final class PlanUtils {
   static Map<Object, Object> getPropertiesForExplain(Properties properties) {
     if (properties != null) {
       Map<Object, Object> clone = null;
-      String value = properties.getProperty("columns.comments");
+      String value = properties.getProperty(LIST_COLUMN_COMMENTS);
       if (value != null) {
         // should copy properties first
         clone = new HashMap<>(properties);
-        clone.put("columns.comments", quoteComments(value));
+        clone.put(LIST_COLUMN_COMMENTS, quoteComments(value));
       }
       value = properties.getProperty(StatsSetupConst.NUM_ERASURE_CODED_FILES);
       if ("0".equals(value)) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
index 2a68da0699a..2d8dfddd87a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
@@ -23,6 +23,7 @@ import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.Map;
 
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -56,7 +57,7 @@ public class DfsProcessor implements CommandProcessor {
   public DfsProcessor(Configuration conf, boolean addSchema) {
     dfs = new FsShell(conf);
     dfsSchema = new Schema();
-    dfsSchema.addToFieldSchemas(new FieldSchema(DFS_RESULT_HEADER, "string", ""));
+    dfsSchema.addToFieldSchemas(new FieldSchema(DFS_RESULT_HEADER, serdeConstants.STRING_TYPE_NAME, ""));
   }
 
   @Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/LlapCacheResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/LlapCacheResourceProcessor.java
index 53d4328724d..094cd7fc7df 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/LlapCacheResourceProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/LlapCacheResourceProcessor.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.net.NetUtils;
@@ -122,8 +123,8 @@ public class LlapCacheResourceProcessor implements CommandProcessor {
 
   private Schema getSchema() {
     Schema sch = new Schema();
-    sch.addToFieldSchemas(new FieldSchema("hostName", "string", ""));
-    sch.addToFieldSchemas(new FieldSchema("purgedMemoryBytes", "string", ""));
+    sch.addToFieldSchemas(new FieldSchema("hostName", serdeConstants.STRING_TYPE_NAME, ""));
+    sch.addToFieldSchemas(new FieldSchema("purgedMemoryBytes", serdeConstants.STRING_TYPE_NAME, ""));
     sch.putToProperties(SERIALIZATION_NULL_FORMAT, defaultNullString);
     return sch;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/LlapClusterResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/LlapClusterResourceProcessor.java
index c5dd688a46d..61d1914223e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/LlapClusterResourceProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/LlapClusterResourceProcessor.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -113,12 +114,12 @@ public class LlapClusterResourceProcessor implements CommandProcessor {
 
   private Schema getSchema() {
     Schema sch = new Schema();
-    sch.addToFieldSchemas(new FieldSchema("applicationId", "string", ""));
-    sch.addToFieldSchemas(new FieldSchema("workerIdentity", "string", ""));
-    sch.addToFieldSchemas(new FieldSchema("hostname", "string", ""));
-    sch.addToFieldSchemas(new FieldSchema("rpcPort", "string", ""));
-    sch.addToFieldSchemas(new FieldSchema("memory", "string", ""));
-    sch.addToFieldSchemas(new FieldSchema("vcores", "string", ""));
+    sch.addToFieldSchemas(new FieldSchema("applicationId", serdeConstants.STRING_TYPE_NAME, ""));
+    sch.addToFieldSchemas(new FieldSchema("workerIdentity", serdeConstants.STRING_TYPE_NAME, ""));
+    sch.addToFieldSchemas(new FieldSchema("hostname", serdeConstants.STRING_TYPE_NAME, ""));
+    sch.addToFieldSchemas(new FieldSchema("rpcPort", serdeConstants.STRING_TYPE_NAME, ""));
+    sch.addToFieldSchemas(new FieldSchema("memory", serdeConstants.STRING_TYPE_NAME, ""));
+    sch.addToFieldSchemas(new FieldSchema("vcores", serdeConstants.STRING_TYPE_NAME, ""));
     sch.putToProperties(SERIALIZATION_NULL_FORMAT, defaultNullString);
     return sch;
   }
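
[A possible follow-up, not part of this change: the six near-identical calls above could go through a small helper. A hypothetical sketch, assuming the FieldSchema and serdeConstants imports already present in the file:]

    // Hypothetical helper: builds a string-typed field with an empty comment.
    private static FieldSchema stringField(String name) {
      return new FieldSchema(name, serdeConstants.STRING_TYPE_NAME, "");
    }

    // Usage: sch.addToFieldSchemas(stringField("applicationId"));
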
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/BaseJsonSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/BaseJsonSerDe.java
index e4b2dbbed55..76df357d4be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/BaseJsonSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/BaseJsonSerDe.java
@@ -28,6 +28,7 @@ import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.JsonToken;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -110,7 +111,7 @@ abstract public class BaseJsonSerDe extends AbstractSerDe {
         throw new SerDeException("Only primitive field types are accepted");
       }
 
-      if (colTypeInfo.getTypeName().equals("binary")) {
+      if (colTypeInfo.getTypeName().equals(serdeConstants.BINARY_TYPE_NAME)) {
 
         if (geometryColumn >= 0) {
           // only one column can be defined as binary for geometries
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
index 2eb65c51df4..290572f4b24 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -39,7 +40,7 @@ public class GenericUDFBetween extends GenericUDF {
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    if (!arguments[0].getTypeName().equals("boolean")) {
+    if (!arguments[0].getTypeName().equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
       throw new UDFArgumentTypeException(0, "First argument for BETWEEN should be boolean type");
     }
     egt.initialize(new ObjectInspector[] {arguments[1], arguments[2]});
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
index 59bb6d9b261..f0fc1b4b150 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.ql.plan.ptf.BoundaryDef;
 import org.apache.hadoop.hive.ql.plan.ptf.OrderDef;
 import org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef;
 import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -757,27 +758,27 @@ abstract class SingleValueBoundaryScanner extends ValueBoundaryScanner {
   public static SingleValueBoundaryScanner getBoundaryScanner(BoundaryDef start, BoundaryDef end,
       boolean nullsLast, OrderExpressionDef exprDef, String typeString) throws HiveException {
     switch (typeString) {
-    case "int":
-    case "bigint":
-    case "smallint":
-    case "tinyint":
+    case serdeConstants.INT_TYPE_NAME:
+    case serdeConstants.BIGINT_TYPE_NAME:
+    case serdeConstants.SMALLINT_TYPE_NAME:
+    case serdeConstants.TINYINT_TYPE_NAME:
       return new LongPrimitiveValueBoundaryScanner(start, end, exprDef, nullsLast);
-    case "timestamp":
+    case serdeConstants.TIMESTAMP_TYPE_NAME:
       return new TimestampPrimitiveValueBoundaryScanner(start, end, exprDef, nullsLast);
-    case "double":
-    case "float":
+    case serdeConstants.DOUBLE_TYPE_NAME:
+    case serdeConstants.FLOAT_TYPE_NAME:
       return new DoublePrimitiveValueBoundaryScanner(start, end, exprDef, nullsLast);
-    case "date":
+    case serdeConstants.DATE_TYPE_NAME:
       return new DatePrimitiveValueBoundaryScanner(start, end, exprDef, nullsLast);
-    case "string":
+    case serdeConstants.STRING_TYPE_NAME:
       return new StringPrimitiveValueBoundaryScanner(start, end, exprDef, nullsLast);
-    case "boolean":
+    case serdeConstants.BOOLEAN_TYPE_NAME:
       return new BooleanPrimitiveValueBoundaryScanner(start, end, exprDef, nullsLast);
     default:
       // The following types includes scale/precision: "decimal(10,4), char(10) and varchar(15)"
-      if (typeString.startsWith("char") || typeString.startsWith("varchar")) {
+      if (typeString.startsWith(serdeConstants.CHAR_TYPE_NAME) || typeString.startsWith(serdeConstants.VARCHAR_TYPE_NAME)) {
         return new StringPrimitiveValueBoundaryScanner(start, end, exprDef, nullsLast);
-      } else if (typeString.startsWith("decimal")) {
+      } else if (typeString.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
         return new HiveDecimalPrimitiveValueBoundaryScanner(start, end, exprDef, nullsLast);
       }
       throw new HiveException(String
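
[One detail worth noting about the switch above: Java only accepts compile-time constants as String case labels, and the serdeConstants fields qualify because they are public static final String fields initialized with literals. A minimal standalone sketch with stand-in constants:]

    public class SwitchConstantSketch {
      // Stand-ins for serdeConstants.INT_TYPE_NAME / BIGINT_TYPE_NAME.
      static final String INT_TYPE_NAME = "int";
      static final String BIGINT_TYPE_NAME = "bigint";

      static String kind(String typeString) {
        switch (typeString) {
        case INT_TYPE_NAME:
        case BIGINT_TYPE_NAME:
          return "long-backed";
        default:
          return "other";
        }
      }

      public static void main(String[] args) {
        System.out.println(kind("bigint")); // prints: long-backed
      }
    }
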
