kgyrtkirk commented on a change in pull request #1305: URL: https://github.com/apache/hive/pull/1305#discussion_r460765929
########## File path: ql/src/test/results/clientpositive/llap/vector_const.q.out ########## @@ -40,7 +40,7 @@ STAGE PLANS: alias: varchar_const_1 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: 'FF' (type: varchar(3)) + expressions: 'FF' (type: varchar(4)) Review comment: interesting change; will this also mean that: ``` CONCAT(CAST('F' AS CHAR(200)),CAST('F' AS CHAR(200))) ``` will not be processable because it would need `CHAR(400)` - which is not supported? ########## File path: ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java ########## @@ -341,26 +342,22 @@ public static ExprNodeConstantDesc toExprNodeConstantDesc(RexLiteral literal) { case DECIMAL: return new ExprNodeConstantDesc(TypeInfoFactory.getDecimalTypeInfo(lType.getPrecision(), lType.getScale()), HiveDecimal.create((BigDecimal)literal.getValue3())); - case VARCHAR: case CHAR: { - if (literal.getValue() instanceof HiveNlsString) { - HiveNlsString mxNlsString = (HiveNlsString) literal.getValue(); - switch (mxNlsString.interpretation) { - case STRING: - return new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, literal.getValue3()); - case CHAR: { - int precision = lType.getPrecision(); - HiveChar value = new HiveChar((String) literal.getValue3(), precision); - return new ExprNodeConstantDesc(new CharTypeInfo(precision), value); - } - case VARCHAR: { - int precision = lType.getPrecision(); - HiveVarchar value = new HiveVarchar((String) literal.getValue3(), precision); - return new ExprNodeConstantDesc(new VarcharTypeInfo(precision), value); - } - } + Preconditions.checkState(literal.getValue() instanceof NlsString, + "char values must use NlsString for correctness"); + int precision = lType.getPrecision(); + HiveChar value = new HiveChar((String) literal.getValue3(), precision); + return new ExprNodeConstantDesc(new CharTypeInfo(precision), value); + } + case VARCHAR: { + 
Preconditions.checkState(literal.getValue() instanceof NlsString, + "varchar/string values must use NlsString for correctness"); + int precision = lType.getPrecision(); + if (precision == Integer.MAX_VALUE) { + return new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, literal.getValue3()); Review comment: I don't know why I've not chosen this path in HIVE-21316... maybe I missed that `MAX_VARCHAR_LENGTH` and `MAX_CHAR_LENGTH` are both below 64K ########## File path: ql/src/java/org/apache/hadoop/hive/ql/parse/type/RexNodeExprFactory.java ########## @@ -374,7 +373,7 @@ protected Object interpretConstantAsPrimitive(PrimitiveTypeInfo targetType, Obje HiveChar newValue = new HiveChar(constValue, length); HiveChar maxCharConst = new HiveChar(constValue, HiveChar.MAX_CHAR_LENGTH); if (maxCharConst.equals(newValue)) { - return makeHiveUnicodeString(Interpretation.CHAR, newValue.getValue()); + return makeHiveUnicodeString(newValue.getValue()); Review comment: this will discard the type distinction between char/varchar - but because `CHAR` is already padded at this point, it will work correctly! awesome! :) ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: gitbox-unsubscr...@hive.apache.org For additional commands, e-mail: gitbox-h...@hive.apache.org