Guillaume Massé created CALCITE-6121:
----------------------------------------
Summary: Invalid unparse for TIMESTAMP with SparkSqlDialect
Key: CALCITE-6121
URL: https://issues.apache.org/jira/browse/CALCITE-6121
Project: Calcite
Issue Type: Bug
Reporter: Guillaume Massé
In Apache Spark, the TIMESTAMP type does not take a precision:
[https://spark.apache.org/docs/latest/sql-ref-datatypes.html]
When parsing
{code:java}
SELECT CAST("2023-11-10" AS TIMESTAMP) {code}
unparsing it with SparkSqlDialect gives:
{code:java}
SELECT CAST("2023-11-10" AS TIMESTAMP(0)) {code}
In spark-shell:
{code:java}
scala> spark.sql("""SELECT CAST("2023-11-10" AS TIMESTAMP)""").show()
+-----------------------------+
|CAST(2023-11-10 AS TIMESTAMP)|
+-----------------------------+
| 2023-11-10 00:00:00|
+-----------------------------+
scala> spark.sql("""SELECT CAST("2023-11-10" AS TIMESTAMP(0))""").show()
org.apache.spark.sql.catalyst.parser.ParseException:
[UNSUPPORTED_DATATYPE] Unsupported data type "TIMESTAMP(0)".(line 1, pos 28)

== SQL ==
SELECT CAST("2023-11-10" AS TIMESTAMP(0))
----------------------------^^^

  at org.apache.spark.sql.errors.QueryParsingErrors$.dataTypeUnsupportedError(QueryParsingErrors.scala:279)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitPrimitiveDataType$1(AstBuilder.scala:2898)
  at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitPrimitiveDataType(AstBuilder.scala:2867)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitPrimitiveDataType(AstBuilder.scala:58)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$PrimitiveDataTypeContext.accept(SqlBaseParser.java:20672)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitCast$1(AstBuilder.scala:1972)
  at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitCast(AstBuilder.scala:1971)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitCast(AstBuilder.scala:58)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$CastContext.accept(SqlBaseParser.java:17983)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitValueExpressionDefault(SqlBaseParserBaseVisitor.java:1567)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$ValueExpressionDefaultContext.accept(SqlBaseParser.java:17491)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.expression(AstBuilder.scala:1630)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitPredicated$1(AstBuilder.scala:1766)
  at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitPredicated(AstBuilder.scala:1765)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitPredicated(AstBuilder.scala:58)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$PredicatedContext.accept(SqlBaseParser.java:16909)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitExpression(SqlBaseParserBaseVisitor.java:1518)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$ExpressionContext.accept(SqlBaseParser.java:16766)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.expression(AstBuilder.scala:1630)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitNamedExpression$1(AstBuilder.scala:1652)
  at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitNamedExpression(AstBuilder.scala:1651)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitNamedExpression(AstBuilder.scala:58)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$NamedExpressionContext.accept(SqlBaseParser.java:16268)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitNamedExpressionSeq$2(AstBuilder.scala:676)
  at scala.collection.immutable.List.map(List.scala:293)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitNamedExpressionSeq(AstBuilder.scala:676)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$withSelectQuerySpecification$1(AstBuilder.scala:782)
  at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.withSelectQuerySpecification(AstBuilder.scala:776)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitRegularQuerySpecification$1(AstBuilder.scala:668)
  at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitRegularQuerySpecification(AstBuilder.scala:656)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitRegularQuerySpecification(AstBuilder.scala:58)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$RegularQuerySpecificationContext.accept(SqlBaseParser.java:10386)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitQueryPrimaryDefault(SqlBaseParserBaseVisitor.java:902)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$QueryPrimaryDefaultContext.accept(SqlBaseParser.java:9891)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitQueryTermDefault(SqlBaseParserBaseVisitor.java:888)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$QueryTermDefaultContext.accept(SqlBaseParser.java:9658)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.plan(AstBuilder.scala:114)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitQuery$1(AstBuilder.scala:120)
  at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitQuery(AstBuilder.scala:119)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitQuery(AstBuilder.scala:58)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$QueryContext.accept(SqlBaseParser.java:6891)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitStatementDefault(SqlBaseParserBaseVisitor.java:69)
  at org.apache.spark.sql.catalyst.parser.SqlBaseParser$StatementDefaultContext.accept(SqlBaseParser.java:1988)
  at org.antlr.v4.runtime.tree.AbstractParseTreeVisitor.visit(AbstractParseTreeVisitor.java:18)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitSingleStatement$1(AstBuilder.scala:80)
  at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
  at org.apache.spark.sql.catalyst.parser.AstBuilder.visitSingleStatement(AstBuilder.scala:80)
  at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.$anonfun$parsePlan$2(ParseDriver.scala:92)
  at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
  at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.$anonfun$parsePlan$1(ParseDriver.scala:92)
  at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parse(ParseDriver.scala:127)
  at org.apache.spark.sql.execution.SparkSqlParser.parse(SparkSqlParser.scala:52)
  at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parsePlan(ParseDriver.scala:89)
  at org.apache.spark.sql.SparkSession.$anonfun$sql$2(SparkSession.scala:633)
  at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
  at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:632)
  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:827)
  at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:630)
  at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:671)
  ... 47 elided {code}
My workaround is to create a custom RelDataTypeSystem and override the precision of TIMESTAMP to -1.
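For reference, a minimal sketch of that workaround (class and constant names are only illustrative; how the type system is wired into the dialect/planner depends on the setup, so that part is omitted):
{code:java}
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rel.type.RelDataTypeSystemImpl;
import org.apache.calcite.sql.type.SqlTypeName;

/** Type system that leaves TIMESTAMP without an explicit default precision. */
public class NoTimestampPrecisionTypeSystem extends RelDataTypeSystemImpl {
  public static final RelDataTypeSystem INSTANCE =
      new NoTimestampPrecisionTypeSystem();

  @Override public int getDefaultPrecision(SqlTypeName typeName) {
    if (typeName == SqlTypeName.TIMESTAMP) {
      // -1 (RelDataType.PRECISION_NOT_SPECIFIED), so the type unparses as
      // plain TIMESTAMP instead of TIMESTAMP(0).
      return RelDataType.PRECISION_NOT_SPECIFIED;
    }
    return super.getDefaultPrecision(typeName);
  }
}
{code}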