Github user nblintao commented on a diff in the pull request: https://github.com/apache/spark/pull/14158#discussion_r119022541 --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala --- @@ -65,13 +65,29 @@ abstract class AbstractSqlParser extends ParserInterface with Logging { } /** Creates LogicalPlan for a given SQL string. */ - override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { parser => - astBuilder.visitSingleStatement(parser.singleStatement()) match { - case plan: LogicalPlan => plan - case _ => - val position = Origin(None, None) - throw new ParseException(Option(sqlText), "Unsupported SQL statement", position, position) + override def parsePlan(sqlText: String): LogicalPlan = { + val logicalPlan = parse(sqlText) { parser => + astBuilder.visitSingleStatement(parser.singleStatement()) match { + case plan: LogicalPlan => plan + case _ => + val position = Origin(None, None) + throw new ParseException(Option(sqlText), "Unsupported SQL statement", position, position) + } + } + // Record the original sql text in the top logical plan for checking in the web UI. + // Truncate the text to avoid downing browsers or web UI servers by running out of memory. + val maxLength = 1000 + val suffix = " ... (truncated)" + val truncateLength = maxLength - suffix.length + val truncatedSqlText = { + if (sqlText.length <= maxLength) { + sqlText + } else { + sqlText.substring(0, truncateLength) + suffix + } } + logicalPlan.sqlText = Some(truncatedSqlText) + logicalPlan --- End diff -- Thanks for pointing it out. I agree that `QueryExecution` is a better place for original SQL text. I have updated my code accordingly. Could you please have a look?
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastructure@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org