davidm-db commented on code in PR #46665:
URL: https://github.com/apache/spark/pull/46665#discussion_r1607347956


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala:
##########
@@ -116,6 +116,78 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging {
     }
   }
 
+  override def visitBatchOrSingleStatement(ctx: BatchOrSingleStatementContext): BatchBody = {
+    if (ctx.batchCompound() != null) {
+      visit(ctx.batchCompound()).asInstanceOf[BatchBody]
+    } else {
+      val logicalPlan = visitSingleStatement(ctx.singleStatement())
+      BatchBody(List(SparkStatementWithPlan(
+        parsedPlan = logicalPlan,
+        sourceStart = ctx.start.getStartIndex,
+        sourceEnd = ctx.stop.getStopIndex + 1)))
+    }
+  }
+
+  override def visitBatchCompound(ctx: BatchCompoundContext): BatchBody = {
+    visitBatchBody(ctx.batchBody(), allowDeclareAtTop = true)
+  }
+
+  private def visitBatchBody(ctx: BatchBodyContext, allowDeclareAtTop: Boolean): BatchBody = {
+    val buff = ListBuffer[BatchPlanStatement]()
+    for (i <- 0 until ctx.getChildCount) {
+      val child = visit(ctx.getChild(i))
+      child match {
+        case statement: BatchPlanStatement => buff += statement
+        case null => // When terminal nodes are visited (like SEMICOLON, EOF, etc.)

Review Comment:
   Improved - I didn't find a better way to do it, but I think it's clean like this.
   Any other approach I can think of would require matching the child before visiting and then matching again after visiting, which would turn out to be really messy.
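   Just to illustrate the point, below is a rough sketch (not from the PR) of what that alternative would look like. The ANTLR node types (TerminalNode, ParserRuleContext) are the standard ones; the other names simply mirror the diff above, and visitBatchBodyAlternative is a made-up name for comparison only:

   // Hypothetical alternative, sketched for comparison only (not proposed in the PR).
   private def visitBatchBodyAlternative(ctx: BatchBodyContext): BatchBody = {
     val buff = ListBuffer[BatchPlanStatement]()
     for (i <- 0 until ctx.getChildCount) {
       ctx.getChild(i) match {
         // First match: skip terminal tokens (SEMICOLON, EOF, etc.) before visiting.
         case _: TerminalNode => // skip
         case rule: ParserRuleContext =>
           // Second match: still needed to narrow the visit result to the expected type.
           visit(rule) match {
             case statement: BatchPlanStatement => buff += statement
             case other => throw new IllegalStateException(s"Unexpected node in batch body: $other")
           }
       }
     }
     BatchBody(buff.toList)
   }

   Matching on the result of visit, as the current version does, collapses those two matches into one.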



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

