miland-db commented on code in PR #47403:
URL: https://github.com/apache/spark/pull/47403#discussion_r1688196798


##########
sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala:
##########
@@ -650,14 +657,27 @@ class SparkSession private(
   private[sql] def sql(sqlText: String, args: Array[_], tracker: QueryPlanningTracker): DataFrame =
     withActive {
       val plan = tracker.measurePhase(QueryPlanningTracker.PARSING) {
-        val parsedPlan = sessionState.sqlParser.parsePlan(sqlText)
-        if (args.nonEmpty) {
-          PosParameterizedQuery(parsedPlan, args.map(lit(_).expr).toImmutableArraySeq)
-        } else {
-          parsedPlan
+        val parsedPlan = sessionState.sqlParser.parseScript(sqlText)
+        parsedPlan match {
+          case CompoundBody(Seq(singleStmtPlan: SingleStatement), label) if args.nonEmpty =>
+            CompoundBody(Seq(SingleStatement(
+              PosParameterizedQuery(
+                singleStmtPlan.parsedPlan, args.map(lit(_).expr).toImmutableArraySeq))), label)
+          case p =>
+            assert(args.isEmpty, "Named parameters are not supported for batch queries")
+            p
         }
       }
-      Dataset.ofRows(self, plan, tracker)
+
+      plan match {
+        case CompoundBody(Seq(singleStmtPlan: SingleStatement), _) =>
+          Dataset.ofRows(self, singleStmtPlan.parsedPlan, tracker)
+        case _ =>
+          // execute the plan directly if it is not a single statement
+          val lastRow = executeScript(plan).foldLeft(Array.empty[Row])((_, next) => next)
+          val attributes = DataTypeUtils.toAttributes(lastRow.head.schema)
+          Dataset.ofRows(self, LocalRelation.fromExternalRows(attributes, lastRow.toIndexedSeq))

Review Comment:
   What does the noop sink do? Is it `df.write.format("noop").mode("overwrite").save()`? Is it the same as doing `df.collect()`, except that it just throws away the result?
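
   For reference, here is a rough sketch of the two approaches I am comparing (just an illustration, assuming a `SparkSession` named `spark` is in scope):

   ```scala
   // Some arbitrary DataFrame.
   val df = spark.range(0, 1000).selectExpr("id", "id * 2 AS doubled")

   // "noop" sink: presumably executes the whole plan but discards the rows at the sink.
   df.write.format("noop").mode("overwrite").save()

   // collect(): executes the whole plan and materializes all rows on the driver.
   val rows = df.collect()
   ```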
   
   We have a hard time determining which statement is the last one. That is why we are doing it this way (we have to save the result of the last `dataframe`).
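
   To illustrate what I mean by saving the result of the last `dataframe`: the `foldLeft` in the diff above walks through all statement results and keeps only the last one. A minimal sketch of that idiom (assuming `executeScript` yields one `Array[Row]` per executed statement):

   ```scala
   import org.apache.spark.sql.Row

   // Stand-in for the per-statement results produced while executing a script.
   val perStatementResults: Iterator[Array[Row]] =
     Iterator(Array(Row(1)), Array(Row(2), Row(3)), Array(Row(4)))

   // Consumes the whole iterator (so every statement runs) but keeps only the
   // result of the last statement.
   val lastRows = perStatementResults.foldLeft(Array.empty[Row])((_, next) => next)
   // lastRows.toSeq == Seq(Row(4))
   ```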


