MaxGekk commented on code in PR #43334:
URL: https://github.com/apache/spark/pull/43334#discussion_r1373515871


##########
sql/api/src/main/scala/org/apache/spark/sql/catalyst/trees/QueryContexts.scala:
##########
@@ -128,6 +129,45 @@ case class SQLQueryContext(
     sqlText.isDefined && originStartIndex.isDefined && originStopIndex.isDefined &&
       originStartIndex.get >= 0 && originStopIndex.get < sqlText.get.length &&
       originStartIndex.get <= originStopIndex.get
+  }
+
+  override def callSite: String = throw new UnsupportedOperationException
+}
+
+case class DatasetQueryContext(
+    override val fragment: String,
+    override val callSite: String) extends QueryContext {
+  override val contextType = QueryContextType.Dataset
+
+  override def objectType: String = throw new UnsupportedOperationException
+  override def objectName: String = throw new UnsupportedOperationException
+  override def startIndex: Int = throw new UnsupportedOperationException
+  override def stopIndex: Int = throw new UnsupportedOperationException
+
+  override lazy val summary: String = {
+    val builder = new StringBuilder
+    builder ++= "== Dataset ==\n"
+    builder ++= "\""
+
+    builder ++= fragment
+    builder ++= "\""
+    builder ++= " was called from "
+    builder ++= callSite
+    builder += '\n'
+    builder.result()
+  }
+}
+
+object DatasetQueryContext {
+  def apply(elements: Array[StackTraceElement]): DatasetQueryContext = {
+    val methodName = elements(0).getMethodName
+    val code = if (methodName.length > 1 && methodName(0) == '$') {
+      methodName.substring(1)
+    } else {
+      methodName
+    }
+    val callSite = elements(1).toString

Review Comment:
   We assume that `elements` contains at least two elements. The assumption comes from the calling code in `withOrigin`:
   ```scala
     private[sql] def withOrigin[T](f: => T): T = {
   ...
         var i = 3
         while (i < st.length && sparkCode(st(i))) i += 1
   ```
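
   A defensive `require` would make that invariant explicit instead of leaving it implicit in the caller. A minimal sketch, assuming the tail of `apply` (cut off in the quoted diff) just constructs `DatasetQueryContext(code, callSite)`; the error message is only illustrative:
   ```scala
   object DatasetQueryContext {
     def apply(elements: Array[StackTraceElement]): DatasetQueryContext = {
       // Make the contract with withOrigin explicit: elements(0) and elements(1) are read below.
       require(elements.length >= 2,
         s"Expected at least two stack trace elements, got ${elements.length}")

       val methodName = elements(0).getMethodName
       // Drop the leading '$' that the Scala compiler prepends to some synthetic method names.
       val code = if (methodName.length > 1 && methodName(0) == '$') {
         methodName.substring(1)
       } else {
         methodName
       }
       val callSite = elements(1).toString
       // Assumed to be how the cut-off end of the diff builds the result.
       new DatasetQueryContext(code, callSite)
     }
   }
   ```
   Whether the guard is worth it, or whether a comment pointing back at `withOrigin` is enough, is up to you.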



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
