hvanhovell commented on code in PR #40168:
URL: https://github.com/apache/spark/pull/40168#discussion_r1117852174


##########
connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala:
##########
@@ -69,30 +69,131 @@ class CompatibilitySuite extends AnyFunSuite { // scalastyle:ignore funsuite
     val mima = new MiMaLib(Seq(clientJar, sqlJar))
     val allProblems = mima.collectProblems(sqlJar, clientJar, List.empty)
     val includedRules = Seq(
-      IncludeByName("org.apache.spark.sql.Column"),
-      IncludeByName("org.apache.spark.sql.Column$"),
-      IncludeByName("org.apache.spark.sql.Dataset"),
-      // TODO(SPARK-42175) Add the Dataset object definition
-      // IncludeByName("org.apache.spark.sql.Dataset$"),
-      IncludeByName("org.apache.spark.sql.DataFrame"),
+      IncludeByName("org.apache.spark.sql.Column.*"),
+      IncludeByName("org.apache.spark.sql.DataFrame.*"),
       IncludeByName("org.apache.spark.sql.DataFrameReader.*"),
       IncludeByName("org.apache.spark.sql.DataFrameWriter.*"),
       IncludeByName("org.apache.spark.sql.DataFrameWriterV2.*"),
-      IncludeByName("org.apache.spark.sql.SparkSession"),
-      IncludeByName("org.apache.spark.sql.SparkSession$")) ++ includeImplementedMethods(clientJar)
+      IncludeByName("org.apache.spark.sql.Dataset.*"),
+      IncludeByName("org.apache.spark.sql.functions.*"),
+      IncludeByName("org.apache.spark.sql.RelationalGroupedDataset.*"),
+      IncludeByName("org.apache.spark.sql.SparkSession.*"))
     val excludeRules = Seq(
       // Filter unsupported rules:
-      // Two sql overloading methods are marked experimental in the API and skipped in the client.
-      ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.sql"),
-      // Deprecated json methods and RDD related methods are skipped in the client.
+      // Note when muting errors for a method, checks on all overloading methods are also muted.
+
+      // Skip all shaded dependencies and proto files in the client.
+      ProblemFilters.exclude[Problem]("org.sparkproject.*"),
+      ProblemFilters.exclude[Problem]("org.apache.spark.connect.proto.*"),
+
+      // DataFrame Reader & Writer
       
ProblemFilters.exclude[Problem]("org.apache.spark.sql.DataFrameReader.json"),
       
ProblemFilters.exclude[Problem]("org.apache.spark.sql.DataFrameReader.csv"),
       
ProblemFilters.exclude[Problem]("org.apache.spark.sql.DataFrameReader.jdbc"),
       
ProblemFilters.exclude[Problem]("org.apache.spark.sql.DataFrameWriter.jdbc"),
-      // Skip all shaded dependencies in the client.
-      ProblemFilters.exclude[Problem]("org.sparkproject.*"),
-      ProblemFilters.exclude[Problem]("org.apache.spark.connect.proto.*"),
-      // Disable Range until we support typed APIs
+
+      // Dataset
+      ProblemFilters.exclude[Problem]("org.apache.spark.sql.Dataset.ofRows"),
+      
ProblemFilters.exclude[Problem]("org.apache.spark.sql.Dataset.DATASET_ID_TAG"),
+      
ProblemFilters.exclude[Problem]("org.apache.spark.sql.Dataset.COL_POS_KEY"),
+      
ProblemFilters.exclude[Problem]("org.apache.spark.sql.Dataset.DATASET_ID_KEY"),
+      ProblemFilters.exclude[Problem]("org.apache.spark.sql.Dataset.curId"),
+      ProblemFilters.exclude[Problem]("org.apache.spark.sql.Dataset.groupBy"),

Review Comment:
   Yeah we are missing a signature.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to