This is an automated email from the ASF dual-hosted git repository.

liyuanjian pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e0a6af988df3 [SPARK-45082][DOC] Review and fix issues in API docs for 3.5.0
e0a6af988df3 is described below

commit e0a6af988df3f52e95d46ac4c333825d2940065f
Author: Yuanjian Li <yuanjian...@databricks.com>
AuthorDate: Tue Sep 5 12:45:36 2023 -0700

    [SPARK-45082][DOC] Review and fix issues in API docs for 3.5.0
    
    ### What changes were proposed in this pull request?
    
    Compare the 3.4 API doc with the 3.5 RC3 cut. Fix the following issues:
    
    - Remove classes/objects leaking into the public API docs (see the sketch below)
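
      A minimal sketch of the mechanism (the Helper name here is
      hypothetical): a package-qualified modifier such as private[spark]
      keeps a symbol callable from code inside the named package while
      dropping it from the generated API docs, which cover only public
      members:

          // Usable anywhere under org.apache.spark, but no longer public,
          // so the API doc generator omits it.
          private[spark] object Helper {
            def describe(): String = "internal helper"
          }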
    
    ### Why are the changes needed?
    Fix the issues in the Spark 3.5.0 release API docs.
    
    ### Does this PR introduce _any_ user-facing change?
    No, API doc changes only.
    
    ### How was this patch tested?
    Manually tested.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #42819 from xuanyuanking/SPARK-45082.
    
    Authored-by: Yuanjian Li <yuanjian...@databricks.com>
    Signed-off-by: Yuanjian Li <yuanjian...@databricks.com>
---
 .../scala/org/apache/spark/SparkBuildInfo.scala    |  2 +-
 .../org/apache/spark/util/SparkClassUtils.scala    |  4 +--
 .../apache/spark/util/SparkCollectionUtils.scala   |  4 +--
 .../org/apache/spark/util/SparkErrorUtils.scala    |  2 +-
 .../org/apache/spark/util/SparkSerDeUtils.scala    |  4 +--
 .../org/apache/spark/sql/avro/CustomDecimal.scala  |  4 +--
 .../org/apache/spark/util/StubClassLoader.scala    |  4 +--
 .../spark/sql/errors/CompilationErrors.scala       |  2 +-
 .../spark/sql/types/DataTypeExpression.scala       | 30 +++++++++++-----------
 .../org/apache/spark/sql/jdbc/JdbcDialects.scala   |  2 +-
 10 files changed, 29 insertions(+), 29 deletions(-)

diff --git a/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala b/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
index 23f671f9d764..ebc62460d231 100644
--- a/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
+++ b/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
@@ -18,7 +18,7 @@ package org.apache.spark
 
 import java.util.Properties
 
-object SparkBuildInfo {
+private[spark] object SparkBuildInfo {
 
   val (
     spark_version: String,
diff --git a/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala
index 679d546d04c9..5984eaee42e7 100644
--- a/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala
@@ -20,7 +20,7 @@ import java.util.Random
 
 import scala.util.Try
 
-trait SparkClassUtils {
+private[spark] trait SparkClassUtils {
   val random = new Random()
 
   def getSparkClassLoader: ClassLoader = getClass.getClassLoader
@@ -80,4 +80,4 @@ trait SparkClassUtils {
   }
 }
 
-object SparkClassUtils extends SparkClassUtils
+private[spark] object SparkClassUtils extends SparkClassUtils
diff --git a/common/utils/src/main/scala/org/apache/spark/util/SparkCollectionUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/SparkCollectionUtils.scala
index 7fecc9ccb664..be8282db31be 100644
--- a/common/utils/src/main/scala/org/apache/spark/util/SparkCollectionUtils.scala
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkCollectionUtils.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.catalyst.util
 
 import scala.collection.immutable
 
-trait SparkCollectionUtils {
+private[spark] trait SparkCollectionUtils {
   /**
    * Same function as `keys.zipWithIndex.toMap`, but has perf gain.
    */
@@ -34,4 +34,4 @@ trait SparkCollectionUtils {
   }
 }
 
-object SparkCollectionUtils extends SparkCollectionUtils
+private[spark] object SparkCollectionUtils extends SparkCollectionUtils
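
For context, the helper documented in the hunk above is a single-pass
replacement for the one-liner its doc comment names. The equivalent behavior,
for reference:

    val keys = Seq("a", "b", "c")
    val index: Map[String, Int] = keys.zipWithIndex.toMap
    // index == Map("a" -> 0, "b" -> 1, "c" -> 2)
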
diff --git a/common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala
index 97a07984a228..8194d1e42417 100644
--- a/common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala
@@ -90,4 +90,4 @@ private[spark] trait SparkErrorUtils extends Logging {
   }
 }
 
-object SparkErrorUtils extends SparkErrorUtils
+private[spark] object SparkErrorUtils extends SparkErrorUtils
diff --git a/common/utils/src/main/scala/org/apache/spark/util/SparkSerDeUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/SparkSerDeUtils.scala
index 9b6174c47bde..2cc14fea5f30 100644
--- a/common/utils/src/main/scala/org/apache/spark/util/SparkSerDeUtils.scala
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkSerDeUtils.scala
@@ -18,7 +18,7 @@ package org.apache.spark.util
 
 import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream, ObjectStreamClass}
 
-trait SparkSerDeUtils {
+private[spark] trait SparkSerDeUtils {
   /** Serialize an object using Java serialization */
   def serialize[T](o: T): Array[Byte] = {
     val bos = new ByteArrayOutputStream()
@@ -51,4 +51,4 @@ trait SparkSerDeUtils {
   }
 }
 
-object SparkSerDeUtils extends SparkSerDeUtils
+private[spark] object SparkSerDeUtils extends SparkSerDeUtils
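
For context, the serialize helper in the hunk above wraps plain Java
serialization. A minimal round-trip sketch, assuming the trait's matching
deserialize[T] counterpart (not shown in this diff) and a caller inside the
org.apache.spark package, which the new private[spark] modifier requires:

    import org.apache.spark.util.SparkSerDeUtils

    val bytes: Array[Byte] = SparkSerDeUtils.serialize(Seq(1, 2, 3))
    val restored: Seq[Int] = SparkSerDeUtils.deserialize[Seq[Int]](bytes)
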
diff --git a/connector/avro/src/main/java/org/apache/spark/sql/avro/CustomDecimal.scala b/connector/avro/src/main/java/org/apache/spark/sql/avro/CustomDecimal.scala
index d76f40c7635c..fab3d4493e34 100644
--- a/connector/avro/src/main/java/org/apache/spark/sql/avro/CustomDecimal.scala
+++ b/connector/avro/src/main/java/org/apache/spark/sql/avro/CustomDecimal.scala
@@ -22,14 +22,14 @@ import org.apache.avro.Schema
 
 import org.apache.spark.sql.types.DecimalType
 
-object CustomDecimal {
+private[spark] object CustomDecimal {
   val TYPE_NAME = "custom-decimal"
 }
 
 // A customized logical type, which will be registered to Avro. This logical type is similar to
 // Avro's builtin Decimal type, but is meant to be registered for long type. It indicates that
 // the long type should be converted to Spark's Decimal type, with provided precision and scale.
-private class CustomDecimal(schema: Schema) extends LogicalType(CustomDecimal.TYPE_NAME) {
+private[spark] class CustomDecimal(schema: Schema) extends LogicalType(CustomDecimal.TYPE_NAME) {
   val scale : Int = {
     val obj = schema.getObjectProp("scale")
     obj match {
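
For context, the comment above explains the intent: a long field carries the
"custom-decimal" logical type plus scale/precision properties. A sketch of
such a schema (the precision property and the concrete values are assumed by
analogy with the scale lookup shown above):

    import org.apache.avro.Schema

    val schema = new Schema.Parser().parse(
      """{"type": "long", "logicalType": "custom-decimal",
        |  "precision": 38, "scale": 9}""".stripMargin)
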
diff --git a/core/src/main/scala/org/apache/spark/util/StubClassLoader.scala b/core/src/main/scala/org/apache/spark/util/StubClassLoader.scala
index 8d903c2a3e40..ed58ccf1bcf1 100644
--- a/core/src/main/scala/org/apache/spark/util/StubClassLoader.scala
+++ b/core/src/main/scala/org/apache/spark/util/StubClassLoader.scala
@@ -28,7 +28,7 @@ import org.apache.spark.internal.Logging
  * whose capturing class contains unknown (and unneeded) classes. The lambda itself does not need
  * the class and therefor is safe to replace by a stub.
  */
-class StubClassLoader(parent: ClassLoader, shouldStub: String => Boolean)
+private[spark] class StubClassLoader(parent: ClassLoader, shouldStub: String => Boolean)
   extends ClassLoader(parent) with Logging {
   override def findClass(name: String): Class[_] = {
     if (!shouldStub(name)) {
@@ -40,7 +40,7 @@ class StubClassLoader(parent: ClassLoader, shouldStub: String => Boolean)
   }
 }
 
-object StubClassLoader {
+private[spark] object StubClassLoader {
   def apply(parent: ClassLoader, binaryName: Seq[String]): StubClassLoader = {
     new StubClassLoader(parent, name => binaryName.exists(p => name.startsWith(p)))
   }
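
A sketch of how the factory above is used (the package prefix is
illustrative): any class whose binary name starts with one of the given
prefixes and cannot be found by the parent loader resolves to a generated
stub instead of failing with ClassNotFoundException.

    val loader = StubClassLoader(
      getClass.getClassLoader,
      binaryName = Seq("com.example.clientonly"))
    val stubbed = loader.loadClass("com.example.clientonly.CapturedHelper")
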
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
index deae1198d9cb..7c0b3c6cf308 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
@@ -51,4 +51,4 @@ private[sql] trait CompilationErrors extends DataTypeErrorsBase {
   }
 }
 
-object CompilationErrors extends CompilationErrors
+private[sql] object CompilationErrors extends CompilationErrors
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeExpression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeExpression.scala
index 1b74419a4af7..026272a0f2d8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeExpression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeExpression.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.types
 
 import org.apache.spark.sql.catalyst.expressions.Expression
 
-abstract class DataTypeExpression(val dataType: DataType) {
+private[sql] abstract class DataTypeExpression(val dataType: DataType) {
   /**
    * Enables matching against DataType for expressions:
    * {{{
@@ -29,18 +29,18 @@ abstract class DataTypeExpression(val dataType: DataType) {
   private[sql] def unapply(e: Expression): Boolean = e.dataType == dataType
 }
 
-case object BooleanTypeExpression extends DataTypeExpression(BooleanType)
-case object StringTypeExpression extends DataTypeExpression(StringType)
-case object TimestampTypeExpression extends DataTypeExpression(TimestampType)
-case object DateTypeExpression extends DataTypeExpression(DateType)
-case object ByteTypeExpression extends DataTypeExpression(ByteType)
-case object ShortTypeExpression extends DataTypeExpression(ShortType)
-case object IntegerTypeExpression extends DataTypeExpression(IntegerType)
-case object LongTypeExpression extends DataTypeExpression(LongType)
-case object DoubleTypeExpression extends DataTypeExpression(DoubleType)
-case object FloatTypeExpression extends DataTypeExpression(FloatType)
+private[sql] case object BooleanTypeExpression extends DataTypeExpression(BooleanType)
+private[sql] case object StringTypeExpression extends DataTypeExpression(StringType)
+private[sql] case object TimestampTypeExpression extends DataTypeExpression(TimestampType)
+private[sql] case object DateTypeExpression extends DataTypeExpression(DateType)
+private[sql] case object ByteTypeExpression extends DataTypeExpression(ByteType)
+private[sql] case object ShortTypeExpression extends DataTypeExpression(ShortType)
+private[sql] case object IntegerTypeExpression extends DataTypeExpression(IntegerType)
+private[sql] case object LongTypeExpression extends DataTypeExpression(LongType)
+private[sql] case object DoubleTypeExpression extends DataTypeExpression(DoubleType)
+private[sql] case object FloatTypeExpression extends DataTypeExpression(FloatType)
 
-object NumericTypeExpression {
+private[sql] object NumericTypeExpression {
   /**
    * Enables matching against NumericType for expressions:
    * {{{
@@ -53,7 +53,7 @@ object NumericTypeExpression {
   }
 }
 
-object IntegralTypeExpression {
+private[sql] object IntegralTypeExpression {
   /**
    * Enables matching against IntegralType for expressions:
    * {{{
@@ -66,12 +66,12 @@ object IntegralTypeExpression {
   }
 }
 
-object AnyTimestampTypeExpression {
+private[sql] object AnyTimestampTypeExpression {
   def unapply(e: Expression): Boolean =
     e.dataType.isInstanceOf[TimestampType] || e.dataType.isInstanceOf[TimestampNTZType]
 }
 
-object DecimalExpression {
+private[sql] object DecimalExpression {
   def unapply(e: Expression): Option[(Int, Int)] = e.dataType match {
     case t: DecimalType => Some((t.precision, t.scale))
     case _ => None
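
The extractors above exist so analyzer code can match on an expression's data
type directly. A minimal sketch (an Expression value e is assumed in scope;
with the new modifiers this only compiles under org.apache.spark.sql):

    val description = e match {
      case IntegerTypeExpression() => "int"
      case DecimalExpression(precision, scale) => s"decimal($precision, $scale)"
      case _ => "other"
    }
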
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
index fac3cc60d952..2f5e813dcb61 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
@@ -719,6 +719,6 @@ object JdbcDialects {
 /**
  * NOOP dialect object, always returning the neutral element.
  */
-object NoopDialect extends JdbcDialect {
+private[spark] object NoopDialect extends JdbcDialect {
   override def canHandle(url : String): Boolean = true
 }
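
For context, NoopDialect is what JdbcDialects.get falls back to when no
registered dialect claims a URL, which is why its canHandle accepts
everything. For contrast, a sketch of a registered dialect (the MyDialect
name and URL prefix are illustrative):

    import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcDialects}

    object MyDialect extends JdbcDialect {
      override def canHandle(url: String): Boolean = url.startsWith("jdbc:mydb:")
    }
    JdbcDialects.registerDialect(MyDialect)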

