This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new 61d22b6f313 [SPARK-39371][DOCS][CORE] Review and fix issues in Scala/Java API docs of Core module
61d22b6f313 is described below

commit 61d22b6f313c20de1b65a595e88b6f5bd9595299
Author: Yuanjian Li <yuanjian...@databricks.com>
AuthorDate: Fri Jun 3 17:49:01 2022 +0900

    [SPARK-39371][DOCS][CORE] Review and fix issues in Scala/Java API docs of Core module
    
    Compare the 3.3.0 API doc with the latest release version 3.2.1. Fix the following issues:
    
    * Add missing Since annotation for new APIs
    * Remove the leaking class/object in API doc
    
    Improve API docs
    
    No
    
    Existing UT
    
    Closes #36757 from xuanyuanking/doc.
    
    Authored-by: Yuanjian Li <yuanjian...@databricks.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
    (cherry picked from commit 1fbb1d46feb992c3441f2a4f2c5d5179da465d4b)
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala       | 2 +-
 .../spark/storage/BlockSavedOnDecommissionedBlockManagerException.scala | 2 +-
 launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java  | 2 +-
 launcher/src/main/java/org/apache/spark/launcher/InProcessLauncher.java | 2 +-
 launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java | 2 ++
 5 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala b/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
index aecef8ed2d6..1da02884462 100644
--- a/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
+++ b/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
@@ -30,7 +30,7 @@ import org.apache.spark.storage.{BlockId, BlockManagerId, BlockNotFoundException
 /**
  * Object for grouping error messages from (most) exceptions thrown during query execution.
  */
-object SparkCoreErrors {
+private[spark] object SparkCoreErrors {
   def unexpectedPy4JServerError(other: Object): Throwable = {
     new RuntimeException(s"Unexpected Py4J server ${other.getClass}")
   }
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockSavedOnDecommissionedBlockManagerException.scala b/core/src/main/scala/org/apache/spark/storage/BlockSavedOnDecommissionedBlockManagerException.scala
index 4684d9c6775..21a022864bb 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockSavedOnDecommissionedBlockManagerException.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockSavedOnDecommissionedBlockManagerException.scala
@@ -17,5 +17,5 @@
 
 package org.apache.spark.storage
 
-class BlockSavedOnDecommissionedBlockManagerException(blockId: BlockId)
+private[spark] class BlockSavedOnDecommissionedBlockManagerException(blockId: BlockId)
   extends Exception(s"Block $blockId cannot be saved on decommissioned executor")
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java
index 8a1256f7341..80b71e53075 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java
@@ -26,7 +26,7 @@ import static org.apache.spark.launcher.CommandBuilderUtils.*;
 /**
  * Base class for launcher implementations.
  *
- * @since Spark 2.3.0
+ * @since 2.3.0
  */
 public abstract class AbstractLauncher<T extends AbstractLauncher<T>> {
 
diff --git a/launcher/src/main/java/org/apache/spark/launcher/InProcessLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/InProcessLauncher.java
index 688e1f763c2..6867518b321 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/InProcessLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/InProcessLauncher.java
@@ -37,7 +37,7 @@ import java.util.logging.Logger;
  * driver memory or configs which modify the driver's class path) do not take effect. Logging
  * configuration is also inherited from the parent application.
  *
- * @since Spark 2.3.0
+ * @since 2.3.0
  */
 public class InProcessLauncher extends AbstractLauncher<InProcessLauncher> {
 
diff --git a/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java b/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
index c7d3df99c6e..978466cd77c 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
@@ -21,6 +21,8 @@ package org.apache.spark.launcher;
  * This helper class is used to place the all `--add-opens` options
  * required by Spark when using Java 17. `DEFAULT_MODULE_OPTIONS` has added
  * `-XX:+IgnoreUnrecognizedVMOptions` to be compatible with Java 8 and Java 11.
+ *
+ * @since 3.3.0
  */
 public class JavaModuleOptions {
     private static final String[] DEFAULT_MODULE_OPTIONS = {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to