This is an automated email from the ASF dual-hosted git repository.

hvanhovell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f43ecd017ee [SPARK-43428] Move some class utils to common/utils
f43ecd017ee is described below

commit f43ecd017eec8cefd25e33dd3d4d33b625955a07
Author: Rui Wang <rui.w...@databricks.com>
AuthorDate: Tue May 9 21:51:24 2023 -0400

    [SPARK-43428] Move some class utils to common/utils
    
    ### What changes were proposed in this pull request?
    
    This PR moves some commonly used class loader/reflection utils to common/utils.
    
    ### Why are the changes needed?
    
    Reduce the required dependency on Spark core.
    
    ### Does this PR introduce _any_ user-facing change?
    
    NO
    
    ### How was this patch tested?
    
    Existing UT
    
    Closes #41109 from amaliujia/move_some_utils.
    
    Authored-by: Rui Wang <rui.w...@databricks.com>
    Signed-off-by: Herman van Hovell <her...@databricks.com>
---
 .../org/apache/spark/SparkThrowableHelper.scala    |  3 +-
 .../org/apache/spark/util/SparkClassUtils.scala    | 42 ++++++++++++++++++++++
 .../main/scala/org/apache/spark/util/Utils.scala   | 12 ++-----
 3 files changed, 47 insertions(+), 10 deletions(-)

diff --git a/common/utils/src/main/scala/org/apache/spark/SparkThrowableHelper.scala b/common/utils/src/main/scala/org/apache/spark/SparkThrowableHelper.scala
index a504c25456c..5106460d145 100644
--- a/common/utils/src/main/scala/org/apache/spark/SparkThrowableHelper.scala
+++ b/common/utils/src/main/scala/org/apache/spark/SparkThrowableHelper.scala
@@ -22,6 +22,7 @@ import scala.collection.JavaConverters._
 import com.fasterxml.jackson.core.util.MinimalPrettyPrinter
 
 import org.apache.spark.util.JsonUtils.toJsonString
+import org.apache.spark.util.SparkClassUtils
 
 private[spark] object ErrorMessageFormat extends Enumeration {
   val PRETTY, MINIMAL, STANDARD = Value
@@ -33,7 +34,7 @@ private[spark] object ErrorMessageFormat extends Enumeration {
  */
 private[spark] object SparkThrowableHelper {
   val errorReader = new ErrorClassesJsonReader(
-    Seq(getClass.getClassLoader.getResource("error/error-classes.json")))
+    Seq(SparkClassUtils.getSparkClassLoader.getResource("error/error-classes.json")))
 
   def getMessage(
       errorClass: String,
diff --git a/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala
new file mode 100644
index 00000000000..a1fd9027946
--- /dev/null
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.util
+
+object SparkClassUtils {
+  def getSparkClassLoader: ClassLoader = getClass.getClassLoader
+
+  def getContextOrSparkClassLoader: ClassLoader =
+    Option(Thread.currentThread().getContextClassLoader).getOrElse(getSparkClassLoader)
+
+  // scalastyle:off classforname
+  /**
+   * Preferred alternative to Class.forName(className), as well as
+   * Class.forName(className, initialize, loader) with current thread's ContextClassLoader.
+   */
+  def classForName[C](
+      className: String,
+      initialize: Boolean = true,
+      noSparkClassLoader: Boolean = false): Class[C] = {
+    if (!noSparkClassLoader) {
+      Class.forName(className, initialize, getContextOrSparkClassLoader).asInstanceOf[Class[C]]
+    } else {
+      Class.forName(className, initialize, Thread.currentThread().getContextClassLoader).
+        asInstanceOf[Class[C]]
+    }
+    // scalastyle:on classforname
+  }
+}
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 5c5ada96f98..f150569a992 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -197,7 +197,7 @@ private[spark] object Utils extends Logging {
   /**
    * Get the ClassLoader which loaded Spark.
    */
-  def getSparkClassLoader: ClassLoader = getClass.getClassLoader
+  def getSparkClassLoader: ClassLoader = SparkClassUtils.getSparkClassLoader
 
   /**
   * Get the Context ClassLoader on this thread or, if not present, the ClassLoader that
@@ -206,8 +206,7 @@ private[spark] object Utils extends Logging {
   * This should be used whenever passing a ClassLoader to Class.ForName or finding the currently
    * active loader when setting up ClassLoader delegation chains.
    */
-  def getContextOrSparkClassLoader: ClassLoader =
-    Option(Thread.currentThread().getContextClassLoader).getOrElse(getSparkClassLoader)
+  def getContextOrSparkClassLoader: ClassLoader = SparkClassUtils.getContextOrSparkClassLoader
 
  /** Determines whether the provided class is loadable in the current thread. */
   def classIsLoadable(clazz: String): Boolean = {
@@ -223,12 +222,7 @@ private[spark] object Utils extends Logging {
       className: String,
       initialize: Boolean = true,
       noSparkClassLoader: Boolean = false): Class[C] = {
-    if (!noSparkClassLoader) {
-    Class.forName(className, initialize, getContextOrSparkClassLoader).asInstanceOf[Class[C]]
-    } else {
-    Class.forName(className, initialize, Thread.currentThread().getContextClassLoader).
-        asInstanceOf[Class[C]]
-    }
+    SparkClassUtils.classForName(className, initialize, noSparkClassLoader);
     // scalastyle:on classforname
   }
 


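For context, not part of the commit itself: a minimal sketch of how code that depends only on common/utils might call the relocated helpers. The demo object below is hypothetical, and the class name passed to classForName (the new utility itself) is chosen purely for illustration.

    import org.apache.spark.util.SparkClassUtils

    object SparkClassUtilsDemo {
      def main(args: Array[String]): Unit = {
        // ClassLoader that loaded the Spark (common/utils) classes themselves.
        val sparkLoader: ClassLoader = SparkClassUtils.getSparkClassLoader

        // Current thread's context ClassLoader, falling back to the Spark loader.
        val loader: ClassLoader = SparkClassUtils.getContextOrSparkClassLoader

        // Load a class by name through the context-or-Spark loader without
        // initializing it; the class name here is purely illustrative.
        val clazz = SparkClassUtils.classForName[AnyRef](
          "org.apache.spark.util.SparkClassUtils", initialize = false)

        println(s"Spark loader: $sparkLoader, resolved loader: $loader, loaded: ${clazz.getName}")
      }
    }

Utils.getSparkClassLoader, Utils.getContextOrSparkClassLoader and Utils.classForName in core now delegate to these implementations, so existing callers keep the same behavior.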