This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f985e3e84a2 [SPARK-44255][SQL] Relocate StorageLevel to common/utils
f985e3e84a2 is described below

commit f985e3e84a23ab5a83842047408e3fd92887447a
Author: Rui Wang <rui.w...@databricks.com>
AuthorDate: Sat Jul 1 12:10:22 2023 +0300

    [SPARK-44255][SQL] Relocate StorageLevel to common/utils
    
    ### What changes were proposed in this pull request?
    
    Relocate `StorageLevel` to `common/utils`.
    
    ### Why are the changes needed?
    
    The Scala client needs `StorageLevel`, so it is relocated to
    `common/utils` where it can be shared.
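
    For example, once `StorageLevel` lives in `common/utils`, a Scala client
    can accept it in its API without depending on `core`. A minimal,
    illustrative sketch (the `persist` call mirrors the existing public
    `Dataset` API; the session setup is assumed, not part of this change):

    ```scala
    import org.apache.spark.sql.SparkSession
    import org.apache.spark.storage.StorageLevel

    val spark = SparkSession.builder().getOrCreate()
    val df = spark.range(1000).toDF("id")

    // persist takes a StorageLevel, so the class must be on the client classpath.
    df.persist(StorageLevel.MEMORY_AND_DISK)
    ```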
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Existing tests
    
    Closes #41797 from amaliujia/move_storage_level_to_common_utils.
    
    Authored-by: Rui Wang <rui.w...@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../java/org/apache/spark/memory/MemoryMode.java   |  0
 .../org/apache/spark/storage/StorageLevel.scala    |  6 ++---
 .../org/apache/spark/util/SparkErrorUtils.scala    | 30 +++++++++++++++++-----
 .../main/scala/org/apache/spark/util/Utils.scala   | 11 +-------
 project/MimaExcludes.scala                         |  4 +++
 5 files changed, 32 insertions(+), 19 deletions(-)

diff --git a/core/src/main/java/org/apache/spark/memory/MemoryMode.java b/common/utils/src/main/java/org/apache/spark/memory/MemoryMode.java
similarity index 100%
copy from core/src/main/java/org/apache/spark/memory/MemoryMode.java
copy to common/utils/src/main/java/org/apache/spark/memory/MemoryMode.java
diff --git a/core/src/main/scala/org/apache/spark/storage/StorageLevel.scala b/common/utils/src/main/scala/org/apache/spark/storage/StorageLevel.scala
similarity index 97%
rename from core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
rename to common/utils/src/main/scala/org/apache/spark/storage/StorageLevel.scala
index 4a2b705e069..73bc53dab89 100644
--- a/core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
+++ b/common/utils/src/main/scala/org/apache/spark/storage/StorageLevel.scala
@@ -22,7 +22,7 @@ import java.util.concurrent.ConcurrentHashMap
 
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.memory.MemoryMode
-import org.apache.spark.util.Utils
+import org.apache.spark.util.SparkErrorUtils
 
 /**
  * :: DeveloperApi ::
@@ -98,12 +98,12 @@ class StorageLevel private(
     ret
   }
 
-  override def writeExternal(out: ObjectOutput): Unit = Utils.tryOrIOException {
+  override def writeExternal(out: ObjectOutput): Unit = SparkErrorUtils.tryOrIOException {
     out.writeByte(toInt)
     out.writeByte(_replication)
   }
 
-  override def readExternal(in: ObjectInput): Unit = Utils.tryOrIOException {
+  override def readExternal(in: ObjectInput): Unit = SparkErrorUtils.tryOrIOException {
     val flags = in.readByte()
     _useDisk = (flags & 8) != 0
     _useMemory = (flags & 4) != 0
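
For reference, the flag byte round-tripped by `writeExternal`/`readExternal` above is
produced by `toInt` (whose tail appears in the first hunk). A hedged sketch of the bit
layout, inferred from the masks in this hunk (the off-heap and deserialized masks, 2
and 1, are assumptions following the same pattern in the existing class):

```scala
// Sketch only, not part of this diff: how the four storage flags are
// assumed to pack into the single byte written by writeExternal.
def packFlags(useDisk: Boolean, useMemory: Boolean,
              useOffHeap: Boolean, deserialized: Boolean): Int = {
  var ret = 0
  if (useDisk) ret |= 8        // bit 3, checked by (flags & 8)
  if (useMemory) ret |= 4      // bit 2, checked by (flags & 4)
  if (useOffHeap) ret |= 2     // bit 1 (assumed)
  if (deserialized) ret |= 1   // bit 0 (assumed)
  ret
}
```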
diff --git a/core/src/main/java/org/apache/spark/memory/MemoryMode.java b/common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala
similarity index 50%
rename from core/src/main/java/org/apache/spark/memory/MemoryMode.java
rename to common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala
index 3a5e72d8aae..8e4de01885e 100644
--- a/core/src/main/java/org/apache/spark/memory/MemoryMode.java
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala
@@ -14,13 +14,31 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.spark.util
 
-package org.apache.spark.memory;
+import java.io.IOException
 
-import org.apache.spark.annotation.Private;
+import scala.util.control.NonFatal
 
-@Private
-public enum MemoryMode {
-  ON_HEAP,
-  OFF_HEAP
+import org.apache.spark.internal.Logging
+
+object SparkErrorUtils extends Logging {
+  /**
+   * Execute a block of code that returns a value, re-throwing any non-fatal uncaught
+   * exceptions as IOException. This is used when implementing Externalizable and Serializable's
+   * read and write methods, since Java's serializer will not report non-IOExceptions properly;
+   * see SPARK-4080 for more context.
+   */
+  def tryOrIOException[T](block: => T): T = {
+    try {
+      block
+    } catch {
+      case e: IOException =>
+        logError("Exception encountered", e)
+        throw e
+      case NonFatal(e) =>
+        logError("Exception encountered", e)
+        throw new IOException(e)
+    }
+  }
 }
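
To make the contract documented above concrete, here is a minimal, hypothetical
`Externalizable` implementation wrapped in the relocated helper (the `Point` class
is invented for illustration; only `SparkErrorUtils.tryOrIOException` comes from
this diff):

```scala
import java.io.{Externalizable, ObjectInput, ObjectOutput}

import org.apache.spark.util.SparkErrorUtils

// Any non-fatal error thrown while reading or writing is logged and
// rethrown as an IOException, which Java serialization reports properly
// (see SPARK-4080).
class Point(var x: Int, var y: Int) extends Externalizable {
  def this() = this(0, 0)  // no-arg constructor required by Externalizable

  override def writeExternal(out: ObjectOutput): Unit = SparkErrorUtils.tryOrIOException {
    out.writeInt(x)
    out.writeInt(y)
  }

  override def readExternal(in: ObjectInput): Unit = SparkErrorUtils.tryOrIOException {
    x = in.readInt()
    y = in.readInt()
  }
}
```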
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index ada0cffd2b0..60895c791b5 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1449,16 +1449,7 @@ private[spark] object Utils extends Logging with SparkClassUtils {
    * see SPARK-4080 for more context.
    */
   def tryOrIOException[T](block: => T): T = {
-    try {
-      block
-    } catch {
-      case e: IOException =>
-        logError("Exception encountered", e)
-        throw e
-      case NonFatal(e) =>
-        logError("Exception encountered", e)
-        throw new IOException(e)
-    }
+    SparkErrorUtils.tryOrIOException(block)
   }
 
   /** Executes the given block. Log non-fatal errors if any, and only throw fatal errors */
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 6ca32e9d9e8..2e70fd9225c 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -100,6 +100,10 @@ object MimaExcludes {
     // SPARK-44104: shaded protobuf code and Apis with parameters relocated
     ProblemFilters.exclude[Problem]("org.sparkproject.spark_protobuf.protobuf.*"),
     ProblemFilters.exclude[Problem]("org.apache.spark.sql.protobuf.utils.SchemaConverters.*"),
+
+    // SPARK-44255: Relocate StorageLevel to common/utils
+    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.storage.StorageLevel"),
+    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.storage.StorageLevel$"),
 
     (problem: Problem) => problem match {
       case MissingClassProblem(cls) => !cls.fullName.startsWith("org.sparkproject.jpmml") &&


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
