This is an automated email from the ASF dual-hosted git repository.

baunsgaard pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/systemds.git

commit 1abff839d4e565268a1b557a3987e7f7845cd362
Author: baunsgaard <[email protected]>
AuthorDate: Fri Dec 9 11:49:13 2022 +0100

    [SYSTEMDS-3229] Spark unsafe warning suppression
    
    This commit adds a small workaround to the Spark execution context
    creation that allows Spark's Unsafe-based code to reflectively access
    JDK-internal packages on Java 11.
    The workaround has been verified to work in cluster settings, and it
    removes the warnings Java prints about illegal access to Unsafe.
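    
    As a side note, the following is a minimal standalone sketch of the
    same Module.addOpens pattern (the class name OpenJdkInternals is
    illustrative only, not part of this commit; the call succeeds on
    Java 9-16, where --illegal-access=permit is the default and java.base
    already opens these packages to the unnamed module, and throws
    IllegalCallerException on Java 17+):
    
        import java.lang.annotation.Target;
        
        public class OpenJdkInternals {
            public static void main(String[] args) {
                // Target lives in java.base, so its Module handle is
                // the java.base module itself.
                Module javaBase = Target.class.getModule();
                // Classpath code belongs to the unnamed module of the
                // application class loader.
                Module caller = OpenJdkInternals.class.getModule();
                // Open java.nio for deep reflection from the caller's
                // module; subsequent setAccessible calls on java.nio
                // types are then legal and trigger no warning.
                javaBase.addOpens("java.nio", caller);
            }
        }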
---
 .../controlprogram/context/SparkExecutionContext.java    | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

diff --git a/src/main/java/org/apache/sysds/runtime/controlprogram/context/SparkExecutionContext.java b/src/main/java/org/apache/sysds/runtime/controlprogram/context/SparkExecutionContext.java
index df8f84c6b9..4118eee170 100644
--- a/src/main/java/org/apache/sysds/runtime/controlprogram/context/SparkExecutionContext.java
+++ b/src/main/java/org/apache/sysds/runtime/controlprogram/context/SparkExecutionContext.java
@@ -22,6 +22,7 @@ package org.apache.sysds.runtime.controlprogram.context;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.PrintStream;
+import java.lang.annotation.Target;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -203,11 +204,21 @@ public class SparkExecutionContext extends ExecutionContext
                return LAZY_SPARKCTX_CREATION;
        }
 
-       private synchronized static void initSparkContext()
-       {
+       public static void handleIllegalReflectiveAccessSpark(){
+               Module pf = org.apache.spark.unsafe.Platform.class.getModule();
+               Target.class.getModule().addOpens("java.nio", pf);
+
+               Module se = org.apache.spark.util.SizeEstimator.class.getModule();
+               Target.class.getModule().addOpens("java.util", se);
+               Target.class.getModule().addOpens("java.lang", se);
+               Target.class.getModule().addOpens("java.util.concurrent", se);
+       }
+
+       private synchronized static void initSparkContext(){
                //check for redundant spark context init
                if( _spctx != null )
                        return;
+               handleIllegalReflectiveAccessSpark();
 
                long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
 
@@ -1780,6 +1791,7 @@ public class SparkExecutionContext extends ExecutionContext
 
                public SparkClusterConfig()
                {
+                       handleIllegalReflectiveAccessSpark();
                        SparkConf sconf = createSystemDSSparkConf();
                        _confOnly = true;
 

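As a usage note, the same packages can instead be opened at JVM startup,
which is the route that keeps working on Java 17+, where the programmatic
call above is rejected with an IllegalCallerException. Assuming the Spark
classes are loaded from the classpath (i.e., from the unnamed module),
flags along these lines are equivalent:

    --add-opens=java.base/java.nio=ALL-UNNAMED
    --add-opens=java.base/java.lang=ALL-UNNAMED
    --add-opens=java.base/java.util=ALL-UNNAMED
    --add-opens=java.base/java.util.concurrent=ALL-UNNAMED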