[ 
https://issues.apache.org/jira/browse/SPARK-24728?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

ant_nebula updated SPARK-24728:
-------------------------------
    Description: 
My realtime platform, built on Spark, lets users write both SQL and Scala in a JSP page.

To support the Scala coding part, my solution is:

{code:none}
export SPARK_DIST_CLASSPATH=/data/xx/my-driver-jar-with-dependencies.jar

--conf spark.repl.class.outputDir=/data/xx/myclasss/
{code}

SPARK_DIST_CLASSPATH puts my driver jar on the classpath, and spark.repl.class.outputDir is the directory the interpreter writes compiled classes to, which executors then load through org.apache.spark.repl.ExecutorClassLoader.

On the driver I build an interpreter that compiles into that directory:

{code:scala}
import scala.tools.nsc.GenericRunnerSettings
import scala.tools.nsc.interpreter.IMain

val flusher = new java.io.PrintWriter(System.err)
val interpreter = {
  val interpArguments = List(
    "-Yrepl-class-based",
    "-Yrepl-outdir", compileClassPath  // the spark.repl.class.outputDir directory above
  )
  val settings = new GenericRunnerSettings(println _)
  settings.embeddedDefaults(this.getClass.getClassLoader)
  settings.usejavacp.value = true
  settings.processArguments(interpArguments, true)
  new IMain(settings, flusher)
}
interpreter.setContextClassLoader()
ExecutorContext.interpreter = interpreter
{code}
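
ExecutorContext is not shown here; it is just a small holder object, roughly like this (a sketch only, with field names inferred from the calls in this description):

{code:scala}
import org.apache.spark.sql.SparkSession
import scala.tools.nsc.interpreter.IMain

// Rough sketch: a holder matching the usages ExecutorContext.interpreter
// and ExecutorContext.spark in this description.
object ExecutorContext {
  var interpreter: IMain = _
  var spark: SparkSession = _
}
{code}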

 

At request time I go through a small cached factory and invoke the compiled code:

{code:scala}
val apiCode = InterpretCodeFactory.interpret(taskId, updateTime, codeFromJsp)
apiCode.sql(ExecutorContext.spark, fromTable, cacheTable)
{code}
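
codeFromJsp is whatever Scala the user typed in the page. Inside the generated sql method (see doGenCode below), spark, fromTable and cacheTable are in scope, so a snippet could look like this (purely illustrative):

{code:scala}
// Illustrative user snippet: aggregate the input view and register the
// result under the requested cache table name.
spark.sql(s"SELECT appid, count(1) AS cnt FROM $fromTable GROUP BY appid")
  .createOrReplaceTempView(cacheTable)
{code}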

 

{code:scala}
trait IApiCode extends Serializable {
  def sql(spark: org.apache.spark.sql.SparkSession, fromTable: String, cacheTable: String): Unit
}
{code}

{code:scala}
import java.util.concurrent.TimeUnit
import com.google.common.cache.{Cache, CacheBuilder}  // Guava
import org.apache.commons.lang3.time.DateFormatUtils  // commons-lang3
import org.apache.spark.internal.Logging

object InterpretCodeFactory extends Logging {

  // Compiled snippets are cached for 30 minutes after last access.
  val sqlActMap: Cache[String, IApiCode] =
    CacheBuilder.newBuilder().expireAfterAccess(30, TimeUnit.MINUTES).build()

  def interpret(taskId: Integer, updateTime: java.util.Date, code: String): IApiCode = {
    // The cache key changes whenever the task is updated.
    val key = taskId + DateFormatUtils.format(updateTime, "yyyyMMddHHmmss")
    var result = sqlActMap.getIfPresent(key)
    if (result == null) {
      result = interpret(key, code)
    }
    result
  }

  def interpret(key: String, code: String): IApiCode = synchronized {
    var result = sqlActMap.getIfPresent(key)
    if (result == null) {
      val genCodeResult = doGenCode(key, code)
      ExecutorContext.interpreter.compileString(genCodeResult)
      result = Class.forName(
        s"com.duowan.meteor.server.executor.apicode.ApiCode$key", true,
        ExecutorContext.interpreter.classLoader).newInstance().asInstanceOf[IApiCode]
      sqlActMap.put(key, result)
    }
    result
  }

  def doGenCode(key: String, code: String): String = {
    val result = s"""
      |package com.duowan.meteor.server.executor.apicode
      |
      |class ApiCode$key extends com.duowan.meteor.server.executor.IApiCode {
      |  override def sql(spark: org.apache.spark.sql.SparkSession, fromTable: String, cacheTable: String): Unit = {
      |    $code
      |  }
      |}
    """.stripMargin
    logInfo(result)
    result
  }
}
{code}
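
This all works, but the improvement I am asking for in this issue is on the executor side: org.apache.spark.repl.ExecutorClassLoader could cache the class bytes it fetches, so repeated loads of the generated classes do not keep going back to the driver. A rough sketch of the idea, not a patch against the real class (fetchClassBytes is a stand-in for however the loader currently obtains the bytes):

{code:scala}
import java.util.concurrent.{Callable, TimeUnit}
import com.google.common.cache.{Cache, CacheBuilder}

// Sketch only: a REPL class loader that caches fetched class bytes.
// `fetchClassBytes` stands in for however ExecutorClassLoader currently
// reads a class file from the driver / output directory.
class CachingReplClassLoader(parent: ClassLoader, fetchClassBytes: String => Array[Byte])
    extends ClassLoader(parent) {

  private val byteCache: Cache[String, Array[Byte]] =
    CacheBuilder.newBuilder().expireAfterAccess(30, TimeUnit.MINUTES).build()

  override def findClass(name: String): Class[_] = {
    val bytes = byteCache.get(name, new Callable[Array[Byte]] {
      override def call(): Array[Byte] = fetchClassBytes(name)
    })
    defineClass(name, bytes, 0, bytes.length)
  }
}
{code}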

 

 

 

  was:
My realtime platform, built on Spark, lets users write both SQL and Scala in a JSP page.

To support the Scala coding part, my solution is:

export SPARK_DIST_CLASSPATH=/data/xx/my-driver-jar-with-dependencies.jar

--conf spark.repl.class.outputDir=/data/xx/myclasss/

val flusher = new java.io.PrintWriter(System.err)
val interpreter = {
 val interpArguments = List(
 "-Yrepl-class-based",
 "-Yrepl-outdir", complieClassPath
 )
 val settings = new GenericRunnerSettings(println _)
 settings.embeddedDefaults(this.getClass.getClassLoader)
 settings.usejavacp.value = true
 settings.processArguments(interpArguments, true)
 new IMain(settings, flusher)
}
interpreter.setContextClassLoader()
ExecutorContext.interpreter = interpreter

 

val apiCode = InterpretCodeFactory.interpret(taskId, updateTime, codeFromJsp)
apiCode.sql(ExecutorContext.spark, fromTable, cacheTable)

 

trait IApiCode extends Serializable {

 def sql(spark: org.apache.spark.sql.SparkSession, fromTable: String, 
cacheTable: String): Unit

}


object InterpretCodeFactory extends Logging {

 val sqlActMap: Cache[String, IApiCode] = 
CacheBuilder.newBuilder().expireAfterAccess(30, TimeUnit.MINUTES).build()

 def interpret(taskId: Integer, updateTime: java.util.Date, code: String): 
IApiCode = {
 val key = taskId + DateFormatUtils.format(updateTime, "yyyyMMddHHmmss")
 var result = sqlActMap.getIfPresent(key)
 if (result == null) {
 result = interpret(key, code)
 }
 result
 }

 def interpret(key: String, code: String): IApiCode = synchronized {
 var result = sqlActMap.getIfPresent(key)
 if (result == null) {
 val genCodeResult = doGenCode(key, code)
 ExecutorContext.interpreter.compileString(genCodeResult)
 result = 
Class.forName(s"com.duowan.meteor.server.executor.apicode.ApiCode$key", true, 
ExecutorContext.interpreter.classLoader).newInstance().asInstanceOf[IApiCode]
 sqlActMap.put(key, result)
 }
 result
 }

 def doGenCode(key: String, code: String): String = {
 val result = s"""
 |package com.duowan.meteor.server.executor.apicode
 |
 |class ApiCode$key extends com.duowan.meteor.server.executor.IApiCode {
 |
 | override def sql(spark: org.apache.spark.sql.SparkSession, fromTable: 
String, cacheTable: String): Unit = {
 | $code
 | }
 |
 |}
 """.stripMargin
 logInfo(result)
 result
 }
}

 

 

 


> org.apache.spark.repl.ExecutorClassLoader with cache
> ----------------------------------------------------
>
>                 Key: SPARK-24728
>                 URL: https://issues.apache.org/jira/browse/SPARK-24728
>             Project: Spark
>          Issue Type: Improvement
>          Components: Spark Core
>    Affects Versions: 2.3.1
>            Reporter: ant_nebula
>            Priority: Major
>



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org
For additional commands, e-mail: issues-h...@spark.apache.org
