Github user tgravescs commented on a diff in the pull request:

https://github.com/apache/spark/pull/15009#discussion_r78378891

--- Diff: launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java ---
@@ -538,6 +539,71 @@ public SparkAppHandle startApplication(SparkAppHandle.Listener... listeners) thr
     return handle;
   }

+  private String getAppName() throws IOException {
+    String appName = builder.getEffectiveConfig().get(CHILD_PROCESS_LOGGER_NAME);
+    if (appName == null) {
+      if (builder.appName != null) {
+        appName = builder.appName;
+      } else if (builder.mainClass != null) {
+        int dot = builder.mainClass.lastIndexOf(".");
+        if (dot >= 0 && dot < builder.mainClass.length() - 1) {
+          appName = builder.mainClass.substring(dot + 1, builder.mainClass.length());
+        } else {
+          appName = builder.mainClass;
+        }
+      } else if (builder.appResource != null) {
+        appName = new File(builder.appResource).getName();
+      } else {
+        appName = String.valueOf(COUNTER.incrementAndGet());
+      }
+    }
+    return appName;
+  }
+
+  /**
+   * Starts a Spark application.
+   * <p>
+   * This method returns a handle that provides information about the running application and can
+   * be used to do basic interaction with it.
+   * <p>
+   * The returned handle assumes that the application will instantiate a single SparkContext
+   * during its lifetime. Once that context reports a final state (one that indicates the
+   * SparkContext has stopped), the handle will not perform new state transitions, so anything
+   * that happens after that cannot be monitored. The underlying application is launched as
+   * a Thread, {@link SparkAppHandle#kill()} can still be used to kill the spark application.
+   * <p>
+   * @since 2.1.0
+   * @param listeners Listeners to add to the handle before the app is launched.
+   * @return A handle for the launched application.
+   */
+  public SparkAppHandle startApplicationInProcess(SparkAppHandle.Listener... listeners) throws IOException {
+
+    ChildThreadAppHandle handle = LauncherServer.newAppThreadHandle();
+    for (SparkAppHandle.Listener l : listeners) {
+      handle.addListener(l);
+    }
+
+    String appName = getAppName();
+    setConf(LAUNCHER_INTERNAL_PORT, String.valueOf(LauncherServer.getServerInstance().getPort()));
+    setConf(CHILD_PROCESS_LAUNCHER_INTERNAL_SECRET, handle.getSecret());
+    setConf(CHILD_PROCESS_LAUNCHER_STOP_FLAG, String.valueOf(stopIfInterrupted));
+    try {
+      //trying to see if method is available in the classpath.
+      Method main = SparkSubmitRunner.getSparkSubmitMain();
+      Thread submitJobThread = new Thread(new SparkSubmitRunner(main, builder.buildSparkSubmitArgs()));
+      submitJobThread.setName(appName);
+      handle.setChildThread(submitJobThread);
+      submitJobThread.start();
+    } catch (ClassNotFoundException cnfe) {
+      throw new IOException(cnfe);
+    } catch (NoSuchMethodException nsme) {
+      throw new IOException(nsme);
+    }
+    return handle;
+
--- End diff --

remove extra newline
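
For readers following this thread, here is a minimal usage sketch of the in-process launch API this diff adds. It is not part of the PR itself; the app resource, main class, and master value are placeholders, and it assumes only the startApplicationInProcess(SparkAppHandle.Listener...) signature shown above plus the existing SparkAppHandle.Listener callbacks.

    import java.util.concurrent.CountDownLatch;
    import org.apache.spark.launcher.SparkAppHandle;
    import org.apache.spark.launcher.SparkLauncher;

    public class InProcessLaunchSketch {
      public static void main(String[] args) throws Exception {
        CountDownLatch done = new CountDownLatch(1);

        SparkAppHandle handle = new SparkLauncher()
            .setAppResource("/path/to/app.jar")   // placeholder
            .setMainClass("com.example.MyApp")    // placeholder
            .setMaster("local[*]")                // placeholder
            // Method proposed by this PR: runs spark-submit in a thread of the
            // current JVM instead of forking a child process.
            .startApplicationInProcess(new SparkAppHandle.Listener() {
              @Override
              public void stateChanged(SparkAppHandle h) {
                // Release the latch once the handle reports a final state.
                if (h.getState().isFinal()) {
                  done.countDown();
                }
              }

              @Override
              public void infoChanged(SparkAppHandle h) {
                // no-op
              }
            });

        done.await();
        System.out.println("Final state: " + handle.getState());
      }
    }

Per the Javadoc quoted above, handle.kill() would still be available to stop the application even though it runs as a thread rather than a child process.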