xy720 commented on a change in pull request #4383:
URL: https://github.com/apache/incubator-doris/pull/4383#discussion_r477182446
##########
File path:
fe/fe-core/src/main/java/org/apache/doris/load/loadv2/SparkEtlJobHandler.java
##########
@@ -140,93 +137,98 @@ public void submitEtlJob(long loadJobId, String loadLabel, EtlJobConfig etlJobCo
.setAppName(String.format(ETL_JOB_NAME, loadLabel))
.setSparkHome(sparkHome)
.addAppArgs(jobConfigHdfsPath)
- .redirectError()
- .redirectOutput(new File(Config.sys_log_dir + "/spark-submitter.log"));
+ .redirectError();
// spark configs
for (Map.Entry<String, String> entry :
resource.getSparkConfigs().entrySet()) {
launcher.setConf(entry.getKey(), entry.getValue());
}
// start app
- SparkAppHandle handle = null;
+ SparkLoadAppHandle handle = null;
State state = null;
String appId = null;
- int retry = 0;
String errMsg = "start spark app failed. error: ";
try {
- handle = launcher.startApplication(new SparkAppListener());
+ Process process = launcher.launch();
+ handle = new SparkLoadAppHandle(process);
+ handle.addListener(new SparkAppListener());
+ if (!FeConstants.runningUnitTest) {
+ SparkLauncherMonitors.LogMonitor logMonitor = SparkLauncherMonitors.createLogMonitor(handle);
+ logMonitor.setSubmitTimeoutMs(GET_APPID_TIMEOUT_MS);
+ logMonitor.start();
+ try {
+ logMonitor.join();
+ } catch (InterruptedException e) {
+ logMonitor.interrupt();
+ throw new LoadException(errMsg + e.getMessage());
+ }
+ }
+ appId = handle.getAppId();
+ state = handle.getState();
} catch (IOException e) {
LOG.warn(errMsg, e);
throw new LoadException(errMsg + e.getMessage());
}
- while (retry++ < GET_APPID_MAX_RETRY_TIMES) {
- appId = handle.getAppId();
- if (appId != null) {
- break;
- }
-
- // check state and retry
- state = handle.getState();
- if (fromSparkState(state) == TEtlState.CANCELLED) {
- throw new LoadException(errMsg + "spark app state: " + state.toString());
- }
- if (retry >= GET_APPID_MAX_RETRY_TIMES) {
- throw new LoadException(errMsg + "wait too much time for getting appid. spark app state: "
- + state.toString());
- }
+ if (fromSparkState(state) == TEtlState.CANCELLED) {
+ throw new LoadException(errMsg + "spark app state: " + state.toString() + ", loadJobId:" + loadJobId);
+ }
- // log
- if (retry % 10 == 0) {
- LOG.info("spark appid that handle get is null. load job id: {}, state: {}, retry times: {}",
- loadJobId, state.toString(), retry);
- }
- try {
- Thread.sleep(GET_APPID_SLEEP_MS);
- } catch (InterruptedException e) {
- LOG.warn(e.getMessage());
- }
+ if (appId == null) {
+ throw new LoadException(errMsg + "Failed to get appId from handle. spark app state: "
+ + state.toString() + ", loadJobId:" + loadJobId);
}
// success
attachment.setAppId(appId);
attachment.setHandle(handle);
}
- public EtlStatus getEtlJobStatus(SparkAppHandle handle, String appId, long loadJobId, String etlOutputPath,
-                                  SparkResource resource, BrokerDesc brokerDesc) {
+ public EtlStatus getEtlJobStatus(SparkLoadAppHandle handle, String appId, long loadJobId, String etlOutputPath,
+                                  SparkResource resource, BrokerDesc brokerDesc) throws LoadException {
EtlStatus status = new EtlStatus();
if (resource.isYarnMaster()) {
- // state from yarn
Preconditions.checkState(appId != null && !appId.isEmpty());
Review comment:
done
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]