MaoMiMao removed a comment on issue #1743:
URL: https://github.com/apache/incubator-linkis/issues/1743#issuecomment-1073392657
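The comment body was a Linkis 1.x Java SDK example: it builds a UJESClient against the linkis-mg-gateway, submits a piece of shell code with engine/creator labels, polls the job's progress and logs until completion, and then reads back the first result set: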
// Imports assume the Linkis 1.x client packages (org.apache.linkis.*).
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.commons.io.IOUtils;
import org.apache.linkis.common.utils.Utils;
import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy;
import org.apache.linkis.httpclient.dws.config.DWSClientConfig;
import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder;
import org.apache.linkis.manager.label.constant.LabelKeyConstant;
import org.apache.linkis.ujes.client.UJESClient;
import org.apache.linkis.ujes.client.UJESClientImpl;
import org.apache.linkis.ujes.client.request.JobSubmitAction;
import org.apache.linkis.ujes.client.request.ResultSetAction;
import org.apache.linkis.ujes.client.response.*;

public class LinkisDemo {

    private static DWSClientConfig clientConfig = ((DWSClientConfigBuilder) (DWSClientConfigBuilder.newBuilder()
            .addServerUrl("http://192.168.9.109:9001")  // set linkis-mg-gateway url: http://{ip}:{port}
            .connectionTimeout(30000)                   // connection timeout (ms)
            .discoveryEnabled(false)                    // disable discovery
            .discoveryFrequency(1, TimeUnit.MINUTES)    // discovery frequency
            .loadbalancerEnabled(true)                  // enable load balancing
            .maxConnectionSize(5)                       // set max connections
            .retryEnabled(false)                        // disable retry
            .readTimeout(30000)                         // set read timeout (ms)
            .setAuthenticationStrategy(new StaticAuthenticationStrategy()) // Linkis authentication supports static and token strategies
            .setAuthTokenKey("root")                    // set submit user
            .setAuthTokenValue("root")))                // set password (or a token when using token authentication)
            .setDWSVersion("v1")                        // linkis REST API version v1
            .build();
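    // Note (assumption based on the Linkis client docs): besides
    // StaticAuthenticationStrategy (user/password), the same package provides a
    // TokenAuthenticationStrategy, in which case setAuthTokenValue takes a token
    // configured on the gateway (such as "BML-AUTH") instead of a password.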
    private static UJESClient client = new UJESClientImpl(clientConfig);

    public static void main(String[] args) {
        String user = "hadoop";     // execute user
        String executeCode = "ls";  // supported code types: sql/hql/py/scala/shell
        try {
            System.out.println("user : " + user + ", code : [" + executeCode + "]");
            // 3. build the job and submit it
            JobExecuteResult jobExecuteResult = toSubmit(user, executeCode);
            // on Linkis 0.x: JobExecuteResult jobExecuteResult = toExecute(user, executeCode);
            System.out.println("execId: " + jobExecuteResult.getExecID()
                    + ", taskId: " + jobExecuteResult.taskID());
            // 4. get job info
            JobInfoResult jobInfoResult = client.getJobInfo(jobExecuteResult);
            int sleepTimeMills = 1000;
            int logFromLen = 0;
            int logSize = 100;
            while (!jobInfoResult.isCompleted()) {
                // 5. poll progress and logs until the job completes
                JobProgressResult progress = client.progress(jobExecuteResult);
                System.out.println("progress: " + progress.getProgress());
                JobLogResult logRes = client.log(jobExecuteResult, logFromLen, logSize);
                logFromLen = logRes.fromLine();
                // log levels: 0 = info, 1 = warn, 2 = error, 3 = all
                System.out.println(logRes.log().get(3));
                Utils.sleepQuietly(sleepTimeMills);
                jobInfoResult = client.getJobInfo(jobExecuteResult);
            }
            JobInfoResult jobInfo = client.getJobInfo(jobExecuteResult);
            // 6. get the result set list (if the user submits multiple SQL
            //    statements at a time, multiple result sets are generated)
            String resultSet = jobInfo.getResultSetList(client)[0];
            // 7. fetch the result content
            Object fileContents = client.resultSet(ResultSetAction.builder()
                    .setPath(resultSet)
                    .setUser(jobExecuteResult.getUser())
                    .build()).getFileContent();
            System.out.println("res: " + fileContents);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // close the client exactly once, on both success and failure
            IOUtils.closeQuietly(client);
        }
    }
    private static JobExecuteResult toSubmit(String user, String code) {
        // 1. build params
        // label map: EngineTypeLabel / UserCreatorLabel / EngineRunTypeLabel / Tenant
        Map<String, Object> labels = new HashMap<String, Object>();
        labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "shell-1");           // required: engineType label
        labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // required: execute user and creator
        labels.put(LabelKeyConstant.CODE_TYPE_KEY, "shell");               // required: codeType
        // startup map: engineConn start parameters
        Map<String, Object> startupMap = new HashMap<String, Object>(16);
        // native engine parameters can be set here, e.g. spark/hive parameters
        startupMap.put("spark.executor.instances", 2);
        // linkis parameters
        startupMap.put("wds.linkis.rm.yarnqueue", "dws");
        // 2. build jobSubmitAction
        JobSubmitAction jobSubmitAction = JobSubmitAction.builder()
                .addExecuteCode(code)
                .setStartupParams(startupMap)
                .setUser(user)          // submit user
                .addExecuteUser(user)   // execute user
                .setLabels(labels)
                .build();
        // 3. submit
        return client.submit(jobSubmitAction);
    }
}
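To build the example, the Linkis computation client must be on the classpath; assuming a 1.x release, that is the org.apache.linkis:linkis-computation-client Maven artifact (version matched to the gateway), plus commons-io for IOUtils.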