This is an automated email from the ASF dual-hosted git repository.
peacewong pushed a commit to branch dev-1.3.2
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git
The following commit(s) were added to refs/heads/dev-1.3.2 by this push:
new 1ee4bcea7 [Bug] fix scala.collection.JavaConversions to
scala.collection.JavaConverters (#3998)
1ee4bcea7 is described below
commit 1ee4bcea7f7335c19c4b88514c0e00f00402afec
Author: GuoPhilipse <[email protected]>
AuthorDate: Wed Dec 21 20:45:04 2022 +0800
[Bug] fix scala.collection.JavaConversions to
scala.collection.JavaConverters (#3998)
* fix JavaConversions
* fix comment
* fix jdbc
* remove plugin
add jdbc module
fix outdated method
* fix conflict
* add license
Co-authored-by: gf13871 <[email protected]>
---
.../linkis/storage/source/AbstractFileSource.scala | 4 +-
.../ujes/client/response/JobProgressResult.scala | 11 ++-
.../ecm/core/launch/ProcessEngineConnLaunch.scala | 28 +++---
.../impl/BmlResourceLocalizationService.scala | 9 +-
.../linkis/entrance/cs/CSEntranceHelper.scala | 2 +-
.../interceptor/impl/CustomVariableUtils.scala | 5 +-
.../interceptor/impl/LabelCheckInterceptor.scala | 8 +-
.../impl/ParserVarLabelInterceptor.scala | 10 +-
.../linkis/ujes/jdbc/UJESSQLDriverMain.scala | 2 +-
.../apache/linkis/ujes/jdbc/UJESSQLResultSet.scala | 4 +-
.../jdbc/hook/JDBCDriverPreExecutionHook.scala | 2 +-
linkis-computation-governance/pom.xml | 2 +-
linkis-dist/release-docs/LICENSE | 1 +
.../jdbc/executor/JDBCEngineConnExecutor.scala | 5 +-
.../jdbc/executor/TestJDBCEngineConnExecutor.scala | 9 +-
.../executor/SeatunnelFlinkOnceCodeExecutor.scala | 6 +-
.../SeatunnelFlinkSQLOnceCodeExecutor.scala | 6 +-
.../engineplugin/spark/common/LogContainer.scala | 4 +-
.../spark/datacalc/DataCalcExecution.scala | 7 +-
.../executer/TestTrinoEngineConnExecutor.scala | 12 +--
.../parser/DefaultCodeJobParserTransform.scala | 4 +-
.../service/impl/JobHistoryQueryServiceImpl.scala | 8 --
.../dws/response/DWSHttpMessageFactory.scala | 3 +-
.../parser/EntranceExecutionGatewayParser.scala | 4 +-
.../ujes/route/DefaultLabelGatewayRouter.scala | 10 +-
.../SpringCloudGatewayConfiguration.scala | 7 +-
.../SpringCloudGatewayWebsocketUtils.scala | 14 +--
.../linkis-gateway-datasource-ruler/pom.xml | 105 ---------------------
.../ruler/datasource/cache/DatasourceMapCache.java | 98 -------------------
.../ruler/datasource/dao/DatasourceMapMapper.java | 43 ---------
.../datasource/dao/impl/datasourceMapMapper.xml | 57 -----------
.../ruler/datasource/entity/DatasourceMap.java | 77 ---------------
.../datasource/service/DatasourceMapService.java | 29 ------
.../service/impl/DatasourceMapServiceImpl.java | 76 ---------------
.../datasource/DatasourceGatewayRouterRuler.scala | 89 -----------------
tool/dependencies/known-dependencies.txt | 1 +
36 files changed, 88 insertions(+), 674 deletions(-)
diff --git
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala
index 9401d3844..1a9034406 100644
---
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala
+++
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala
@@ -24,7 +24,7 @@ import org.apache.commons.math3.util.Pair
import java.util
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
abstract class AbstractFileSource(var fileSplits: Array[FileSplit]) extends
FileSource {
@@ -51,7 +51,7 @@ abstract class AbstractFileSource(var fileSplits:
Array[FileSplit]) extends File
override def getFileSplits: Array[FileSplit] = this.fileSplits
override def getParams: util.Map[String, String] =
- fileSplits.map(_.params).foldLeft(Map[String, String]())(_ ++ _)
+ fileSplits.map(_.params.asScala).foldLeft(Map[String, String]())(_ ++
_).asJava
override def write[K <: MetaData, V <: Record](fsWriter: FsWriter[K, V]):
Unit =
fileSplits.foreach(_.write(fsWriter))
diff --git
a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/JobProgressResult.scala
b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/JobProgressResult.scala
index c9a5819bf..0cf163c28 100644
---
a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/JobProgressResult.scala
+++
b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/JobProgressResult.scala
@@ -20,9 +20,9 @@ package org.apache.linkis.ujes.client.response
import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult
import org.apache.linkis.protocol.engine.JobProgressInfo
-import java.util.{List, Map}
+import java.util
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import org.json4s._
import org.json4s.jackson.Serialization._
@@ -31,7 +31,7 @@ import org.json4s.jackson.Serialization._
class JobProgressResult extends UJESJobResult {
private var progress: Float = _
- private var progressInfo: List[Map[String, AnyRef]] = _
+ private var progressInfo: util.List[util.Map[String, AnyRef]] = _
private var progressInfos: Array[JobProgressInfo] = _
private implicit val formats = DefaultFormats
@@ -39,9 +39,10 @@ class JobProgressResult extends UJESJobResult {
def setProgress(progress: Float): Unit = this.progress = progress
def getProgress: Float = progress
- def setProgressInfo(progressInfo: List[Map[String, AnyRef]]): Unit = {
+ def setProgressInfo(progressInfo: util.List[util.Map[String, AnyRef]]): Unit
= {
this.progressInfo = progressInfo
- progressInfos = progressInfo.map(map =>
read[JobProgressInfo](write(map.toMap))).toArray
+ progressInfos =
+ progressInfo.asScala.map(map =>
read[JobProgressInfo](write(map.asScala.toMap))).toArray
}
def getProgressInfo: Array[JobProgressInfo] = progressInfos
diff --git
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala
index 2d371d68c..0cf9b0cb4 100644
---
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala
+++
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala
@@ -20,7 +20,6 @@ package org.apache.linkis.ecm.core.launch
import org.apache.linkis.common.conf.{CommonVars, Configuration}
import org.apache.linkis.common.exception.ErrorException
import org.apache.linkis.common.utils.{Logging, Utils}
-import org.apache.linkis.ecm.core.conf.ECMErrorCode
import org.apache.linkis.ecm.core.errorcode.LinkisECMErrorCodeSummary._
import org.apache.linkis.ecm.core.exception.ECMCoreException
import org.apache.linkis.ecm.core.utils.PortUtils
@@ -39,13 +38,12 @@ import
org.apache.linkis.manager.engineplugin.common.launch.process.Environment.
import
org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants._
import org.apache.linkis.server.conf.ServerConfiguration
-import org.apache.commons.io.{FileUtils, IOUtils}
+import org.apache.commons.io.FileUtils
import org.apache.commons.lang3.StringUtils
-import java.io.{File, InputStream, IOException, OutputStream}
-import java.net.ServerSocket
+import java.io.{File, InputStream, OutputStream}
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging {
@@ -113,10 +111,7 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch
with Logging {
case EUREKA_PREFER_IP =>
environment.put(EUREKA_PREFER_IP.toString,
Configuration.EUREKA_PREFER_IP.toString)
case ENGINECONN_ENVKEYS =>
- environment.put(
- ENGINECONN_ENVKEYS.toString,
- GovernanceCommonConf.ENGINECONN_ENVKEYS.toString
- )
+ environment.put(ENGINECONN_ENVKEYS.toString,
GovernanceCommonConf.ENGINECONN_ENVKEYS)
case _ =>
}
}
@@ -163,7 +158,7 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with
Logging {
protected def getCommandArgs: Array[String] = {
if (
- request.creationDesc.properties.exists { case (k, v) =>
+ request.creationDesc.properties.asScala.exists { case (k, v) =>
k.contains(" ") || (v != null && v.contains(" "))
}
) {
@@ -181,7 +176,7 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with
Logging {
"server.port" -> engineConnPort,
"spring.profiles.active" -> "engineconn",
"logging.config" ->
s"classpath:${EnvConfiguration.LOG4J2_XML_FILE.getValue}"
- ) ++: discoveryMsgGenerator.generate(engineConnManagerEnv)
+ ) ++: discoveryMsgGenerator.generate(engineConnManagerEnv).asScala
val eurekaPreferIp: Boolean = Configuration.EUREKA_PREFER_IP
logger.info(s"EUREKA_PREFER_IP: " + eurekaPreferIp)
@@ -199,15 +194,16 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch
with Logging {
springConf =
springConf + ("eureka.instance.metadata-map.prometheus.path" ->
("\\${prometheus.path:" + endpoint + "}"))
}
- request.creationDesc.properties.filter(_._1.startsWith("spring.")).foreach
{ case (k, v) =>
- springConf = springConf += (k -> v)
+
request.creationDesc.properties.asScala.filter(_._1.startsWith("spring.")).foreach
{
+ case (k, v) =>
+ springConf = springConf += (k -> v)
}
arguments.addSpringConf(springConf.toMap)
var engineConnConf = Map("ticketId" -> request.ticketId, "user" ->
request.user)
- engineConnConf = engineConnConf ++: request.labels
+ engineConnConf = engineConnConf ++: request.labels.asScala
.map(l => EngineConnArgumentsParser.LABEL_PREFIX + l.getLabelKey ->
l.getStringValue)
.toMap
- engineConnConf = engineConnConf ++: request.creationDesc.properties
+ engineConnConf = engineConnConf ++: request.creationDesc.properties.asScala
.filterNot(_._1.startsWith("spring."))
.toMap
arguments.addEngineConnConf(engineConnConf)
@@ -233,7 +229,7 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with
Logging {
initializeEnv()
// TODO env需要考虑顺序问题
val classPath = request.environment.remove(CLASSPATH.toString)
- request.environment.foreach { case (k, v) =>
+ request.environment.asScala.foreach { case (k, v) =>
val value = v.replaceAll(CLASS_PATH_SEPARATOR, File.pathSeparator)
setMoreAvailPort(value)
processBuilder.setEnv(k, processBuilder.replaceExpansionMarker(value))
diff --git
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala
index 867ca2aad..72cd51e39 100644
---
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala
+++
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala
@@ -21,7 +21,6 @@ import org.apache.linkis.DataWorkCloudApplication
import org.apache.linkis.common.conf.Configuration
import org.apache.linkis.common.io.FsPath
import org.apache.linkis.common.utils.{Logging, Utils, ZipUtils}
-import org.apache.linkis.ecm.core.conf.ECMErrorCode
import org.apache.linkis.ecm.core.engineconn.EngineConn
import org.apache.linkis.ecm.core.launch.EngineConnManagerEnv
import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._
@@ -42,7 +41,7 @@ import org.springframework.core.env.Environment
import java.io.File
import java.nio.file.Paths
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import scala.collection.mutable
class BmlResourceLocalizationService extends ResourceLocalizationService with
Logging {
@@ -89,7 +88,7 @@ class BmlResourceLocalizationService extends
ResourceLocalizationService with Lo
val tmpDirs = createDirIfNotExit(
localDirsHandleService.getEngineConnTmpDir(user, ticketId,
engineType)
)
- files.foreach(downloadBmlResource(request, linkDirsP, _, workDir))
+ files.asScala.foreach(downloadBmlResource(request, linkDirsP, _,
workDir))
engineConn.getEngineConnLaunchRunner.getEngineConnLaunch.setEngineConnManagerEnv(
new EngineConnManagerEnv {
override val engineConnManagerHomeDir: String = emHomeDir
@@ -168,13 +167,13 @@ class BmlResourceLocalizationService extends
ResourceLocalizationService with Lo
}
// 2.软连,并且添加到map
val dirAndFileList = fs.listPathWithError(fsPath)
- var paths = dirAndFileList.getFsPaths.toList
+ var paths = dirAndFileList.getFsPaths.asScala.toList
if (paths.exists(_.getPath.endsWith(".zip"))) {
logger.info(s"Start to wait fs path to init ${fsPath.getPath}")
resourceId.intern().synchronized {
logger.info(s"Finished to wait fs path to init ${fsPath.getPath} ")
}
- paths = fs.listPathWithError(fsPath).getFsPaths.toList
+ paths = fs.listPathWithError(fsPath).getFsPaths.asScala.toList
}
paths.foreach { path =>
val name = new File(path.getPath).getName
diff --git
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cs/CSEntranceHelper.scala
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cs/CSEntranceHelper.scala
index d12a9b9d3..5eca0a139 100644
---
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cs/CSEntranceHelper.scala
+++
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cs/CSEntranceHelper.scala
@@ -185,7 +185,7 @@ object CSEntranceHelper extends Logging {
if (StringUtils.isNotBlank(contextIDValueStr)) {
logger.info("parse variable nodeName: {}", nodeNameStr)
val linkisVariableList: util.List[LinkisVariable] =
-
CSVariableService.getInstance().getUpstreamVariables(contextIDValueStr,
nodeNameStr);
+
CSVariableService.getInstance().getUpstreamVariables(contextIDValueStr,
nodeNameStr)
if (null != linkisVariableList) {
linkisVariableList.asScala.foreach { linkisVariable =>
variableMap.put(linkisVariable.getKey, linkisVariable.getValue)
diff --git
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala
index 888e7bf8a..61ba81664 100644
---
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala
+++
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala
@@ -31,7 +31,7 @@ import org.apache.linkis.rpc.Sender
import java.util
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
object CustomVariableUtils extends Logging {
@@ -65,8 +65,9 @@ object CustomVariableUtils extends Logging {
}
val variableMap = TaskUtils
.getVariableMap(jobRequest.getParams.asInstanceOf[util.Map[String, Any]])
+ .asScala
.map { case (k, v) => k -> v.asInstanceOf[String] }
- variables.putAll(variableMap)
+ variables.putAll(variableMap.asJava)
if (!variables.containsKey("user")) {
variables.put("user", jobRequest.getExecuteUser)
}
diff --git
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LabelCheckInterceptor.scala
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LabelCheckInterceptor.scala
index 41753ba4e..c352eb561 100644
---
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LabelCheckInterceptor.scala
+++
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LabelCheckInterceptor.scala
@@ -20,16 +20,14 @@ package org.apache.linkis.entrance.interceptor.impl
import org.apache.linkis.entrance.interceptor.EntranceInterceptor
import org.apache.linkis.entrance.interceptor.exception.LabelCheckException
import org.apache.linkis.governance.common.entity.job.JobRequest
-import org.apache.linkis.governance.common.entity.task.RequestPersistTask
import org.apache.linkis.manager.label.entity.Label
import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel,
UserCreatorLabel}
-import org.apache.linkis.protocol.task.Task
import org.apache.commons.lang3.StringUtils
import java.lang
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
class LabelCheckInterceptor extends EntranceInterceptor {
@@ -56,7 +54,7 @@ class LabelCheckInterceptor extends EntranceInterceptor {
}
private def checkEngineTypeLabel(labels: java.util.List[Label[_]]): Unit = {
- val engineTypeLabelOption = labels.find(_.isInstanceOf[EngineTypeLabel])
+ val engineTypeLabelOption =
labels.asScala.find(_.isInstanceOf[EngineTypeLabel])
if (engineTypeLabelOption.isDefined) {
val engineLabel = engineTypeLabelOption.get.asInstanceOf[EngineTypeLabel]
if (StringUtils.isNotBlank(engineLabel.getEngineType)) {
@@ -71,7 +69,7 @@ class LabelCheckInterceptor extends EntranceInterceptor {
submitUser: String,
executeUser: String
): Unit = {
- val userCreatorLabelOption = labels.find(_.isInstanceOf[UserCreatorLabel])
+ val userCreatorLabelOption =
labels.asScala.find(_.isInstanceOf[UserCreatorLabel])
if (userCreatorLabelOption.isDefined) {
val userCreator =
userCreatorLabelOption.get.asInstanceOf[UserCreatorLabel]
if (StringUtils.isNotBlank(userCreator.getUser)) {
diff --git
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/ParserVarLabelInterceptor.scala
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/ParserVarLabelInterceptor.scala
index baf1f2278..b0f4b2db3 100644
---
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/ParserVarLabelInterceptor.scala
+++
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/ParserVarLabelInterceptor.scala
@@ -22,21 +22,17 @@ import
org.apache.linkis.governance.common.entity.job.JobRequest
import
org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
import org.apache.linkis.manager.label.constant.LabelKeyConstant
import org.apache.linkis.manager.label.entity.TenantLabel
-import org.apache.linkis.protocol.utils.TaskUtils
-import java.{lang, util}
-
-import scala.collection.JavaConversions._
+import java.lang
class ParserVarLabelInterceptor extends EntranceInterceptor {
override def apply(jobRequest: JobRequest, logAppender: lang.StringBuilder):
JobRequest = {
jobRequest match {
case requestPersistTask: JobRequest =>
- val variableMap =
-
TaskUtils.getVariableMap(requestPersistTask.getParams.asInstanceOf[util.Map[String,
Any]])
+ val variableMap = requestPersistTask.getParams
val labels = requestPersistTask.getLabels
- if (variableMap.contains(LabelKeyConstant.TENANT_KEY)) {
+ if (variableMap.containsKey(LabelKeyConstant.TENANT_KEY)) {
val tenantLabel = LabelBuilderFactoryContext.getLabelBuilderFactory
.createLabel[TenantLabel](LabelKeyConstant.TENANT_KEY)
tenantLabel.setTenant(variableMap.get(LabelKeyConstant.TENANT_KEY).toString)
diff --git
a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala
b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala
index 4e4dfb609..783713cf4 100644
---
a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala
+++
b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala
@@ -39,7 +39,7 @@ class UJESSQLDriverMain extends Driver with Logging {
override def connect(url: String, properties: Properties): Connection = if
(acceptsURL(url)) {
val props = if (properties != null) properties else new Properties
props.putAll(parseURL(url))
- info(s"input url:$url, properties:$properties")
+ logger.info(s"input url:$url, properties:$properties")
val ujesClient = UJESClientFactory.getUJESClient(props)
new UJESSQLConnection(ujesClient, props)
} else throw new UJESSQLException(UJESSQLErrorCode.BAD_URL, "bad url: " +
url)
diff --git
a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala
b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala
index c68babfad..b8cf1b23b 100644
---
a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala
+++
b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala
@@ -895,9 +895,9 @@ class UJESSQLResultSet(
}
override def getStatement: Statement = {
- if (statement != null && !hasClosed)
+ if (statement != null && !hasClosed) {
statement.asInstanceOf[Statement]
- else throw new UJESSQLException(UJESSQLErrorCode.STATEMENT_CLOSED)
+ } else throw new UJESSQLException(UJESSQLErrorCode.STATEMENT_CLOSED)
}
override def getObject(columnIndex: Int, map: util.Map[String, Class[_]]):
AnyRef = {
diff --git
a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala
b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala
index 70c16cce5..038ff3871 100644
---
a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala
+++
b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala
@@ -42,7 +42,7 @@ object JDBCDriverPreExecutionHook extends Logging {
val obj = clazz.newInstance()
obj match {
case hook: JDBCDriverPreExecutionHook => hooks += hook
- case _ => warn(s"obj is not a engineHook obj is ${obj.getClass}")
+ case _ => logger.warn(s"obj is not a engineHook obj is
${obj.getClass}")
}
} { case e: Exception =>
error(s"failed to load class ${hookStr}", e)
diff --git a/linkis-computation-governance/pom.xml
b/linkis-computation-governance/pom.xml
index 12170d8ed..2efd86537 100644
--- a/linkis-computation-governance/pom.xml
+++ b/linkis-computation-governance/pom.xml
@@ -35,7 +35,7 @@
<module>linkis-entrance</module>
<module>linkis-client/linkis-computation-client</module>
<module>linkis-client/linkis-cli</module>
- <!--<module>linkis-jdbc-driver</module>-->
+ <module>linkis-jdbc-driver</module>
</modules>
diff --git a/linkis-dist/release-docs/LICENSE b/linkis-dist/release-docs/LICENSE
index a353b1c81..4f68bc780 100644
--- a/linkis-dist/release-docs/LICENSE
+++ b/linkis-dist/release-docs/LICENSE
@@ -358,6 +358,7 @@ See licenses/ for text of these licenses.
(Apache License, Version 2.0) Jackson-module-parameter-names
(com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.4 -
https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names)
(Apache License, Version 2.0) Jettison
(org.codehaus.jettison:jettison:1.3.7 - http://codehaus.org/jettison/)
(Apache License, Version 2.0) Joda-Time (joda-time:joda-time:2.3 -
http://www.joda.org/joda-time/)
+ (Apache License, Version 2.0) Joda-Time (joda-time:joda-time:2.10.5 -
http://www.joda.org/joda-time/)
(Apache License, Version 2.0) Netty/All-in-One
(io.netty:netty-all:4.0.23.Final - http://netty.io/netty-all/)
(Apache License, Version 2.0) Netty/Buffer
(io.netty:netty-buffer:4.1.49.Final - https://netty.io/netty-buffer/)
(Apache License, Version 2.0) Netty/Codec
(io.netty:netty-codec:4.1.49.Final - https://netty.io/netty-codec/)
diff --git
a/linkis-engineconn-plugins/jdbc/src/main/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/JDBCEngineConnExecutor.scala
b/linkis-engineconn-plugins/jdbc/src/main/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/JDBCEngineConnExecutor.scala
index ce582be7d..69e28d2a1 100644
---
a/linkis-engineconn-plugins/jdbc/src/main/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/JDBCEngineConnExecutor.scala
+++
b/linkis-engineconn-plugins/jdbc/src/main/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/JDBCEngineConnExecutor.scala
@@ -66,7 +66,6 @@ import java.util
import java.util.Collections
import java.util.concurrent.ConcurrentHashMap
-import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
class JDBCEngineConnExecutor(override val outputPrintLimit: Int, val id: Int)
@@ -261,7 +260,9 @@ class JDBCEngineConnExecutor(override val outputPrintLimit:
Int, val id: Int)
val md = resultSet.getMetaData
val metaArrayBuffer = new ArrayBuffer[(String, String)]()
for (i <- 1 to md.getColumnCount) {
- metaArrayBuffer.add(Tuple2(md.getColumnName(i),
JDBCHelper.getTypeStr(md.getColumnType(i))))
+ metaArrayBuffer.append(
+ Tuple2(md.getColumnName(i),
JDBCHelper.getTypeStr(md.getColumnType(i)))
+ )
}
val columns =
metaArrayBuffer.map { c => Column(c._1, DataType.toDataType(c._2), "")
}.toArray[Column]
diff --git
a/linkis-engineconn-plugins/jdbc/src/test/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/TestJDBCEngineConnExecutor.scala
b/linkis-engineconn-plugins/jdbc/src/test/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/TestJDBCEngineConnExecutor.scala
index 26b2a3644..bc57f122f 100644
---
a/linkis-engineconn-plugins/jdbc/src/test/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/TestJDBCEngineConnExecutor.scala
+++
b/linkis-engineconn-plugins/jdbc/src/test/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/TestJDBCEngineConnExecutor.scala
@@ -31,7 +31,6 @@ import
org.apache.linkis.governance.common.conf.GovernanceCommonConf
import org.apache.linkis.governance.common.entity.ExecutionNodeStatus
import org.apache.linkis.governance.common.utils.EngineConnArgumentsParser
import org.apache.linkis.manager.engineplugin.common.launch.process.Environment
-import
org.apache.linkis.manager.engineplugin.jdbc.executor.JDBCEngineConnExecutor
import
org.apache.linkis.manager.engineplugin.jdbc.factory.JDBCEngineConnFactory
import org.apache.linkis.manager.engineplugin.jdbc.monitor.ProgressMonitor
import org.apache.linkis.manager.label.builder.factory.{
@@ -45,7 +44,7 @@ import
org.apache.linkis.scheduler.executer.SuccessExecuteResponse
import java.sql.Statement
import java.util
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import org.h2.tools.Server
@@ -131,7 +130,7 @@ class TestJDBCEngineConnExecutor {
engineExecutionContext.setLabels(anyArray.map(_.asInstanceOf[Label[_]]))
val testPath = this.getClass.getClassLoader.getResource("").getPath
engineExecutionContext.setStorePath(testPath)
- engineCreationContext.getOptions.foreach({ case (key, value) =>
+ engineCreationContext.getOptions.asScala.foreach({ case (key, value) =>
engineExecutionContext.addProperty(key, value)
})
Assertions.assertNotNull(jdbcExecutor.getProgressInfo(taskId))
@@ -184,13 +183,13 @@ class TestJDBCEngineConnExecutor {
labels += labelBuilderFactory
.createLabel[Label[_]](key.replace(EngineConnArgumentsParser.LABEL_PREFIX, ""),
value)
}
- engineCreationContext.setLabels(labels.toList)
+ engineCreationContext.setLabels(labels.toList.asJava)
}
val jMap = new java.util.HashMap[String, String](engineConf.size)
jMap.put("jdbc.url", "jdbc:h2:~/test")
jMap.put("jdbc.username", "sas")
jMap.put("jdbc.password", "sa")
- jMap.putAll(engineConf)
+ jMap.putAll(engineConf.asJava)
this.engineCreationContext.setOptions(jMap)
this.engineCreationContext.setArgs(args)
}
diff --git
a/linkis-engineconn-plugins/seatunnel/src/main/scala/org/apache/linkis/engineconnplugin/seatunnel/executor/SeatunnelFlinkOnceCodeExecutor.scala
b/linkis-engineconn-plugins/seatunnel/src/main/scala/org/apache/linkis/engineconnplugin/seatunnel/executor/SeatunnelFlinkOnceCodeExecutor.scala
index 22538e0a8..f5ed0820f 100644
---
a/linkis-engineconn-plugins/seatunnel/src/main/scala/org/apache/linkis/engineconnplugin/seatunnel/executor/SeatunnelFlinkOnceCodeExecutor.scala
+++
b/linkis-engineconn-plugins/seatunnel/src/main/scala/org/apache/linkis/engineconnplugin/seatunnel/executor/SeatunnelFlinkOnceCodeExecutor.scala
@@ -59,7 +59,7 @@ import java.nio.file.Files
import java.util
import java.util.concurrent.{Future, TimeUnit}
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
class SeatunnelFlinkOnceCodeExecutor(
override val id: Long,
@@ -122,7 +122,7 @@ class SeatunnelFlinkOnceCodeExecutor(
if (params.containsKey(variable)) {
val variableMap = GSON.fromJson(params.get(variable),
classOf[util.HashMap[String, String]])
- variableMap.foreach(f => {
+ variableMap.asScala.foreach(f => {
args ++ Array(GET_LINKIS_FLINK_VARIABLE, s"${f._1}=${f._2}")
})
}
@@ -130,7 +130,7 @@ class SeatunnelFlinkOnceCodeExecutor(
} else {
args = localArray(code)
}
- System.setProperty("SEATUNNEL_HOME",
System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue));
+ System.setProperty("SEATUNNEL_HOME",
System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue))
Files.createSymbolicLink(
new File(System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue) +
"/seatunnel").toPath,
new File(SeatunnelEnvConfiguration.SEATUNNEL_HOME.getValue).toPath
diff --git
a/linkis-engineconn-plugins/seatunnel/src/main/scala/org/apache/linkis/engineconnplugin/seatunnel/executor/SeatunnelFlinkSQLOnceCodeExecutor.scala
b/linkis-engineconn-plugins/seatunnel/src/main/scala/org/apache/linkis/engineconnplugin/seatunnel/executor/SeatunnelFlinkSQLOnceCodeExecutor.scala
index b5651a5d9..6795fe9a1 100644
---
a/linkis-engineconn-plugins/seatunnel/src/main/scala/org/apache/linkis/engineconnplugin/seatunnel/executor/SeatunnelFlinkSQLOnceCodeExecutor.scala
+++
b/linkis-engineconn-plugins/seatunnel/src/main/scala/org/apache/linkis/engineconnplugin/seatunnel/executor/SeatunnelFlinkSQLOnceCodeExecutor.scala
@@ -59,7 +59,7 @@ import java.nio.file.Files
import java.util
import java.util.concurrent.{Future, TimeUnit}
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
class SeatunnelFlinkSQLOnceCodeExecutor(
override val id: Long,
@@ -122,7 +122,7 @@ class SeatunnelFlinkSQLOnceCodeExecutor(
if (params.containsKey(variable)) {
val variableMap = GSON.fromJson(params.get(variable),
classOf[util.HashMap[String, String]])
- variableMap.foreach(f => {
+ variableMap.asScala.foreach(f => {
args ++ Array(GET_LINKIS_FLINK_VARIABLE, s"${f._1}=${f._2}")
})
}
@@ -130,7 +130,7 @@ class SeatunnelFlinkSQLOnceCodeExecutor(
} else {
args = localArray(code)
}
- System.setProperty("SEATUNNEL_HOME",
System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue));
+ System.setProperty("SEATUNNEL_HOME",
System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue))
Files.createSymbolicLink(
new File(System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue) +
"/seatunnel").toPath,
new File(SeatunnelEnvConfiguration.SEATUNNEL_HOME.getValue).toPath
diff --git
a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/common/LogContainer.scala
b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/common/LogContainer.scala
index 698e52a3b..842ef2130 100644
---
a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/common/LogContainer.scala
+++
b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/common/LogContainer.scala
@@ -18,7 +18,7 @@
package org.apache.linkis.engineplugin.spark.common
import scala.collection.Iterable
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
/**
@@ -70,6 +70,6 @@ class LogContainer(val logSize: Int) {
else tail - flag
}
- def getLogList: java.util.List[String] = getLogs
+ def getLogList: java.util.List[String] = getLogs.asJava
}
diff --git
a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/DataCalcExecution.scala
b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/DataCalcExecution.scala
index 5d2870138..447005f5c 100644
---
a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/DataCalcExecution.scala
+++
b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/DataCalcExecution.scala
@@ -37,7 +37,7 @@ import javax.validation.{Validation, Validator}
import java.text.MessageFormat
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import scala.collection.mutable
import org.slf4j.{Logger, LoggerFactory}
@@ -83,8 +83,9 @@ object DataCalcExecution {
sinks: Array[DataCalcSink[SK]]
): Unit = {
if (sources != null && !sources.isEmpty) sources.foreach(source =>
sourceProcess(spark, source))
- if (transformations != null && !transformations.isEmpty)
+ if (transformations != null && !transformations.isEmpty) {
transformations.foreach(transformation => transformProcess(spark,
transformation))
+ }
if (sinks != null && !sinks.isEmpty) sinks.foreach(sink =>
sinkProcess(spark, sink))
DataCalcTempData.clean(spark.sqlContext)
@@ -210,7 +211,7 @@ object DataCalcExecution {
log.error(
s"Configuration check error,
${BDPJettyServerHelper.gson.toJson(plugin.getConfig)}"
)
- for (violation <- violations) {
+ for (violation <- violations.asScala) {
if (
violation.getMessageTemplate
.startsWith("{") && violation.getMessageTemplate.endsWith("}")
diff --git
a/linkis-engineconn-plugins/trino/src/test/scala/org/apache/linkis/engineplugin/trino/executer/TestTrinoEngineConnExecutor.scala
b/linkis-engineconn-plugins/trino/src/test/scala/org/apache/linkis/engineplugin/trino/executer/TestTrinoEngineConnExecutor.scala
index d8d2d66be..e4d04a8ad 100644
---
a/linkis-engineconn-plugins/trino/src/test/scala/org/apache/linkis/engineplugin/trino/executer/TestTrinoEngineConnExecutor.scala
+++
b/linkis-engineconn-plugins/trino/src/test/scala/org/apache/linkis/engineplugin/trino/executer/TestTrinoEngineConnExecutor.scala
@@ -41,10 +41,10 @@ import org.apache.linkis.manager.label.entity.Label
import java.util
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
-import org.junit.jupiter.api.{Assertions, Test}
+import org.junit.jupiter.api.Assertions
class TestTrinoEngineConnExecutor {
@@ -105,7 +105,7 @@ class TestTrinoEngineConnExecutor {
engineExecutionContext.setLabels(anyArray.map(_.asInstanceOf[Label[_]]))
val testPath = this.getClass.getClassLoader.getResource("").getPath
engineExecutionContext.setStorePath(testPath)
- engineCreationContext.getOptions.foreach({ case (key, value) =>
+ engineCreationContext.getOptions.asScala.foreach({ case (key, value) =>
engineExecutionContext.addProperty(key, value)
})
Assertions.assertNotNull(jdbcExecutor.getProgressInfo(taskId))
@@ -130,13 +130,13 @@ class TestTrinoEngineConnExecutor {
labels += labelBuilderFactory
.createLabel[Label[_]](key.replace(EngineConnArgumentsParser.LABEL_PREFIX, ""),
value)
}
- engineCreationContext.setLabels(labels.toList)
+ engineCreationContext.setLabels(labels.toList.asJava)
}
val jMap = new java.util.HashMap[String, String](engineConf.size)
- jMap.putAll(engineConf)
+ jMap.putAll(engineConf.asJava)
this.engineCreationContext.setOptions(jMap)
this.engineCreationContext.setArgs(args)
- sys.props.putAll(jMap)
+ sys.props.asJava.putAll(jMap)
}
}
diff --git
a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/catalyst/parser/DefaultCodeJobParserTransform.scala
b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/catalyst/parser/DefaultCodeJobParserTransform.scala
index 535e09fb2..7ffdcbfdb 100644
---
a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/catalyst/parser/DefaultCodeJobParserTransform.scala
+++
b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/catalyst/parser/DefaultCodeJobParserTransform.scala
@@ -24,7 +24,7 @@ import org.apache.linkis.orchestrator.parser.Parser
import org.apache.linkis.orchestrator.plans.ast.{ASTContext, Job}
import org.apache.linkis.orchestrator.plans.unit.CodeLogicalUnit
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import com.google.common.collect.Lists
@@ -63,7 +63,7 @@ class DefaultCodeJobParserTransform extends ParserTransform
with Logging {
def splitCode(codeJob: CodeJob): Array[CodeLogicalUnit] = {
val codeLogicalUnits = new ArrayBuffer[CodeLogicalUnit]
- codeJob.getCodeLogicalUnit.getCodes.foreach { code =>
+ codeJob.getCodeLogicalUnit.getCodes.asScala.foreach { code =>
code.split(codeJob.getCodeLogicalUnit.getSeparator).foreach { line =>
codeLogicalUnits.append(
new CodeLogicalUnit(
diff --git
a/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala
b/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala
index 3251d5e64..31929263c 100644
---
a/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala
+++
b/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala
@@ -243,14 +243,6 @@ class JobHistoryQueryServiceImpl extends
JobHistoryQueryService with Logging {
jobResp
}
- /* private def queryTaskList2RequestPersistTaskList(queryTask:
java.util.List[QueryTask]): java.util.List[RequestPersistTask] = {
- import scala.collection.JavaConversions._
- val tasks = new util.ArrayList[RequestPersistTask]
- import
org.apache.linkis.jobhistory.conversions.TaskConversions.queryTask2RequestPersistTask
- queryTask.foreach(f => tasks.add(f))
- tasks
- } */
-
override def getJobHistoryByIdAndName(jobId: java.lang.Long, userName:
String): JobHistory = {
val jobReq = new JobHistory
jobReq.setId(jobId)
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/response/DWSHttpMessageFactory.scala
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/response/DWSHttpMessageFactory.scala
index d537c741b..f3891b22a 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/response/DWSHttpMessageFactory.scala
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/response/DWSHttpMessageFactory.scala
@@ -22,7 +22,7 @@ import org.apache.linkis.httpclient.response.Result
import org.apache.commons.lang3.ClassUtils
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
object DWSHttpMessageFactory {
@@ -30,6 +30,7 @@ object DWSHttpMessageFactory {
private val methodToHttpMessageClasses = reflections
.getTypesAnnotatedWith(classOf[DWSHttpMessageResult])
+ .asScala
.filter(ClassUtils.isAssignable(_, classOf[Result]))
.map { c =>
val httpMessageResult = c.getAnnotation(classOf[DWSHttpMessageResult])
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/parser/EntranceExecutionGatewayParser.scala
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/parser/EntranceExecutionGatewayParser.scala
index a145daf84..d5c4df23e 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/parser/EntranceExecutionGatewayParser.scala
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/parser/EntranceExecutionGatewayParser.scala
@@ -33,7 +33,7 @@ import javax.annotation.Resource
import java.util
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
@Component
class EntranceExecutionGatewayParser extends AbstractGatewayParser {
@@ -64,7 +64,7 @@ class EntranceExecutionGatewayParser extends
AbstractGatewayParser {
gatewayContext: GatewayContext
): Option[util.List[RouteLabel]] = {
var routeLabels: Option[util.List[RouteLabel]] = None
- for (parser <- routeLabelParsers if routeLabels.isEmpty ||
routeLabels.get.isEmpty) {
+ for (parser <- routeLabelParsers.asScala if routeLabels.isEmpty ||
routeLabels.get.isEmpty) {
routeLabels = Option(parser.parse(gatewayContext))
}
routeLabels
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/route/DefaultLabelGatewayRouter.scala
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/route/DefaultLabelGatewayRouter.scala
index bc4134218..d1c2318ae 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/route/DefaultLabelGatewayRouter.scala
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/route/DefaultLabelGatewayRouter.scala
@@ -29,7 +29,7 @@ import org.apache.commons.lang3.StringUtils
import java.text.MessageFormat
import java.util
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import scala.util.Random
class DefaultLabelGatewayRouter(var routeLabelParsers:
util.List[RouteLabelParser])
@@ -43,7 +43,7 @@ class DefaultLabelGatewayRouter(var routeLabelParsers:
util.List[RouteLabelParse
gatewayContext: GatewayContext
): util.List[RouteLabel] = {
var routeLabels: Option[util.List[RouteLabel]] = None
- for (parser <- routeLabelParsers if routeLabels.isEmpty ||
routeLabels.get.isEmpty) {
+ for (parser <- routeLabelParsers.asScala if routeLabels.isEmpty ||
routeLabels.get.isEmpty) {
routeLabels = Option(parser.parse(gatewayContext))
}
routeLabels.getOrElse(new util.ArrayList[RouteLabel]())
@@ -63,10 +63,10 @@ class DefaultLabelGatewayRouter(var routeLabelParsers:
util.List[RouteLabelParse
) {
val applicationName: String =
gatewayContext.getGatewayRoute.getServiceInstance.getApplicationName
- val filterCandidates = candidates.filter(serviceInstance =>
+ val filterCandidates = candidates.asScala.filter(serviceInstance =>
serviceInstance.getApplicationName.equalsIgnoreCase(applicationName)
)
- roulette(filterCandidates)
+ roulette(filterCandidates.asJava)
} else {
roulette(candidates)
}
@@ -84,7 +84,7 @@ class DefaultLabelGatewayRouter(var routeLabelParsers:
util.List[RouteLabelParse
throw new GatewayErrorException(NO_ROUTE_SERVICE.getErrorCode,
NO_ROUTE_SERVICE.getErrorDesc)
}
- val serviceIds = serviceInstances.map(_.getApplicationName).distinct
+ val serviceIds =
serviceInstances.asScala.map(_.getApplicationName).distinct
val filteredInstances = new util.ArrayList[ServiceInstance]()
for (serviceId <- serviceIds) {
filteredInstances.addAll(retainAllInRegistry(serviceId,
serviceInstances))
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/SpringCloudGatewayConfiguration.scala
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/SpringCloudGatewayConfiguration.scala
index 57d9cc1f0..39fe9a064 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/SpringCloudGatewayConfiguration.scala
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/SpringCloudGatewayConfiguration.scala
@@ -43,7 +43,7 @@ import org.springframework.context.annotation.{Bean,
Configuration}
import org.springframework.web.reactive.socket.client.WebSocketClient
import org.springframework.web.reactive.socket.server.WebSocketService
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import com.netflix.loadbalancer.Server
import org.slf4j.{Logger, LoggerFactory}
@@ -137,7 +137,7 @@ class SpringCloudGatewayConfiguration {
val serviceInstance = getServiceInstance(serviceId)
logger.info("redirect to " + serviceInstance)
val lb = this.getLoadBalancer(serviceInstance.getApplicationName)
- lb.getAllServers.find(_.getHostPort == serviceInstance.getInstance).get
+ lb.getAllServers.asScala.find(_.getHostPort ==
serviceInstance.getInstance).get
} else super.getServer(serviceId)
def isSecure(server: Server, serviceId: String) = {
@@ -158,7 +158,8 @@ class SpringCloudGatewayConfiguration {
val serviceInstance = getServiceInstance(serviceId)
logger.info("redirect to " + serviceInstance)
val lb = this.getLoadBalancer(serviceInstance.getApplicationName)
- val server = lb.getAllServers.find(_.getHostPort ==
serviceInstance.getInstance).get
+ val server =
+ lb.getAllServers.asScala.find(_.getHostPort ==
serviceInstance.getInstance).get
new RibbonLoadBalancerClient.RibbonServer(
serviceId,
server,
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/websocket/SpringCloudGatewayWebsocketUtils.scala
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/websocket/SpringCloudGatewayWebsocketUtils.scala
index 64f78663b..819b97330 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/websocket/SpringCloudGatewayWebsocketUtils.scala
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/websocket/SpringCloudGatewayWebsocketUtils.scala
@@ -35,7 +35,7 @@ import java.net.InetSocketAddress
import java.util
import java.util.concurrent.{ConcurrentHashMap, TimeUnit}
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import reactor.core.publisher.{Flux, Mono}
@@ -66,7 +66,7 @@ object SpringCloudGatewayWebsocketUtils extends Logging {
new Runnable {
override def run(): Unit = Utils.tryQuietly {
- cachedWebSocketSessions
+ cachedWebSocketSessions.asScala
.filter { case (_, session) =>
session.removeDeadProxySessions()
session.canRelease
@@ -78,7 +78,7 @@ object SpringCloudGatewayWebsocketUtils extends Logging {
session.release()
cachedWebSocketSessions.remove(key)
}
- cachedWebSocketSessions.foreach(_._2.heartbeat())
+ cachedWebSocketSessions.asScala.foreach(_._2.heartbeat())
}
},
@@ -88,12 +88,12 @@ object SpringCloudGatewayWebsocketUtils extends Logging {
)
def removeAllGatewayWebSocketSessionConnection(user: String): Unit =
- cachedWebSocketSessions.filter(_._2.user == user).values.foreach { session
=>
+ cachedWebSocketSessions.asScala.filter(_._2.user == user).values.foreach {
session =>
session.release()
}
def removeGatewayWebSocketSessionConnection(inetSocketAddress:
InetSocketAddress): Unit =
- cachedWebSocketSessions
+ cachedWebSocketSessions.asScala
.find(_._2.getAddress == inetSocketAddress)
.foreach { case (_, session) =>
session.release()
@@ -113,9 +113,9 @@ object SpringCloudGatewayWebsocketUtils extends Logging {
val key = getWebSocketSessionKey(webSocketSession)
if (cachedWebSocketSessions.containsKey(key)) cachedWebSocketSessions
synchronized {
val webSocketSession = cachedWebSocketSessions.get(key)
- if (webSocketSession != null)
+ if (webSocketSession != null) {
webSocketSession.getProxyWebSocketSession(serviceInstance).orNull
- else null
+ } else null
}
else null
}
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml
b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml
deleted file mode 100644
index 144f39d86..000000000
---
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml
+++ /dev/null
@@ -1,105 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one or more
- ~ contributor license agreements. See the NOTICE file distributed with
- ~ this work for additional information regarding copyright ownership.
- ~ The ASF licenses this file to You under the Apache License, Version 2.0
- ~ (the "License"); you may not use this file except in compliance with
- ~ the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <parent>
- <artifactId>linkis</artifactId>
- <groupId>org.apache.linkis</groupId>
- <version>1.3.1</version>
- <relativePath>../../../../pom.xml</relativePath>
- </parent>
- <modelVersion>4.0.0</modelVersion>
-
- <artifactId>linkis-gateway-ujes-datasource-ruler</artifactId>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.linkis</groupId>
- <artifactId>linkis-gateway-ujes-support</artifactId>
- <version>${project.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.linkis</groupId>
- <artifactId>linkis-mybatis</artifactId>
- <version>${project.version}</version>
- </dependency>
-
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-deploy-plugin</artifactId>
- </plugin>
-
- <plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- </plugin>
-
- <!-- 打包jar文件时,配置manifest文件,加入lib包的jar依赖 -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <version>2.10</version>
- <executions>
- <execution>
- <id>copy-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- <configuration>
- <outputDirectory>target/lib</outputDirectory>
- <excludeScope>provided</excludeScope>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
-
- <resources>
- <resource>
- <directory>src/main/java</directory>
- <includes>
- <include>**/*.xml</include>
- </includes>
- </resource>
- <resource>
- <directory>src/main/resources</directory>
- <excludes>
- <exclude>**/*.properties</exclude>
- <exclude>**/application.yml</exclude>
- <exclude>**/bootstrap.yml</exclude>
- <exclude>**/log4j2.xml</exclude>
- </excludes>
- </resource>
- </resources>
- </build>
-
-
-</project>
\ No newline at end of file
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/cache/DatasourceMapCache.java
b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/cache/DatasourceMapCache.java
deleted file mode 100644
index 77054e667..000000000
---
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/cache/DatasourceMapCache.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.gateway.ruler.datasource.cache;
-
-import org.apache.linkis.common.ServiceInstance;
-import org.apache.linkis.common.utils.Utils;
-import org.apache.linkis.gateway.ruler.datasource.dao.DatasourceMapMapper;
-import org.apache.linkis.gateway.ruler.datasource.entity.DatasourceMap;
-import org.apache.linkis.rpc.interceptor.ServiceInstanceUtils;
-import org.apache.linkis.rpc.sender.eureka.EurekaClientRefreshUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-import javax.annotation.PostConstruct;
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-
-
-@Component
-public class DatasourceMapCache {
-
- private Logger logger = LoggerFactory.getLogger(DatasourceMapCache.class);
-
- private Map<String, DatasourceMap> DATASOURCE_MAP_CACHE = new
ConcurrentHashMap<>();
-
- public void cache(DatasourceMap datasourceMap) {
- DATASOURCE_MAP_CACHE.put(datasourceMap.getDatasourceName(),
datasourceMap);
- }
-
- public DatasourceMap get(String datasourceName) {
- return DATASOURCE_MAP_CACHE.get(datasourceName);
- }
-
- @Autowired
- private DatasourceMapMapper datasourceMapMapper;
-
- private final static long CLEAN_PERIOD = 3 * 60 * 1000;
-
- @PostConstruct
- public void init() {
- datasourceMapMapper.createTableIfNotExists();
-
- // init load all datasourceMap to cache
- List<DatasourceMap> datasourceMapList = datasourceMapMapper.listAll();
- if (datasourceMapList != null && !datasourceMapList.isEmpty()) {
- datasourceMapList.forEach(item -> cache(item));
- }
-
- // add a scheduler task to clean up datasourceMap
- Utils.defaultScheduler().scheduleWithFixedDelay(new CleanRunnable(),
CLEAN_PERIOD, CLEAN_PERIOD, TimeUnit.MILLISECONDS);
- }
-
- class CleanRunnable implements Runnable {
-
- @Override
- public void run() {
- Set<DatasourceMap> datasourceMaps = new
HashSet<>(DATASOURCE_MAP_CACHE.values());
- try {
- EurekaClientRefreshUtils.apply().refreshEurekaClient();
- } catch (Throwable t) {
- logger.warn("DatasourceMapCache clean runner refresh eureka
client error, {}", t);
- }
-
- Set<String> badInstances = new HashSet<>();
- datasourceMaps.forEach(datasourceMap -> {
- String instance = datasourceMap.getInstance();
- if (badInstances.contains(instance)) return;
- ServiceInstance[] serviceInstances =
ServiceInstanceUtils.getRPCServerLoader()
- .getServiceInstances(datasourceMap.getServiceId());
- if (serviceInstances == null || serviceInstances.length < 1 ||
!Arrays.asList(serviceInstances).contains(instance)) {
- badInstances.add(datasourceMap.getInstance());
- }
- });
-
- if (!badInstances.isEmpty())
- datasourceMapMapper.cleanBadInstances(badInstances);
- }
- }
-
-}
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/dao/DatasourceMapMapper.java
b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/dao/DatasourceMapMapper.java
deleted file mode 100644
index 8a188a7a0..000000000
---
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/dao/DatasourceMapMapper.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.gateway.ruler.datasource.dao;
-
-import org.apache.linkis.gateway.ruler.datasource.entity.DatasourceMap;
-import org.apache.ibatis.annotations.Param;
-import org.springframework.stereotype.Repository;
-
-import java.util.List;
-import java.util.Set;
-
-
-@Repository
-public interface DatasourceMapMapper {
-
- void createTableIfNotExists();
-
- List<DatasourceMap> listAll();
-
- void insert(DatasourceMap datasourceMap);
-
- long countByInstance(@Param("instance") String instance);
-
- DatasourceMap getByDatasource(@Param("datasourceName") String
datasourceName);
-
- void cleanBadInstances(@Param("badInstances") Set<String> badInstances);
-
-}
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/dao/impl/datasourceMapMapper.xml
b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/dao/impl/datasourceMapMapper.xml
deleted file mode 100644
index 43b8643a2..000000000
---
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/dao/impl/datasourceMapMapper.xml
+++ /dev/null
@@ -1,57 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one or more
- ~ contributor license agreements. See the NOTICE file distributed with
- ~ this work for additional information regarding copyright ownership.
- ~ The ASF licenses this file to You under the Apache License, Version 2.0
- ~ (the "License"); you may not use this file except in compliance with
- ~ the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
-
-<mapper
namespace="org.apache.linkis.gateway.ruler.datasource.dao.DatasourceMapMapper">
-
- <select id="listAll"
resultType="org.apache.linkis.gateway.ruler.datasource.entity.DatasourceMap">
- SELECT * from linkis_datasource_map
- </select>
-
- <update id="createTableIfNotExists">
- CREATE TABLE IF NOT EXISTS linkis.linkis_datasource_map (
- `datasource_name` varchar(50),
- `instance` varchar(50),
- `service_id` varchar(50),
- PRIMARY KEY (`datasource_name`)
- )
- </update>
-
- <insert id="insert"
parameterType="org.apache.linkis.gateway.ruler.datasource.entity.DatasourceMap">
- insert into linkis_datasource_map(datasource_name, instance,
service_id) values (#{datasourceName}, #{instance}, #{serviceId})
- </insert>
-
- <select id="countByInstance" parameterType="String" resultType="Long">
- select count(*) from linkis_datasource_map where instance = #{instance}
- </select>
-
- <select id="getByDatasource" parameterType="String"
resultType="org.apache.linkis.gateway.ruler.datasource.entity.DatasourceMap">
- select * from linkis_datasource_map where datasource_name =
#{datasourceName}
- </select>
-
-
- <delete id="cleanBadInstances" parameterType="java.util.Set">
- delete from linkis_datasource_map
- where instance in
- <foreach collection="badInstances" index="index" item="item"
open="(" separator="," close=")">
- item
- </foreach>
- </delete>
-
-</mapper>
\ No newline at end of file
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/entity/DatasourceMap.java
b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/entity/DatasourceMap.java
deleted file mode 100644
index 646a026ab..000000000
---
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/entity/DatasourceMap.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.gateway.ruler.datasource.entity;
-
-import java.util.Objects;
-
-
-public class DatasourceMap {
-
- public DatasourceMap() {}
-
- public DatasourceMap(String datasourceName, String instance, String
serviceId) {
- this.datasourceName = datasourceName;
- this.instance = instance;
- this.serviceId = serviceId;
- }
-
- private String datasourceName;
-
- private String instance;
-
- private String serviceId;
-
- public String getDatasourceName() {
- return datasourceName;
- }
-
- public void setDatasourceName(String datasourceName) {
- this.datasourceName = datasourceName;
- }
-
- public String getInstance() {
- return instance;
- }
-
- public void setInstance(String instance) {
- this.instance = instance;
- }
-
- public String getServiceId() {
- return serviceId;
- }
-
- public void setServiceId(String serviceId) {
- this.serviceId = serviceId;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- DatasourceMap that = (DatasourceMap) o;
- return Objects.equals(datasourceName, that.datasourceName) &&
- Objects.equals(instance, that.instance) &&
- Objects.equals(serviceId, that.serviceId);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(datasourceName, instance, serviceId);
- }
-}
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/service/DatasourceMapService.java
b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/service/DatasourceMapService.java
deleted file mode 100644
index 19a06d7e3..000000000
---
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/service/DatasourceMapService.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.gateway.ruler.datasource.service;
-
-
-public interface DatasourceMapService {
-
- String getInstanceByDatasource(String datasourceName);
-
- long countByInstance(String instance);
-
- String insertDatasourceMap(String datasourceName, String instance, String
serviceId);
-
-}
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/service/impl/DatasourceMapServiceImpl.java
b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/service/impl/DatasourceMapServiceImpl.java
deleted file mode 100644
index 89ef68245..000000000
---
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/java/org/apache/linkis/gateway/ruler/datasource/service/impl/DatasourceMapServiceImpl.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.gateway.ruler.datasource.service.impl;
-
-import org.apache.linkis.gateway.ruler.datasource.cache.DatasourceMapCache;
-import org.apache.linkis.gateway.ruler.datasource.dao.DatasourceMapMapper;
-import org.apache.linkis.gateway.ruler.datasource.entity.DatasourceMap;
-import org.apache.linkis.gateway.ruler.datasource.service.DatasourceMapService;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.dao.DuplicateKeyException;
-import org.springframework.stereotype.Service;
-
-import javax.annotation.PostConstruct;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-
-@Service
-public class DatasourceMapServiceImpl implements DatasourceMapService {
-
- @Autowired
- private DatasourceMapMapper datasourceMapMapper;
-
- @Autowired
- private DatasourceMapCache datasourceMapCache;
-
- private Map<String, DatasourceMap> DATASOURCE_MAP_CACHE = new
ConcurrentHashMap<>();
-
- public String getInstanceByDatasource(String datasourceName) {
- DatasourceMap datasourceMap = datasourceMapCache.get(datasourceName);
- if (datasourceMap != null) return datasourceMap.getInstance();
- datasourceMap = datasourceMapMapper.getByDatasource(datasourceName);
- if (datasourceMap == null) return null;
- datasourceMapCache.cache(datasourceMap);
- return datasourceMap.getInstance();
- }
-
- @Override
- public long countByInstance(String instance) {
- return datasourceMapMapper.countByInstance(instance);
- }
-
- @Override
- public String insertDatasourceMap(String datasourceName, String instance,
String serviceId) {
- try {
- DatasourceMap datasourceMap = new DatasourceMap(datasourceName,
instance, serviceId);
- datasourceMapMapper.insert(datasourceMap);
- datasourceMapCache.cache(datasourceMap);
- return instance;
- } catch (DuplicateKeyException e) {
- return getInstanceByDatasource(datasourceName);
- }
- }
-
- @PostConstruct
- public void init() {
- // create linkis_datasource_map table if not exists
- datasourceMapMapper.createTableIfNotExists();
- }
-
-}
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/scala/org/apache/linkis/gateway/ruler/datasource/DatasourceGatewayRouterRuler.scala
b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/scala/org/apache/linkis/gateway/ruler/datasource/DatasourceGatewayRouterRuler.scala
deleted file mode 100644
index 1fdaec090..000000000
---
a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/src/main/scala/org/apache/linkis/gateway/ruler/datasource/DatasourceGatewayRouterRuler.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.gateway.ruler.datasource
-
-import java.util
-
-import org.apache.linkis.common.utils.Logging
-import org.apache.linkis.gateway.http.GatewayContext
-import org.apache.linkis.gateway.ruler.datasource.service.DatasourceMapService
-import org.apache.linkis.gateway.ujes.route.EntranceGatewayRouterRuler
-import org.apache.linkis.protocol.constants.TaskConstant
-import org.apache.linkis.protocol.utils.TaskUtils
-import org.apache.linkis.rpc.interceptor.ServiceInstanceUtils
-import org.apache.linkis.server.{BDPJettyServerHelper, JMap}
-import org.apache.commons.lang3.StringUtils
-import org.springframework.beans.factory.annotation.Autowired
-import org.springframework.stereotype.Component
-
-import scala.collection.JavaConversions._
-
-
-@Component
-class DatasourceGatewayRouterRuler extends EntranceGatewayRouterRuler with
Logging {
-
- @Autowired
- var datasourceMapService: DatasourceMapService = _
-
- override def rule(serviceId: String, gatewayContext: GatewayContext): Unit =
if(StringUtils.isNotBlank(gatewayContext.getRequest.getRequestBody)) {
- val datasourceName =
getDatasourceName(gatewayContext.getRequest.getRequestBody)
- if (StringUtils.isBlank(datasourceName)) return
- debug(s"datasourceName: $datasourceName")
- datasourceMapService.getInstanceByDatasource(datasourceName) match {
- case i: String if StringUtils.isNotBlank(i) =>
gatewayContext.getGatewayRoute.getServiceInstance.setInstance(i)
- case _ => {
- val newInstance =
ServiceInstanceUtils.getRPCServerLoader.getServiceInstances(serviceId)
- .map(item => (item,
datasourceMapService.countByInstance(item.getInstance)))
- .sortBy(_._2).map(_._1.getInstance).headOption match {
- case Some(item) =>
datasourceMapService.insertDatasourceMap(datasourceName, item, serviceId)
- case None => null
- }
- debug(s"newInstance: $newInstance")
- if (StringUtils.isNotBlank(newInstance)) {
-
gatewayContext.getGatewayRoute.getServiceInstance.setInstance(newInstance)
- }
- }
- }
- }
-
- def getDatasourceName(body: String): String =
if(StringUtils.isNotBlank(body)) {
- val requestObject = BDPJettyServerHelper.gson.fromJson(body,
classOf[util.Map[String, Any]])
- if (requestObject == null || requestObject.get(TaskConstant.PARAMS) ==
null) return null
- val paramsObject =
requestObject.get(TaskConstant.PARAMS).asInstanceOf[util.Map[String, Any]]
- var datasourceName: String = null
- val startupMap = TaskUtils.getStartupMap(paramsObject)
- val runtimeMap = TaskUtils.getRuntimeMap(paramsObject)
- val properties = new JMap[String, String]
- startupMap.foreach {case (k, v) => if(v != null) properties.put(k,
v.toString)}
- runtimeMap.foreach {case (k, v) => if(v != null) properties.put(k,
v.toString)}
- properties.get(DatasourceGatewayRouterRuler.DATASOURCE_NAME_KEY) match {
- case s: String => datasourceName = s
- case _ =>
- }
- datasourceName
- } else {
- null
- }
-
-}
-
-object DatasourceGatewayRouterRuler {
-
- val DATASOURCE_NAME_KEY = "wds.linkis.datasource"
-
-}
\ No newline at end of file
diff --git a/tool/dependencies/known-dependencies.txt
b/tool/dependencies/known-dependencies.txt
index aa5e00d97..fdcd56fcd 100644
--- a/tool/dependencies/known-dependencies.txt
+++ b/tool/dependencies/known-dependencies.txt
@@ -303,6 +303,7 @@ jna-platform-5.12.1.jar
joda-time-2.3.jar
joda-time-2.8.1.jar
joda-time-2.9.3.jar
+joda-time-2.10.5.jar
jol-core-0.2.jar
joni-2.1.2.jar
jpam-1.1.jar
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]