This is an automated email from the ASF dual-hosted git repository.

casion pushed a commit to branch dev-1.3.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git


The following commit(s) were added to refs/heads/dev-1.3.1 by this push:
     new 33e34203e [hive]Scala code format alarm clear #2835 (#3212)
33e34203e is described below

commit 33e34203e77b2a703478e00e13b70a2699bae089
Author: huangxiaoping <[email protected]>
AuthorDate: Fri Sep 9 16:57:02 2022 +0800

    [hive]Scala code format alarm clear #2835 (#3212)
---
 .../engineplugin/hive/common/HiveUtils.scala       |  2 +
 .../hive/creation/HiveEngineConnFactory.scala      | 12 +++---
 .../hive/executor/HiveEngineConnExecutor.scala     | 43 +++++++++++++---------
 .../hive/hook/HiveAddJarsEngineHook.scala          |  4 +-
 .../linkis/engineplugin/hive/log/LogHelper.scala   |  2 +
 .../hive/progress/HiveProgressHelper.scala         |  4 +-
 6 files changed, 40 insertions(+), 27 deletions(-)

diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/common/HiveUtils.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/common/HiveUtils.scala
index 1944a225e..931ebe6ef 100644
--- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/common/HiveUtils.scala
+++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/common/HiveUtils.scala
@@ -85,7 +85,9 @@ object HiveUtils {
   }
 
   def main(args: Array[String]): Unit = {
+    // scalastyle:off println
     jarOfClass(classOf[Driver]).foreach(println)
+    // scalastyle:on println
   }
 
 }
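
The hunk above brackets an intentional console print with scalastyle suppression comments, so the println check remains active everywhere else. A minimal self-contained sketch of the same pattern (the object name and message are illustrative, not from the Linkis sources):

    object PrintDemo {
      def main(args: Array[String]): Unit = {
        // scalastyle:off println
        // Intentional console output; the check is re-enabled immediately after.
        println("driver jar located")
        // scalastyle:on println
      }
    }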
diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala
index 4d97338cf..d4a78c5ba 100644
--- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala
+++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala
@@ -40,7 +40,7 @@ import org.apache.hadoop.hive.ql.session.SessionState
 import java.io.{ByteArrayOutputStream, PrintStream}
 import java.security.PrivilegedExceptionAction
 
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
 
 class HiveEngineConnFactory extends ComputationSingleExecutorEngineConnFactory with Logging {
 
@@ -82,8 +82,8 @@ class HiveEngineConnFactory extends ComputationSingleExecutorEngineConnFactory with Logging {
           )
         )
     )
-    options.foreach { case (k, v) => logger.info(s"key is $k, value is $v") }
-    options
+    options.asScala.foreach { case (k, v) => logger.info(s"key is $k, value is $v") }
+    options.asScala
       .filter { case (k, v) =>
         k.startsWith("hive.") || k.startsWith("mapreduce.") || 
k.startsWith("mapred.reduce.") || k
           .startsWith("wds.linkis.")
@@ -108,7 +108,8 @@ class HiveEngineConnFactory extends ComputationSingleExecutorEngineConnFactory with Logging {
       hiveConf.setVar(HiveConf.ConfVars.HIVEAUXJARS, HiveEngineConfiguration.HIVE_AUX_JARS_PATH)
     }
 
-    /* //add hook to HiveDriver
+    /*
+    //add hook to HiveDriver
      if (StringUtils.isNotBlank(EnvConfiguration.LINKIS_HIVE_POST_HOOKS)) {
       val hooks = if (StringUtils.isNotBlank(hiveConf.get("hive.exec.post.hooks"))) {
          hiveConf.get("hive.exec.post.hooks") + "," + 
EnvConfiguration.LINKIS_HIVE_POST_HOOKS
@@ -116,7 +117,8 @@ class HiveEngineConnFactory extends ComputationSingleExecutorEngineConnFactory with Logging {
          EnvConfiguration.LINKIS_HIVE_POST_HOOKS
        }
        hiveConf.set("hive.exec.post.hooks", hooks)
-     }*/
+     }
+     */
     // enable hive.stats.collect.scancols
     hiveConf.setBoolean("hive.stats.collect.scancols", true)
     val ugi = HDFSUtils.getUserGroupInformation(Utils.getJvmUser)
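
The hunks in this file replace the deprecated implicit conversions of scala.collection.JavaConversions with the explicit decorators of scala.collection.JavaConverters: each Java collection is converted with .asScala at the call site, making every Java-to-Scala boundary visible. A minimal sketch of the same pattern, assuming an illustrative map and a stand-in logger:

    import java.util

    import scala.collection.JavaConverters._

    object ConvertersDemo {
      def main(args: Array[String]): Unit = {
        val options = new util.HashMap[String, String]()
        options.put("hive.exec.parallel", "true")
        options.put("mapreduce.job.queuename", "default")
        // .asScala wraps the Java map as a scala.collection.mutable.Map view.
        options.asScala
          .filter { case (k, _) => k.startsWith("hive.") || k.startsWith("mapreduce.") }
          .foreach { case (k, v) => logInfo(s"key is $k, value is $v") }
      }

      // Stand-in for the logger available in HiveEngineConnFactory.
      private def logInfo(msg: String): Unit = System.out.println(msg)
    }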
diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
index 4e9abbb20..5fee64676 100644
--- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
@@ -74,7 +74,7 @@ import java.security.PrivilegedExceptionAction
 import java.util
 import java.util.concurrent.atomic.AtomicBoolean
 
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
 
@@ -215,8 +215,9 @@ class HiveEngineConnExecutor(
          case e: Exception => logger.warn("obtain hive execute query plan failed,", e)
          case t: Throwable => logger.warn("obtain hive execute query plan failed,", t)
         }
-        if (numberOfMRJobs > 0)
+        if (numberOfMRJobs > 0) {
          engineExecutorContext.appendStdout(s"Your hive sql has $numberOfMRJobs MR jobs to do")
+        }
         val hiveResponse: CommandProcessorResponse = driver.run(realCode)
         if (hiveResponse.getResponseCode != 0) {
           LOG.error("Hive query failed, response code is {}", 
hiveResponse.getResponseCode)
@@ -298,20 +299,25 @@ class HiveEngineConnExecutor(
     val result = new util.ArrayList[String]()
     var rows = 0
     while (driver.getResults(result)) {
-      val scalaResult: mutable.Buffer[String] = result
+      val scalaResult: mutable.Buffer[String] = result.asScala
       scalaResult foreach { s =>
         val arr: Array[String] = s.split("\t")
         val arrAny: ArrayBuffer[Any] = new ArrayBuffer[Any]()
         if (arr.length > colLength) {
-          logger.error(s"""hive code 查询的结果中有\t制表符,hive不能进行切割,请使用spark执行""")
-          throw new ErrorException(60078, """您查询的结果中有\t制表符,hive不能进行切割,请使用spark执行""")
+          logger.error(
+            s"""There is a \t tab in the result of hive code query, hive 
cannot cut it, please use spark to 
execute(查询的结果中有\t制表符,hive不能进行切割,请使用spark执行)"""
+          )
+          throw new ErrorException(
+            60078,
+            """There is a \t tab in the result of your query, hive cannot cut 
it, please use spark to execute(您查询的结果中有\t制表符,hive不能进行切割,请使用spark执行)"""
+          )
         }
-        if (arr.length == colLength) arr foreach arrAny.add
-        else if (arr.length == 0) for (i <- 1 to colLength) arrAny add ""
+        if (arr.length == colLength) arr foreach arrAny.asJava.add
+        else if (arr.length == 0) for (i <- 1 to colLength) arrAny.asJava add ""
         else {
           val i = colLength - arr.length
-          arr foreach arrAny.add
-          for (i <- 1 to i) arrAny add ""
+          arr foreach arrAny.asJava.add
+          for (i <- 1 to i) arrAny.asJava add ""
         }
         resultSetWriter.addRecord(new TableRecord(arrAny.toArray))
       }
@@ -330,18 +336,18 @@ class HiveEngineConnExecutor(
     var results: util.List[FieldSchema] = null
     val nameSet = new mutable.HashSet[String]()
     val cleanSchema = new util.ArrayList[FieldSchema]()
-    fieldSchemas foreach { fieldSchema =>
+    fieldSchemas.asScala foreach { fieldSchema =>
       val name = fieldSchema.getName
       if (name.split('.').length == 2) {
         nameSet.add(name.split('.')(1))
-        cleanSchema += new FieldSchema(
+        cleanSchema.asScala += new FieldSchema(
           name.split('.')(1),
           fieldSchema.getType,
           fieldSchema.getComment
         )
       }
     }
-    if (nameSet.size < fieldSchemas.length) {
+    if (nameSet.size < fieldSchemas.asScala.length) {
       results = fieldSchemas
     } else {
       if (useTableName) {
@@ -351,7 +357,7 @@ class HiveEngineConnExecutor(
       }
     }
 
-    val columns = results
+    val columns = results.asScala
       .map(result =>
        Column(result.getName, DataType.toDataType(result.getType.toLowerCase()), result.getComment)
       )
@@ -365,7 +371,8 @@ class HiveEngineConnExecutor(
   }
 
   /**
-   * 在job完成之前,要将singleSqlProgressMap的剩余的内容全部变为成功
+   * Before the job is completed, all the remaining contents of the singleSqlProgressMap should be
+   * changed to success
    */
   private def onComplete(): Unit = {}
 
@@ -399,7 +406,7 @@ class HiveEngineConnExecutor(
   override def FetchResource: util.HashMap[String, ResourceWithStatus] = {
     val resourceMap = new util.HashMap[String, ResourceWithStatus]()
     val queue = hiveConf.get("mapreduce.job.queuename")
-    HadoopJobExecHelper.runningJobs.foreach(yarnJob => {
+    HadoopJobExecHelper.runningJobs.asScala.foreach(yarnJob => {
       val counters = yarnJob.getCounters
       if (counters != null) {
         val millsMap = counters.getCounter(Counters.MILLIS_MAPS)
@@ -439,7 +446,7 @@ class HiveEngineConnExecutor(
       val currentSQL = engineExecutorContext.getCurrentParagraph
       val currentBegin = (currentSQL - 1) / totalSQLs.asInstanceOf[Float]
       HadoopJobExecHelper.runningJobs synchronized {
-        HadoopJobExecHelper.runningJobs foreach { runningJob =>
+        HadoopJobExecHelper.runningJobs.asScala foreach { runningJob =>
           val name = runningJob.getID.toString
          val _progress = runningJob.reduceProgress() + runningJob.mapProgress()
           singleSqlProgressMap.put(name, _progress / 2)
@@ -447,7 +454,7 @@ class HiveEngineConnExecutor(
       }
       var totalProgress: Float = 0.0f
       val hiveRunJobs = if (numberOfMRJobs <= 0) 1 else numberOfMRJobs
-      singleSqlProgressMap foreach { case (_name, _progress) =>
+      singleSqlProgressMap.asScala foreach { case (_name, _progress) =>
         totalProgress += _progress
       }
       try {
@@ -484,7 +491,7 @@ class HiveEngineConnExecutor(
     }
 
     HadoopJobExecHelper.runningJobs synchronized {
-      HadoopJobExecHelper.runningJobs foreach { runningJob =>
+      HadoopJobExecHelper.runningJobs.asScala foreach { runningJob =>
         val succeedTask =
          ((runningJob.mapProgress() + runningJob.reduceProgress()) * 100).asInstanceOf[Int]
         if (
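
A detail worth noting in the result-handling hunk above: arrAny is a Scala ArrayBuffer, and .asJava wraps it in a java.util.List view backed by the same buffer, so add on the view mutates arrAny exactly as the removed JavaConversions implicits did. A minimal sketch showing the wrapper is a view rather than a copy (names are illustrative):

    import scala.collection.JavaConverters._
    import scala.collection.mutable.ArrayBuffer

    object BufferViewDemo {
      def main(args: Array[String]): Unit = {
        val arrAny = new ArrayBuffer[Any]()
        // .asJava returns a java.util.List[Any] backed by the same ArrayBuffer.
        arrAny.asJava.add("field1")
        arrAny.asJava.add("")
        assert(arrAny.length == 2) // mutations through the Java view are visible here
      }
    }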
diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/hook/HiveAddJarsEngineHook.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/hook/HiveAddJarsEngineHook.scala
index 768d8efbf..9019db962 100644
--- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/hook/HiveAddJarsEngineHook.scala
+++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/hook/HiveAddJarsEngineHook.scala
@@ -31,7 +31,7 @@ import org.apache.linkis.manager.label.entity.engine.{CodeLanguageLabel, RunType
 
 import org.apache.commons.lang3.StringUtils
 
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
 
 class HiveAddJarsEngineHook extends EngineConnHook with Logging {
 
@@ -54,7 +54,7 @@ class HiveAddJarsEngineHook extends EngineConnHook with Logging {
     var jars: String = ""
     val udf_jars = CommonVars(Environment.UDF_JARS.toString, "", "UDF jar PAth").getValue
     logger.info("udf jar_path:" + udf_jars)
-    options foreach { case (key, value) =>
+    options.asScala foreach { case (key, value) =>
       if (JARS.equals(key)) {
         jars = value
       }
diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/log/LogHelper.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/log/LogHelper.scala
index f6edf10d9..8abd56600 100644
--- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/log/LogHelper.scala
+++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/log/LogHelper.scala
@@ -66,7 +66,9 @@ object LogHelper {
 
   def main(args: Array[String]): Unit = {
     val log = "ssssx"
+    // scalastyle:off println
     println(matchCompletedPattern(log))
+    // scalastyle:on println
   }
 
 }
diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/progress/HiveProgressHelper.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/progress/HiveProgressHelper.scala
index 842602061..ee67d2969 100644
--- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/progress/HiveProgressHelper.scala
+++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/progress/HiveProgressHelper.scala
@@ -43,8 +43,8 @@ object HiveProgressHelper {
 
   def storeHiveProgress(hiveProgress: java.util.List[HiveProgress]): Unit = {
     // logger.info("begin to store hive progress")
-    import scala.collection.JavaConversions._
-    hiveProgress foreach hiveProgressQueue.put
+    import scala.collection.JavaConverters._
+    hiveProgress.asScala foreach hiveProgressQueue.put
   }
 
   def clearHiveProgress(): Unit = {
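
The hunk above keeps the converter import local to storeHiveProgress, so the extension methods are confined to the one method that crosses the Java/Scala boundary. A minimal sketch of that scoping, assuming an illustrative queue and element type:

    import java.util
    import java.util.concurrent.LinkedBlockingQueue

    object ProgressDemo {
      private val progressQueue = new LinkedBlockingQueue[String]()

      def storeProgress(progress: java.util.List[String]): Unit = {
        // Import scoped to the method, as in HiveProgressHelper.storeHiveProgress.
        import scala.collection.JavaConverters._
        progress.asScala foreach progressQueue.put
      }

      def main(args: Array[String]): Unit = {
        storeProgress(util.Arrays.asList("10%", "50%"))
        assert(progressQueue.size() == 2)
      }
    }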


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
