This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b8f22f33308 [MINOR] Remove redundant character escape "\\" and add UT
b8f22f33308 is described below

commit b8f22f33308ab51b93052457dba17b04c2daeb4a
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Mon May 15 18:04:31 2023 -0500

    [MINOR] Remove redundant character escape "\\" and add UT
    
    ### What changes were proposed in this pull request?
    This PR removes the redundant character escapes (e.g. "\\}") from the HADOOP_CONF_PATTERN regular expression and adds a UT for SparkHadoopUtil.substituteHadoopVariables.
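    
    The escapes are redundant because, inside a Java regex character
    class, "}" and "$" are literal characters, and an unpaired "}"
    outside a class is also treated literally. A minimal sketch of the
    equivalence (the sample input below is illustrative, not taken from
    the patch):
    
        // Both patterns match exactly the same strings.
        val escaped   = "(\\$\\{hadoopconf-[^\\}\\$\\s]+\\})".r.unanchored
        val unescaped = "(\\$\\{hadoopconf-[^}$\\s]+})".r.unanchored
        val sample = "prefix ${hadoopconf-some.key} suffix"
        val m1 = sample match { case escaped(v) => v }
        val m2 = sample match { case unescaped(v) => v }
        assert(m1 == m2 && m1 == "${hadoopconf-some.key}")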
    
    ### Why are the changes needed?
    Make the code cleaner and remove a warning.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Pass existing GA checks and add a new UT.
    
    Closes #41170 from panbingkun/SparkHadoopUtil_fix.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Sean Owen <sro...@gmail.com>
---
 .../org/apache/spark/deploy/SparkHadoopUtil.scala  |  4 +-
 .../apache/spark/deploy/SparkHadoopUtilSuite.scala | 52 ++++++++++++++++++++++
 2 files changed, 54 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 4908a081367..9ff2621b791 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -174,7 +174,7 @@ private[spark] class SparkHadoopUtil extends Logging {
      * So we need a map to track the bytes read from the child threads and parent thread,
      * summing them together to get the bytes read of this task.
      */
-    new Function0[Long] {
+    new (() => Long) {
       private val bytesReadMap = new mutable.HashMap[Long, Long]()
 
       override def apply(): Long = {
@@ -248,7 +248,7 @@ private[spark] class SparkHadoopUtil extends Logging {
     if (isGlobPath(pattern)) globPath(fs, pattern) else Seq(pattern)
   }
 
-  private val HADOOP_CONF_PATTERN = "(\\$\\{hadoopconf-[^\\}\\$\\s]+\\})".r.unanchored
+  private val HADOOP_CONF_PATTERN = "(\\$\\{hadoopconf-[^}$\\s]+})".r.unanchored
 
   /**
    * Substitute variables by looking them up in Hadoop configs. Only variables that match the
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkHadoopUtilSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkHadoopUtilSuite.scala
index 17f1476cd8d..6250b7d0ed2 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkHadoopUtilSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkHadoopUtilSuite.scala
@@ -123,6 +123,58 @@ class SparkHadoopUtilSuite extends SparkFunSuite {
     assertConfigValue(hadoopConf, "fs.s3a.session.token", null)
   }
 
+  test("substituteHadoopVariables") {
+    val hadoopConf = new Configuration(false)
+    hadoopConf.set("xxx", "yyy")
+
+    val text1 = "${hadoopconf-xxx}"
+    val result1 = new SparkHadoopUtil().substituteHadoopVariables(text1, hadoopConf)
+    assert(result1 == "yyy")
+
+    val text2 = "${hadoopconf-xxx"
+    val result2 = new SparkHadoopUtil().substituteHadoopVariables(text2, hadoopConf)
+    assert(result2 == "${hadoopconf-xxx")
+
+    val text3 = "${hadoopconf-xxx}zzz"
+    val result3 = new SparkHadoopUtil().substituteHadoopVariables(text3, hadoopConf)
+    assert(result3 == "yyyzzz")
+
+    val text4 = "www${hadoopconf-xxx}zzz"
+    val result4 = new SparkHadoopUtil().substituteHadoopVariables(text4, hadoopConf)
+    assert(result4 == "wwwyyyzzz")
+
+    val text5 = "www${hadoopconf-xxx}"
+    val result5 = new SparkHadoopUtil().substituteHadoopVariables(text5, hadoopConf)
+    assert(result5 == "wwwyyy")
+
+    val text6 = "www${hadoopconf-xxx"
+    val result6 = new SparkHadoopUtil().substituteHadoopVariables(text6, hadoopConf)
+    assert(result6 == "www${hadoopconf-xxx")
+
+    val text7 = "www$hadoopconf-xxx}"
+    val result7 = new SparkHadoopUtil().substituteHadoopVariables(text7, hadoopConf)
+    assert(result7 == "www$hadoopconf-xxx}")
+
+    val text8 = "www{hadoopconf-xxx}"
+    val result8 = new SparkHadoopUtil().substituteHadoopVariables(text8, hadoopConf)
+    assert(result8 == "www{hadoopconf-xxx}")
+  }
+
+  test("Redundant character escape '\\}' in RegExp ") {
+    val HADOOP_CONF_PATTERN_1 = "(\\$\\{hadoopconf-[^}$\\s]+})".r.unanchored
+    val HADOOP_CONF_PATTERN_2 = "(\\$\\{hadoopconf-[^}$\\s]+\\})".r.unanchored
+
+    val text = "www${hadoopconf-xxx}zzz"
+    val target1 = text match {
+      case HADOOP_CONF_PATTERN_1(matched) => text.replace(matched, "yyy")
+    }
+    val target2 = text match {
+      case HADOOP_CONF_PATTERN_2(matched) => text.replace(matched, "yyy")
+    }
+    assert(target1 == "wwwyyyzzz")
+    assert(target2 == "wwwyyyzzz")
+  }
+
   /**
    * Assert that a hadoop configuration option has the expected value.
    * @param hadoopConf configuration to query


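For context on the first hunk above: in Scala, "() => Long" is syntactic
sugar for the trait Function0[Long], so "new (() => Long) { ... }"
instantiates exactly the same type as "new Function0[Long] { ... }"; only
the spelling changes. A minimal standalone sketch (the object and member
names are illustrative, not from the patch):

    object FunctionTypeSyntaxDemo {
      // A stateful no-argument function, written with the function-type
      // syntax adopted by the patch.
      val counter: () => Long = new (() => Long) {
        private var total = 0L
        override def apply(): Long = { total += 1; total }
      }

      def main(args: Array[String]): Unit = {
        assert(counter() == 1L)        // sugar for counter.apply()
        assert(counter.apply() == 2L)  // explicit apply, same result
      }
    }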
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
