This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new aea9fb74ca6 [MINOR][CORE][SQL][FOLLOWUP] Add missing s prefix to enable string interpolation
aea9fb74ca6 is described below

commit aea9fb74ca6bc91604b32696a5638e5c93933d1f
Author: Enrico Minack <git...@enrico.minack.dev>
AuthorDate: Fri Oct 21 20:20:04 2022 +0900

    [MINOR][CORE][SQL][FOLLOWUP] Add missing s prefix to enable string interpolation
    
    ### What changes were proposed in this pull request?
    Adds missing `s` prefix to enable string interpolation. Complements #38297.
    
    ### Why are the changes needed?
    Without the `s` prefix, the strings contain the literal variable names instead of their substituted values.
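    
    For illustration, a minimal sketch of the difference (the value `42` is hypothetical; `taskId` mirrors the test code in the diff below):
    
    ```scala
    val taskId = 42
    println("TASK ${taskId}")   // no interpolation, prints: TASK ${taskId}
    println(s"TASK ${taskId}")  // interpolated, prints:     TASK 42
    ```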
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, affected log and test messages will now contain interpolated values instead of literal placeholders.
    
    ### How was this patch tested?
    Not tested.
    
    Closes #38307 from EnricoMi/branch-fix-string-interpolation-2.
    
    Authored-by: Enrico Minack <git...@enrico.minack.dev>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala | 2 +-
 .../main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala | 2 +-
 .../apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala | 4 ++--
 .../spark/ml/regression/GeneralizedLinearRegressionSuite.scala    | 2 +-
 .../scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala     | 8 ++++----
 5 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
index b6d6441925a..4efce34b18c 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -1883,7 +1883,7 @@ private[spark] class DAGScheduler(
           if (ignoreStageFailure) {
             logInfo(s"Ignoring fetch failure from $task of $failedStage 
attempt " +
               s"${task.stageAttemptId} when count 
spark.stage.maxConsecutiveAttempts " +
-              "as executor ${bmAddress.executorId} is decommissioned and " +
+              s"as executor ${bmAddress.executorId} is decommissioned and " +
               s" ${config.STAGE_IGNORE_DECOMMISSION_FETCH_FAILURE.key}=true")
           } else {
             failedStage.failedAttemptIds.add(task.stageAttemptId)
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
index 5004262a71c..1eb588124a7 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
@@ -641,7 +641,7 @@ private[spark] class TaskSchedulerImpl(
                    if (!unschedulableTaskSetToExpiryTime.contains(taskSet)) {
                      logInfo("Notifying ExecutorAllocationManager to allocate more executors to" +
                        " schedule the unschedulable task before aborting" +
-                        " stage ${taskSet.stageId}.")
+                        s" stage ${taskSet.stageId}.")
                      dagScheduler.unschedulableTaskSetAdded(taskSet.taskSet.stageId,
                        taskSet.taskSet.stageAttemptId)
                      updateUnschedulableTaskSetTimeoutAndStartAbortTimer(taskSet, taskIndex)
diff --git a/core/src/test/scala/org/apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala b/core/src/test/scala/org/apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala
index a12b7034a6d..a8b1304b76f 100644
--- a/core/src/test/scala/org/apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala
@@ -395,7 +395,7 @@ class CoarseGrainedExecutorBackendSuite extends SparkFunSuite
 
       // Fake tasks with different taskIds.
       val taskDescriptions = (1 to numTasks).map {
-        taskId => new TaskDescription(taskId, 2, "1", "TASK ${taskId}", 19,
+        taskId => new TaskDescription(taskId, 2, "1", s"TASK $taskId", 19,
          1, mutable.Map.empty, mutable.Map.empty, mutable.Map.empty, new Properties, 1,
           Map(GPU -> new ResourceInformation(GPU, Array("0", "1"))), data)
       }
@@ -483,7 +483,7 @@ class CoarseGrainedExecutorBackendSuite extends SparkFunSuite
 
       // Fake tasks with different taskIds.
       val taskDescriptions = (1 to numTasks).map {
-        taskId => new TaskDescription(taskId, 2, "1", "TASK ${taskId}", 19,
+        taskId => new TaskDescription(taskId, 2, "1", s"TASK $taskId", 19,
          1, mutable.Map.empty, mutable.Map.empty, mutable.Map.empty, new Properties, 1,
           Map(GPU -> new ResourceInformation(GPU, Array("0", "1"))), data)
       }
diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala
index 3acb0bc3c32..1836b07cfd8 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala
@@ -1686,7 +1686,7 @@ class GeneralizedLinearRegressionSuite extends MLTest with DefaultReadWriteTest
         .setFeaturesCol("features")
       val model = trainer.fit(dataset)
       val actual = model.summary.aic
-      assert(actual ~= expected(idx) absTol 1e-4, "Model mismatch: GLM with regParam = $regParam.")
+      assert(actual ~= expected(idx) absTol 1e-4, s"Model mismatch: GLM with regParam = $regParam.")
       idx += 1
     }
   }
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
index b7382364732..83d77a0a791 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
@@ -629,14 +629,14 @@ object DecisionTreeSuite extends SparkFunSuite {
       case (Some(aNode), Some(bNode)) => checkEqual(aNode, bNode)
       case (None, None) =>
       case _ =>
-        fail("Only one instance has leftNode defined. (a.leftNode: 
${a.leftNode}," +
-          " b.leftNode: ${b.leftNode})")
+        fail(s"Only one instance has leftNode defined. (a.leftNode: 
${a.leftNode}," +
+          s" b.leftNode: ${b.leftNode})")
     }
     (a.rightNode, b.rightNode) match {
       case (Some(aNode: Node), Some(bNode: Node)) => checkEqual(aNode, bNode)
       case (None, None) =>
-      case _ => fail("Only one instance has rightNode defined. (a.rightNode: ${a.rightNode}, " +
-        "b.rightNode: ${b.rightNode})")
+      case _ => fail(s"Only one instance has rightNode defined. (a.rightNode: ${a.rightNode}, " +
+        s"b.rightNode: ${b.rightNode})")
     }
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
