This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 014685c41e4 [SPARK-43277][YARN] Clean up deprecation hadoop api usage in `yarn` module
014685c41e4 is described below

commit 014685c41e4741f83570d8a2a6a253e48967919a
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Tue Apr 25 22:12:35 2023 -0500

    [SPARK-43277][YARN] Clean up deprecation hadoop api usage in `yarn` module
    
    ### What changes were proposed in this pull request?
    The `yarn` module has the following compilation warnings related to the Hadoop API:
    
    ```
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala:157: [deprecation  org.apache.spark.deploy.yarn.ApplicationMaster.prepareLocalResources.setupDistributedCache | origin=org.apache.hadoop.yarn.util.ConverterUtils.getYarnUrlFromURI | version=] method getYarnUrlFromURI in class ConverterUtils is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala:292: [deprecation  org.apache.spark.deploy.yarn.Client.createApplicationSubmissionContext | origin=org.apache.hadoop.yarn.api.records.Resource.setMemory | version=] method setMemory in class Resource is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala:307: [deprecation  org.apache.spark.deploy.yarn.Client.createApplicationSubmissionContext | origin=org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext.setAMContainerResourceRequest | version=] method setAMContainerResourceRequest in class ApplicationSubmissionContext is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala:392: [deprecation  org.apache.spark.deploy.yarn.Client.verifyClusterResources.maxMem | origin=org.apache.hadoop.yarn.api.records.Resource.getMemory | version=] method getMemory in class Resource is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManager.scala:76: [deprecation  org.apache.spark.deploy.yarn.ClientDistributedCacheManager.addResource | origin=org.apache.hadoop.yarn.util.ConverterUtils.getYarnUrlFromPath | version=] method getYarnUrlFromPath in class ConverterUtils is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala:510: [deprecation  org.apache.spark.deploy.yarn.YarnAllocator.updateResourceRequests.$anonfun.requestContainerMessage | origin=org.apache.hadoop.yarn.api.records.Resource.getMemory | version=] method getMemory in class Resource is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala:737: [deprecation  org.apache.spark.deploy.yarn.YarnAllocator.runAllocatedContainers.$anonfun | origin=org.apache.hadoop.yarn.api.records.Resource.getMemory | version=] method getMemory in class Resource is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala:737: [deprecation  org.apache.spark.deploy.yarn.YarnAllocator.runAllocatedContainers.$anonfun | origin=org.apache.hadoop.yarn.api.records.Resource.getMemory | version=] method getMemory in class Resource is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala:202: [deprecation  org.apache.spark.deploy.yarn.YarnSparkHadoopUtil.getContainerId | origin=org.apache.hadoop.yarn.util.ConverterUtils.toContainerId | version=] method toContainerId in class ConverterUtils is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/main/scala/org/apache/spark/util/YarnContainerInfoHelper.scala:75: [deprecation  org.apache.spark.util.YarnContainerInfoHelper.getAttributes | origin=org.apache.hadoop.yarn.util.ConverterUtils.toString | version=] method toString in class ConverterUtils is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManagerSuite.scala:83: [deprecation  org.apache.spark.deploy.yarn.ClientDistributedCacheManagerSuite.<local ClientDistributedCacheManagerSuite>.$org_scalatest_assert_macro_expr.$org_scalatest_assert_macro_left | origin=org.apache.hadoop.yarn.util.ConverterUtils.getPathFromYarnURL | version=] method getPathFromYarnURL in class ConverterUtils is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManagerSuite.scala:105: [deprecation  org.apache.spark.deploy.yarn.ClientDistributedCacheManagerSuite.<local ClientDistributedCacheManagerSuite>.$org_scalatest_assert_macro_expr.$org_scalatest_assert_macro_left | origin=org.apache.hadoop.yarn.util.ConverterUtils.getPathFromYarnURL | version=] method getPathFromYarnURL in class ConverterUtils is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManagerSuite.scala:161: [deprecation  org.apache.spark.deploy.yarn.ClientDistributedCacheManagerSuite.<local ClientDistributedCacheManagerSuite>.$org_scalatest_assert_macro_expr.$org_scalatest_assert_macro_left | origin=org.apache.hadoop.yarn.util.ConverterUtils.getPathFromYarnURL | version=] method getPathFromYarnURL in class ConverterUtils is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManagerSuite.scala:190: [deprecation  org.apache.spark.deploy.yarn.ClientDistributedCacheManagerSuite.<local ClientDistributedCacheManagerSuite>.$org_scalatest_assert_macro_expr.$org_scalatest_assert_macro_left | origin=org.apache.hadoop.yarn.util.ConverterUtils.getPathFromYarnURL | version=] method getPathFromYarnURL in class ConverterUtils is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ResourceRequestHelperSuite.scala:180: [deprecation  org.apache.spark.deploy.yarn.ResourceRequestHelperSuite.createResource | origin=org.apache.hadoop.yarn.api.records.Resource.setMemory | version=] method setMemory in class Resource is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala:288: [deprecation  org.apache.spark.deploy.yarn.YarnAllocatorSuite.<local YarnAllocatorSuite>.expectedResources | origin=org.apache.hadoop.yarn.api.records.Resource.getMemory | version=] method getMemory in class Resource is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala:726: [deprecation  org.apache.spark.deploy.yarn.YarnAllocatorSuite.<local YarnAllocatorSuite>.memory | origin=org.apache.hadoop.yarn.api.records.Resource.getMemory | version=] method getMemory in class Resource is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala:742: [deprecation  org.apache.spark.deploy.yarn.YarnAllocatorSuite.<local YarnAllocatorSuite>.memory | origin=org.apache.hadoop.yarn.api.records.Resource.getMemory | version=] method getMemory in class Resource is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala:757: [deprecation  org.apache.spark.deploy.yarn.YarnAllocatorSuite.<local YarnAllocatorSuite>.memory | origin=org.apache.hadoop.yarn.api.records.Resource.getMemory | version=] method getMemory in class Resource is deprecated
    [WARNING] [Warn] /${spark-source-dir}/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala:655: [deprecation  org.apache.spark.deploy.yarn.YarnClusterDriver.main.expectationAttributes | origin=org.apache.hadoop.yarn.util.ConverterUtils.toString | version=] method toString in class ConverterUtils is deprecated
    ```
    
    This PR makes the following changes (summarized in the sketch after this list):
    
    - Use `URL.fromURI` instead of `ConverterUtils.getYarnUrlFromURI`; see
      https://github.com/apache/hadoop/blob/706d88266abcee09ed78fbaa0ad5f74d818ab0e9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java#L77-L84
    
    - Use `Resource.setMemorySize` instead of `Resource.setMemory`; see
      https://github.com/apache/hadoop/blob/706d88266abcee09ed78fbaa0ad5f74d818ab0e9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ResourcePBImpl.java#L110-L114
    
    - Use `ApplicationSubmissionContext#setAMContainerResourceRequests` instead of `ApplicationSubmissionContext#setAMContainerResourceRequest`; see
      https://github.com/apache/hadoop/blob/706d88266abcee09ed78fbaa0ad5f74d818ab0e9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationSubmissionContext.java#L482-L490
    
    - Use `Resource.getMemorySize` instead of `Resource.getMemory`; see
      https://github.com/apache/hadoop/blob/706d88266abcee09ed78fbaa0ad5f74d818ab0e9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ResourcePBImpl.java#L92-L96
    
    - Use `URL.fromPath` instead of `ConverterUtils.getYarnUrlFromPath`; see
      https://github.com/apache/hadoop/blob/706d88266abcee09ed78fbaa0ad5f74d818ab0e9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java#L68-L75
    
    - Use `ContainerId.fromString` instead of `ConverterUtils.toContainerId`; see
      https://github.com/apache/hadoop/blob/706d88266abcee09ed78fbaa0ad5f74d818ab0e9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java#L133-L141
    
    - Use `ContainerId.toString` instead of `ConverterUtils.toString`, and introduce `YarnContainerInfoHelper.convertToString` to defend against `null`; see
      https://github.com/apache/hadoop/blob/706d88266abcee09ed78fbaa0ad5f74d818ab0e9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java#L106-L113
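    
    Taken together, the replacements look like the following minimal Scala sketch. It is not part of the commit; it assumes a classpath where the non-deprecated YARN APIs above exist (Hadoop 2.8+), and the object name and container-id string are made up for illustration:
    
    ```scala
    import java.net.URI
    import java.util.Collections
    
    import org.apache.hadoop.fs.Path
    import org.apache.hadoop.yarn.api.records.{ApplicationSubmissionContext, ContainerId, Resource, ResourceRequest, URL}
    import org.apache.hadoop.yarn.util.Records
    
    object DeprecationMigrationSketch {
      def main(args: Array[String]): Unit = {
        // URL.fromURI / URL.fromPath replace ConverterUtils.getYarnUrlFromURI / getYarnUrlFromPath.
        val urlFromUri = URL.fromURI(new URI("hdfs:///tmp/spark.jar"))
        val urlFromPath = URL.fromPath(new Path("/tmp/spark.jar"))
    
        // URL#toPath replaces ConverterUtils.getPathFromYarnURL (used in the test changes below).
        val roundTripped: Path = urlFromPath.toPath
    
        // The long-based setMemorySize/getMemorySize replace the int-based setMemory/getMemory.
        val capability = Records.newRecord(classOf[Resource])
        capability.setMemorySize(1024L)
        capability.setVirtualCores(1)
        val mem: Long = capability.getMemorySize
    
        // setAMContainerResourceRequests takes a List[ResourceRequest] where the
        // deprecated setAMContainerResourceRequest took a single request.
        val appContext = Records.newRecord(classOf[ApplicationSubmissionContext])
        val amRequest = Records.newRecord(classOf[ResourceRequest])
        amRequest.setCapability(capability)
        appContext.setAMContainerResourceRequests(Collections.singletonList(amRequest))
    
        // ContainerId.fromString replaces ConverterUtils.toContainerId (hypothetical id string).
        val cid = ContainerId.fromString("container_1682400000000_0001_01_000001")
    
        // ContainerId.toString replaces ConverterUtils.toString, but a plain .toString
        // NPEs on null, hence the guard mirrored by the new YarnContainerInfoHelper.convertToString.
        val cidString = if (cid != null) cid.toString else null
    
        println(s"$urlFromUri $roundTripped $mem $cidString")
      }
    }
    ```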
    
    ### Why are the changes needed?
    Clean up deprecated Hadoop API usage in the `yarn` module.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    Closes #40940 from LuciferYang/yarn-deprecation.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: Sean Owen <sro...@gmail.com>
---
 .../scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala   | 4 ++--
 .../src/main/scala/org/apache/spark/deploy/yarn/Client.scala     | 8 ++++----
 .../apache/spark/deploy/yarn/ClientDistributedCacheManager.scala | 4 ++--
 .../main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala  | 4 ++--
 .../scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala | 3 +--
 .../scala/org/apache/spark/util/YarnContainerInfoHelper.scala    | 7 +++++--
 .../spark/deploy/yarn/ClientDistributedCacheManagerSuite.scala   | 9 ++++-----
 .../apache/spark/deploy/yarn/ResourceRequestHelperSuite.scala    | 2 +-
 .../scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala  | 8 ++++----
 .../scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala    | 3 +--
 10 files changed, 26 insertions(+), 26 deletions(-)

diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index 1994775f36d..0149a3f6217 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -36,7 +36,7 @@ import org.apache.hadoop.yarn.api._
 import org.apache.hadoop.yarn.api.records._
 import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException
-import org.apache.hadoop.yarn.util.{ConverterUtils, Records}
+import org.apache.hadoop.yarn.util.Records
 
 import org.apache.spark._
 import org.apache.spark.deploy.{ExecutorFailureTracker, SparkHadoopUtil}
@@ -154,7 +154,7 @@ private[spark] class ApplicationMaster(
       val amJarRsrc = Records.newRecord(classOf[LocalResource])
       amJarRsrc.setType(rtype)
       amJarRsrc.setVisibility(LocalResourceVisibility.valueOf(vis))
-      amJarRsrc.setResource(ConverterUtils.getYarnUrlFromURI(uri))
+      amJarRsrc.setResource(URL.fromURI(uri))
       amJarRsrc.setTimestamp(timestamp.toLong)
       amJarRsrc.setSize(size.toLong)
 
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 4619d65f136..8257a08fd14 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -22,7 +22,7 @@ import java.net.{InetAddress, UnknownHostException, URI, URL}
 import java.nio.ByteBuffer
 import java.nio.charset.StandardCharsets
 import java.nio.file.{Files, Paths}
-import java.util.{Locale, Properties, UUID}
+import java.util.{Collections, Locale, Properties, UUID}
 import java.util.zip.{ZipEntry, ZipOutputStream}
 
 import scala.collection.JavaConverters._
@@ -289,7 +289,7 @@ private[spark] class Client(
     }
 
     val capability = Records.newRecord(classOf[Resource])
-    capability.setMemory(amMemory + amMemoryOverhead)
+    capability.setMemorySize(amMemory + amMemoryOverhead)
     capability.setVirtualCores(amCores)
     if (amResources.nonEmpty) {
       ResourceRequestHelper.setResourceRequests(amResources, capability)
@@ -304,7 +304,7 @@ private[spark] class Client(
         amRequest.setCapability(capability)
         amRequest.setNumContainers(1)
         amRequest.setNodeLabelExpression(expr)
-        appContext.setAMContainerResourceRequest(amRequest)
+        appContext.setAMContainerResourceRequests(Collections.singletonList(amRequest))
       case None =>
         appContext.setResource(capability)
     }
@@ -389,7 +389,7 @@ private[spark] class Client(
    * Fail fast if we have requested more resources per container than is available in the cluster.
    */
   private def verifyClusterResources(newAppResponse: GetNewApplicationResponse): Unit = {
-    val maxMem = newAppResponse.getMaximumResourceCapability().getMemory()
+    val maxMem = newAppResponse.getMaximumResourceCapability.getMemorySize
     logInfo("Verifying our application has not requested more than the maximum 
" +
       s"memory capability of the cluster ($maxMem MB per container)")
     val executorMem =
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManager.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManager.scala
index e02fbd0c914..8add7442888 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManager.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManager.scala
@@ -25,7 +25,7 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
 import org.apache.hadoop.fs.permission.FsAction
 import org.apache.hadoop.yarn.api.records._
-import org.apache.hadoop.yarn.util.{ConverterUtils, Records}
+import org.apache.hadoop.yarn.util.Records
 
 import org.apache.spark.SparkConf
 import org.apache.spark.deploy.yarn.config._
@@ -73,7 +73,7 @@ private[spark] class ClientDistributedCacheManager() extends Logging {
     amJarRsrc.setType(resourceType)
     val visibility = getVisibility(conf, destPath.toUri(), statCache)
     amJarRsrc.setVisibility(visibility)
-    amJarRsrc.setResource(ConverterUtils.getYarnUrlFromPath(destPath))
+    amJarRsrc.setResource(URL.fromPath(destPath))
     amJarRsrc.setTimestamp(destStatus.getModificationTime())
     amJarRsrc.setSize(destStatus.getLen())
     require(link != null && link.nonEmpty, "You must specify a valid link name.")
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala
index 07c4da0ee81..b6ee21ed817 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala
@@ -507,7 +507,7 @@ private[yarn] class YarnAllocator(
           var requestContainerMessage = s"Will request $missing executor container(s) for " +
             s" ResourceProfile Id: $rpId, each with " +
             s"${resource.getVirtualCores} core(s) and " +
-            s"${resource.getMemory} MB memory."
+            s"${resource.getMemorySize} MB memory."
           if (resource.getResources().nonEmpty) {
             requestContainerMessage ++= s" with custom resources: $resource"
           }
@@ -734,7 +734,7 @@ private[yarn] class YarnAllocator(
       val containerId = container.getId
       val executorId = executorIdCounter.toString
       val yarnResourceForRpId = rpIdToYarnResource.get(rpId)
-      assert(container.getResource.getMemory >= yarnResourceForRpId.getMemory)
+      assert(container.getResource.getMemorySize >= yarnResourceForRpId.getMemorySize)
       logInfo(s"Launching container $containerId on host $executorHostname " +
         s"for executor with ID $executorId for ResourceProfile Id $rpId")
 
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
index 4d050b91a85..2055646973b 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
@@ -23,7 +23,6 @@ import scala.util.matching.Regex
 
 import org.apache.hadoop.yarn.api.ApplicationConstants
 import org.apache.hadoop.yarn.api.records.{ApplicationAccessType, ContainerId, Priority}
-import org.apache.hadoop.yarn.util.ConverterUtils
 
 import org.apache.spark.{SecurityManager, SparkConf}
 import org.apache.spark.launcher.YarnCommandBuilderUtils
@@ -199,7 +198,7 @@ object YarnSparkHadoopUtil {
 
   def getContainerId: ContainerId = {
     val containerIdString = System.getenv(ApplicationConstants.Environment.CONTAINER_ID.name())
-    ConverterUtils.toContainerId(containerIdString)
+    ContainerId.fromString(containerIdString)
   }
 
   /**
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/util/YarnContainerInfoHelper.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/util/YarnContainerInfoHelper.scala
index 854fe18c224..280a5f2a522 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/util/YarnContainerInfoHelper.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/util/YarnContainerInfoHelper.scala
@@ -22,7 +22,6 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
 import org.apache.hadoop.yarn.api.records.{Container, ContainerId}
 import org.apache.hadoop.yarn.conf.YarnConfiguration
-import org.apache.hadoop.yarn.util.ConverterUtils
 
 import org.apache.spark.deploy.yarn.YarnSparkHadoopUtil
 import org.apache.spark.internal.Logging
@@ -72,7 +71,7 @@ private[spark] object YarnContainerInfoHelper extends Logging {
         "NM_HTTP_PORT" -> getNodeManagerHttpPort(container),
         "NM_HTTP_ADDRESS" -> getNodeManagerHttpAddress(container),
         "CLUSTER_ID" -> getClusterId(yarnConf).getOrElse(""),
-        "CONTAINER_ID" -> ConverterUtils.toString(getContainerId(container)),
+        "CONTAINER_ID" -> convertToString(getContainerId(container)),
         "USER" -> Utils.getCurrentUserName(),
         "LOG_FILES" -> "stderr,stdout"
       ))
@@ -84,6 +83,10 @@ private[spark] object YarnContainerInfoHelper extends Logging {
     }
   }
 
+  def convertToString(containerId: ContainerId): String = if (containerId != null) {
+    containerId.toString
+  } else null
+
   def getContainerId(container: Option[Container]): ContainerId = container match {
     case Some(c) => c.getId
     case None => YarnSparkHadoopUtil.getContainerId
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManagerSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManagerSuite.scala
index c04b4e5cb55..996654f7415 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManagerSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientDistributedCacheManagerSuite.scala
@@ -29,7 +29,6 @@ import org.apache.hadoop.fs.Path
 import org.apache.hadoop.yarn.api.records.LocalResource
 import org.apache.hadoop.yarn.api.records.LocalResourceType
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility
-import org.apache.hadoop.yarn.util.ConverterUtils
 import org.mockito.Mockito.when
 import org.scalatestplus.mockito.MockitoSugar
 
@@ -80,7 +79,7 @@ class ClientDistributedCacheManagerSuite extends SparkFunSuite with MockitoSugar
       statCache, false)
     val resource = localResources("link")
     assert(resource.getVisibility() === LocalResourceVisibility.PRIVATE)
-    assert(ConverterUtils.getPathFromYarnURL(resource.getResource()) === destPath)
+    assert(resource.getResource().toPath === destPath)
     assert(resource.getTimestamp() === 0)
     assert(resource.getSize() === 0)
     assert(resource.getType() === LocalResourceType.FILE)
@@ -102,7 +101,7 @@ class ClientDistributedCacheManagerSuite extends SparkFunSuite with MockitoSugar
       statCache, false)
     val resource2 = localResources("link2")
     assert(resource2.getVisibility() === LocalResourceVisibility.PRIVATE)
-    assert(ConverterUtils.getPathFromYarnURL(resource2.getResource()) === destPath2)
+    assert(resource2.getResource().toPath === destPath2)
     assert(resource2.getTimestamp() === 10)
     assert(resource2.getSize() === 20)
     assert(resource2.getType() === LocalResourceType.FILE)
@@ -158,7 +157,7 @@ class ClientDistributedCacheManagerSuite extends SparkFunSuite with MockitoSugar
       statCache, true)
     val resource = localResources("link")
     assert(resource.getVisibility() === LocalResourceVisibility.PRIVATE)
-    assert(ConverterUtils.getPathFromYarnURL(resource.getResource()) === destPath)
+    assert(resource.getResource().toPath === destPath)
     assert(resource.getTimestamp() === 10)
     assert(resource.getSize() === 20)
     assert(resource.getType() === LocalResourceType.ARCHIVE)
@@ -187,7 +186,7 @@ class ClientDistributedCacheManagerSuite extends SparkFunSuite with MockitoSugar
       statCache, false)
     val resource = localResources("link")
     assert(resource.getVisibility() === LocalResourceVisibility.PRIVATE)
-    assert(ConverterUtils.getPathFromYarnURL(resource.getResource()) === destPath)
+    assert(resource.getResource().toPath === destPath)
     assert(resource.getTimestamp() === 10)
     assert(resource.getSize() === 20)
     assert(resource.getType() === LocalResourceType.ARCHIVE)
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ResourceRequestHelperSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ResourceRequestHelperSuite.scala
index 575784bba34..53b6d192e72 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ResourceRequestHelperSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ResourceRequestHelperSuite.scala
@@ -177,7 +177,7 @@ class ResourceRequestHelperSuite extends SparkFunSuite with Matchers {
 
   private def createResource(): Resource = {
     val resource = Records.newRecord(classOf[Resource])
-    resource.setMemory(512)
+    resource.setMemorySize(512)
     resource.setVirtualCores(2)
     resource
   }
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala
index f2b4222b85e..88c08abdca3 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala
@@ -285,7 +285,7 @@ class YarnAllocatorSuite extends SparkFunSuite with Matchers {
     handler.handleAllocatedContainers(Array(container))
 
     // get amount of memory and vcores from resource, so effectively skipping their validation
-    val expectedResources = Resource.newInstance(defaultResource.getMemory(),
+    val expectedResources = Resource.newInstance(defaultResource.getMemorySize(),
       defaultResource.getVirtualCores)
     setResourceRequests(Map("gpu" -> "2G"), expectedResources)
     val captor = ArgumentCaptor.forClass(classOf[ContainerRequest])
@@ -723,7 +723,7 @@ class YarnAllocatorSuite extends SparkFunSuite with Matchers {
       val (handler, _) = createAllocator(maxExecutors = 1,
         additionalConfigs = Map(EXECUTOR_MEMORY.key -> executorMemory.toString))
       val defaultResource = handler.rpIdToYarnResource.get(defaultRPId)
-      val memory = defaultResource.getMemory
+      val memory = defaultResource.getMemorySize
       assert(memory ==
         executorMemory + offHeapMemoryInMB + ResourceProfile.MEMORY_OVERHEAD_MIN_MIB)
     } finally {
@@ -739,7 +739,7 @@ class YarnAllocatorSuite extends SparkFunSuite with Matchers {
       val (handler, _) = createAllocator(maxExecutors = 1,
         additionalConfigs = Map(EXECUTOR_MEMORY.key -> executorMemory.toString))
       val defaultResource = handler.rpIdToYarnResource.get(defaultRPId)
-      val memory = defaultResource.getMemory
+      val memory = defaultResource.getMemorySize
       assert(memory == (executorMemory * 1.5).toLong)
     } finally {
       sparkConf.set(EXECUTOR_MEMORY_OVERHEAD_FACTOR, 0.1)
@@ -754,7 +754,7 @@ class YarnAllocatorSuite extends SparkFunSuite with Matchers {
       val (handler, _) = createAllocator(maxExecutors = 1,
         additionalConfigs = Map(EXECUTOR_MEMORY.key -> executorMemory.toString))
       val defaultResource = handler.rpIdToYarnResource.get(defaultRPId)
-      val memory = defaultResource.getMemory
+      val memory = defaultResource.getMemorySize
       assert(memory == (executorMemory * 1.4).toLong)
     } finally {
       sparkConf.set(EXECUTOR_MEMORY_OVERHEAD_FACTOR, 0.1)
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 67551276d46..2637b2eab80 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -29,7 +29,6 @@ import scala.io.Source
 
 import com.google.common.io.{ByteStreams, Files}
 import org.apache.hadoop.yarn.conf.YarnConfiguration
-import org.apache.hadoop.yarn.util.ConverterUtils
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.exceptions.TestFailedException
 import org.scalatest.matchers.must.Matchers
@@ -652,7 +651,7 @@ private object YarnClusterDriver extends Logging with Matchers {
           "NM_HTTP_PORT" -> YarnContainerInfoHelper.getNodeManagerHttpPort(container = None),
           "NM_HTTP_ADDRESS" -> YarnContainerInfoHelper.getNodeManagerHttpAddress(container = None),
           "CLUSTER_ID" -> YarnContainerInfoHelper.getClusterId(yarnConf).getOrElse(""),
-          "CONTAINER_ID" -> ConverterUtils.toString(containerId),
+          "CONTAINER_ID" -> YarnContainerInfoHelper.convertToString(containerId),
           "USER" -> user,
           "LOG_FILES" -> "stderr,stdout")
 

