Github user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20147#discussion_r159757298
  
    --- Diff: sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala ---
    @@ -85,6 +90,43 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
         new File(tmpDataDir, name).getCanonicalPath
       }
     
    +  private def getFileFromUrl(urlString: String, targetDir: String, filename: String): Boolean = {
    +    val conf = new SparkConf
    +    val securityManager = new SecurityManager(conf)
    +    val hadoopConf = new Configuration
    +
    +    val outDir = new File(targetDir)
    +    if (!outDir.exists()) {
    +      outDir.mkdirs()
    +    }
    +
    +    try {
    +      val result = Utils.doFetchFile(urlString, outDir, filename, conf, securityManager, hadoopConf)
    +      result.exists()
    +    } catch {
    +      case ex: Exception => logWarning("Could not get file from url " + urlString + ": "
    +        + ex.getMessage)
    +        false
    +    }
    +  }
    +
    +  private def getStringFromUrl(urlString: String, encoding: String = "UTF-8"): String = {
    +    val outDir = Files.createTempDirectory("string-")
    +    val filename = "string-out.txt"
    +
    +    if (!getFileFromUrl(urlString, outDir.toString, filename)) {
    +      throw new IOException("Could not get string from url " + urlString)
    --- End diff --
    
    How about letting the exception from `doFetchFile` propagate, and only handling it as part of the retries in `tryDownloadSpark`?


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to