Github user xuanyuanking commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22093#discussion_r209650955
  
    --- Diff: core/src/test/scala/org/apache/spark/FileSuite.scala ---
    @@ -424,6 +425,39 @@ class FileSuite extends SparkFunSuite with LocalSparkContext {
         randomRDD.saveAsNewAPIHadoopDataset(jobConfig)
         assert(new File(tempDir.getPath + "/outputDataset_new/part-r-00000").exists() === true)
       }
    +  
    +  test("SPARK-25100: Using KryoSerializer and" +
    +      " setting registrationRequired true can lead job failed") {
    +    val tempDir = Utils.createTempDir()
    +    val inputDir = tempDir.getAbsolutePath + "/input"
    +    val outputDir = tempDir.getAbsolutePath + "/tmp"
    +    
    +    val writer = new PrintWriter(new File(inputDir))
    +    
    +    for(i <- 1 to 100) {
    +      writer.print(i)
    +      writer.write('\n')
    +    }
    +    
    +    writer.close()
    +    
    +    val conf = new SparkConf(false).setMaster("local").
    +        set("spark.kryo.registrationRequired", "true").setAppName("test")
    +    conf.set("spark.serializer", classOf[KryoSerializer].getName)
    +    conf.set("spark.serializer", 
"org.apache.spark.serializer.KryoSerializer")
    --- End diff --
    
    Why do we need to set 'spark.serializer' twice?
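    
    For reference, a single assignment should be sufficient here: classOf[KryoSerializer].getName already resolves to "org.apache.spark.serializer.KryoSerializer", so the second conf.set looks redundant. A minimal sketch of what the config setup could look like (the chained style is only a suggestion):
    
        // Minimal sketch, assuming one assignment of "spark.serializer" is enough;
        // both forms in the diff name the same class.
        import org.apache.spark.SparkConf
        import org.apache.spark.serializer.KryoSerializer
    
        val conf = new SparkConf(false)
          .setMaster("local")
          .setAppName("test")
          .set("spark.kryo.registrationRequired", "true")
          .set("spark.serializer", classOf[KryoSerializer].getName)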

