Github user viirya commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19556#discussion_r146426012
  
    --- Diff: core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala ---
    @@ -91,6 +91,52 @@ private[spark] object ClosureCleaner extends Logging {
         (seen - obj.getClass).toList
       }
     
    +  /** Initializes the accessed fields for outer classes and their super classes. */
    +  private def initAccessedFields(
    +      accessedFields: Map[Class[_], Set[String]],
    +      outerClasses: Seq[Class[_]]): Unit = {
    +    for (cls <- outerClasses) {
    +      accessedFields(cls) = Set.empty[String]
    +
    +      var superClass = cls.getSuperclass()
    +      while (superClass != null) {
    +        accessedFields(superClass) = Set.empty[String]
    +        superClass = superClass.getSuperclass()
    +      }
    +    }
    +  }
    +
    +  /** Sets accessed fields for given class in clone object based on given object. */
    +  private def setAccessedFields(
    +      outerClass: Class[_],
    +      clone: AnyRef,
    +      obj: AnyRef,
    +      accessedFields: Map[Class[_], Set[String]]): Unit = {
    +    for (fieldName <- accessedFields(outerClass)) {
    +      val field = outerClass.getDeclaredField(fieldName)
    +      field.setAccessible(true)
    +      val value = field.get(obj)
    +      field.set(clone, value)
    +    }
    +  }
    +
    +  /** Clones a given object and sets accessed fields in cloned object. */
    +  private def cloneAndSetFields(
    +      parent: AnyRef,
    +      obj: AnyRef,
    +      outerClass: Class[_],
    +      accessedFields: Map[Class[_], Set[String]]): AnyRef = {
    +    val clone = instantiateClass(outerClass, parent)
    +    setAccessedFields(outerClass, clone, obj, accessedFields)
    +
    +    var superClass = outerClass.getSuperclass()
    +    while (superClass != null) {
    --- End diff --
    
    Thanks. Looks good.
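
    For readers skimming the diff outside an IDE, here is a minimal, self-contained sketch (not part of this PR; `FieldCopySketch`, `Base`, and `Outer` are hypothetical names) of the reflection pattern these helpers factor out: record accessed field names per declaring class, then copy those fields from the original outer object into the clone while walking up the superclass chain via `getSuperclass()`.

        import scala.collection.mutable

        object FieldCopySketch {

          class Base { var baseField: Int = 1 }
          class Outer extends Base { var outerField: String = "hello" }

          // Mirrors the shape of setAccessedFields: reflectively copy each
          // recorded field of `cls` from `obj` into `clone`.
          def copyFields(
              cls: Class[_],
              clone: AnyRef,
              obj: AnyRef,
              accessedFields: mutable.Map[Class[_], mutable.Set[String]]): Unit = {
            for (fieldName <- accessedFields.getOrElse(cls, mutable.Set.empty[String])) {
              val field = cls.getDeclaredField(fieldName)
              field.setAccessible(true)
              field.set(clone, field.get(obj))
            }
          }

          def main(args: Array[String]): Unit = {
            val original = new Outer
            original.outerField = "accessed"
            original.baseField = 42
            val clone = new Outer

            // Accessed fields recorded per declaring class, in the same spirit
            // as the ClosureCleaner's accessedFields map.
            val accessedFields = mutable.Map[Class[_], mutable.Set[String]](
              classOf[Outer] -> mutable.Set("outerField"),
              classOf[Base] -> mutable.Set("baseField"))

            // Walk the class and its superclasses, copying the recorded fields,
            // analogous to the getSuperclass() loop in cloneAndSetFields.
            var currentClass: Class[_] = classOf[Outer]
            while (currentClass != null) {
              copyFields(currentClass, clone, original, accessedFields)
              currentClass = currentClass.getSuperclass
            }

            assert(clone.outerField == "accessed" && clone.baseField == 42)
          }
        }

    The real helpers differ in that the clone comes from instantiateClass rather than a plain constructor call, but the per-class field-copy loop is the same idea.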


---
