Github user WeichenXu123 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21153#discussion_r184626842
  
    --- Diff: python/pyspark/ml/util.py ---
    @@ -523,11 +534,29 @@ def getAndSetParams(instance, metadata):
             """
             Extract Params from metadata, and set them in the instance.
             """
    +        # User-supplied param values
             for paramName in metadata['paramMap']:
                 param = instance.getParam(paramName)
                 paramValue = metadata['paramMap'][paramName]
                 instance.set(param, paramValue)
     
    +        # Default param values
    +        majorAndMinorVersions = majorMinorVersion(metadata['sparkVersion'])
    +        assert majorAndMinorVersions is not None, "Error loading metadata: Expected " + \
    +            "Spark version string but found {}".format(metadata['sparkVersion'])
    +
    +        major = majorAndMinorVersions[0]
    +        minor = majorAndMinorVersions[1]
    +        # For metadata file prior to Spark 2.4, there is no default section.
    +        if major > 2 or (major == 2 and minor >= 4):
    +            assert 'defaultParamMap' in metadata, "Error loading metadata: Expected " + \
    +                "`defaultParamMap` section not found"
    +
    +            for paramName in metadata['defaultParamMap']:
    +                param = instance.getParam(paramName)
    +                paramValue = metadata['defaultParamMap'][paramName]
    +                instance._setDefault(**{param.name: paramValue})
    --- End diff --
    
    remove the line `param = instance.getParam(paramName)` and change this line to
    `instance._setDefault(**{paramName: paramValue})`


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to