GitHub user jkbradley commented on a diff in the pull request:

    https://github.com/apache/spark/pull/10469#discussion_r49408919
  
    --- Diff: python/pyspark/ml/util.py ---
    @@ -52,3 +71,141 @@ def _randomUID(cls):
             concatenates the class name, "_", and 12 random hex chars.
             """
             return cls.__name__ + "_" + uuid.uuid4().hex[-12:]
    +
    +
    +@inherit_doc
    +class MLWriter(object):
    +    """
    +    Abstract class for utility classes that can save ML instances.
    +
    +    .. versionadded:: 2.0.0
    +    """
    +
    +    def __init__(self, instance):
    +        self._jwrite = instance._java_obj.write()
    +
    +    @since("2.0.0")
    +    def save(self, path):
    +        """Saves the ML instances to the input path."""
    +        self._jwrite.save(path)
    +
    +    @since("2.0.0")
    +    def overwrite(self):
    +        """Overwrites if the output path already exists."""
    +        self._jwrite.overwrite()
    +        return self
    +
    +    @since("2.0.0")
    +    def context(self, sqlContext):
    +        """Sets the SQL context to use for saving."""
    +        self._jwrite.context(sqlContext._ssql_ctx)
    +        return self
    +
    +
    +@inherit_doc
    +class MLWritable(object):
    +    """
    +    Mixin for ML instances that provide MLWriter through their Scala
    +    implementation.
    +
    +    .. versionadded:: 2.0.0
    +    """
    +
    +    @since("2.0.0")
    --- End diff --
    
    Will this annotation be inherited by classes that are added in later Spark
    versions? (Could you please try generating the docs to see whether it
    appears for LinearRegression?) If so, let's omit it.
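
    For reference, a minimal, self-contained sketch (not the actual pyspark
    `since` implementation) of the mechanism in question: a decorator that
    appends a `.. versionadded::` note to a method's docstring, and a subclass
    that inherits the method, and therefore the note, unchanged. The subclass
    name below is only a hypothetical stand-in for an ML class such as
    LinearRegression.

        def since(version):
            """Append a ".. versionadded::" note to the decorated function."""
            def deco(f):
                note = "\n\n.. versionadded:: %s" % version
                f.__doc__ = (f.__doc__ or "").rstrip() + note
                return f
            return deco

        class MLWritable(object):
            @since("2.0.0")
            def write(self):
                """Returns an MLWriter instance for this ML instance."""
                raise NotImplementedError()

        # Hypothetical subclass standing in for a model added in a later
        # release; it inherits write() together with its annotated docstring.
        class FutureModel(MLWritable):
            pass

        print(FutureModel.write.__doc__)
        # The docstring still ends with ".. versionadded:: 2.0.0", which is
        # why the generated docs for the subclass would show the 2.0.0
        # annotation even if the subclass only appears in a later version.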

