Repository: spark
Updated Branches:
  refs/heads/branch-1.4 139c8240f -> 815e05654


[SPARK-7985] [ML] [MLlib] [Docs] Remove "fittingParamMap" references. Updating 
ML Doc "Estimator, Transformer, and Param" examples.

Updating ML Doc's *"Estimator, Transformer, and Param"* example to use 
`model.extractParamMap` instead of `model.fittingParamMap`, which no longer 
exists.

mengxr, I believe this addresses (part of) the *update documentation* TODO list 
item from [PR 5820](https://github.com/apache/spark/pull/5820).

Author: Mike Dusenberry <dusenberr...@gmail.com>

Closes #6514 from dusenberrymw/Fix_ML_Doc_Estimator_Transformer_Param_Example 
and squashes the following commits:

6366e1f [Mike Dusenberry] Updating instances of model.extractParamMap to 
model.parent.extractParamMap, since the Params of the parent Estimator could 
possibly differ from those of the Model.
d850e0e [Mike Dusenberry] Removing all references to "fittingParamMap" 
throughout Spark, since it has been removed.
0480304 [Mike Dusenberry] Updating the ML Doc "Estimator, Transformer, and 
Param" Java example to use model.extractParamMap() instead of 
model.fittingParamMap(), which no longer exists.
7d34939 [Mike Dusenberry] Updating ML Doc "Estimator, Transformer, and Param" 
example to use model.extractParamMap instead of model.fittingParamMap, which no 
longer exists.

(cherry picked from commit ad06727fe985ca243ebdaaba55cd7d35a4749d0a)
Signed-off-by: Joseph K. Bradley <jos...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/815e0565
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/815e0565
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/815e0565

Branch: refs/heads/branch-1.4
Commit: 815e0565422d20d8ab734f735da4bf4d06d5475e
Parents: 139c824
Author: Mike Dusenberry <dusenberr...@gmail.com>
Authored: Tue Jun 2 12:38:14 2015 -0700
Committer: Joseph K. Bradley <jos...@databricks.com>
Committed: Tue Jun 2 12:38:33 2015 -0700

----------------------------------------------------------------------
 docs/ml-guide.md                                             | 8 ++++----
 .../org/apache/spark/ml/classification/GBTClassifier.scala   | 2 +-
 .../spark/ml/classification/RandomForestClassifier.scala     | 2 +-
 .../scala/org/apache/spark/ml/regression/GBTRegressor.scala  | 2 +-
 .../apache/spark/ml/regression/RandomForestRegressor.scala   | 2 +-
 .../ml/classification/DecisionTreeClassifierSuite.scala      | 2 +-
 .../apache/spark/ml/classification/GBTClassifierSuite.scala  | 2 +-
 .../ml/classification/RandomForestClassifierSuite.scala      | 2 +-
 .../spark/ml/regression/DecisionTreeRegressorSuite.scala     | 2 +-
 .../org/apache/spark/ml/regression/GBTRegressorSuite.scala   | 2 +-
 .../spark/ml/regression/RandomForestRegressorSuite.scala     | 2 +-
 11 files changed, 14 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/docs/ml-guide.md
----------------------------------------------------------------------
diff --git a/docs/ml-guide.md b/docs/ml-guide.md
index c5f50ed..4eb622d 100644
--- a/docs/ml-guide.md
+++ b/docs/ml-guide.md
@@ -207,7 +207,7 @@ val model1 = lr.fit(training.toDF)
 // we can view the parameters it used during fit().
 // This prints the parameter (name: value) pairs, where names are unique IDs 
for this
 // LogisticRegression instance.
-println("Model 1 was fit using parameters: " + model1.fittingParamMap)
+println("Model 1 was fit using parameters: " + model1.parent.extractParamMap)
 
 // We may alternatively specify parameters using a ParamMap,
 // which supports several methods for specifying parameters.
@@ -222,7 +222,7 @@ val paramMapCombined = paramMap ++ paramMap2
 // Now learn a new model using the paramMapCombined parameters.
 // paramMapCombined overrides all parameters set earlier via lr.set* methods.
 val model2 = lr.fit(training.toDF, paramMapCombined)
-println("Model 2 was fit using parameters: " + model2.fittingParamMap)
+println("Model 2 was fit using parameters: " + model2.parent.extractParamMap)
 
 // Prepare test data.
 val test = sc.parallelize(Seq(
@@ -289,7 +289,7 @@ LogisticRegressionModel model1 = lr.fit(training);
 // we can view the parameters it used during fit().
 // This prints the parameter (name: value) pairs, where names are unique IDs 
for this
 // LogisticRegression instance.
-System.out.println("Model 1 was fit using parameters: " + 
model1.fittingParamMap());
+System.out.println("Model 1 was fit using parameters: " + 
model1.parent().extractParamMap());
 
 // We may alternatively specify parameters using a ParamMap.
 ParamMap paramMap = new ParamMap();
@@ -305,7 +305,7 @@ ParamMap paramMapCombined = paramMap.$plus$plus(paramMap2);
 // Now learn a new model using the paramMapCombined parameters.
 // paramMapCombined overrides all parameters set earlier via lr.set* methods.
 LogisticRegressionModel model2 = lr.fit(training, paramMapCombined);
-System.out.println("Model 2 was fit using parameters: " + 
model2.fittingParamMap());
+System.out.println("Model 2 was fit using parameters: " + 
model2.parent().extractParamMap());
 
 // Prepare test documents.
 List<LabeledPoint> localTest = Lists.newArrayList(

http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/mllib/src/main/scala/org/apache/spark/ml/classification/GBTClassifier.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/main/scala/org/apache/spark/ml/classification/GBTClassifier.scala 
b/mllib/src/main/scala/org/apache/spark/ml/classification/GBTClassifier.scala
index d8592eb..62f4b51 100644
--- 
a/mllib/src/main/scala/org/apache/spark/ml/classification/GBTClassifier.scala
+++ 
b/mllib/src/main/scala/org/apache/spark/ml/classification/GBTClassifier.scala
@@ -208,7 +208,7 @@ private[ml] object GBTClassificationModel {
     require(oldModel.algo == OldAlgo.Classification, "Cannot convert 
GradientBoostedTreesModel" +
       s" with algo=${oldModel.algo} (old API) to GBTClassificationModel (new 
API).")
     val newTrees = oldModel.trees.map { tree =>
-      // parent, fittingParamMap for each tree is null since there are no good 
ways to set these.
+      // parent for each tree is null since there is no good way to set this.
       DecisionTreeRegressionModel.fromOld(tree, null, categoricalFeatures)
     }
     val uid = if (parent != null) parent.uid else 
Identifiable.randomUID("gbtc")

http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/mllib/src/main/scala/org/apache/spark/ml/classification/RandomForestClassifier.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/main/scala/org/apache/spark/ml/classification/RandomForestClassifier.scala
 
b/mllib/src/main/scala/org/apache/spark/ml/classification/RandomForestClassifier.scala
index 67600eb..852a67e 100644
--- 
a/mllib/src/main/scala/org/apache/spark/ml/classification/RandomForestClassifier.scala
+++ 
b/mllib/src/main/scala/org/apache/spark/ml/classification/RandomForestClassifier.scala
@@ -170,7 +170,7 @@ private[ml] object RandomForestClassificationModel {
     require(oldModel.algo == OldAlgo.Classification, "Cannot convert 
RandomForestModel" +
       s" with algo=${oldModel.algo} (old API) to 
RandomForestClassificationModel (new API).")
     val newTrees = oldModel.trees.map { tree =>
-      // parent, fittingParamMap for each tree is null since there are no good 
ways to set these.
+      // parent for each tree is null since there is no good way to set this.
       DecisionTreeClassificationModel.fromOld(tree, null, categoricalFeatures)
     }
     val uid = if (parent != null) parent.uid else Identifiable.randomUID("rfc")

http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/mllib/src/main/scala/org/apache/spark/ml/regression/GBTRegressor.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/main/scala/org/apache/spark/ml/regression/GBTRegressor.scala 
b/mllib/src/main/scala/org/apache/spark/ml/regression/GBTRegressor.scala
index 69f4f54..b7e374b 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/regression/GBTRegressor.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/regression/GBTRegressor.scala
@@ -198,7 +198,7 @@ private[ml] object GBTRegressionModel {
     require(oldModel.algo == OldAlgo.Regression, "Cannot convert 
GradientBoostedTreesModel" +
       s" with algo=${oldModel.algo} (old API) to GBTRegressionModel (new 
API).")
     val newTrees = oldModel.trees.map { tree =>
-      // parent, fittingParamMap for each tree is null since there are no good 
ways to set these.
+      // parent for each tree is null since there is no good way to set this.
       DecisionTreeRegressionModel.fromOld(tree, null, categoricalFeatures)
     }
     val uid = if (parent != null) parent.uid else 
Identifiable.randomUID("gbtr")

http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/mllib/src/main/scala/org/apache/spark/ml/regression/RandomForestRegressor.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/main/scala/org/apache/spark/ml/regression/RandomForestRegressor.scala
 
b/mllib/src/main/scala/org/apache/spark/ml/regression/RandomForestRegressor.scala
index ae767a1..49a1f7c 100644
--- 
a/mllib/src/main/scala/org/apache/spark/ml/regression/RandomForestRegressor.scala
+++ 
b/mllib/src/main/scala/org/apache/spark/ml/regression/RandomForestRegressor.scala
@@ -152,7 +152,7 @@ private[ml] object RandomForestRegressionModel {
     require(oldModel.algo == OldAlgo.Regression, "Cannot convert 
RandomForestModel" +
       s" with algo=${oldModel.algo} (old API) to RandomForestRegressionModel 
(new API).")
     val newTrees = oldModel.trees.map { tree =>
-      // parent, fittingParamMap for each tree is null since there are no good 
ways to set these.
+      // parent for each tree is null since there is no good way to set this.
       DecisionTreeRegressionModel.fromOld(tree, null, categoricalFeatures)
     }
     new RandomForestRegressionModel(parent.uid, newTrees)

http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/mllib/src/test/scala/org/apache/spark/ml/classification/DecisionTreeClassifierSuite.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/test/scala/org/apache/spark/ml/classification/DecisionTreeClassifierSuite.scala
 
b/mllib/src/test/scala/org/apache/spark/ml/classification/DecisionTreeClassifierSuite.scala
index 3fdc66b..1e385e7 100644
--- 
a/mllib/src/test/scala/org/apache/spark/ml/classification/DecisionTreeClassifierSuite.scala
+++ 
b/mllib/src/test/scala/org/apache/spark/ml/classification/DecisionTreeClassifierSuite.scala
@@ -266,7 +266,7 @@ private[ml] object DecisionTreeClassifierSuite extends 
FunSuite {
     val oldTree = OldDecisionTree.train(data, oldStrategy)
     val newData: DataFrame = TreeTests.setMetadata(data, categoricalFeatures, 
numClasses)
     val newTree = dt.fit(newData)
-    // Use parent, fittingParamMap from newTree since these are not checked 
anyways.
+    // Use parent from newTree since this is not checked anyways.
     val oldTreeAsNew = DecisionTreeClassificationModel.fromOld(
       oldTree, newTree.parent.asInstanceOf[DecisionTreeClassifier], 
categoricalFeatures)
     TreeTests.checkEqual(oldTreeAsNew, newTree)

http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/mllib/src/test/scala/org/apache/spark/ml/classification/GBTClassifierSuite.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/test/scala/org/apache/spark/ml/classification/GBTClassifierSuite.scala
 
b/mllib/src/test/scala/org/apache/spark/ml/classification/GBTClassifierSuite.scala
index ea86867..fdc0e22 100644
--- 
a/mllib/src/test/scala/org/apache/spark/ml/classification/GBTClassifierSuite.scala
+++ 
b/mllib/src/test/scala/org/apache/spark/ml/classification/GBTClassifierSuite.scala
@@ -128,7 +128,7 @@ private object GBTClassifierSuite {
     val oldModel = oldGBT.run(data)
     val newData: DataFrame = TreeTests.setMetadata(data, categoricalFeatures, 
numClasses = 2)
     val newModel = gbt.fit(newData)
-    // Use parent, fittingParamMap from newTree since these are not checked 
anyways.
+    // Use parent from newTree since this is not checked anyways.
     val oldModelAsNew = GBTClassificationModel.fromOld(
       oldModel, newModel.parent.asInstanceOf[GBTClassifier], 
categoricalFeatures)
     TreeTests.checkEqual(oldModelAsNew, newModel)

http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/mllib/src/test/scala/org/apache/spark/ml/classification/RandomForestClassifierSuite.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/test/scala/org/apache/spark/ml/classification/RandomForestClassifierSuite.scala
 
b/mllib/src/test/scala/org/apache/spark/ml/classification/RandomForestClassifierSuite.scala
index cdbbaca..be860f6 100644
--- 
a/mllib/src/test/scala/org/apache/spark/ml/classification/RandomForestClassifierSuite.scala
+++ 
b/mllib/src/test/scala/org/apache/spark/ml/classification/RandomForestClassifierSuite.scala
@@ -158,7 +158,7 @@ private object RandomForestClassifierSuite {
       data, oldStrategy, rf.getNumTrees, rf.getFeatureSubsetStrategy, 
rf.getSeed.toInt)
     val newData: DataFrame = TreeTests.setMetadata(data, categoricalFeatures, 
numClasses)
     val newModel = rf.fit(newData)
-    // Use parent, fittingParamMap from newTree since these are not checked 
anyways.
+    // Use parent from newTree since this is not checked anyways.
     val oldModelAsNew = RandomForestClassificationModel.fromOld(
       oldModel, newModel.parent.asInstanceOf[RandomForestClassifier], 
categoricalFeatures)
     TreeTests.checkEqual(oldModelAsNew, newModel)

http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/mllib/src/test/scala/org/apache/spark/ml/regression/DecisionTreeRegressorSuite.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/test/scala/org/apache/spark/ml/regression/DecisionTreeRegressorSuite.scala
 
b/mllib/src/test/scala/org/apache/spark/ml/regression/DecisionTreeRegressorSuite.scala
index 1196a77..e6b0c55 100644
--- 
a/mllib/src/test/scala/org/apache/spark/ml/regression/DecisionTreeRegressorSuite.scala
+++ 
b/mllib/src/test/scala/org/apache/spark/ml/regression/DecisionTreeRegressorSuite.scala
@@ -83,7 +83,7 @@ private[ml] object DecisionTreeRegressorSuite extends 
FunSuite {
     val oldTree = OldDecisionTree.train(data, oldStrategy)
     val newData: DataFrame = TreeTests.setMetadata(data, categoricalFeatures, 
numClasses = 0)
     val newTree = dt.fit(newData)
-    // Use parent, fittingParamMap from newTree since these are not checked 
anyways.
+    // Use parent from newTree since this is not checked anyways.
     val oldTreeAsNew = DecisionTreeRegressionModel.fromOld(
       oldTree, newTree.parent.asInstanceOf[DecisionTreeRegressor], 
categoricalFeatures)
     TreeTests.checkEqual(oldTreeAsNew, newTree)

http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/mllib/src/test/scala/org/apache/spark/ml/regression/GBTRegressorSuite.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/test/scala/org/apache/spark/ml/regression/GBTRegressorSuite.scala 
b/mllib/src/test/scala/org/apache/spark/ml/regression/GBTRegressorSuite.scala
index 40e7e32..d997a3f 100644
--- 
a/mllib/src/test/scala/org/apache/spark/ml/regression/GBTRegressorSuite.scala
+++ 
b/mllib/src/test/scala/org/apache/spark/ml/regression/GBTRegressorSuite.scala
@@ -129,7 +129,7 @@ private object GBTRegressorSuite {
     val oldModel = oldGBT.run(data)
     val newData: DataFrame = TreeTests.setMetadata(data, categoricalFeatures, 
numClasses = 0)
     val newModel = gbt.fit(newData)
-    // Use parent, fittingParamMap from newTree since these are not checked 
anyways.
+    // Use parent from newTree since this is not checked anyways.
     val oldModelAsNew = GBTRegressionModel.fromOld(
       oldModel, newModel.parent.asInstanceOf[GBTRegressor], 
categoricalFeatures)
     TreeTests.checkEqual(oldModelAsNew, newModel)

http://git-wip-us.apache.org/repos/asf/spark/blob/815e0565/mllib/src/test/scala/org/apache/spark/ml/regression/RandomForestRegressorSuite.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/test/scala/org/apache/spark/ml/regression/RandomForestRegressorSuite.scala
 
b/mllib/src/test/scala/org/apache/spark/ml/regression/RandomForestRegressorSuite.scala
index 3efffbb..fe81e84 100644
--- 
a/mllib/src/test/scala/org/apache/spark/ml/regression/RandomForestRegressorSuite.scala
+++ 
b/mllib/src/test/scala/org/apache/spark/ml/regression/RandomForestRegressorSuite.scala
@@ -114,7 +114,7 @@ private object RandomForestRegressorSuite extends FunSuite {
       data, oldStrategy, rf.getNumTrees, rf.getFeatureSubsetStrategy, 
rf.getSeed.toInt)
     val newData: DataFrame = TreeTests.setMetadata(data, categoricalFeatures, 
numClasses = 0)
     val newModel = rf.fit(newData)
-    // Use parent, fittingParamMap from newTree since these are not checked 
anyways.
+    // Use parent from newTree since this is not checked anyways.
     val oldModelAsNew = RandomForestRegressionModel.fromOld(
       oldModel, newModel.parent.asInstanceOf[RandomForestRegressor], 
categoricalFeatures)
     TreeTests.checkEqual(oldModelAsNew, newModel)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to