Looks like there might be a problem with the way you specified your
parameter values; you probably have an integer value where it should be a
floating-point one.  Double-check that, and if there is still a problem please
share the rest of your code so we can see how you defined "gridS".

On Fri, May 5, 2017 at 7:40 AM, issues solution <issues.solut...@gmail.com>
wrote:

> Hi, I get the following error after trying to perform
> grid search and cross-validation on a random forest estimator for classification
>
> rf = RandomForestClassifier(labelCol="Labeld",featuresCol="features")
>
> evaluator =  BinaryClassificationEvaluator(metricName="F1 Score")
>
> rf_cv = CrossValidator(estimator=rf, 
> estimatorParamMaps=gridS,evaluator=evaluator,numFolds=5)
> (trainingData, testData) = transformed13.randomSplit([0.7, 0.3])
> rfmodel  =  rf_cv.fit(trainingData)
> ---------------------------------------------------------------------------Py4JJavaError
>                              Traceback (most recent call 
> last)<ipython-input-76-bdd59134bbe0> in <module>()----> 1 rfmodel  =  
> rf_cv.fit(trainingData)
> /opt/cloudera/parcels/CDH/lib/spark/python/pyspark/ml/pipeline.py in 
> fit(self, dataset, params)     67                 return 
> self.copy(params)._fit(dataset)     68             else:---> 69               
>   return self._fit(dataset)     70         else:     71             raise 
> ValueError("Params must be either a param map or a list/tuple of param maps, "
> /opt/cloudera/parcels/CDH/lib/spark/python/pyspark/ml/tuning.py in _fit(self, 
> dataset)    237             train = df.filter(~condition)    238             
> for j in range(numModels):--> 239                 model = est.fit(train, 
> epm[j])    240                 # TODO: duplicate evaluator to take extra 
> params from input    241                 metric = 
> eva.evaluate(model.transform(validation, epm[j]))
> /opt/cloudera/parcels/CDH/lib/spark/python/pyspark/ml/pipeline.py in 
> fit(self, dataset, params)     65         elif isinstance(params, dict):     
> 66             if params:---> 67                 return 
> self.copy(params)._fit(dataset)     68             else:     69               
>   return self._fit(dataset)
> /opt/cloudera/parcels/CDH/lib/spark/python/pyspark/ml/wrapper.py in 
> _fit(self, dataset)    131     132     def _fit(self, dataset):--> 133        
>  java_model = self._fit_java(dataset)    134         return 
> self._create_model(java_model)    135
> /opt/cloudera/parcels/CDH/lib/spark/python/pyspark/ml/wrapper.py in 
> _fit_java(self, dataset)    127         :return: fitted Java model    128     
>     """--> 129         self._transfer_params_to_java()    130         return 
> self._java_obj.fit(dataset._jdf)    131
> /opt/cloudera/parcels/CDH/lib/spark/python/pyspark/ml/wrapper.py in 
> _transfer_params_to_java(self)     80         for param in self.params:     
> 81             if param in paramMap:---> 82                 pair = 
> self._make_java_param_pair(param, paramMap[param])     83                 
> self._java_obj.set(pair)     84
> /opt/cloudera/parcels/CDH/lib/spark/python/pyspark/ml/wrapper.py in 
> _make_java_param_pair(self, param, value)     71         java_param = 
> self._java_obj.getParam(param.name)     72         java_value = _py2java(sc, 
> value)---> 73         return java_param.w(java_value)     74      75     def 
> _transfer_params_to_java(self):
> /opt/cloudera/parcels/CDH/lib/spark/python/lib/py4j-0.9-src.zip/py4j/java_gateway.py
>  in __call__(self, *args)    811         answer = 
> self.gateway_client.send_command(command)    812         return_value = 
> get_return_value(--> 813             answer, self.gateway_client, 
> self.target_id, self.name)    814     815         for temp_arg in temp_args:
> /opt/cloudera/parcels/CDH/lib/spark/python/pyspark/sql/utils.py in deco(*a, 
> **kw)     43     def deco(*a, **kw):     44         try:---> 45             
> return f(*a, **kw)     46         except py4j.protocol.Py4JJavaError as e:    
>  47             s = e.java_exception.toString()
> /opt/cloudera/parcels/CDH/lib/spark/python/lib/py4j-0.9-src.zip/py4j/protocol.py
>  in get_return_value(answer, gateway_client, target_id, name)    306          
>        raise Py4JJavaError(    307                     "An error occurred 
> while calling {0}{1}{2}.\n".--> 308                     format(target_id, 
> ".", name), value)    309             else:    310                 raise 
> Py4JError(
> Py4JJavaError: An error occurred while calling o91602.w.
> : java.lang.ClassCastException: java.lang.Integer cannot be cast to 
> java.lang.Double
>       at scala.runtime.BoxesRunTime.unboxToDouble(BoxesRunTime.java:119)
>       at org.apache.spark.ml.param.DoubleParam.w(params.scala:225)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:606)
>       at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
>       at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:381)
>       at py4j.Gateway.invoke(Gateway.java:259)
>       at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
>       at py4j.commands.CallCommand.execute(CallCommand.java:79)
>       at py4j.GatewayConnection.run(GatewayConnection.java:209)
>       at java.lang.Thread.run(Thread.java:745)
>
>

Reply via email to