[ 
https://issues.apache.org/jira/browse/FLINK-6226?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16236297#comment-16236297
 ] 

Fabian Hueske commented on FLINK-6226:
--------------------------------------

I tried to reproduce the issue, but the provided UDFs work on the current master 
branch.
It seems the issue was fixed in the meantime.

I'd suggest modifying two existing test cases to use UDFs with {{byte}}, 
{{short}}, and {{float}} parameters.
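
For example, an existing scalar UDF test could be extended with a function along 
these lines (the object name below is made up and only meant to illustrate the 
parameter types from the issue title):

{code:java}
import org.apache.flink.table.functions.ScalarFunction

// Hypothetical UDF covering byte, short, and float parameters.
object ByteShortFloatFunc extends ScalarFunction {
  def eval(a: Byte, b: Short, c: Float): String = {
    a + "," + b + "," + c
  }
}
{code}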

> ScalarFunction and TableFunction do not support parameters of byte, short and 
> float
> -----------------------------------------------------------------------------------
>
>                 Key: FLINK-6226
>                 URL: https://issues.apache.org/jira/browse/FLINK-6226
>             Project: Flink
>          Issue Type: Bug
>          Components: Table API & SQL
>            Reporter: Zhuoluo Yang
>            Assignee: Zhuoluo Yang
>            Priority: Major
>
> ScalarFunction and TableFunction do not seem to support parameters of type 
> byte, short, or float.
> They throw exceptions like the following:
> {panel}
> org.apache.flink.table.api.ValidationException: Given parameters of function 
> 'org$apache$flink$table$expressions$utils$Func18$$98a126fbdab73f43d640516da603291a'
>  do not match any signature. 
> Actual: (java.lang.String, java.lang.Integer, java.lang.Integer, 
> java.lang.Integer, java.lang.Long) 
> Expected: (java.lang.String, byte, short, int, long)
>       at 
> org.apache.flink.table.functions.utils.ScalarSqlFunction$$anon$1.inferReturnType(ScalarSqlFunction.scala:82)
>       at 
> org.apache.calcite.sql.SqlOperator.inferReturnType(SqlOperator.java:469)
>       at 
> org.apache.calcite.rex.RexBuilder.deriveReturnType(RexBuilder.java:271)
>       at org.apache.calcite.tools.RelBuilder.call(RelBuilder.java:518)
>       at 
> org.apache.flink.table.expressions.ScalarFunctionCall.toRexNode(call.scala:68)
>       at 
> org.apache.flink.table.expressions.Alias.toRexNode(fieldExpression.scala:76)
>       at 
> org.apache.flink.table.plan.logical.Project$$anonfun$construct$1.apply(operators.scala:95)
>       at 
> org.apache.flink.table.plan.logical.Project$$anonfun$construct$1.apply(operators.scala:95)
>       at 
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
>       at 
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
>       at scala.collection.immutable.List.foreach(List.scala:318)
>       at 
> scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:32)
>       at scala.collection.mutable.ListBuffer.foreach(ListBuffer.scala:45)
>       at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
>       at scala.collection.AbstractTraversable.map(Traversable.scala:105)
>       at 
> org.apache.flink.table.plan.logical.Project.construct(operators.scala:95)
>       at 
> org.apache.flink.table.plan.logical.LogicalNode.toRelNode(LogicalNode.scala:77)
>       at org.apache.flink.table.api.Table.getRelNode(table.scala:72)
>       at 
> org.apache.flink.table.expressions.utils.ExpressionTestBase.addTableApiTestExpr(ExpressionTestBase.scala:215)
>       at 
> org.apache.flink.table.expressions.utils.ExpressionTestBase.testAllApis(ExpressionTestBase.scala:241)
>       at 
> org.apache.flink.table.expressions.UserDefinedScalarFunctionTest.testVariableArgs(UserDefinedScalarFunctionTest.scala:240)
> {panel}
> The test code looks like the following:
> {code:java}
> object Func18 extends ScalarFunction {
>   def eval(a: String, b: Byte, c: Short, d: Int, e: Long): String = {
>     a + "," + b + "," + c + "," + d + "," + e
>   }
> }
> class TableFunc4 extends TableFunction[Row] {
>   def eval(data: String, tinyInt: Byte, smallInt: Short, int: Int, long: Long): Unit = {
>     val row = new Row(5)
>     row.setField(0, data)
>     row.setField(1, tinyInt)
>     row.setField(2, smallInt)
>     row.setField(3, int)
>     row.setField(4, long)
>     collect(row)
>   }
>   override def getResultType: TypeInformation[Row] = {
>     new RowTypeInfo(
>       BasicTypeInfo.STRING_TYPE_INFO,
>       BasicTypeInfo.BYTE_TYPE_INFO,
>       BasicTypeInfo.SHORT_TYPE_INFO,
>       BasicTypeInfo.INT_TYPE_INFO,
>       BasicTypeInfo.LONG_TYPE_INFO
>     )
>   }
> }
> {code}


