[ https://issues.apache.org/jira/browse/FLINK-4469?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15671941#comment-15671941 ]

ASF GitHub Bot commented on FLINK-4469:
---------------------------------------

Github user fhueske commented on a diff in the pull request:

    https://github.com/apache/flink/pull/2653#discussion_r88342301
  
    --- Diff: flink-libraries/flink-table/src/main/scala/org/apache/flink/api/table/plan/rules/datastream/DataStreamCorrelateRule.scala ---
    @@ -0,0 +1,89 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.flink.api.table.plan.rules.datastream
    +
    +import org.apache.calcite.plan.volcano.RelSubset
    +import org.apache.calcite.plan.{Convention, RelOptRule, RelOptRuleCall, RelTraitSet}
    +import org.apache.calcite.rel.RelNode
    +import org.apache.calcite.rel.convert.ConverterRule
    +import org.apache.calcite.rel.logical.{LogicalFilter, LogicalCorrelate, LogicalTableFunctionScan}
    +import org.apache.calcite.rex.RexNode
    +import org.apache.flink.api.table.plan.nodes.datastream.{DataStreamCorrelate, DataStreamConvention}
    +
    +/**
    +  * Rule to convert a LogicalCorrelate (cross/outer apply on a table function)
    +  * into a DataStreamCorrelate.
    +  */
    +class DataStreamCorrelateRule
    +  extends ConverterRule(
    +    classOf[LogicalCorrelate],
    +    Convention.NONE,
    +    DataStreamConvention.INSTANCE,
    +    "DataStreamCorrelateRule")
    +{
    +
    +  override def matches(call: RelOptRuleCall): Boolean = {
    +    val join: LogicalCorrelate = call.rel(0).asInstanceOf[LogicalCorrelate]
    +    val right = join.getRight.asInstanceOf[RelSubset].getOriginal
    +
    +    right match {
    +      // right node is a table function
    +      case scan: LogicalTableFunctionScan => true
    +      // a filter is pushed above the table function
    +      case filter: LogicalFilter =>
    +        filter.getInput.asInstanceOf[RelSubset].getOriginal
    +          .isInstanceOf[LogicalTableFunctionScan]
    +      case _ => false
    +    }
    +  }
    +
    +  override def convert(rel: RelNode): RelNode = {
    +    val join: LogicalCorrelate = rel.asInstanceOf[LogicalCorrelate]
    +    val traitSet: RelTraitSet = rel.getTraitSet.replace(DataStreamConvention.INSTANCE)
    +    val convInput: RelNode = RelOptRule.convert(join.getInput(0), DataStreamConvention.INSTANCE)
    +    val right: RelNode = join.getInput(1)
    +
    +    def convertToCorrelate(relNode: RelNode, condition: RexNode): DataStreamCorrelate = {
    --- End diff --
    
    define `condition` as `Option[RexNode]` so we do not have to use `null`
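    
    For illustration, a minimal sketch of how the helper could thread the
    optional condition instead of `null` (the `createCorrelate` helper is
    hypothetical, since the `DataStreamCorrelate` constructor is not shown
    in this diff):
    
        def convertToCorrelate(
            relNode: RelNode,
            condition: Option[RexNode]): DataStreamCorrelate = {
        
          relNode match {
            // a filter sits on top of the scan: capture its condition and recurse
            case filter: LogicalFilter =>
              convertToCorrelate(
                filter.getInput.asInstanceOf[RelSubset].getOriginal,
                Some(filter.getCondition))
            // the table function scan itself: build the correlate node
            case scan: LogicalTableFunctionScan =>
              createCorrelate(scan, condition) // hypothetical helper
          }
        }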


> Add support for user defined table function in Table API & SQL
> --------------------------------------------------------------
>
>                 Key: FLINK-4469
>                 URL: https://issues.apache.org/jira/browse/FLINK-4469
>             Project: Flink
>          Issue Type: New Feature
>          Components: Table API & SQL
>            Reporter: Jark Wu
>            Assignee: Jark Wu
>
> Normal user-defined functions, such as concat(), take in a single input row 
> and output a single output row. In contrast, table-generating functions 
> transform a single input row to multiple output rows. This is very useful in
> some cases, such as looking up rows in HBase by rowkey and returning one or
> more rows.
> A user-defined table function should:
> 1. inherit from the UDTF class with a specific generic type T
> 2. define one or more eval methods.
> NOTE:
> 1. the eval method must be public and non-static.
> 2. the generic type T is the row type returned by the table function. Because
> of Java type erasure, we can't extract T from the Iterable.
> 3. use {{collect(T)}} to emit a table row.
> 4. the eval method can be overloaded; Blink will choose the best-matching
> eval method to call according to the parameter types and count (a sketch
> follows the example below).
> {code}
> public class Word {
>   public String word;
>   public Integer length;
>   public Word(String word, Integer length) {
>     this.word = word;
>     this.length = length;
>   }
> }
> public class SplitStringUDTF extends UDTF<Word> {
>     // emit one row per comma-separated token via collect()
>     public void eval(String str) {
>         if (str != null) {
>             for (String s : str.split(",")) {
>                 collect(new Word(s, s.length()));
>             }
>         }
>     }
> }
> // in SQL
> tableEnv.registerFunction("split", new SplitStringUDTF())
> tableEnv.sql("SELECT a, b, t.* FROM MyTable, LATERAL TABLE(split(c)) AS 
> t(w,l)")
> // in Java Table API
> tableEnv.registerFunction("split", new SplitStringUDTF())
> // rename split table columns to "w" and "l"
> table.crossApply("split(c) as (w, l)")
>      .select("a, b, w, l")
> // without renaming, we will use the original field names in the POJO/case/...
> table.crossApply("split(c)")
>      .select("a, b, word, length")
> // in Scala Table API
> val split = new SplitStringUDTF()
> table.crossApply(split('c) as ('w, 'l))
>      .select('a, 'b, 'w, 'l)
> // outerApply for outer join to a UDTF
> table.outerApply(split('c))
>      .select('a, 'b, 'word, 'length)
> {code}
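> As a sketch for note 4: an overloaded eval method, assuming the same UDTF
> base class and {{collect(T)}} contract as above (the class name and the
> default-separator behavior are illustrative only, not part of the design):
> {code}
> // Scala sketch: two eval overloads; the best match is chosen by
> // parameter types and count
> class CustomSplitUDTF extends UDTF[Word] {
>   // split on the default separator ","
>   def eval(str: String): Unit = eval(str, ",")
>   // split on a caller-supplied separator
>   def eval(str: String, separator: String): Unit = {
>     if (str != null) {
>       str.split(separator).foreach(s => collect(new Word(s, s.length)))
>     }
>   }
> }
> {code}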
> See [1] for more information about UDTF design.
> [1] https://docs.google.com/document/d/15iVc1781dxYWm3loVQlESYvMAxEzbbuVFPZWBYuY1Ek/edit#



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
