wuchong commented on a change in pull request #9099: [FLINK-13237][table-planner-blink] Add expression table api test to blink URL: https://github.com/apache/flink/pull/9099#discussion_r309590465
########## File path: flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/expressions/ScalarFunctionsTest.scala ########## @@ -655,124 +749,168 @@ class ScalarFunctionsTest extends ScalarTypesTestBase { @Test def testConcatWs(): Unit = { - testSqlApi( + testAllApis( + concat_ws('f33, "AA"), + "concat_ws(f33, 'AA')", "CONCAT_WS(f33, 'AA')", "AA") - testSqlApi( + testAllApis( + concat_ws("~~~~", "AA"), + "concat_ws('~~~~','AA')", "concat_ws('~~~~','AA')", "AA") - testSqlApi( + testAllApis( + concat_ws("~", "AA", "BB"), + "concat_ws('~','AA','BB')", "concat_ws('~','AA','BB')", "AA~BB") - testSqlApi( + testAllApis( + concat_ws("~", 'f33, "AA", "BB", "", 'f33, "CC"), + "concat_ws('~',f33, 'AA','BB','',f33, 'CC')", "concat_ws('~',f33, 'AA','BB','',f33, 'CC')", "AA~BB~~CC") - testSqlApi( + testAllApis( + concat_ws("~~~~", "Flink", 'f33, "xx", 'f33, 'f33), + "concat_ws('~~~~','Flink', f33, 'xx', f33, f33)", "CONCAT_WS('~~~~','Flink', f33, 'xx', f33, f33)", "Flink~~~~xx") - - testSqlApi("concat_ws('||', f35, f36, f33)", "a||b") } + @Test def testRegexpReplace(): Unit = { - testSqlApi( + testAllApis( + "foobar".regexpReplace("oo|ar", "abc"), + "'foobar'.regexpReplace('oo|ar', 'abc')", "regexp_replace('foobar', 'oo|ar', 'abc')", "fabcbabc") - testSqlApi( + testAllApis( + "foofar".regexpReplace("^f", ""), + "'foofar'.regexpReplace('^f', '')", "regexp_replace('foofar', '^f', '')", "oofar") - testSqlApi( + testAllApis( + "foobar".regexpReplace("^f*.*r$", ""), + "'foobar'.regexpReplace('^f*.*r$', '')", "regexp_replace('foobar', '^f*.*r$', '')", "") - testSqlApi( + testAllApis( + "foo1bar2".regexpReplace("\\d", ""), + "'foo1bar2'.regexpReplace('\\d', '')", "regexp_replace('foobar', '\\d', '')", "foobar") - testSqlApi( + testAllApis( + "foobar".regexpReplace("\\w", ""), + "'foobar'.regexpReplace('\\w', '')", "regexp_replace('foobar', '\\w', '')", "") - testSqlApi( + testAllApis( + "fooobar".regexpReplace("oo", "$"), + "'fooobar'.regexpReplace('oo', 
'$')", "regexp_replace('fooobar', 'oo', '$')", "f$obar") - testSqlApi( + testAllApis( + "foobar".regexpReplace("oo", "\\"), + "'foobar'.regexpReplace('oo', '\\')", "regexp_replace('foobar', 'oo', '\\')", "f\\bar") - testSqlApi( + testAllApis( + 'f33.regexpReplace("oo|ar", ""), + "f33.regexpReplace('oo|ar', '')", "REGEXP_REPLACE(f33, 'oo|ar', '')", "null") - testSqlApi( + testAllApis( + "foobar".regexpReplace('f33, ""), + "'foobar'.regexpReplace(f33, '')", "REGEXP_REPLACE('foobar', f33, '')", "null") - testSqlApi( + testAllApis( + "foobar".regexpReplace("oo|ar", 'f33), + "'foobar'.regexpReplace('oo|ar', f33)", "REGEXP_REPLACE('foobar', 'oo|ar', f33)", "null") // This test was added for the null literal problem in string expression parsing (FLINK-10463). - testSqlApi( + testAllApis( + nullOf(Types.STRING).regexpReplace("oo|ar", 'f33), + "nullOf(STRING).regexpReplace('oo|ar', f33)", "REGEXP_REPLACE(CAST(NULL AS VARCHAR), 'oo|ar', f33)", "null") - - testSqlApi("regexp_replace('100-200', '(\\d+)', 'num')", "num-num") - testSqlApi("regexp_replace('100-200', '(\\d+)-(\\d+)', '400')", "400") - testSqlApi("regexp_replace('100-200', '(\\d+)', '400')", "400-400") - testSqlApi("regexp_replace('100-200', '', '400')", "100-200") - testSqlApi("regexp_replace(f40, '(\\d+)', '400')", "null") - testSqlApi("regexp_replace(CAST(null as VARCHAR), '(\\d+)', 'num')", "null") - testSqlApi("regexp_replace('100-200', CAST(null as VARCHAR), '400')", "null") - testSqlApi("regexp_replace('100-200', '(\\d+)', CAST(null as VARCHAR))", "null") } @Test def testRegexpExtract(): Unit = { - testSqlApi( + testAllApis( + "foothebar".regexpExtract("foo(.*?)(bar)", 2), + "'foothebar'.regexpExtract('foo(.*?)(bar)', 2)", "REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)', 2)", "bar") - testSqlApi( + testAllApis( + "foothebar".regexpExtract("foo(.*?)(bar)", 0), + "'foothebar'.regexpExtract('foo(.*?)(bar)', 0)", "REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)', 0)", "foothebar") - testSqlApi( + testAllApis( + 
"foothebar".regexpExtract("foo(.*?)(bar)", 1), + "'foothebar'.regexpExtract('foo(.*?)(bar)', 1)", "REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)', 1)", "the") - testSqlApi( + testAllApis( + "foothebar".regexpExtract("foo([\\w]+)", 1), + "'foothebar'.regexpExtract('foo([\\w]+)', 1)", "REGEXP_EXTRACT('foothebar', 'foo([\\w]+)', 1)", "thebar") - testSqlApi( + testAllApis( + "foothebar".regexpExtract("foo([\\d]+)", 1), + "'foothebar'.regexpExtract('foo([\\d]+)', 1)", "REGEXP_EXTRACT('foothebar', 'foo([\\d]+)', 1)", "null") - testSqlApi( + testAllApis( + 'f33.regexpExtract("foo(.*?)(bar)", 2), + "f33.regexpExtract('foo(.*?)(bar)', 2)", "REGEXP_EXTRACT(f33, 'foo(.*?)(bar)', 2)", "null") - testSqlApi( + testAllApis( + "foothebar".regexpExtract('f33, 2), + "'foothebar'.regexpExtract(f33, 2)", "REGEXP_EXTRACT('foothebar', f33, 2)", "null") //test for optional group index - testSqlApi( + testAllApis( + "foothebar".regexpExtract("foo(.*?)(bar)"), + "'foothebar'.regexpExtract('foo(.*?)(bar)')", "REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)')", "foothebar") + } - testSqlApi("regexp_extract('100-200', '(\\d+)-(\\d+)', 1)", "100") - testSqlApi("regexp_extract('100-200', '', 1)", "null") - testSqlApi("regexp_extract('100-200', '(\\d+)-(\\d+)', -1)", "null") - testSqlApi("regexp_extract(f40, '(\\d+)-(\\d+)', 1)", "null") - testSqlApi("regexp_extract(CAST(null as VARCHAR), '(\\d+)-(\\d+)', 1)", "null") - testSqlApi("regexp_extract('100-200', CAST(null as VARCHAR), 1)", "null") - testSqlApi("regexp_extract('100-200', '(\\d+)-(\\d+)', CAST(null as BIGINT))", "null") + @Ignore // Implicit type conversion + @Test + def testConcat2(): Unit = { Review comment: Should we remove this test if we don't support implicit type conversion yet? I think we need to add a bunch of thorough tests when supporting implicit type conversion instead of enabling the ignored tests. What do you think? ---------------------------------------------------------------- This is an automated message from the Apache Git Service. 
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services