(flink) branch master updated: [FLINK-35820] Converting Duration to String fails for big values (#25077)

2024-07-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 98e07d14213 [FLINK-35820] Converting Duration to String fails for big 
values (#25077)
98e07d14213 is described below

commit 98e07d14213e2b739ac231f1863b1b6f71031851
Author: Dawid Wysakowicz 
AuthorDate: Fri Jul 12 13:23:54 2024 +0200

[FLINK-35820] Converting Duration to String fails for big values (#25077)
---
 .../main/java/org/apache/flink/util/TimeUtils.java | 81 --
 .../flink/util/TimeUtilsPrettyPrintingTest.java|  6 +-
 .../java/org/apache/flink/util/TimeUtilsTest.java  |  2 +
 3 files changed, 68 insertions(+), 21 deletions(-)

diff --git a/flink-core/src/main/java/org/apache/flink/util/TimeUtils.java 
b/flink-core/src/main/java/org/apache/flink/util/TimeUtils.java
index 80d38c78078..3b47da1a73c 100644
--- a/flink-core/src/main/java/org/apache/flink/util/TimeUtils.java
+++ b/flink-core/src/main/java/org/apache/flink/util/TimeUtils.java
@@ -20,6 +20,7 @@ package org.apache.flink.util;
 
 import org.apache.flink.api.common.time.Time;
 
+import java.math.BigInteger;
 import java.time.Duration;
 import java.time.temporal.ChronoUnit;
 import java.util.Arrays;
@@ -39,6 +40,8 @@ public class TimeUtils {
 private static final Map LABEL_TO_UNIT_MAP =
 Collections.unmodifiableMap(initMap());
 
+private static final BigInteger NANOS_PER_SECOND = 
BigInteger.valueOf(1_000_000_000L);
+
 /**
  * Parse the given string to a java {@link Duration}. The string is in 
format "{length
  * value}{time unit label}", e.g. "123ms", "321 s". If no time unit label 
is specified, it will
@@ -79,30 +82,45 @@ public class TimeUtils {
 throw new NumberFormatException("text does not start with a 
number");
 }
 
-final long value;
+final BigInteger value;
 try {
-value = Long.parseLong(number); // this throws a 
NumberFormatException on overflow
+value = new BigInteger(number); // this throws a 
NumberFormatException
 } catch (NumberFormatException e) {
 throw new IllegalArgumentException(
-"The value '"
-+ number
-+ "' cannot be re represented as 64bit number 
(numeric overflow).");
+"The value '" + number + "' cannot be represented as an 
integer number.", e);
 }
 
+final ChronoUnit unit;
 if (unitLabel.isEmpty()) {
-return Duration.of(value, ChronoUnit.MILLIS);
-}
-
-ChronoUnit unit = LABEL_TO_UNIT_MAP.get(unitLabel);
-if (unit != null) {
-return Duration.of(value, unit);
+unit = ChronoUnit.MILLIS;
 } else {
+unit = LABEL_TO_UNIT_MAP.get(unitLabel);
+}
+if (unit == null) {
 throw new IllegalArgumentException(
 "Time interval unit label '"
 + unitLabel
 + "' does not match any of the recognized units: "
 + TimeUnit.getAllUnits());
 }
+
+try {
+return convertBigIntToDuration(value, unit);
+} catch (ArithmeticException e) {
+throw new IllegalArgumentException(
+"The value '"
++ number
++ "' cannot be represented as java.time.Duration 
(numeric overflow).",
+e);
+}
+}
+
+private static Duration convertBigIntToDuration(BigInteger value, 
ChronoUnit unit) {
+final BigInteger nanos = 
value.multiply(BigInteger.valueOf(unit.getDuration().toNanos()));
+
+final BigInteger[] dividedAndRemainder = 
nanos.divideAndRemainder(NANOS_PER_SECOND);
+return Duration.ofSeconds(dividedAndRemainder[0].longValueExact())
+.plusNanos(dividedAndRemainder[1].longValueExact());
 }
 
 private static Map initMap() {
@@ -136,17 +154,35 @@ public class TimeUtils {
  * NOTE: It supports only durations that fit into long.
  */
 public static String formatWithHighestUnit(Duration duration) {
-long nanos = duration.toNanos();
+BigInteger nanos = toNanos(duration);
 
 TimeUnit highestIntegerUnit = getHighestIntegerUnit(nanos);
 return String.format(
-"%d %s",
-nanos / highestIntegerUnit.unit.getDuration().toNanos(),
+"%s %s",
+nanos.divide(highestIntegerUnit.getUnitAsNanos()),
 highestIntegerUnit.getLabels().get(0));
 }
 
-private static TimeUnit getHighestIntegerUnit(lo

(flink-benchmarks) branch master updated: [hotfix] Update shaded 19.0

2024-07-09 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-benchmarks.git


The following commit(s) were added to refs/heads/master by this push:
 new d3bfc1c  [hotfix] Update shaded 19.0
d3bfc1c is described below

commit d3bfc1c390dd3e94d8cf30309470fd40da619db7
Author: Dawid Wysakowicz 
AuthorDate: Tue Jul 9 10:23:13 2024 +0200

[hotfix] Update shaded 19.0
---
 pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pom.xml b/pom.xml
index 73daab4..214cfc0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,8 +46,8 @@ under the License.


UTF-8
2.0-SNAPSHOT
-   16.1
-   2.0.54.Final
+   19.0
+   2.0.62.Final
1.8
2.12
${java.version}



(flink-shaded) branch master updated: [hotfix] Update 20.0 version (#139)

2024-07-09 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-shaded.git


The following commit(s) were added to refs/heads/master by this push:
 new ccb1f09  [hotfix] Update 20.0 version (#139)
ccb1f09 is described below

commit ccb1f09c7f2df343cb8d99e83dcb2e9a0ea1181e
Author: Dawid Wysakowicz 
AuthorDate: Tue Jul 9 10:25:12 2024 +0200

[hotfix] Update 20.0 version (#139)
---
 flink-shaded-asm-9/pom.xml| 4 ++--
 flink-shaded-force-shading/pom.xml| 2 +-
 flink-shaded-guava-32/pom.xml | 4 ++--
 flink-shaded-jackson-parent/flink-shaded-jackson-2/pom.xml| 2 +-
 .../flink-shaded-jackson-module-jsonSchema-2/pom.xml  | 2 +-
 flink-shaded-jackson-parent/flink-shaded-jsonpath/pom.xml | 4 ++--
 flink-shaded-jackson-parent/pom.xml   | 4 ++--
 flink-shaded-netty-4/pom.xml  | 4 ++--
 flink-shaded-netty-tcnative-dynamic/pom.xml   | 4 ++--
 flink-shaded-netty-tcnative-static/pom.xml| 4 ++--
 flink-shaded-swagger/pom.xml  | 2 +-
 flink-shaded-zookeeper-parent/flink-shaded-zookeeper-35/pom.xml   | 4 ++--
 flink-shaded-zookeeper-parent/flink-shaded-zookeeper-36/pom.xml   | 4 ++--
 flink-shaded-zookeeper-parent/flink-shaded-zookeeper-37/pom.xml   | 4 ++--
 flink-shaded-zookeeper-parent/flink-shaded-zookeeper-38/pom.xml   | 4 ++--
 flink-shaded-zookeeper-parent/pom.xml | 2 +-
 pom.xml   | 2 +-
 17 files changed, 28 insertions(+), 28 deletions(-)

diff --git a/flink-shaded-asm-9/pom.xml b/flink-shaded-asm-9/pom.xml
index 1722471..7e9eb68 100644
--- a/flink-shaded-asm-9/pom.xml
+++ b/flink-shaded-asm-9/pom.xml
@@ -25,11 +25,11 @@ under the License.
 
 org.apache.flink
 flink-shaded
-19.0
+20.0
 
 
 flink-shaded-asm-${asm.major.version}
-${asm.version}-19.0
+${asm.version}-20.0
 
 jar
 
diff --git a/flink-shaded-force-shading/pom.xml 
b/flink-shaded-force-shading/pom.xml
index 0fcfb5b..2029d15 100644
--- a/flink-shaded-force-shading/pom.xml
+++ b/flink-shaded-force-shading/pom.xml
@@ -25,7 +25,7 @@ under the License.
 
 flink-shaded
 org.apache.flink
-19.0
+20.0
 
 
 flink-shaded-force-shading
diff --git a/flink-shaded-guava-32/pom.xml b/flink-shaded-guava-32/pom.xml
index 87f..5ece3a2 100644
--- a/flink-shaded-guava-32/pom.xml
+++ b/flink-shaded-guava-32/pom.xml
@@ -25,12 +25,12 @@ under the License.
 
 org.apache.flink
 flink-shaded
-19.0
+20.0
 
 
 flink-shaded-guava${flink.ci.license.suffix}
 flink-shaded-guava-32
-${guava.version}-19.0
+${guava.version}-20.0
 
 jar
 
diff --git a/flink-shaded-jackson-parent/flink-shaded-jackson-2/pom.xml 
b/flink-shaded-jackson-parent/flink-shaded-jackson-2/pom.xml
index aba2c59..258ef7e 100644
--- a/flink-shaded-jackson-parent/flink-shaded-jackson-2/pom.xml
+++ b/flink-shaded-jackson-parent/flink-shaded-jackson-2/pom.xml
@@ -25,7 +25,7 @@ under the License.
 
 org.apache.flink
 flink-shaded-jackson-parent
-2.15.3-19.0
+2.15.3-20.0
 
 
 flink-shaded-jackson${flink.ci.license.suffix}
diff --git 
a/flink-shaded-jackson-parent/flink-shaded-jackson-module-jsonSchema-2/pom.xml 
b/flink-shaded-jackson-parent/flink-shaded-jackson-module-jsonSchema-2/pom.xml
index 3e0ce58..e5c71f5 100644
--- 
a/flink-shaded-jackson-parent/flink-shaded-jackson-module-jsonSchema-2/pom.xml
+++ 
b/flink-shaded-jackson-parent/flink-shaded-jackson-module-jsonSchema-2/pom.xml
@@ -25,7 +25,7 @@ under the License.
 
 org.apache.flink
 flink-shaded-jackson-parent
-2.15.3-19.0
+2.15.3-20.0
 
 
 
flink-shaded-jackson-module-jsonSchema${flink.ci.license.suffix}
diff --git a/flink-shaded-jackson-parent/flink-shaded-jsonpath/pom.xml 
b/flink-shaded-jackson-parent/flink-shaded-jsonpath/pom.xml
index 44d6636..149d198 100644
--- a/flink-shaded-jackson-parent/flink-shaded-jsonpath/pom.xml
+++ b/flink-shaded-jackson-parent/flink-shaded-jsonpath/pom.xml
@@ -25,13 +25,13 @@ under the License.
 
 org.apache.flink
 flink-shaded-jackson-parent
-2.15.3-19.0
+2.15.3-20.0
 
 
 flink-shaded-jsonpath
 flink-shaded-jsonpath
 
-2.7.0-19.0
+2.7.0-20.0
 
 
 
diff --git a/flink-shaded-jackson-parent/pom.xml 
b/flink-shaded-jackson-parent/pom.xml
index 8c044d3..106a06f 100644
--- a/flink-shaded-jackson-parent/pom.xml
+++ b/flink-shaded-jackson-parent/pom.xml
@@ -25,13 +25,13 @@ under

(flink) branch master updated: [FLINK-35696] JSON_VALUE/QUERY functions incorrectly map floating numbers (#25026)

2024-07-05 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 3606302fd3e [FLINK-35696] JSON_VALUE/QUERY functions incorrectly map 
floating numbers (#25026)
3606302fd3e is described below

commit 3606302fd3e73f0da5701983fd8b21e3dd610bef
Author: Dawid Wysakowicz 
AuthorDate: Fri Jul 5 15:10:46 2024 +0200

[FLINK-35696] JSON_VALUE/QUERY functions incorrectly map floating numbers 
(#25026)
---
 .../planner/codegen/calls/JsonValueCallGen.scala   |  2 +-
 .../planner/functions/JsonFunctionsITCase.java | 23 --
 .../src/test/resources/json/json-value.json|  1 +
 .../table/runtime/functions/SqlJsonUtils.java  | 10 +++---
 4 files changed, 30 insertions(+), 6 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/JsonValueCallGen.scala
 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/JsonValueCallGen.scala
index 02bbcb252ba..c6783d8ccc2 100644
--- 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/JsonValueCallGen.scala
+++ 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/JsonValueCallGen.scala
@@ -64,7 +64,7 @@ class JsonValueCallGen extends CallGenerator {
   
s"$BINARY_STRING.fromString(java.lang.String.valueOf($rawResultTerm))"
 case LogicalTypeRoot.BOOLEAN => s"(java.lang.Boolean) 
$rawResultTerm"
 case LogicalTypeRoot.INTEGER => s"(java.lang.Integer) 
$rawResultTerm"
-case LogicalTypeRoot.DOUBLE => s"(java.lang.Double) $rawResultTerm"
+case LogicalTypeRoot.DOUBLE => s"((java.math.BigDecimal) 
$rawResultTerm).doubleValue()"
 case _ =>
   throw new CodeGenException(
 s"Unsupported type '$returnType' "
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/JsonFunctionsITCase.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/JsonFunctionsITCase.java
index b8780a34059..9fe8a48d077 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/JsonFunctionsITCase.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/JsonFunctionsITCase.java
@@ -186,6 +186,20 @@ class JsonFunctionsITCase extends BuiltInFunctionTestBase {
 STRING(),
 STRING())
 
+// floating numbers
+.testResult(
+$("f0").jsonValue("$.longBalance"),
+"JSON_VALUE(f0, '$.longBalance')",
+"123456789.987654321",
+STRING(),
+STRING())
+.testResult(
+$("f0").jsonValue("$.balance"),
+"JSON_VALUE(f0, '$.balance')",
+"13.37",
+STRING(),
+STRING())
+
 // RETURNING + Supported Data Types
 .testResult(
 $("f0").jsonValue("$.type"),
@@ -208,6 +222,11 @@ class JsonFunctionsITCase extends BuiltInFunctionTestBase {
 "JSON_VALUE(f0, '$.balance' RETURNING DOUBLE)",
 13.37,
 DOUBLE())
+.testResult(
+$("f0").jsonValue("$.longBalance", DOUBLE()),
+"JSON_VALUE(f0, '$.longBalance' RETURNING DOUBLE)",
+123456789.987654321,
+DOUBLE())
 
 // ON EMPTY / ON ERROR
 .testResult(
@@ -459,9 +478,9 @@ class JsonFunctionsITCase extends BuiltInFunctionTestBase {
 $("f0").jsonQuery("$.items", ARRAY(STRING())),
 "JSON_QUERY(f0, '$.items' RETURNING 
ARRAY)",
 new String[] {
-"{\"itemId\":1234,\"count\":10}",
+"{\"count\":10,\"itemId\":1234}",
 null,
-"{\"itemId\":4567,\"count\":11}"
+"{\"count\":11,\"itemId\":4567}"
 },
 

(flink) 02/02: [FLINK-35755] Upgrade to flink-shaded 19.0

2024-07-05 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 59769b7eaa5a8c040e3939ef273fa13ab8b6c8b3
Author: Dawid Wysakowicz 
AuthorDate: Thu Jul 4 10:54:52 2024 +0200

[FLINK-35755] Upgrade to flink-shaded 19.0

This closes #25022
---
 flink-table/flink-table-planner/pom.xml  |  9 +
 flink-table/flink-table-runtime/pom.xml  | 20 
 .../flink/table/runtime/functions/JsonPathCache.java |  5 ++---
 .../flink/table/runtime/functions/SqlJsonUtils.java  | 19 +--
 flink-table/pom.xml  |  1 -
 pom.xml  |  3 ++-
 tools/maven/checkstyle.xml   |  4 
 7 files changed, 26 insertions(+), 35 deletions(-)

diff --git a/flink-table/flink-table-planner/pom.xml 
b/flink-table/flink-table-planner/pom.xml
index 08b506bd538..d578c2618bc 100644
--- a/flink-table/flink-table-planner/pom.xml
+++ b/flink-table/flink-table-planner/pom.xml
@@ -354,16 +354,17 @@ under the License.

org.apache.flink.calcite.shaded.org.checkerframework

 
-   
+   


com.fasterxml


org.apache.flink.shaded.jackson2.com.fasterxml


-   
com.jayway
-   
-   
org.apache.flink.table.shaded.com.jayway
+   
com.jayway.jsonpath
+   
+   
org.apache.flink.shaded.com.jayway.jsonpath

 

diff --git a/flink-table/flink-table-runtime/pom.xml 
b/flink-table/flink-table-runtime/pom.xml
index 0791b9fe788..8c859137659 100644
--- a/flink-table/flink-table-runtime/pom.xml
+++ b/flink-table/flink-table-runtime/pom.xml
@@ -91,11 +91,10 @@ under the License.

 

-

-   com.jayway.jsonpath
-   json-path
-   ${jsonpath.version}
+   org.apache.flink
+   flink-shaded-jsonpath
+   
${flink.shaded.jsonpath.version}-${flink.shaded.version}
${flink.markBundledAsOptional}

 
@@ -168,22 +167,11 @@ under the License.

false


-   
com.jayway.jsonpath:json-path
+   
org.apache.flink:flink-shaded-jsonpath

org.codehaus.janino:*

org.apache.flink:flink-table-code-splitter


-   
-   
-   
-   
com.jayway
-   
org.apache.flink.table.shaded.com.jayway
-   
-   
-   
com.fasterxml
-   
org.apache.flink.shaded.jackson2.com.fasterxml
-   
-   



diff --git 
a/flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/JsonPathCache.java
 
b/flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime

(flink) branch master updated (6b08eb15d09 -> 59769b7eaa5)

2024-07-05 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 6b08eb15d09 [FLINK-35731][runtime] Fix incorrect parallelism 
configured detection for Sink V2.
 new 7e11de65715 [FLINK-33705] Upgrade to flink-shaded 18.0
 new 59769b7eaa5 [FLINK-35755] Upgrade to flink-shaded 19.0

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../f7a4e6fa-e7de-48c9-a61e-c13e83f0c72e |  2 +-
 .../client/program/StreamContextEnvironment.java |  4 ++--
 .../client/program/rest/UrlPrefixDecorator.java  |  2 +-
 .../application/JarManifestParserTest.java   |  4 ++--
 .../client/program/PerJobMiniClusterFactoryTest.java |  2 +-
 .../flink/client/testjar/ForbidConfigurationJob.java |  2 +-
 .../file/table/FileSystemOutputFormatTest.java   |  2 +-
 .../delegation/hive/HiveParserCalcitePlanner.java|  2 +-
 .../connectors/hive/FlinkEmbeddedHiveRunner.java |  2 +-
 .../connectors/hive/FlinkStandaloneHiveRunner.java   |  4 ++--
 .../flink/connectors/hive/HiveTableSinkITCase.java   |  2 +-
 .../flink/table/catalog/hive/HiveCatalogITCase.java  |  2 +-
 .../apache/flink/api/common/io/OutputFormatBase.java |  2 +-
 .../ratelimiting/GuavaFlinkConnectorRateLimiter.java |  2 +-
 .../source/util/ratelimit/GuavaRateLimiter.java  |  2 +-
 .../typeutils/runtime/JavaRecordBuilderFactory.java  |  2 +-
 .../apache/flink/configuration/CleanupOptions.java   |  2 +-
 .../org/apache/flink/configuration/CoreOptions.java  |  2 +-
 .../flink/configuration/description/TextElement.java |  2 +-
 .../core/classloading/ComponentClassLoader.java  |  2 +-
 .../apache/flink/core/fs/AutoCloseableRegistry.java  |  2 +-
 .../org/apache/flink/core/fs/CloseableRegistry.java  |  2 +-
 .../java/org/apache/flink/core/fs/FileSystem.java|  8 
 .../flink/core/plugin/DefaultPluginManager.java  |  4 ++--
 .../main/java/org/apache/flink/util/FileUtils.java   |  4 ++--
 .../main/java/org/apache/flink/util/NetUtils.java|  2 +-
 .../apache/flink/configuration/ConfigOptionTest.java |  2 +-
 .../core/io/SimpleVersionedSerializationTest.java|  2 +-
 .../apache/flink/core/plugin/PluginConfigTest.java   |  2 +-
 .../org/apache/flink/dist/BashJavaUtilsITCase.java   |  2 +-
 .../changelog/fs/AbstractStateChangeFsUploader.java  |  2 +-
 .../changelog/fs/FsStateChangelogWriterTest.java |  2 +-
 .../tests/queryablestate/QsStateProducer.java|  2 +-
 .../org/apache/flink/fs/gs/utils/ChecksumUtils.java  |  4 ++--
 .../flink/fs/gs/writer/GSChecksumWriteChannel.java   |  2 +-
 .../flink/fs/gs/utils/ConfigUtilsHadoopTest.java |  4 ++--
 .../fs/s3/common/HAJobRunOnMinioS3StoreITCase.java   |  4 ++--
 .../formats/csv/CsvRowDataSerializationSchema.java   | 12 +---
 .../flink/formats/csv/RowDataToCsvConverters.java| 15 +++
 .../flink/formats/csv/CsvFormatFactoryTest.java  |  2 +-
 .../formats/json/JsonRowDataSerializationSchema.java |  6 +-
 .../flink/formats/json/RowDataToJsonConverters.java  |  7 ++-
 .../parquet/vector/ParquetSplitReaderUtil.java   |  2 +-
 .../parquet/vector/type/ParquetGroupField.java   |  2 +-
 .../vector/ParquetColumnarRowSplitReaderTest.java|  2 +-
 .../formats/protobuf/util/PbCodegenAppender.java |  2 +-
 .../decorators/FlinkConfMountDecorator.java  |  2 +-
 .../decorators/KerberosMountDecorator.java   |  2 +-
 .../decorators/PodTemplateMountDecorator.java|  2 +-
 .../apache/flink/kubernetes/KubernetesTestUtils.java |  2 +-
 .../resources/KubernetesSharedInformerITCase.java|  2 +-
 .../flink/cep/nfa/sharedbuffer/SharedBuffer.java | 10 +-
 .../apache/flink/cep/nfa/AfterMatchSkipITCase.java   |  2 +-
 .../java/org/apache/flink/cep/nfa/GreedyITCase.java  |  2 +-
 .../java/org/apache/flink/cep/nfa/GroupITCase.java   |  2 +-
 .../flink/cep/nfa/IterativeConditionsITCase.java |  2 +-
 .../java/org/apache/flink/cep/nfa/NFAITCase.java |  2 +-
 .../org/apache/flink/cep/nfa/NotPatternITCase.java   |  2 +-
 .../org/apache/flink/cep/nfa/SameElementITCase.java  |  4 ++--
 .../org/apache/flink/cep/nfa/TimesOrMoreITCase.java  |  2 +-
 .../org/apache/flink/cep/nfa/TimesRangeITCase.java   |  2 +-
 .../apache/flink/cep/nfa/UntilConditionITCase.java   |  2 +-
 .../flink/cep/nfa/compiler/NFACompilerTest.java  |  2 +-
 .../apache/flink/cep/operator/CEPOperatorTest.java   |  2 +-
 .../flink/state/api/runtime/OperatorIDGenerator.java |  2 +-
 .../apache/flink/optimizer/dag/SingleInputNode.java  |  2 +-
 .../org/apache/flink/optimizer/dag/TwoInputNode.java |  2 +-
 .../apache/flink/client/python/PythonEnvUtils.java   |  2 +-
 .../flink/client/python/Python

(flink-web) branch asf-site updated (abf0b0fe8 -> dd795eecb)

2024-07-03 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch asf-site
in repository https://gitbox.apache.org/repos/asf/flink-web.git


from abf0b0fe8 Rebuild website
 new 96fd15c58 Release flink-shaded 19.0
 new dd795eecb Rebuild website

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../08/26/apache-flink-0.6-available/index.html|  2 +-
 .../09/26/apache-flink-0.6.1-available/index.html  |  2 +-
 content/2014/10/03/upcoming-events/index.html  |  2 +-
 .../11/04/apache-flink-0.7.0-available/index.html  |  2 +-
 .../11/18/hadoop-compatibility-in-flink/index.html |  2 +-
 .../index.html |  2 +-
 .../01/21/apache-flink-0.8.0-available/index.html  |  2 +-
 .../january-2015-in-the-flink-community/index.html |  2 +-
 .../02/09/introducing-flink-streaming/index.html   |  2 +-
 .../index.html |  2 +-
 .../index.html |  2 +-
 .../march-2015-in-the-flink-community/index.html   |  2 +-
 .../index.html |  2 +-
 .../05/11/juggling-with-bits-and-bytes/index.html  |  2 +-
 .../april-2015-in-the-flink-community/index.html   |  2 +-
 .../06/24/announcing-apache-flink-0.9.0/index.html |  2 +-
 .../index.html |  2 +-
 .../09/01/apache-flink-0.9.1-available/index.html  |  2 +-
 .../09/03/announcing-flink-forward-2015/index.html |  2 +-
 .../index.html |  2 +-
 .../16/announcing-apache-flink-0.10.0/index.html   |  2 +-
 .../2015/11/27/flink-0.10.1-released/index.html|  2 +-
 .../index.html |  2 +-
 .../index.html |  2 +-
 .../index.html |  2 +-
 .../2016/02/11/flink-0.10.2-released/index.html|  2 +-
 .../03/08/announcing-apache-flink-1.0.0/index.html |  2 +-
 content/2016/04/06/flink-1.0.1-released/index.html |  2 +-
 .../index.html |  2 +-
 .../index.html |  2 +-
 content/2016/04/22/flink-1.0.2-released/index.html |  2 +-
 content/2016/05/11/flink-1.0.3-released/index.html |  2 +-
 .../index.html |  2 +-
 .../08/04/announcing-apache-flink-1.1.0/index.html |  2 +-
 content/2016/08/04/flink-1.1.1-released/index.html |  2 +-
 .../index.html |  2 +-
 .../09/05/apache-flink-1.1.2-released/index.html   |  2 +-
 .../10/12/apache-flink-1.1.3-released/index.html   |  2 +-
 .../apache-flink-in-2016-year-in-review/index.html |  2 +-
 .../12/21/apache-flink-1.1.4-released/index.html   |  2 +-
 .../02/06/announcing-apache-flink-1.2.0/index.html |  2 +-
 .../03/23/apache-flink-1.1.5-released/index.html   |  2 +-
 .../index.html |  2 +-
 .../index.html |  2 +-
 .../04/26/apache-flink-1.2.1-released/index.html   |  2 +-
 .../index.html |  2 +-
 .../index.html |  2 +-
 .../06/23/apache-flink-1.3.1-released/index.html   |  2 +-
 .../index.html |  2 +-
 .../08/05/apache-flink-1.3.2-released/index.html   |  2 +-
 .../index.html |  2 +-
 .../index.html |  2 +-
 .../apache-flink-in-2017-year-in-review/index.html |  2 +-
 .../index.html |  2 +-
 .../02/15/apache-flink-1.4.1-released/index.html   |  2 +-
 .../index.html |  2 +-
 .../03/08/apache-flink-1.4.2-released/index.html   |  2 +-
 .../03/15/apache-flink-1.3.3-released/index.html   |  2 +-
 .../index.html |  2 +-
 .../07/12/apache-flink-1.5.1-released/index.html   |  2 +-
 .../07/31/apache-flink-1.5.2-released/index.html   |  2 +-
 .../index.html |  2 +-
 .../08/21/apache-flink-1.5.3-released/index.html   |  2 +-
 .../09/20/apache-flink-1.5.4-released/index.html   |  2 +-
 .../09/20/apache-flink-1.6.1-released/index.html   |  2 +-
 .../10/29/apache-flink-1.5.5-released/index.html   |  2 +-
 .../10/29/apache-flink-1.6.2-released/index.html   |  2 +-
 .../index.html |  2 +-
 .../12/21/apache-flink-1.7.1-released/index.html   |  2 +-
 .../12/22/apache-flink-1.6.3-released/index.html   |  2 +-
 .../12/26/apache-flink-1.5.6-released/index.html   |  2 +-
 .../index.html |  2 +-
 .../02/15/apache-flink-1.7.2-released/index.html   |  2 +-
 .../index.html

(flink-web) 01/02: Release flink-shaded 19.0

2024-07-03 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/flink-web.git

commit 96fd15c58eec53768f70b27ea39a0688afb5fcad
Author: Dawid Wysakowicz 
AuthorDate: Fri Jun 28 12:40:25 2024 +0200

Release flink-shaded 19.0
---
 docs/data/additional_components.yml | 12 ++--
 docs/data/release_archive.yml   |  3 +++
 2 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/docs/data/additional_components.yml 
b/docs/data/additional_components.yml
index b19adeecf..8e311edaa 100644
--- a/docs/data/additional_components.yml
+++ b/docs/data/additional_components.yml
@@ -21,6 +21,12 @@ flink-connector-parent:
   source_release_asc_url: 
"https://downloads.apache.org/flink/flink-connector-parent-1.1.0/flink-connector-parent-1.1.0-src.tgz.asc;
   source_release_sha512_url: 
"https://downloads.apache.org/flink/flink-connector-parent-1.1.0/flink-connector-parent-1.1.0-src.tgz.sha512;
 
+flink-shaded-19.0:
+  name: "Apache Flink-shaded 19.0 Source Release"
+  source_release_url: 
"https://www.apache.org/dyn/closer.lua/flink/flink-shaded-19.0/flink-shaded-19.0-src.tgz;
+  source_release_asc_url: 
"https://downloads.apache.org/flink/flink-shaded-19.0/flink-shaded-19.0-src.tgz.asc;
+  source_release_sha512_url: 
"https://downloads.apache.org/flink/flink-shaded-19.0/flink-shaded-19.0-src.tgz.sha512;
+
 flink-shaded-18.0:
   name: "Apache Flink-shaded 18.0 Source Release"
   source_release_url: 
"https://www.apache.org/dyn/closer.lua/flink/flink-shaded-18.0/flink-shaded-18.0-src.tgz;
@@ -33,12 +39,6 @@ flink-shaded-17.0:
   source_release_asc_url: 
"https://downloads.apache.org/flink/flink-shaded-17.0/flink-shaded-17.0-src.tgz.asc;
   source_release_sha512_url: 
"https://downloads.apache.org/flink/flink-shaded-17.0/flink-shaded-17.0-src.tgz.sha512;
 
-flink-shaded-16.2:
-  name: "Apache Flink-shaded 16.2 Source Release"
-  source_release_url: 
"https://www.apache.org/dyn/closer.lua/flink/flink-shaded-16.2/flink-shaded-16.2-src.tgz;
-  source_release_asc_url: 
"https://downloads.apache.org/flink/flink-shaded-16.2/flink-shaded-16.2-src.tgz.asc;
-  source_release_sha512_url: 
"https://downloads.apache.org/flink/flink-shaded-16.2/flink-shaded-16.2-src.tgz.sha512;
-
 pre-bundled-hadoop-2.4:
   name: "Pre-bundled Hadoop 2.4.1"
   source_release_url: 
"https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.4.1-10.0/flink-shaded-hadoop-2-uber-2.4.1-10.0.jar;
diff --git a/docs/data/release_archive.yml b/docs/data/release_archive.yml
index 7d8bd2688..49557b903 100644
--- a/docs/data/release_archive.yml
+++ b/docs/data/release_archive.yml
@@ -630,6 +630,9 @@ release_archive:
   filename: "opensearch"
 
   flink_shaded:
+-
+  version: "19.0"
+  release_date: 2024-07-03
 -
   version: "18.0"
   release_date: 2024-01-11



(flink-shaded) annotated tag release-19.0 updated (26473c0 -> e686c1a)

2024-07-03 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to annotated tag release-19.0
in repository https://gitbox.apache.org/repos/asf/flink-shaded.git


*** WARNING: tag release-19.0 was modified! ***

from 26473c0  (commit)
  to e686c1a  (tag)
 tagging 84e93b4e6ea8c3b0b9913817de82ab106a963318 (tag)
  length 998 bytes
  by Dawid Wysakowicz
  on Wed Jul 3 14:28:54 2024 +0200

- Log -
release-19.0
-BEGIN PGP SIGNATURE-

iQIzBAABCAAdFiEE6pOkNbTiybTJ9TP2MdLdEL/BWi0FAmaFRAYACgkQMdLdEL/B
Wi3EMw//UQq5PR6jVJgytEzH7b131ugESEepyLtv7SY1bSA+QNqDqN7stiMLQRQ3
teypu8e7RGWCsg2oDJih9/zUau7CLVaLu32ttQNgOwt97MwaAhRNb91PDdZtJJI8
6CUL8wz8XAf8HZW+Zl22g4AWzykFl00sMN38lO5q2MYHsAwvGlMQuyFGOsXCzVxE
Iydhi36E2k8JibYa0k8YaODJ5zBKInkWbqGYqYAqFOW3stGc9rAomTzOEA64Ux8U
PaWTryQyEel6GTy10ru5H3oNqWrV1PKGLZguqDSCOysATxdwhfhDBauwZUAjibiJ
we8GnUXfcUAYZ8/w2Mqnd9Tx6NgqwcSkmINI6yC9ZzbuWBSL3KY3EfcKDJJoxhK2
PYRA1Uzg2K83CsuydIvX0mkOpngIlU8sssz9DEZYIpdGyOLcSNh38+2nyR+urSTC
z1IOAuqlRk5yMX2MJVk9QbG95VRYwwiGcpkKd2MwWdFSJH8LrXZ/2ho25Ch593df
R7cS2K5cvgmWHX+6zvm6TypQvvUz1WrGqw+JmCOYEWVpiU6j7rk6Pgm9E0jl/jKp
GxYILjbXnHH19ruNbpUAMIWE4AptI4TBLpuI8D1IVBniNJuuHo/4ydABQ9Fnwniw
zJfzAZKDOrXaFzL95FdKbnKgY0x3dK0axgQEDuBmakFd1x0qK0c=
=AWi3
-END PGP SIGNATURE-
---


No new revisions were added by this update.

Summary of changes:



svn commit: r70114 - /dev/flink/flink-shaded-19.0-rc1/ /release/flink/flink-shaded-19.0/

2024-07-03 Thread dwysakowicz
Author: dwysakowicz
Date: Wed Jul  3 12:26:56 2024
New Revision: 70114

Log:
Release Flink-shaded 19.0

Added:
release/flink/flink-shaded-19.0/
  - copied from r70113, dev/flink/flink-shaded-19.0-rc1/
Removed:
dev/flink/flink-shaded-19.0-rc1/



(flink-shaded) annotated tag release-19.0-rc1 updated (26473c0 -> 84e93b4)

2024-06-28 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to annotated tag release-19.0-rc1
in repository https://gitbox.apache.org/repos/asf/flink-shaded.git


*** WARNING: tag release-19.0-rc1 was modified! ***

from 26473c0  (commit)
  to 84e93b4  (tag)
 tagging 26473c0bd8a326f6625cdc02aa623fa62c22def7 (commit)
 replaces release-17.0
  by Dawid Wysakowicz
  on Fri Jun 28 12:05:31 2024 +0200

- Log -
release-19.0-rc1
-BEGIN PGP SIGNATURE-

iQIzBAABCAAdFiEE6pOkNbTiybTJ9TP2MdLdEL/BWi0FAmZ+iusACgkQMdLdEL/B
Wi000RAAjJ+nOua2MT/8bhRrfuWKqKvu9VvUSB1tsr4VKu3QEeUc8iN7u/wlDYw5
ddo8KNsqDiY93b2WIH7o2+DizUvPQvUTU4+cdiTPkttbVxZ9Mz7YcLqyHHT32Sbh
GlGpLI7HWfZQPJBYM0MZ+ck2hwHYcK8p1QZEcfYUD6h/Ck5M2tVkxmwCOVlsZ5RF
WRgIFkzxsWMGfsM+ovlZhf13gtaHYfGPDhkhI66zCMbWi2kZKgKiLDkHlOXYWNGt
SlARRkpnn4iEtUw50h07WfpcSCD2dqa32bFkcusuNpKCBqSWUvWCrdR8DHWpdATT
uFwYlp9X6OxRi5el64BVH2yat6pFftveLawCuZZ3ECBDHhqh+oG6QKgv9qMEjAv1
QH09vuNIlY201UAUGvkCHhWn/LFxzP/AGg9DOKXCQHiuikg33SolMnAp4jXEcERt
bRXXTDRPifLJkyMVBxeDhaARpEM9hwuwqJpLvTFO+VRPTk4sMXv0djjXY8cYErRc
LW79Rx4yBLWQ2wQ7hqdfBzxPYNPri0TKFe4FVrJkj0lHsvj+Zm2uOYWwCo/pu4MR
PAvR/s1Gwri8Y782dsep+CzYqEJh7CM/aju86m0lFPMXk+meNwxh/rN0K1UcG0dN
/bw+GizQ9thbJdIySM8Y3JMDtQL/9lA2VjmCXYXwK/WEjbufuJk=
=pyQW
-END PGP SIGNATURE-
---


No new revisions were added by this update.

Summary of changes:



svn commit: r70038 - in /dev/flink/flink-shaded-19.0-rc1: ./ flink-shaded-19.0-src.tgz flink-shaded-19.0-src.tgz.asc flink-shaded-19.0-src.tgz.sha512

2024-06-28 Thread dwysakowicz
Author: dwysakowicz
Date: Fri Jun 28 10:29:31 2024
New Revision: 70038

Log:
Add flink-shaded 19.0-rc1

Added:
dev/flink/flink-shaded-19.0-rc1/
dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz   (with props)
dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz.asc
dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz.sha512

Added: dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz
==
Binary file - no diff available.

Propchange: dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz
--
svn:mime-type = application/octet-stream

Added: dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz.asc
==
--- dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz.asc (added)
+++ dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz.asc Fri Jun 28 
10:29:31 2024
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIzBAABCAAdFiEE6pOkNbTiybTJ9TP2MdLdEL/BWi0FAmZ+jSMACgkQMdLdEL/B
+Wi36YhAArysZh4jz061K477HjfWW1dSVu5+1rruyDdo1KnTKisVIXoJQqBwzKjHB
+avDhaP36OpOEZFAHqQzuylWjPyujzj8xrP6rjODEY/9KGw23H39hK3f4PYRyxZ9e
+1TaVyZeh44Dco/u0aps5oYDkoQtoB0W7oaUpk6oQJlcGg/3aNcNL1xHpNrDnXjKe
+DhARMVDkZJRzXfYuSuJ/TYzilGU1dTxh72svNEiU6Ee75HaegXORSvbqyTFotSKp
+0C2GuJMjpbQAbeFB8d7Y8Nqw3CEbbjRhulsfpVvAH7cDYcFQitMyq7p0SlrZqyHA
+Kwefco4ZgL3EHum4mBOBfX8pUpY/pR9RlzADviH6Bx6Gzf6u8pErB7/0shWBDy59
+6EtgKjwBg/VtMrjzqMWWyHyYgR8n9l1hh8U5ddqzwU9oDI/XwIcD81o31RD8z4Gc
+tagPb5RijG9btAT5b/4hHr+zX7rTCfo544pY+V2S5DXdexHcpDD6D/MCiGIfUW2+
+Q7KrIPn075ipSCUBUKaAmIVXQCSx7y4YIOfDgsJaSGU7OjfRen6iaBd0OiBVNlEF
+0y69Nh+PoiOpnRhNzgGLDWqQ8WrEZlFemiCvV7LVHVhy3VkCbA8rTM6FDNNc/BMM
+tgk4IFnKpmqO0rkqmsAFf8CbkRFc9p/gaTD0WckprnRxHPFna7s=
+=kAT6
+-END PGP SIGNATURE-

Added: dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz.sha512
==
--- dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz.sha512 (added)
+++ dev/flink/flink-shaded-19.0-rc1/flink-shaded-19.0-src.tgz.sha512 Fri Jun 28 
10:29:31 2024
@@ -0,0 +1 @@
+eecb4708a754414ecd0de073a78b8df79561e634b7fd2a85713cb441f20b2db2f787f9bc7a20457ba21fa437e868a34b542b0fcc6e82576c21bf5de420bbb18a
  flink-shaded-19.0-src.tgz




svn commit: r70037 - /release/flink/KEYS

2024-06-28 Thread dwysakowicz
Author: dwysakowicz
Date: Fri Jun 28 10:03:03 2024
New Revision: 70037

Log:
[flink] Update Dawid Wysakowicz's public key

Modified:
release/flink/KEYS

Modified: release/flink/KEYS
==
--- release/flink/KEYS (original)
+++ release/flink/KEYS Fri Jun 28 10:03:03 2024
@@ -2190,15 +2190,15 @@ Gjpn7MXPW9kBr710JNliQcgQMU5vuyxb
 =Ped7
 -END PGP PUBLIC KEY BLOCK-
 
-pub   rsa4096 2017-06-16 [SC] [expires: 2023-04-01]
+pub   rsa4096 2017-06-16 [SC] [wygasa: 2028-06-27]
   EA93A435B4E2C9B4C9F533F631D2DD10BFC15A2D
-uid   [ultimate] Dawid Wysakowicz 
-sig 331D2DD10BFC15A2D 2021-04-01  Dawid Wysakowicz 

+uid[nieznane   ] Dawid Wysakowicz 
+sig 331D2DD10BFC15A2D 2024-06-28  Dawid Wysakowicz 

 sig 331D2DD10BFC15A2D 2017-06-16  Dawid Wysakowicz 

-uid   [ultimate] Dawid Wysakowicz 
-sig 331D2DD10BFC15A2D 2021-04-01  Dawid Wysakowicz 

-sub   rsa4096 2017-06-16 [E] [expires: 2023-04-01]
-sig  31D2DD10BFC15A2D 2021-04-01  Dawid Wysakowicz 

+uid[nieznane   ] Dawid Wysakowicz 
+sig 331D2DD10BFC15A2D 2024-06-28  Dawid Wysakowicz 

+sub   rsa4096 2017-06-16 [E] [wygasa: 2028-06-27]
+sig  31D2DD10BFC15A2D 2024-06-28  Dawid Wysakowicz 

 
 -BEGIN PGP PUBLIC KEY BLOCK-
 
@@ -2215,18 +2215,18 @@ zH7vtQd9gXv6qcqVDZr8yd2fzgfo1JJxc6we+ujV
 VfOlbIj8SgIdZBNEHdtB3HEZRZOCGSYBM/8Opa4qXs+3AG3W4Z3vW49ogwARAQAB
 tC1EYXdpZCBXeXNha293aWN6IDx3eXNha293aWN6LmRhd2lkQGdtYWlsLmNvbT6J
 AlcEEwEIAEECGwMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4ACGQEWIQTqk6Q1tOLJ
-tMn1M/Yx0t0Qv8FaLQUCYGWjegUJCuRq7wAKCRAx0t0Qv8FaLQ6gD/9zRUyX1kRD
-pNfDKUhsCcHG03knMk1YYE2QOXBhm0OY040sXK80+wucu1wFGrKxlf6ic3jm4duq
-2zxVo6Yjit6jADyHWgnH/k2oGO+YV3/LjwLr090GmFTVHFQPd0kZCixA5iCp1wLa
-njCS6+klGo++8hakWrcPw0oorxQd6PTyWne0o1iNTVGOtL46a86s+cihD0Anw3b0
-Pcqj53HTh6lhk/9IhRs7glWF1wlTtean8LzXXwmDlu0ZnCCua3Ge+i0tsbOUMCMF
-hsiBr9lO2lXFJ76oS49pvK4kF/RkIfwPJvpMIScuJ8nTIi4IVsY767KnoBBN+h7A
-GnVfXbTacpjsPskopz8MYdEP/HaBtdsxoX6oSs6wRDJjvq5JD1AKkvDWQ6pvjbtz
-fKHBPXuhqdmt/FlCsyMsk6xowwI5jlRr4w6RH7l0bJaWhikX1ncO0YIohwC8gh36
-Ccgdj9S4rPx+X2IMwiIDpBugW+XMlohNKtbRQIMlzBUfjFf315f699OXVRua3t77
-vF34m/nHhZ8EpX8LnnG0vNwtMS0qd5XDn6iPumWAcN9uVaKdjBNdGIsh7ed3owiM
-YqeCW8GEKFYAEMf0wtquRSlnMfUvSbHBZdMsHvK5/mVy1OEn7gqlPAjWz2owAv17
-kiqOnZwclBFkd6dYYPyxQSPrTXBRVpS6LYkCPQQTAQoAJwUCWUOfiwIbAwUJB4Yf
+tMn1M/Yx0t0Qv8FaLQUCZn6ChAUJFL+w+QAKCRAx0t0Qv8FaLWNTD/wJd1kS/aKT
++ESFE3VYEosTx09rtn+PoIWHTUuEL/k5rzL5Fc+yD/KSNrquubtqSXilFPCAz16Z
+gXrAJ7/zuLW+Hzj4k9r5qMOPDI0FPcJDXrOPEpCoFubEhX1ehYC0OepEYlPdttSG
+bjCE6Xg6tqSuYVm0Nq0/FgUzudDnrcF6pJH6IdKcM6NwX91NgXWMQSKcgYlVydOx
+LxR2mxptObN16oqmDo9QIVlDAqTRHAIuxsZs9iim5lW55QDwjpEw4Ek4GNZKyC1h
+pEJVKZJS8sk3Vu1q7yV6jzzLMXRHCfnM41IPfEP4UdTUT1eDCxHyzNsXo/TfqMni
+9jKb1w9NOs7ZVMku+pRs4yS/oobG95rfqnECk+ATNxv0j3eqFUIWY2frXkz0TSVm
+sQgBVTBsWIfTHwMyI2BHZnd76jU2rV3UTNNBG+qEzejmzf26OP9SRsk9uKFmLoeA
+AXY4q6tEP65zOH2Ca/MdpC5EcKONVOb0HvYzAjwEvFW4OtTP2bSrKAZnX2ItE9qM
+dl10OyVcUKSUGL+D1BonMCHMAyo9h9Msi72+nbkcqrFyR+VadD2lhRAn+wUhP0vv
+SKOpQc+rvZ7tZqEhPGIdaKkPidHRjx6bUBUEetLjmfWVs41ty/RXa13mMnUZsa4Q
+GHJfHzlvNRO2t5CCiX+yf9nz0sO+XCl8cYkCPQQTAQoAJwUCWUOfiwIbAwUJB4Yf
 gAULCQgHAwUVCgkICwUWAgMBAAIeAQIXgAAKCRAx0t0Qv8FaLY76D/9Y+WGoSRDY
 DRdgawZOAlWwWmQkdced6zg2P0UT+WB6oOn+qGZibgJXMdSqxEOcWrc9pkYCCQ/8
 iq7KRF0+GkaqKQm4pYW8eqYVJfgZp3N+VxID549fwJBGoruy23uptWgdd1gs4kdY
@@ -2240,18 +2240,18 @@ gsR9/BMdF693gHFcLha2GZos0rGJ9X+0y1ytusNO
 bOvTuUHNO4p/4KI6r7rUz4rJ6CWFFlNfO7XVqvyEVWJg3LOp0nIurNDCNqW4DQL0
 vSc9Qv3mDSZ3g9W/Q6r5O7OyKPZJrXUgSLQpRGF3aWQgV3lzYWtvd2ljeiA8ZHd5
 c2Frb3dpY3pAYXBhY2hlLm9yZz6JAlQEEwEIAD4CGwMFCwkIBwMFFQoJCAsFFgID
-AQACHgECF4AWIQTqk6Q1tOLJtMn1M/Yx0t0Qv8FaLQUCYGWjegUJCuRq7wAKCRAx
-0t0Qv8FaLfINEACr5CuYL8tn5F0z2k1kKI4jWTrmUUwuMNT3vDp6RCdJbmkT6UZ9
-GcM3N7soMjQRNucVRetvgCNYYxhU/JzjRpEyN6la69AtwrW+LCRY+SnS5j3OJkNE
-nl6OwJOBeBTYSHalto5idKdfqVk85NYxAliUl6o/ZTYIdpdTLAsABNkhS42loTlb
-XGQa5eTvTfDuQq1NUy+bzMretTOiwq5cVhkzK3JVais5TURvpSSl30T20xCdzFvN
-0ywrb4Fq4sDgZqrQVyZBzmeLL0H9pZiFiqDmZTsV2VAHB4EHEbCpKs/y1surhfnx
-gUmeXTQLo4qGPTQ7CCbECHxyiaAbBtfLR/BAqwcl3WyPaRb9o55WOyJzU8NBJGqV
-ecEHvOYSMuRGmxsdDQDGfS3i/7vINH6gDcFo/0A5FJfLB3uEio+0bA0QDxqe73pP
-rIrZ8v+v8eckQzidCLG0BfOSFBYKkSv6AR+KiAxjWNT/m37O9ek58xT9X+F2Odc3
-PdRiiZiOQUnOc/3vNYaNPskfpBz4cHleBmzZ4bDqYb1NnnfOS9AhlboGwbAIh/D5
-ZUiPclSncA3ceGpG/8+ke1fpFLdyohA1NPi6hQAJFxgmN44Ze2x3t+IEgpOacB5Y
-ImeAply76pjfWq2Bb0wWvvrneEzBeOtK15Fp+Vab3x86kBlGF7DObWUJ8rkCDQRZ
+AQACHgECF4AWIQTqk6Q1tOLJtMn1M/Yx0t0Qv8FaLQUCZn6ChQUJFL+w+QAKCRAx
+0t0Qv8FaLczyD/9XLANK2h+4VxfCIySwWvCNEMLZOMvh+0XmelVhp4je6IOY5IFk
+/KmDO/wukXM2pG1jtMxxyr5VtCVsEG/kOXHhzOvgCrvTFITD3svq8+v96V44hAQF
+DN2P1SrcdcOCrRBy0OLu7urcPA+JPOSxnILs/xoCdvfhfW2hoAimBeRDwBt7YaDz
+EAXLPh36DIqGmsYZIaiMf55tGR5mkB5Z4j4YTHKfRylyyWmBLQQTMGY96MMevbgA
++i6bJYO7FWH+QHXwSr/aAwF/dn4t83f1QDoMlW6jjWcX41ThzYAliy+QRgpqCQHj
+iFWSaFW9oofcrCD/bTaLgjhsHK3Mdmk02436box+x+OKVJTuamA/tc4LpudAWoMY
+aHWlCy4yFJ+MXg1Zl5xozbNaqwSbICKzcDHZU1B7GYhm1LRAE57k4gwWPzyS83Uf
+hKolnQ/FTPXW

(flink-shaded) branch master updated: [FLINK-35696] Shade com.jayway.jsonpath:json-path (#138)

2024-06-27 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-shaded.git


The following commit(s) were added to refs/heads/master by this push:
 new b23e1a8  [FLINK-35696] Shade com.jayway.jsonpath:json-path (#138)
b23e1a8 is described below

commit b23e1a811fcacbc5f53993297304131246bb5d04
Author: Dawid Wysakowicz 
AuthorDate: Thu Jun 27 09:37:01 2024 +0200

[FLINK-35696] Shade com.jayway.jsonpath:json-path (#138)
---
 .../{ => flink-shaded-jsonpath}/pom.xml| 63 ++
 .../src/main/resources/META-INF/NOTICE |  9 
 flink-shaded-jackson-parent/pom.xml|  4 ++
 pom.xml|  1 +
 4 files changed, 42 insertions(+), 35 deletions(-)

diff --git a/flink-shaded-jackson-parent/pom.xml 
b/flink-shaded-jackson-parent/flink-shaded-jsonpath/pom.xml
similarity index 55%
copy from flink-shaded-jackson-parent/pom.xml
copy to flink-shaded-jackson-parent/flink-shaded-jsonpath/pom.xml
index 3b8f37c..44d6636 100644
--- a/flink-shaded-jackson-parent/pom.xml
+++ b/flink-shaded-jackson-parent/flink-shaded-jsonpath/pom.xml
@@ -24,64 +24,57 @@ under the License.
 
 
 org.apache.flink
-flink-shaded
-19.0
+flink-shaded-jackson-parent
+2.15.3-19.0
 
 
-flink-shaded-jackson-parent
-flink-shaded-jackson-parent
-pom
-2.15.3-19.0
+flink-shaded-jsonpath
+flink-shaded-jsonpath
+
+2.7.0-19.0
 
-
-flink-shaded-jackson-2
-flink-shaded-jackson-module-jsonSchema-2
-
-
-
-
-
-com.fasterxml.jackson
-jackson-bom
-${jackson.version}
-import
-pom
-
-
-
+
+
+com.jayway.jsonpath
+json-path
+${jsonpath.version}
+
+
 
 
 
 
 org.apache.maven.plugins
 maven-shade-plugin
+
+3.4.1
 
 
 shade-flink
-package
-
-shade
-
 
-
true
-
${project.basedir}/target/dependency-reduced-pom.xml
 
-
-
com.fasterxml.jackson.*:*
+
+
com.jayway.jsonpath:json-path
 
 
-
+
 
-
-com.fasterxml.jackson
-
${shading.prefix}.jackson2.com.fasterxml.jackson
+com.jayway.jsonpath
+
${shading.prefix}.com.jayway.jsonpath
+
 
 
 
 
 
 
+
+
+
+org.codehaus.mojo
+flatten-maven-plugin
+
 
 
-
 
\ No newline at end of file
diff --git 
a/flink-shaded-jackson-parent/flink-shaded-jsonpath/src/main/resources/META-INF/NOTICE
 
b/flink-shaded-jackson-parent/flink-shaded-jsonpath/src/main/resources/META-INF/NOTICE
new file mode 100644
index 000..05c16ad
--- /dev/null
+++ 
b/flink-shaded-jackson-parent/flink-shaded-jsonpath/src/main/resources/META-INF/NOTICE
@@ -0,0 +1,9 @@
+flink-shaded-jsonpath
+Copyright 2014-2024 The Apache Software Foundation
+
+This project includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+
+This project bundles the following dependencies under the Apache Software 
License 2.0 (http://www.apache.org/licenses/LICENSE-2.0.txt)
+
+- com.jayway.jsonpath:json-path:2.7.0
diff --git a/flink-shaded-jackson-parent/pom.xml 
b/flink-shaded-jackson-parent/pom.xml
index 3b8f37c..8c044d3 100644
--- a/flink-shaded-jackson-parent/pom.xml
+++ b/flink-shaded-jackson-parent/pom.xml
@@ -36,6 +36,10 @@ under the License.
 
 flink-shaded-jackson-2
 flink-shaded-jackson-module-jsonSchema-2
+
+flink-shaded-jsonpath
 
 
 
diff --git a/pom.xml b/pom.xml
index d261725..0c6f67f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -64,6 +64,7 @@ under the License.
 org.apache.flink.shaded
 4.1.100.Final
 2.15.3
+2.7.0
 32.1.3-jre
 

(flink) branch master updated: [FLINK-35687] JSON_QUERY should return a well formatted nested objects/arrays for ARRAY (#24976)

2024-06-26 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 90508a00bb3 [FLINK-35687] JSON_QUERY should return a well formatted 
nested objects/arrays for ARRAY (#24976)
90508a00bb3 is described below

commit 90508a00bb310d26f9be245098cda9192647ab14
Author: Dawid Wysakowicz 
AuthorDate: Wed Jun 26 08:49:57 2024 +0200

[FLINK-35687] JSON_QUERY should return a well formatted nested 
objects/arrays for ARRAY (#24976)
---
 .../planner/functions/JsonFunctionsITCase.java | 27 ++
 .../table/runtime/functions/SqlJsonUtils.java  |  8 ++-
 2 files changed, 34 insertions(+), 1 deletion(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/JsonFunctionsITCase.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/JsonFunctionsITCase.java
index 5108fcdac3d..b8780a34059 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/JsonFunctionsITCase.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/JsonFunctionsITCase.java
@@ -447,6 +447,33 @@ class JsonFunctionsITCase extends BuiltInFunctionTestBase {
 "JSON_QUERY(f0, '$.a' RETURNING ARRAY 
WITHOUT WRAPPER EMPTY ARRAY ON ERROR)",
 new String[] {},
 DataTypes.ARRAY(DataTypes.STRING())),
+
+// stringifying RETURNING
+TestSetSpec.forFunction(BuiltInFunctionDefinitions.JSON_QUERY)
+.onFieldsWithData(
+"{\"items\": [{\"itemId\":1234, \"count\":10}, 
null, {\"itemId\":4567, \"count\":11}]}",
+"{\"items\": [[1234, 2345], null, [\"itemId\", 
\"count\"]]}",
+"{\"arr\": [\"abc\", null, \"def\"]}")
+.andDataTypes(STRING(), STRING(), STRING())
+.testResult(
+$("f0").jsonQuery("$.items", ARRAY(STRING())),
+"JSON_QUERY(f0, '$.items' RETURNING 
ARRAY)",
+new String[] {
+"{\"itemId\":1234,\"count\":10}",
+null,
+"{\"itemId\":4567,\"count\":11}"
+},
+ARRAY(STRING()))
+.testResult(
+$("f1").jsonQuery("$.items", ARRAY(STRING())),
+"JSON_QUERY(f1, '$.items' RETURNING 
ARRAY)",
+new String[] {"[1234,2345]", null, 
"[\"itemId\",\"count\"]"},
+ARRAY(STRING()))
+.testResult(
+$("f2").jsonQuery("$.arr", ARRAY(STRING())),
+"JSON_QUERY(f2, '$.arr' RETURNING 
ARRAY)",
+new String[] {"abc", null, "def"},
+ARRAY(STRING())),
 TestSetSpec.forFunction(BuiltInFunctionDefinitions.JSON_QUERY)
 .onFieldsWithData(jsonValue)
 .andDataTypes(STRING())
diff --git 
a/flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/SqlJsonUtils.java
 
b/flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/SqlJsonUtils.java
index 2169f5c470a..09cb0009bf1 100644
--- 
a/flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/SqlJsonUtils.java
+++ 
b/flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/SqlJsonUtils.java
@@ -265,7 +265,13 @@ public class SqlJsonUtils {
 for (int i = 0; i < list.size(); i++) {
 final Object el = list.get(i);
 if (el != null) {
-arr[i] = 
StringData.fromString(el.toString());
+final String stringifiedEl;
+if (isScalarObject(el)) {
+stringifiedEl = String.valueOf(el);
+} else {
+stringifiedEl = jsonize(el);
+}
+arr[i] = 
StringData.fromString(stringifiedEl);
 }
 }
 



(flink) branch master updated: [FLINK-35619] Window rank query fails with 'must call validate first' (#24940)

2024-06-19 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new e91d4a16305 [FLINK-35619] Window rank query fails with 'must call 
validate first' (#24940)
e91d4a16305 is described below

commit e91d4a16305ddf7d4e34f45c9eec9c0af454a1d4
Author: Dawid Wysakowicz 
AuthorDate: Wed Jun 19 10:30:49 2024 +0200

[FLINK-35619] Window rank query fails with 'must call validate first' 
(#24940)
---
 .../calcite/sql/validate/ProcedureNamespace.java   |   8 +-
 .../functions/sql/SqlWindowTableFunction.java  |  22 
 .../planner/plan/nodes/exec/stream/MiscTests.java  | 116 +
 .../nodes/exec/stream/WindowRankTestPrograms.java  |  41 
 4 files changed, 185 insertions(+), 2 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/validate/ProcedureNamespace.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/validate/ProcedureNamespace.java
index 9bd055f75d2..ee811aafbf9 100644
--- 
a/flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/validate/ProcedureNamespace.java
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/validate/ProcedureNamespace.java
@@ -25,6 +25,7 @@ import org.apache.calcite.sql.SqlCallBinding;
 import org.apache.calcite.sql.SqlNode;
 import org.apache.calcite.sql.SqlOperator;
 import org.apache.calcite.sql.SqlTableFunction;
+import org.apache.calcite.sql.SqlWindowTableFunction;
 import org.apache.calcite.sql.type.SqlReturnTypeInference;
 
 import static java.util.Objects.requireNonNull;
@@ -56,14 +57,17 @@ public final class ProcedureNamespace extends 
AbstractNamespace {
 
 public RelDataType validateImpl(RelDataType targetRowType) {
 validator.inferUnknownTypes(validator.unknownType, scope, call);
-// The result is ignored but the type is derived to trigger the 
validation
+final SqlOperator operator = call.getOperator();
 final SqlCallBinding callBinding = new FlinkSqlCallBinding(validator, 
scope, call);
+// The result is ignored but the type is derived to trigger the 
function resolution
 validator.deriveTypeImpl(scope, callBinding.permutedCall());
-final SqlOperator operator = call.getOperator();
 if (!(operator instanceof SqlTableFunction)) {
 throw new IllegalArgumentException(
 "Argument must be a table function: " + 
operator.getNameAsId());
 }
+if (operator instanceof SqlWindowTableFunction) {
+callBinding.permutedCall().validate(validator, scope);
+}
 final SqlTableFunction tableFunction = (SqlTableFunction) operator;
 final SqlReturnTypeInference rowTypeInference = 
tableFunction.getRowTypeInference();
 final RelDataType rowRelDataType =
diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlWindowTableFunction.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlWindowTableFunction.java
index 3f22bed1907..4a33380839b 100644
--- 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlWindowTableFunction.java
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlWindowTableFunction.java
@@ -42,6 +42,7 @@ import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.calcite.sql.type.SqlTypeUtil;
 import org.apache.calcite.sql.validate.SqlNameMatcher;
 import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorScope;
 
 import java.util.Collections;
 import java.util.List;
@@ -96,6 +97,27 @@ public class SqlWindowTableFunction extends 
org.apache.calcite.sql.SqlWindowTabl
 return ARG0_TABLE_FUNCTION_WINDOWING;
 }
 
+@Override
+public void validateCall(
+SqlCall call,
+SqlValidator validator,
+SqlValidatorScope scope,
+SqlValidatorScope operandScope) {
+assert call.getOperator() == this;
+final List operandList = call.getOperandList();
+// Validation for DESCRIPTOR or PARTITION BY of SESSION window is 
broken, and we
+// make assumptions at different locations those are not validated and 
not properly scoped.
+// Theoretically, we should scope identifiers of the above to the 
result of the subquery
+// from the first argument. Unfortunately this breaks at other 
locations which do not expect
+// it. We run additional validations while deriving the return type, 
therefore we can skip
+// it here.
+SqlNode selectQuery = operandList.get(0);
+if (selectQuery.getKind().equals(SqlKind.SET_SEMAN

(flink) branch master updated: [FLINK-35437] Rewrite BlockStatementGrouper so that it uses less memory (#24834)

2024-05-24 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 3d40bd7dd19 [FLINK-35437] Rewrite BlockStatementGrouper so that it 
uses less memory (#24834)
3d40bd7dd19 is described below

commit 3d40bd7dd197b12b7b156bd758b4129148e885d1
Author: Dawid Wysakowicz 
AuthorDate: Fri May 24 10:25:35 2024 +0200

[FLINK-35437] Rewrite BlockStatementGrouper so that it uses less memory 
(#24834)
---
 .../table/codesplit/BlockStatementGrouper.java | 118 +
 .../planner/functions/CaseFunctionsITCase.java |  50 +
 2 files changed, 100 insertions(+), 68 deletions(-)

diff --git 
a/flink-table/flink-table-code-splitter/src/main/java/org/apache/flink/table/codesplit/BlockStatementGrouper.java
 
b/flink-table/flink-table-code-splitter/src/main/java/org/apache/flink/table/codesplit/BlockStatementGrouper.java
index 0d6854ab748..e43a34c41b8 100644
--- 
a/flink-table/flink-table-code-splitter/src/main/java/org/apache/flink/table/codesplit/BlockStatementGrouper.java
+++ 
b/flink-table/flink-table-code-splitter/src/main/java/org/apache/flink/table/codesplit/BlockStatementGrouper.java
@@ -28,10 +28,9 @@ import org.antlr.v4.runtime.ParserRuleContext;
 import org.antlr.v4.runtime.Token;
 import org.antlr.v4.runtime.TokenStreamRewriter;
 import org.antlr.v4.runtime.atn.PredictionMode;
-import org.apache.commons.lang3.tuple.Pair;
 
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -107,12 +106,14 @@ import java.util.stream.Collectors;
 @Internal
 public class BlockStatementGrouper {
 
-private final String code;
-
 private final long maxMethodLength;
 
 private final String parameters;
 
+private final TokenStreamRewriter rewriter;
+
+private final StatementContext topStatement;
+
 /**
  * Initialize new BlockStatementGrouper.
  *
@@ -121,9 +122,14 @@ public class BlockStatementGrouper {
  * @param parameters parameters definition that should be used for 
extracted methods.
  */
 public BlockStatementGrouper(String code, long maxMethodLength, String 
parameters) {
-this.code = code;
 this.maxMethodLength = maxMethodLength;
 this.parameters = parameters;
+CommonTokenStream tokenStream =
+new CommonTokenStream(new 
JavaLexer(CharStreams.fromString(code)));
+JavaParser javaParser = new JavaParser(tokenStream);
+javaParser.getInterpreter().setPredictionMode(PredictionMode.SLL);
+this.topStatement = javaParser.statement();
+this.rewriter = new TokenStreamRewriter(tokenStream);
 }
 
 /**
@@ -135,38 +141,21 @@ public class BlockStatementGrouper {
  * groups with their names and content.
  */
 public RewriteGroupedCode rewrite(String context) {
-
 BlockStatementGrouperVisitor visitor =
 new BlockStatementGrouperVisitor(maxMethodLength, parameters);
-CommonTokenStream tokenStream =
-new CommonTokenStream(new 
JavaLexer(CharStreams.fromString(code)));
-JavaParser javaParser = new JavaParser(tokenStream);
-javaParser.getInterpreter().setPredictionMode(PredictionMode.SLL);
-TokenStreamRewriter rewriter = new TokenStreamRewriter(tokenStream);
-visitor.visitStatement(javaParser.statement(), context, rewriter);
-
-visitor.rewrite();
-Map>> groups 
= visitor.groups;
-
-Map> groupStrings =
-CollectionUtil.newHashMapWithExpectedSize(groups.size());
-for (Entry>> 
group :
-groups.entrySet()) {
-List collectedStringGroups =
-group.getValue().getValue().stream()
-.map(LocalGroupElement::getBody)
-.collect(Collectors.toList());
-
-groupStrings.put(group.getKey(), collectedStringGroups);
-}
+
+visitor.visitStatement(topStatement, context);
+final Map> groupStrings = 
visitor.rewrite(rewriter);
 
 return new RewriteGroupedCode(rewriter.getText(), groupStrings);
 }
 
 private static class BlockStatementGrouperVisitor {
 
-private final Map>> groups =
-new HashMap<>();
+// Needs to be an ordered map, so that we later apply the innermost 
nested
+// groups/transformations first and work on the results of such 
extractions with outer
+// groups/transformations.
+private final Map> groups = new 
LinkedHashMap<>();
 
 private final long maxMethodLength;
 
@@ -179,8 +168,7 @@ public class BlockStatementGrouper {
 this.parameters = parameters;
 

(flink) branch master updated: [FLINK-32706][table] Add built-in SPLIT_STRING function (#24365)

2024-05-22 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 4c6571d075b [FLINK-32706][table] Add built-in SPLIT_STRING function 
(#24365)
4c6571d075b is described below

commit 4c6571d075b1d1ff5e7b9d7ec3bf625329155fbf
Author: Hanyu Zheng <135176127+hanyuzhe...@users.noreply.github.com>
AuthorDate: Wed May 22 09:01:24 2024 -0500

[FLINK-32706][table] Add built-in SPLIT_STRING function (#24365)
---
 docs/data/sql_functions.yml|  5 +-
 .../docs/reference/pyflink.table/expressions.rst   |  1 +
 flink-python/pyflink/table/expression.py   | 11 +++
 .../flink/table/api/internal/BaseExpressions.java  | 15 
 .../functions/BuiltInFunctionDefinitions.java  | 12 
 .../functions/CollectionFunctionsITCase.java   | 82 +-
 .../runtime/functions/scalar/SplitFunction.java| 65 +
 7 files changed, 189 insertions(+), 2 deletions(-)

diff --git a/docs/data/sql_functions.yml b/docs/data/sql_functions.yml
index 795a4d8f904..e7a0455159a 100644
--- a/docs/data/sql_functions.yml
+++ b/docs/data/sql_functions.yml
@@ -688,7 +688,10 @@ collection:
   - sql: ARRAY_EXCEPT(array1, array2)
 table: arrayOne.arrayExcept(arrayTwo)
 description: Returns an ARRAY that contains the elements from array1 that 
are not in array2. If no elements remain after excluding the elements in array2 
from array1, the function returns an empty ARRAY. If one or both arguments are 
NULL, the function returns NULL. The order of the elements from array1 is kept.
-
+  - sql: SPLIT(string, delimiter)
+table: string.split(delimiter)
+description: Returns an array of substrings by splitting the input string 
based on the given delimiter. If the delimiter is not found in the string, the 
original string is returned as the only element in the array. If the delimiter 
is empty, every character in the string is split. If the string or delimiter is 
null, a null value is returned. If the delimiter is found at the beginning or 
end of the string, or there are contiguous delimiters, then an empty string is 
added to the array.
+
 json:
   - sql: IS JSON [ { VALUE | SCALAR | ARRAY | OBJECT } ]
 table: STRING.isJson([JsonType type])
diff --git a/flink-python/docs/reference/pyflink.table/expressions.rst 
b/flink-python/docs/reference/pyflink.table/expressions.rst
index e6ae0d92921..f9c3247c5f2 100644
--- a/flink-python/docs/reference/pyflink.table/expressions.rst
+++ b/flink-python/docs/reference/pyflink.table/expressions.rst
@@ -243,6 +243,7 @@ advanced type helper functions
 Expression.map_union
 Expression.map_values
 Expression.array_except
+Expression.split
 
 
 time definition functions
diff --git a/flink-python/pyflink/table/expression.py 
b/flink-python/pyflink/table/expression.py
index 648ad62bfb5..f305a95f5c9 100644
--- a/flink-python/pyflink/table/expression.py
+++ b/flink-python/pyflink/table/expression.py
@@ -1618,6 +1618,17 @@ class Expression(Generic[T]):
 """
 return _binary_op("arrayExcept")(self, array)
 
+def split(self, delimiter) -> 'Expression':
+"""
+Returns an array of substrings by splitting the input string based on 
the given delimiter.
+If the delimiter is not found in the string, the original string is 
returned as the only
+element in the array. If the delimiter is empty, every character in 
the string is split.
+If the string or delimiter is null, a null value is returned. If the 
delimiter is found at
+the beginning or end of the string, or there are contiguous 
delimiters, then an empty
+string is added to the array.
+"""
+return _binary_op("split")(self, delimiter)
+
 @property
 def map_keys(self) -> 'Expression':
 """
diff --git 
a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
 
b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
index bf5df75126f..e523fe59b13 100644
--- 
a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
+++ 
b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
@@ -174,6 +174,7 @@ import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.SIGN;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.SIMILAR;
 import static org.apache.flink.table.functions.BuiltInFunctionDefinitions.SIN;
 import static org.apache.flink.table.functions.BuiltInFunctionDefinitions.SINH;
+import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.SPLIT;
 imp

(flink) branch master updated: [FLINK-35155] Introduce TableRuntimeException (#24679)

2024-05-06 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 1904b215e36 [FLINK-35155] Introduce TableRuntimeException (#24679)
1904b215e36 is described below

commit 1904b215e36e4fd48e48ece7ffdf2f1470653130
Author: Dawid Wysakowicz 
AuthorDate: Mon May 6 11:00:53 2024 +0200

[FLINK-35155] Introduce TableRuntimeException (#24679)
---
 .../TableRuntimeException.java}| 34 -
 .../flink/table/data/utils/CastExecutor.java   |  4 +-
 .../casting/AbstractCodeGeneratorCastRule.java |  6 +-
 .../table/planner/codegen/ExprCodeGenerator.scala  |  2 +-
 .../BuiltInAggregateFunctionTestBase.java  | 79 ++-
 .../planner/functions/BuiltInFunctionTestBase.java | 15 
 .../planner/functions/JsonFunctionsITCase.java |  7 ++
 .../planner/functions/MiscAggFunctionITCase.java   | 54 +
 .../planner/functions/casting/CastRulesTest.java   | 89 +++---
 .../table/runtime/functions/SqlJsonUtils.java  | 42 +-
 10 files changed, 239 insertions(+), 93 deletions(-)

diff --git 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/CastExecutor.java
 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/TableRuntimeException.java
similarity index 54%
copy from 
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/CastExecutor.java
copy to 
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/TableRuntimeException.java
index e701a198533..fffbedbd629 100644
--- 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/CastExecutor.java
+++ 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/TableRuntimeException.java
@@ -16,28 +16,24 @@
  * limitations under the License.
  */
 
-package org.apache.flink.table.data.utils;
+package org.apache.flink.table.api;
 
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.table.api.TableException;
-
-import javax.annotation.Nullable;
+import org.apache.flink.annotation.PublicEvolving;
 
 /**
- * Interface to model a function that performs the casting of a value from one 
type to another.
+ * Exception for errors occurring in the runtime.
  *
- * @param  Input internal type
- * @param  Output internal type
+ * This exception indicates the exception was thrown intentionally, e.g. 
during evaluation of
+ * {@code SINGLE_VALUE} function. Most likely a user error.
  */
-@Internal
-@FunctionalInterface
-public interface CastExecutor {
-/**
- * Cast the input value. The output is null only and only if the input is 
null. The method
- * throws an exception if something goes wrong when casting.
- *
- * @param value Input value.
- */
-@Nullable
-OUT cast(@Nullable IN value) throws TableException;
+@PublicEvolving
+public class TableRuntimeException extends RuntimeException {
+
+public TableRuntimeException(String message, Throwable cause) {
+super(message, cause);
+}
+
+public TableRuntimeException(String message) {
+super(message);
+}
 }
diff --git 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/CastExecutor.java
 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/CastExecutor.java
index e701a198533..032b68dda67 100644
--- 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/CastExecutor.java
+++ 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/CastExecutor.java
@@ -19,7 +19,7 @@
 package org.apache.flink.table.data.utils;
 
 import org.apache.flink.annotation.Internal;
-import org.apache.flink.table.api.TableException;
+import org.apache.flink.table.api.TableRuntimeException;
 
 import javax.annotation.Nullable;
 
@@ -39,5 +39,5 @@ public interface CastExecutor {
  * @param value Input value.
  */
 @Nullable
-OUT cast(@Nullable IN value) throws TableException;
+OUT cast(@Nullable IN value) throws TableRuntimeException;
 }
diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/AbstractCodeGeneratorCastRule.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/AbstractCodeGeneratorCastRule.java
index 7eabc4d4f12..71cdaa33563 100644
--- 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/AbstractCodeGeneratorCastRule.java
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/AbstractCodeGeneratorCastRule.java
@@ -19,7 +19,7 @@
 package org.apache.flink.table.planner.functions.casting;
 
 import org.apache.flink.api.common.typeutils.TypeSerializer;
-import

(flink) 02/02: [FLINK-33676] Deleting WindowAggregateJsonPlanTest.java and WindowAggregateJsonITCase.java

2024-04-08 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 68cc61a86187021c61e7f51ccff8c5912125d013
Author: Jim Hughes 
AuthorDate: Fri Mar 22 18:18:31 2024 -0400

[FLINK-33676] Deleting WindowAggregateJsonPlanTest.java and 
WindowAggregateJsonITCase.java
---
 .../exec/stream/WindowAggregateJsonPlanTest.java   | 528 -
 .../stream/jsonplan/WindowAggregateJsonITCase.java | 243 --
 2 files changed, 771 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowAggregateJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowAggregateJsonPlanTest.java
deleted file mode 100644
index 413b5a30062..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowAggregateJsonPlanTest.java
+++ /dev/null
@@ -1,528 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.api.config.OptimizerConfigOptions;
-import 
org.apache.flink.table.planner.plan.utils.JavaUserDefinedAggFunctions.ConcatDistinctAggFunction;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-/** Test json serialization/deserialization for window aggregate. */
-class WindowAggregateJsonPlanTest extends TableTestBase {
-
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-
-String insertOnlyTableDdl =
-"CREATE TABLE MyTable (\n"
-+ " a INT,\n"
-+ " b BIGINT,\n"
-+ " c VARCHAR,\n"
-+ " `rowtime` AS TO_TIMESTAMP(c),\n"
-+ " proctime as PROCTIME(),\n"
-+ " WATERMARK for `rowtime` AS `rowtime` - INTERVAL 
'1' SECOND\n"
-+ ") WITH (\n"
-+ " 'connector' = 'values')\n";
-tEnv.executeSql(insertOnlyTableDdl);
-
-String changelogTableDdl =
-"CREATE TABLE MyCDCTable (\n"
-+ " a INT,\n"
-+ " b BIGINT,\n"
-+ " c VARCHAR,\n"
-+ " `rowtime` AS TO_TIMESTAMP(c),\n"
-+ " proctime as PROCTIME(),\n"
-+ " WATERMARK for `rowtime` AS `rowtime` - INTERVAL 
'1' SECOND\n"
-+ ") WITH (\n"
-+ " 'connector' = 'values',\n"
-+ " 'changelog-mode' = 'I,UA,UB,D')\n";
-tEnv.executeSql(changelogTableDdl);
-}
-
-@Test
-void testEventTimeTumbleWindow() {
-tEnv.createFunction("concat_distinct_agg", 
ConcatDistinctAggFunction.class);
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ " b BIGINT,\n"
-+ " window_start TIMESTAMP(3),\n"
-+ " window_end TIMESTAMP(3),\n"
-+ " cnt BIGINT,\n"
-+ " sum_a INT,\n"
-+ " distinct_cnt BIGINT,\n"
-+ " concat_distinct STRING\n"
-+ ") WITH (\n"
-+ " 'connector' = 'values')\n";
-  

(flink) branch master updated (82116865b01 -> 68cc61a8618)

2024-04-08 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 82116865b01 [FLINK-34649][table] Migrate 
PushFilterIntoLegacyTableSourceScanRule to java
 new 0dc2a0f74cf [FLINK-33676] Implement RestoreTests for WindowAggregate
 new 68cc61a8618 [FLINK-33676] Deleting WindowAggregateJsonPlanTest.java 
and WindowAggregateJsonITCase.java

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../WindowAggregateEventTimeRestoreTest.java   |  65 +++
 .../exec/stream/WindowAggregateJsonPlanTest.java   | 528 
 .../exec/stream/WindowAggregateTestPrograms.java   | 538 +
 .../stream/jsonplan/WindowAggregateJsonITCase.java | 243 --
 .../testEventTimeSessionWindowWithPartitionKey.out | 373 --
 ...stEventTimeSessionWindowWithoutPartitionKey.out | 355 --
 .../testProcTimeCumulateWindowWithCDCSource.out| 474 --
 .../testProcTimeHopWindow.out  | 473 --
 .../testProcTimeHopWindowWithCDCSource.out | 474 --
 .../testProcTimeSessionWindowWithPartitionKey.out  | 460 --
 ...estProcTimeSessionWindowWithoutPartitionKey.out | 432 -
 .../testProcTimeTumbleWindow.out   | 459 --
 .../testProcTimeTumbleWindowWithCDCSource.out  | 460 --
 ...ulate-event-time-two-phase-distinct-split.json} | 333 ++---
 .../savepoint/_metadata| Bin 0 -> 29761 bytes
 ...cumulate-event-time-two-phase-with-offset.json} | 315 +---
 .../savepoint/_metadata| Bin 0 -> 23185 bytes
 ...w-aggregate-cumulate-event-time-two-phase.json} | 306 
 .../savepoint/_metadata| Bin 0 -> 23181 bytes
 ...ate-event-time-with-offset-distinct-split.json} | 337 ++---
 .../savepoint/_metadata| Bin 0 -> 29761 bytes
 ...aggregate-cumulate-event-time-with-offset.json} | 301 ++--
 .../savepoint/_metadata| Bin 0 -> 23134 bytes
 .../window-aggregate-cumulate-event-time.json} | 299 ++--
 .../savepoint/_metadata| Bin 0 -> 23134 bytes
 ...e-hop-event-time-two-phase-distinct-split.json} | 343 ++---
 .../savepoint/_metadata| Bin 0 -> 30037 bytes
 ...time-two-phase-with-offset-distinct-split.json} | 347 +++--
 .../savepoint/_metadata| Bin 0 -> 30031 bytes
 ...gate-hop-event-time-two-phase-with-offset.json} | 308 +---
 .../savepoint/_metadata| Bin 0 -> 23049 bytes
 ...window-aggregate-hop-event-time-two-phase.json} | 306 
 .../savepoint/_metadata| Bin 0 -> 23045 bytes
 ...ndow-aggregate-hop-event-time-with-offset.json} | 308 ++--
 .../savepoint/_metadata| Bin 0 -> 22998 bytes
 .../plan/window-aggregate-hop-event-time.json} | 305 ++--
 .../savepoint/_metadata| Bin 0 -> 22994 bytes
 ...ssion-event-time-two-phase-distinct-split.json} | 363 +++---
 .../savepoint/_metadata| Bin 0 -> 31910 bytes
 ...ow-aggregate-session-event-time-two-phase.json} | 411 
 .../savepoint/_metadata| Bin 0 -> 31476 bytes
 .../plan/window-aggregate-session-event-time.json} | 365 +++---
 .../savepoint/_metadata| Bin 0 -> 31472 bytes
 ...ition-event-time-two-phase-distinct-split.json} | 266 +-
 .../savepoint/_metadata| Bin 0 -> 23641 bytes
 ...te-session-partition-event-time-two-phase.json} | 305 ++--
 .../savepoint/_metadata| Bin 0 -> 23215 bytes
 ...ow-aggregate-session-partition-event-time.json} | 265 +-
 .../savepoint/_metadata| Bin 0 -> 23219 bytes
 ...umble-event-time-two-phase-distinct-split.json} | 337 ++---
 .../savepoint/_metadata| Bin 0 -> 29223 bytes
 ...time-two-phase-with-offset-distinct-split.json} | 341 ++---
 .../savepoint/_metadata| Bin 0 -> 29223 bytes
 ...e-tumble-event-time-two-phase-with-offset.json} | 300 
 .../savepoint/_metadata| Bin 0 -> 22637 bytes
 ...dow-aggregate-tumble-event-time-two-phase.json} | 296 
 .../savepoint/_metadata 

(flink) branch master updated: [FLINK-35021] AggregateQueryOperations produces wrong asSerializableString representation (#24624)

2024-04-05 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 77215aaf6ca [FLINK-35021] AggregateQueryOperations produces wrong 
asSerializableString representation (#24624)
77215aaf6ca is described below

commit 77215aaf6ca7ccbff7bd3752e59068ac9956d549
Author: Dawid Wysakowicz 
AuthorDate: Fri Apr 5 14:17:52 2024 +0200

[FLINK-35021] AggregateQueryOperations produces wrong asSerializableString 
representation (#24624)
---
 .../table/operations/AggregateQueryOperation.java  | 17 +++---
 .../table/api/QueryOperationSqlExecutionTest.java  |  1 +
 .../api/QueryOperationSqlSerializationTest.java|  3 ++-
 .../table/api/QueryOperationTestPrograms.java  | 27 ++
 4 files changed, 44 insertions(+), 4 deletions(-)

diff --git 
a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/AggregateQueryOperation.java
 
b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/AggregateQueryOperation.java
index 22baeffef6d..c1e6d3a5479 100644
--- 
a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/AggregateQueryOperation.java
+++ 
b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/AggregateQueryOperation.java
@@ -76,15 +76,26 @@ public class AggregateQueryOperation implements 
QueryOperation {
 
 @Override
 public String asSerializableString() {
+final String groupingExprs = getGroupingExprs();
 return String.format(
 "SELECT %s FROM (%s\n)\nGROUP BY %s",
 Stream.concat(groupingExpressions.stream(), 
aggregateExpressions.stream())
 .map(ResolvedExpression::asSerializableString)
 .collect(Collectors.joining(", ")),
 OperationUtils.indent(child.asSerializableString()),
-groupingExpressions.stream()
-.map(ResolvedExpression::asSerializableString)
-.collect(Collectors.joining(", ")));
+groupingExprs);
+}
+
+private String getGroupingExprs() {
+if (groupingExpressions.isEmpty()) {
+return "1";
+} else {
+final String groupingExprs =
+groupingExpressions.stream()
+.map(ResolvedExpression::asSerializableString)
+.collect(Collectors.joining(", "));
+return groupingExprs;
+}
 }
 
 @Override
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlExecutionTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlExecutionTest.java
index f686ba1f283..300120eb644 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlExecutionTest.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlExecutionTest.java
@@ -60,6 +60,7 @@ public class QueryOperationSqlExecutionTest implements 
TableTestProgramRunner {
 QueryOperationTestPrograms.VALUES_QUERY_OPERATION,
 QueryOperationTestPrograms.FILTER_QUERY_OPERATION,
 QueryOperationTestPrograms.AGGREGATE_QUERY_OPERATION,
+
QueryOperationTestPrograms.AGGREGATE_NO_GROUP_BY_QUERY_OPERATION,
 QueryOperationTestPrograms.DISTINCT_QUERY_OPERATION,
 QueryOperationTestPrograms.JOIN_QUERY_OPERATION,
 QueryOperationTestPrograms.ORDER_BY_QUERY_OPERATION,
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlSerializationTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlSerializationTest.java
index 1c9251608a3..6acaf2c7a1d 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlSerializationTest.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlSerializationTest.java
@@ -53,6 +53,7 @@ public class QueryOperationSqlSerializationTest implements 
TableTestProgramRunne
 QueryOperationTestPrograms.VALUES_QUERY_OPERATION,
 QueryOperationTestPrograms.FILTER_QUERY_OPERATION,
 QueryOperationTestPrograms.AGGREGATE_QUERY_OPERATION,
+
QueryOperationTestPrograms.AGGREGATE_NO_GROUP_BY_QUERY_OPERATION,
 QueryOperationTestPrograms.DISTINCT_QUERY_OPERATION,
 QueryOperationTestPrograms.JOIN_QUERY_OPERATION,
 QueryOperationTestPrograms.ORDER_BY_QUERY_OPERATION,
@@ -114,7 +115,7 

(flink) branch master updated (1668a072769 -> bf60c881359)

2024-03-28 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 1668a072769 [FLINK-34933][test] Fixes 
JobMasterServiceLeadershipRunnerTest#testResultFutureCompletionOfOutdatedLeaderIsIgnored
 add b3334d1527a [FLINK-33805] Implement restore tests for OverAggregate 
node
 add bf60c881359 [FLINK-33805] Deleting OverAggregateJsonPlanTest.java and 
OverAggregateJsonPlanITCase.java

No new revisions were added by this update.

Summary of changes:
 .../exec/stream/OverAggregateJsonPlanTest.java | 200 -
 .../exec/stream/OverAggregateRestoreTest.java  |  42 ++
 .../exec/stream/OverAggregateTestPrograms.java | 235 +++
 .../jsonplan/OverAggregateJsonPlanITCase.java  | 202 -
 .../testProcTimeBoundedNonPartitionedRangeOver.out | 456 
 ...undedPartitionedRowsOverWithBuiltinProctime.out | 449 
 .../testRowTimeBoundedPartitionedRowsOver.out  | 317 --
 ...er-aggregate-bounded-non-partitioned-rows.json} | 465 -
 .../savepoint/_metadata| Bin 0 -> 27838 bytes
 ...regate-bounded-partitioned-preceding-rows.json} | 455 
 .../savepoint/_metadata| Bin 0 -> 28142 bytes
 .../over-aggregate-bounded-partitioned-rows.json}  | 425 ++-
 .../savepoint/_metadata| Bin 0 -> 27838 bytes
 ...over-aggregate-unbounded-partitioned-rows.json} | 447 
 .../savepoint/_metadata| Bin 0 -> 25506 bytes
 15 files changed, 1299 insertions(+), 2394 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/OverAggregateJsonPlanTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/OverAggregateRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/OverAggregateTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/OverAggregateJsonPlanITCase.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/OverAggregateJsonPlanTest_jsonplan/testProcTimeBoundedNonPartitionedRangeOver.out
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/OverAggregateJsonPlanTest_jsonplan/testProcTimeBoundedPartitionedRowsOverWithBuiltinProctime.out
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/OverAggregateJsonPlanTest_jsonplan/testRowTimeBoundedPartitionedRowsOver.out
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/OverAggregateJsonPlanTest_jsonplan/testProcTimeBoundedPartitionedRangeOver.out
 => 
restore-tests/stream-exec-over-aggregate_1/over-aggregate-bounded-non-partitioned-rows/plan/over-aggregate-bounded-non-partitioned-rows.json}
 (56%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-over-aggregate_1/over-aggregate-bounded-non-partitioned-rows/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/OverAggregateJsonPlanTest_jsonplan/testProctimeBoundedDistinctPartitionedRowOver.out
 => 
restore-tests/stream-exec-over-aggregate_1/over-aggregate-bounded-partitioned-preceding-rows/plan/over-aggregate-bounded-partitioned-preceding-rows.json}
 (56%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-over-aggregate_1/over-aggregate-bounded-partitioned-preceding-rows/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/OverAggregateJsonPlanTest_jsonplan/testProctimeBoundedDistinctWithNonDistinctPartitionedRowOver.out
 => 
restore-tests/stream-exec-over-aggregate_1/over-aggregate-bounded-partitioned-rows/plan/over-aggregate-bounded-partitioned-rows.json}
 (66%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-over-aggregate_1/over-aggregate-bounded-partitioned-rows/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/OverAggregateJsonPlanTest_jsonplan/testProcTimeUnboundedPartitionedRangeOver.out
 => 
restore-tests/stream-exec-over-aggregate_1/over-aggregate-unbounded-partitioned-rows/plan/over-aggregate-unbounded-partitioned-rows.json}
 (56%)
 create mode 100644 

(flink) branch master updated: [FLINK-34938] Fix incorrect behaviour for comparison functions (#24566)

2024-03-27 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 7ea5bcce6a5 [FLINK-34938] Fix incorrect behaviour for comparison 
functions (#24566)
7ea5bcce6a5 is described below

commit 7ea5bcce6a58b69543b571e9746d7374ded028c5
Author: Dawid Wysakowicz 
AuthorDate: Wed Mar 27 09:38:45 2024 +0100

[FLINK-34938] Fix incorrect behaviour for comparison functions (#24566)
---
 .../calcite/sql/type/SqlTypeFactoryImpl.java   | 19 -
 .../planner/codegen/calls/ScalarOperatorGens.scala | 55 +++--
 .../functions/ComparisonFunctionITCase.java| 93 ++
 3 files changed, 159 insertions(+), 8 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/type/SqlTypeFactoryImpl.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/type/SqlTypeFactoryImpl.java
index a55c40475e2..eccac441204 100644
--- 
a/flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/type/SqlTypeFactoryImpl.java
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/type/SqlTypeFactoryImpl.java
@@ -38,7 +38,8 @@ import static java.util.Objects.requireNonNull;
  * FLINK modifications are at lines
  *
  * 
- *   Should be removed after fix of FLINK-31350: Lines 541 ~ 553.
+ *   Should be removed after fixing CALCITE-6342: Lines 475-485
+ *   Should be removed after fix of FLINK-31350: Lines 552 ~ 564.
  * 
  */
 public class SqlTypeFactoryImpl extends RelDataTypeFactoryImpl {
@@ -470,9 +471,21 @@ public class SqlTypeFactoryImpl extends 
RelDataTypeFactoryImpl {
 resultType, nullCount > 0 || nullableCount > 
0);
 }
 }
+
+// FLINK MODIFICATION BEGIN
+// in case we compare TIME(STAMP) and TIME(STAMP)_LTZ we 
should adjust the precision
+// as well
+if (type.getSqlTypeName().getFamily() == 
resultType.getSqlTypeName().getFamily()
+&& type.getSqlTypeName().allowsPrec()
+&& type.getPrecision() != resultType.getPrecision()) {
+final int precision =
+SqlTypeUtil.maxPrecision(
+resultType.getPrecision(), 
type.getPrecision());
+
+resultType = createSqlType(type.getSqlTypeName(), 
precision);
+}
+// FLINK MODIFICATION END
 } else {
-// TODO:  datetime precision details; for now we let
-// leastRestrictiveByCast handle it
 return null;
 }
 }
diff --git 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/ScalarOperatorGens.scala
 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/ScalarOperatorGens.scala
index 3ee1ea8b96a..69081918fd2 100644
--- 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/ScalarOperatorGens.scala
+++ 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/ScalarOperatorGens.scala
@@ -37,6 +37,7 @@ import 
org.apache.flink.table.runtime.typeutils.TypeCheckUtils._
 import org.apache.flink.table.types.logical._
 import org.apache.flink.table.types.logical.LogicalTypeFamily.DATETIME
 import org.apache.flink.table.types.logical.LogicalTypeRoot._
+import org.apache.flink.table.types.logical.utils.LogicalTypeChecks
 import 
org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getFieldTypes
 import 
org.apache.flink.table.types.logical.utils.LogicalTypeMerging.findCommonType
 import org.apache.flink.table.utils.DateTimeUtils.MILLIS_PER_DAY
@@ -384,6 +385,13 @@ object ScalarOperatorGens {
 else if (isNumeric(left.resultType) && isNumeric(right.resultType)) {
   generateComparison(ctx, operator, left, right, resultType)
 }
+// both sides are timestamp family (timestamp or timestamp_ltz)
+else if (
+  left.resultType.is(LogicalTypeFamily.TIMESTAMP) && right.resultType.is(
+LogicalTypeFamily.TIMESTAMP)
+) {
+  generateComparison(ctx, operator, left, right, resultType)
+}
 // array types
 else if (isArray(left.resultType) && canEqual) {
   wrapExpressionIfNonEq(
@@ -456,11 +464,7 @@ object ScalarOperatorGens {
   generateEqualAndNonEqual(
 ctx,
 newLeft,
-if (newRight.literal) {
-  generateCastLiteral(ctx, newRight, newLeft.resultType)
-} else {
-  generateCast(ctx, newRight, newLeft.resultType, nullOnFailure = true)
-},
+generateCastOrCastLiteral(ctx, newRight, newLeft.resultType),
 op

(flink) branch master updated: [FLINK-34910] Fix optimizing window join (#24549)

2024-03-21 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 709bf93534f [FLINK-34910] Fix optimizing window join (#24549)
709bf93534f is described below

commit 709bf93534fcdfd2b4452667af450f1748bf1ccc
Author: Dawid Wysakowicz 
AuthorDate: Thu Mar 21 17:04:22 2024 +0100

[FLINK-34910] Fix optimizing window join (#24549)
---
 .../JoinTableFunctionScanToCorrelateRule.java  | 12 ++
 .../plan/stream/sql/join/WindowJoinTest.xml| 44 ++
 .../plan/stream/sql/join/WindowJoinTest.scala  | 14 +++
 3 files changed, 70 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/JoinTableFunctionScanToCorrelateRule.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/JoinTableFunctionScanToCorrelateRule.java
index b14054906c1..bd5b783b200 100644
--- 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/JoinTableFunctionScanToCorrelateRule.java
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/JoinTableFunctionScanToCorrelateRule.java
@@ -18,16 +18,22 @@
 
 package org.apache.flink.table.planner.plan.rules.logical;
 
+import 
org.apache.flink.table.planner.plan.rules.physical.stream.StreamPhysicalConstantTableFunctionScanRule;
+
 import org.apache.calcite.plan.RelOptRuleCall;
 import org.apache.calcite.plan.RelRule;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.logical.LogicalJoin;
 import org.apache.calcite.rel.logical.LogicalTableFunctionScan;
+import org.apache.calcite.rex.RexUtil;
 import org.immutables.value.Value;
 
 /**
  * Rule that rewrites {@link org.apache.calcite.rel.core.Join} on {@link
  * org.apache.calcite.rel.core.TableFunctionScan} to {@link 
org.apache.calcite.rel.core.Correlate}.
+ *
+ * Note: The rule was implemented so that we can apply {@link
+ * StreamPhysicalConstantTableFunctionScanRule} later.
  */
 @Value.Enclosing
 public class JoinTableFunctionScanToCorrelateRule
@@ -68,6 +74,12 @@ public class JoinTableFunctionScanToCorrelateRule
 b2.operand(

 LogicalTableFunctionScan

 .class)
+
.predicate(
+   
 scan ->
+   
 !RexUtil
+   
 .containsInputRef(
+   
 scan
+   
 .getCall()))
 
.noInputs()))
 .description("JoinTableFunctionScanToCorrelateRule")
 .build();
diff --git 
a/flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/stream/sql/join/WindowJoinTest.xml
 
b/flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/stream/sql/join/WindowJoinTest.xml
index 12cb68df0ee..a0a733641c3 100644
--- 
a/flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/stream/sql/join/WindowJoinTest.xml
+++ 
b/flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/stream/sql/join/WindowJoinTest.xml
@@ -2009,6 +2009,50 @@ Calc(select=[a, b, c, rowtime, 
PROCTIME_MATERIALIZE(proctime) AS proctime, windo
 ]]>
 
   
+   
+   
+   
+   
+   
+   
+   
+   
+   
+   
+   
   
 
   

(flink) branch master updated (3f4a80989fe -> a9cde49118b)

2024-03-21 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 3f4a80989fe [FLINK-34725][dist] Fix wrong config file dir when 
`config-parser-utils.sh` script attempt to retrieve the Java Home.
 add a9cde49118b [Flink 32701] [cep] Fix CEP Operator Memory Leak Issue  
(#24084)

No new revisions were added by this update.

Summary of changes:
 .../java/org/apache/flink/cep/nfa/sharedbuffer/SharedBuffer.java  | 5 +
 .../src/main/java/org/apache/flink/cep/operator/CepOperator.java  | 5 +
 .../test/java/org/apache/flink/cep/nfa/NFAStateAccessTest.java| 8 
 3 files changed, 14 insertions(+), 4 deletions(-)



(flink) branch master updated: [FLINK-34745] Improve validations for a period in Time Travel (#24534)

2024-03-20 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 4142c4386a9 [FLINK-34745] Improve validations for a period in Time 
Travel (#24534)
4142c4386a9 is described below

commit 4142c4386a92f1ec5016583f4832f8869782765e
Author: Dawid Wysakowicz 
AuthorDate: Wed Mar 20 15:14:52 2024 +0100

[FLINK-34745] Improve validations for a period in Time Travel (#24534)
---
 .../planner/calcite/FlinkCalciteSqlValidator.java  | 22 ++
 .../plan/stream/sql/join/TemporalJoinTest.scala| 21 +
 2 files changed, 39 insertions(+), 4 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/calcite/FlinkCalciteSqlValidator.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/calcite/FlinkCalciteSqlValidator.java
index f091ab3e70a..4eb652dde60 100644
--- 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/calcite/FlinkCalciteSqlValidator.java
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/calcite/FlinkCalciteSqlValidator.java
@@ -59,6 +59,7 @@ import org.apache.calcite.sql.SqlTableFunction;
 import org.apache.calcite.sql.SqlUtil;
 import org.apache.calcite.sql.SqlWindowTableFunction;
 import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.SqlTypeUtil;
 import org.apache.calcite.sql.validate.DelegatingScope;
 import org.apache.calcite.sql.validate.IdentifierNamespace;
 import org.apache.calcite.sql.validate.IdentifierSnapshotNamespace;
@@ -203,7 +204,7 @@ public final class FlinkCalciteSqlValidator extends 
SqlValidatorImpl {
 Optional snapshot = getSnapShotNode(ns);
 if (usingScope != null
 && snapshot.isPresent()
-&& !(snapshot.get().getPeriod() instanceof SqlIdentifier)) {
+&& !(hasInputReference(snapshot.get().getPeriod( {
 SqlSnapshot sqlSnapshot = snapshot.get();
 SqlNode periodNode = sqlSnapshot.getPeriod();
 SqlToRelConverter sqlToRelConverter = 
this.createSqlToRelConverter();
@@ -222,14 +223,23 @@ public final class FlinkCalciteSqlValidator extends 
SqlValidatorImpl {
 Collections.singletonList(simplifiedRexNode),
 reducedNodes);
 // check whether period is the unsupported expression
-if (!(reducedNodes.get(0) instanceof RexLiteral)) {
-throw new UnsupportedOperationException(
+final RexNode reducedNode = reducedNodes.get(0);
+if (!(reducedNode instanceof RexLiteral)) {
+throw new ValidationException(
 String.format(
 "Unsupported time travel expression: %s for 
the expression can not be reduced to a constant by Flink.",
 periodNode));
 }
 
-RexLiteral rexLiteral = (RexLiteral) (reducedNodes).get(0);
+RexLiteral rexLiteral = (RexLiteral) reducedNode;
+final RelDataType sqlType = rexLiteral.getType();
+if (!SqlTypeUtil.isTimestamp(sqlType)) {
+throw newValidationError(
+periodNode,
+Static.RESOURCE.illegalExpressionForTemporal(
+sqlType.getSqlTypeName().getName()));
+}
+
 TimestampString timestampString = 
rexLiteral.getValueAs(TimestampString.class);
 checkNotNull(
 timestampString,
@@ -264,6 +274,10 @@ public final class FlinkCalciteSqlValidator extends 
SqlValidatorImpl {
 super.registerNamespace(usingScope, alias, ns, forceNullable);
 }
 
+private static boolean hasInputReference(SqlNode node) {
+return node.accept(new SqlToRelConverter.SqlIdentifierFinder());
+}
+
 /**
  * Get the {@link SqlSnapshot} node in a {@link SqlValidatorNamespace}.
  *
diff --git 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/join/TemporalJoinTest.scala
 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/join/TemporalJoinTest.scala
index 29d6521540c..351cc8429dd 100644
--- 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/join/TemporalJoinTest.scala
+++ 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/join/TemporalJoinTest.scala
@@ -508,6 +508,27 @@ class TemporalJoinTest extends TableTestBase {
 " table, but the rowtime types are TIMESTAMP_LTZ(3) *ROWTIME* and 
TIMESTAMP(3) *ROWTIME*.",
   classOf[ValidationExce

(flink) branch master updated: [FLINK-31663] Implement ARRAY_EXCEPT function

2024-03-07 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 2429c296a60 [FLINK-31663] Implement ARRAY_EXCEPT function
2429c296a60 is described below

commit 2429c296a60bf0a0e8a4acebc04a059008708d1f
Author: Hanyu Zheng <135176127+hanyuzhe...@users.noreply.github.com>
AuthorDate: Wed Jul 26 23:31:09 2023 -0700

[FLINK-31663] Implement ARRAY_EXCEPT function
---
 docs/data/sql_functions.yml|   5 +-
 .../docs/reference/pyflink.table/expressions.rst   |   1 +
 flink-python/pyflink/table/expression.py   |   9 ++
 .../flink/table/api/internal/BaseExpressions.java  |  14 +++
 .../functions/BuiltInFunctionDefinitions.java  |  10 ++
 .../functions/CollectionFunctionsITCase.java   | 127 -
 .../functions/scalar/ArrayExceptFunction.java  | 103 +
 .../runtime/util/EqualityAndHashcodeProvider.java  |  92 +++
 .../flink/table/runtime/util/ObjectContainer.java  |  65 +++
 9 files changed, 424 insertions(+), 2 deletions(-)

diff --git a/docs/data/sql_functions.yml b/docs/data/sql_functions.yml
index 7f06199ecb0..b7150391606 100644
--- a/docs/data/sql_functions.yml
+++ b/docs/data/sql_functions.yml
@@ -682,7 +682,10 @@ collection:
   - sql: MAP_FROM_ARRAYS(array_of_keys, array_of_values)
 table: mapFromArrays(array_of_keys, array_of_values)
 description: Returns a map created from an arrays of keys and values. Note 
that the lengths of two arrays should be the same.
-
+  - sql: ARRAY_EXCEPT(array1, array2)
+table: arrayOne.arrayExcept(arrayTwo)
+description: Returns an ARRAY that contains the elements from array1 that 
are not in array2. If no elements remain after excluding the elements in array2 
from array1, the function returns an empty ARRAY. If one or both arguments are 
NULL, the function returns NULL. The order of the elements from array1 is kept.
+
 json:
   - sql: IS JSON [ { VALUE | SCALAR | ARRAY | OBJECT } ]
 table: STRING.isJson([JsonType type])
diff --git a/flink-python/docs/reference/pyflink.table/expressions.rst 
b/flink-python/docs/reference/pyflink.table/expressions.rst
index 3d3cc1ad326..dbc69682a3b 100644
--- a/flink-python/docs/reference/pyflink.table/expressions.rst
+++ b/flink-python/docs/reference/pyflink.table/expressions.rst
@@ -241,6 +241,7 @@ advanced type helper functions
 Expression.map_entries
 Expression.map_keys
 Expression.map_values
+Expression.array_except
 
 
 time definition functions
diff --git a/flink-python/pyflink/table/expression.py 
b/flink-python/pyflink/table/expression.py
index c3b8f1b6591..8c892b52e35 100644
--- a/flink-python/pyflink/table/expression.py
+++ b/flink-python/pyflink/table/expression.py
@@ -1609,6 +1609,15 @@ class Expression(Generic[T]):
 """
 return _unary_op("arrayMin")(self)
 
+def array_except(self, array) -> 'Expression':
+"""
+Returns an ARRAY that contains the elements from array1 that are not 
in array2.
+If no elements remain after excluding the elements in array2 from 
array1,
+the function returns an empty ARRAY. If one or both arguments are NULL,
+the function returns NULL. The order of the elements from array1 is 
kept.
+"""
+return _binary_op("arrayExcept")(self, array)
+
 @property
 def map_keys(self) -> 'Expression':
 """
diff --git 
a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
 
b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
index 8e022179277..090274d89a5 100644
--- 
a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
+++ 
b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
@@ -59,6 +59,7 @@ import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_CONTAINS;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_DISTINCT;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_ELEMENT;
+import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_EXCEPT;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_MAX;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_MIN;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_POSITION;
@@ -230,6 +231,19 @@ public abstract class BaseExpressions {
 .toArray(Expression[

(flink) branch master updated (9b1375520b6 -> 5cc49c4f944)

2024-03-07 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 9b1375520b6 Modify obvious errors in the doc.
 add 440fa5757ea [FLINK-34268] Add a test to verify if restore test exists 
for ExecNode
 add 5cc49c4f944 [FLINK-34095] Move async calc restore test

No new revisions were added by this update.

Summary of changes:
 .../planner/plan/utils/ExecNodeMetadataUtil.java   |   6 +-
 .../nodes/exec/stream/AsyncCalcRestoreTest.java|   2 +-
 .../exec/stream/ChangelogNormalizeRestoreTest.java |   5 +-
 .../exec/stream/GroupAggregateRestoreTest.java |   3 +-
 .../IncrementalGroupAggregateRestoreTest.java  |   5 +-
 .../plan/nodes/exec/testutils/RestoreTestBase.java |  28 -
 .../exec/testutils/RestoreTestCompleteness.java| 138 +
 .../plan/async-calc-complex.json   |   0
 .../async-calc-complex/savepoint/_metadata | Bin
 .../plan/async-calc-condition.json |   0
 .../async-calc-condition/savepoint/_metadata   | Bin
 .../plan/async-calc-failure-exception.json |   0
 .../savepoint/_metadata| Bin
 .../async-calc-nested/plan/async-calc-nested.json  |   0
 .../async-calc-nested/savepoint/_metadata  | Bin
 .../async-calc-simple/plan/async-calc-simple.json  |   0
 .../async-calc-simple/savepoint/_metadata  | Bin
 17 files changed, 180 insertions(+), 7 deletions(-)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/RestoreTestCompleteness.java
 rename 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-calc_1
 => stream-exec-async-calc_1}/async-calc-complex/plan/async-calc-complex.json 
(100%)
 rename 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-calc_1
 => stream-exec-async-calc_1}/async-calc-complex/savepoint/_metadata (100%)
 rename 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-calc_1
 => 
stream-exec-async-calc_1}/async-calc-condition/plan/async-calc-condition.json 
(100%)
 rename 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-calc_1
 => stream-exec-async-calc_1}/async-calc-condition/savepoint/_metadata (100%)
 rename 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-calc_1
 => 
stream-exec-async-calc_1}/async-calc-failure-exception/plan/async-calc-failure-exception.json
 (100%)
 rename 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-calc_1
 => stream-exec-async-calc_1}/async-calc-failure-exception/savepoint/_metadata 
(100%)
 rename 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-calc_1
 => stream-exec-async-calc_1}/async-calc-nested/plan/async-calc-nested.json 
(100%)
 rename 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-calc_1
 => stream-exec-async-calc_1}/async-calc-nested/savepoint/_metadata (100%)
 rename 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-calc_1
 => stream-exec-async-calc_1}/async-calc-simple/plan/async-calc-simple.json 
(100%)
 rename 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-calc_1
 => stream-exec-async-calc_1}/async-calc-simple/savepoint/_metadata (100%)



(flink) branch master updated: [FLINK-34493][table] Migrate ReplaceMinusWithAntiJoinRule to java.

2024-03-06 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 6a12668bcfe [FLINK-34493][table] Migrate ReplaceMinusWithAntiJoinRule 
to java.
6a12668bcfe is described below

commit 6a12668bcfe651fa938517eb2da4d537ce6ce668
Author: liuyongvs 
AuthorDate: Fri Mar 1 16:08:52 2024 +0800

[FLINK-34493][table] Migrate ReplaceMinusWithAntiJoinRule to java.
---
 .../logical/ReplaceMinusWithAntiJoinRule.java  | 95 ++
 .../logical/ReplaceMinusWithAntiJoinRule.scala | 65 ---
 2 files changed, 95 insertions(+), 65 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/ReplaceMinusWithAntiJoinRule.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/ReplaceMinusWithAntiJoinRule.java
new file mode 100644
index 000..35c719e3846
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/ReplaceMinusWithAntiJoinRule.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.rules.logical;
+
+import org.apache.calcite.plan.RelOptRuleCall;
+import org.apache.calcite.plan.RelRule;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.core.JoinRelType;
+import org.apache.calcite.rel.core.Minus;
+import org.apache.calcite.rel.core.RelFactories;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.tools.RelBuilder;
+import org.apache.calcite.util.Util;
+import org.immutables.value.Value;
+
+import java.util.List;
+
+import static 
org.apache.flink.table.planner.plan.utils.SetOpRewriteUtil.generateEqualsCondition;
+
+/**
+ * Planner rule that replaces distinct {@link 
org.apache.calcite.rel.core.Minus} (SQL keyword:
+ * EXCEPT) with a distinct {@link org.apache.calcite.rel.core.Aggregate} on an 
ANTI {@link
+ * org.apache.calcite.rel.core.Join}.
+ *
+ * Only handle the case of input size 2.
+ */
+@Value.Enclosing
+public class ReplaceMinusWithAntiJoinRule
+extends 
RelRule {
+
+public static final ReplaceMinusWithAntiJoinRule INSTANCE =
+
ReplaceMinusWithAntiJoinRule.ReplaceMinusWithAntiJoinRuleConfig.DEFAULT.toRule();
+
+private ReplaceMinusWithAntiJoinRule(ReplaceMinusWithAntiJoinRuleConfig 
config) {
+super(config);
+}
+
+@Override
+public boolean matches(RelOptRuleCall call) {
+Minus minus = call.rel(0);
+return !minus.all && minus.getInputs().size() == 2;
+}
+
+@Override
+public void onMatch(RelOptRuleCall call) {
+Minus minus = call.rel(0);
+RelNode left = minus.getInput(0);
+RelNode right = minus.getInput(1);
+
+RelBuilder relBuilder = call.builder();
+List keys = Util.range(left.getRowType().getFieldCount());
+List conditions = generateEqualsCondition(relBuilder, left, 
right, keys);
+
+relBuilder.push(left);
+relBuilder.push(right);
+relBuilder
+.join(JoinRelType.ANTI, conditions)
+.aggregate(
+
relBuilder.groupKey(keys.stream().mapToInt(Integer::intValue).toArray()));
+RelNode rel = relBuilder.build();
+call.transformTo(rel);
+}
+
+/** Rule configuration. */
+@Value.Immutable(singleton = false)
+public interface ReplaceMinusWithAntiJoinRuleConfig extends RelRule.Config 
{
+ReplaceMinusWithAntiJoinRule.ReplaceMinusWithAntiJoinRuleConfig 
DEFAULT =
+
ImmutableReplaceMinusWithAntiJoinRule.ReplaceMinusWithAntiJoinRuleConfig.builder()
+.build()
+.withOperandSupplier(b0 -> 
b0.operand(Minus.class).anyInputs())
+.withRelBuilderFactory(RelFactories.LOGICAL_BUILDER)
+.withDescription("ReplaceMinusWithAntiJoinRule");
+
+@Override
+default ReplaceMinusWithAntiJo

(flink) 02/02: [FLINK-34118] Remove Sort Json Plan tests

2024-02-23 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit faacf7e28bd9a43723303d0bd4a6ee9adebcb5bb
Author: bvarghese1 
AuthorDate: Tue Jan 16 17:58:33 2024 -0800

[FLINK-34118] Remove Sort Json Plan tests

- Covered with restore tests
---
 .../plan/nodes/exec/stream/SortJsonPlanTest.java   |  66 
 .../stream/SortJsonPlanTest_jsonplan/testSort.out  | 172 -
 2 files changed, 238 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortJsonPlanTest.java
deleted file mode 100644
index 91c62f9fe33..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortJsonPlanTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-/** Test json serialization for sort limit. */
-class SortJsonPlanTest extends TableTestBase {
-
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-
-String srcTableDdl =
-"CREATE TABLE MyTable (\n"
-+ "  a bigint,\n"
-+ "  b int not null,\n"
-+ "  c varchar,\n"
-+ "  d timestamp(3)\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'bounded' = 'false')";
-tEnv.executeSql(srcTableDdl);
-}
-
-@Test
-void testSort() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a bigint,\n"
-+ "  b bigint\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'sink-insert-only' = 'false',\n"
-+ "  'table-sink-class' = 'DEFAULT')";
-tEnv.executeSql(sinkTableDdl);
-String sql = "insert into MySink SELECT a, a from MyTable order by b";
-util.verifyJsonPlan(sql);
-}
-}
diff --git 
a/flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/SortJsonPlanTest_jsonplan/testSort.out
 
b/flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/SortJsonPlanTest_jsonplan/testSort.out
deleted file mode 100644
index c73bfeead3f..000
--- 
a/flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/SortJsonPlanTest_jsonplan/testSort.out
+++ /dev/null
@@ -1,172 +0,0 @@
-{
-  "flinkVersion" : "",
-  "nodes" : [ {
-"id" : 1,
-"type" : "stream-exec-table-source-scan_1",
-"scanTableSource" : {
-  "table" : {
-"identifier" : "`default_catalog`.`default_database`.`MyTable`",
-"resolvedTable" : {
-  "schema" : {
-"columns" : [ {
-  "name" : "a",
-  "dataType" : "BIGINT"
-}, {
-  "name" : "b",
-  "dataType" : "INT NOT NUL

(flink) branch master updated (6c8f3a0799c -> faacf7e28bd)

2024-02-23 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 6c8f3a0799c [FLINK-34496] Remove unused method
 new fe3d9a42995 [FLINK-34118] Implement restore tests for Sort node
 new faacf7e28bd [FLINK-34118] Remove Sort Json Plan tests

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../plan/nodes/exec/stream/SortJsonPlanTest.java   |  66 
 ...{LimitRestoreTest.java => SortRestoreTest.java} |  10 +-
 .../plan/nodes/exec/stream/SortTestPrograms.java   |  48 +-
 .../plan/nodes/exec/testutils/RestoreTestBase.java |   9 +-
 .../stream/SortJsonPlanTest_jsonplan/testSort.out  | 172 -
 .../sort-asc/plan/sort-asc.json}   |  28 +---
 .../sort-desc/plan/sort-desc.json} |  28 +---
 7 files changed, 66 insertions(+), 295 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortJsonPlanTest.java
 copy 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/{LimitRestoreTest.java
 => SortRestoreTest.java} (79%)
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/SortJsonPlanTest_jsonplan/testSort.out
 copy 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-sort-limit_1/sort-limit-asc/plan/sort-limit-asc.json
 => stream-exec-sort_1/sort-asc/plan/sort-asc.json} (85%)
 copy 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-sort-limit_1/sort-limit-desc/plan/sort-limit-desc.json
 => stream-exec-sort_1/sort-desc/plan/sort-desc.json} (85%)



(flink) 01/02: [FLINK-34118] Implement restore tests for Sort node

2024-02-23 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit fe3d9a42995cfee0dfd90e8031768cb130543189
Author: bvarghese1 
AuthorDate: Tue Jan 16 17:54:46 2024 -0800

[FLINK-34118] Implement restore tests for Sort node
---
 .../plan/nodes/exec/stream/SortRestoreTest.java|  38 +
 .../plan/nodes/exec/stream/SortTestPrograms.java   |  48 +-
 .../plan/nodes/exec/testutils/RestoreTestBase.java |   9 +-
 .../stream-exec-sort_1/sort-asc/plan/sort-asc.json | 164 +
 .../sort-desc/plan/sort-desc.json  | 164 +
 5 files changed, 421 insertions(+), 2 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortRestoreTest.java
new file mode 100644
index 000..18e9792f9ed
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortRestoreTest.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecSort}. */
+public class SortRestoreTest extends RestoreTestBase {
+
+public SortRestoreTest() {
+super(StreamExecSort.class, AfterRestoreSource.NO_RESTORE);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(SortTestPrograms.SORT_ASC, 
SortTestPrograms.SORT_DESC);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortTestPrograms.java
index 0a6f68d4e76..2959e2e6a0e 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortTestPrograms.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/SortTestPrograms.java
@@ -18,6 +18,7 @@
 
 package org.apache.flink.table.planner.plan.nodes.exec.stream;
 
+import org.apache.flink.table.planner.utils.InternalConfigOptions;
 import org.apache.flink.table.test.program.SinkTestStep;
 import org.apache.flink.table.test.program.SourceTestStep;
 import org.apache.flink.table.test.program.TableTestProgram;
@@ -25,7 +26,8 @@ import org.apache.flink.types.Row;
 
 /**
  * {@link TableTestProgram} definitions for testing {@link
- * org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSortLimit}.
+ * org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSortLimit} 
and {@link
+ * org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSort}.
  */
 public class SortTestPrograms {
 
@@ -123,4 +125,48 @@ public class SortTestPrograms {
 .build())
 .runSql("INSERT INTO sink_t SELECT * from source_t ORDER 
BY a DESC LIMIT 3")
 .build();
+
+static final TableTestProgram SORT_ASC =
+TableTestProgram.of("sort-asc", "validates sort node by sorting 
integers in asc mode")
+
.setupConfig(InternalConfigOptions.TABLE_EXEC_NON_TEMPORAL_SORT_ENABLED, true)
+.setupTableSource(
+SourceTestStep.newBuilder("source_t")
+.addSchema("a INT", "b VARCHAR", "c INT")
+.producedValues(DATA)
+.build())
+.setupTableSink(
+SinkTestStep.newBuilder("sink_t")
+  

(flink) branch master updated (80090c76f88 -> aaaea64d60d)

2024-02-22 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 80090c76f88 [FLINK-34455] Move RestoreMode from flink-runtime to 
flink-core (#24320)
 add 263e7bf690e [FLINK-33517] Implement restore tests for Values node
 add aaaea64d60d [FLINK-33517] Remove Values Json Plan & Json IT tests

No new revisions were added by this update.

Summary of changes:
 .../plan/nodes/exec/stream/ValuesJsonPlanTest.java |  56 -
 ...pandRestoreTest.java => ValuesRestoreTest.java} |  10 +-
 ...alcRestoreTest.java => ValuesTestPrograms.java} |  34 ++---
 .../plan/nodes/exec/testutils/RestoreTestBase.java |  21 ++--
 .../stream/jsonplan/ValuesJsonPlanITCase.java  |  42 ---
 .../ValuesJsonPlanTest_jsonplan/testValues.out | 140 -
 .../values-test/plan/values-test.json} |  76 ++-
 7 files changed, 68 insertions(+), 311 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ValuesJsonPlanTest.java
 copy 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/{ExpandRestoreTest.java
 => ValuesRestoreTest.java} (80%)
 copy 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/{CalcRestoreTest.java
 => ValuesTestPrograms.java} (55%)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/ValuesJsonPlanITCase.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/ValuesJsonPlanTest_jsonplan/testValues.out
 copy 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-sink_1/sink-bucketing_with-count/plan/sink-bucketing_with-count.json
 => stream-exec-values_1/values-test/plan/values-test.json} (56%)



(flink) branch master updated: [FLINK-26948][table] Add-ARRAY_SORT-function.

2024-02-16 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 620e5975985 [FLINK-26948][table] Add-ARRAY_SORT-function.
620e5975985 is described below

commit 620e5975985944a02886b82362a2bc1774c733e3
Author: Hanyu Zheng <135176127+hanyuzhe...@users.noreply.github.com>
AuthorDate: Mon Jul 3 07:59:23 2023 -0700

[FLINK-26948][table] Add-ARRAY_SORT-function.

[FLINK-26948][table] Add-ARRAY_SORT-function.
---
 docs/data/sql_functions.yml|   3 +
 .../docs/reference/pyflink.table/expressions.rst   |   1 +
 flink-python/pyflink/table/expression.py   |  16 +++
 .../functions/BuiltInFunctionDefinitions.java  |  25 +++-
 .../table/types/inference/InputTypeStrategies.java |   5 -
 ...rrayComparableElementArgumentTypeStrategy.java} |  37 ++
 .../strategies/SpecificInputTypeStrategies.java|   5 +
 .../types/inference/InputTypeStrategiesTest.java   |  13 ++
 .../ArrayComparableElementTypeStrategyTest.java|  55 
 .../functions/CollectionFunctionsITCase.java   | 148 -
 .../functions/scalar/ArraySortFunction.java| 124 +
 11 files changed, 345 insertions(+), 87 deletions(-)

diff --git a/docs/data/sql_functions.yml b/docs/data/sql_functions.yml
index 9692db6994b..7ebf715e279 100644
--- a/docs/data/sql_functions.yml
+++ b/docs/data/sql_functions.yml
@@ -646,6 +646,9 @@ collection:
   - sql: ARRAY_SLICE(array, start_offset[, end_offset])
 table: array.arraySlice(start_offset[, end_offset])
 description: Returns a subarray of the input array between 'start_offset' 
and 'end_offset' inclusive. The offsets are 1-based however 0 is also treated 
as the beginning of the array. Positive values are counted from the beginning 
of the array while negative from the end. If 'end_offset' is omitted then this 
offset is treated as the length of the array. If 'start_offset' is after 
'end_offset' or both are out of array bounds an empty array will be returned. 
Returns null if any input is null.
+  - sql: ARRAY_SORT(array[, ascending_order[, null_first]])
+table: array.arraySort([, ascendingOrder[, null_first]])
description: Returns the array in sorted order. The function sorts an 
array, defaulting to ascending order with NULLs at the start when only the 
array is input. Specifying ascending_order as true orders the array in 
ascending with NULLs first, and setting it to false orders it in descending 
with NULLs last. Independently, null_first as true moves NULLs to the 
beginning, and as false to the end, irrespective of the sorting order. The 
function returns null if any input is null.
   - sql: ARRAY_UNION(array1, array2)
 table: haystack.arrayUnion(array)
 description: Returns an array of the elements in the union of array1 and 
array2, without duplicates. If any of the array is null, the function will 
return null.
diff --git a/flink-python/docs/reference/pyflink.table/expressions.rst 
b/flink-python/docs/reference/pyflink.table/expressions.rst
index 5c7ea97df03..3d3cc1ad326 100644
--- a/flink-python/docs/reference/pyflink.table/expressions.rst
+++ b/flink-python/docs/reference/pyflink.table/expressions.rst
@@ -236,6 +236,7 @@ advanced type helper functions
 Expression.array_max
 Expression.array_slice
 Expression.array_min
+Expression.array_sort
 Expression.array_union
 Expression.map_entries
 Expression.map_keys
diff --git a/flink-python/pyflink/table/expression.py 
b/flink-python/pyflink/table/expression.py
index 4272f1724cb..84772c1739e 100644
--- a/flink-python/pyflink/table/expression.py
+++ b/flink-python/pyflink/table/expression.py
@@ -1531,6 +1531,22 @@ class Expression(Generic[T]):
 else:
 return _ternary_op("array_slice")(self, start_offset, end_offset)
 
+def array_sort(self, ascending_order=None, null_first=None) -> 
'Expression':
+"""
+Returns the array in sorted order.
+The function sorts an array, defaulting to ascending order with NULLs 
at the start when
+only the array is input. Specifying ascending_order as true orders the 
array in ascending
+with NULLs first, and setting it to false orders it in descending with 
NULLs last.
+Independently, null_first as true moves NULLs to the beginning, and as 
false to the end,
+irrespective of the sorting order. The function returns null if any 
input is null.
+"""
+if ascending_order and null_first is None:
+return _unary_op("array_sort")(self)
+elif null_first is None:
+return _binary_op("array_sort")(self, ascending_order)
+else:
+return _ternary_op("array_sort")(self, 

(flink) 01/02: [FLINK-34248] Implement restore tests for changelog normalize node

2024-02-15 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit e76ccdc1fd8b15a5aac4968fd89643b0b17e1a48
Author: bvarghese1 
AuthorDate: Fri Jan 26 15:04:19 2024 -0800

[FLINK-34248] Implement restore tests for changelog normalize node
---
 .../exec/stream/ChangelogNormalizeRestoreTest.java |  41 +
 .../stream/ChangelogNormalizeTestPrograms.java | 167 +++
 .../changelog-normalize-source-mini-batch.json | 178 +
 .../savepoint/_metadata| Bin 0 -> 11960 bytes
 .../plan/changelog-normalize-source.json   | 155 ++
 .../changelog-normalize-source/savepoint/_metadata | Bin 0 -> 13487 bytes
 .../plan/changelog-normalize-upsert.json   | 136 
 .../changelog-normalize-upsert/savepoint/_metadata | Bin 0 -> 13436 bytes
 8 files changed, 677 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogNormalizeRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogNormalizeRestoreTest.java
new file mode 100644
index 000..c7e1f5e36ce
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogNormalizeRestoreTest.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecChangelogNormalize}. */
+public class ChangelogNormalizeRestoreTest extends RestoreTestBase {
+
+public ChangelogNormalizeRestoreTest() {
+super(StreamExecChangelogNormalize.class);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(
+ChangelogNormalizeTestPrograms.CHANGELOG_SOURCE,
+ChangelogNormalizeTestPrograms.CHANGELOG_SOURCE_MINI_BATCH,
+ChangelogNormalizeTestPrograms.UPSERT_SOURCE);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogNormalizeTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogNormalizeTestPrograms.java
new file mode 100644
index 000..e4a3cd1825e
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogNormalizeTestPrograms.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import org.apache.flink.table.api.config.ExecutionConfigOptions;
+import org.apache.flink.table.test.program.SinkTestStep;
+import org.apache.flink.table.test.program.SourceTestStep;
+import org.apache.flink.table.test.program.TableTestProgram;
+import org.apache.flink.types.Row;
+import org.apache.flink.types.RowKind;
+
+import java.time.Duration;
+
+/** {@link TableTestProgram} definitions for testing {@link StreamExecChangelogNormalize}. */

(flink) branch master updated (2298e53f351 -> 6e93394b4f2)

2024-02-15 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 2298e53f351 [FLINK-34403][ci] Transforms VeryBigPbRowToProtoTest into 
an integration test
 new e76ccdc1fd8 [FLINK-34248] Implement restore tests for changelog 
normalize node
 new 6e93394b4f2 [FLINK-34248] Remove ChangelogNormalize Json Plan & IT 
tests

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../exec/stream/ChangelogNormalizeRestoreTest.java |  41 +
 .../stream/ChangelogNormalizeTestPrograms.java | 167 +
 .../exec/stream/ChangelogSourceJsonPlanTest.java   |  96 
 .../jsonplan/ChangelogSourceJsonPlanITCase.java| 113 --
 .../changelog-normalize-source-mini-batch.json}| 124 +++
 .../savepoint/_metadata| Bin 0 -> 11960 bytes
 .../plan/changelog-normalize-source.json}  |  71 -
 .../changelog-normalize-source/savepoint/_metadata | Bin 0 -> 13487 bytes
 .../plan/changelog-normalize-upsert.json}  |  89 +--
 .../changelog-normalize-upsert/savepoint/_metadata | Bin 0 -> 13436 bytes
 10 files changed, 333 insertions(+), 368 deletions(-)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogNormalizeRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogNormalizeTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogSourceJsonPlanTest.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/ChangelogSourceJsonPlanITCase.java
 copy 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogSourceJsonPlanTest_jsonplan/testChangelogSource.out
 => 
restore-tests/stream-exec-changelog-normalize_1/changelog-normalize-source-mini-batch/plan/changelog-normalize-source-mini-batch.json}
 (56%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-changelog-normalize_1/changelog-normalize-source-mini-batch/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogSourceJsonPlanTest_jsonplan/testChangelogSource.out
 => 
restore-tests/stream-exec-changelog-normalize_1/changelog-normalize-source/plan/changelog-normalize-source.json}
 (64%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-changelog-normalize_1/changelog-normalize-source/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogSourceJsonPlanTest_jsonplan/testUpsertSource.out
 => 
restore-tests/stream-exec-changelog-normalize_1/changelog-normalize-upsert/plan/changelog-normalize-upsert.json}
 (58%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-changelog-normalize_1/changelog-normalize-upsert/savepoint/_metadata



(flink) 02/02: [FLINK-34248] Remove ChangelogNormalize Json Plan & IT tests

2024-02-15 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 6e93394b4f2c22e5c50858242c17bcbd8fcf45c3
Author: bvarghese1 
AuthorDate: Fri Jan 26 17:44:14 2024 -0800

[FLINK-34248] Remove ChangelogNormalize Json Plan & IT tests
---
 .../exec/stream/ChangelogSourceJsonPlanTest.java   |  96 
 .../jsonplan/ChangelogSourceJsonPlanITCase.java| 113 -
 .../testChangelogSource.out| 174 -
 .../testUpsertSource.out   | 155 --
 4 files changed, 538 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogSourceJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogSourceJsonPlanTest.java
deleted file mode 100644
index 0e35bc11bca..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ChangelogSourceJsonPlanTest.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.api.config.ExecutionConfigOptions;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-/** Test json serialization/deserialization for changelog source, including 
upsert source. */
-class ChangelogSourceJsonPlanTest extends TableTestBase {
-
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-
tEnv.getConfig().set(ExecutionConfigOptions.TABLE_EXEC_SOURCE_CDC_EVENTS_DUPLICATE,
 true);
-}
-
-@Test
-void testChangelogSource() {
-String srcTableDdl =
-"CREATE TABLE MyTable (\n"
-+ "  a bigint,\n"
-+ "  b int not null,\n"
-+ "  c varchar,\n"
-+ "  d timestamp(3),\n"
-+ "  PRIMARY KEY (a, b) NOT ENFORCED\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'changelog-mode' = 'I,UA,UB,D',\n"
-+ "  'bounded' = 'false')";
-tEnv.executeSql(srcTableDdl);
-
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a bigint,\n"
-+ "  b int\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'sink-insert-only' = 'false',\n"
-+ "  'table-sink-class' = 'DEFAULT')";
-tEnv.executeSql(sinkTableDdl);
-util.verifyJsonPlan("insert into MySink select a, b from MyTable");
-}
-
-@Test
-void testUpsertSource() {
-String srcTableDdl =
-"CREATE TABLE MyTable (\n"
-+ "  a bigint,\n"
-+ "  b int not null,\n"
-+ "  c varchar,\n"
-+ "  d timestamp(3),\n"
-+ "  PRIMARY KEY (a, b) NOT ENFORCED\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'changelog-mode' = 'I,UA,D',\n"
-+ "  'bounded' = 'false')";
-tEnv.executeSql(srcTableDdl)

(flink) 01/02: [FLINK-34000] Implement restore tests for IncrementalGroupAgg node

2024-02-13 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 14d5dbc4c53b2e200dc57e3f4c053583f2419b14
Author: bvarghese1 
AuthorDate: Fri Jan 19 09:07:00 2024 -0800

[FLINK-34000] Implement restore tests for IncrementalGroupAgg node
---
 .../IncrementalGroupAggregateRestoreTest.java  |  40 ++
 .../IncrementalGroupAggregateTestPrograms.java | 131 +
 .../plan/incremental-group-aggregate-complex.json  | 573 +
 .../savepoint/_metadata| Bin 0 -> 21025 bytes
 .../plan/incremental-group-aggregate-simple.json   | 373 ++
 .../savepoint/_metadata| Bin 0 -> 14840 bytes
 6 files changed, 1117 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateRestoreTest.java
new file mode 100644
index 000..250f50a38c7
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateRestoreTest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecIncrementalGroupAggregate}. */
+public class IncrementalGroupAggregateRestoreTest extends RestoreTestBase {
+
+public IncrementalGroupAggregateRestoreTest() {
+super(StreamExecIncrementalGroupAggregate.class);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(
+
IncrementalGroupAggregateTestPrograms.INCREMENTAL_GROUP_AGGREGATE_SIMPLE,
+
IncrementalGroupAggregateTestPrograms.INCREMENTAL_GROUP_AGGREGATE_COMPLEX);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
new file mode 100644
index 000..f14c4921b4c
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import org.apache.flink.table.api.config.ExecutionConfigOptions;
+import org.apache.flink.table.api.config.OptimizerConfigOptions;
+import org.apache.flink.table.test.program.SinkTestStep;
+import org.apache.flink.table.test.program.SourceTestStep;
+import org.apache.flink.table.test.program.TableTestProgram;
+import org.apache.flink.types.Row;
+
+import java.time.Duration;
+
+/** {@link TableTestProgram} definitions for testing {@link 
StreamExecGroupAggregate}. */
+public class IncrementalGroupAggregateTestPr

(flink) 02/02: [FLINK-34000] Remove IncrementalGroupAgg Json Plan & IT tests

2024-02-13 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 5844092408d21023a738077d0922cc75f1e634d7
Author: bvarghese1 
AuthorDate: Fri Jan 19 09:11:28 2024 -0800

[FLINK-34000] Remove IncrementalGroupAgg Json Plan & IT tests

- These are covered by restore tests
---
 .../stream/IncrementalAggregateJsonPlanTest.java   | 106 
 .../IncrementalAggregateJsonPlanITCase.java|  78 ---
 .../testIncrementalAggregate.out   | 401 --
 ...lAggregateWithSumCountDistinctAndRetraction.out | 585 -
 4 files changed, 1170 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
deleted file mode 100644
index 26dcc04f303..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.api.config.ExecutionConfigOptions;
-import org.apache.flink.table.api.config.OptimizerConfigOptions;
-import 
org.apache.flink.table.planner.plan.rules.physical.stream.IncrementalAggregateRule;
-import org.apache.flink.table.planner.utils.AggregatePhaseStrategy;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.time.Duration;
-
-/** Test json serialization/deserialization for incremental aggregate. */
-class IncrementalAggregateJsonPlanTest extends TableTestBase {
-
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-tEnv.getConfig()
-.set(
-
OptimizerConfigOptions.TABLE_OPTIMIZER_AGG_PHASE_STRATEGY,
-AggregatePhaseStrategy.TWO_PHASE.name())
-
.set(OptimizerConfigOptions.TABLE_OPTIMIZER_DISTINCT_AGG_SPLIT_ENABLED, true)
-.set(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
-.set(
-
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY,
-Duration.ofSeconds(10))
-.set(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_SIZE, 5L)
-
.set(IncrementalAggregateRule.TABLE_OPTIMIZER_INCREMENTAL_AGG_ENABLED(), true);
-
-String srcTableDdl =
-"CREATE TABLE MyTable (\n"
-+ "  a bigint,\n"
-+ "  b int not null,\n"
-+ "  c varchar,\n"
-+ "  d bigint\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'bounded' = 'false')";
-tEnv.executeSql(srcTableDdl);
-}
-
-@Test
-void testIncrementalAggregate() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a bigint,\n"
-+ "  c bigint\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'sink-insert-only' = 'false',\n"
-+ "  'table-sink-class' = 'DEFAULT')";
-tEnv.executeSql(sinkTableDdl);
-util.verifyJsonPlan(
-   

(flink) branch master updated (9a316a5bcc4 -> 5844092408d)

2024-02-13 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 9a316a5bcc4 [FLINK-34403][ci] Transforms VeryBigPbProtoToRowTest into 
an integration test
 new 14d5dbc4c53 [FLINK-34000] Implement restore tests for 
IncrementalGroupAgg node
 new 5844092408d [FLINK-34000] Remove IncrementalGroupAgg Json Plan & IT 
tests

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../stream/IncrementalAggregateJsonPlanTest.java   | 106 
 ...a => IncrementalGroupAggregateRestoreTest.java} |  13 +-
 .../IncrementalGroupAggregateTestPrograms.java | 131 +++
 .../IncrementalAggregateJsonPlanITCase.java|  78 -
 .../plan/incremental-group-aggregate-complex.json} | 182 ++---
 .../savepoint/_metadata| Bin 0 -> 21025 bytes
 .../plan/incremental-group-aggregate-simple.json}  | 118 +
 .../savepoint/_metadata| Bin 13431 -> 14840 bytes
 8 files changed, 267 insertions(+), 361 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
 copy 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/{IntervalJoinRestoreTest.java
 => IncrementalGroupAggregateRestoreTest.java} (72%)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/IncrementalAggregateJsonPlanITCase.java
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest_jsonplan/testIncrementalAggregateWithSumCountDistinctAndRetraction.out
 => 
restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-complex/plan/incremental-group-aggregate-complex.json}
 (80%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-complex/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest_jsonplan/testIncrementalAggregate.out
 => 
restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-simple/plan/incremental-group-aggregate-simple.json}
 (70%)
 copy 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-mini-batch-assigner_1/mini-batch-assigner-proc-time
 => 
stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-simple}/savepoint/_metadata
 (56%)



(flink) branch release-1.19 updated: [FLINK-33958] Fix IntervalJoin restore test flakiness

2024-02-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.19 by this push:
 new 04d3b1b1423 [FLINK-33958] Fix IntervalJoin restore test flakiness
04d3b1b1423 is described below

commit 04d3b1b1423676dc87c366841b1e521beb9953dc
Author: bvarghese1 
AuthorDate: Thu Jan 25 11:22:22 2024 -0800

[FLINK-33958] Fix IntervalJoin restore test flakiness

- Update input data to make test output predictable
---
 .../plan/nodes/exec/stream/IntervalJoinTestPrograms.java| 13 ++---
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinTestPrograms.java
index 4e326af2430..6cc1c546beb 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinTestPrograms.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinTestPrograms.java
@@ -47,9 +47,9 @@ public class IntervalJoinTestPrograms {
 };
 
 static final Row[] SHIPMENT_AFTER_DATA = {
-Row.of(7, 3, "2020-04-15 08:00:16"),
-Row.of(11, 7, "2020-04-15 08:00:11"),
-Row.of(13, 10, "2020-04-15 08:00:13")
+Row.of(7, 3, "2020-04-15 08:00:15"),
+Row.of(11, 7, "2020-04-15 08:00:16"),
+Row.of(13, 10, "2020-04-15 08:00:16")
 };
 
 static final String[] ORDERS_EVENT_TIME_SCHEMA = {
@@ -102,8 +102,7 @@ public class IntervalJoinTestPrograms {
 "+I[2, 2020-04-15 08:00:02, 
2020-04-15 08:00:05]",
 "+I[5, 2020-04-15 08:00:05, 
2020-04-15 08:00:06]")
 .consumedAfterRestore(
-"+I[7, 2020-04-15 08:00:09, 
2020-04-15 08:00:11]",
-"+I[10, 2020-04-15 08:00:11, 
2020-04-15 08:00:13]")
+"+I[10, 2020-04-15 08:00:11, 
2020-04-15 08:00:16]")
 .build())
 .runSql(
 "INSERT INTO sink_t SELECT\n"
@@ -140,8 +139,8 @@ public class IntervalJoinTestPrograms {
 "+I[5, 2020-04-15 08:00:05, 
2020-04-15 08:00:06]",
 "+I[4, 2020-04-15 08:00:04, 
2020-04-15 08:00:15]")
 .consumedAfterRestore(
-"+I[7, 2020-04-15 08:00:09, 
2020-04-15 08:00:11]",
-"+I[10, 2020-04-15 08:00:11, 
2020-04-15 08:00:13]")
+"+I[7, 2020-04-15 08:00:09, 
2020-04-15 08:00:16]",
+"+I[10, 2020-04-15 08:00:11, 
2020-04-15 08:00:16]")
 .build())
 .runSql(
 "INSERT INTO sink_t SELECT\n"



(flink) branch master updated: [FLINK-33958] Fix IntervalJoin restore test flakiness

2024-02-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 1fbf92dfc9e [FLINK-33958] Fix IntervalJoin restore test flakiness
1fbf92dfc9e is described below

commit 1fbf92dfc9ee0e111d6ec740fe87fae27ef87d8b
Author: bvarghese1 
AuthorDate: Thu Jan 25 11:22:22 2024 -0800

[FLINK-33958] Fix IntervalJoin restore test flakiness

- Update input data to make test output predictable
---
 .../plan/nodes/exec/stream/IntervalJoinTestPrograms.java| 13 ++---
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinTestPrograms.java
index 4e326af2430..6cc1c546beb 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinTestPrograms.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinTestPrograms.java
@@ -47,9 +47,9 @@ public class IntervalJoinTestPrograms {
 };
 
 static final Row[] SHIPMENT_AFTER_DATA = {
-Row.of(7, 3, "2020-04-15 08:00:16"),
-Row.of(11, 7, "2020-04-15 08:00:11"),
-Row.of(13, 10, "2020-04-15 08:00:13")
+Row.of(7, 3, "2020-04-15 08:00:15"),
+Row.of(11, 7, "2020-04-15 08:00:16"),
+Row.of(13, 10, "2020-04-15 08:00:16")
 };
 
 static final String[] ORDERS_EVENT_TIME_SCHEMA = {
@@ -102,8 +102,7 @@ public class IntervalJoinTestPrograms {
 "+I[2, 2020-04-15 08:00:02, 
2020-04-15 08:00:05]",
 "+I[5, 2020-04-15 08:00:05, 
2020-04-15 08:00:06]")
 .consumedAfterRestore(
-"+I[7, 2020-04-15 08:00:09, 
2020-04-15 08:00:11]",
-"+I[10, 2020-04-15 08:00:11, 
2020-04-15 08:00:13]")
+"+I[10, 2020-04-15 08:00:11, 
2020-04-15 08:00:16]")
 .build())
 .runSql(
 "INSERT INTO sink_t SELECT\n"
@@ -140,8 +139,8 @@ public class IntervalJoinTestPrograms {
 "+I[5, 2020-04-15 08:00:05, 
2020-04-15 08:00:06]",
 "+I[4, 2020-04-15 08:00:04, 
2020-04-15 08:00:15]")
 .consumedAfterRestore(
-"+I[7, 2020-04-15 08:00:09, 
2020-04-15 08:00:11]",
-"+I[10, 2020-04-15 08:00:11, 
2020-04-15 08:00:13]")
+"+I[7, 2020-04-15 08:00:09, 
2020-04-15 08:00:16]",
+"+I[10, 2020-04-15 08:00:11, 
2020-04-15 08:00:16]")
 .build())
 .runSql(
 "INSERT INTO sink_t SELECT\n"



(flink) branch master updated: [FLINK-24239] Event time temporal join should support values from array, map, row, etc. as join key (#24253)

2024-02-08 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 01cdc703ee6 [FLINK-24239] Event time temporal join should support 
values from array, map, row, etc. as join key (#24253)
01cdc703ee6 is described below

commit 01cdc703ee6fa56bdfdf799d016c0e882e9e5d99
Author: Dawid Wysakowicz 
AuthorDate: Thu Feb 8 15:26:16 2024 +0100

[FLINK-24239] Event time temporal join should support values from array, 
map, row, etc. as join key (#24253)
---
 ...gicalCorrelateToJoinFromTemporalTableRule.scala | 151 +++---
 .../nodes/exec/stream/TemporalJoinRestoreTest.java |   2 +
 .../exec/stream/TemporalJoinTestPrograms.java  |  82 +++
 .../temporal-join-table-join-key-from-map.json | 569 +++
 .../savepoint/_metadata| Bin 0 -> 14977 bytes
 .../plan/temporal-join-table-join-nested-key.json  | 600 +
 .../savepoint/_metadata| Bin 0 -> 14973 bytes
 7 files changed, 1336 insertions(+), 68 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/rules/logical/LogicalCorrelateToJoinFromTemporalTableRule.scala
 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/rules/logical/LogicalCorrelateToJoinFromTemporalTableRule.scala
index 217b9597561..25f1d29d8ea 100644
--- 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/rules/logical/LogicalCorrelateToJoinFromTemporalTableRule.scala
+++ 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/rules/logical/LogicalCorrelateToJoinFromTemporalTableRule.scala
@@ -26,11 +26,12 @@ import 
org.apache.flink.table.planner.plan.schema.{LegacyTableSourceTable, Table
 import org.apache.flink.table.planner.plan.utils.TemporalJoinUtil
 import org.apache.flink.table.sources.LookupableTableSource
 
-import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall, RelOptRuleOperand}
+import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall, RelOptRuleOperand, 
RelOptUtil}
 import org.apache.calcite.plan.RelOptRule.{any, operand}
 import org.apache.calcite.plan.hep.{HepPlanner, HepRelVertex}
+import org.apache.calcite.rel.`type`.RelDataType
 import org.apache.calcite.rel.RelNode
-import org.apache.calcite.rel.core.TableScan
+import org.apache.calcite.rel.core.{CorrelationId, TableScan}
 import org.apache.calcite.rel.logical._
 import org.apache.calcite.rex._
 
@@ -141,6 +142,30 @@ abstract class LogicalCorrelateToJoinFromTemporalTableRule(
   }
 case _ => false
   }
+
+  protected def decorrelate(
+  rexNode: RexNode,
+  leftRowType: RelDataType,
+  correlationId: CorrelationId): RexNode = {
+rexNode.accept(new RexShuttle() {
+  // change correlate variable expression to normal RexInputRef (which is 
from left side)
+  override def visitFieldAccess(fieldAccess: RexFieldAccess): RexNode = {
+fieldAccess.getReferenceExpr match {
+  case corVar: RexCorrelVariable =>
+require(correlationId.equals(corVar.id))
+val index = leftRowType.getFieldList.indexOf(fieldAccess.getField)
+RexInputRef.of(index, leftRowType)
+  case _ => super.visitFieldAccess(fieldAccess)
+}
+  }
+
+  // update the field index from right side
+  override def visitInputRef(inputRef: RexInputRef): RexNode = {
+val rightIndex = leftRowType.getFieldCount + inputRef.getIndex
+new RexInputRef(rightIndex, inputRef.getType)
+  }
+})
+  }
 }
 
 /**
@@ -161,24 +186,7 @@ abstract class 
LogicalCorrelateToJoinFromLookupTemporalTableRule(
 validateSnapshotInCorrelate(snapshot, correlate)
 
 val leftRowType = leftInput.getRowType
-val joinCondition = filterCondition.accept(new RexShuttle() {
-  // change correlate variable expression to normal RexInputRef (which is 
from left side)
-  override def visitFieldAccess(fieldAccess: RexFieldAccess): RexNode = {
-fieldAccess.getReferenceExpr match {
-  case corVar: RexCorrelVariable =>
-require(correlate.getCorrelationId.equals(corVar.id))
-val index = leftRowType.getFieldList.indexOf(fieldAccess.getField)
-RexInputRef.of(index, leftRowType)
-  case _ => super.visitFieldAccess(fieldAccess)
-}
-  }
-
-  // update the field index from right side
-  override def visitInputRef(inputRef: RexInputRef): RexNode = {
-val rightIndex = leftRowType.getFieldCount + inputRef.getIndex
-new RexInputRef(rightIndex, inputRef.getType)
-  }
-})
+val joinCondition = decorrelate(filterCondition, leftRowType, 
correlate.getCorrelationId)
 
 val builder = call.builder()
 builder.push(leftIn

(flink) branch master updated (6ca9ec7be95 -> 042a4d2d8a8)

2024-02-07 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 6ca9ec7be95 [hotfix][build] Fixes error message when printing the 15 
biggest directories
 add 042a4d2d8a8 [FLINK-21949][table] Support ARRAY_AGG aggregate function 
(#23411)

No new revisions were added by this update.

Summary of changes:
 docs/data/sql_functions.yml|   7 +
 docs/data/sql_functions_zh.yml |   6 +
 .../docs/reference/pyflink.table/expressions.rst   |   1 +
 flink-python/pyflink/table/expression.py   |   4 +
 .../pyflink/table/tests/test_expression.py |   1 +
 .../src/main/codegen/data/Parser.tdd   |   1 +
 .../flink/sql/parser/FlinkSqlParserImplTest.java   |  21 +-
 .../flink/table/api/internal/BaseExpressions.java  |   6 +
 .../functions/BuiltInFunctionDefinitions.java  |   7 +
 .../planner/expressions/SqlAggFunctionVisitor.java |   2 +
 .../functions/sql/FlinkSqlOperatorTable.java   |  20 +
 .../planner/plan/utils/AggFunctionFactory.scala|   9 +
 .../table/planner/plan/utils/AggregateUtil.scala   |   2 +-
 .../planner/functions/ArrayAggFunctionITCase.java  |  91 +
 .../aggfunctions/ArrayAggFunctionTest.java | 411 +
 .../functions/aggregate/ArrayAggFunction.java  | 175 +
 16 files changed, 758 insertions(+), 6 deletions(-)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/ArrayAggFunctionITCase.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/ArrayAggFunctionTest.java
 create mode 100644 
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/aggregate/ArrayAggFunction.java



(flink) branch master updated (839f298c383 -> 3d0cddef418)

2024-02-05 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 839f298c383 [FLINK-34343][rpc] Use actor path when rejecting early 
messages
 new f5d889fb706 [FLINK-33441] Move Union restore tests
 new 3d0cddef418 [FLINK-33441] Remove unused json plan

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../{testutils => stream}/UnionRestoreTest.java|   4 +-
 .../{testutils => stream}/UnionTestPrograms.java   |   3 +-
 .../UnionJsonPlanTest_jsonplan/testUnion.out   | 157 -
 3 files changed, 3 insertions(+), 161 deletions(-)
 rename 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/{testutils
 => stream}/UnionRestoreTest.java (90%)
 rename 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/{testutils
 => stream}/UnionTestPrograms.java (98%)
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionJsonPlanTest_jsonplan/testUnion.out



(flink) 01/02: [FLINK-33441] Move Union restore tests

2024-02-05 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit f5d889fb70688bc3e29fc979dc233d2c9674fa96
Author: bvarghese1 
AuthorDate: Tue Jan 16 18:16:42 2024 -0800

[FLINK-33441] Move Union restore tests

- Moving from nodes.exec.testutils to nodes.exec.stream
- Related commit: f362dcc
---
 .../plan/nodes/exec/{testutils => stream}/UnionRestoreTest.java   | 4 ++--
 .../plan/nodes/exec/{testutils => stream}/UnionTestPrograms.java  | 3 +--
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionRestoreTest.java
similarity index 90%
rename from 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionRestoreTest.java
rename to 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionRestoreTest.java
index ca27c175fc6..b271e0221ad 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionRestoreTest.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionRestoreTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package org.apache.flink.table.planner.plan.nodes.exec.testutils;
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
 
-import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecUnion;
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
 import org.apache.flink.table.test.program.TableTestProgram;
 
 import java.util.Arrays;
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionTestPrograms.java
similarity index 98%
rename from 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionTestPrograms.java
rename to 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionTestPrograms.java
index 562199588b5..e40ee481fce 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionTestPrograms.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionTestPrograms.java
@@ -16,9 +16,8 @@
  * limitations under the License.
  */
 
-package org.apache.flink.table.planner.plan.nodes.exec.testutils;
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
 
-import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecUnion;
 import org.apache.flink.table.test.program.SinkTestStep;
 import org.apache.flink.table.test.program.SourceTestStep;
 import org.apache.flink.table.test.program.TableTestProgram;



(flink) 02/02: [FLINK-33441] Remove unused json plan

2024-02-05 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 3d0cddef4188995ea3d0731d6a88e7a6af56f44e
Author: bvarghese1 
AuthorDate: Tue Jan 16 18:20:34 2024 -0800

[FLINK-33441] Remove unused json plan
---
 .../UnionJsonPlanTest_jsonplan/testUnion.out   | 157 -
 1 file changed, 157 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionJsonPlanTest_jsonplan/testUnion.out
 
b/flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionJsonPlanTest_jsonplan/testUnion.out
deleted file mode 100644
index 17fba7f6a3d..000
--- 
a/flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionJsonPlanTest_jsonplan/testUnion.out
+++ /dev/null
@@ -1,157 +0,0 @@
-{
-  "flinkVersion" : "",
-  "nodes" : [ {
-"id" : 1,
-"type" : "stream-exec-table-source-scan_1",
-"scanTableSource" : {
-  "table" : {
-"identifier" : "`default_catalog`.`default_database`.`MyTable`",
-"resolvedTable" : {
-  "schema" : {
-"columns" : [ {
-  "name" : "a",
-  "dataType" : "BIGINT"
-}, {
-  "name" : "b",
-  "dataType" : "INT NOT NULL"
-}, {
-  "name" : "c",
-  "dataType" : "VARCHAR(2147483647)"
-}, {
-  "name" : "d",
-  "dataType" : "TIMESTAMP(3)"
-} ],
-"watermarkSpecs" : [ ]
-  },
-  "partitionKeys" : [ ],
-  "options" : {
-"bounded" : "false",
-"connector" : "values"
-  }
-}
-  },
-  "abilities" : [ {
-"type" : "ProjectPushDown",
-"projectedFields" : [ [ 0 ], [ 1 ] ],
-"producedType" : "ROW<`a` BIGINT, `b` INT NOT NULL> NOT NULL"
-  }, {
-"type" : "ReadingMetadata",
-"metadataKeys" : [ ],
-"producedType" : "ROW<`a` BIGINT, `b` INT NOT NULL> NOT NULL"
-  } ]
-},
-"outputType" : "ROW<`a` BIGINT, `b` INT NOT NULL>",
-"description" : "TableSourceScan(table=[[default_catalog, 
default_database, MyTable, project=[a, b], metadata=[]]], fields=[a, b])",
-"inputProperties" : [ ]
-  }, {
-"id" : 2,
-"type" : "stream-exec-table-source-scan_1",
-"scanTableSource" : {
-  "table" : {
-"identifier" : "`default_catalog`.`default_database`.`MyTable2`",
-"resolvedTable" : {
-  "schema" : {
-"columns" : [ {
-  "name" : "d",
-  "dataType" : "BIGINT"
-}, {
-  "name" : "e",
-  "dataType" : "INT NOT NULL"
-} ],
-"watermarkSpecs" : [ ]
-  },
-  "partitionKeys" : [ ],
-  "options" : {
-"bounded" : "false",
-"connector" : "values"
-  }
-}
-  }
-},
-"outputType" : "ROW<`d` BIGINT, `e` INT NOT NULL>",
-"description" : "TableSourceScan(table=[[default_catalog, 
default_database, MyTable2]], fields=[d, e])",
-"inputProperties" : [ ]
-  }, {
-"id" : 3,
-"type" : "stream-exec-union_1",
-"inputProperties" : [ {
-  "requiredDistribution" : {
-"type" : "UNKNOWN"
-  },
-  "damBehavior" : "PIPELINED",
-  "priority" : 0
-}, {
-  "requiredDistribution" : {
-"type" : "UNKNOWN"
-  },
-  "damBehavior" : "PIPELINED",
-  "priority" : 0
-} ],
-"outputType" : "ROW<`a` BIGINT, `b` INT NOT NULL>",
-"description" : "Union(all=[true], union=[a, b])"
-  }, {
-"id" : 4,
-"type" : "stream-exec-sink_1",
-"configuration" : {
-  "

(flink) branch master updated: [hotfix] peekNextBufferSubpartitionId shouldn't throw UnsupportedDataTypeException

2024-01-19 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 8dd28a2e8bf [hotfix] peekNextBufferSubpartitionId shouldn't throw 
UnsupportedDataTypeException
8dd28a2e8bf is described below

commit 8dd28a2e8bf10ec278ff90c99563468e294a135a
Author: Weijie Guo 
AuthorDate: Fri Jan 19 13:32:10 2024 +0800

[hotfix] peekNextBufferSubpartitionId shouldn't throw 
UnsupportedDataTypeException
---
 .../partition/consumer/RecoveredInputChannel.java  |  3 +--
 .../tiered/netty/TestingNettyConnectionReader.java | 23 ++-
 .../tiered/netty/TestingTierConsumerAgent.java | 26 +-
 3 files changed, 40 insertions(+), 12 deletions(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/consumer/RecoveredInputChannel.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/consumer/RecoveredInputChannel.java
index 06ae4258da4..1f41a099931 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/consumer/RecoveredInputChannel.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/consumer/RecoveredInputChannel.java
@@ -36,7 +36,6 @@ import org.apache.flink.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.activation.UnsupportedDataTypeException;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.GuardedBy;
 
@@ -193,7 +192,7 @@ public abstract class RecoveredInputChannel extends 
InputChannel implements Chan
 
 @Override
 protected int peekNextBufferSubpartitionIdInternal() throws IOException {
-throw new UnsupportedDataTypeException();
+throw new UnsupportedOperationException();
 }
 
 @Override
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/netty/TestingNettyConnectionReader.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/netty/TestingNettyConnectionReader.java
index 4b9358eb319..5fc1855ca04 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/netty/TestingNettyConnectionReader.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/netty/TestingNettyConnectionReader.java
@@ -20,24 +20,28 @@ package 
org.apache.flink.runtime.io.network.partition.hybrid.tiered.netty;
 
 import org.apache.flink.runtime.io.network.buffer.Buffer;
 
-import javax.activation.UnsupportedDataTypeException;
-
 import java.io.IOException;
 import java.util.Optional;
 import java.util.function.Function;
+import java.util.function.Supplier;
 
 /** Test implementation for {@link NettyConnectionReader}. */
 public class TestingNettyConnectionReader implements NettyConnectionReader {
 
 private final Function readBufferFunction;
 
-private TestingNettyConnectionReader(Function 
readBufferFunction) {
+private final Supplier peekNextBufferSubpartitionIdSupplier;
+
+private TestingNettyConnectionReader(
+Function readBufferFunction,
+Supplier peekNextBufferSubpartitionIdSupplier) {
 this.readBufferFunction = readBufferFunction;
+this.peekNextBufferSubpartitionIdSupplier = 
peekNextBufferSubpartitionIdSupplier;
 }
 
 @Override
 public int peekNextBufferSubpartitionId() throws IOException {
-throw new UnsupportedDataTypeException();
+return peekNextBufferSubpartitionIdSupplier.get();
 }
 
 @Override
@@ -50,6 +54,8 @@ public class TestingNettyConnectionReader implements 
NettyConnectionReader {
 
 private Function readBufferFunction = segmentId -> 
null;
 
+private Supplier peekNextBufferSubpartitionIdSupplier = () -> 
-1;
+
 public Builder() {}
 
 public Builder setReadBufferFunction(Function 
readBufferFunction) {
@@ -57,8 +63,15 @@ public class TestingNettyConnectionReader implements 
NettyConnectionReader {
 return this;
 }
 
+public Builder setPeekNextBufferSubpartitionIdSupplier(
+Supplier peekNextBufferSubpartitionIdSupplier) {
+this.peekNextBufferSubpartitionIdSupplier = 
peekNextBufferSubpartitionIdSupplier;
+return this;
+}
+
 public TestingNettyConnectionReader build() {
-return new TestingNettyConnectionReader(readBufferFunction);
+return new TestingNettyConnectionReader(
+readBufferFunction, peekNextBufferSubpartitionIdSupplier);
 }
 }
 }
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/netty/TestingTierConsumerAgent.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/par

(flink) branch master updated (38f7b51d0cc -> 3d088f6e154)

2024-01-19 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 38f7b51d0cc [FLINK-34083][config] Deprecate string configuration keys 
and unused constants in ConfigConstants
 add 3d088f6e154 [FLINK-34153] Set ALWAYS ChainingStrategy in TemporalSort

No new revisions were added by this update.

Summary of changes:
 .../flink/table/runtime/operators/sort/BaseTemporalSortOperator.java | 5 +
 1 file changed, 5 insertions(+)



(flink) branch master updated: [FLINK-27992] Set ALWAYS chaining strategy for CepOperator in ExecMatch

2024-01-18 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 201571b486f [FLINK-27992] Set ALWAYS chaining strategy for CepOperator 
in ExecMatch
201571b486f is described below

commit 201571b486f405358a31e077247241892d537198
Author: Dawid Wysakowicz 
AuthorDate: Wed Jan 17 17:17:37 2024 +0100

[FLINK-27992] Set ALWAYS chaining strategy for CepOperator in ExecMatch
---
 .../flink/table/planner/plan/nodes/exec/common/CommonExecMatch.java   | 4 
 1 file changed, 4 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecMatch.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecMatch.java
index a5d13b00bf8..8bc4dc4ff06 100644
--- 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecMatch.java
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecMatch.java
@@ -31,6 +31,7 @@ import org.apache.flink.cep.pattern.Quantifier;
 import org.apache.flink.cep.pattern.conditions.BooleanConditions;
 import org.apache.flink.cep.pattern.conditions.IterativeCondition;
 import org.apache.flink.configuration.ReadableConfig;
+import org.apache.flink.streaming.api.operators.ChainingStrategy;
 import org.apache.flink.streaming.api.transformations.OneInputTransformation;
 import org.apache.flink.streaming.api.windowing.time.Time;
 import org.apache.flink.table.api.TableException;
@@ -188,6 +189,9 @@ public abstract class CommonExecMatch extends 
ExecNodeBase
 transform.setStateKeySelector(selector);
 transform.setStateKeyType(selector.getProducedType());
 
+// should be chained with the timestamp inserter
+transform.setChainingStrategy(ChainingStrategy.ALWAYS);
+
 if (inputsContainSingleton()) {
 transform.setParallelism(1);
 transform.setMaxParallelism(1);



(flink) branch master updated (488d60a1d39 -> 534df6490e0)

2024-01-16 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 488d60a1d39 [FLINK-34066][table-planner] Fix LagFunction throw NPE 
when input argument are not null (#24075)
 add 36886541522 [FLINK-33958] Implement restore tests for IntervalJoin node
 add 534df6490e0 [FLINK-33958] Remove IntervalJoin Json Plan & IT tests

No new revisions were added by this update.

Summary of changes:
 .../exec/stream/IntervalJoinJsonPlanTest.java  |  99 ---
 .../nodes/exec/stream/IntervalJoinRestoreTest.java |  41 ++
 .../exec/stream/IntervalJoinTestPrograms.java  | 194 +
 .../jsonplan/IntervalJoinJsonPlanITCase.java   | 117 ---
 .../testProcessingTimeInnerJoinWithOnClause.out| 784 -
 .../plan/interval-join-event-time.json}| 257 +++
 .../interval-join-event-time/savepoint/_metadata   | Bin 0 -> 19515 bytes
 .../plan/interval-join-negative-interval.json} | 321 -
 .../savepoint/_metadata| Bin 0 -> 9505 bytes
 .../plan/interval-join-proc-time.json  | 445 
 .../interval-join-proc-time/savepoint/_metadata| Bin 0 -> 19511 bytes
 11 files changed, 920 insertions(+), 1338 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinJsonPlanTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/IntervalJoinJsonPlanITCase.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinJsonPlanTest_jsonplan/testProcessingTimeInnerJoinWithOnClause.out
 copy 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinJsonPlanTest_jsonplan/testRowTimeInnerJoinWithOnClause.out
 => 
restore-tests/stream-exec-interval-join_1/interval-join-event-time/plan/interval-join-event-time.json}
 (66%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-interval-join_1/interval-join-event-time/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/IntervalJoinJsonPlanTest_jsonplan/testRowTimeInnerJoinWithOnClause.out
 => 
restore-tests/stream-exec-interval-join_1/interval-join-negative-interval/plan/interval-join-negative-interval.json}
 (62%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-interval-join_1/interval-join-negative-interval/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-interval-join_1/interval-join-proc-time/plan/interval-join-proc-time.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-interval-join_1/interval-join-proc-time/savepoint/_metadata



(flink) branch master updated (4499553ce7d -> 6c050e92040)

2024-01-15 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 4499553ce7d [FLINK-32815] Write tests for HASHCODE function
 add 6c050e92040 [FLINK-31481][table] Support enhanced show databases syntax

No new revisions were added by this update.

Summary of changes:
 docs/content.zh/docs/dev/table/sql/show.md |   2 +-
 docs/content/docs/dev/table/sql/show.md|  16 ++-
 .../src/test/resources/sql/catalog_database.q  | 159 +
 .../src/main/codegen/includes/parserImpls.ftl  |  43 +-
 .../flink/sql/parser/dql/SqlShowDatabases.java |  88 +++-
 .../flink/sql/parser/FlinkSqlParserImplTest.java   |  25 
 .../table/operations/ShowDatabasesOperation.java   |  72 --
 .../operations/SqlNodeToOperationConversion.java   |   9 --
 .../operations/converters/SqlNodeConverters.java   |   1 +
 ...nverter.java => SqlShowDatabasesConverter.java} |  30 ++--
 .../operations/SqlOtherOperationConverterTest.java |  35 +
 11 files changed, 442 insertions(+), 38 deletions(-)
 copy 
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/{SqlTruncateTableConverter.java
 => SqlShowDatabasesConverter.java} (51%)



(flink) branch master updated (19cb9de5c54 -> 4499553ce7d)

2024-01-15 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 19cb9de5c54 [FLINK-33268][rest] Skip unknown fields in REST response 
deserialization
 new 36542c134b1 [FLINK-32815] Implement internal HASHCODE function
 new 4499553ce7d [FLINK-32815] Write tests for HASHCODE function

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../functions/BuiltInFunctionDefinitions.java  |  10 +
 .../table/planner/codegen/ExprCodeGenerator.scala  |   3 +
 .../codegen/calls/BridgingFunctionGenUtil.scala|   2 +-
 .../table/planner/functions/HashcodeITCase.java| 208 +
 4 files changed, 222 insertions(+), 1 deletion(-)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/HashcodeITCase.java



(flink) 02/02: [FLINK-32815] Write tests for HASHCODE function

2024-01-15 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 4499553ce7d35b9782c54b66245a4ccb0627cbfc
Author: Dawid Wysakowicz 
AuthorDate: Thu Jan 11 12:09:20 2024 +0100

[FLINK-32815] Write tests for HASHCODE function
---
 .../functions/BuiltInFunctionDefinitions.java  |  20 +-
 .../table/planner/codegen/ExprCodeGenerator.scala  |   2 +-
 .../table/planner/functions/HashcodeITCase.java| 208 +
 3 files changed, 219 insertions(+), 11 deletions(-)

diff --git 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
index 177cf1aba04..93bb9d3690c 100644
--- 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
+++ 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
@@ -381,6 +381,16 @@ public final class BuiltInFunctionDefinitions {
 .internal()
 .build();
 
+public static final BuiltInFunctionDefinition INTERNAL_HASHCODE =
+BuiltInFunctionDefinition.newBuilder()
+.name("$HASHCODE$1")
+.kind(SCALAR)
+.inputTypeStrategy(sequence(ANY))
+
.outputTypeStrategy(nullableIfArgs(explicit(DataTypes.INT().notNull(
+.runtimeProvided()
+.internal()
+.build();
+
 // 

 // Logic functions
 // 

@@ -2292,16 +2302,6 @@ public final class BuiltInFunctionDefinitions {
 .runtimeProvided()
 .build();
 
-public static final BuiltInFunctionDefinition HASHCODE =
-BuiltInFunctionDefinition.newBuilder()
-.name("$HASHCODE$1")
-.kind(SCALAR)
-.inputTypeStrategy(sequence(ANY))
-
.outputTypeStrategy(nullableIfArgs(explicit(DataTypes.INT().notNull(
-.runtimeProvided()
-.internal()
-.build();
-
 public static final BuiltInFunctionDefinition JSON_OBJECT =
 BuiltInFunctionDefinition.newBuilder()
 .name("JSON_OBJECT")
diff --git 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
index b40631a5e39..07d45f396fb 100644
--- 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
+++ 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
@@ -805,7 +805,7 @@ class ExprCodeGenerator(ctx: CodeGeneratorContext, 
nullableInput: Boolean)
   case BuiltInFunctionDefinitions.JSON_STRING =>
 new JsonStringCallGen(call).generate(ctx, operands, resultType)
 
-  case BuiltInFunctionDefinitions.HASHCODE =>
+  case BuiltInFunctionDefinitions.INTERNAL_HASHCODE =>
 new HashCodeCallGen().generate(ctx, operands, resultType)
 
   case BuiltInFunctionDefinitions.AGG_DECIMAL_PLUS |
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/HashcodeITCase.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/HashcodeITCase.java
new file mode 100644
index 000..a0d64adb821
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/HashcodeITCase.java
@@ -0,0 +1,208 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+

(flink) 01/02: [FLINK-32815] Implement internal HASHCODE function

2024-01-15 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 36542c134b1d4482b37850033dc66a209fd42331
Author: Hanyu Zheng <135176127+hanyuzhe...@users.noreply.github.com>
AuthorDate: Wed Jul 26 23:31:09 2023 -0700

[FLINK-32815] Implement internal HASHCODE function
---
 .../flink/table/functions/BuiltInFunctionDefinitions.java  | 10 ++
 .../apache/flink/table/planner/codegen/ExprCodeGenerator.scala |  3 +++
 .../table/planner/codegen/calls/BridgingFunctionGenUtil.scala  |  2 +-
 3 files changed, 14 insertions(+), 1 deletion(-)

diff --git 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
index dedeb6860c7..177cf1aba04 100644
--- 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
+++ 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
@@ -2292,6 +2292,16 @@ public final class BuiltInFunctionDefinitions {
 .runtimeProvided()
 .build();
 
+public static final BuiltInFunctionDefinition HASHCODE =
+BuiltInFunctionDefinition.newBuilder()
+.name("$HASHCODE$1")
+.kind(SCALAR)
+.inputTypeStrategy(sequence(ANY))
+
.outputTypeStrategy(nullableIfArgs(explicit(DataTypes.INT().notNull(
+.runtimeProvided()
+.internal()
+.build();
+
 public static final BuiltInFunctionDefinition JSON_OBJECT =
 BuiltInFunctionDefinition.newBuilder()
 .name("JSON_OBJECT")
diff --git 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
index 3f847670107..b40631a5e39 100644
--- 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
+++ 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
@@ -805,6 +805,9 @@ class ExprCodeGenerator(ctx: CodeGeneratorContext, 
nullableInput: Boolean)
   case BuiltInFunctionDefinitions.JSON_STRING =>
 new JsonStringCallGen(call).generate(ctx, operands, resultType)
 
+  case BuiltInFunctionDefinitions.HASHCODE =>
+new HashCodeCallGen().generate(ctx, operands, resultType)
+
   case BuiltInFunctionDefinitions.AGG_DECIMAL_PLUS |
   BuiltInFunctionDefinitions.HIVE_AGG_DECIMAL_PLUS =>
 val left = operands.head
diff --git 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/BridgingFunctionGenUtil.scala
 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/BridgingFunctionGenUtil.scala
index 499e085be47..87c666ca5dc 100644
--- 
a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/BridgingFunctionGenUtil.scala
+++ 
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/BridgingFunctionGenUtil.scala
@@ -587,7 +587,7 @@ object BridgingFunctionGenUtil {
|public class $evaluatorName extends 
${className[AbstractRichFunction]} {
|
|  ${ctx.reuseMemberCode()}
-   |
+   |  ${ctx.reuseInnerClassDefinitionCode()}
|  public $evaluatorName(Object[] references) throws Exception {
|${ctx.reuseInitCode()}
|  }



(flink) 01/02: [FLINK-33979] Implement restore tests for TableSink node

2024-01-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 9a9b9ce81ca05398f8891c918c74294402462f5c
Author: bvarghese1 
AuthorDate: Wed Jan 3 17:24:19 2024 -0800

[FLINK-33979] Implement restore tests for TableSink node
---
 .../planner/factories/TestValuesTableFactory.java  |   9 +-
 .../nodes/exec/stream/TableSinkRestoreTest.java|  43 ++
 .../nodes/exec/stream/TableSinkTestPrograms.java   | 158 +
 .../utils/JavaUserDefinedScalarFunctions.java  |   2 +-
 .../plan/sink-ndf-primary-key.json | 123 
 .../sink-ndf-primary-key/savepoint/_metadata   | Bin 0 -> 5080 bytes
 .../sink-overwrite/plan/sink-overwrite.json|  84 +++
 .../sink-overwrite/savepoint/_metadata | Bin 0 -> 8381 bytes
 .../plan/sink-partial-insert.json  | 128 +
 .../sink-partial-insert/savepoint/_metadata| Bin 0 -> 11034 bytes
 .../sink-partition/plan/sink-partition.json| 126 
 .../sink-partition/savepoint/_metadata | Bin 0 -> 9435 bytes
 .../plan/sink-writing-metadata.json|  87 
 .../sink-writing-metadata/savepoint/_metadata  | Bin 0 -> 8331 bytes
 14 files changed, 758 insertions(+), 2 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestValuesTableFactory.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestValuesTableFactory.java
index 8511e30ce3c..3dbf4d5b9c0 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestValuesTableFactory.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestValuesTableFactory.java
@@ -51,6 +51,7 @@ import 
org.apache.flink.table.connector.sink.DataStreamSinkProvider;
 import org.apache.flink.table.connector.sink.DynamicTableSink;
 import org.apache.flink.table.connector.sink.OutputFormatProvider;
 import org.apache.flink.table.connector.sink.SinkFunctionProvider;
+import org.apache.flink.table.connector.sink.abilities.SupportsOverwrite;
 import org.apache.flink.table.connector.sink.abilities.SupportsPartitioning;
 import org.apache.flink.table.connector.sink.abilities.SupportsWritingMetadata;
 import org.apache.flink.table.connector.source.AsyncTableFunctionProvider;
@@ -1937,7 +1938,10 @@ public final class TestValuesTableFactory
 
 /** Values {@link DynamicTableSink} for testing. */
 private static class TestValuesTableSink
-implements DynamicTableSink, SupportsWritingMetadata, 
SupportsPartitioning {
+implements DynamicTableSink,
+SupportsWritingMetadata,
+SupportsPartitioning,
+SupportsOverwrite {
 
 private DataType consumedDataType;
 private int[] primaryKeyIndices;
@@ -2135,6 +2139,9 @@ public final class TestValuesTableFactory
 public boolean requiresPartitionGrouping(boolean supportsGrouping) {
 return supportsGrouping;
 }
+
+@Override
+public void applyOverwrite(boolean overwrite) {}
 }
 
 /** A TableSink used for testing the implementation of {@link 
SinkFunction.Context}. */
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkRestoreTest.java
new file mode 100644
index 000..1ab3651cf09
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkRestoreTest.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;

(flink) branch master updated (f0b28bf47a3 -> 413aed08497)

2024-01-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from f0b28bf47a3 Revert "[FLINK-33973] Add new interfaces for SinkV2 to 
synchronize the API with the SourceV2 API"
 new 9a9b9ce81ca [FLINK-33979] Implement restore tests for TableSink node
 new 413aed08497 [FLINK-33979] Remove TableSink Json Plan & Json IT tests

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../planner/factories/TestValuesTableFactory.java  |   9 +-
 .../nodes/exec/stream/TableSinkJsonPlanTest.java   | 149 ---
 .../nodes/exec/stream/TableSinkRestoreTest.java|  43 ++
 .../nodes/exec/stream/TableSinkTestPrograms.java   | 158 +
 .../stream/jsonplan/TableSinkJsonPlanITCase.java   |  75 --
 .../utils/JavaUserDefinedScalarFunctions.java  |   2 +-
 ...WithNonDeterministicFuncSinkWithDifferentPk.out | 153 
 .../plan/sink-ndf-primary-key.json}|  98 ++---
 .../sink-ndf-primary-key/savepoint/_metadata   | Bin 0 -> 5080 bytes
 .../sink-overwrite/plan/sink-overwrite.json}   |  45 +++---
 .../sink-overwrite/savepoint/_metadata | Bin 0 -> 8381 bytes
 .../plan/sink-partial-insert.json} |  84 ---
 .../sink-partial-insert/savepoint/_metadata| Bin 0 -> 11034 bytes
 .../sink-partition/plan/sink-partition.json}   |  71 -
 .../sink-partition/savepoint/_metadata | Bin 0 -> 9435 bytes
 .../plan/sink-writing-metadata.json}   |  50 +++
 .../sink-writing-metadata/savepoint/_metadata  | Bin 0 -> 8331 bytes
 17 files changed, 352 insertions(+), 585 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkJsonPlanTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/TableSinkJsonPlanITCase.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkJsonPlanTest_jsonplan/testCdcWithNonDeterministicFuncSinkWithDifferentPk.out
 copy 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkJsonPlanTest_jsonplan/testPartitioning.out
 => 
restore-tests/stream-exec-sink_1/sink-ndf-primary-key/plan/sink-ndf-primary-key.json}
 (57%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-sink_1/sink-ndf-primary-key/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkJsonPlanTest_jsonplan/testOverwrite.out
 => restore-tests/stream-exec-sink_1/sink-overwrite/plan/sink-overwrite.json} 
(71%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-sink_1/sink-overwrite/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkJsonPlanTest_jsonplan/testPartialInsert.out
 => 
restore-tests/stream-exec-sink_1/sink-partial-insert/plan/sink-partial-insert.json}
 (62%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-sink_1/sink-partial-insert/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkJsonPlanTest_jsonplan/testPartitioning.out
 => restore-tests/stream-exec-sink_1/sink-partition/plan/sink-partition.json} 
(66%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-sink_1/sink-partition/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkJsonPlanTest_jsonplan/testWritingMetadata.out
 => 
restore-tests/stream-exec-sink_1/sink-writing-metadata/plan/sink-writing-metadata.json}
 (69%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-sink_1/sink-writing-metadata/savepoint/_metadata



(flink) 02/02: [FLINK-33979] Remove TableSink Json Plan & Json IT tests

2024-01-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 413aed084974efe708833b3e1cfeb7a3f5ce544c
Author: bvarghese1 
AuthorDate: Wed Jan 3 17:25:34 2024 -0800

[FLINK-33979] Remove TableSink Json Plan & Json IT tests

- These are covered by the new restore tests
---
 .../nodes/exec/stream/TableSinkJsonPlanTest.java   | 149 
 .../stream/jsonplan/TableSinkJsonPlanITCase.java   |  75 --
 ...WithNonDeterministicFuncSinkWithDifferentPk.out | 153 -
 .../testOverwrite.out  |  93 -
 .../testPartialInsert.out  | 150 
 .../testPartitioning.out   | 137 --
 .../testWritingMetadata.out|  95 -
 7 files changed, 852 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkJsonPlanTest.java
deleted file mode 100644
index 3c321b5c59a..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSinkJsonPlanTest.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import 
org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-/** Test json serialization/deserialization for table sink. */
-class TableSinkJsonPlanTest extends TableTestBase {
-
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-
-String srcTableDdl =
-"CREATE TABLE MyTable (\n"
-+ "  a bigint,\n"
-+ "  b int,\n"
-+ "  c varchar\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'bounded' = 'false')";
-tEnv.executeSql(srcTableDdl);
-}
-
-@Test
-void testOverwrite() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a bigint,\n"
-+ "  b int,\n"
-+ "  c varchar\n"
-+ ") with (\n"
-+ "  'connector' = 'filesystem',\n"
-+ "  'format' = 'testcsv',\n"
-+ "  'path' = '/tmp')";
-tEnv.executeSql(sinkTableDdl);
-util.verifyJsonPlan("insert overwrite MySink select * from MyTable");
-}
-
-@Test
-void testPartitioning() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a bigint,\n"
-+ "  b int,\n"
-+ "  c varchar\n"
-+ ") partitioned by (c) with (\n"
-+ "  'connector' = 'filesystem',\n"
-+ "  'format' = 'testcsv',\n"
-+ "  'path' = '/tmp')";
-tEnv.executeSql(sinkTableDdl);
-util.verifyJsonPlan("insert into MySink partition (c='A') select a, b 
from MyTable");
-}
-
-@Test
-void testWritingMetadata() {
-String

(flink) branch master updated (e6556fa898d -> 1800b8744a4)

2024-01-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from e6556fa898d [FLINK-33697][state] Allow incorrect public evolving API 
violation
 new 2c70bd346d3 [FLINK-32256] Add ARRAY_MIN function
 new 1800b8744a4 [FLINK-32256] Remove argument count check

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 docs/data/sql_functions.yml|   3 +
 .../docs/reference/pyflink.table/expressions.rst   |   1 +
 flink-python/pyflink/table/expression.py   |   7 +
 .../flink/table/api/internal/BaseExpressions.java  |  10 ++
 .../functions/BuiltInFunctionDefinitions.java  |  10 ++
 .../functions/CollectionFunctionsITCase.java   | 167 -
 ...ArrayMaxFunction.java => ArrayMinFunction.java} |  18 +--
 7 files changed, 205 insertions(+), 11 deletions(-)
 copy 
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/scalar/{ArrayMaxFunction.java
 => ArrayMinFunction.java} (84%)



(flink) 02/02: [FLINK-32256] Remove argument count check

2024-01-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 1800b8744a4e68c7dfffdd15a3ac11f038082df5
Author: Dawid Wysakowicz 
AuthorDate: Wed Jan 10 13:58:37 2024 +0100

[FLINK-32256] Remove argument count check
---
 .../org/apache/flink/table/types/inference/TypeInferenceUtil.java| 5 -
 .../inference/strategies/ArrayComparableElementTypeStrategy.java | 5 -
 2 files changed, 10 deletions(-)

diff --git 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/TypeInferenceUtil.java
 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/TypeInferenceUtil.java
index 2e69044d7cf..8247ccad1b0 100644
--- 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/TypeInferenceUtil.java
+++ 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/TypeInferenceUtil.java
@@ -344,11 +344,6 @@ public final class TypeInferenceUtil {
 
 // 

 
-public static boolean checkInputArgumentNumber(
-ArgumentCount argumentCount, int actualCount, boolean 
throwOnFailure) {
-return validateArgumentCount(argumentCount, actualCount, 
throwOnFailure);
-}
-
 private static Result runTypeInferenceInternal(
 TypeInference typeInference,
 CallContext callContext,
diff --git 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/ArrayComparableElementTypeStrategy.java
 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/ArrayComparableElementTypeStrategy.java
index c3e40e26647..175a41d45dd 100644
--- 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/ArrayComparableElementTypeStrategy.java
+++ 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/ArrayComparableElementTypeStrategy.java
@@ -27,7 +27,6 @@ import org.apache.flink.table.types.inference.CallContext;
 import org.apache.flink.table.types.inference.ConstantArgumentCount;
 import org.apache.flink.table.types.inference.InputTypeStrategy;
 import org.apache.flink.table.types.inference.Signature;
-import org.apache.flink.table.types.inference.TypeInferenceUtil;
 import org.apache.flink.table.types.logical.LegacyTypeInformationType;
 import org.apache.flink.table.types.logical.LogicalType;
 import org.apache.flink.table.types.logical.LogicalTypeFamily;
@@ -68,10 +67,6 @@ public final class ArrayComparableElementTypeStrategy 
implements InputTypeStrate
 public Optional> inferInputTypes(
 CallContext callContext, boolean throwOnFailure) {
 final List argumentDataTypes = 
callContext.getArgumentDataTypes();
-if (!TypeInferenceUtil.checkInputArgumentNumber(
-argumentCount, argumentDataTypes.size(), throwOnFailure)) {
-return callContext.fail(throwOnFailure, "the input argument number 
should be one");
-}
 final DataType argumentType = argumentDataTypes.get(0);
 if (!argumentType.getLogicalType().is(LogicalTypeRoot.ARRAY)) {
 return callContext.fail(throwOnFailure, "All arguments requires to 
be an ARRAY type");



(flink) 01/02: [FLINK-32256] Add ARRAY_MIN function

2024-01-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 2c70bd346d37f96f01007c89e3eb66e919c0c0a8
Author: Hanyu Zheng <135176127+hanyuzhe...@users.noreply.github.com>
AuthorDate: Fri Jun 9 20:09:52 2023 -0700

[FLINK-32256] Add ARRAY_MIN function

Find the minimum among all elements in the array for which ordering is 
supported.
---
 docs/data/sql_functions.yml|   3 +
 .../docs/reference/pyflink.table/expressions.rst   |   1 +
 flink-python/pyflink/table/expression.py   |   7 +
 .../flink/table/api/internal/BaseExpressions.java  |  10 ++
 .../functions/BuiltInFunctionDefinitions.java  |  10 ++
 .../table/types/inference/TypeInferenceUtil.java   |   5 +
 .../ArrayComparableElementTypeStrategy.java|   5 +
 .../functions/CollectionFunctionsITCase.java   | 167 -
 .../runtime/functions/scalar/ArrayMinFunction.java |  89 +++
 9 files changed, 295 insertions(+), 2 deletions(-)

diff --git a/docs/data/sql_functions.yml b/docs/data/sql_functions.yml
index 09a5c9543e2..dd95bed7778 100644
--- a/docs/data/sql_functions.yml
+++ b/docs/data/sql_functions.yml
@@ -658,6 +658,9 @@ collection:
   - sql: ARRAY_JOIN(array, delimiter[, nullReplacement])
 table: array.arrayJoin(delimiter[, nullReplacement])
 description: Returns a string that represents the concatenation of the 
elements in the given array and the elements' data type in the given array is 
string. The delimiter is a string that separates each pair of consecutive 
elements of the array. The optional nullReplacement is a string that replaces 
null elements in the array. If nullReplacement is not specified, null elements 
in the array will be omitted from the resulting string. Returns null if input 
array or delimiter or nullRepl [...]
+  - sql: ARRAY_MIN(array)
+table: array.arrayMin()
description: Returns the minimum value from the array. If the array itself is 
null, the function returns null.
   - sql: MAP_KEYS(map)
 table: MAP.mapKeys()
 description: Returns the keys of the map as array. No order guaranteed.
diff --git a/flink-python/docs/reference/pyflink.table/expressions.rst 
b/flink-python/docs/reference/pyflink.table/expressions.rst
index 93541c53983..908a6ceda5a 100644
--- a/flink-python/docs/reference/pyflink.table/expressions.rst
+++ b/flink-python/docs/reference/pyflink.table/expressions.rst
@@ -234,6 +234,7 @@ advanced type helper functions
 Expression.array_reverse
 Expression.array_max
 Expression.array_slice
+Expression.array_min
 Expression.array_union
 Expression.map_entries
 Expression.map_keys
diff --git a/flink-python/pyflink/table/expression.py 
b/flink-python/pyflink/table/expression.py
index 3f55b762292..cb72ba40b21 100644
--- a/flink-python/pyflink/table/expression.py
+++ b/flink-python/pyflink/table/expression.py
@@ -1564,6 +1564,13 @@ class Expression(Generic[T]):
 else:
 return _ternary_op("array_join")(self, delimiter, null_replacement)
 
+def array_min(self) -> 'Expression':
+"""
+Returns the minimum value from the array.
+If the array itself is null, the function returns null.
+"""
+return _unary_op("arrayMin")(self)
+
 @property
 def map_keys(self) -> 'Expression':
 """
diff --git 
a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
 
b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
index c3b356d67bf..cdc108d3672 100644
--- 
a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
+++ 
b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/BaseExpressions.java
@@ -58,6 +58,7 @@ import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_DISTINCT;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_ELEMENT;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_MAX;
+import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_MIN;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_POSITION;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_REMOVE;
 import static 
org.apache.flink.table.functions.BuiltInFunctionDefinitions.ARRAY_REVERSE;
@@ -1477,6 +1478,15 @@ public abstract class BaseExpressions {
 return toApiSpecificExpression(unresolvedCall(ARRAY_MAX, toExpr()));
 }
 
+/**
+ * Returns the minimum value from the array.
+ *
+ * if array itself is null, the function returns null.

(flink) branch master updated: [FLINK-34005] Implement restore tests for MiniBatchAssigner node

2024-01-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 881062f352f [FLINK-34005] Implement restore tests for 
MiniBatchAssigner node
881062f352f is described below

commit 881062f352f8bf8c21ab7cbea95e111fd82fdf20
Author: bvarghese1 
AuthorDate: Fri Jan 5 11:46:26 2024 -0800

[FLINK-34005] Implement restore tests for MiniBatchAssigner node
---
 .../exec/stream/MiniBatchAssignerRestoreTest.java  |  40 +
 .../exec/stream/MiniBatchAssignerTestPrograms.java | 144 
 .../plan/mini-batch-assigner-proc-time.json| 257 +++
 .../savepoint/_metadata| Bin 0 -> 13431 bytes
 .../plan/mini-batch-assigner-row-time.json | 854 +
 .../savepoint/_metadata| Bin 0 -> 24113 bytes
 6 files changed, 1295 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/MiniBatchAssignerRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/MiniBatchAssignerRestoreTest.java
new file mode 100644
index 000..213c96fd6eb
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/MiniBatchAssignerRestoreTest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecMiniBatchAssigner}. */
+public class MiniBatchAssignerRestoreTest extends RestoreTestBase {
+
+public MiniBatchAssignerRestoreTest() {
+super(StreamExecMiniBatchAssigner.class);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(
+MiniBatchAssignerTestPrograms.MINI_BATCH_ASSIGNER_ROW_TIME,
+MiniBatchAssignerTestPrograms.MINI_BATCH_ASSIGNER_PROC_TIME);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/MiniBatchAssignerTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/MiniBatchAssignerTestPrograms.java
new file mode 100644
index 000..c2c701f6d1c
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/MiniBatchAssignerTestPrograms.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import org.apache.flink.table.api.config.ExecutionConfigOptions;
+import org.apache.flink.table.test.program.SinkTestStep;
+import org.apache.flink.table.test.program.SourceTestStep;
+import org.apache.flink.table.test.program.TableTestProgram;
+import org.apache.flink.types.Row;
+
+import java.time.Duration;
+
+/** {@link TableTestProgram} definitions for testing {@link 
StreamExecMiniBatchAssigner}. */
+public class MiniBatchAssignerTestPrograms 

(flink) 01/02: [FLINK-33518] Implement restore tests for WatermarkAssigner node

2024-01-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 23629a80c574a9f998b41e258b8e656274714c9d
Author: bvarghese1 
AuthorDate: Wed Dec 27 11:15:22 2023 -0800

[FLINK-33518] Implement restore tests for WatermarkAssigner node
---
 .../exec/stream/WatermarkAssignerRestoreTest.java  |  40 +++
 .../exec/stream/WatermarkAssignerTestPrograms.java | 134 ++
 .../plan/watermark-assigner-basic-filter.json  | 259 
 .../savepoint/_metadata| Bin 0 -> 9203 bytes
 .../plan/watermark-assigner-pushdown-metadata.json | 270 +
 .../savepoint/_metadata| Bin 0 -> 8688 bytes
 6 files changed, 703 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerRestoreTest.java
new file mode 100644
index 000..8ea9f928801
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerRestoreTest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecWatermarkAssigner}. */
+public class WatermarkAssignerRestoreTest extends RestoreTestBase {
+
+public WatermarkAssignerRestoreTest() {
+super(StreamExecWatermarkAssigner.class);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(
+WatermarkAssignerTestPrograms.WATERMARK_ASSIGNER_BASIC_FILTER,
+
WatermarkAssignerTestPrograms.WATERMARK_ASSIGNER_PUSHDOWN_METADATA);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerTestPrograms.java
new file mode 100644
index 000..23f225c7d7b
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerTestPrograms.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import org.apache.flink.table.test.program.SinkTestStep;
+import org.apache.flink.table.test.program.SourceTestStep;
+import org.apache.flink.table.test.program.TableTestProgram;
+import org.apache.flink.table.utils.DateTimeUtils;
+import org.apache.flink.types.Row;
+
+/** {@link TableTestProgram} definitions for testing {@link 
StreamExecWatermarkAssigner}. */
+public class WatermarkAssignerTestPrograms {
+
+static final Row[] BEFORE_DATA = {
+Row.of(
+2,
+2L,
+"Hello",
+"2020-04-15 08:00:00",
+DateTime

(flink) 02/02: [FLINK-33518] Remove WatermarkAssigner JSON Plan & IT tests

2024-01-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit c233ed2599188ba63e361b1b4525d9f322965f65
Author: bvarghese1 
AuthorDate: Wed Dec 27 11:18:35 2023 -0800

[FLINK-33518] Remove WatermarkAssigner JSON Plan & IT tests

- The json tests are covered by the newly introduced restore tests
---
 .../exec/stream/WatermarkAssignerJsonPlanTest.java |  66 
 .../jsonplan/WatermarkAssignerJsonPlanITCase.java  | 135 ---
 .../testWatermarkAssigner.out  | 181 -
 3 files changed, 382 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerJsonPlanTest.java
deleted file mode 100644
index a8690eec56f..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerJsonPlanTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-/** Test json serialization/deserialization for watermark assigner. */
-class WatermarkAssignerJsonPlanTest extends TableTestBase {
-
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-}
-
-@Test
-void testWatermarkAssigner() {
-String srcTableDdl =
-"CREATE TABLE WatermarkTable (\n"
-+ "  a bigint,\n"
-+ "  b int,\n"
-+ "  c timestamp(3),\n"
-+ "  watermark for c as c - interval '5' second\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'bounded' = 'false',"
-+ "  'enable-watermark-push-down' = 'false',"
-+ "  'disable-lookup' = 'true')";
-tEnv.executeSql(srcTableDdl);
-String sinkTableDdl =
-"CREATE TABLE sink (\n"
-+ "  a bigint,\n"
-+ "  b int,\n"
-+ "  c timestamp(3)\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'table-sink-class' = 'DEFAULT')";
-tEnv.executeSql(sinkTableDdl);
-util.verifyJsonPlan("insert into sink select * from WatermarkTable");
-}
-}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/WatermarkAssignerJsonPlanITCase.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/WatermarkAssignerJsonPlanITCase.java
deleted file mode 100644
index bfc32e880a5..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/WatermarkAssignerJsonPlanITCase.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file 

(flink) branch master updated (e98ded88876 -> c233ed25991)

2024-01-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from e98ded88876 [FLINK-33969] Remove TableSource Json Plan & Json IT tests
 new 23629a80c57 [FLINK-33518] Implement restore tests for 
WatermarkAssigner node
 new c233ed25991 [FLINK-33518] Remove WatermarkAssigner JSON Plan & IT tests

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../exec/stream/WatermarkAssignerJsonPlanTest.java |  66 
 ...Test.java => WatermarkAssignerRestoreTest.java} |  12 +-
 .../exec/stream/WatermarkAssignerTestPrograms.java | 134 +
 .../jsonplan/WatermarkAssignerJsonPlanITCase.java  | 135 -
 .../plan/watermark-assigner-basic-filter.json} | 152 ++-
 .../savepoint/_metadata| Bin 0 -> 9203 bytes
 .../watermark-assigner-pushdown-metadata.json} | 166 ++---
 .../savepoint/_metadata| Bin 0 -> 8688 bytes
 8 files changed, 371 insertions(+), 294 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerJsonPlanTest.java
 copy 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/{TemporalJoinRestoreTest.java
 => WatermarkAssignerRestoreTest.java} (75%)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/WatermarkAssignerJsonPlanITCase.java
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerJsonPlanTest_jsonplan/testWatermarkAssigner.out
 => 
restore-tests/stream-exec-watermark-assigner_1/watermark-assigner-basic-filter/plan/watermark-assigner-basic-filter.json}
 (57%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-watermark-assigner_1/watermark-assigner-basic-filter/savepoint/_metadata
 copy 
flink-table/flink-table-planner/src/test/resources/restore-tests/{stream-exec-table-source-scan_1/table-source-scan-multiple-pushdowns/plan/table-source-scan-multiple-pushdowns.json
 => 
stream-exec-watermark-assigner_1/watermark-assigner-pushdown-metadata/plan/watermark-assigner-pushdown-metadata.json}
 (62%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-watermark-assigner_1/watermark-assigner-pushdown-metadata/savepoint/_metadata



(flink) branch master updated (907d0f32126 -> e98ded88876)

2024-01-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 907d0f32126 [FLINK-33881][state] Avoid copy and update value in 
TtlListState#getUnexpiredOrNull
 add d6209a18bbc [FLINK-33969] Implement restore tests for TableSourceScan 
node
 add e98ded88876 [FLINK-33969] Remove TableSource Json Plan & Json IT tests

No new revisions were added by this update.

Summary of changes:
 .../table/test/program/StatementSetTestStep.java   |  16 +-
 .../flink/table/test/program/TableTestProgram.java |  15 ++
 .../nodes/exec/stream/TableSourceJsonPlanTest.java | 198 
 .../exec/stream/TableSourceScanRestoreTest.java|  47 
 .../exec/stream/TableSourceScanTestPrograms.java   | 263 +
 .../plan/nodes/exec/testutils/RestoreTestBase.java |  13 +-
 .../stream/jsonplan/TableSourceJsonPlanITCase.java | 180 --
 .../testWatermarkPushDown.out  | 190 ---
 .../plan/table-source-scan-filter-pushdown.json}   |  49 ++--
 .../savepoint/_metadata| Bin 0 -> 8291 bytes
 .../plan/table-source-scan-limit-pushdown.json}|  66 +++---
 .../savepoint/_metadata| Bin 0 -> 11254 bytes
 .../plan/table-source-scan-multiple-pushdowns.json | 204 
 .../savepoint/_metadata| Bin 0 -> 5409 bytes
 .../table-source-scan-partition-pushdown.json} |  77 +++---
 .../savepoint/_metadata| Bin 0 -> 8342 bytes
 ...le-source-scan-project-push-down-disabled.json} |  87 ---
 .../savepoint/_metadata| Bin 0 -> 7260 bytes
 .../plan/table-source-scan-project-pushdown.json}  |  42 ++--
 .../savepoint/_metadata| Bin 0 -> 7207 bytes
 .../plan/table-source-scan-reading-metadata.json}  |  53 ++---
 .../savepoint/_metadata| Bin 0 -> 9287 bytes
 .../plan/table-source-scan-reuse-source.json}  | 128 ++
 .../savepoint/_metadata| Bin 0 -> 13123 bytes
 .../plan/table-source-scan-source-watermark.json}  |  93 
 .../savepoint/_metadata| Bin 0 -> 6794 bytes
 26 files changed, 851 insertions(+), 870 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSourceJsonPlanTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSourceScanRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSourceScanTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/TableSourceJsonPlanITCase.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/TableSourceJsonPlanTest_jsonplan/testWatermarkPushDown.out
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/TableSourceJsonPlanTest_jsonplan/testFilterPushDown.out
 => 
restore-tests/stream-exec-table-source-scan_1/table-source-scan-filter-pushdown/plan/table-source-scan-filter-pushdown.json}
 (72%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-table-source-scan_1/table-source-scan-filter-pushdown/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/TableSourceJsonPlanTest_jsonplan/testLimitPushDown.out
 => 
restore-tests/stream-exec-table-source-scan_1/table-source-scan-limit-pushdown/plan/table-source-scan-limit-pushdown.json}
 (74%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-table-source-scan_1/table-source-scan-limit-pushdown/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-table-source-scan_1/table-source-scan-multiple-pushdowns/plan/table-source-scan-multiple-pushdowns.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-table-source-scan_1/table-source-scan-multiple-pushdowns/savepoint/_metadata
 copy 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/TableSourceJsonPlanTest_jsonplan/testPartitionPushDown.out
 => 
restore-tests/stream-exec-table-source-scan_1/table-source-scan-partition-pushdown/plan/table-source-scan-partition-pushdown.json}
 (67%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-table-source-scan_1/table-source-sca

(flink) 02/02: [FLINK-33896] Remove Correlate Json Plan & Json IT tests

2024-01-10 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 263f3283724a5081e41f679659fa6a5819350739
Author: bvarghese1 
AuthorDate: Mon Jan 8 09:57:49 2024 -0800

[FLINK-33896] Remove Correlate Json Plan & Json IT tests

- These are covered by the restore tests
---
 .../nodes/exec/stream/CorrelateJsonPlanTest.java   | 139 -
 .../stream/jsonplan/CorrelateJsonPlanITCase.java   | 119 ---
 .../testCrossJoin.out  | 152 ---
 .../testCrossJoinOverrideParameters.out| 156 ---
 .../testJoinWithFilter.out | 166 -
 .../testLeftOuterJoinWithLiteralTrue.out   | 152 ---
 6 files changed, 884 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateJsonPlanTest.java
deleted file mode 100644
index d7672a03e03..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateJsonPlanTest.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableFunc1;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.Test;
-
-/** Test json serialization/deserialization for correlate. */
-class CorrelateJsonPlanTest extends TableTestBase {
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-
-String srcTableDdl =
-"CREATE TABLE MyTable (\n"
-+ "  a bigint,\n"
-+ "  b int not null,\n"
-+ "  c varchar,\n"
-+ "  d timestamp(3)\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'bounded' = 'false')";
-tEnv.executeSql(srcTableDdl);
-}
-
-@Test
-void testCrossJoin() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a varchar,\n"
-+ "  b varchar\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'table-sink-class' = 'DEFAULT')";
-tEnv.executeSql(sinkTableDdl);
-
-util.addTemporarySystemFunction("func1", TableFunc1.class);
-String sqlQuery =
-"insert into MySink SELECT c, s FROM MyTable, LATERAL 
TABLE(func1(c)) AS T(s)";
-util.verifyJsonPlan(sqlQuery);
-}
-
-@Test
-@Disabled("the case is ignored because of FLINK-21870")
-void testRegisterByClass() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a varchar,\n"
-+ "  b varchar\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'table-sink-class' = 'DEFAULT')";
-tEnv.executeSql(sinkTableDdl);
-
-tEnv.createTemporaryFunction("func1", TableFunc1.class);
-String sqlQuery =
-   

(flink) 01/02: [FLINK-33896] Implement restore tests for Correlate node

2024-01-10 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit c89933e99d5087f81389560663984012733d3bf8
Author: bvarghese1 
AuthorDate: Mon Jan 8 09:56:25 2024 -0800

[FLINK-33896] Implement restore tests for Correlate node
---
 .../nodes/exec/stream/CorrelateRestoreTest.java|  43 +
 .../nodes/exec/stream/CorrelateTestPrograms.java   | 174 +
 .../plan/correlate-catalog-func.json   | 145 ++
 .../correlate-catalog-func/savepoint/_metadata | Bin 0 -> 7245 bytes
 .../plan/correlate-cross-join-unnest.json  | 138 ++
 .../savepoint/_metadata| Bin 0 -> 7091 bytes
 .../plan/correlate-join-filter.json| 212 +
 .../correlate-join-filter/savepoint/_metadata  | Bin 0 -> 7120 bytes
 .../plan/correlate-left-join.json  | 141 ++
 .../correlate-left-join/savepoint/_metadata| Bin 0 -> 7245 bytes
 .../plan/correlate-system-func.json| 145 ++
 .../correlate-system-func/savepoint/_metadata  | Bin 0 -> 7245 bytes
 12 files changed, 998 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateRestoreTest.java
new file mode 100644
index 000..cc24919cdca
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateRestoreTest.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecCorrelate}. */
+public class CorrelateRestoreTest extends RestoreTestBase {
+
+public CorrelateRestoreTest() {
+super(StreamExecCorrelate.class);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(
+CorrelateTestPrograms.CORRELATE_CATALOG_FUNC,
+CorrelateTestPrograms.CORRELATE_SYSTEM_FUNC,
+CorrelateTestPrograms.CORRELATE_JOIN_FILTER,
+CorrelateTestPrograms.CORRELATE_LEFT_JOIN,
+CorrelateTestPrograms.CORRELATE_CROSS_JOIN_UNNEST);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateTestPrograms.java
new file mode 100644
index 000..d1a2a1e46e3
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateTestPrograms.java
@@ -0,0 +1,174 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.runtime.utils.JavaUserDefinedTableFunctions.StringSplit;
+import o

(flink) branch master updated (5b1a706ba1c -> 263f3283724)

2024-01-10 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 5b1a706ba1c [FLINK-33997] [docs] Typo in the doc 
'classloader.parent-first-patterns-additional'
 new c89933e99d5 [FLINK-33896] Implement restore tests for Correlate node
 new 263f3283724 [FLINK-33896] Remove Correlate Json Plan & Json IT tests

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../nodes/exec/stream/CorrelateJsonPlanTest.java   | 139 
 .../nodes/exec/stream/CorrelateRestoreTest.java|  43 +
 .../nodes/exec/stream/CorrelateTestPrograms.java   | 174 +
 .../stream/jsonplan/CorrelateJsonPlanITCase.java   | 119 --
 .../plan/correlate-catalog-func.json}  |  39 ++---
 .../correlate-catalog-func/savepoint/_metadata | Bin 0 -> 7245 bytes
 .../plan/correlate-cross-join-unnest.json} |  84 +-
 .../savepoint/_metadata| Bin 0 -> 7091 bytes
 .../plan/correlate-join-filter.json}   | 154 +++---
 .../correlate-join-filter/savepoint/_metadata  | Bin 0 -> 7120 bytes
 .../plan/correlate-left-join.json} |  59 +++
 .../correlate-left-join/savepoint/_metadata| Bin 0 -> 7245 bytes
 .../plan/correlate-system-func.json}   |  63 
 .../correlate-system-func/savepoint/_metadata  | Bin 0 -> 7245 bytes
 14 files changed, 418 insertions(+), 456 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateJsonPlanTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/CorrelateJsonPlanITCase.java
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateJsonPlanTest_jsonplan/testCrossJoinOverrideParameters.out
 => 
restore-tests/stream-exec-correlate_1/correlate-catalog-func/plan/correlate-catalog-func.json}
 (77%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-correlate_1/correlate-catalog-func/savepoint/_metadata
 copy 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateJsonPlanTest_jsonplan/testCrossJoin.out
 => 
restore-tests/stream-exec-correlate_1/correlate-cross-join-unnest/plan/correlate-cross-join-unnest.json}
 (56%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-correlate_1/correlate-cross-join-unnest/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateJsonPlanTest_jsonplan/testJoinWithFilter.out
 => 
restore-tests/stream-exec-correlate_1/correlate-join-filter/plan/correlate-join-filter.json}
 (54%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-correlate_1/correlate-join-filter/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateJsonPlanTest_jsonplan/testLeftOuterJoinWithLiteralTrue.out
 => 
restore-tests/stream-exec-correlate_1/correlate-left-join/plan/correlate-left-join.json}
 (73%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-correlate_1/correlate-left-join/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/CorrelateJsonPlanTest_jsonplan/testCrossJoin.out
 => 
restore-tests/stream-exec-correlate_1/correlate-system-func/plan/correlate-system-func.json}
 (72%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-correlate_1/correlate-system-func/savepoint/_metadata



(flink) 02/02: Revert "[FLINK-34000] Implement restore tests for IncrementalGroupAgg node"

2024-01-09 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit e27a4cbc74beba7dff8a408dcff38d816ff70457
Author: Dawid Wysakowicz 
AuthorDate: Tue Jan 9 09:52:50 2024 +0100

Revert "[FLINK-34000] Implement restore tests for IncrementalGroupAgg node"

This reverts commit df71d07188e745553b8174297ec7989f05cebf7a.
---
 .../IncrementalGroupAggregateRestoreTest.java  |  40 --
 .../IncrementalGroupAggregateTestPrograms.java | 119 -
 .../plan/incremental-group-aggregate-complex.json  | 573 -
 .../savepoint/_metadata| Bin 20817 -> 0 bytes
 .../plan/incremental-group-aggregate-simple.json   | 373 --
 .../savepoint/_metadata| Bin 14768 -> 0 bytes
 6 files changed, 1105 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateRestoreTest.java
deleted file mode 100644
index 250f50a38c7..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateRestoreTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
-import org.apache.flink.table.test.program.TableTestProgram;
-
-import java.util.Arrays;
-import java.util.List;
-
-/** Restore tests for {@link StreamExecIncrementalGroupAggregate}. */
-public class IncrementalGroupAggregateRestoreTest extends RestoreTestBase {
-
-public IncrementalGroupAggregateRestoreTest() {
-super(StreamExecIncrementalGroupAggregate.class);
-}
-
-@Override
-public List programs() {
-return Arrays.asList(
-
IncrementalGroupAggregateTestPrograms.INCREMENTAL_GROUP_AGGREGATE_SIMPLE,
-
IncrementalGroupAggregateTestPrograms.INCREMENTAL_GROUP_AGGREGATE_COMPLEX);
-}
-}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
deleted file mode 100644
index a1ca086d258..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.config.ExecutionConfigOptions;
-import org.apache.flink.table.api.config.OptimizerConfigOptions;
-import org.apache.flink.table.test.program.SinkTestStep;
-import org.apache.flink.table.test.program.SourceTestStep;
-import org.apache.flink.table.test.program.TableTestProgram;
-import org.apache.flink.types.Row;
-
-import java.time.Duration;
-
-/** {@link TableTestProgram} definitions f

(flink) 01/02: Revert "[FLINK-34000] Remove IncrementalGroupAgg Json Plan & IT tests"

2024-01-09 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit ba49e50e14c6d78c11ee87afbb851da471d3db68
Author: Dawid Wysakowicz 
AuthorDate: Tue Jan 9 09:52:41 2024 +0100

Revert "[FLINK-34000] Remove IncrementalGroupAgg Json Plan & IT tests"

This reverts commit 0df5ab5a3318d21e8be3ab9237900664e3741013.
---
 .../stream/IncrementalAggregateJsonPlanTest.java   | 106 
 .../IncrementalAggregateJsonPlanITCase.java|  78 +++
 .../testIncrementalAggregate.out   | 401 ++
 ...lAggregateWithSumCountDistinctAndRetraction.out | 585 +
 4 files changed, 1170 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
new file mode 100644
index 000..26dcc04f303
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import org.apache.flink.table.api.TableConfig;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.config.ExecutionConfigOptions;
+import org.apache.flink.table.api.config.OptimizerConfigOptions;
+import 
org.apache.flink.table.planner.plan.rules.physical.stream.IncrementalAggregateRule;
+import org.apache.flink.table.planner.utils.AggregatePhaseStrategy;
+import org.apache.flink.table.planner.utils.StreamTableTestUtil;
+import org.apache.flink.table.planner.utils.TableTestBase;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.time.Duration;
+
+/** Test json serialization/deserialization for incremental aggregate. */
+class IncrementalAggregateJsonPlanTest extends TableTestBase {
+
+private StreamTableTestUtil util;
+private TableEnvironment tEnv;
+
+@BeforeEach
+void setup() {
+util = streamTestUtil(TableConfig.getDefault());
+tEnv = util.getTableEnv();
+tEnv.getConfig()
+.set(
+
OptimizerConfigOptions.TABLE_OPTIMIZER_AGG_PHASE_STRATEGY,
+AggregatePhaseStrategy.TWO_PHASE.name())
+
.set(OptimizerConfigOptions.TABLE_OPTIMIZER_DISTINCT_AGG_SPLIT_ENABLED, true)
+.set(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
+.set(
+
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY,
+Duration.ofSeconds(10))
+.set(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_SIZE, 5L)
+
.set(IncrementalAggregateRule.TABLE_OPTIMIZER_INCREMENTAL_AGG_ENABLED(), true);
+
+String srcTableDdl =
+"CREATE TABLE MyTable (\n"
++ "  a bigint,\n"
++ "  b int not null,\n"
++ "  c varchar,\n"
++ "  d bigint\n"
++ ") with (\n"
++ "  'connector' = 'values',\n"
++ "  'bounded' = 'false')";
+tEnv.executeSql(srcTableDdl);
+}
+
+@Test
+void testIncrementalAggregate() {
+String sinkTableDdl =
+"CREATE TABLE MySink (\n"
++ "  a bigint,\n"
++ "  c bigint\n"
++ ") with (\n"
++ "  'connector' = 'values',\n"
++ "  'sink-insert-only' = 'false',\n"
++ "  'table-sink-class' = 'DEFAULT')";
+tEnv.executeSql(sinkTableDdl);
+util.verifyJs

(flink) branch master updated (e07545e458b -> e27a4cbc74b)

2024-01-09 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from e07545e458b [FLINK-33938][runtime-web] Correct implicit coercions in 
relational operators to adopt typescript 5.0
 new ba49e50e14c Revert "[FLINK-34000] Remove IncrementalGroupAgg Json Plan 
& IT tests"
 new e27a4cbc74b Revert "[FLINK-34000] Implement restore tests for 
IncrementalGroupAgg node"

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 java => IncrementalAggregateJsonPlanTest.java} |  66 
 .../IncrementalGroupAggregateRestoreTest.java  |  40 -
 .../IncrementalGroupAggregateTestPrograms.java | 119 --
 .../IncrementalAggregateJsonPlanITCase.java|  78 +
 .../testIncrementalAggregate.out}  | 116 --
 ...AggregateWithSumCountDistinctAndRetraction.out} | 176 +++--
 .../savepoint/_metadata| Bin 20817 -> 0 bytes
 .../savepoint/_metadata| Bin 14768 -> 0 bytes
 8 files changed, 282 insertions(+), 313 deletions(-)
 copy 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/{ChangelogSourceJsonPlanTest.java
 => IncrementalAggregateJsonPlanTest.java} (56%)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateRestoreTest.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/IncrementalAggregateJsonPlanITCase.java
 rename 
flink-table/flink-table-planner/src/test/resources/{restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-simple/plan/incremental-group-aggregate-simple.json
 => 
org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest_jsonplan/testIncrementalAggregate.out}
 (71%)
 rename 
flink-table/flink-table-planner/src/test/resources/{restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-complex/plan/incremental-group-aggregate-complex.json
 => 
org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest_jsonplan/testIncrementalAggregateWithSumCountDistinctAndRetraction.out}
 (81%)
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-complex/savepoint/_metadata
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-simple/savepoint/_metadata



(flink) 02/02: [FLINK-34000] Remove IncrementalGroupAgg Json Plan & IT tests

2024-01-08 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 0df5ab5a3318d21e8be3ab9237900664e3741013
Author: bvarghese1 
AuthorDate: Thu Jan 4 20:07:47 2024 -0800

[FLINK-34000] Remove IncrementalGroupAgg Json Plan & IT tests

- These are covered by the restore tests
---
 .../stream/IncrementalAggregateJsonPlanTest.java   | 106 
 .../IncrementalAggregateJsonPlanITCase.java|  78 ---
 .../testIncrementalAggregate.out   | 401 --
 ...lAggregateWithSumCountDistinctAndRetraction.out | 585 -
 4 files changed, 1170 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
deleted file mode 100644
index 26dcc04f303..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.api.config.ExecutionConfigOptions;
-import org.apache.flink.table.api.config.OptimizerConfigOptions;
-import 
org.apache.flink.table.planner.plan.rules.physical.stream.IncrementalAggregateRule;
-import org.apache.flink.table.planner.utils.AggregatePhaseStrategy;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.time.Duration;
-
-/** Test json serialization/deserialization for incremental aggregate. */
-class IncrementalAggregateJsonPlanTest extends TableTestBase {
-
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-tEnv.getConfig()
-.set(
-
OptimizerConfigOptions.TABLE_OPTIMIZER_AGG_PHASE_STRATEGY,
-AggregatePhaseStrategy.TWO_PHASE.name())
-
.set(OptimizerConfigOptions.TABLE_OPTIMIZER_DISTINCT_AGG_SPLIT_ENABLED, true)
-.set(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
-.set(
-
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY,
-Duration.ofSeconds(10))
-.set(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_SIZE, 5L)
-
.set(IncrementalAggregateRule.TABLE_OPTIMIZER_INCREMENTAL_AGG_ENABLED(), true);
-
-String srcTableDdl =
-"CREATE TABLE MyTable (\n"
-+ "  a bigint,\n"
-+ "  b int not null,\n"
-+ "  c varchar,\n"
-+ "  d bigint\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'bounded' = 'false')";
-tEnv.executeSql(srcTableDdl);
-}
-
-@Test
-void testIncrementalAggregate() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a bigint,\n"
-+ "  c bigint\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'sink-insert-only' = 'false',\n"
-+ "  'table-sink-class' = 'DEFAULT')";
-tEnv.executeSql(sinkTableDdl);
-util.verifyJsonPlan(
-   

(flink) 01/02: [FLINK-34000] Implement restore tests for IncrementalGroupAgg node

2024-01-08 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit df71d07188e745553b8174297ec7989f05cebf7a
Author: bvarghese1 
AuthorDate: Thu Jan 4 20:05:38 2024 -0800

[FLINK-34000] Implement restore tests for IncrementalGroupAgg node
---
 .../IncrementalGroupAggregateRestoreTest.java  |  40 ++
 .../IncrementalGroupAggregateTestPrograms.java | 119 +
 .../plan/incremental-group-aggregate-complex.json  | 573 +
 .../savepoint/_metadata| Bin 0 -> 20817 bytes
 .../plan/incremental-group-aggregate-simple.json   | 373 ++
 .../savepoint/_metadata| Bin 0 -> 14768 bytes
 6 files changed, 1105 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateRestoreTest.java
new file mode 100644
index 000..250f50a38c7
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateRestoreTest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecIncrementalGroupAggregate}. */
+public class IncrementalGroupAggregateRestoreTest extends RestoreTestBase {
+
+public IncrementalGroupAggregateRestoreTest() {
+super(StreamExecIncrementalGroupAggregate.class);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(
+
IncrementalGroupAggregateTestPrograms.INCREMENTAL_GROUP_AGGREGATE_SIMPLE,
+
IncrementalGroupAggregateTestPrograms.INCREMENTAL_GROUP_AGGREGATE_COMPLEX);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
new file mode 100644
index 000..a1ca086d258
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import org.apache.flink.table.api.config.ExecutionConfigOptions;
+import org.apache.flink.table.api.config.OptimizerConfigOptions;
+import org.apache.flink.table.test.program.SinkTestStep;
+import org.apache.flink.table.test.program.SourceTestStep;
+import org.apache.flink.table.test.program.TableTestProgram;
+import org.apache.flink.types.Row;
+
+import java.time.Duration;
+
+/** {@link TableTestProgram} definitions for testing {@link 
StreamExecGroupAggregate}. */
+public class IncrementalGroupAggregateTestPr

(flink) branch master updated (ed79a1fc312 -> 0df5ab5a331)

2024-01-08 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from ed79a1fc312 [hotfix][api] Adds @PublicEvolving to StateTtlConfig inner 
classes to remove ArchUnit exclusions
 new df71d07188e [FLINK-34000] Implement restore tests for 
IncrementalGroupAgg node
 new 0df5ab5a331 [FLINK-34000] Remove IncrementalGroupAgg Json Plan & IT 
tests

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../stream/IncrementalAggregateJsonPlanTest.java   | 106 -
 ...a => IncrementalGroupAggregateRestoreTest.java} |  12 +-
 .../IncrementalGroupAggregateTestPrograms.java | 119 ++
 .../IncrementalAggregateJsonPlanITCase.java|  78 -
 .../plan/incremental-group-aggregate-complex.json} | 176 ++---
 .../savepoint/_metadata| Bin 0 -> 20817 bytes
 .../plan/incremental-group-aggregate-simple.json}  | 116 ++
 .../savepoint/_metadata| Bin 0 -> 14768 bytes
 8 files changed, 251 insertions(+), 356 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest.java
 copy 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/{TemporalJoinRestoreTest.java
 => IncrementalGroupAggregateRestoreTest.java} (72%)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalGroupAggregateTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/IncrementalAggregateJsonPlanITCase.java
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest_jsonplan/testIncrementalAggregateWithSumCountDistinctAndRetraction.out
 => 
restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-complex/plan/incremental-group-aggregate-complex.json}
 (81%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-complex/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/IncrementalAggregateJsonPlanTest_jsonplan/testIncrementalAggregate.out
 => 
restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-simple/plan/incremental-group-aggregate-simple.json}
 (71%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-incremental-group-aggregate_1/incremental-group-aggregate-simple/savepoint/_metadata



(flink) 02/02: [FLINK-33860] Remove WindowTableFunction JsonPlan & JsonIT tests

2023-12-20 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit cabb28d25c4c58af3ee23fc4a63f9564aefd6146
Author: bvarghese1 
AuthorDate: Fri Dec 15 13:15:33 2023 -0800

[FLINK-33860] Remove WindowTableFunction JsonPlan & JsonIT tests

- These are covered by the new restore tests
---
 .../stream/WindowTableFunctionJsonPlanTest.java| 210 -
 .../jsonplan/WindowTableFunctionJsonITCase.java| 109 ---
 .../testFollowedByWindowDeduplicate.out| 584 --
 .../testFollowedByWindowJoin.out   | 842 -
 .../testFollowedByWindowRank.out   | 515 -
 .../testIndividualWindowTVF.out| 351 -
 .../testIndividualWindowTVFProcessingTime.out  | 456 ---
 7 files changed, 3067 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowTableFunctionJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowTableFunctionJsonPlanTest.java
deleted file mode 100644
index 2b9ec7ba1e4..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowTableFunctionJsonPlanTest.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-/** Test json serialization/deserialization for window table function. */
-class WindowTableFunctionJsonPlanTest extends TableTestBase {
-
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-
-String srcTable1Ddl =
-"CREATE TABLE MyTable (\n"
-+ " a INT,\n"
-+ " b BIGINT,\n"
-+ " c VARCHAR,\n"
-+ " `rowtime` AS TO_TIMESTAMP(c),\n"
-+ " proctime as PROCTIME(),\n"
-+ " WATERMARK for `rowtime` AS `rowtime` - INTERVAL 
'1' SECOND\n"
-+ ") WITH (\n"
-+ " 'connector' = 'values')\n";
-tEnv.executeSql(srcTable1Ddl);
-
-String srcTable2Ddl =
-"CREATE TABLE MyTable2 (\n"
-+ " a INT,\n"
-+ " b BIGINT,\n"
-+ " c VARCHAR,\n"
-+ " `rowtime` AS TO_TIMESTAMP(c),\n"
-+ " proctime as PROCTIME(),\n"
-+ " WATERMARK for `rowtime` AS `rowtime` - INTERVAL 
'1' SECOND\n"
-+ ") WITH (\n"
-+ " 'connector' = 'values')\n";
-tEnv.executeSql(srcTable2Ddl);
-}
-
-@Test
-void testIndividualWindowTVF() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ " window_start TIMESTAMP(3),\n"
-+ " window_end TIMESTAMP(3),\n"
-+ " a INT,\n"
-+ " b BIGINT,\n"
-+ " c VARCHAR\n"
-+ ") WITH (\n"
-+ " 'connector' = 'values')\n";
-tEnv.executeSql(sinkTableDdl);
-util.verifyJsonPlan(
-"insert into MySink select\n"
-   

(flink) branch master updated (5919251d7a9 -> cabb28d25c4)

2023-12-20 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 5919251d7a9 [FLINK-33823] Make PlannerQueryOperation SQL serializable
 new 1a8b8d512c2 [FLINK-33860] Implement restore tests for 
WindowTableFunction node
 new cabb28d25c4 [FLINK-33860] Remove WindowTableFunction JsonPlan & JsonIT 
tests

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../WindowTableFunctionEventTimeRestoreTest.java   |  46 ++
 .../stream/WindowTableFunctionJsonPlanTest.java| 210 -
 .../WindowTableFunctionProcTimeRestoreTest.java|  41 +
 .../stream/WindowTableFunctionTestPrograms.java| 348 +
 .../plan/nodes/exec/testutils/RestoreTestBase.java |   2 +
 .../jsonplan/WindowTableFunctionJsonITCase.java| 109 ---
 .../testFollowedByWindowDeduplicate.out| 584 --
 .../testFollowedByWindowJoin.out   | 842 -
 ...table-function-cumulate-tvf-agg-proc-time.json} | 365 +
 .../savepoint/_metadata| Bin 0 -> 8502 bytes
 .../window-table-function-cumulate-tvf-agg.json| 431 +++
 .../savepoint/_metadata| Bin 0 -> 12149 bytes
 .../plan/window-table-function-cumulate-tvf.json}  | 221 +++---
 .../savepoint/_metadata| Bin 0 -> 15177 bytes
 ...ndow-table-function-hop-tvf-agg-proc-time.json} | 436 +--
 .../savepoint/_metadata| Bin 0 -> 8945 bytes
 .../plan/window-table-function-hop-tvf-agg.json| 431 +++
 .../savepoint/_metadata| Bin 0 -> 12575 bytes
 .../plan/window-table-function-hop-tvf.json}   | 189 +++--
 .../savepoint/_metadata| Bin 0 -> 15785 bytes
 ...w-table-function-tumble-tvf-agg-proc-time.json} | 330 
 .../savepoint/_metadata| Bin 0 -> 8446 bytes
 .../window-table-function-tumble-tvf-agg.json} | 413 +-
 .../savepoint/_metadata| Bin 0 -> 12000 bytes
 ...table-function-tumble-tvf-negative-offset.json} | 219 +++---
 .../savepoint/_metadata| Bin 0 -> 14873 bytes
 ...table-function-tumble-tvf-positive-offset.json} | 219 +++---
 .../savepoint/_metadata| Bin 0 -> 14873 bytes
 .../plan/window-table-function-tumble-tvf.json}| 218 +++---
 .../savepoint/_metadata| Bin 0 -> 14873 bytes
 30 files changed, 2618 insertions(+), 3036 deletions(-)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowTableFunctionEventTimeRestoreTest.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowTableFunctionJsonPlanTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowTableFunctionProcTimeRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowTableFunctionTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/WindowTableFunctionJsonITCase.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowTableFunctionJsonPlanTest_jsonplan/testFollowedByWindowDeduplicate.out
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowTableFunctionJsonPlanTest_jsonplan/testFollowedByWindowJoin.out
 copy 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/WindowTableFunctionJsonPlanTest_jsonplan/testIndividualWindowTVFProcessingTime.out
 => 
restore-tests/stream-exec-window-table-function_1/window-table-function-cumulate-tvf-agg-proc-time/plan/window-table-function-cumulate-tvf-agg-proc-time.json}
 (61%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-table-function_1/window-table-function-cumulate-tvf-agg-proc-time/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-table-function_1/window-table-function-cumulate-tvf-agg/plan/window-table-function-cumulate-tvf-agg.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-table-function_1/window-tabl

(flink) branch master updated: [FLINK-33823] Make PlannerQueryOperation SQL serializable

2023-12-20 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 5919251d7a9 [FLINK-33823] Make PlannerQueryOperation SQL serializable
5919251d7a9 is described below

commit 5919251d7a94264a6a72c31de0716b3f72d65437
Author: Dawid Wysakowicz 
AuthorDate: Mon Dec 18 12:20:12 2023 +0100

[FLINK-33823] Make PlannerQueryOperation SQL serializable
---
 .../expressions/ExpressionSerializationTest.java   |  4 ++--
 .../flink/table/test/program/TableApiTestStep.java |  8 
 .../functions/BuiltInFunctionDefinitions.java  |  2 +-
 .../planner/operations/PlannerQueryOperation.java  | 16 ++-
 .../operations/SqlNodeToOperationConversion.java   | 16 ---
 .../operations/converters/SqlNodeConvertUtils.java |  3 ++-
 .../operations/converters/SqlQueryConverter.java   |  3 ++-
 .../converters/SqlReplaceTableAsConverter.java |  4 +++-
 .../table/planner/delegation/PlannerBase.scala |  4 +++-
 .../table/api/QueryOperationSqlExecutionTest.java  |  3 ++-
 .../api/QueryOperationSqlSerializationTest.java|  3 ++-
 .../table/api/QueryOperationTestPrograms.java  | 24 ++
 .../flink/table/planner/utils/TableTestBase.scala  |  7 ++-
 13 files changed, 83 insertions(+), 14 deletions(-)

diff --git 
a/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/expressions/ExpressionSerializationTest.java
 
b/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/expressions/ExpressionSerializationTest.java
index 2693cb38517..ea5d0318b55 100644
--- 
a/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/expressions/ExpressionSerializationTest.java
+++ 
b/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/expressions/ExpressionSerializationTest.java
@@ -183,10 +183,10 @@ public class ExpressionSerializationTest {
 .expectStr("OVERLAY(`f0` PLACING 'ABC' FROM 2 FOR 5)"),
 TestSpec.forExpr($("f0").substr(2))
 .withField("f0", DataTypes.STRING())
-.expectStr("SUBSTR(`f0` FROM 2)"),
+.expectStr("SUBSTR(`f0`, 2)"),
 TestSpec.forExpr($("f0").substr(2, 5))
 .withField("f0", DataTypes.STRING())
-.expectStr("SUBSTR(`f0` FROM 2 FOR 5)"),
+.expectStr("SUBSTR(`f0`, 2, 5)"),
 TestSpec.forExpr($("f0").substring(2))
 .withField("f0", DataTypes.STRING())
 .expectStr("SUBSTRING(`f0` FROM 2)"),
diff --git 
a/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableApiTestStep.java
 
b/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableApiTestStep.java
index 06c16c70931..07e147ae208 100644
--- 
a/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableApiTestStep.java
+++ 
b/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableApiTestStep.java
@@ -57,6 +57,11 @@ public class TableApiTestStep implements TestStep {
 public Table fromValues(AbstractDataType dataType, 
Object... values) {
 return env.fromValues(dataType, values);
 }
+
+@Override
+public Table sqlQuery(String query) {
+return env.sqlQuery(query);
+}
 });
 }
 
@@ -83,5 +88,8 @@ public class TableApiTestStep implements TestStep {
 
 /** See {@link TableEnvironment#fromValues(AbstractDataType, 
Object...)}. */
 Table fromValues(AbstractDataType dataType, Object... values);
+
+/** See {@link TableEnvironment#sqlQuery(String)}. */
+Table sqlQuery(String query);
 }
 }
diff --git 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
index b65afdc4284..669f4012003 100644
--- 
a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
+++ 
b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
@@ -807,7 +807,7 @@ public final class BuiltInFunctionDefinitions {
 public static final BuiltInFunctionDefinition SUBSTR =
 BuiltInFunctionDefinition.newBuilder()
 .name("substr")
-.callSyntax("SUBSTR",

(flink) branch master updated: [FLINK-33861] Implement restore tests for WindowRank node

2023-12-20 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new aa5766e257a [FLINK-33861] Implement restore tests for WindowRank node
aa5766e257a is described below

commit aa5766e257a8b40e15d08eafa1e005837694772b
Author: bvarghese1 
AuthorDate: Mon Dec 18 19:44:40 2023 -0800

[FLINK-33861] Implement restore tests for WindowRank node
---
 .../nodes/exec/stream/WindowRankRestoreTest.java   |  44 ++
 .../nodes/exec/stream/WindowRankTestPrograms.java  | 266 
 .../plan/window-rank-cumulate-tvf-min-top-n.json   | 687 +
 .../savepoint/_metadata| Bin 0 -> 21448 bytes
 .../plan/window-rank-hop-tvf-min-top-n.json| 687 +
 .../savepoint/_metadata| Bin 0 -> 21929 bytes
 .../plan/window-rank-tumble-tvf-agg-max-top-n.json | 664 
 .../savepoint/_metadata| Bin 0 -> 23994 bytes
 .../plan/window-rank-tumble-tvf-agg-min-top-n.json | 664 
 .../savepoint/_metadata| Bin 0 -> 24000 bytes
 .../plan/window-rank-tumble-tvf-max-top-n.json | 685 
 .../savepoint/_metadata| Bin 0 -> 21084 bytes
 .../plan/window-rank-tumble-tvf-min-top-n.json | 685 
 .../savepoint/_metadata| Bin 0 -> 21084 bytes
 14 files changed, 4382 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowRankRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowRankRestoreTest.java
new file mode 100644
index 000..ddc47a8f624
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowRankRestoreTest.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecWindowRank}. */
+public class WindowRankRestoreTest extends RestoreTestBase {
+
+public WindowRankRestoreTest() {
+super(StreamExecWindowRank.class);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(
+WindowRankTestPrograms.WINDOW_RANK_TUMBLE_TVF_MIN_TOP_N,
+WindowRankTestPrograms.WINDOW_RANK_TUMBLE_TVF_AGG_MIN_TOP_N,
+WindowRankTestPrograms.WINDOW_RANK_TUMBLE_TVF_MAX_TOP_N,
+WindowRankTestPrograms.WINDOW_RANK_TUMBLE_TVF_AGG_MAX_TOP_N,
+WindowRankTestPrograms.WINDOW_RANK_HOP_TVF_MIN_TOP_N,
+WindowRankTestPrograms.WINDOW_RANK_CUMULATE_TVF_MIN_TOP_N);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowRankTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowRankTestPrograms.java
new file mode 100644
index 000..bacbe648a9d
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowRankTestPrograms.java
@@ -0,0 +1,266 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *

(flink) branch master updated: [FLINK-32850][flink-runtime][JUnit5 Migration] The io.network.api package of flink-runtime module

2023-12-18 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 1c67cccd2fd [FLINK-32850][flink-runtime][JUnit5 Migration] The 
io.network.api package of flink-runtime module
1c67cccd2fd is described below

commit 1c67cccd2fdd6c674a38e0c26fe990e1dd7b62ae
Author: Jiabao Sun 
AuthorDate: Wed Oct 25 16:31:45 2023 +0800

[FLINK-32850][flink-runtime][JUnit5 Migration] The io.network.api package 
of flink-runtime module
---
 .../io/network/api/CheckpointBarrierTest.java  |  26 +--
 .../io/network/api/reader/AbstractReaderTest.java  |  89 
 .../serialization/CheckpointSerializationTest.java |  26 +--
 .../api/serialization/EventSerializerTest.java |  43 ++--
 .../network/api/serialization/PagedViewsTest.java  | 238 +
 .../SpanningRecordSerializationTest.java   |  55 +++--
 .../api/serialization/SpanningWrapperTest.java |  21 +-
 .../api/writer/BroadcastRecordWriterTest.java  |   3 +-
 .../api/writer/RecordWriterDelegateTest.java   |  61 +++---
 .../network/api/writer/SubtaskStateMapperTest.java | 169 +++
 10 files changed, 273 insertions(+), 458 deletions(-)

diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/api/CheckpointBarrierTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/api/CheckpointBarrierTest.java
index 70de3450654..9b34ee62a64 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/api/CheckpointBarrierTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/api/CheckpointBarrierTest.java
@@ -22,37 +22,29 @@ import org.apache.flink.core.memory.DataInputDeserializer;
 import org.apache.flink.core.memory.DataOutputSerializer;
 import org.apache.flink.runtime.checkpoint.CheckpointOptions;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.fail;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
 
 /** Tests for the {@link CheckpointBarrier} type. */
-public class CheckpointBarrierTest {
+class CheckpointBarrierTest {
 
 /**
  * Test serialization of the checkpoint barrier. The checkpoint barrier 
does not support its own
  * serialization, in order to be immutable.
  */
 @Test
-public void testSerialization() throws Exception {
+void testSerialization() {
 long id = Integer.MAX_VALUE + 123123L;
 long timestamp = Integer.MAX_VALUE + 1228L;
 
 CheckpointOptions options = 
CheckpointOptions.forCheckpointWithDefaultLocation();
 CheckpointBarrier barrier = new CheckpointBarrier(id, timestamp, 
options);
 
-try {
-barrier.write(new DataOutputSerializer(1024));
-fail("should throw an exception");
-} catch (UnsupportedOperationException e) {
-// expected
-}
-
-try {
-barrier.read(new DataInputDeserializer(new byte[32]));
-fail("should throw an exception");
-} catch (UnsupportedOperationException e) {
-// expected
-}
+assertThatThrownBy(() -> barrier.write(new DataOutputSerializer(1024)))
+.isInstanceOf(UnsupportedOperationException.class);
+
+assertThatThrownBy(() -> barrier.read(new DataInputDeserializer(new 
byte[32])))
+.isInstanceOf(UnsupportedOperationException.class);
 }
 }
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/api/reader/AbstractReaderTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/api/reader/AbstractReaderTest.java
index 969cae48997..32228784396 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/api/reader/AbstractReaderTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/io/network/api/reader/AbstractReaderTest.java
@@ -26,25 +26,24 @@ import 
org.apache.flink.runtime.io.network.api.EndOfSuperstepEvent;
 import org.apache.flink.runtime.io.network.partition.consumer.InputGate;
 import org.apache.flink.runtime.util.event.EventListener;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Matchers;
 
 import java.io.IOException;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
 /** Tests for the event handling behaviour. */
-public class AbstractReaderTest {
+class AbstractReaderTest {
 
 @Test
 @SuppressWarnings(&qu

(flink) branch master updated (3c86dcadf53 -> 011f7770365)

2023-12-18 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 3c86dcadf53 [FLINK-33778][table] Cleanup usage of deprecated 
TableConfig#setIdleStateRetentionTime
 new 5799d8d0622 [FLINK-33818] Implement restore tests for 
WindowDeduplicate node
 new 011f7770365 [FLINK-33818] Remove WindowDeduplicateJsonITCase test

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../WindowDeduplicateEventTimeRestoreTest.java |  49 ++
 .../exec/stream/WindowDeduplicateTestPrograms.java | 303 ++
 .../jsonplan/WindowDeduplicateJsonITCase.java  | 110 
 .../window-deduplicate-asc-cumulate-first-row.json | 669 +
 .../savepoint/_metadata| Bin 0 -> 19751 bytes
 .../plan/window-deduplicate-asc-hop-first-row.json | 669 +
 .../savepoint/_metadata| Bin 0 -> 19897 bytes
 ...ate-asc-partition-by-item-tumble-first-row.json | 668 
 .../savepoint/_metadata| Bin 0 -> 22588 bytes
 ...duplicate-asc-tumble-first-row-condition-1.json | 667 
 .../savepoint/_metadata| Bin 0 -> 19567 bytes
 ...duplicate-asc-tumble-first-row-condition-3.json | 667 
 .../savepoint/_metadata| Bin 0 -> 19567 bytes
 .../window-deduplicate-asc-tumble-first-row.json   | 667 
 .../savepoint/_metadata| Bin 0 -> 19571 bytes
 ...te-desc-partition-by-item-tumble-first-row.json | 668 
 .../savepoint/_metadata| Bin 0 -> 22592 bytes
 .../window-deduplicate-desc-tumble-last-row.json   | 667 
 .../savepoint/_metadata| Bin 0 -> 19571 bytes
 19 files changed, 5694 insertions(+), 110 deletions(-)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowDeduplicateEventTimeRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowDeduplicateTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/WindowDeduplicateJsonITCase.java
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-cumulate-first-row/plan/window-deduplicate-asc-cumulate-first-row.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-cumulate-first-row/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-hop-first-row/plan/window-deduplicate-asc-hop-first-row.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-hop-first-row/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-partition-by-item-tumble-first-row/plan/window-deduplicate-asc-partition-by-item-tumble-first-row.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-partition-by-item-tumble-first-row/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-tumble-first-row-condition-1/plan/window-deduplicate-asc-tumble-first-row-condition-1.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-tumble-first-row-condition-1/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-tumble-first-row-condition-3/plan/window-deduplicate-asc-tumble-first-row-condition-3.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-tumble-first-row-condition-3/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-deduplicate_1/window-deduplicate-asc-tumble-first-row/plan/window-deduplicate-asc-tumble-first-row.json
 create mode 1006

(flink) 02/02: [FLINK-33818] Remove WindowDeduplicateJsonITCase test

2023-12-18 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 011f777036540d0f027b04306714bf9e64003a97
Author: bvarghese1 
AuthorDate: Wed Dec 13 14:53:10 2023 -0800

[FLINK-33818] Remove WindowDeduplicateJsonITCase test

 - This is covered by the new restore tests
---
 .../jsonplan/WindowDeduplicateJsonITCase.java  | 110 -
 1 file changed, 110 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/WindowDeduplicateJsonITCase.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/WindowDeduplicateJsonITCase.java
deleted file mode 100644
index ef2cd5d58d9..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/WindowDeduplicateJsonITCase.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.runtime.stream.jsonplan;
-
-import org.apache.flink.table.planner.factories.TestValuesTableFactory;
-import org.apache.flink.table.planner.runtime.utils.TestData;
-import org.apache.flink.table.planner.utils.JavaScalaConversionUtil;
-import org.apache.flink.table.planner.utils.JsonPlanTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-
-/** Test for window deduplicate json plan. */
-class WindowDeduplicateJsonITCase extends JsonPlanTestBase {
-
-@BeforeEach
-@Override
-protected void setup() throws Exception {
-super.setup();
-createTestValuesSourceTable(
-"MyTable",
-
JavaScalaConversionUtil.toJava(TestData.windowDataWithTimestamp()),
-new String[] {
-"ts STRING",
-"`int` INT",
-"`double` DOUBLE",
-"`float` FLOAT",
-"`bigdec` DECIMAL(10, 2)",
-"`string` STRING",
-"`name` STRING",
-"`rowtime` AS TO_TIMESTAMP(`ts`)",
-"WATERMARK for `rowtime` AS `rowtime` - INTERVAL '1' 
SECOND",
-},
-new HashMap() {
-{
-put("enable-watermark-push-down", "true");
-put("failing-source", "true");
-}
-});
-}
-
-@Test
-void testEventTimeTumbleWindow() throws Exception {
-createTestValuesSinkTable(
-"MySink",
-"ts STRING",
-"`int` INT",
-"`double` DOUBLE",
-"`float` FLOAT",
-"`bigdec` DECIMAL(10, 2)",
-"`string` STRING",
-"`name` STRING",
-"`rowtime` STRING",
-"window_start TIMESTAMP(3)",
-"window_end TIMESTAMP(3)",
-"window_time TIMESTAMP(3)");
-compileSqlAndExecutePlan(
-"insert into MySink select\n"
-+ "  `ts`,\n"
-+ "  `int`,\n"
-+ "  `double`,\n"
-+ "  `float`, \n"
-+ "  `bigdec`, \n"
-+ "  `string`, \n"
-+ "  `name`, \n"
-+ "  CAST(`rowtime` AS STRING), \n"
-+ "  window_start, \n"
-+ "  window_end, \n"
-  

(flink) branch master updated (d4a3687aacd -> 20a328d80a1)

2023-12-15 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from d4a3687aacd [FLIP-321] Update the docs to add migration periods for 
deprecated APIs. (#23865)
 new 46d817d8d29 [FLINK-33767] Implement restore tests for TemporalJoin node
 new 20a328d80a1 [FLINK-33767] Deleting TemporalJoinJsonPlanTest.java and 
TemporalJoinJsonPlanITCase.java

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../flink/table/test/program/TableTestProgram.java |  22 ++
 ...TestStep.java => TemporalFunctionTestStep.java} |  44 ++--
 .../apache/flink/table/test/program/TestStep.java  |   1 +
 .../exec/stream/TemporalJoinJsonPlanTest.java  | 101 -
 ...storeTest.java => TemporalJoinRestoreTest.java} |  12 +-
 .../exec/stream/TemporalJoinTestPrograms.java  | 103 +
 .../plan/nodes/exec/testutils/RestoreTestBase.java |   3 +
 .../jsonplan/TemporalJoinJsonPlanITCase.java   | 107 --
 .../plan/temporal-join-table-join.json}| 213 +--
 .../temporal-join-table-join/savepoint/_metadata   | Bin 0 -> 14926 bytes
 .../plan/temporal-join-temporal-function.json} | 235 ++---
 .../savepoint/_metadata| Bin 0 -> 14926 bytes
 12 files changed, 450 insertions(+), 391 deletions(-)
 copy 
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/{FunctionTestStep.java
 => TemporalFunctionTestStep.java} (58%)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TemporalJoinJsonPlanTest.java
 copy 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/{TemporalSortRestoreTest.java
 => TemporalJoinRestoreTest.java} (77%)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TemporalJoinTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/TemporalJoinJsonPlanITCase.java
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/TemporalJoinJsonPlanTest_jsonplan/testTemporalTableJoin.out
 => 
restore-tests/stream-exec-temporal-join_1/temporal-join-table-join/plan/temporal-join-table-join.json}
 (69%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-temporal-join_1/temporal-join-table-join/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/TemporalJoinJsonPlanTest_jsonplan/testJoinTemporalFunction.out
 => 
restore-tests/stream-exec-temporal-join_1/temporal-join-temporal-function/plan/temporal-join-temporal-function.json}
 (66%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-temporal-join_1/temporal-join-temporal-function/savepoint/_metadata



(flink) 02/02: [FLINK-33767] Deleting TemporalJoinJsonPlanTest.java and TemporalJoinJsonPlanITCase.java

2023-12-15 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 20a328d80a1dbc50974cf3de9f4b6178246f6dee
Author: Jim Hughes 
AuthorDate: Tue Dec 12 14:04:48 2023 -0500

[FLINK-33767] Deleting TemporalJoinJsonPlanTest.java and 
TemporalJoinJsonPlanITCase.java
---
 .../exec/stream/TemporalJoinJsonPlanTest.java  | 101 -
 .../jsonplan/TemporalJoinJsonPlanITCase.java   | 107 --
 .../testJoinTemporalFunction.out   | 421 -
 .../testTemporalTableJoin.json | 421 -
 .../testTemporalTableJoin.out  | 421 -
 5 files changed, 1471 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TemporalJoinJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TemporalJoinJsonPlanTest.java
deleted file mode 100644
index da3e6eaebae..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TemporalJoinJsonPlanTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.functions.TemporalTableFunction;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import static org.apache.flink.table.api.Expressions.$;
-
-/** Test json serialization/deserialization for TemporalJoin. */
-class TemporalJoinJsonPlanTest extends TableTestBase {
-
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-
-tEnv.executeSql(
-"CREATE TABLE Orders (\n"
-+ " amount INT,\n"
-+ " currency STRING,\n"
-+ " rowtime TIMESTAMP(3),\n"
-+ " proctime AS PROCTIME(),\n"
-+ " WATERMARK FOR rowtime AS rowtime\n"
-+ ") WITH (\n"
-+ " 'connector' = 'values'\n"
-+ ")");
-tEnv.executeSql(
-"CREATE TABLE RatesHistory (\n"
-+ " currency STRING,\n"
-+ " rate INT,\n"
-+ " rowtime TIMESTAMP(3),\n"
-+ " WATERMARK FOR rowtime AS rowtime,\n"
-+ " PRIMARY KEY(currency) NOT ENFORCED\n"
-+ ") WITH (\n"
-+ " 'connector' = 'values'\n"
-+ ")");
-TemporalTableFunction ratesHistory =
-
tEnv.from("RatesHistory").createTemporalTableFunction($("rowtime"), 
$("currency"));
-tEnv.createTemporarySystemFunction("Rates", ratesHistory);
-}
-
-@Test
-void testJoinTemporalFunction() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a int\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'table-sink-class' = 'DEFAULT')";
-tEnv.executeSql(sinkTableDdl);
-util.verifyJsonPlan(
-"INSERT INTO MySink "
-+ "SELECT amount * r.rate "
-+ "FROM Orders AS o,  "
-

(flink) 01/02: [FLINK-33767] Implement restore tests for TemporalJoin node

2023-12-15 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 46d817d8d297b50fe91b5fb9471bda791a6f4319
Author: Jim Hughes 
AuthorDate: Mon Dec 11 13:26:42 2023 -0500

[FLINK-33767] Implement restore tests for TemporalJoin node

This closes #23916
---
 .../flink/table/test/program/TableTestProgram.java |  22 +
 .../test/program/TemporalFunctionTestStep.java |  67 +++
 .../apache/flink/table/test/program/TestStep.java  |   1 +
 .../nodes/exec/stream/TemporalJoinRestoreTest.java |  40 ++
 .../exec/stream/TemporalJoinTestPrograms.java  | 103 +
 .../plan/nodes/exec/testutils/RestoreTestBase.java |   3 +
 .../testTemporalTableJoin.json | 421 ++
 .../plan/temporal-join-table-join.json | 494 +
 .../temporal-join-table-join/savepoint/_metadata   | Bin 0 -> 14926 bytes
 .../plan/temporal-join-temporal-function.json  | 494 +
 .../savepoint/_metadata| Bin 0 -> 14926 bytes
 11 files changed, 1645 insertions(+)

diff --git 
a/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
 
b/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
index f5323b0..7d4c4b45eb5 100644
--- 
a/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
+++ 
b/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
@@ -20,6 +20,7 @@ package org.apache.flink.table.test.program;
 
 import org.apache.flink.configuration.ConfigOption;
 import org.apache.flink.table.api.Table;
+import org.apache.flink.table.expressions.Expression;
 import org.apache.flink.table.functions.UserDefinedFunction;
 import org.apache.flink.table.test.program.FunctionTestStep.FunctionBehavior;
 import 
org.apache.flink.table.test.program.FunctionTestStep.FunctionPersistence;
@@ -176,6 +177,14 @@ public class TableTestProgram {
 .collect(Collectors.toList());
 }
 
+/** Convenience method to avoid casting. It assumes that the order of 
steps is not important. */
+public List getSetupTemporalFunctionTestSteps() {
+return setupSteps.stream()
+.filter(s -> s.getKind() == TestKind.TEMPORAL_FUNCTION)
+.map(TemporalFunctionTestStep.class::cast)
+.collect(Collectors.toList());
+}
+
 /**
  * Convenience method to avoid boilerplate code. It assumes that only a 
single SQL statement is
  * tested.
@@ -231,6 +240,19 @@ public class TableTestProgram {
 return this;
 }
 
+/** Setup step for registering a temporary system function. */
+public Builder setupTemporarySystemTemporalTableFunction(
+String name, String table, Expression timeAttribute, 
Expression primaryKey) {
+this.setupSteps.add(
+new TemporalFunctionTestStep(
+TemporalFunctionTestStep.FunctionBehavior.SYSTEM,
+name,
+table,
+timeAttribute,
+primaryKey));
+return this;
+}
+
 /** Setup step for registering a temporary catalog function. */
 public Builder setupTemporaryCatalogFunction(
 String name, Class function) {
diff --git 
a/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TemporalFunctionTestStep.java
 
b/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TemporalFunctionTestStep.java
new file mode 100644
index 000..206f7fa38c1
--- /dev/null
+++ 
b/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TemporalFunctionTestStep.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.test.program;
+
+import org.apache.flink.table.

(flink) branch master updated (b2b8323ccd9 -> 01b3db6f8b2)

2023-12-14 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from b2b8323ccd9 [FLINK-33641][test] Suppress the 
DirectoryNotEmptyException in StreamingWithStateTestBase to prevent test 
failures (#23914)
 add b691a2ee33e [FLINK-33808] Implement restore tests for WindowJoin node
 add 01b3db6f8b2 [FLINK-33808] Remove WindowJoin Json & JsonIT tests

No new revisions were added by this update.

Summary of changes:
 .../stream/WindowJoinEventTimeRestoreTest.java |   46 +
 .../nodes/exec/stream/WindowJoinJsonPlanTest.java  |  112 --
 .../nodes/exec/stream/WindowJoinTestPrograms.java  |  442 +++
 .../stream/jsonplan/WindowJoinJsonITCase.java  |  130 --
 .../testEventTimeTumbleWindow.out  | 1266 
 .../plan/window-join-anti-tumble-event-time.json   |  751 
 .../savepoint/_metadata|  Bin 0 -> 23448 bytes
 .../plan/window-join-cumulate-event-time.json  |  774 
 .../savepoint/_metadata|  Bin 0 -> 27251 bytes
 .../window-join-full-outer-tumble-event-time.json  |  770 
 .../savepoint/_metadata|  Bin 0 -> 25893 bytes
 .../plan/window-join-hop-event-time.json   |  774 
 .../window-join-hop-event-time/savepoint/_metadata |  Bin 0 -> 28011 bytes
 .../plan/window-join-inner-tumble-event-time.json  |  770 
 .../savepoint/_metadata|  Bin 0 -> 24825 bytes
 .../plan/window-join-left-tumble-event-time.json   |  770 
 .../savepoint/_metadata|  Bin 0 -> 25281 bytes
 .../plan/window-join-right-tumble-event-time.json  |  770 
 .../savepoint/_metadata|  Bin 0 -> 25433 bytes
 .../plan/window-join-semi-tumble-event-time.json   |  751 
 .../savepoint/_metadata|  Bin 0 -> 23820 bytes
 21 files changed, 6618 insertions(+), 1508 deletions(-)
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowJoinEventTimeRestoreTest.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowJoinJsonPlanTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowJoinTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/WindowJoinJsonITCase.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/WindowJoinJsonPlanTest_jsonplan/testEventTimeTumbleWindow.out
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-anti-tumble-event-time/plan/window-join-anti-tumble-event-time.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-anti-tumble-event-time/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-cumulate-event-time/plan/window-join-cumulate-event-time.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-cumulate-event-time/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-full-outer-tumble-event-time/plan/window-join-full-outer-tumble-event-time.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-full-outer-tumble-event-time/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-hop-event-time/plan/window-join-hop-event-time.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-hop-event-time/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-inner-tumble-event-time/plan/window-join-inner-tumble-event-time.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-inner-tumble-event-time/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-window-join_1/window-join-left-tumble-event-time/plan/window-join-left-tumble-event-time.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stre

(flink) branch master updated (6334923af1e -> 3532f59cb94)

2023-12-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 6334923af1e [hotfix][docs] config key for parquet int64 option (#23909)
 add 3532f59cb94 [FLINK-33754] Serialize QueryOperations into SQL string 
(#23884)

No new revisions were added by this update.

Summary of changes:
 .../expressions/LocalReferenceExpression.java  |   6 +
 .../table/operations/AggregateQueryOperation.java  |  15 +
 .../table/operations/CalculatedQueryOperation.java |  12 +
 .../table/operations/DistinctQueryOperation.java   |   8 +
 .../table/operations/FilterQueryOperation.java |   9 +
 .../flink/table/operations/JoinQueryOperation.java |  23 +
 .../flink/table/operations/OperationUtils.java |  36 +-
 .../table/operations/ProjectQueryOperation.java|  33 +
 .../flink/table/operations/SetQueryOperation.java  |  18 +
 .../flink/table/operations/SortQueryOperation.java |  26 +
 .../table/operations/SourceQueryOperation.java |  20 +
 .../table/operations/ValuesQueryOperation.java |  21 +
 .../operations/WindowAggregateQueryOperation.java  |  46 ++
 .../flink/table/operations/OperationUtilsTest.java |  64 ++
 .../flink/table/test/program/SinkTestStep.java |  12 +-
 .../flink/table/test/program/TableApiTestStep.java |  87 +++
 .../flink/table/test/program/TableTestProgram.java |  18 +-
 .../apache/flink/table/test/program/TestStep.java  |   1 +
 .../functions/BuiltInFunctionDefinitions.java  |   4 +
 .../flink/table/functions/SqlCallSyntax.java   |   2 +
 .../table/api/QueryOperationSqlExecutionTest.java  | 145 +
 .../api/QueryOperationSqlSerializationTest.java| 106 
 .../table/api/QueryOperationTestPrograms.java  | 692 +
 23 files changed, 1395 insertions(+), 9 deletions(-)
 create mode 100644 
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/operations/OperationUtilsTest.java
 create mode 100644 
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableApiTestStep.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlExecutionTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlSerializationTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationTestPrograms.java



(flink) 02/02: [FLINK-33421] Remove UnionJsonPlanTest and UnionJsonPlanITCase

2023-12-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit f2460363303e49621589b1cb2b45347c8ee5dd4f
Author: bvarghese1 
AuthorDate: Thu Nov 2 12:35:37 2023 -0700

[FLINK-33421] Remove UnionJsonPlanTest and UnionJsonPlanITCase

- These have been covered by the new restore tests
- Related commit: 960a692488614b2c9214ed6ff8b19713f5f31879
---
 .../plan/nodes/exec/stream/UnionJsonPlanTest.java  | 74 --
 .../stream/jsonplan/UnionJsonPlanITCase.java   | 66 ---
 2 files changed, 140 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionJsonPlanTest.java
deleted file mode 100644
index bbb1b041c23..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionJsonPlanTest.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-/** Test json serialization/deserialization for union. */
-class UnionJsonPlanTest extends TableTestBase {
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-
-String srcTableDdl =
-"CREATE TABLE MyTable (\n"
-+ "  a bigint,\n"
-+ "  b int not null,\n"
-+ "  c varchar,\n"
-+ "  d timestamp(3)\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'bounded' = 'false')";
-tEnv.executeSql(srcTableDdl);
-
-String srcTableDdl2 =
-"CREATE TABLE MyTable2 (\n"
-+ "  d bigint,\n"
-+ "  e int not null\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'bounded' = 'false')";
-tEnv.executeSql(srcTableDdl2);
-}
-
-@Test
-void testUnion() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a bigint,\n"
-+ "  b int\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'table-sink-class' = 'DEFAULT')";
-tEnv.executeSql(sinkTableDdl);
-util.verifyJsonPlan(
-"insert into MySink select * from"
-+ " (select a, b from MyTable) union all (select d, e 
from MyTable2)");
-}
-}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/UnionJsonPlanITCase.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/UnionJsonPlanITCase.java
deleted file mode 100644
index 2145dd0179a..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/UnionJsonPlanITCase.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information

(flink) 01/02: [FLINK-33441] Implement restore tests for ExecUnion node

2023-12-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit f362dcc9d4e14cfa30a27881158ec9431dd9e274
Author: bvarghese1 
AuthorDate: Thu Nov 2 12:33:56 2023 -0700

[FLINK-33441] Implement restore tests for ExecUnion node
---
 .../nodes/exec/testutils/UnionRestoreTest.java |  41 
 .../nodes/exec/testutils/UnionTestPrograms.java| 158 ++
 .../plan/union-all-two-sources.json| 145 +
 .../union-all-two-sources/savepoint/_metadata  | Bin 0 -> 7626 bytes
 .../plan/union-all-with-filter.json| 241 +
 .../union-all-with-filter/savepoint/_metadata  | Bin 0 -> 8740 bytes
 .../union-two-sources/plan/union-two-sources.json  | 199 +
 .../union-two-sources/savepoint/_metadata  | Bin 0 -> 12445 bytes
 8 files changed, 784 insertions(+)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionRestoreTest.java
new file mode 100644
index 000..ca27c175fc6
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionRestoreTest.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.testutils;
+
+import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecUnion;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecUnion}. */
+public class UnionRestoreTest extends RestoreTestBase {
+
+public UnionRestoreTest() {
+super(StreamExecUnion.class);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(
+UnionTestPrograms.UNION_TWO_SOURCES,
+UnionTestPrograms.UNION_ALL_TWO_SOURCES,
+UnionTestPrograms.UNION_ALL_WITH_FILTER);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionTestPrograms.java
new file mode 100644
index 000..562199588b5
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionTestPrograms.java
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.testutils;
+
+import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecUnion;
+import org.apache.flink.table.test.program.SinkTestStep;
+import org.apache.flink.table.test.program.SourceTestStep;
+import org.apache.flink.table.test.program.TableTestProgram;
+import org.apache.flink.types.Row;
+
+import java.time.LocalDateTime;
+
+/** {@link TableTestProgram} definitions for testing {@link StreamExecUnion}. 
*/
+public class UnionTestPrograms {
+
+static final TableTestProgram UNION_TWO_SOURCES =
+TableTestProgram.of(&

(flink) branch master updated (c49ab9ae429 -> f2460363303)

2023-12-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from c49ab9ae429 [FLINK-33647] Remove LookupJoin JsonPlan & JsonIT tests
 new f362dcc9d4e [FLINK-33441] Implement restore tests for ExecUnion node
 new f2460363303 [FLINK-33421] Remove UnionJsonPlanTest and 
UnionJsonPlanITCase

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../plan/nodes/exec/stream/UnionJsonPlanTest.java  |  74 ---
 .../nodes/exec/testutils/UnionRestoreTest.java |  41 
 .../nodes/exec/testutils/UnionTestPrograms.java| 158 ++
 .../stream/jsonplan/UnionJsonPlanITCase.java   |  66 --
 .../plan/union-all-two-sources.json| 145 +
 .../union-all-two-sources/savepoint/_metadata  | Bin 0 -> 7626 bytes
 .../plan/union-all-with-filter.json| 241 +
 .../union-all-with-filter/savepoint/_metadata  | Bin 0 -> 8740 bytes
 .../union-two-sources/plan/union-two-sources.json  | 199 +
 .../union-two-sources/savepoint/_metadata  | Bin 0 -> 12445 bytes
 10 files changed, 784 insertions(+), 140 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/UnionJsonPlanTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/UnionTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/UnionJsonPlanITCase.java
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-union_1/union-all-two-sources/plan/union-all-two-sources.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-union_1/union-all-two-sources/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-union_1/union-all-with-filter/plan/union-all-with-filter.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-union_1/union-all-with-filter/savepoint/_metadata
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-union_1/union-two-sources/plan/union-two-sources.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-union_1/union-two-sources/savepoint/_metadata



(flink) 01/02: [FLINK-33647] Implement restore tests for LookupJoin node

2023-12-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 7e4a2f3fe6c558a08ec68dcb8e21ba43e85f3cf1
Author: bvarghese1 
AuthorDate: Mon Nov 27 10:47:56 2023 -0800

[FLINK-33647] Implement restore tests for LookupJoin node
---
 .../nodes/exec/stream/LookupJoinRestoreTest.java   |  46 +++
 .../nodes/exec/stream/LookupJoinTestPrograms.java  | 388 +
 .../plan/nodes/exec/testutils/RestoreTestBase.java |   3 -
 .../plan/lookup-join-async-hint.json   | 251 +
 .../lookup-join-async-hint/savepoint/_metadata | Bin 0 -> 15900 bytes
 .../plan/lookup-join-filter-pushdown.json  | 264 ++
 .../savepoint/_metadata| Bin 0 -> 13735 bytes
 .../plan/lookup-join-left-join.json| 279 +++
 .../lookup-join-left-join/savepoint/_metadata  | Bin 0 -> 14069 bytes
 .../plan/lookup-join-post-filter.json  | 266 ++
 .../lookup-join-post-filter/savepoint/_metadata| Bin 0 -> 14021 bytes
 .../plan/lookup-join-pre-filter.json   | 279 +++
 .../lookup-join-pre-filter/savepoint/_metadata | Bin 0 -> 14005 bytes
 .../plan/lookup-join-pre-post-filter.json  | 281 +++
 .../savepoint/_metadata| Bin 0 -> 13957 bytes
 .../plan/lookup-join-project-pushdown.json | 248 +
 .../savepoint/_metadata| Bin 0 -> 13139 bytes
 .../plan/lookup-join-retry-hint.json   | 252 +
 .../lookup-join-retry-hint/savepoint/_metadata | Bin 0 -> 14135 bytes
 19 files changed, 2554 insertions(+), 3 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinRestoreTest.java
new file mode 100644
index 000..42ca645162b
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinRestoreTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecLookupJoin}. */
+public class LookupJoinRestoreTest extends RestoreTestBase {
+
+public LookupJoinRestoreTest() {
+super(StreamExecLookupJoin.class);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(
+LookupJoinTestPrograms.LOOKUP_JOIN_PROJECT_PUSHDOWN,
+LookupJoinTestPrograms.LOOKUP_JOIN_FILTER_PUSHDOWN,
+LookupJoinTestPrograms.LOOKUP_JOIN_LEFT_JOIN,
+LookupJoinTestPrograms.LOOKUP_JOIN_PRE_FILTER,
+LookupJoinTestPrograms.LOOKUP_JOIN_POST_FILTER,
+LookupJoinTestPrograms.LOOKUP_JOIN_PRE_POST_FILTER,
+LookupJoinTestPrograms.LOOKUP_JOIN_ASYNC_HINT,
+LookupJoinTestPrograms.LOOKUP_JOIN_RETRY_HINT);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinTestPrograms.java
new file mode 100644
index 000..9b5c18b3f98
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinTestPrograms.java
@@ -0,0 +1,388 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * 

(flink) branch master updated (59c42d96062 -> c49ab9ae429)

2023-12-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 59c42d96062 [FLINK-25857] Add committer metrics to track the status of 
committables
 new 7e4a2f3fe6c [FLINK-33647] Implement restore tests for LookupJoin node
 new c49ab9ae429 [FLINK-33647] Remove LookupJoin JsonPlan & JsonIT tests

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../nodes/exec/stream/LookupJoinJsonPlanTest.java  | 273 ---
 .../nodes/exec/stream/LookupJoinRestoreTest.java   |  46 ++
 .../nodes/exec/stream/LookupJoinTestPrograms.java  | 388 +++
 .../plan/nodes/exec/testutils/RestoreTestBase.java |   3 -
 .../stream/jsonplan/LookupJoinJsonPlanITCase.java  | 113 -
 .../testAggAndLeftJoinWithTryResolveMode.out   | 465 --
 .../testJoinTemporalTable.out  | 477 ---
 .../testJoinTemporalTableWithAsyncHint.out | 477 ---
 .../testJoinTemporalTableWithAsyncHint2.out| 477 ---
 .../testJoinTemporalTableWithAsyncRetryHint.out| 483 ---
 .../testJoinTemporalTableWithAsyncRetryHint2.out   | 483 ---
 ...testJoinTemporalTableWithProjectionPushDown.out | 466 --
 .../testJoinTemporalTableWithRetryHint.out | 483 ---
 ...eftJoinTemporalTableWithMultiJoinConditions.out | 526 -
 .../testLeftJoinTemporalTableWithPostFilter.out| 501 
 .../testLeftJoinTemporalTableWithPreFilter.out | 492 ---
 .../plan/lookup-join-async-hint.json   | 251 ++
 .../lookup-join-async-hint/savepoint/_metadata | Bin 0 -> 15900 bytes
 .../plan/lookup-join-filter-pushdown.json  | 264 +++
 .../savepoint/_metadata| Bin 0 -> 13735 bytes
 .../plan/lookup-join-left-join.json| 279 +++
 .../lookup-join-left-join/savepoint/_metadata  | Bin 0 -> 14069 bytes
 .../plan/lookup-join-post-filter.json  | 266 +++
 .../lookup-join-post-filter/savepoint/_metadata| Bin 0 -> 14021 bytes
 .../plan/lookup-join-pre-filter.json   | 279 +++
 .../lookup-join-pre-filter/savepoint/_metadata | Bin 0 -> 14005 bytes
 .../plan/lookup-join-pre-post-filter.json  | 281 +++
 .../savepoint/_metadata| Bin 0 -> 13957 bytes
 .../plan/lookup-join-project-pushdown.json | 248 ++
 .../savepoint/_metadata| Bin 0 -> 13139 bytes
 .../plan/lookup-join-retry-hint.json   | 252 ++
 .../lookup-join-retry-hint/savepoint/_metadata | Bin 0 -> 14135 bytes
 32 files changed, 2554 insertions(+), 5719 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinJsonPlanTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/LookupJoinJsonPlanITCase.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinJsonPlanTest_jsonplan/testAggAndLeftJoinWithTryResolveMode.out
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinJsonPlanTest_jsonplan/testJoinTemporalTable.out
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinJsonPlanTest_jsonplan/testJoinTemporalTableWithAsyncHint.out
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinJsonPlanTest_jsonplan/testJoinTemporalTableWithAsyncHint2.out
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinJsonPlanTest_jsonplan/testJoinTemporalTableWithAsyncRetryHint.out
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/LookupJoinJsonPlanTest_jsonplan/testJoinTemporalTableWithAsyncRetryHint2.out
 delete mode 100644 
flink-table/flink-table-planner/src/test/resources/org/a

(flink) branch master updated (560ada286d3 -> be5cf3c9d67)

2023-12-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 560ada286d3 [FLINK-33470] Moving (Join|Expand)TestPrograms and 
RestoreTests to the right package.
 new 314b418efea [FLINK-33757] Implement restore tests for Rank node
 new be5cf3c9d67 [FLINK-33757] Deleting RankJsonPlanTest.java and 
RankJsonPlanITCase.java

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../plan/nodes/exec/stream/RankJsonPlanTest.java   |  71 -
 .../plan/nodes/exec/stream/RankRestoreTest.java|  42 +++
 .../plan/nodes/exec/stream/RankTestPrograms.java   | 187 +
 .../stream/jsonplan/RankJsonPlanITCase.java|  75 -
 .../rank-n-test/plan/rank-n-test.json  | 301 +
 .../rank-n-test/savepoint/_metadata| Bin 0 -> 12161 bytes
 .../plan/rank-test-append-fast-strategy.json}  | 111 +++-
 .../savepoint/_metadata| Bin 0 -> 13768 bytes
 .../plan/rank-test-retract-strategy.json}  | 133 -
 .../rank-test-retract-strategy/savepoint/_metadata | Bin 0 -> 18147 bytes
 .../plan/rank-test-update-fast-strategy.json}  | 186 +++--
 .../savepoint/_metadata| Bin 0 -> 22387 bytes
 12 files changed, 728 insertions(+), 378 deletions(-)
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankJsonPlanTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankRestoreTest.java
 create mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankTestPrograms.java
 delete mode 100644 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/RankJsonPlanITCase.java
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-rank_1/rank-n-test/plan/rank-n-test.json
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-rank_1/rank-n-test/savepoint/_metadata
 copy 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/RankJsonPlanTest_jsonplan/testRank.out
 => 
restore-tests/stream-exec-rank_1/rank-test-append-fast-strategy/plan/rank-test-append-fast-strategy.json}
 (65%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-rank_1/rank-test-append-fast-strategy/savepoint/_metadata
 copy 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/RankJsonPlanTest_jsonplan/testRank.out
 => 
restore-tests/stream-exec-rank_1/rank-test-retract-strategy/plan/rank-test-retract-strategy.json}
 (59%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-rank_1/rank-test-retract-strategy/savepoint/_metadata
 rename 
flink-table/flink-table-planner/src/test/resources/{org/apache/flink/table/planner/plan/nodes/exec/stream/RankJsonPlanTest_jsonplan/testRank.out
 => 
restore-tests/stream-exec-rank_1/rank-test-update-fast-strategy/plan/rank-test-update-fast-strategy.json}
 (50%)
 create mode 100644 
flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-rank_1/rank-test-update-fast-strategy/savepoint/_metadata



(flink) 01/02: [FLINK-33757] Implement restore tests for Rank node

2023-12-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 314b418efea8f35d39b05abef5361289b054b6a7
Author: Jim Hughes 
AuthorDate: Thu Dec 7 08:12:42 2023 -0500

[FLINK-33757] Implement restore tests for Rank node
---
 .../plan/nodes/exec/stream/RankRestoreTest.java|  42 +++
 .../plan/nodes/exec/stream/RankTestPrograms.java   | 187 +
 .../rank-n-test/plan/rank-n-test.json  | 301 +
 .../rank-n-test/savepoint/_metadata| Bin 0 -> 12161 bytes
 .../plan/rank-test-append-fast-strategy.json}  | 111 +++-
 .../savepoint/_metadata| Bin 0 -> 13768 bytes
 .../plan/rank-test-retract-strategy.json}  | 133 -
 .../rank-test-retract-strategy/savepoint/_metadata | Bin 0 -> 18147 bytes
 .../plan/rank-test-update-fast-strategy.json}  | 186 +++--
 .../savepoint/_metadata| Bin 0 -> 22387 bytes
 10 files changed, 728 insertions(+), 232 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankRestoreTest.java
new file mode 100644
index 000..cb76c1ca723
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankRestoreTest.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
+import org.apache.flink.table.test.program.TableTestProgram;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** Restore tests for {@link StreamExecRank}. */
+public class RankRestoreTest extends RestoreTestBase {
+
+public RankRestoreTest() {
+super(StreamExecRank.class);
+}
+
+@Override
+public List programs() {
+return Arrays.asList(
+RankTestPrograms.RANK_TEST_APPEND_FAST_STRATEGY,
+RankTestPrograms.RANK_TEST_RETRACT_STRATEGY,
+RankTestPrograms.RANK_TEST_UPDATE_FAST_STRATEGY,
+RankTestPrograms.RANK_N_TEST);
+}
+}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankTestPrograms.java
new file mode 100644
index 000..979e2adbb52
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankTestPrograms.java
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
+
+import org.apache.flink.table.test.program.SinkTestStep;
+import org.apache.flink.table.test.program.SourceTestStep;
+import org.apache.flink.table.test.program.TableTestProgram;
+import org.apache.flink.types.Row;
+
+import static 
org.apache.flink.table.factories.TestFormatFactory.CHANGELOG_MODE;
+
+/** {@link TableTestProgram} definitions for testing {@link StreamExecRank

(flink) 02/02: [FLINK-33757] Deleting RankJsonPlanTest.java and RankJsonPlanITCase.java

2023-12-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit be5cf3c9d679ff141a1041774070c66b46b866a7
Author: Jim Hughes 
AuthorDate: Thu Dec 7 08:12:55 2023 -0500

[FLINK-33757] Deleting RankJsonPlanTest.java and RankJsonPlanITCase.java
---
 .../plan/nodes/exec/stream/RankJsonPlanTest.java   | 71 
 .../stream/jsonplan/RankJsonPlanITCase.java| 75 --
 2 files changed, 146 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankJsonPlanTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankJsonPlanTest.java
deleted file mode 100644
index ba4bdd7c6f1..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/RankJsonPlanTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.api.TableConfig;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.planner.utils.StreamTableTestUtil;
-import org.apache.flink.table.planner.utils.TableTestBase;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-/** Test json serialization for rank. */
-class RankJsonPlanTest extends TableTestBase {
-
-private StreamTableTestUtil util;
-private TableEnvironment tEnv;
-
-@BeforeEach
-void setup() {
-util = streamTestUtil(TableConfig.getDefault());
-tEnv = util.getTableEnv();
-
-String srcTableDdl =
-"CREATE TABLE MyTable (\n"
-+ "  a bigint,\n"
-+ "  b int not null,\n"
-+ "  c varchar,\n"
-+ "  d timestamp(3)\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'bounded' = 'false')";
-tEnv.executeSql(srcTableDdl);
-}
-
-@Test
-void testRank() {
-String sinkTableDdl =
-"CREATE TABLE MySink (\n"
-+ "  a bigint,\n"
-+ "  b bigint\n"
-+ ") with (\n"
-+ "  'connector' = 'values',\n"
-+ "  'sink-insert-only' = 'false',\n"
-+ "  'table-sink-class' = 'DEFAULT')";
-tEnv.executeSql(sinkTableDdl);
-String sql =
-"insert into MySink SELECT a, row_num\n"
-+ "FROM (\n"
-+ "  SELECT a, ROW_NUMBER() OVER (PARTITION BY b ORDER 
BY a) as row_num\n"
-+ "  FROM MyTable)\n"
-+ "WHERE row_num <= a";
-util.verifyJsonPlan(sql);
-}
-}
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/RankJsonPlanITCase.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/RankJsonPlanITCase.java
deleted file mode 100644
index b6ce7733ebf..000
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/RankJsonPlanITCase.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.a

(flink) branch master updated: [FLINK-33470] Moving (Join|Expand)TestPrograms and RestoreTests to the right package.

2023-12-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 560ada286d3 [FLINK-33470] Moving (Join|Expand)TestPrograms and 
RestoreTests to the right package.
560ada286d3 is described below

commit 560ada286d367e5f74eda3e9d0dd4459d4aab2cd
Author: Jim Hughes 
AuthorDate: Thu Dec 7 14:58:13 2023 -0500

[FLINK-33470] Moving (Join|Expand)TestPrograms and RestoreTests to the 
right package.
---
 .../plan/nodes/exec/{testutils => stream}/ExpandRestoreTest.java  | 4 ++--
 .../plan/nodes/exec/{testutils => stream}/ExpandTestPrograms.java | 3 +--
 .../plan/nodes/exec/{testutils => stream}/JoinRestoreTest.java| 4 ++--
 .../plan/nodes/exec/{testutils => stream}/JoinTestPrograms.java   | 3 +--
 4 files changed, 6 insertions(+), 8 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/ExpandRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ExpandRestoreTest.java
similarity index 89%
rename from 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/ExpandRestoreTest.java
rename to 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ExpandRestoreTest.java
index 5ffe8a01792..b26edde9c40 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/ExpandRestoreTest.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ExpandRestoreTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package org.apache.flink.table.planner.plan.nodes.exec.testutils;
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
 
-import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecExpand;
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
 import org.apache.flink.table.test.program.TableTestProgram;
 
 import java.util.Collections;
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/ExpandTestPrograms.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ExpandTestPrograms.java
similarity index 96%
rename from 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/ExpandTestPrograms.java
rename to 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ExpandTestPrograms.java
index 216dce2b523..83bd19c1314 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/ExpandTestPrograms.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ExpandTestPrograms.java
@@ -16,10 +16,9 @@
  * limitations under the License.
  */
 
-package org.apache.flink.table.planner.plan.nodes.exec.testutils;
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
 
 import org.apache.flink.table.api.config.OptimizerConfigOptions;
-import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecExpand;
 import 
org.apache.flink.table.planner.plan.rules.physical.stream.IncrementalAggregateRule;
 import org.apache.flink.table.planner.utils.AggregatePhaseStrategy;
 import org.apache.flink.table.test.program.SinkTestStep;
diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/JoinRestoreTest.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/JoinRestoreTest.java
similarity index 92%
rename from 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/JoinRestoreTest.java
rename to 
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/JoinRestoreTest.java
index 5e21db37ca8..7be9c8574e7 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/JoinRestoreTest.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/JoinRestoreTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package org.apache.flink.table.planner.plan.nodes.exec.testutils;
+package org.apache.flink.table.planner.plan.nodes.exec.stream;
 
-import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecJoin;
+import 
org.apache.flink.table.planner.plan.nodes.exec.testutils.RestoreTestBase;
 import org.apache.flink.table.test.program.TableTestProgram;
 
 import java

(flink) 02/02: [FLINK-33777] Fix ParquetTimestampITCase>FsStreamingSinkITCaseBase failing

2023-12-08 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 0e515dce78832dbbbf5fce9c8cdd113bbb62cdf0
Author: Sergey Nuyanzin 
AuthorDate: Fri Dec 8 08:51:38 2023 +0100

[FLINK-33777] Fix ParquetTimestampITCase>FsStreamingSinkITCaseBase failing
---
 .../apache/flink/formats/parquet/ParquetTimestampITCase.java |  4 ++--
 .../planner/runtime/stream/FsStreamingSinkITCaseBase.scala   | 12 +---
 2 files changed, 3 insertions(+), 13 deletions(-)

diff --git 
a/flink-formats/flink-parquet/src/test/java/org/apache/flink/formats/parquet/ParquetTimestampITCase.java
 
b/flink-formats/flink-parquet/src/test/java/org/apache/flink/formats/parquet/ParquetTimestampITCase.java
index ed267d329d0..90c130e22f5 100644
--- 
a/flink-formats/flink-parquet/src/test/java/org/apache/flink/formats/parquet/ParquetTimestampITCase.java
+++ 
b/flink-formats/flink-parquet/src/test/java/org/apache/flink/formats/parquet/ParquetTimestampITCase.java
@@ -181,7 +181,7 @@ public class ParquetTimestampITCase extends 
FsStreamingSinkITCaseBase {
 Types.STRING,
 Types.STRING
 },
-new String[] {"a", "b", "c", "d", 
"e"})));
+new String[] {"f0", "f1", "f2", "f3", 
"f4"})));
 }
 
 @Override
@@ -198,7 +198,7 @@ public class ParquetTimestampITCase extends 
FsStreamingSinkITCaseBase {
 Types.STRING,
 Types.STRING
 },
-new String[] {"a", "b", "c", "d", 
"e"})));
+new String[] {"f0", "f1", "f2", "f3", 
"f4"})));
 }
 
 @Override
diff --git 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/stream/FsStreamingSinkITCaseBase.scala
 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/stream/FsStreamingSinkITCaseBase.scala
index c4f3c2c1764..d772d8ace77 100644
--- 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/stream/FsStreamingSinkITCaseBase.scala
+++ 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/stream/FsStreamingSinkITCaseBase.scala
@@ -25,8 +25,6 @@ import org.apache.flink.streaming.api.CheckpointingMode
 import org.apache.flink.streaming.api.functions.source.SourceFunction
 import org.apache.flink.streaming.api.scala.DataStream
 import org.apache.flink.streaming.api.watermark.Watermark
-import org.apache.flink.table.api.{DataTypes, Schema}
-import org.apache.flink.table.api.Expressions.$
 import org.apache.flink.table.data.TimestampData
 import org.apache.flink.table.planner.runtime.utils.{StreamingTestBase, 
TestSinkUtil}
 import org.apache.flink.testutils.junit.utils.TempDirUtils
@@ -176,15 +174,7 @@ abstract class FsStreamingSinkITCaseBase extends 
StreamingTestBase {
 
 tEnv.createTemporaryView(
   "my_table",
-  dataStream,
-  Schema
-.newBuilder()
-.column("f0", DataTypes.INT())
-.column("f1", DataTypes.STRING())
-.column("f2", DataTypes.STRING())
-.column("f3", DataTypes.STRING())
-.column("f4", DataTypes.STRING())
-.build()
+  dataStream
 )
 
 val ddl: String = getDDL(



(flink) branch master updated (fcc7bc2e5e5 -> 0e515dce788)

2023-12-08 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from fcc7bc2e5e5 [FLINK-33523][table-planner] Revert [FLINK-31835] Fix the 
array type that can't be converted from the external primitive array"
 new ca72f6302bd [FLINK-33480] Fix GroupAggregate restore tests
 new 0e515dce788 [FLINK-33777] Fix 
ParquetTimestampITCase>FsStreamingSinkITCaseBase failing

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../apache/flink/formats/parquet/ParquetTimestampITCase.java |  4 ++--
 .../plan/group-aggregate-distinct-mini-batch.json|  4 ++--
 .../plan/group-aggregate-distinct.json   |  4 ++--
 .../plan/group-aggregate-simple-mini-batch.json  |  4 ++--
 .../group-aggregate-simple/plan/group-aggregate-simple.json  |  4 ++--
 .../plan/group-aggregate-udf-with-merge-mini-batch.json  |  4 ++--
 .../plan/group-aggregate-udf-with-merge.json |  4 ++--
 .../plan/group-aggregate-udf-without-merge-mini-batch.json   |  4 ++--
 .../plan/group-aggregate-udf-without-merge.json  |  4 ++--
 .../planner/runtime/stream/FsStreamingSinkITCaseBase.scala   | 12 +---
 10 files changed, 19 insertions(+), 29 deletions(-)



(flink) 01/02: [FLINK-33480] Fix GroupAggregate restore tests

2023-12-08 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit ca72f6302bd3d760d2b47cd8b1b8f2e48705117c
Author: bvarghese1 
AuthorDate: Thu Dec 7 12:40:01 2023 -0800

[FLINK-33480] Fix GroupAggregate restore tests

- This commit ca1c7ce48127472a7c7965099f8a7227549f09df has updated the logic
  to generate the primary key name while generating the compiled plan
- This commit updates/regenerates the plans to have updated primary key 
names
- Related commit: 193b1c68976cdfbd66147278f23d7d427d9b5562
---
 .../plan/group-aggregate-distinct-mini-batch.json | 4 ++--
 .../group-aggregate-distinct/plan/group-aggregate-distinct.json   | 4 ++--
 .../plan/group-aggregate-simple-mini-batch.json   | 4 ++--
 .../group-aggregate-simple/plan/group-aggregate-simple.json   | 4 ++--
 .../plan/group-aggregate-udf-with-merge-mini-batch.json   | 4 ++--
 .../plan/group-aggregate-udf-with-merge.json  | 4 ++--
 .../plan/group-aggregate-udf-without-merge-mini-batch.json| 4 ++--
 .../plan/group-aggregate-udf-without-merge.json   | 4 ++--
 8 files changed, 16 insertions(+), 16 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-distinct-mini-batch/plan/group-aggregate-distinct-mini-batch.json
 
b/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-distinct-mini-batch/plan/group-aggregate-distinct-mini-batch.json
index 8dceb51f726..f52bc4055c5 100644
--- 
a/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-distinct-mini-batch/plan/group-aggregate-distinct-mini-batch.json
+++ 
b/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-distinct-mini-batch/plan/group-aggregate-distinct-mini-batch.json
@@ -547,7 +547,7 @@
 } ],
 "watermarkSpecs" : [ ],
 "primaryKey" : {
-  "name" : "PK_132",
+  "name" : "PK_e",
   "type" : "PRIMARY_KEY",
   "columns" : [ "e" ]
 }
@@ -618,4 +618,4 @@
 },
 "shuffleMode" : "PIPELINED"
   } ]
-}
\ No newline at end of file
+}
diff --git 
a/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-distinct/plan/group-aggregate-distinct.json
 
b/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-distinct/plan/group-aggregate-distinct.json
index b1801290fb3..eb0f5e19536 100644
--- 
a/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-distinct/plan/group-aggregate-distinct.json
+++ 
b/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-distinct/plan/group-aggregate-distinct.json
@@ -299,7 +299,7 @@
 } ],
 "watermarkSpecs" : [ ],
 "primaryKey" : {
-  "name" : "PK_132",
+  "name" : "PK_e",
   "type" : "PRIMARY_KEY",
   "columns" : [ "e" ]
 }
@@ -356,4 +356,4 @@
 },
 "shuffleMode" : "PIPELINED"
   } ]
-}
\ No newline at end of file
+}
diff --git 
a/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-simple-mini-batch/plan/group-aggregate-simple-mini-batch.json
 
b/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-simple-mini-batch/plan/group-aggregate-simple-mini-batch.json
index 6ada7bb67e8..5e56eb25708 100644
--- 
a/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-simple-mini-batch/plan/group-aggregate-simple-mini-batch.json
+++ 
b/flink-table/flink-table-planner/src/test/resources/restore-tests/stream-exec-group-aggregate_1/group-aggregate-simple-mini-batch/plan/group-aggregate-simple-mini-batch.json
@@ -277,7 +277,7 @@
 } ],
 "watermarkSpecs" : [ ],
 "primaryKey" : {
-  "name" : "PK_129",
+  "name" : "PK_b",
   "type" : "PRIMARY_KEY",
   "columns" : [ "b" ]
 }
@@ -348,4 +348,4 @@
 },
 "shuffleMode" : "PIPELINED"
   } ]
-}
\ No new

  1   2   3   4   5   6   7   8   9   10   >