[flink-table-store] branch master updated: [hotfix] Make Predicates Serializable

2022-01-25 Thread lzljs3620320
This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-table-store.git


The following commit(s) were added to refs/heads/master by this push:
 new e7eed4d  [hotfix] Make Predicates Serializable
e7eed4d is described below

commit e7eed4d9f25068b76112296f4717a2194486c510
Author: JingsongLi 
AuthorDate: Wed Jan 26 13:39:30 2022 +0800

[hotfix] Make Predicates Serializable
---
 .../main/java/org/apache/flink/table/store/file/predicate/And.java| 2 ++
 .../main/java/org/apache/flink/table/store/file/predicate/Equal.java  | 2 ++
 .../org/apache/flink/table/store/file/predicate/GreaterOrEqual.java   | 2 ++
 .../java/org/apache/flink/table/store/file/predicate/GreaterThan.java | 2 ++
 .../java/org/apache/flink/table/store/file/predicate/IsNotNull.java   | 2 ++
 .../main/java/org/apache/flink/table/store/file/predicate/IsNull.java | 2 ++
 .../java/org/apache/flink/table/store/file/predicate/LessOrEqual.java | 2 ++
 .../java/org/apache/flink/table/store/file/predicate/LessThan.java| 2 ++
 .../java/org/apache/flink/table/store/file/predicate/NotEqual.java| 2 ++
 .../src/main/java/org/apache/flink/table/store/file/predicate/Or.java | 2 ++
 .../java/org/apache/flink/table/store/file/predicate/Predicate.java   | 4 +++-
 11 files changed, 23 insertions(+), 1 deletion(-)

diff --git 
a/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/And.java
 
b/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/And.java
index b926902..5c2817a 100644
--- 
a/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/And.java
+++ 
b/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/And.java
@@ -23,6 +23,8 @@ import org.apache.flink.table.store.file.stats.FieldStats;
 /** A {@link Predicate} to eval and. */
 public class And implements Predicate {
 
+    private static final long serialVersionUID = 1L;
+
     private final Predicate predicate1;
     private final Predicate predicate2;
 
diff --git 
a/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/Equal.java
 
b/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/Equal.java
index cfec2e1..56fc91d 100644
--- 
a/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/Equal.java
+++ 
b/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/Equal.java
@@ -25,6 +25,8 @@ import static org.apache.flink.util.Preconditions.checkNotNull;
 /** A {@link Predicate} to eval equal. */
 public class Equal implements Predicate {
 
+    private static final long serialVersionUID = 1L;
+
     private final int index;
 
     private final Literal literal;
diff --git 
a/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/GreaterOrEqual.java
 
b/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/GreaterOrEqual.java
index 9188b5d..79a0cdc 100644
--- 
a/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/GreaterOrEqual.java
+++ 
b/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/GreaterOrEqual.java
@@ -25,6 +25,8 @@ import static org.apache.flink.util.Preconditions.checkNotNull;
 /** A {@link Predicate} to eval greater or equal. */
 public class GreaterOrEqual implements Predicate {
 
+    private static final long serialVersionUID = 1L;
+
     private final int index;
 
     private final Literal literal;
diff --git 
a/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/GreaterThan.java
 
b/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/GreaterThan.java
index 25d24ac..81ddc26 100644
--- 
a/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/GreaterThan.java
+++ 
b/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/GreaterThan.java
@@ -25,6 +25,8 @@ import static org.apache.flink.util.Preconditions.checkNotNull;
 /** A {@link Predicate} to eval greater. */
 public class GreaterThan implements Predicate {
 
+    private static final long serialVersionUID = 1L;
+
     private final int index;
 
     private final Literal literal;
diff --git 
a/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/IsNotNull.java
 
b/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/IsNotNull.java
index 7cb9a3e..9de7d5b 100644
--- 
a/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/IsNotNull.java
+++ 
b/flink-table-store-core/src/main/java/org/apache/flink/table/store/file/predicate/IsNotNull.java
@@ -23,6 +23,8 @@ import org.apache.flink.table.store.file.stats.FieldStats;
 /** A {@link Predicate} to eval is not null. */
 public class IsNotNull implements Predicate {
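All eleven files follow the pattern shown above: `Predicate` itself is changed to extend `java.io.Serializable` (the 4-line change in Predicate.java), and every implementation pins a `serialVersionUID`, so predicates can be shipped to distributed workers without serialization-compatibility surprises. A minimal sketch of the resulting shape, with a simplified `test` method standing in for the real interface's `FieldStats`-based evaluation:

```java
import java.io.Serializable;

// Sketch only: the real interface in flink-table-store evaluates against
// field statistics (FieldStats) for data skipping; test(Object[]) here is
// a simplified stand-in.
interface Predicate extends Serializable {
    boolean test(Object[] row);
}

// Each implementation pins a serialVersionUID so Java serialization stays
// stable across recompiles of the class.
class And implements Predicate {

    private static final long serialVersionUID = 1L;

    private final Predicate predicate1;
    private final Predicate predicate2;

    And(Predicate predicate1, Predicate predicate2) {
        this.predicate1 = predicate1;
        this.predicate2 = predicate2;
    }

    @Override
    public boolean test(Object[] row) {
        return predicate1.test(row) && predicate2.test(row);
    }
}
```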

[flink] branch master updated (b608a51 -> 873c481)

2022-01-25 Thread lzljs3620320
This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from b608a51  [FLINK-18356][tests] Disable fork-reuse for table-planner
 add 873c481  [hotfix][filesystem] Fix the typo in InProgressFileWriter

No new revisions were added by this update.

Summary of changes:
 .../streaming/api/functions/sink/filesystem/InProgressFileWriter.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)


[flink] branch master updated (589a566 -> b608a51)

2022-01-25 Thread gaoyunhaii
This is an automated email from the ASF dual-hosted git repository.

gaoyunhaii pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 589a566  [FLINK-21789][network] Make 
FileChannelManagerImpl#getNextPathNum select data directories fairly
 add b608a51  [FLINK-18356][tests] Disable fork-reuse for table-planner

No new revisions were added by this update.

Summary of changes:
 flink-table/flink-table-planner/pom.xml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)


[flink] branch master updated (30d128d5 -> 589a566)

2022-01-25 Thread yingjie
This is an automated email from the ASF dual-hosted git repository.

yingjie pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 30d128d5 [FLINK-21790][network] Shuffle data directories to make 
directory selection of different TaskManagers fairer
 add 589a566  [FLINK-21789][network] Make 
FileChannelManagerImpl#getNextPathNum select data directories fairly

No new revisions were added by this update.

Summary of changes:
 .../runtime/io/disk/FileChannelManagerImpl.java| 13 ++--
 .../io/disk/FileChannelManagerImplTest.java| 38 ++
 2 files changed, 41 insertions(+), 10 deletions(-)
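Only the summary is included above; a minimal sketch of the fairness idea (hypothetical class, not the actual FileChannelManagerImpl code): a shared counter cycles through all temp directories instead of repeatedly favoring the same starting index.

```java
import java.util.concurrent.atomic.AtomicLong;

// Hypothetical sketch of fair round-robin directory selection: successive
// calls spread files evenly over all configured temp directories.
class FairPathSelector {

    private final String[] paths;
    private final AtomicLong nextPath = new AtomicLong(0);

    FairPathSelector(String[] paths) {
        this.paths = paths;
    }

    int getNextPathNum() {
        // getAndIncrement() is atomic, so concurrent callers still cycle
        // through the directories without skewing toward any single one.
        return (int) (nextPath.getAndIncrement() % paths.length);
    }
}
```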


[flink] branch master updated (19eb5f3 -> 30d128d5)

2022-01-25 Thread yingjie
This is an automated email from the ASF dual-hosted git repository.

yingjie pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 19eb5f3  [hotfix][fs][gs] Update serializer version.
 add 30d128d5 [FLINK-21790][network] Shuffle data directories to make 
directory selection of different TaskManagers fairer

No new revisions were added by this update.

Summary of changes:
 .../runtime/taskmanager/NettyShuffleEnvironmentConfiguration.java | 8 +++-
 1 file changed, 7 insertions(+), 1 deletion(-)
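A companion sketch of the shuffling idea from the commit title (hypothetical helper, not the NettyShuffleEnvironmentConfiguration code): randomizing the configured order once per process decorrelates the directory choice of different TaskManagers on the same machine.

```java
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Hypothetical sketch: shuffle the configured data directories once at
// startup so TaskManagers sharing a host don't all begin with directory 0.
final class DataDirectories {

    static String[] shuffled(String[] configuredDirs) {
        List<String> dirs = Arrays.asList(configuredDirs.clone());
        Collections.shuffle(dirs); // in-place shuffle of the cloned backing array
        return dirs.toArray(new String[0]);
    }
}
```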


[flink] branch master updated (7962a5d -> 19eb5f3)

2022-01-25 Thread xtsong
This is an automated email from the ASF dual-hosted git repository.

xtsong pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 7962a5d  [FLINK-25668][runtime] Support to compute network memory for 
dynamic graph.
 new d1b6cf6  [hotfix][dist] Include flink-gs-fs-hadoop into flink-dist
 new 19eb5f3  [hotfix][fs][gs] Update serializer version.

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 flink-dist/pom.xml  | 17 +
 flink-dist/src/main/assemblies/opt.xml  |  7 +++
 .../fs/gs/writer/GSCommitRecoverableSerializer.java |  4 ++--
 .../fs/gs/writer/GSResumeRecoverableSerializer.java |  4 ++--
 4 files changed, 28 insertions(+), 4 deletions(-)


[flink] 02/02: [hotfix][fs][gs] Update serializer version.

2022-01-25 Thread xtsong
This is an automated email from the ASF dual-hosted git repository.

xtsong pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 19eb5f3e5d05745fe4423c165224fb35365f8172
Author: Xintong Song 
AuthorDate: Tue Jan 25 10:40:55 2022 +0800

[hotfix][fs][gs] Update serializer version.

The purpose of this change is to prevent serialization/deserialization problems
between the PR and the final merged versions, as the PR had been open for a
long time and we noticed it already had many users before being merged.

This commit should change nothing for new users of the officially merged
version.

See the following discussion for more details:
https://github.com/apache/flink/pull/15599#issuecomment-1017633539

This closes #18409
---
 .../org/apache/flink/fs/gs/writer/GSCommitRecoverableSerializer.java  | 4 ++--
 .../org/apache/flink/fs/gs/writer/GSResumeRecoverableSerializer.java  | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git 
a/flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/writer/GSCommitRecoverableSerializer.java
 
b/flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/writer/GSCommitRecoverableSerializer.java
index b92c49fd..58f1120 100644
--- 
a/flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/writer/GSCommitRecoverableSerializer.java
+++ 
b/flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/writer/GSCommitRecoverableSerializer.java
@@ -40,7 +40,7 @@ class GSCommitRecoverableSerializer implements SimpleVersionedSerializer<GSCommitRecoverable>
-        Preconditions.checkArgument(version >= 0);
+        Preconditions.checkArgument(version > 0);
         Preconditions.checkNotNull(serialized);
 
         // ensure this serializer can deserialize data with this version
diff --git 
a/flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/writer/GSResumeRecoverableSerializer.java
 
b/flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/writer/GSResumeRecoverableSerializer.java
index d3120c4..3341aa4 100644
--- 
a/flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/writer/GSResumeRecoverableSerializer.java
+++ 
b/flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/writer/GSResumeRecoverableSerializer.java
@@ -37,7 +37,7 @@ class GSResumeRecoverableSerializer implements SimpleVersionedSerializer<GSResumeRecoverable>
-        Preconditions.checkArgument(version >= 0);
+        Preconditions.checkArgument(version > 0);
         Preconditions.checkNotNull(serialized);
 
         // ensure this serializer can deserialize data with this version
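For context, a minimal sketch of the stricter guard in a `SimpleVersionedSerializer` (the recoverable type and payload handling are placeholders; only the `version > 0` check mirrors the patch):

```java
import org.apache.flink.core.io.SimpleVersionedSerializer;
import org.apache.flink.util.Preconditions;

import java.io.IOException;

// Placeholder standing in for GSCommitRecoverable / GSResumeRecoverable.
class Recoverable {}

class RecoverableSerializer implements SimpleVersionedSerializer<Recoverable> {

    @Override
    public int getVersion() {
        // Official versions start at 1; version 0 was only ever produced
        // by pre-merge builds of the PR.
        return 1;
    }

    @Override
    public byte[] serialize(Recoverable obj) throws IOException {
        return new byte[0]; // payload encoding elided
    }

    @Override
    public Recoverable deserialize(int version, byte[] serialized) throws IOException {
        // The patch tightens ">= 0" to "> 0": state written with version 0
        // is now rejected explicitly instead of being accepted silently.
        Preconditions.checkArgument(version > 0);
        Preconditions.checkNotNull(serialized);
        return new Recoverable();
    }
}
```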


[flink] 01/02: [hotfix][dist] Include flink-gs-fs-hadoop into flink-dist

2022-01-25 Thread xtsong
This is an automated email from the ASF dual-hosted git repository.

xtsong pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit d1b6cf652def2141507a50758ea1de258f2e1cc7
Author: Xintong Song 
AuthorDate: Thu Jan 20 13:17:53 2022 +0800

[hotfix][dist] Include flink-gs-fs-hadoop into flink-dist
---
 flink-dist/pom.xml | 17 +
 flink-dist/src/main/assemblies/opt.xml |  7 +++
 2 files changed, 24 insertions(+)

diff --git a/flink-dist/pom.xml b/flink-dist/pom.xml
index f949105..5a55d8f 100644
--- a/flink-dist/pom.xml
+++ b/flink-dist/pom.xml
@@ -423,6 +423,23 @@ under the License.
 
         <dependency>
             <groupId>org.apache.flink</groupId>
+            <artifactId>flink-gs-fs-hadoop</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>io.grpc</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.flink</groupId>
             <artifactId>flink-queryable-state-runtime</artifactId>
             <version>${project.version}</version>
             <scope>provided</scope>
diff --git a/flink-dist/src/main/assemblies/opt.xml 
b/flink-dist/src/main/assemblies/opt.xml
index 8320948..364151b 100644
--- a/flink-dist/src/main/assemblies/opt.xml
+++ b/flink-dist/src/main/assemblies/opt.xml
@@ -102,6 +102,13 @@
             <fileMode>0644</fileMode>
         </file>
 
+        <file>
+            <source>../flink-filesystems/flink-gs-fs-hadoop/target/flink-gs-fs-hadoop-${project.version}.jar</source>
+            <outputDirectory>opt/</outputDirectory>
+            <destName>flink-gs-fs-hadoop-${project.version}.jar</destName>
+            <fileMode>0644</fileMode>
+        </file>
+
         <file>
             <source>../flink-queryable-state/flink-queryable-state-runtime/target/flink-queryable-state-runtime-${project.version}.jar</source>


svn commit: r52271 - /dev/flink/flink-statefun-3.2.0-rc1/

2022-01-25 Thread trohrmann
Author: trohrmann
Date: Tue Jan 25 21:47:45 2022
New Revision: 52271

Log:
Apache Flink Stateful Functions, version 3.2.0, release candidate 1

Modified:
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.asc
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.sha512

dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl

dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.asc

dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.sha512
dev/flink/flink-statefun-3.2.0-rc1/flink-statefun-3.2.0-src.tgz
dev/flink/flink-statefun-3.2.0-rc1/flink-statefun-3.2.0-src.tgz.asc
dev/flink/flink-statefun-3.2.0-rc1/flink-statefun-3.2.0-src.tgz.sha512

Modified: dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz
==
Binary files - no diff available.

Modified: 
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.asc
==
--- dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.asc 
(original)
+++ dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.asc 
Tue Jan 25 21:47:45 2022
@@ -1,16 +1,16 @@
 -BEGIN PGP SIGNATURE-
 
-iQIzBAABCAAdFiEE4c0KvyqNHOfSD0No5lOO1ZnjUBMFAmHwIhwACgkQ5lOO1Znj
-UBPZqg//QRw+2Ea5fiBF51aAsT6iEB8A/L5GgGuRfyy0Nk6FQkGav+dpaOZef81I
-IMWqtiYCghytQ2+OQjiSNfQj25U/XnQOd/xCdhM75iJI+d+2DRzK/vFe5c3EeqPc
-NccIqfu6c4xzkzPF4ApGmyYIj2qJ93SNuUlkvva3f2urTFALPhOLjkIpcvapl6f4
-pA+iOKIRFZjBcMhmGFrQgoQSmxttF+bU2QSx6jmioFWNmugIXRwSVHvHdVbCBjuy
-KRt+qNjWjF5N+EPQBBxUYWKDMoebn3kAa2AHWtZmGj3N1+JsJJcYVlgRe0FwFs34
-hWBqVedDjuJ+Vm+O+RrBqylZSzi2aQclm1HW6CKI3y8M8pivaBelQmYqz8p2o8B8
-dSAyhZn0zhikWiBEjanxh2azVHYYz3oyNABIkwsZe+So7vly+BOeK9k1l7rsNM/o
-mshiV8KAHUy3xisZJXeh6tEeC4Ij2n1aM5fkWbOzathjojjYib0odWBRd3c/qRvV
-WSZeKXssSO8cHtOe86pZpEU1Ds4WcA4CoEOV+ANpIt+b+foyP1YrtWSVqgiTVXLM
-zdgPRf5D8MS8yrOaOA/WEfw4nMdCPizmjod4BiNWpZ0GFGh4wHzp9HJtk5Q+1TPn
-yXLeLdX4NMkvmUPunpkGcyTMfFBIBypHGJ1mqO0nvO7k2Su4lTg=
-=GQhN
+iQIzBAABCAAdFiEEuUmfpp7/Xe7rw8H1un5Bh8b3PYIFAmHwb24ACgkQun5Bh8b3
+PYI9yRAAgXIeuU805XhXDMQCtQsxHEBp1rfW3o3qWRIKJR2YzRyi+ZQ4wb1LWsD3
+6+0VdbtYFeML+FAwJjd0tqX3KgOfRO3qTxEB+fNE86humfpne2Xuj1NVvuk8FjEV
+q7rbeOoMH4Ne069onJhDnTT7zM9w2Hp1M48rQN9mvt2t8LdlPwpKndxsDYXKeVxX
+fwHyvnksnb+w5lAE5Ol5Ve5oCE3LbpnVA0q8WIBqKtB9ZUArFheTyJl7vbGEiNIt
+9PkUDjwwvqv8zgGv64rChARpj/+OZpL17TenMH2+ffJ+4TC2rkzFHnutRq9E3vTK
+VX5fcuCJenRdoD0YFH0InAatUQGH46yZ1ZbrDERVQn7MuWkF4g6hfhxty8Mq1U8P
+ouz6EBPe8zFACbg20Jfz7Oi/CO2jirb9MTdFmVI5WrMzseQVhqyzJk72PYnTcboE
+m16iI2E2vXs0D2mehnaHuYqFuypa3ONuqUKw95WuOor+S+2F5OxLf3OnNunLTKpc
+Tf5LHSQ480kfyiiuW99F5I/t3N3DMss6puTfhruyvMXf2Y/HF8bPHmOf8w7UW20W
+S2KsKrFLfOESDKkZZl2Xm4pr55ThgFB691JLjIeAamgaBW7owHwNMd7hqAVd2Sk8
+FNKTRGkO14silDix2u6mDZ9SqGGaeohwjPkAOqEqTieEabQIajc=
+=yr8s
 -END PGP SIGNATURE-

Modified: 
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.sha512
==
--- 
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.sha512 
(original)
+++ 
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.sha512 
Tue Jan 25 21:47:45 2022
@@ -1 +1 @@
-66d76089b2149c780715305a457f106d9112ed00c6940fa41d5f31ebebc7e0785dc6c34f9bb86ee9076f9798bd7cb9e9e0aaa28646ebbb152e6930476d9909af
  apache-flink-statefun-3.2.0.tar.gz
+c966a8d19d31ae36ff947ba1dd7578b2d2a30c66d5a03cfd9c69f23af1939702b98e8045d0ecf83e5556a5364688ece7b868795550d77b5d711e69cc93d48547
  apache-flink-statefun-3.2.0.tar.gz

Modified: 
dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl
==
Binary files - no diff available.

Modified: 
dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.asc
==
--- 
dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.asc
 (original)
+++ 
dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.asc
 Tue Jan 25 21:47:45 2022
@@ -1,16 +1,16 @@
 -BEGIN PGP SIGNATURE-
 
-iQIzBAABCAAdFiEE4c0KvyqNHOfSD0No5lOO1ZnjUBMFAmHwIhwACgkQ5lOO1Znj
-UBMMeBAAgG9NxWFIrKlQgHEDZ3SauC7CpjtyV6cwH0x8NJ05LhUqXstuNGLsfDei
-t3iRhkCFLLXC1rlt4JRYfnl9YBZyxLdWu56AHnp7nzo6skB4ps4GAq2bQdr5i45k
-9KiGn0bzrvwV5jtC59xA7Bbru1TNNDOetk7ruY0rTXK5/sLLWMvbfuFJcxf4Rho3
-0ZkrHB3vjy5IJ4Vc+1oeUjqRIbBNuShch9G7uvaPWnlpLuaryU+KgNaXkVXbnLMq
-lIwtnY5KxQ30FiTfkW7snDNzbY8IxCCIqurwxWZGumTDnU+tZGf82pW1kI2RJsgN
-ZlHDSSPjNIcphkMFff1s5KlB4bCPoFJtLlAT77Ri0AQ3ZcK4QDV8jYr7sjPhyOfS
-PtIsG2c5WJVkSuQe7uPmQ3Napo/HshvjlO/5HlT++Vhim7Ak+ix67PA7UENvQ1dy
-eA1YlfwCFlchG5Z0XC3xLRDaZbITAKz9

svn commit: r52270 - /release/flink/KEYS

2022-01-25 Thread knaufk
Author: knaufk
Date: Tue Jan 25 21:40:52 2022
New Revision: 52270

Log:
Add knaufk's signing key to Apache Flink's KEYS file

Modified:
release/flink/KEYS

Modified: release/flink/KEYS
==
--- release/flink/KEYS (original)
+++ release/flink/KEYS Tue Jan 25 21:40:52 2022
@@ -2530,3 +2530,62 @@ f5hnC+DqlvCuLrAAVjXutdVt9A/yNMkgQkrQ9iYJ
 tYcvi9ykRdAT6B6ydkNAeT/EBDrPeSBgnKXOTv8C1jL8lM/EP28w6Aq8XyReIM0=
 =mAWr
 -END PGP PUBLIC KEY BLOCK-
+pub   rsa4096 2022-01-24 [SC]
+      CCFA852FD039380AB3EC36D08C3FB007FE60DEFA
+uid           [ultimate] Konstantin Knauf (CODE SIGNING KEY) <knaufk@apache.org>
+sig 3         8C3FB007FE60DEFA 2022-01-24  Konstantin Knauf (CODE SIGNING KEY) <knaufk@apache.org>
+sub   rsa4096 2022-01-24 [E]
+sig           8C3FB007FE60DEFA 2022-01-24  Konstantin Knauf (CODE SIGNING KEY) <knaufk@apache.org>
+
+-BEGIN PGP PUBLIC KEY BLOCK-
+
+mQINBGHusUoBEACuFZ+IvB/k6TZYcU0C1rETPkwewXurb4FJEWL8qm4Q7WmWu4E5
+WyHedtSIAgHpk0qSOJ3l5yh1g5uABiup/XcxopSCNTELh49hvIo/73igFzbbdYYf
+HhT04xvqpb4sb9ErYv0ZWhI4GPrTopAZSIdjAxa5ye3zRQEsWfninDH19xWlFEwi
+q1fGqZwuH2BdzMKAPOrg8CtcND5UyLopmyIOHhrcCstdLeglL6KTEAdpLcbIl9k8
+ZbQVMvI2VY0opcBUFGDWFoSm7yJD3kPapeuR8F27Jz6USV4vf1YV48rsU423N6mW
+qKAH5Z50Pc5pfsWlyIhWzdHpoYTlgrnsYASRXQuofIklnqzjPYRI1gVyCxPKlew3
+ONHyTHJhkWNEkMHOcUfIPA/GDtwB5X4YE9xS7U+yj8fvZFPfHkvtKZCpjq7RL5Bv
+rBRAl564ch95W+e66o1uqVGPMbLhtNUHXNZ6CcDhDrumLiubFXb+OppSug3dil8K
+fqWGaW7cDPEfcTfb6bnST4EsGCZvJbU82N3386jMsPcZzwJFQtlUmuEPfzzEDUgX
+wqoxkzqZcpLLM97e+4xM43Ilgad8R1NX2Qfus0SwtiED1KtZy0JKCwa8rK/q65pX
+8wFKu62QKiGEoYTI7GHWp8Sl9uC1xm6QOuYAejp8qwxbH+ThGv6H8MKuiQARAQAB
+tDdLb25zdGFudGluIEtuYXVmIChDT0RFIFNJR05JTkcgS0VZKSA8a25hdWZrQGFw
+YWNoZS5vcmc+iQJOBBMBCAA4FiEEzPqFL9A5OAqz7DbQjD+wB/5g3voFAmHusUoC
+GwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQjD+wB/5g3vr+eBAAg5UtOn4p
+ier9cKiZSNJSvAg5fJEurfXiJtEqZZNkpFENruuAif6RjnYfNMdy1kf1mrOyn7lS
+Kv3G/fXJlL2fw4oCMpkn99FJppzbFawmqJr3wQuoZ0/h++dJZB9Gu6v2hF4JvHOM
+FGfF9rS7u1M35PSp+Ulott/T60NOq92PJBuM4hzRkHCcYJF1/IOuzEyXaMQmlU7i
+ZeaEwfjTSiiQN89JYNxUz3wsFX2E1xhQhzf0cM/annu0jG1Bd2ydCRepBHOFXcS+
+aTou2leXIeLBNvU9/8rhEzRyWDsIzJK5pn0UDIFN08P9at/fzZV5GaB+iGh/wVYF
+nO+BFoh42FlD8oah8YpG3Xx3lB2XZltUehYdh5thl0JKoRsnv3M5MDRJF3AjDD2L
+kanY8HICnTX0czfwf7WpKn3O9nEXO40yRAEVRV+5zlgex2YkZmG3wrIKwJYMYvCH
+Wf5oaNgsai6iqVaCvVoYZMh+kZyeu/zLceu4xP/KrZij/PIQuOqUGtpCAVhguyOb
+Lvf9n/nR0unU+sOn5tJEu0FUhVeiwIr7o+/+puGvDv7Bbi1qoVDi93ePmOIZ3n+j
+nEPIY8ITgiEENYc5kl+0vHCnhDAIkLqhuwJsW303fGvYkGkNNsBYpEQo4dBklFsa
+rUwI5qorZ3ynmQWFK+dUdOpZjpw4MEnguwm5Ag0EYe6xSgEQAP1hMceScMQWtMoz
+7nIe9AJOr3VrTBm+LDJW/LVXbDq/Co+v3S4GPmqnkfKIOd1iDiqnCAjL8REK4N8S
+ad0Vkk1BFr0q+bl5/yBNioGswlMKNV32b9nqQK14gqC17S+BItbTMPuQQMkk1TE7
+VLsWy9yz0X1Id3RFAi0JaKD+Jtjho8sSZIIpFkJ8x1k3cWrk2jmcGcHm1w2uCYWg
+eEcmleApqeLLpVxEW9DQ9SPZgcUkZuNcLrP78J2eIwig668ysZB3GZYdskGFS0/P
+RJkE52NTB4pPI6F+KzDn3CeYaMZAtpTTROSJof5cR/NcdfPFCJ+bwF7/+wCZ7hr3
+39HfBweD4WyJHBrhgIw+QufUaAm8hKEanNVf+iSQ3czJHv/iDYIAzSMQvvR/G8a1
+wSX0xE5xBXbdey5AB97cQJsP6rPbJ7CXhuf0B4afDq/F/Z0wJDoC0/mG65g3P4N1
+NbwQvXIjwLhBHA0VMnF+IFpQZNH51KE7YvxC09kd/jF2v1wcQ7AxAcHlOjaM2NAh
+xy3ngASCldnJpaFsDiV1mkZLX3Kwl5+TzJqLPR7fNZdD/v/cZRLx6nhQvlOgj7t0
+JyU/yq9MqaJB3/3c6ZtLx8kx6XdTOu2s+gEspZSbBO+L1cXn06oNAvDjdfksN299
+dfwhGeZUZEfseql6LYWuCXkyLi9HABEBAAGJAjYEGAEIACAWIQTM+oUv0Dk4CrPs
+NtCMP7AH/mDe+gUCYe6xSgIbDAAKCRCMP7AH/mDe+tHKEACBf6bdlSuheNqvFA+1
+EAq5mw0VpMQm9R/iJTXp3/8qPXy2sG0J0SLgdw4mF886GOXlSSB2LwRjvqnm6VTY
+Fu9GRm4Ckunk2by8HPKrnoxzAkvYouA/W5y8PC+hsv0asITaMIGq4ouRpYTwwVOq
+os2vcFtJ0pKO9941qAKN/Djwyr0Dn7rNlsuvIF2RSfOdrg+GXGCMx4oni9O6dumi
+OH8uiE5l65yQCRJdlUUIi7aNUnYzhMJ7zE7w9BImTHPHSDi7+MHicsWyYCz7n78w
+rI+iJUXeGNNd4d28LjomCXLtQQn5b9YVzkZdjHg76SNN2dRWp6I9XdY03Tsr17Cs
+6YnM0jdROYdXmwO8+g3LPFWtU6RM3r8ZO/lDi6mOX0mgdDhO7GXclQ0QN1VBsC3j
+jZiApw1NSi3+eYF/fN8O1XZvIZVPGGxZhOBn5zOOGS9n/neYJeOzvLl+mAR8mHMP
+aqiAE1p6Awe4pE3ci0XNI0efld9ih3viTqfdgFEx2XkUN9tcoqjGj+MwfGlRlUIn
+SDy+CiMvakm6JpwZmaNcQo8gZEFbnbZXFer0eRvTd+GwJpIingYEe3gwDtNlPgUc
+yqLDKueNnlzzybSLll20sjps1k2UAiLGCPdqX9NPduB4oxONuBhH+vhiodbBqFAl
+ylriNSVnTevZXjp2uOOo86BeOg==
+=3UZf
+-END PGP PUBLIC KEY BLOCK-




[flink] branch master updated: [FLINK-25668][runtime] Support to compute network memory for dynamic graph.

2022-01-25 Thread zhuzh
This is an automated email from the ASF dual-hosted git repository.

zhuzh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 7962a5d  [FLINK-25668][runtime] Support to compute network memory for 
dynamic graph.
7962a5d is described below

commit 7962a5d9c47f49f435f822a1af2c4141c42a849b
Author: Lijie Wang 
AuthorDate: Tue Dec 14 22:04:20 2021 +0800

[FLINK-25668][runtime] Support to compute network memory for dynamic graph.

This closes #18376.
---
 .../runtime/deployment/SubpartitionIndexRange.java |   4 +
 .../TaskDeploymentDescriptorFactory.java   |  24 ++-
 .../executiongraph/DefaultExecutionGraph.java  |  20 ++
 .../runtime/executiongraph/IntermediateResult.java |   2 +-
 .../flink/runtime/scheduler/DefaultScheduler.java  |  11 -
 .../SsgNetworkMemoryCalculationUtils.java  |  63 +-
 .../executiongraph/ExecutionJobVertexTest.java |   4 +-
 .../IntermediateResultPartitionTest.java   |   2 +-
 .../SsgNetworkMemoryCalculationUtilsTest.java  | 228 +
 9 files changed, 291 insertions(+), 67 deletions(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/deployment/SubpartitionIndexRange.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/deployment/SubpartitionIndexRange.java
index 1fb1d52..19484a6 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/deployment/SubpartitionIndexRange.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/deployment/SubpartitionIndexRange.java
@@ -43,6 +43,10 @@ public class SubpartitionIndexRange implements Serializable {
         return endIndex;
     }
 
+    public int size() {
+        return endIndex - startIndex + 1;
+    }
+
     @Override
     public String toString() {
         return String.format("[%d, %d]", startIndex, endIndex);
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/deployment/TaskDeploymentDescriptorFactory.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/deployment/TaskDeploymentDescriptorFactory.java
index 528a954..bd0f5b3 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/deployment/TaskDeploymentDescriptorFactory.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/deployment/TaskDeploymentDescriptorFactory.java
@@ -128,17 +128,9 @@ public class TaskDeploymentDescriptorFactory {
         IntermediateResultPartition resultPartition =
                 resultPartitionRetriever.apply(consumedPartitionGroup.getFirst());
 
-        int numConsumers = resultPartition.getConsumerVertexGroup().size();
         IntermediateResult consumedIntermediateResult = resultPartition.getIntermediateResult();
-        int consumerIndex = subtaskIndex % numConsumers;
-        int numSubpartitions = resultPartition.getNumberOfSubpartitions();
         SubpartitionIndexRange consumedSubpartitionRange =
-                computeConsumedSubpartitionRange(
-                        consumerIndex,
-                        numConsumers,
-                        numSubpartitions,
-                        consumedIntermediateResult.getProducer().getGraph().isDynamic(),
-                        consumedIntermediateResult.isBroadcast());
+                computeConsumedSubpartitionRange(resultPartition, subtaskIndex);
 
         IntermediateDataSetID resultId = consumedIntermediateResult.getId();
         ResultPartitionType partitionType = consumedIntermediateResult.getResultType();
@@ -155,6 +147,20 @@ public class TaskDeploymentDescriptorFactory {
         return inputGates;
     }
 
+    public static SubpartitionIndexRange computeConsumedSubpartitionRange(
+            IntermediateResultPartition resultPartition, int consumerSubtaskIndex) {
+        int numConsumers = resultPartition.getConsumerVertexGroup().size();
+        int consumerIndex = consumerSubtaskIndex % numConsumers;
+        IntermediateResult consumedIntermediateResult = resultPartition.getIntermediateResult();
+        int numSubpartitions = resultPartition.getNumberOfSubpartitions();
+        return computeConsumedSubpartitionRange(
+                consumerIndex,
+                numConsumers,
+                numSubpartitions,
+                consumedIntermediateResult.getProducer().getGraph().isDynamic(),
+                consumedIntermediateResult.isBroadcast());
+    }
+
     @VisibleForTesting
     static SubpartitionIndexRange computeConsumedSubpartitionRange(
             int consumerIndex,
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/DefaultExecutionGraph.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/DefaultExecutionGraph.java
index 8796bc7..77f0e19 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/DefaultExecutionGraph.java
+++ 
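The refactoring above centralizes the range computation so that scheduling and network-memory calculation (SsgNetworkMemoryCalculationUtils) share one code path. A simplified, self-contained sketch of an even range split for the static, non-broadcast case (one plausible partitioning; the actual Flink logic also handles dynamic graphs and broadcast results):

```java
import java.util.Arrays;

// Simplified stand-in for computeConsumedSubpartitionRange: consumer i of n
// reads a contiguous slice of the producer's subpartitions.
final class SubpartitionRanges {

    static int[] range(int consumerIndex, int numConsumers, int numSubpartitions) {
        int start = consumerIndex * numSubpartitions / numConsumers;
        int end = (consumerIndex + 1) * numSubpartitions / numConsumers - 1;
        return new int[] {start, end}; // inclusive, like SubpartitionIndexRange
    }

    public static void main(String[] args) {
        // 2 consumers over 5 subpartitions: [0, 1] and [2, 4];
        // sizes follow the new size() = end - start + 1.
        System.out.println(Arrays.toString(range(0, 2, 5)));
        System.out.println(Arrays.toString(range(1, 2, 5)));
    }
}
```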

[flink-statefun] annotated tag release-3.2.0-rc1 updated (70daa3a -> dd9aa5e)

2022-01-25 Thread trohrmann
This is an automated email from the ASF dual-hosted git repository.

trohrmann pushed a change to annotated tag release-3.2.0-rc1
in repository https://gitbox.apache.org/repos/asf/flink-statefun.git.


*** WARNING: tag release-3.2.0-rc1 was modified! ***

from 70daa3a  (commit)
  to dd9aa5e  (tag)
 tagging 70daa3ab2ed22bcde8779af6ba4fc2ccb1fd6d07 (commit)
  by Till Rohrmann
  on Tue Jan 25 17:14:52 2022 +0100

- Log -
release-3.2.0-rc1
-BEGIN PGP SIGNATURE-

iQIzBAABCAAdFiEEuUmfpp7/Xe7rw8H1un5Bh8b3PYIFAmHwIfwACgkQun5Bh8b3
PYIO7Q//WYZsCM6UR94CdksR+2b9lE77VxYh4NZw84YEk1OpWBkNjUyYKBA8rV32
kYFKvwXB9z/tVuuPE8G69WG6chbCIAFXzjeQ7jUHf3Phfcd9ObeMY55TldbSx8w7
IxnSeHkHRDmZodDS0OtnEbyfzPXsRoZDre5jn3Lj4jMd+NNgDRlkV9fm1fP2hxa7
nhEHvRcmdolcM7A4Ouw8oNdxUKQr2tedbcAHI2Chow4nXCoJA9N6sFvnCTunhgJC
gdyrr+7/awQ5WuL1GDMcndK6epBXJBm9aqAKBGZnzaa6cLA0bv6j7H4k1He9adLV
yERKZheQ8iLj8l2ogsjSl7tOInMbvClnp0S4TD0nk3RU0ez0r/jTTLpFG51CfHZ7
LOa3FkrFeMuG3Q7h87tpbBOQ1bSWfxyMX96MBVrqHs3y+HRGD6KG6cA3mdR77j2r
NWFXelDvPZ47yLCpdVhFj5V4FsNDgD8SEhw2OhCp8CVxyt6srVotua1NwhT31fTb
buD/mNIysunCIE+2gHANA0otSFTRiWuos1oto4DZPWh/s3xPND+OQF9ASSZyH24s
5MF8ORnBo09BfGPoaS5ywl8clAavaJy17HHiACp6CZ+WcdyOfEwW/BvZceST80MQ
khJ7+wK0ZGbDhEPI09Go/MqDLoBnR8fAuFV6l71bBPZ0lsUfTZk=
=iDXI
-END PGP SIGNATURE-
---


No new revisions were added by this update.

Summary of changes:


[flink] branch master updated (8e079a2 -> 72ed6b8)

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 8e079a2  [FLINK-25763][match-recognize][docs] Updated docs to use code 
tag consistent with other tables
 add ce9c100  [FLINK-25769][table-common] Introduce hidden functions in 
module system
 add 54f1f9e  [FLINK-25769][table] Introduce internal and versioned 
built-in functions
 add 72ed6b8  [FLINK-25769][table-planner] Update some SqlFunction to new 
architecture

No new revisions were added by this update.

Summary of changes:
 .../apache/flink/table/module/ModuleManager.java   |   8 +-
 .../flink/table/module/ModuleManagerTest.java  | 102 -
 .../org/apache/flink/table/utils/ModuleMock.java   |  39 +++-
 .../table/functions/BuiltInFunctionDefinition.java | 122 ++-
 .../functions/BuiltInFunctionDefinitions.java  |   3 +-
 .../org/apache/flink/table/module/CoreModule.java  |  51 +++--
 .../java/org/apache/flink/table/module/Module.java |  17 +-
 .../apache/flink/table/module/CoreModuleTest.java  |  47 +++-
 .../inference/utils/FunctionDefinitionMock.java|   9 +
 .../planner/functions/sql/BuiltInSqlFunction.java  | 244 +
 .../planner/functions/sql/BuiltInSqlOperator.java  |  77 +++
 .../planner/functions/sql/CalciteSqlFunction.java  |  69 --
 .../functions/sql/FlinkSqlOperatorTable.java   | 162 +++---
 .../functions/sql/MatchRowTimeFunction.java|   3 +-
 .../sql/ProctimeMaterializeSqlFunction.java|  66 --
 .../sql/StreamRecordTimestampSqlFunction.java  |  53 -
 .../exec/stream/StreamExecDataStreamScan.java  |   4 +-
 .../planner/plan/batch/sql/SetOperatorsTest.xml|   4 +-
 .../planner/plan/batch/table/SetOperatorsTest.xml  |   4 +-
 .../planner/plan/common/PartialInsertTest.xml  |   8 +-
 .../rules/logical/RewriteIntersectAllRuleTest.xml  |   8 +-
 .../plan/rules/logical/RewriteMinusAllRuleTest.xml |   8 +-
 .../planner/plan/stream/sql/SetOperatorsTest.xml   |   4 +-
 23 files changed, 732 insertions(+), 380 deletions(-)
 create mode 100644 
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/BuiltInSqlFunction.java
 create mode 100644 
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/BuiltInSqlOperator.java
 delete mode 100644 
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/CalciteSqlFunction.java
 delete mode 100644 
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/ProctimeMaterializeSqlFunction.java
 delete mode 100644 
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/StreamRecordTimestampSqlFunction.java
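Only the summary is included above. Conceptually, the series lets built-in functions be marked internal (hidden from user-facing listings) and versioned, so a persisted plan can pin the exact behavior it was compiled against. A purely illustrative sketch of such a registry (not the BuiltInFunctionDefinition API):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

// Purely illustrative: functions keyed by (name, version); internal ones
// can be resolved by the planner but are not listed to users.
final class FunctionCatalogSketch {

    static final class Def {
        final String name;
        final int version;
        final boolean internal;

        Def(String name, int version, boolean internal) {
            this.name = name;
            this.version = version;
            this.internal = internal;
        }
    }

    private final Map<String, Def> byKey = new HashMap<>();

    void register(Def def) {
        byKey.put(def.name + "$" + def.version, def);
    }

    Optional<Def> lookup(String name, int version) {
        return Optional.ofNullable(byKey.get(name + "$" + version));
    }
}
```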


svn commit: r52265 - /dev/flink/flink-statefun-3.2.0-rc1/

2022-01-25 Thread trohrmann
Author: trohrmann
Date: Tue Jan 25 16:29:32 2022
New Revision: 52265

Log:
Apache Flink Stateful Functions, version 3.2.0, release candidate 1

Added:
dev/flink/flink-statefun-3.2.0-rc1/
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz   
(with props)
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.asc
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.sha512

dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl 
  (with props)

dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.asc

dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.sha512
dev/flink/flink-statefun-3.2.0-rc1/flink-statefun-3.2.0-src.tgz   (with 
props)
dev/flink/flink-statefun-3.2.0-rc1/flink-statefun-3.2.0-src.tgz.asc
dev/flink/flink-statefun-3.2.0-rc1/flink-statefun-3.2.0-src.tgz.sha512

Added: dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz
==
Binary file - no diff available.

Propchange: 
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz
--
svn:mime-type = application/octet-stream

Added: dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.asc
==
--- dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.asc 
(added)
+++ dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.asc 
Tue Jan 25 16:29:32 2022
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIzBAABCAAdFiEE4c0KvyqNHOfSD0No5lOO1ZnjUBMFAmHwIhwACgkQ5lOO1Znj
+UBPZqg//QRw+2Ea5fiBF51aAsT6iEB8A/L5GgGuRfyy0Nk6FQkGav+dpaOZef81I
+IMWqtiYCghytQ2+OQjiSNfQj25U/XnQOd/xCdhM75iJI+d+2DRzK/vFe5c3EeqPc
+NccIqfu6c4xzkzPF4ApGmyYIj2qJ93SNuUlkvva3f2urTFALPhOLjkIpcvapl6f4
+pA+iOKIRFZjBcMhmGFrQgoQSmxttF+bU2QSx6jmioFWNmugIXRwSVHvHdVbCBjuy
+KRt+qNjWjF5N+EPQBBxUYWKDMoebn3kAa2AHWtZmGj3N1+JsJJcYVlgRe0FwFs34
+hWBqVedDjuJ+Vm+O+RrBqylZSzi2aQclm1HW6CKI3y8M8pivaBelQmYqz8p2o8B8
+dSAyhZn0zhikWiBEjanxh2azVHYYz3oyNABIkwsZe+So7vly+BOeK9k1l7rsNM/o
+mshiV8KAHUy3xisZJXeh6tEeC4Ij2n1aM5fkWbOzathjojjYib0odWBRd3c/qRvV
+WSZeKXssSO8cHtOe86pZpEU1Ds4WcA4CoEOV+ANpIt+b+foyP1YrtWSVqgiTVXLM
+zdgPRf5D8MS8yrOaOA/WEfw4nMdCPizmjod4BiNWpZ0GFGh4wHzp9HJtk5Q+1TPn
+yXLeLdX4NMkvmUPunpkGcyTMfFBIBypHGJ1mqO0nvO7k2Su4lTg=
+=GQhN
+-END PGP SIGNATURE-

Added: 
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.sha512
==
--- 
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.sha512 
(added)
+++ 
dev/flink/flink-statefun-3.2.0-rc1/apache-flink-statefun-3.2.0.tar.gz.sha512 
Tue Jan 25 16:29:32 2022
@@ -0,0 +1 @@
+66d76089b2149c780715305a457f106d9112ed00c6940fa41d5f31ebebc7e0785dc6c34f9bb86ee9076f9798bd7cb9e9e0aaa28646ebbb152e6930476d9909af
  apache-flink-statefun-3.2.0.tar.gz

Added: 
dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl
==
Binary file - no diff available.

Propchange: 
dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl
--
svn:mime-type = application/octet-stream

Added: 
dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.asc
==
--- 
dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.asc
 (added)
+++ 
dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.asc
 Tue Jan 25 16:29:32 2022
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIzBAABCAAdFiEE4c0KvyqNHOfSD0No5lOO1ZnjUBMFAmHwIhwACgkQ5lOO1Znj
+UBMMeBAAgG9NxWFIrKlQgHEDZ3SauC7CpjtyV6cwH0x8NJ05LhUqXstuNGLsfDei
+t3iRhkCFLLXC1rlt4JRYfnl9YBZyxLdWu56AHnp7nzo6skB4ps4GAq2bQdr5i45k
+9KiGn0bzrvwV5jtC59xA7Bbru1TNNDOetk7ruY0rTXK5/sLLWMvbfuFJcxf4Rho3
+0ZkrHB3vjy5IJ4Vc+1oeUjqRIbBNuShch9G7uvaPWnlpLuaryU+KgNaXkVXbnLMq
+lIwtnY5KxQ30FiTfkW7snDNzbY8IxCCIqurwxWZGumTDnU+tZGf82pW1kI2RJsgN
+ZlHDSSPjNIcphkMFff1s5KlB4bCPoFJtLlAT77Ri0AQ3ZcK4QDV8jYr7sjPhyOfS
+PtIsG2c5WJVkSuQe7uPmQ3Napo/HshvjlO/5HlT++Vhim7Ak+ix67PA7UENvQ1dy
+eA1YlfwCFlchG5Z0XC3xLRDaZbITAKz9dYR9s6rOpOcjDkOC5x7OWAYZQGK6TZmz
+WtcjEU2N9J3oHynJdr2tAjnmZToEtn+62ust31D/k/o+7MxkkEmlKTywPMnmk0Or
+YN4lARXyOOJkitIqpmCE+LhKKllGFnzRKEjBlXb16P4aqg3ErBreeBZwTD31ixRt
+sSdWn+owHIbxdZkBnxpoxLuNRy5dBV+ZYwqWAo3rw403QLe2JWs=
+=JDqb
+-END PGP SIGNATURE-

Added: 
dev/flink/flink-statefun-3.2.0-rc1/apache_flink_statefun-3.2.0-py3-none-any.whl.sha512
==
--- 
dev/flink/flink-statefun-3.2.0-rc1/

[flink] branch master updated: [FLINK-25763][match-recognize][docs] Updated docs to use code tag consistent with other tables

2022-01-25 Thread fpaul
This is an automated email from the ASF dual-hosted git repository.

fpaul pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 8e079a2  [FLINK-25763][match-recognize][docs] Updated docs to use code 
tag consistent with other tables
8e079a2 is described below

commit 8e079a2420e26db1c495f61703ed4510449e6028
Author: mans2singh 
AuthorDate: Sat Jan 22 18:30:35 2022 -0500

[FLINK-25763][match-recognize][docs] Updated docs to use code tag 
consistent with other tables
---
 docs/content.zh/docs/dev/table/sql/queries/match_recognize.md | 8 ++--
 docs/content/docs/dev/table/sql/queries/match_recognize.md| 8 ++--
 2 files changed, 4 insertions(+), 12 deletions(-)

diff --git a/docs/content.zh/docs/dev/table/sql/queries/match_recognize.md 
b/docs/content.zh/docs/dev/table/sql/queries/match_recognize.md
index 4181f9d..d1729aa 100644
--- a/docs/content.zh/docs/dev/table/sql/queries/match_recognize.md
+++ b/docs/content.zh/docs/dev/table/sql/queries/match_recognize.md
@@ -632,9 +632,7 @@ _Logical offsets_ enable navigation within the events mapped to a specified pattern variable. This can
   
   
 
-```text
-LAST(variable.field, n)
-```
+  <code>LAST(variable.field, n)</code>
 
 
  Returns the value of the field from the event mapped to the n-th last element of the variable. Counting starts from the last mapped element.
@@ -642,9 +640,7 @@ LAST(variable.field, n)
   
   
 
-```text
-FIRST(variable.field, n)
-```
+  <code>FIRST(variable.field, n)</code>
 
 
  Returns the value of the field from the event mapped to the n-th element of the variable. Counting starts from the first mapped element.
diff --git a/docs/content/docs/dev/table/sql/queries/match_recognize.md 
b/docs/content/docs/dev/table/sql/queries/match_recognize.md
index 7d8fef0..f7d1103 100644
--- a/docs/content/docs/dev/table/sql/queries/match_recognize.md
+++ b/docs/content/docs/dev/table/sql/queries/match_recognize.md
@@ -723,9 +723,7 @@ variable. This can be expressed with two corresponding 
functions:
   
   
 
-```text
-LAST(variable.field, n)
-```
+<code>LAST(variable.field, n)</code>
 
 
   Returns the value of the field from the event that was mapped to the 
n-th
@@ -734,9 +732,7 @@ LAST(variable.field, n)
   
   
 
-```text
-FIRST(variable.field, n)
-```
+<code>FIRST(variable.field, n)</code>
 
 
   Returns the value of the field from the event that was mapped to the 
n-th element


[flink-statefun] branch master updated: [release] Add 3.2 release docs to docs/config.toml

2022-01-25 Thread trohrmann
This is an automated email from the ASF dual-hosted git repository.

trohrmann pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-statefun.git


The following commit(s) were added to refs/heads/master by this push:
 new b9230a5  [release] Add 3.2 release docs to docs/config.toml
b9230a5 is described below

commit b9230a593dd16865ff7c88d06347d8dc34a4d2bd
Author: Till Rohrmann 
AuthorDate: Tue Jan 25 16:17:11 2022 +0100

[release] Add 3.2 release docs to docs/config.toml
---
 docs/config.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/config.toml b/docs/config.toml
index 1a09bf6..837974f 100644
--- a/docs/config.toml
+++ b/docs/config.toml
@@ -61,6 +61,7 @@ pygmentsUseClasses = true
   ]
 
   PreviousDocs = [
+["3.2", 
"https://nightlies.apache.org/flink/flink-statefun-docs-release-3.2";],
 ["3.1", 
"https://nightlies.apache.org/flink/flink-statefun-docs-release-3.1";],
 ["3.0", 
"https://nightlies.apache.org/flink/flink-statefun-docs-release-3.0";],
 ["2.2", 
"https://nightlies.apache.org/flink/flink-statefun-docs-release-2.2";]


[flink-statefun] 01/01: [release] Update the docs/config.toml to point to 3.2.0 release

2022-01-25 Thread trohrmann
This is an automated email from the ASF dual-hosted git repository.

trohrmann pushed a commit to branch release-3.2
in repository https://gitbox.apache.org/repos/asf/flink-statefun.git

commit 4a7f6eef3ba0f9cb07aafa3930ef5b6c7b7d2fb0
Author: Till Rohrmann 
AuthorDate: Tue Jan 25 16:16:08 2022 +0100

[release] Update the docs/config.toml to point to 3.2.0 release
---
 docs/config.toml | 12 ++--
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/docs/config.toml b/docs/config.toml
index ac13064..974529a 100644
--- a/docs/config.toml
+++ b/docs/config.toml
@@ -14,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-baseURL = '//nightlies.apache.org/flink/flink-statefun-docs-master'
+baseURL = '//nightlies.apache.org/flink/flink-statefun-docs-release-3.2'
 languageCode = "en-us"
 title = "Apache Flink Stateful Functions"
 enableGitInfo = false
@@ -24,7 +24,7 @@ pygmentsUseClasses = true
 [params]
   # Flag whether this is a stable version or not.
   # Used for the quickstart page.
-  IsStable = false
+  IsStable = true
 
   # Flag to indicate whether an outdated warning should be shown.
   ShowOutDatedWarning = false
@@ -34,14 +34,14 @@ pygmentsUseClasses = true
   # we change the version for the complete docs when forking of a release 
branch
   # etc.
   # The full version string as referenced in Maven (e.g. 1.2.1)
-  Version = "3.2-SNAPSHOT"
+  Version = "3.2.0"
 
   # For stable releases, leave the bugfix version out (e.g. 1.2). For snapshot
   # release this should be the same as the regular version
-  VersionTitle = "3.2-SNAPSHOT"
+  VersionTitle = "3.2"
 
   # The branch for this version of Apache Flink Stateful Functions
-  Branch = "master"
+  Branch = "release-3.2"
 
   # The github repository for Apache Flink Stateful Functions
   Repo = "//github.com/apache/flink-statefun"
@@ -57,7 +57,7 @@ pygmentsUseClasses = true
   # of the menu
   MenuLinks = [
 ["Project Homepage", "//flink.apache.org"],
-["JavaDocs", 
"//nightlies.apache.org/flink/flink-statefun-docs-master/api/java/"],
+["JavaDocs", 
"//nightlies.apache.org/flink/flink-statefun-docs-release-3.2/api/java/"],
   ]
 
   PreviousDocs = [


[flink-statefun] branch release-3.2 created (now 4a7f6ee)

2022-01-25 Thread trohrmann
This is an automated email from the ASF dual-hosted git repository.

trohrmann pushed a change to branch release-3.2
in repository https://gitbox.apache.org/repos/asf/flink-statefun.git.


  at 4a7f6ee  [release] Update the docs/config.toml to point to 3.2.0 
release

This branch includes the following new commits:

 new 4a7f6ee  [release] Update the docs/config.toml to point to 3.2.0 
release

The 1 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[flink-statefun] branch master updated: [release] Update version to 3.3-SNAPSHOT

2022-01-25 Thread trohrmann
This is an automated email from the ASF dual-hosted git repository.

trohrmann pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-statefun.git


The following commit(s) were added to refs/heads/master by this push:
 new d5f7c62  [release] Update version to 3.3-SNAPSHOT
d5f7c62 is described below

commit d5f7c627e67465f5c4ec5d37a33426fa04e4d17f
Author: Till Rohrmann 
AuthorDate: Tue Jan 25 16:02:40 2022 +0100

[release] Update version to 3.3-SNAPSHOT
---
 docs/config.toml  | 4 ++--
 pom.xml   | 2 +-
 statefun-e2e-tests/pom.xml| 2 +-
 statefun-e2e-tests/statefun-e2e-tests-common/pom.xml  | 2 +-
 statefun-e2e-tests/statefun-smoke-e2e-common/pom.xml  | 2 +-
 statefun-e2e-tests/statefun-smoke-e2e-driver/pom.xml  | 2 +-
 statefun-e2e-tests/statefun-smoke-e2e-embedded/pom.xml| 4 ++--
 .../statefun-smoke-e2e-embedded/src/test/resources/Dockerfile | 2 +-
 statefun-e2e-tests/statefun-smoke-e2e-golang/pom.xml  | 2 +-
 .../statefun-smoke-e2e-golang/src/test/resources/Dockerfile   | 2 +-
 statefun-e2e-tests/statefun-smoke-e2e-java/pom.xml| 2 +-
 .../statefun-smoke-e2e-java/src/test/resources/Dockerfile | 2 +-
 statefun-e2e-tests/statefun-smoke-e2e-js/pom.xml  | 2 +-
 .../statefun-smoke-e2e-js/src/test/resources/Dockerfile   | 2 +-
 statefun-e2e-tests/statefun-smoke-e2e-multilang-base/pom.xml  | 2 +-
 statefun-e2e-tests/statefun-smoke-e2e-multilang-harness/pom.xml   | 4 ++--
 statefun-flink/pom.xml| 2 +-
 statefun-flink/statefun-flink-common/pom.xml  | 2 +-
 statefun-flink/statefun-flink-core/pom.xml| 2 +-
 statefun-flink/statefun-flink-datastream/pom.xml  | 2 +-
 statefun-flink/statefun-flink-distribution/pom.xml| 2 +-
 statefun-flink/statefun-flink-extensions/pom.xml  | 2 +-
 statefun-flink/statefun-flink-harness/pom.xml | 2 +-
 statefun-flink/statefun-flink-io-bundle/pom.xml   | 2 +-
 statefun-flink/statefun-flink-io/pom.xml  | 2 +-
 statefun-flink/statefun-flink-launcher/pom.xml| 2 +-
 statefun-flink/statefun-flink-state-processor/pom.xml | 2 +-
 statefun-kafka-io/pom.xml | 2 +-
 statefun-kinesis-io/pom.xml   | 2 +-
 statefun-sdk-embedded/pom.xml | 2 +-
 statefun-sdk-go/pom.xml   | 2 +-
 statefun-sdk-java/pom.xml | 2 +-
 statefun-sdk-js/package.json  | 2 +-
 statefun-sdk-js/pom.xml   | 2 +-
 statefun-sdk-protos/pom.xml   | 2 +-
 statefun-sdk-python/pom.xml   | 2 +-
 statefun-sdk-python/setup.py  | 2 +-
 statefun-shaded/pom.xml   | 2 +-
 statefun-shaded/statefun-protobuf-shaded/pom.xml  | 2 +-
 statefun-shaded/statefun-protocol-shaded/pom.xml  | 2 +-
 statefun-testutil/pom.xml | 2 +-
 tools/docker/build-stateful-functions.sh  | 2 +-
 tools/k8s/Chart.yaml  | 2 +-
 43 files changed, 46 insertions(+), 46 deletions(-)

diff --git a/docs/config.toml b/docs/config.toml
index ac13064..1a09bf6 100644
--- a/docs/config.toml
+++ b/docs/config.toml
@@ -34,11 +34,11 @@ pygmentsUseClasses = true
   # we change the version for the complete docs when forking of a release 
branch
   # etc.
   # The full version string as referenced in Maven (e.g. 1.2.1)
-  Version = "3.2-SNAPSHOT"
+  Version = "3.3-SNAPSHOT"
 
   # For stable releases, leave the bugfix version out (e.g. 1.2). For snapshot
   # release this should be the same as the regular version
-  VersionTitle = "3.2-SNAPSHOT"
+  VersionTitle = "3.3-SNAPSHOT"
 
   # The branch for this version of Apache Flink Stateful Functions
   Branch = "master"
diff --git a/pom.xml b/pom.xml
index 8434e95..2042c51 100644
--- a/pom.xml
+++ b/pom.xml
@@ -29,7 +29,7 @@ under the License.
 statefun-parent
 org.apache.flink
 statefun-parent
-3.2-SNAPSHOT
+3.3-SNAPSHOT
 pom
 
 http://flink.apache.org
diff --git a/statefun-e2e-tests/pom.xml b/statefun-e2e-tests/pom.xml
index f2e850c..863dc35 100644
--- a/statefun-e2e-tests/pom.xml

[flink] branch release-1.13 updated: [FLINK-25278][ci] Use Maven proxy for confluent repo

2022-01-25 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.13
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.13 by this push:
 new 5c0af53  [FLINK-25278][ci] Use Maven proxy for confluent repo
5c0af53 is described below

commit 5c0af5367f6af575cdc6dfa5186710dcbc861097
Author: Chesnay Schepler 
AuthorDate: Tue Jan 11 17:03:32 2022 +0100

[FLINK-25278][ci] Use Maven proxy for confluent repo
---
 tools/ci/alibaba-mirror-settings.xml | 6 ++
 1 file changed, 6 insertions(+)

diff --git a/tools/ci/alibaba-mirror-settings.xml 
b/tools/ci/alibaba-mirror-settings.xml
index 0a817f1..34fddbe 100644
--- a/tools/ci/alibaba-mirror-settings.xml
+++ b/tools/ci/alibaba-mirror-settings.xml
@@ -24,5 +24,11 @@ under the License.
       <url>http://172.17.0.1:/repository/maven-central/</url>
       <mirrorOf>central</mirrorOf>
     </mirror>
+    <mirror>
+      <id>alicloud-mvn-confluent-mirror</id>
+      <name>Alibaba Confluent Maven mirror</name>
+      <url>http://172.17.0.1:/repository/confluent/</url>
+      <mirrorOf>confluent</mirrorOf>
+    </mirror>
   </mirrors>
 </settings>


[flink] branch release-1.14 updated: [FLINK-25278][ci] Use Maven proxy for confluent repo

2022-01-25 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.14
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.14 by this push:
 new 3f54ec7  [FLINK-25278][ci] Use Maven proxy for confluent repo
3f54ec7 is described below

commit 3f54ec789f9d14f1c89e38393fc184f4bcd4350d
Author: Chesnay Schepler 
AuthorDate: Tue Jan 11 17:03:32 2022 +0100

[FLINK-25278][ci] Use Maven proxy for confluent repo
---
 tools/ci/alibaba-mirror-settings.xml | 6 ++
 1 file changed, 6 insertions(+)

diff --git a/tools/ci/alibaba-mirror-settings.xml 
b/tools/ci/alibaba-mirror-settings.xml
index 0a817f1..34fddbe 100644
--- a/tools/ci/alibaba-mirror-settings.xml
+++ b/tools/ci/alibaba-mirror-settings.xml
@@ -24,5 +24,11 @@ under the License.
       <url>http://172.17.0.1:/repository/maven-central/</url>
       <mirrorOf>central</mirrorOf>
     </mirror>
+    <mirror>
+      <id>alicloud-mvn-confluent-mirror</id>
+      <name>Alibaba Confluent Maven mirror</name>
+      <url>http://172.17.0.1:/repository/confluent/</url>
+      <mirrorOf>confluent</mirrorOf>
+    </mirror>
   </mirrors>
 </settings>


[flink-statefun] branch master updated (09a5cba -> 895fb0b)

2022-01-25 Thread igal
This is an automated email from the ASF dual-hosted git repository.

igal pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink-statefun.git.


from 09a5cba  [FLINK-25708] Bump Flink dependency to 1.14.3
 add 895fb0b  [FLINK-25775][doc] Add Javascript SDK documentation for NodeJS

No new revisions were added by this update.

Summary of changes:
 docs/content/_index.md  |   1 +
 docs/content/docs/sdk/js.md | 267 
 2 files changed, 268 insertions(+)
 create mode 100644 docs/content/docs/sdk/js.md


[flink] branch master updated (8a694ca -> d8a031c)

2022-01-25 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 8a694ca  [FLINK-25702][Kafka] Use the configure feature provided by 
the kafka Serializer/Deserializer
 add d8a031c  [FLINK-24041][connector] Fixing reversed insertion of failed 
requests

No new revisions were added by this update.

Summary of changes:
 .../base/sink/writer/AsyncSinkWriter.java  |  4 ++-
 .../base/sink/writer/AsyncSinkWriterTest.java  | 33 +-
 2 files changed, 35 insertions(+), 2 deletions(-)
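"Reversed insertion" refers to failed requests being pushed back one at a time onto the front of the buffer, which flips their order. A minimal sketch of the corrected re-queue (hypothetical buffer type, not the actual AsyncSinkWriter internals):

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

// Hypothetical sketch: failed entries must be retried before newer entries
// AND keep their original relative order.
class RetryBuffer<T> {

    private final Deque<T> buffer = new ArrayDeque<>();

    void add(T entry) {
        buffer.addLast(entry);
    }

    void requeueFailed(List<T> failedInOriginalOrder) {
        // Iterating back to front and prepending preserves the batch order;
        // prepending front to back would reverse it.
        for (int i = failedInOriginalOrder.size() - 1; i >= 0; i--) {
            buffer.addFirst(failedInOriginalOrder.get(i));
        }
    }

    T poll() {
        return buffer.pollFirst();
    }
}
```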


[flink] branch release-1.14 updated: Update for 1.14.3

2022-01-25 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.14
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.14 by this push:
 new bf8d811  Update for 1.14.3
bf8d811 is described below

commit bf8d81102fe9951745d032aa98040ec763ac4997
Author: Chesnay Schepler 
AuthorDate: Tue Jan 25 10:46:19 2022 +0100

Update for 1.14.3
---
 docs/config.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/config.toml b/docs/config.toml
index 571e90c..6203ccd 100644
--- a/docs/config.toml
+++ b/docs/config.toml
@@ -34,7 +34,7 @@ pygmentsUseClasses = true
   # we change the version for the complete docs when forking of a release 
branch
   # etc.
   # The full version string as referenced in Maven (e.g. 1.2.1)
-  Version = "1.14.2"
+  Version = "1.14.3"
 
   # For stable releases, leave the bugfix version out (e.g. 1.2). For snapshot
   # release this should be the same as the regular version


[flink] branch master updated (9ab8dbd -> 8a694ca)

2022-01-25 Thread lindong
This is an automated email from the ASF dual-hosted git repository.

lindong pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 9ab8dbd  [FLINK-14954][rest] Add OpenAPI spec generator
 add 8a694ca  [FLINK-25702][Kafka] Use the configure feature provided by 
the kafka Serializer/Deserializer

No new revisions were added by this update.

Summary of changes:
 .../kafka/sink/KafkaRecordSerializationSchema.java |   6 +-
 .../KafkaRecordSerializationSchemaBuilder.java |  24 +++--
 .../kafka/sink/KafkaSerializerWrapper.java |  12 ++-
 .../KafkaRecordDeserializationSchema.java  |  15 ++-
 .../KafkaValueOnlyDeserializerWrapper.java |   8 ++
 .../KafkaRecordSerializationSchemaBuilderTest.java | 105 -
 .../KafkaRecordDeserializationSchemaTest.java  |  69 ++
 7 files changed, 189 insertions(+), 50 deletions(-)
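Kafka's `Serializer`/`Deserializer` interfaces expose a `configure(Map, boolean)` hook that was previously skipped when Flink wrapped a plain Kafka (de)serializer. A sketch of the idea with a hypothetical wrapper (not the exact KafkaSerializerWrapper code):

```java
import org.apache.kafka.common.serialization.Serializer;

import java.util.Map;

// Hypothetical wrapper: besides instantiating the Kafka serializer, also
// pass the user properties through its configure() hook, so settings such
// as a schema registry URL actually reach it.
class ConfiguredSerializer<T> {

    private final Serializer<T> serializer;

    ConfiguredSerializer(Serializer<T> serializer, Map<String, ?> config, boolean isKey) {
        this.serializer = serializer;
        serializer.configure(config, isKey);
    }

    byte[] serialize(String topic, T value) {
        return serializer.serialize(topic, value);
    }
}
```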


[flink] branch master updated (dfcf8bc -> 9ab8dbd)

2022-01-25 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from dfcf8bc  [hotfix][table-planner] Unify the JSON test utilities
 add b79232e  [hotfix][rest][docs] Regenerate REST API docs
 add 9ab8dbd  [FLINK-14954][rest] Add OpenAPI spec generator

No new revisions were added by this update.

Summary of changes:
 docs/content.zh/docs/ops/rest_api.md   |   11 +-
 docs/content/docs/ops/rest_api.md  |   12 +-
 .../shortcodes/generated/rest_v1_dispatcher.html   |   76 +-
 docs/static/generated/rest_v1_dispatcher.yml   | 2878 
 flink-docs/README.md   |2 +-
 flink-docs/pom.xml |   15 +
 .../flink/docs/rest/OpenApiSpecGenerator.java  |  436 +++
 ...atorTest.java => OpenApiSpecGeneratorTest.java} |   40 +-
 .../handler/async/AsynchronousOperationResult.java |2 +
 .../json/SerializedThrowableSerializer.java|2 +-
 pom.xml|2 +-
 11 files changed, 3444 insertions(+), 32 deletions(-)
 create mode 100644 docs/static/generated/rest_v1_dispatcher.yml
 create mode 100644 
flink-docs/src/main/java/org/apache/flink/docs/rest/OpenApiSpecGenerator.java
 copy 
flink-docs/src/test/java/org/apache/flink/docs/rest/{RestAPIDocGeneratorTest.java
 => OpenApiSpecGeneratorTest.java} (71%)


[flink] branch master updated (fa161d3 -> dfcf8bc)

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from fa161d3  [FLINK-25391][format-json] Forward catalog table options
 add b5dcb85  [hotfix][table-common] More Javadoc on 
DynamicTableFactory.Context#getObjectIdentifier
 add a65c12b  [FLINK-25791][table-planner] Add Parser to the SerdeContext
 add 04f8724  [FLINK-25791][table-planner] Use serializable string for 
ObjectIdentifier in JSON
 add dfcf8bc  [hotfix][table-planner] Unify the JSON test utilities

No new revisions were added by this update.

Summary of changes:
 .../flink/table/factories/DynamicTableFactory.java |  20 +++-
 .../exec/serde/LogicalTypeJsonDeserializer.java|   7 +-
 .../serde/ObjectIdentifierJsonDeserializer.java|  35 --
 .../exec/serde/ObjectIdentifierJsonSerializer.java |  11 +-
 .../plan/nodes/exec/serde/SerdeContext.java|   8 ++
 .../table/planner/delegation/PlannerBase.scala |   1 +
 .../nodes/exec/serde/DataTypeJsonSerdeTest.java|   6 +-
 .../exec/serde/DynamicTableSinkSpecSerdeTest.java  |  19 +--
 .../serde/DynamicTableSourceSpecSerdeTest.java |  18 +--
 .../plan/nodes/exec/serde/JsonSerdeMocks.java  |  76 
 .../plan/nodes/exec/serde/JsonSerdeTestUtil.java   | 132 +
 .../nodes/exec/serde/LogicalTypeJsonSerdeTest.java |   9 +-
 .../nodes/exec/serde/LogicalWindowSerdeTest.java   |   1 +
 .../plan/nodes/exec/serde/LookupKeySerdeTest.java  |   1 +
 .../nodes/exec/serde/RelDataTypeJsonSerdeTest.java |  10 +-
 .../plan/nodes/exec/serde/RexNodeSerdeTest.java|   1 +
 .../nodes/exec/serde/RexWindowBoundSerdeTest.java  |   1 +
 .../serde/TemporalTableSourceSpecSerdeTest.java|   9 +-
 .../test/resources/jsonplan/testGetJsonPlan.out|  12 +-
 .../CalcJsonPlanTest_jsonplan/testComplexCalc.out  |  12 +-
 .../CalcJsonPlanTest_jsonplan/testSimpleFilter.out |  12 +-
 .../testSimpleProject.out  |  12 +-
 .../testChangelogSource.out|  12 +-
 .../testUpsertSource.out   |  12 +-
 .../testCrossJoin.out  |  12 +-
 .../testCrossJoinOverrideParameters.out|  12 +-
 .../testJoinWithFilter.out |  12 +-
 .../testLeftOuterJoinWithLiteralTrue.out   |  12 +-
 .../testDeduplication.out  |  12 +-
 .../ExpandJsonPlanTest_jsonplan/testExpand.out |  12 +-
 ...tDistinctAggCalls[isMiniBatchEnabled=false].out |  12 +-
 ...stDistinctAggCalls[isMiniBatchEnabled=true].out |  12 +-
 ...gCallsWithGroupBy[isMiniBatchEnabled=false].out |  12 +-
 ...ggCallsWithGroupBy[isMiniBatchEnabled=true].out |  12 +-
 ...AggWithoutGroupBy[isMiniBatchEnabled=false].out |  12 +-
 ...eAggWithoutGroupBy[isMiniBatchEnabled=true].out |  12 +-
 ...erDefinedAggCalls[isMiniBatchEnabled=false].out |  12 +-
 ...serDefinedAggCalls[isMiniBatchEnabled=true].out |  12 +-
 .../testEventTimeHopWindow.out |  12 +-
 .../testEventTimeSessionWindow.out |  12 +-
 .../testEventTimeTumbleWindow.out  |  12 +-
 .../testProcTimeHopWindow.out  |  12 +-
 .../testProcTimeSessionWindow.out  |  12 +-
 .../testProcTimeTumbleWindow.out   |  12 +-
 .../testIncrementalAggregate.out   |  12 +-
 ...lAggregateWithSumCountDistinctAndRetraction.out |  12 +-
 .../testProcessingTimeInnerJoinWithOnClause.out|  18 +--
 .../testRowTimeInnerJoinWithOnClause.out   |  18 +--
 .../JoinJsonPlanTest_jsonplan/testInnerJoin.out|  18 +--
 .../testInnerJoinWithEqualPk.out   |  18 +--
 .../testInnerJoinWithPk.out|  18 +--
 .../testLeftJoinNonEqui.out|  18 +--
 .../LimitJsonPlanTest_jsonplan/testLimit.out   |  12 +-
 .../testJoinTemporalTable.out  |  18 +--
 ...testJoinTemporalTableWithProjectionPushDown.out |  18 +--
 .../testMatch.out  |  12 +-
 .../testProcTimeBoundedNonPartitionedRangeOver.out |  12 +-
 .../testProcTimeBoundedPartitionedRangeOver.out|  12 +-
 ...undedPartitionedRowsOverWithBuiltinProctime.out |  12 +-
 .../testProcTimeUnboundedPartitionedRangeOver.out  |  12 +-
 ...stProctimeBoundedDistinctPartitionedRowOver.out |  12 +-
 ...edDistinctWithNonDistinctPartitionedRowOver.out |  12 +-
 .../testRowTimeBoundedPartitionedRowsOver.out  |  12 +-
 .../testPythonCalc.out |  12 +-
 .../testPythonFunctionInWhereClause.out|  12 +-
 .../testJoinWithFilter.out |  12 +-
 .../testPythonTableFunction.out|  12 +-
 .../tesPythonAggCallsWithGroupBy.out   |  12 +-
 .../testEventTimeHopWindow.out |  12 +-
 .../testEventTimeSessionWindow.out |  12 +-
 .../testEventTime

[flink] 05/09: [FLINK-25391][connector-kinesis] Forward catalog table options

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 2cb86ff03747499bfceda74cb8cc1ea48c385452
Author: slinkydeveloper 
AuthorDate: Thu Jan 6 16:21:52 2022 +0100

[FLINK-25391][connector-kinesis] Forward catalog table options
---
 docs/content/docs/connectors/table/kinesis.md  | 79 --
 .../kinesis/table/KinesisDynamicTableFactory.java  | 16 -
 2 files changed, 88 insertions(+), 7 deletions(-)

diff --git a/docs/content/docs/connectors/table/kinesis.md 
b/docs/content/docs/connectors/table/kinesis.md
index f26b1e9..1840862 100644
--- a/docs/content/docs/connectors/table/kinesis.md
+++ b/docs/content/docs/connectors/table/kinesis.md
@@ -122,11 +122,12 @@ Connector Options
 
 
 
-  Option
-  Required
-  Default
-  Type
-  Description
+Option
+Required
+Forwarded
+Default
+Type
+Description
 
 
   Common Options
@@ -136,6 +137,7 @@ Connector Options
 
   connector
   required
+  no
   (none)
   String
   Specify what connector to use. For Kinesis use 
'kinesis'.
@@ -143,6 +145,7 @@ Connector Options
 
   stream
   required
+  yes
   (none)
   String
   Name of the Kinesis data stream backing this table.
@@ -150,6 +153,7 @@ Connector Options
 
   format
   required
+  no
   (none)
   String
   The format used to deserialize and serialize Kinesis data stream 
records. See Data Type Mapping for 
details.
@@ -157,6 +161,7 @@ Connector Options
 
   aws.region
   optional
+  no
   (none)
   String
   The AWS region where the stream is defined. Either this or 
aws.endpoint are required.
@@ -164,6 +169,7 @@ Connector Options
 
   aws.endpoint
   optional
+  no
   (none)
   String
   The AWS endpoint for Kinesis (derived from the AWS region setting if 
not set). Either this or aws.region are required.
@@ -185,6 +191,7 @@ Connector Options
 
   aws.credentials.provider
   optional
+  no
   AUTO
   String
   A credentials provider to use when authenticating against the 
Kinesis endpoint. See Authentication for 
details.
@@ -192,6 +199,7 @@ Connector Options
 
  aws.credentials.basic.accesskeyid
  optional
+  no
  (none)
  String
  The AWS access key ID to use when setting credentials provider 
type to BASIC.
@@ -199,6 +207,7 @@ Connector Options
 
  aws.credentials.basic.secretkey
  optional
+  no
  (none)
  String
  The AWS secret key to use when setting credentials provider type 
to BASIC.
@@ -206,6 +215,7 @@ Connector Options
 
  aws.credentials.profile.path
  optional
+  no
  (none)
  String
 Optional configuration for profile path if the credential provider 
type is set to PROFILE.
@@ -213,6 +223,7 @@ Connector Options
 
  aws.credentials.profile.name
  optional
+  no
  (none)
  String
 Optional configuration for profile name if the credential provider 
type is set to PROFILE.
@@ -220,6 +231,7 @@ Connector Options
 
  aws.credentials.role.arn
  optional
+  no
  (none)
  String
  The role ARN to use when credential provider type is set to 
ASSUME_ROLE or WEB_IDENTITY_TOKEN.
@@ -227,6 +239,7 @@ Connector Options
 
  aws.credentials.role.sessionName
  optional
+  no
  (none)
  String
  The role session name to use when credential provider type is set 
to ASSUME_ROLE or WEB_IDENTITY_TOKEN.
@@ -234,6 +247,7 @@ Connector Options
 
  aws.credentials.role.externalId
  optional
+  no
  (none)
  String
  The external ID to use when credential provider type is set to 
ASSUME_ROLE.
@@ -241,6 +255,7 @@ Connector Options
 
  aws.credentials.role.provider
  optional
+  no
  (none)
  String
  The credentials provider that provides credentials for assuming 
the role when credential provider type is set to ASSUME_ROLE. Roles can be 
nested, so this value can again be set to ASSUME_ROLE
@@ -248,6 +263,7 @@ Connector Options
 
  aws.credentials.webIdentityToken.file
  optional
+  no
  (none)
  String
  The absolute path to the web identity token file that should be 
used if provider type is set to WEB_IDENTITY_TOKEN.
@@ -262,6 +278,7 @@ Connector Options
 
   scan.stream.initpos
   optional
+  no
   LATEST
   String
   Initial position to be used when reading from the table. See Start Reading Position for details.
@@ -269,6 +286,7 @@ Connector Options
 
   scan.stream.init
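
For readers skimming this thread: the new "Forwarded" column added across
these tables documents whether an option is declared as forwardable by the
connector's factory. Judging from the factory diffs later in this thread
(see the CsvFormatFactory change in 08/09), a factory declares such options
roughly as sketched below; the class and the option constant are
illustrative stand-ins, not the actual Kinesis factory code:

import java.util.HashSet;
import java.util.Set;

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;

// Hypothetical factory sketch: options returned by forwardOptions() are the
// ones documented as "Forwarded: yes"; options that affect semantics or the
// execution topology (e.g. 'connector', 'format') are left out.
public class ExampleDynamicTableFactory {

    // Illustrative option; the real factories reference their shared
    // *ConnectorOptions constants instead of redeclaring keys.
    public static final ConfigOption<String> STREAM =
            ConfigOptions.key("stream").stringType().noDefaultValue();

    public Set<ConfigOption<?>> forwardOptions() {
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(STREAM);
        return options;
    }
}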

[flink] 07/09: [FLINK-25391][format-avro] Forward catalog table options

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 8f77862fa5ecbec5ee26b7b2b68478ad50943a3e
Author: slinkydeveloper 
AuthorDate: Thu Jan 6 16:37:17 2022 +0100

[FLINK-25391][format-avro] Forward catalog table options
---
 .../docs/connectors/table/formats/avro-confluent.md   | 15 ++-
 docs/content/docs/connectors/table/formats/avro.md|  5 -
 .../registry/confluent/RegistryAvroFormatFactory.java | 19 +++
 .../flink/formats/avro/AvroFileFormatFactory.java |  5 +
 4 files changed, 42 insertions(+), 2 deletions(-)

diff --git a/docs/content/docs/connectors/table/formats/avro-confluent.md 
b/docs/content/docs/connectors/table/formats/avro-confluent.md
index cf2fe70..28b33da 100644
--- a/docs/content/docs/connectors/table/formats/avro-confluent.md
+++ b/docs/content/docs/connectors/table/formats/avro-confluent.md
@@ -176,15 +176,17 @@ Format Options
   
 Option
 Required
+Forwarded
 Default
 Type
-Description
+Description
   
 
 
 
 format
 required
+no
 (none)
 String
 Specify what format to use, here should be 
'avro-confluent'.
@@ -192,6 +194,7 @@ Format Options
 
 avro-confluent.basic-auth.credentials-source
 optional
+yes
 (none)
 String
 Basic auth credentials source for Schema Registry
@@ -199,6 +202,7 @@ Format Options
 
 avro-confluent.basic-auth.user-info
 optional
+yes
 (none)
 String
 Basic auth user info for schema registry
@@ -206,6 +210,7 @@ Format Options
 
 avro-confluent.bearer-auth.credentials-source
 optional
+yes
 (none)
 String
 Bearer auth credentials source for Schema Registry
@@ -213,6 +218,7 @@ Format Options
 
 avro-confluent.bearer-auth.token
 optional
+yes
 (none)
 String
 Bearer auth token for Schema Registry
@@ -220,6 +226,7 @@ Format Options
 
 avro-confluent.properties
 optional
+yes
 (none)
 Map
 Properties map that is forwarded to the underlying Schema 
Registry. This is useful for options that are not officially exposed via Flink 
config options. However, note that Flink options have higher precedence.
@@ -227,6 +234,7 @@ Format Options
 
 avro-confluent.ssl.keystore.location
 optional
+yes
 (none)
 String
 Location / File of SSL keystore
@@ -234,6 +242,7 @@ Format Options
 
 avro-confluent.ssl.keystore.password
 optional
+yes
 (none)
 String
 Password for SSL keystore
@@ -241,6 +250,7 @@ Format Options
 
 avro-confluent.ssl.truststore.location
 optional
+yes
 (none)
 String
 Location / File of SSL truststore
@@ -248,6 +258,7 @@ Format Options
 
 avro-confluent.ssl.truststore.password
 optional
+yes
 (none)
 String
 Password for SSL truststore
@@ -255,6 +266,7 @@ Format Options
 
 avro-confluent.subject
 optional
+yes
 (none)
 String
 The Confluent Schema Registry subject under which to register 
the schema used by this format during serialization. By default, 'kafka' and 
'upsert-kafka' connectors use '<topic_name>-value' or 
'<topic_name>-key' as the default subject name if this format is used as 
the value or key format. But for other connectors (e.g. 'filesystem'), the 
subject option is required when used as sink.
@@ -262,6 +274,7 @@ Format Options
 
 avro-confluent.url
 required
+yes
 (none)
 String
 The URL of the Confluent Schema Registry to fetch/register 
schemas.
diff --git a/docs/content/docs/connectors/table/formats/avro.md 
b/docs/content/docs/connectors/table/formats/avro.md
index 341ca0a..601a9dc 100644
--- a/docs/content/docs/connectors/table/formats/avro.md
+++ b/docs/content/docs/connectors/table/formats/avro.md
@@ -65,15 +65,17 @@ Format Options
   
 Option
 Required
+Forwarded
 Default
 Type
-Description
+Description
   
 
 
 
   format
   required
+  no
   (none)
   String
   Specify what format to use, here should be 'avro'.
@@ -81,6 +83,7 @@ Format Options
 
   av

[flink] 06/09: [FLINK-25391][connector-hbase] Forward catalog table options

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 5175ed0d48835344c1cd4282372d6b01571d914b
Author: slinkydeveloper 
AuthorDate: Thu Jan 6 16:35:09 2022 +0100

[FLINK-25391][connector-hbase] Forward catalog table options
---
 docs/content/docs/connectors/table/hbase.md| 17 -
 .../hbase1/HBase1DynamicTableFactory.java  | 29 --
 .../hbase2/HBase2DynamicTableFactory.java  | 27 ++--
 .../hbase/table/HBaseConnectorOptionsUtil.java | 18 ++
 4 files changed, 65 insertions(+), 26 deletions(-)

diff --git a/docs/content/docs/connectors/table/hbase.md 
b/docs/content/docs/connectors/table/hbase.md
index 86d45e5..21436cd 100644
--- a/docs/content/docs/connectors/table/hbase.md
+++ b/docs/content/docs/connectors/table/hbase.md
@@ -82,15 +82,17 @@ Connector Options
   
 Option
 Required
+Forwarded
 Default
 Type
-Description
+Description
   
 
 
 
   connector
   required
+  no
   (none)
   String
   Specify what connector to use; valid values are:
@@ -103,6 +105,7 @@ Connector Options
 
   table-name
   required
+  yes
   (none)
   String
   The name of the HBase table to connect to. By default, the table is in 
the 'default' namespace. To assign the table a specified namespace, use 
'namespace:table'.
@@ -110,6 +113,7 @@ Connector Options
 
   zookeeper.quorum
   required
+  yes
   (none)
   String
   The HBase Zookeeper quorum.
@@ -117,6 +121,7 @@ Connector Options
 
   zookeeper.znode.parent
   optional
+  yes
   /hbase
   String
   The root dir in Zookeeper for HBase cluster.
@@ -124,6 +129,7 @@ Connector Options
 
   null-string-literal
   optional
+  yes
   null
   String
   Representation for null values for string fields. HBase source and 
sink encode/decode empty bytes as null values for all types except string 
type.
@@ -131,6 +137,7 @@ Connector Options
 
   sink.buffer-flush.max-size
   optional
+  yes
   2mb
   MemorySize
   Writing option, maximum size in memory of buffered rows for each 
writing request.
@@ -141,6 +148,7 @@ Connector Options
 
   sink.buffer-flush.max-rows
   optional
+  yes
   1000
   Integer
   Writing option, maximum number of rows to buffer for each writing 
request.
@@ -151,6 +159,7 @@ Connector Options
 
   sink.buffer-flush.interval
   optional
+  yes
   1s
   Duration
   Writing option, the interval to flush any buffered rows.
@@ -162,6 +171,7 @@ Connector Options
 
   sink.parallelism
   optional
+  no
   (none)
   Integer
   Defines the parallelism of the HBase sink operator. By default, the 
parallelism is determined by the framework using the same parallelism of the 
upstream chained operator.
@@ -169,6 +179,7 @@ Connector Options
 
   lookup.async
   optional
+  no
   false
   Boolean
   Whether async lookup is enabled. If true, the lookup will be async. 
Note, async lookup only supports the hbase-2.2 connector.
@@ -176,6 +187,7 @@ Connector Options
 
   lookup.cache.max-rows
   optional
+  yes
   -1
   Long
   The max number of rows in the lookup cache; beyond this value, the oldest 
rows expire. Note, the "lookup.cache.max-rows" and "lookup.cache.ttl" 
options must both be specified if either of them is specified. Lookup cache is 
disabled by default.
@@ -183,6 +195,7 @@ Connector Options
 
   lookup.cache.ttl
   optional
+  yes
   0 s
   Duration
   The max time to live for each row in the lookup cache; after this time, 
the oldest rows expire. Note, the "cache.max-rows" and "cache.ttl" options 
must both be specified if either of them is specified. Lookup cache is disabled by 
default.
@@ -190,6 +203,7 @@ Connector Options
 
   lookup.max-retries
   optional
+  yes
   3
   Integer
   The max number of retries if a database lookup fails.
@@ -197,6 +211,7 @@ Connector Options
 
   properties.*
   optional
+  no
   (none)
   String
   
diff --git 
a/flink-connectors/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
 
b/flink-connectors/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
index 3454064..6a3e6ba 100644
--- 
a/flink-connectors/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
+++ 
b/flink-connectors/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
@@ -34,9 +34,11 @@ import 
org.apache.flink.table.fa

[flink] 04/09: [FLINK-25391][connector-kafka] Forward catalog table options

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 0c34c994f9906e58963f85739fc951221b11d26a
Author: slinkydeveloper 
AuthorDate: Thu Jan 6 16:20:12 2022 +0100

[FLINK-25391][connector-kafka] Forward catalog table options
---
 docs/content/docs/connectors/table/kafka.md| 24 +-
 .../kafka/table/KafkaDynamicTableFactory.java  | 23 +
 2 files changed, 38 insertions(+), 9 deletions(-)

diff --git a/docs/content/docs/connectors/table/kafka.md 
b/docs/content/docs/connectors/table/kafka.md
index 6b728f6..34d73c4 100644
--- a/docs/content/docs/connectors/table/kafka.md
+++ b/docs/content/docs/connectors/table/kafka.md
@@ -179,15 +179,17 @@ Connector Options
 
   Option
   Required
+  Forwarded
   Default
   Type
-  Description
+  Description
 
 
 
 
   connector
   required
+  no
   (none)
   String
   Specify what connector to use, for Kafka use 
'kafka'.
@@ -195,6 +197,7 @@ Connector Options
 
   topic
   required for sink
+  yes
   (none)
   String
   Topic name(s) to read data from when the table is used as source. It 
also supports a topic list for sources by separating topics with a semicolon, like 
'topic-1;topic-2'. Note, only one of "topic-pattern" and "topic" 
can be specified for sources. When the table is used as sink, the topic name is 
the topic to write data to. Note, topic lists are not supported for sinks.
@@ -202,6 +205,7 @@ Connector Options
 
   topic-pattern
   optional
+  yes
   (none)
   String
   The regular expression for a pattern of topic names to read from. 
All topics with names that match the specified regular expression will be 
subscribed by the consumer when the job starts running. Note, only one of 
"topic-pattern" and "topic" can be specified for sources.
@@ -209,6 +213,7 @@ Connector Options
 
   properties.bootstrap.servers
   required
+  yes
   (none)
   String
   Comma separated list of Kafka brokers.
@@ -216,6 +221,7 @@ Connector Options
 
   properties.group.id
   optional for source, not applicable for sink
+  yes
   (none)
   String
   The id of the consumer group for Kafka source. If group ID is not 
specified, an automatically generated id "KafkaSource-{tableIdentifier}" will 
be used.
@@ -223,6 +229,7 @@ Connector Options
 
   properties.*
   optional
+  no
   (none)
   String
   
@@ -232,6 +239,7 @@ Connector Options
 
   format
   required
+  no
   (none)
   String
   The format used to deserialize and serialize the value part of Kafka 
messages.
@@ -243,6 +251,7 @@ Connector Options
 
   key.format
   optional
+  no
   (none)
   String
   The format used to deserialize and serialize the key part of Kafka 
messages.
@@ -254,6 +263,7 @@ Connector Options
 
   key.fields
   optional
+  no
   []
   List
   Defines an explicit list of physical columns from the table schema 
that configure the data
@@ -264,6 +274,7 @@ Connector Options
 
   key.fields-prefix
   optional
+  no
   (none)
   String
   Defines a custom prefix for all fields of the key format to avoid 
name clashes with fields
@@ -277,6 +288,7 @@ Connector Options
 
   value.format
   required
+  no
   (none)
   String
   The format used to deserialize and serialize the value part of Kafka 
messages.
@@ -288,6 +300,7 @@ Connector Options
 
   value.fields-include
   optional
+  no
   ALL
   Enum. Possible values: [ALL, EXCEPT_KEY]
   Defines a strategy for how to deal with key columns in the data type of 
the value format. By
@@ -298,6 +311,7 @@ Connector Options
 
   scan.startup.mode
   optional
+  yes
   group-offsets
   String
   Startup mode for Kafka consumer, valid values are 
'earliest-offset', 'latest-offset', 
'group-offsets', 'timestamp' and 
'specific-offsets'.
@@ -306,6 +320,7 @@ Connector Options
 
   scan.startup.specific-offsets
   optional
+  yes
   (none)
   String
   Specify offsets for each partition in case of 
'specific-offsets' startup mode, e.g. 
'partition:0,offset:42;partition:1,offset:300'.
@@ -314,6 +329,7 @@ Connector Options
 
   scan.startup.timestamp-millis
   optional
+  yes
   (none)
   Long
   Start from the specified epoch timestamp (milliseconds) used in case 
of 'timestamp' startup mode.
@@ -321,6 +337,7 @@ Connector Options
 
   scan.topic-partition-discovery.interval
   optional
+  yes
   (none)
   Duration
   Interval for consumer to discover dynamically created Kafka topics 
and partitions periodically.
@@ -328,6 +34
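
As a usage-side illustration (not part of this commit): the options in the
table above are regular catalog table options and can be set through the
Table API as well as DDL. A minimal sketch, assuming the Table API's
TableDescriptor (available since Flink 1.14) and a Kafka broker reachable at
localhost:9092; table and column names are made up for the example:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.TableEnvironment;

public class KafkaTableSketch {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());

        // 'scan.startup.mode' and 'properties.bootstrap.servers' are marked
        // "Forwarded: yes" in the table above; 'format' is not.
        env.createTemporaryTable(
                "orders",
                TableDescriptor.forConnector("kafka")
                        .schema(
                                Schema.newBuilder()
                                        .column("order_id", DataTypes.STRING())
                                        .column("amount", DataTypes.DOUBLE())
                                        .build())
                        .option("topic", "orders")
                        .option("properties.bootstrap.servers", "localhost:9092")
                        .option("scan.startup.mode", "earliest-offset")
                        .format("json")
                        .build());

        env.executeSql("SELECT * FROM orders").print();
    }
}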

[flink] 09/09: [FLINK-25391][format-json] Forward catalog table options

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit fa161d3c5636370f5129320a9ca464e38f88fc6f
Author: slinkydeveloper 
AuthorDate: Wed Jan 12 18:57:48 2022 +0100

[FLINK-25391][format-json] Forward catalog table options

This closes #18290.
---
 docs/content/docs/connectors/table/formats/json.md | 10 +-
 .../java/org/apache/flink/formats/json/JsonFormatFactory.java  |  1 -
 2 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/docs/content/docs/connectors/table/formats/json.md 
b/docs/content/docs/connectors/table/formats/json.md
index 56b8c84..d9c5e5c 100644
--- a/docs/content/docs/connectors/table/formats/json.md
+++ b/docs/content/docs/connectors/table/formats/json.md
@@ -69,15 +69,17 @@ Format Options
   
 Option
 Required
+Forwarded
 Default
 Type
-Description
+Description
   
 
 
 
   format
   required
+  no
   (none)
   String
   Specify what format to use, here should be 'json'.
@@ -85,6 +87,7 @@ Format Options
 
   json.fail-on-missing-field
   optional
+  no
   false
   Boolean
   Whether to fail if a field is missing or not.
@@ -92,6 +95,7 @@ Format Options
 
   json.ignore-parse-errors
   optional
+  no
   false
   Boolean
   Skip fields and rows with parse errors instead of failing.
@@ -100,6 +104,7 @@ Format Options
 
   json.timestamp-format.standard
   optional
+  yes
   'SQL'
   String
   Specify the input and output timestamp format for 
TIMESTAMP and TIMESTAMP_LTZ type. Currently supported 
values are 'SQL' and 'ISO-8601':
@@ -114,6 +119,7 @@ Format Options
 
   json.map-null-key.mode
   optional
+  yes
   'FAIL'
   String
   Specify the handling mode when serializing null keys for map data. 
Currently supported values are 'FAIL', 'DROP' and 
'LITERAL':
@@ -127,6 +133,7 @@ Format Options
 
   json.map-null-key.literal
   optional
+  yes
   'null'
   String
   Specify string literal to replace null key when 
'json.map-null-key.mode' is LITERAL.
@@ -134,6 +141,7 @@ Format Options
 
   json.encode.decimal-as-plain-number
   optional
+  yes
   false
   Boolean
   Encode all decimals as plain numbers instead of possible scientific 
notation. By default, decimals may be written using scientific notation. For 
example, 0.000000027 is encoded as 2.7E-8 by default, 
and will be written as 0.000000027 if this option is set to true.
diff --git 
a/flink-formats/flink-json/src/main/java/org/apache/flink/formats/json/JsonFormatFactory.java
 
b/flink-formats/flink-json/src/main/java/org/apache/flink/formats/json/JsonFormatFactory.java
index bf2e287..74d8c53 100644
--- 
a/flink-formats/flink-json/src/main/java/org/apache/flink/formats/json/JsonFormatFactory.java
+++ 
b/flink-formats/flink-json/src/main/java/org/apache/flink/formats/json/JsonFormatFactory.java
@@ -155,7 +155,6 @@ public class JsonFormatFactory implements 
DeserializationFormatFactory, Serializ
 @Override
 public Set<ConfigOption<?>> forwardOptions() {
 Set<ConfigOption<?>> options = new HashSet<>();
-options.add(IGNORE_PARSE_ERRORS);
 options.add(TIMESTAMP_FORMAT);
 options.add(MAP_NULL_KEY_MODE);
 options.add(MAP_NULL_KEY_LITERAL);


[flink] 08/09: [FLINK-25391][format-csv] Forward catalog table options

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 0370c36eff86a0af9485405ca3a51663c33cbadf
Author: slinkydeveloper 
AuthorDate: Thu Jan 6 16:42:25 2022 +0100

[FLINK-25391][format-csv] Forward catalog table options
---
 docs/content/docs/connectors/table/formats/csv.md   | 12 +++-
 .../java/org/apache/flink/formats/csv/CsvFormatFactory.java | 13 +
 2 files changed, 24 insertions(+), 1 deletion(-)

diff --git a/docs/content/docs/connectors/table/formats/csv.md 
b/docs/content/docs/connectors/table/formats/csv.md
index 1e9918c..e7cc549 100644
--- a/docs/content/docs/connectors/table/formats/csv.md
+++ b/docs/content/docs/connectors/table/formats/csv.md
@@ -67,15 +67,17 @@ Format Options
   
 Option
 Required
+Forwarded
 Default
 Type
-Description
+Description
   
 
 
 
   format
   required
+  no
   (none)
   String
   Specify what format to use, here should be 'csv'.
@@ -83,6 +85,7 @@ Format Options
 
   csv.field-delimiter
   optional
+  yes
   ,
   String
   Field delimiter character (',' by default); must be a 
single character. You can use backslash to specify special characters, e.g. 
'\t' represents the tab character.
@@ -92,6 +95,7 @@ Format Options
 
   csv.disable-quote-character
   optional
+  yes
   false
   Boolean
   Disable the quote character for enclosing field values (false by 
default).
@@ -100,6 +104,7 @@ Format Options
 
   csv.quote-character
   optional
+  yes
   "
   String
   Quote character for enclosing field values (" by 
default).
@@ -107,6 +112,7 @@ Format Options
 
   csv.allow-comments
   optional
+  yes
   false
   Boolean
   Ignore comment lines that start with '#' (disabled by 
default).
@@ -115,6 +121,7 @@ Format Options
 
   csv.ignore-parse-errors
   optional
+  no
   false
   Boolean
   Skip fields and rows with parse errors instead of failing.
@@ -123,6 +130,7 @@ Format Options
 
   csv.array-element-delimiter
   optional
+  yes
   ;
   String
   Array element delimiter string for separating
@@ -131,6 +139,7 @@ Format Options
 
   csv.escape-character
   optional
+  yes
   (none)
   String
   Escape character for escaping values (disabled by default).
@@ -138,6 +147,7 @@ Format Options
 
   csv.null-literal
   optional
+  yes
   (none)
   String
   Null literal string that is interpreted as a null value (disabled by 
default).
diff --git 
a/flink-formats/flink-csv/src/main/java/org/apache/flink/formats/csv/CsvFormatFactory.java
 
b/flink-formats/flink-csv/src/main/java/org/apache/flink/formats/csv/CsvFormatFactory.java
index 124f4a2..ddfd685 100644
--- 
a/flink-formats/flink-csv/src/main/java/org/apache/flink/formats/csv/CsvFormatFactory.java
+++ 
b/flink-formats/flink-csv/src/main/java/org/apache/flink/formats/csv/CsvFormatFactory.java
@@ -137,6 +137,19 @@ public final class CsvFormatFactory
 return options;
 }
 
+@Override
+public Set<ConfigOption<?>> forwardOptions() {
+Set<ConfigOption<?>> options = new HashSet<>();
+options.add(FIELD_DELIMITER);
+options.add(DISABLE_QUOTE_CHARACTER);
+options.add(QUOTE_CHARACTER);
+options.add(ALLOW_COMMENTS);
+options.add(ARRAY_ELEMENT_DELIMITER);
+options.add(ESCAPE_CHARACTER);
+options.add(NULL_LITERAL);
+return options;
+}
+
 // 
 //  Validation
 // 


[flink] 02/09: [FLINK-25391][connector-jdbc] Forward catalog table options

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit c61162b30f4b5567ecc2ee29481fcc87e5016428
Author: slinkydeveloper 
AuthorDate: Thu Jan 6 15:01:39 2022 +0100

[FLINK-25391][connector-jdbc] Forward catalog table options
---
 docs/content/docs/connectors/table/jdbc.md | 24 +-
 .../jdbc/table/JdbcDynamicTableFactory.java| 23 +
 2 files changed, 46 insertions(+), 1 deletion(-)

diff --git a/docs/content/docs/connectors/table/jdbc.md 
b/docs/content/docs/connectors/table/jdbc.md
index b289831..81a179a 100644
--- a/docs/content/docs/connectors/table/jdbc.md
+++ b/docs/content/docs/connectors/table/jdbc.md
@@ -93,15 +93,17 @@ Connector Options
   
 Option
 Required
+Forwarded
 Default
 Type
-Description
+Description
   
 
 
 
   connector
   required
+  no
   (none)
   String
   Specify what connector to use, here should be 
'jdbc'.
@@ -109,6 +111,7 @@ Connector Options
 
   url
   required
+  yes
   (none)
   String
   The JDBC database url.
@@ -116,6 +119,7 @@ Connector Options
 
   table-name
   required
+  yes
   (none)
   String
   The name of the JDBC table to connect to.
@@ -123,6 +127,7 @@ Connector Options
 
   driver
   optional
+  yes
   (none)
   String
   The class name of the JDBC driver to use to connect to this URL, if 
not set, it will automatically be derived from the URL.
@@ -130,6 +135,7 @@ Connector Options
 
   username
   optional
+  yes
   (none)
   String
   The JDBC user name. 'username' and 
'password' must both be specified if any of them is specified.
@@ -137,6 +143,7 @@ Connector Options
 
   password
   optional
+  yes
   (none)
   String
   The JDBC password.
@@ -144,6 +151,7 @@ Connector Options
 
   connection.max-retry-timeout
   optional
+  yes
   60s
   Duration
   Maximum timeout between retries. The timeout should be in second 
granularity and shouldn't be smaller than 1 second.
@@ -151,6 +159,7 @@ Connector Options
 
   scan.partition.column
   optional
+  no
   (none)
   String
   The column name used for partitioning the input. See the following 
Partitioned Scan section for more details.
@@ -158,6 +167,7 @@ Connector Options
 
   scan.partition.num
   optional
+  no
   (none)
   Integer
   The number of partitions.
@@ -165,6 +175,7 @@ Connector Options
 
   scan.partition.lower-bound
   optional
+  no
   (none)
   Integer
   The smallest value of the first partition.
@@ -172,6 +183,7 @@ Connector Options
 
   scan.partition.upper-bound
   optional
+  no
   (none)
   Integer
   The largest value of the last partition.
@@ -179,6 +191,7 @@ Connector Options
 
   scan.fetch-size
   optional
+  yes
   0
   Integer
   The number of rows that should be fetched from the database when 
reading per round trip. If the value specified is zero, then the hint is 
ignored.
@@ -186,6 +199,7 @@ Connector Options
 
   scan.auto-commit
   optional
+  yes
   true
   Boolean
   Sets the auto-commit 
(https://docs.oracle.com/javase/tutorial/jdbc/basics/transactions.html#commit_transactions) flag on the JDBC driver,
@@ -195,6 +209,7 @@ Connector Options
 
   lookup.cache.max-rows
   optional
+  yes
   (none)
   Integer
   The max number of rows in the lookup cache; beyond this value, the oldest 
rows expire.
@@ -203,6 +218,7 @@ Connector Options
 
   lookup.cache.ttl
   optional
+  yes
   (none)
   Duration
   The max time to live for each row in the lookup cache; after this time, 
the oldest rows expire.
@@ -211,6 +227,7 @@ Connector Options
 
   lookup.cache.caching-missing-key
   optional
+  yes
   true
   Boolean
   Flag to cache missing keys; true by default
@@ -218,6 +235,7 @@ Connector Options
 
   lookup.max-retries
   optional
+  yes
   3
   Integer
   The max number of retries if a database lookup fails.
@@ -225,6 +243,7 @@ Connector Options
 
   sink.buffer-flush.max-rows
   optional
+  yes
   100
   Integer
   The max size of buffered records before flush. Can be set to zero to 
disable it.
@@ -232,6 +251,7 @@ Connector Options
 
   sink.buffer-flush.interval
   optional
+  yes
   1s
   Duration
   The flush interval in milliseconds; over this time, asynchronous threads will 
flush data. Can be set to '0' to disable it. Note, 
'sink.buffer-flush.max-rows' can be set to '0' with 
the flush interval set allowing for complet

[flink] branch master updated (2160735 -> fa161d3)

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 2160735  [FLINK-25739][dist] Include Changelog to flink-dist jar
 new 5e28f66  [FLINK-25391][connector-elasticsearch] Forward catalog table 
options
 new c61162b  [FLINK-25391][connector-jdbc] Forward catalog table options
 new c926031  [FLINK-25391][connector-files] Forward catalog table options
 new 0c34c99  [FLINK-25391][connector-kafka] Forward catalog table options
 new 2cb86ff  [FLINK-25391][connector-kinesis] Forward catalog table options
 new 5175ed0  [FLINK-25391][connector-hbase] Forward catalog table options
 new 8f77862  [FLINK-25391][format-avro] Forward catalog table options
 new 0370c36e [FLINK-25391][format-csv] Forward catalog table options
 new fa161d3  [FLINK-25391][format-json] Forward catalog table options

The 9 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../content/docs/connectors/table/elasticsearch.md | 23 ++-
 docs/content/docs/connectors/table/filesystem.md   | 80 +-
 .../connectors/table/formats/avro-confluent.md | 15 +++-
 docs/content/docs/connectors/table/formats/avro.md |  5 +-
 docs/content/docs/connectors/table/formats/csv.md  | 12 +++-
 docs/content/docs/connectors/table/formats/json.md | 10 ++-
 docs/content/docs/connectors/table/hbase.md| 17 -
 docs/content/docs/connectors/table/jdbc.md | 24 ++-
 docs/content/docs/connectors/table/kafka.md| 24 ++-
 docs/content/docs/connectors/table/kinesis.md  | 79 +++--
 .../table/ElasticsearchDynamicSinkFactoryBase.java | 50 +-
 .../table/Elasticsearch6DynamicSinkFactory.java| 21 +++---
 .../file/table/AbstractFileSystemTable.java| 24 +++
 .../file/table/FileSystemTableFactory.java | 29 +++-
 .../connector/file/table/FileSystemTableSink.java  | 21 --
 .../file/table/FileSystemTableSource.java  | 25 ---
 .../hbase1/HBase1DynamicTableFactory.java  | 29 ++--
 .../hbase2/HBase2DynamicTableFactory.java  | 27 ++--
 .../hbase/table/HBaseConnectorOptionsUtil.java | 18 ++---
 .../jdbc/table/JdbcDynamicTableFactory.java| 23 +++
 .../kafka/table/KafkaDynamicTableFactory.java  | 23 ---
 .../kinesis/table/KinesisDynamicTableFactory.java  | 16 -
 .../confluent/RegistryAvroFormatFactory.java   | 19 +
 .../flink/formats/avro/AvroFileFormatFactory.java  |  5 ++
 .../apache/flink/formats/csv/CsvFormatFactory.java | 13 
 .../flink/formats/json/JsonFormatFactory.java  |  1 -
 26 files changed, 509 insertions(+), 124 deletions(-)
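
To poke at the result of this series locally, one could print what a touched
factory now declares as forwardable. A minimal sketch, assuming the
flink-csv artifact from this build is on the classpath and that
CsvFormatFactory keeps its public no-arg constructor:

import java.util.Set;

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.formats.csv.CsvFormatFactory;

public class PrintForwardedOptions {
    public static void main(String[] args) {
        // forwardOptions() is the hook these nine commits implement; for the
        // CSV format it should list the delimiter/quote/escape options shown
        // in commit 08/09.
        Set<ConfigOption<?>> forwarded = new CsvFormatFactory().forwardOptions();
        forwarded.forEach(option -> System.out.println(option.key()));
    }
}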


[flink] 03/09: [FLINK-25391][connector-files] Forward catalog table options

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit c9260311637ad47a6e67f154c629ddd49d9f262a
Author: slinkydeveloper 
AuthorDate: Thu Jan 6 14:52:50 2022 +0100

[FLINK-25391][connector-files] Forward catalog table options
---
 docs/content/docs/connectors/table/filesystem.md   | 80 +-
 .../file/table/AbstractFileSystemTable.java| 24 +++
 .../file/table/FileSystemTableFactory.java | 29 +++-
 .../connector/file/table/FileSystemTableSink.java  | 21 --
 .../file/table/FileSystemTableSource.java  | 25 ---
 5 files changed, 131 insertions(+), 48 deletions(-)

diff --git a/docs/content/docs/connectors/table/filesystem.md 
b/docs/content/docs/connectors/table/filesystem.md
index 24dfa11..e2c0aeb 100644
--- a/docs/content/docs/connectors/table/filesystem.md
+++ b/docs/content/docs/connectors/table/filesystem.md
@@ -208,21 +208,27 @@ a timeout that specifies the maximum duration for which a 
file can be open.
 
   
 
-Key
-Default
+Option
+Required
+Forwarded
+Default
 Type
-Description
+Description
 
   
   
 
 sink.rolling-policy.file-size
+optional
+yes
 128MB
 MemorySize
 The maximum part file size before rolling.
 
 
 sink.rolling-policy.rollover-interval
+optional
+yes
 30 min
 Duration
The maximum time duration a part file can stay open before rolling 
(by default 30 min, to avoid too many small files).
@@ -230,6 +236,8 @@ a timeout that specifies the maximum duration for which a 
file can be open.
 
 
 sink.rolling-policy.check-interval
+optional
+yes
 1 min
 Duration
 The interval for checking time based rolling policies. This 
controls the frequency to check whether a part file should rollover based on 
'sink.rolling-policy.rollover-interval'.
@@ -250,21 +258,27 @@ The file sink supports file compactions, which allows 
applications to have small
 
   
 
-Key
-Default
+Option
+Required
+Forwarded
+Default
 Type
-Description
+Description
 
   
   
 
 auto-compaction
+optional
+no
 false
 Boolean
 Whether to enable automatic compaction in streaming sink or not. 
The data will be written to temporary files. After the checkpoint is completed, 
the temporary files generated by a checkpoint will be compacted. The temporary 
files are invisible before compaction.
 
 
 compaction.file-size
+optional
+yes
 (none)
 MemorySize
The compaction target file size; the default value is the rolling 
file size.
@@ -294,27 +308,35 @@ To define when to commit a partition, providing partition 
commit trigger:
 
   
 
-Key
-Default
+Option
+Required
+Forwarded
+Default
 Type
-Description
+Description
 
   
   
 
 sink.partition-commit.trigger
+optional
+yes
 process-time
 String
Trigger type for partition commit. 'process-time': based on the 
time of the machine; it requires neither partition time extraction nor 
watermark generation. Commit a partition once the 'current system time' passes 
'partition creation system time' plus 'delay'. 'partition-time': based on the 
time extracted from partition values; it requires watermark generation. 
Commit a partition once the 'watermark' passes 'time extracted from partition 
values' plus 'delay'.
 
 
 sink.partition-commit.delay
+optional
+yes
 0 s
 Duration
The partition will not commit until the delay time. If it is a 
daily partition, this should be '1 d'; if it is an hourly partition, it should be '1 
h'.
 
 
 sink.partition-commit.watermark-time-zone
+optional
+yes
 UTC
 String
The time zone used to parse the long watermark value to a TIMESTAMP 
value; the parsed watermark timestamp is compared with the partition time to 
decide whether the partition should commit. This option only takes effect when 
`sink.partition-commit.trigger` is set to 'partition-time'. If this option is 
not configured correctly, e.g. the source rowtime is defined on a TIMESTAMP_LTZ 
column but this config is not set, then users may see the partition 
committed after a few hours. Th [...]
@@ -356,33 +378,43 @@ Time extractors define extracting time from partition 
values.
 
   
 
-Key
-Default
+Option
+Required
+Forwarded
+Default
 Type
-Description
+Description
 
   
   
 

[flink] 01/09: [FLINK-25391][connector-elasticsearch] Forward catalog table options

2022-01-25 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 5e28f66f6ef2ed03f9ee69148fe5079ae5e358c4
Author: slinkydeveloper 
AuthorDate: Thu Jan 6 14:52:38 2022 +0100

[FLINK-25391][connector-elasticsearch] Forward catalog table options
---
 .../content/docs/connectors/table/elasticsearch.md | 23 +-
 .../table/ElasticsearchDynamicSinkFactoryBase.java | 50 ++
 .../table/Elasticsearch6DynamicSinkFactory.java| 21 +
 3 files changed, 66 insertions(+), 28 deletions(-)

diff --git a/docs/content/docs/connectors/table/elasticsearch.md 
b/docs/content/docs/connectors/table/elasticsearch.md
index 22f0b60..b5ae31d 100644
--- a/docs/content/docs/connectors/table/elasticsearch.md
+++ b/docs/content/docs/connectors/table/elasticsearch.md
@@ -67,15 +67,17 @@ Connector Options
   
 Option
 Required
+Forwarded
 Default
 Type
-Description
+Description
   
 
 
 
   connector
   required
+  no
   (none)
   String
   Specify what connector to use; valid values are:
@@ -87,6 +89,7 @@ Connector Options
 
   hosts
   required
+  yes
   (none)
   String
   One or more Elasticsearch hosts to connect to, e.g. 
'http://host_name:9092;http://host_name:9093'.
@@ -94,6 +97,7 @@ Connector Options
 
   index
   required
+  yes
   (none)
   String
   Elasticsearch index for every record. Can be a static index (e.g. 
'myIndex') or
@@ -103,6 +107,7 @@ Connector Options
 
   document-type
   required in 6.x
+  yes in 6.x
   (none)
   String
   Elasticsearch document type. Not necessary anymore in 
elasticsearch-7.
@@ -110,6 +115,7 @@ Connector Options
 
   document-id.key-delimiter
   optional
+  yes
   _
   String
   Delimiter for composite keys ("_" by default), e.g., "$" would 
result in IDs "KEY1$KEY2$KEY3".
@@ -117,6 +123,7 @@ Connector Options
 
   username
   optional
+  yes
   (none)
   String
   Username used to connect to the Elasticsearch instance. Please note 
that Elasticsearch doesn't come with a pre-bundled security feature, but you can enable it 
by following the guideline 
(https://www.elastic.co/guide/en/elasticsearch/reference/master/configuring-security.html) to secure an Elasticsearch cluster.
@@ -124,6 +131,7 @@ Connector Options
 
   password
   optional
+  yes
   (none)
   String
   Password used to connect to the Elasticsearch instance. If 
username is configured, this option must be configured with a 
non-empty string as well.
@@ -131,6 +139,7 @@ Connector Options
 
   sink.delivery-guarantee
   optional
+  no
   NONE
   String
   Optional delivery guarantee when committing. Valid values are 
NONE or AT_LEAST_ONCE.
@@ -138,6 +147,7 @@ Connector Options
 
   sink.bulk-flush.max-actions
   optional
+  yes
   1000
   Integer
   Maximum number of buffered actions per bulk request.
@@ -147,6 +157,7 @@ Connector Options
 
   sink.bulk-flush.max-size
   optional
+  yes
   2mb
   MemorySize
   Maximum size in memory of buffered actions per bulk request. Must be 
in MB granularity.
@@ -156,6 +167,7 @@ Connector Options
 
   sink.bulk-flush.interval
   optional
+  yes
   1s
   Duration
   The interval to flush buffered actions.
@@ -166,6 +178,7 @@ Connector Options
 
   sink.bulk-flush.backoff.strategy
   optional
+  yes
   NONE
   String
   Specify how to perform retries if any flush actions failed due to a 
temporary request error. Valid strategies are:
@@ -179,6 +192,7 @@ Connector Options
 
   sink.bulk-flush.backoff.max-retries
   optional
+  yes
   (none)
   Integer
   Maximum number of backoff retries.
@@ -186,6 +200,7 @@ Connector Options
 
   sink.bulk-flush.backoff.delay
   optional
+  yes
   (none)
   Duration
   Delay between each backoff attempt. For CONSTANT 
backoff, this is simply the delay between each retry. For 
EXPONENTIAL backoff, this is the initial base delay.
@@ -193,6 +208,7 @@ Connector Options
 
   sink.parallelism
   optional
+  no
   (none)
   Integer
   Defines the parallelism of the Elasticsearch sink operator. By 
default, the parallelism is determined by the framework using the same 
parallelism of the upstream chained operator.
@@ -200,6 +216,7 @@ Connector Options
 
   connection.path-prefix
   optional
+  yes
   (none)
   String
   Prefix string to be added to every REST communication, e.g., 
'/v1'.
@@ -207,6 +224,7 @@ Connector Options
 
   connection.request-timeout
   optional
+  yes
   (none)
   Duration
   T