(flink) branch master updated: [FLINK-33407] Remove old expression stack leftovers for time functions

2023-11-03 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 5da6b3eff29 [FLINK-33407] Remove old expression stack leftovers for time functions
5da6b3eff29 is described below

commit 5da6b3eff29c14931c86b2018331c40ed256420f
Author: Dawid Wysakowicz 
AuthorDate: Tue Oct 31 10:22:54 2023 +0100

[FLINK-33407] Remove old expression stack leftovers for time functions
---
 .../expressions/PlannerExpressionConverter.scala   |  40 ---
 .../flink/table/planner/expressions/time.scala | 309 -
 2 files changed, 349 deletions(-)

diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/expressions/PlannerExpressionConverter.scala b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/expressions/PlannerExpressionConverter.scala
index 6a0d7b028ed..8983fbbd64a 100644
--- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/expressions/PlannerExpressionConverter.scala
+++ b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/expressions/PlannerExpressionConverter.scala
@@ -147,46 +147,6 @@ class PlannerExpressionConverter private extends ApiExpressionVisitor[PlannerExp
 assert(args.size == 1)
 Collect(args.head)
 
-  case EXTRACT =>
-assert(args.size == 2)
-Extract(args.head, args.last)
-
-  case CURRENT_DATE =>
-assert(args.isEmpty)
-CurrentDate()
-
-  case CURRENT_TIME =>
-assert(args.isEmpty)
-CurrentTime()
-
-  case CURRENT_TIMESTAMP =>
-assert(args.isEmpty)
-CurrentTimestamp()
-
-  case LOCAL_TIME =>
-assert(args.isEmpty)
-LocalTime()
-
-  case LOCAL_TIMESTAMP =>
-assert(args.isEmpty)
-LocalTimestamp()
-
-  case TEMPORAL_OVERLAPS =>
-assert(args.size == 4)
-TemporalOverlaps(args.head, args(1), args(2), args.last)
-
-  case DATE_FORMAT =>
-assert(args.size == 2)
-DateFormat(args.head, args.last)
-
-  case TIMESTAMP_DIFF =>
-assert(args.size == 3)
-TimestampDiff(args.head, args(1), args.last)
-
-  case TO_TIMESTAMP_LTZ =>
-assert(args.size == 2)
-ToTimestampLtz(args.head, args.last)
-
   case AT =>
 assert(args.size == 2)
 ItemAt(args.head, args.last)
diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/expressions/time.scala b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/expressions/time.scala
deleted file mode 100644
index 19295dd36aa..000
--- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/expressions/time.scala
+++ /dev/null
@@ -1,309 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.flink.table.planner.expressions
-
-import org.apache.flink.api.common.typeinfo.{LocalTimeTypeInfo, SqlTimeTypeInfo, TypeInformation}
-import org.apache.flink.api.common.typeinfo.BasicTypeInfo._
-import org.apache.flink.table.planner.calcite.FlinkRelBuilder
-import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable
-import org.apache.flink.table.planner.typeutils.TypeInfoCheckUtils
-import org.apache.flink.table.planner.typeutils.TypeInfoCheckUtils.isTimeInterval
-import org.apache.flink.table.planner.validate.{ValidationFailure, ValidationResult, ValidationSuccess}
-import org.apache.flink.table.runtime.typeutils.LegacyLocalDateTimeTypeInfo
-import org.apache.flink.table.typeutils.TimeIntervalTypeInfo
-
-import org.apache.calcite.rex._
-
-case class Extract(timeIntervalUnit: PlannerExpression, temporal: PlannerExpression)
-  extends PlannerExpression {
-
-  override private[flink] def children: Seq[PlannerExpression] = timeIntervalUnit :: temporal :: Nil
-
-  override private[flink] def resultType: TypeInformation

(flink-connector-opensearch) annotated tag v1.1.0-rc1 updated (0f659cc -> 1a841f6)

2023-11-03 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a change to annotated tag v1.1.0-rc1
in repository https://gitbox.apache.org/repos/asf/flink-connector-opensearch.git


*** WARNING: tag v1.1.0-rc1 was modified! ***

from 0f659cc  (commit)
  to 1a841f6  (tag)
 tagging 0f659cc65131c9ff7c8c35eb91f5189e80414ea1 (commit)
  by Danny Cranmer
  on Fri Nov 3 09:12:19 2023 +

- Log -
v1.1.0-rc1
-----BEGIN PGP SIGNATURE-----

iQIzBAABCAAdFiEED3nyr7I1G8KWeFRFkfnB7BJf2NsFAmVEuXMACgkQkfnB7BJf
2NtyCw//XNQ4E+uPuF4zMonO1/6+gAoJipJv1T8zU6GtNzkK2LNx8Hqtv3WjFr08
QWIFfQ0DtBkrgNaXvNprn4yCTZltdYprZ6dgSpD5yNhPiGbPZexLpOhLBxBKP6tp
stNISZdRKH8Pk6Mm2S/Q8n3o2BRFtABqokzFS/9u56BDDFS1v/jGTEikPexeQ9Nt
QXLNLe1AkpR+WM6W0RN3VtlvyYoHjWMY6lFfdU+lJofb2pqdjBKwW5mC9ZJQt7cH
feS+Kas3Sqs9VXNx7dZmGoD0DCjXrHNgydVlLCMKS39QmZ0LOAKIEh1D009e6oux
l4DRmVw/NKw1NK+HbaFcEFwuI+w/KSHwCTC/+Zx9+rdp9P9CJ/ClYHMPvpMn6Lmk
0wyFL3R48TF9U3gVPIyYxvxX0XXvHzK3i/scJ+VEjo+UJIFt7sQlNSwSjwUYL7oE
N7BmrFIhAaFABsefDBTob2qJDev/xL1BpMxHlfZs9gOKwvpdhXIZ36WAXuYkygvi
Ubf71NHcwXq/CFmJm6fijrtZeUJ1QnrjsQqxJiqY3G1Xy74901gt3StgKz5mdTIm
RTEWJgA4w2TT+32MGDOoboLD07lmxn60iCqPFyZ+eoHYwEU22gHRXyT8VPRiPUDZ
hWFOnvLPKaQ51mjNS6cJoYKuIiDt+9NdCq/+KFA4vLpaea6fvHQ=
=behX
-----END PGP SIGNATURE-----
---


No new revisions were added by this update.

Summary of changes:



svn commit: r64995 - in /dev/flink/flink-connector-opensearch-1.1.0-rc1: ./ flink-connector-opensearch-1.1.0-src.tgz flink-connector-opensearch-1.1.0-src.tgz.asc flink-connector-opensearch-1.1.0-src.t

2023-11-03 Thread dannycranmer
Author: dannycranmer
Date: Fri Nov  3 09:13:21 2023
New Revision: 64995

Log:
Add flink-connector-opensearch-1.1.0-rc1

Added:
dev/flink/flink-connector-opensearch-1.1.0-rc1/

dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz
   (with props)

dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz.asc

dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz.sha512

Added: 
dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz
==
Binary file - no diff available.

Propchange: 
dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz
--
svn:mime-type = application/octet-stream

Added: 
dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz.asc
==
--- 
dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz.asc
 (added)
+++ 
dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz.asc
 Fri Nov  3 09:13:21 2023
@@ -0,0 +1,16 @@
+-----BEGIN PGP SIGNATURE-----
+
+iQIzBAABCAAdFiEED3nyr7I1G8KWeFRFkfnB7BJf2NsFAmVEuawACgkQkfnB7BJf
+2NsL3A/+LOboQ5EtSpGvZFKUma0adHXGO8f5z2cSTMoWb/OXlMsd5CRV5tZ80OtX
+TfVrQsB/uxQcefuUvhok4oJlsT7gtuBAFi2nccfl4i6ZWwXotIdFLwi4AjuZHkg/
+UHb2rKySo0Ch707WX+gyeeCmxiBmsiDpwORl2bgxumED9SpkogXVLWvFG0VQGveS
+hBR9XFoQUhprCRO2vpybonon/zSHQ62t9RrIeKR7xfBNEbYVYa1kNEqKcoXaoBn/
+CiE/tAf6o5Uo2RimeXThqiEePs82O0WXKEa3MszlMhxGrsNDq7NLVp1lv7odwEsY
+Si/z+8tYEold0MSW0B9ebr7FXyZsv23/jkfDpLYkf2sSqkxpRYgBOz+4QRwa7OFp
+CclEG2Hg58YlYdmO49xIcqg2P+rsGv1dnWPlgE7n2/AZHR/nQOPd6066XgsRK8eI
+tDnrpCc/2Psi4ZhaUt9DjUWvjBFNvLsb5f6chGIFo7PdGm6Io0BoE6KoTnaeVsVD
+6B7EOqT+t4uc2fHwsij+l01EN3M54Zy2SC3D9GXhGfne5nmMSPDeZuaudBY/Biga
+HIOah1Udr5Yhs+cGPQmPSwAI6HwbMtgo9eKZpjweCDPUNgdhEZI63PQ1iB/H2Uh3
+8DVAIfrKtnBi0r3Ffe/GDbTbATZaG637fj3n9gYNv/D/LO63gIo=
+=JiI4
+-----END PGP SIGNATURE-----

Added: 
dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz.sha512
==
--- 
dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz.sha512
 (added)
+++ 
dev/flink/flink-connector-opensearch-1.1.0-rc1/flink-connector-opensearch-1.1.0-src.tgz.sha512
 Fri Nov  3 09:13:21 2023
@@ -0,0 +1 @@
+23f980de6a59271b145e3c060d4e597c494574e7720bfedf332011750b77d72e87df81dcac1e1c6373c9a7aa76844096f22a089e9f78862432f251cac4ea135d
  flink-connector-opensearch-1.1.0-src.tgz




(flink-connector-opensearch) annotated tag v deleted (was d4947a7)

2023-11-03 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a change to annotated tag v
in repository https://gitbox.apache.org/repos/asf/flink-connector-opensearch.git


*** WARNING: tag v was deleted! ***

   tag was  d4947a7

The revisions that were on this annotated tag are still contained in
other references; therefore, this change does not discard any commits
from the repository.



(flink-shaded) branch release-16.0 updated: [FLINK-33417][netty] Upgrade netty version from 4.1.82 to 4.1.83

2023-11-03 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-16.0
in repository https://gitbox.apache.org/repos/asf/flink-shaded.git


The following commit(s) were added to refs/heads/release-16.0 by this push:
 new 2e81185  [FLINK-33417][netty] Upgrade netty version from 4.1.82 to 4.1.83
2e81185 is described below

commit 2e81185ea46261e7f6aca88fdd0a982193b4c86c
Author: Yuxin Tan 
AuthorDate: Thu Nov 2 11:05:33 2023 +0800

[FLINK-33417][netty] Upgrade netty version from 4.1.82 to 4.1.83
---
 .../src/main/resources/META-INF/NOTICE | 66 +++---
 .../src/main/resources/META-INF/NOTICE | 18 +++---
 .../src/main/resources/META-INF/NOTICE | 18 +++---
 .../src/main/resources/META-INF/NOTICE | 18 +++---
 .../src/main/resources/META-INF/NOTICE | 18 +++---
 pom.xml|  2 +-
 6 files changed, 70 insertions(+), 70 deletions(-)

diff --git a/flink-shaded-netty-4/src/main/resources/META-INF/NOTICE b/flink-shaded-netty-4/src/main/resources/META-INF/NOTICE
index cc89ec0..7cedf60 100644
--- a/flink-shaded-netty-4/src/main/resources/META-INF/NOTICE
+++ b/flink-shaded-netty-4/src/main/resources/META-INF/NOTICE
@@ -6,36 +6,36 @@ The Apache Software Foundation (http://www.apache.org/).
 
 This project bundles the following dependencies under the Apache Software License 2.0 (http://www.apache.org/licenses/LICENSE-2.0.txt)
 
-- io.netty:netty-all:4.1.82.Final
-- io.netty:netty-buffer:4.1.82.Final
-- io.netty:netty-codec:4.1.82.Final
-- io.netty:netty-codec-dns:4.1.82.Final
-- io.netty:netty-codec-haproxy:4.1.82.Final
-- io.netty:netty-codec-http:4.1.82.Final
-- io.netty:netty-codec-http2:4.1.82.Final
-- io.netty:netty-codec-memcache:4.1.82.Final
-- io.netty:netty-codec-mqtt:4.1.82.Final
-- io.netty:netty-codec-redis:4.1.82.Final
-- io.netty:netty-codec-smtp:4.1.82.Final
-- io.netty:netty-codec-socks:4.1.82.Final
-- io.netty:netty-codec-stomp:4.1.82.Final
-- io.netty:netty-codec-xml:4.1.82.Final
-- io.netty:netty-common:4.1.82.Final
-- io.netty:netty-handler:4.1.82.Final
-- io.netty:netty-handler-proxy:4.1.82.Final
-- io.netty:netty-resolver:4.1.82.Final
-- io.netty:netty-resolver-dns:4.1.82.Final
-- io.netty:netty-resolver-dns-classes-macos:4.1.82.Final
-- io.netty:netty-resolver-dns-native-macos:osx-x86_64:4.1.82.Final
-- io.netty:netty-resolver-dns-native-macos:osx-aarch_64:4.1.82.Final
-- io.netty:netty-transport:4.1.82.Final
-- io.netty:netty-transport-classes-epoll:4.1.82.Final
-- io.netty:netty-transport-classes-kqueue:4.1.82.Final
-- io.netty:netty-transport-native-epoll:linux-x86_64:4.1.82.Final
-- io.netty:netty-transport-native-epoll:linux-aarch_64:4.1.82.Final
-- io.netty:netty-transport-native-kqueue:osx-x86_64:4.1.82.Final
-- io.netty:netty-transport-native-kqueue:osx-aarch_64:4.1.82.Final
-- io.netty:netty-transport-native-unix-common:4.1.82.Final
-- io.netty:netty-transport-rxtx:4.1.82.Final
-- io.netty:netty-transport-sctp:4.1.82.Final
-- io.netty:netty-transport-udt:4.1.82.Final
\ No newline at end of file
+- io.netty:netty-all:4.1.83.Final
+- io.netty:netty-buffer:4.1.83.Final
+- io.netty:netty-codec:4.1.83.Final
+- io.netty:netty-codec-dns:4.1.83.Final
+- io.netty:netty-codec-haproxy:4.1.83.Final
+- io.netty:netty-codec-http:4.1.83.Final
+- io.netty:netty-codec-http2:4.1.83.Final
+- io.netty:netty-codec-memcache:4.1.83.Final
+- io.netty:netty-codec-mqtt:4.1.83.Final
+- io.netty:netty-codec-redis:4.1.83.Final
+- io.netty:netty-codec-smtp:4.1.83.Final
+- io.netty:netty-codec-socks:4.1.83.Final
+- io.netty:netty-codec-stomp:4.1.83.Final
+- io.netty:netty-codec-xml:4.1.83.Final
+- io.netty:netty-common:4.1.83.Final
+- io.netty:netty-handler:4.1.83.Final
+- io.netty:netty-handler-proxy:4.1.83.Final
+- io.netty:netty-resolver:4.1.83.Final
+- io.netty:netty-resolver-dns:4.1.83.Final
+- io.netty:netty-resolver-dns-classes-macos:4.1.83.Final
+- io.netty:netty-resolver-dns-native-macos:osx-x86_64:4.1.83.Final
+- io.netty:netty-resolver-dns-native-macos:osx-aarch_64:4.1.83.Final
+- io.netty:netty-transport:4.1.83.Final
+- io.netty:netty-transport-classes-epoll:4.1.83.Final
+- io.netty:netty-transport-classes-kqueue:4.1.83.Final
+- io.netty:netty-transport-native-epoll:linux-x86_64:4.1.83.Final
+- io.netty:netty-transport-native-epoll:linux-aarch_64:4.1.83.Final
+- io.netty:netty-transport-native-kqueue:osx-x86_64:4.1.83.Final
+- io.netty:netty-transport-native-kqueue:osx-aarch_64:4.1.83.Final
+- io.netty:netty-transport-native-unix-common:4.1.83.Final
+- io.netty:netty-transport-rxtx:4.1.83.Final
+- io.netty:netty-transport-sctp:4.1.83.Final
+- io.netty:netty-transport-udt:4.1.83.Final
diff --git a/flink-shaded-zookeeper-parent/flink-shaded-zookeeper-35/src/main/resources/META-INF/NOTICE b/flink-shaded-zookeeper-parent/flink-shaded-zookeeper-35/src/main/resources/META-INF/NOTICE
index 097cbec..5d1dbc0 100644
--- a/flink-shaded-zookeeper-paren

(flink) branch release-1.17 updated: [FLINK-33442] Copy local state collection preventively

2023-11-03 Thread roman
This is an automated email from the ASF dual-hosted git repository.

roman pushed a commit to branch release-1.17
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.17 by this push:
 new 9f067c4a825 [FLINK-33442] Copy local state collection preventively
9f067c4a825 is described below

commit 9f067c4a825c51d5856fa77ff66d6cbcb9a62336
Author: Roman Khachatryan 
AuthorDate: Thu Nov 2 21:46:14 2023 +

[FLINK-33442] Copy local state collection preventively
---
 .../streaming/state/restore/RocksDBIncrementalRestoreOperation.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/restore/RocksDBIncrementalRestoreOperation.java b/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/restore/RocksDBIncrementalRestoreOperation.java
index 5a89403617e..6fbae63c432 100644
--- a/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/restore/RocksDBIncrementalRestoreOperation.java
+++ b/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/restore/RocksDBIncrementalRestoreOperation.java
@@ -164,7 +164,7 @@ public class RocksDBIncrementalRestoreOperation implements RocksDBRestoreOper
                        || !Objects.equals(theFirstStateHandle.getKeyGroupRange(), keyGroupRange));
 
 if (isRescaling) {
-restoreWithRescaling(restoreStateHandles);
+restoreWithRescaling(new ArrayList<>(restoreStateHandles));
 } else {
 restoreWithoutRescaling(theFirstStateHandle);
 }
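
For context, the one-line change above is a defensive copy: the rescaling restore path now works on a private copy of the incoming state handle collection, so any mutation during rescaling cannot affect the caller's list. A minimal, framework-free Java sketch of the same idea (illustrative names, not Flink code):

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    final class DefensiveCopyExample {
        // The caller's collection is never touched; all mutation happens on the copy.
        static <T> List<T> restoreWithRescaling(Collection<T> stateHandles) {
            List<T> workingCopy = new ArrayList<>(stateHandles); // copy preventively
            // ... rescaling logic may reorder or remove entries on workingCopy only
            return workingCopy;
        }
    }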



svn commit: r64998 - in /dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1: ./ flink-connector-gcp-pubsub-3.0.2-src.tgz flink-connector-gcp-pubsub-3.0.2-src.tgz.asc flink-connector-gcp-pubsub-3.0.2-src.

2023-11-03 Thread dannycranmer
Author: dannycranmer
Date: Fri Nov  3 11:12:36 2023
New Revision: 64998

Log:
Add =flink-connector-gcp-pubsub-3.0.2-rc1

Added:
dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/

dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz
   (with props)

dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.asc

dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.sha512

Added: 
dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz
==
Binary file - no diff available.

Propchange: 
dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz
--
svn:mime-type = application/octet-stream

Added: 
dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.asc
==
--- 
dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.asc
 (added)
+++ 
dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.asc
 Fri Nov  3 11:12:36 2023
@@ -0,0 +1,16 @@
+-----BEGIN PGP SIGNATURE-----
+
+iQIzBAABCAAdFiEED3nyr7I1G8KWeFRFkfnB7BJf2NsFAmVE1Z4ACgkQkfnB7BJf
+2Nt54RAAkWu1+nOMQ8Qt4BipUa+8mzD6mgKI90sYRvgI9jdwADYb1Dywkj1Z8Dn3
+xGh0UXjAJ0SBhqBUYSaH/MKIzlw00lMQCqxTtCSzZx42lk2kdw9U51C9th8qG77u
+oV1QwuEFEg5kd1Agq8G2zvChwa312dFgkfVmrUgPGCjligDxYBWqDtHfsjbi6yDl
+mJaVqOcRFk/zlxnzfhLSXXC3T85KT749KbPmAVPGQHaQN3RxBDOKgxAFP9jCMBI6
+KFcfcD+innyOrVT5itXtrnS1BQCzsHz452wqNsAdmgknqN891D6zNCRQAgBzFxWn
+wyXRUaaf+B3whduLW3xnO9VwrHVajzQ3g3KusFrRiCmS7i2FAFv345T+TA3VEWXM
+YhaUBuzDwlkLKXR9ufA28XHLFDCeOlSQ2EiqwEwwMw8MOZKmWmTQiQSpGYTbfnnr
+Sv5f4LnoI0Q9IBKkyzcHbQ3c1O40vTiXr684l52y8VjHrgCidqGyLrOeGl0So2re
+pY7quqyJjlHBim8HdInArHN99IDAohNbFfHs9L1kjCRw58wdqcsrEn6ez5Yjepnq
+Hq/2B3qJ1GWaKLoTVjmyliji2dotHXF0gV/pIu730jcOQH2BvaNmsIwZPM/sVSwE
+oaSocHI2mtMmpoxABIHA7T/ACGqjBTeiB6qyk4aSJCjiiRGQ83Y=
+=Gzv7
+-----END PGP SIGNATURE-----

Added: 
dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.sha512
==
--- 
dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.sha512
 (added)
+++ 
dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.sha512
 Fri Nov  3 11:12:36 2023
@@ -0,0 +1 @@
+2a9ad170e4a8fca57f07ee74c6fcc9a0677cbbe4915d0e504b7cc317dbfc3b529acf2a08f5374de252574a55a2e670c216c66b0e43e4d7b200c118cfcc36280f
  flink-connector-gcp-pubsub-3.0.2-src.tgz




(flink-connector-gcp-pubsub) annotated tag v3.0.2-rc1 updated (4c6be83 -> 71fd780)

2023-11-03 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a change to annotated tag v3.0.2-rc1
in repository https://gitbox.apache.org/repos/asf/flink-connector-gcp-pubsub.git


*** WARNING: tag v3.0.2-rc1 was modified! ***

from 4c6be83  (commit)
  to 71fd780  (tag)
 tagging 4c6be836e6c0f36ef5711f12d7b935254e7d248d (commit)
  by Danny Cranmer
  on Fri Nov 3 11:11:39 2023 +

- Log -
v3.0.2-rc1
-----BEGIN PGP SIGNATURE-----

iQIzBAABCAAdFiEED3nyr7I1G8KWeFRFkfnB7BJf2NsFAmVE1WwACgkQkfnB7BJf
2NvtPg/9FotS3QpsE2DwAS0hVqLZDnctSgQUHphw4qoVvLU38S8c4by3usobdqcG
j0AmOVT10DSopdsX2oYGSDSQXi1BvZDLW424Cl8ixJ4XOkF/dtnIIEGeuUmo8o5u
jJyMqFkBMzx34AH/aYVJ3BIo7iae4tDE63bcps/VKCEFsWw3x0zRAdUf6Qb2AsZj
7EtsLkv6o4i0L4xIDAElAX0V4haun7wrGodHRfAWwGwB4gOV4PfgcalV43/fa8CM
WXj6+ovkBqZfAoeTmm+yayUAddaR4KRmqTcXzm0gBS1UVCogsDSpvPtZyalEqrty
sLkj7auRPAEybmFBmf7N6CCQzk1QC6WDFJgbkzE2SYqemgjOx6TbWm+2l5eHZ9uE
pGfBxxg+xROfvz9ik4nWms6eH/mz92JrjotZCWE/YXyEVgSztm3/+ZNKc1iYfCkd
ufgcKMvzFL5dP9I1Dnb+vdQWJiX7eWcKnbJr7VD4Ed5BGP2GQQhZAZNjQn2vQNoc
2M2nGh0Q1uKPuuV70XtJhmSwqan3XpV7+x1rd3ZXNMOPHTp4u8TuBTaliHBgrOpF
PIVqD1fG0J6QNXS87rw/J1Iq0IsxqgYbmX8LP7gHvZR+xBxfKMtKif/8UI3KoJCM
14GqjMi/Y8Kna+vheS8DbD9JAppkIzIomBG6XySYEdOLoI7DWTU=
=Tf8Y
-----END PGP SIGNATURE-----
---


No new revisions were added by this update.

Summary of changes:



svn commit: r65000 - in /dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1: ./ flink-connector-gcp-pubsub-3.0.2-src.tgz flink-connector-gcp-pubsub-3.0.2-src.tgz.asc flink-connector-gcp-pubsub-3.0.2-src.t

2023-11-03 Thread dannycranmer
Author: dannycranmer
Date: Fri Nov  3 11:17:01 2023
New Revision: 65000

Log:
Add flink-connector-gcp-pubsub-3.0.2-rc1

Added:
dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/

dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz
   (with props)

dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.asc

dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.sha512

Added: 
dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz
==
Binary file - no diff available.

Propchange: 
dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz
--
svn:mime-type = application/octet-stream

Added: 
dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.asc
==
--- 
dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.asc
 (added)
+++ 
dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.asc
 Fri Nov  3 11:17:01 2023
@@ -0,0 +1,16 @@
+-----BEGIN PGP SIGNATURE-----
+
+iQIzBAABCAAdFiEED3nyr7I1G8KWeFRFkfnB7BJf2NsFAmVE1qgACgkQkfnB7BJf
+2NuNTA//Q43ACgOV/+vxvAP9j08RuuK7hB3bDZAXVA2GnuTa+lhIWS3S1PT6H7r+
+FwjVrK5HHt11LacYCjzY+qZN4L/2idLR+4b5gZ20/c+3B1MiV35yR+CUKCzQ7iv8
+HUNI7nd2NW2zsm0lNcyP0KQzd4IcOI/H03U0AHaKqnsWl9CBlBI+Yl1XNPaRsNm1
+4Y2Hd8FSWkEBx2bQJ02SRMRTe4zSgxhfdJxEQCWUeuZTw8+1T8ZjfzUR+ZIPn7oC
+XIt41Qe4bJ86PzhsakUUQbd8qgNZN/HsinqKoE4GHlVhzBKqUofdwSvhJ36ZctbK
+fG6dDqtY642r8d+AE1K/TWw6ohWGoojc+wpSziCa6mtok2qva8YRCs1CVuos5RHy
+mwgBDt7Zyj4fGg4q0k02RFk2eEtJLg1QqfCVIkqbZWCG7ptTWGNJGwZ1C+D4OLnf
+lLml4O9WUIrvEfScvN9wXzD3oT4kyV3FqSMteHY+dhuTqvnjbvKARsrInoKGWswK
+kn7rKYM1K/ZOh2T234XEKXd9Z7ob4+i/8Wz1RYpRLOJpT0Hll5HK8LFxcxpmpYlZ
+PzukZqTlcunJgMN34/VWNMmcsgLEjZOe03OOygDd3LRiMCh0HtiHQpzVQfaDdGCl
+O/v3InBX1slb9arnnAs/ydBcZKFDuHsbnAwlPPNmFCMFqyI8L0s=
+=HrKy
+-----END PGP SIGNATURE-----

Added: 
dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.sha512
==
--- 
dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.sha512
 (added)
+++ 
dev/flink/flink-connector-gcp-pubsub-3.0.2-rc1/flink-connector-gcp-pubsub-3.0.2-src.tgz.sha512
 Fri Nov  3 11:17:01 2023
@@ -0,0 +1 @@
+0898dfdb5fd297617e20e0472ea88068ea8eeded7d0424fc59d7271ee20fcdae259f5407f54344b204d3ccb62a92ec565ec3ceebf0d0fda73ea9fbdae06107ea
  flink-connector-gcp-pubsub-3.0.2-src.tgz




svn commit: r64999 - /dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/

2023-11-03 Thread dannycranmer
Author: dannycranmer
Date: Fri Nov  3 11:16:30 2023
New Revision: 64999

Log:
Remove bad archive

Removed:
dev/flink/=flink-connector-gcp-pubsub-3.0.2-rc1/



(flink) branch master updated: [FLINK-33309] Set `-Djava.security.manager=allow` on JDK 18+

2023-11-03 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 7295c3bcf92 [FLINK-33309] Set `-Djava.security.manager=allow` on JDK 18+
7295c3bcf92 is described below

commit 7295c3bcf92c0e106d3e91c57b6492030c760a25
Author: Sergey Nuyanzin 
AuthorDate: Fri Nov 3 12:51:42 2023 +0100

[FLINK-33309] Set `-Djava.security.manager=allow` on JDK 18+
---
 flink-core/pom.xml  | 21 -
 flink-dist/src/main/flink-bin/bin/config.sh |  6 ++
 flink-runtime/pom.xml   | 21 -
 flink-streaming-java/pom.xml| 21 -
 4 files changed, 66 insertions(+), 3 deletions(-)

diff --git a/flink-core/pom.xml b/flink-core/pom.xml
index ce90b2a1e6d..aee94432815 100644
--- a/flink-core/pom.xml
+++ b/flink-core/pom.xml
@@ -34,7 +34,14 @@ under the License.
jar
 

-
+   
+${surefire.module.config.jdk21} --add-exports=java.rmi/sun.rmi.registry=ALL-UNNAMED 

+   
-Djava.security.manager=allow
+   
+   

 
diff --git a/flink-dist/src/main/flink-bin/bin/config.sh b/flink-dist/src/main/flink-bin/bin/config.sh
index f09c68d493f..dcd48a256f7 100755
--- a/flink-dist/src/main/flink-bin/bin/config.sh
+++ b/flink-dist/src/main/flink-bin/bin/config.sh
@@ -334,6 +334,12 @@ if [ -z "${FLINK_ENV_JAVA_OPTS}" ]; then
 
 # Remove leading and ending double quotes (if present) of value
 FLINK_ENV_JAVA_OPTS="-XX:+IgnoreUnrecognizedVMOptions $( echo "${FLINK_ENV_JAVA_OPTS}" | sed -e 's/^"//'  -e 's/"$//' )"
+
+JAVA_SPEC_VERSION=`${JAVA_HOME}/bin/java -XshowSettings:properties 2>&1 | grep "java.specification.version" | cut -d "=" -f 2 | tr -d '[:space:]' | rev | cut -d "." -f 1 | rev`
+if [[ $(( $JAVA_SPEC_VERSION > 17 )) == 1 ]]; then
+  # set security manager property to allow calls to System.setSecurityManager() at runtime
+  FLINK_ENV_JAVA_OPTS="$FLINK_ENV_JAVA_OPTS -Djava.security.manager=allow"
+fi
 fi
 
 if [ -z "${FLINK_ENV_JAVA_OPTS_JM}" ]; then
diff --git a/flink-runtime/pom.xml b/flink-runtime/pom.xml
index 359a0e181cb..c6bd2e90964 100644
--- a/flink-runtime/pom.xml
+++ b/flink-runtime/pom.xml
@@ -34,7 +34,14 @@ under the License.
jar
 

-   
+   
+${surefire.module.config.jdk21} --add-opens=java.base/java.util=ALL-UNNAMED 
+   
-Djava.security.manager=allow
+   
+   

 
diff --git a/flink-streaming-java/pom.xml b/flink-streaming-java/pom.xml
index f66ed3b48b8..4a7644dfb45 100644
--- a/flink-streaming-java/pom.xml
+++ b/flink-streaming-java/pom.xml
@@ -34,7 +34,14 @@ under the License.
jar
 

-   
+   
+${surefire.module.config.jdk21} --add-opens=java.base/java.lang=ALL-UNNAMED 
+   
-Djava.security.manager=allow
+   
+   
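
For background on why the flag is needed: since JDK 18 (JEP 411), calling System.setSecurityManager() at runtime throws UnsupportedOperationException unless the JVM was started with -Djava.security.manager=allow, which is exactly what the surefire argLine and config.sh changes above add. A small stand-alone probe (illustrative, not Flink code):

    public class SecurityManagerProbe {
        public static void main(String[] args) {
            try {
                // Succeeds on JDK 18+ only if the JVM was launched with
                // -Djava.security.manager=allow (always works on JDK 17 and below).
                System.setSecurityManager(new SecurityManager());
                System.out.println("Security manager installed.");
            } catch (UnsupportedOperationException e) {
                System.out.println("Rejected: start the JVM with -Djava.security.manager=allow");
            }
        }
    }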

 



(flink) branch master updated: [FLINK-33412] Implement type inference for reinterpret_cast function

2023-11-03 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new f6b662f83de [FLINK-33412] Implement type inference for reinterpret_cast function
f6b662f83de is described below

commit f6b662f83deb80572773617f7eb202fa05388198
Author: Dawid Wysakowicz 
AuthorDate: Tue Oct 31 12:55:13 2023 +0100

[FLINK-33412] Implement type inference for reinterpret_cast function
---
 .../functions/BuiltInFunctionDefinitions.java  |  3 +-
 .../ReinterpretCastInputTypeStrategy.java  | 98 ++
 .../strategies/SpecificInputTypeStrategies.java|  2 +
 .../types/logical/utils/LogicalTypeCasts.java  | 48 +++
 .../types/inference/InputTypeStrategiesTest.java   | 29 ++-
 .../inference/InputTypeStrategiesTestBase.java | 19 ++---
 .../expressions/PlannerExpressionConverter.scala   |  8 --
 .../table/planner/expressions/Reinterpret.scala| 49 ---
 .../table/planner/typeutils/TypeCoercion.scala | 81 --
 9 files changed, 185 insertions(+), 152 deletions(-)

diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
index e653d1d6463..2d49dc30725 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
@@ -2254,7 +2254,8 @@ public final class BuiltInFunctionDefinitions {
 BuiltInFunctionDefinition.newBuilder()
 .name("reinterpretCast")
 .kind(SCALAR)
-.outputTypeStrategy(TypeStrategies.MISSING)
+.inputTypeStrategy(SpecificInputTypeStrategies.REINTERPRET_CAST)
+.outputTypeStrategy(TypeStrategies.argument(1))
 .build();
 
 public static final BuiltInFunctionDefinition AS =
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/ReinterpretCastInputTypeStrategy.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/ReinterpretCastInputTypeStrategy.java
new file mode 100644
index 000..d33ddcdb452
--- /dev/null
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/ReinterpretCastInputTypeStrategy.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.types.inference.strategies;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.table.functions.BuiltInFunctionDefinitions;
+import org.apache.flink.table.functions.FunctionDefinition;
+import org.apache.flink.table.types.DataType;
+import org.apache.flink.table.types.inference.ArgumentCount;
+import org.apache.flink.table.types.inference.CallContext;
+import org.apache.flink.table.types.inference.ConstantArgumentCount;
+import org.apache.flink.table.types.inference.InputTypeStrategy;
+import org.apache.flink.table.types.inference.Signature;
+import org.apache.flink.table.types.logical.LegacyTypeInformationType;
+import org.apache.flink.table.types.logical.LogicalType;
+import org.apache.flink.table.types.logical.LogicalTypeRoot;
+import org.apache.flink.table.types.logical.utils.LogicalTypeCasts;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * {@link InputTypeStrategy} specific for {@link BuiltInFunctionDefinitions#REINTERPRET_CAST}.
+ *
+ * It expects three arguments where the type of first one must be reinterpretable as the type of
+ * the second one. The second one must be a type literal. The third a BOOLEAN literal if the
+ * reinterpretation may result in an overflow.
+ */
+@Internal
+public final class ReinterpretCastInputTypeStrategy implements InputTypeStrategy {
+@Override
+public
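
The diff is cut off above, but the Javadoc already states the contract: exactly three arguments, the second being a type literal for the target type and the third a BOOLEAN literal signalling whether overflow is allowed, while the builder change earlier in this commit derives the output type from that second argument via TypeStrategies.argument(1). A framework-free sketch of the argument check, using plain Java stand-ins rather than the actual Flink InputTypeStrategy API:

    final class ReinterpretCastCheckSketch {
        // A Class token stands in for the type literal, a Boolean for the BOOLEAN literal.
        static boolean isValidCall(Object[] args) {
            if (args.length != 3) {
                return false; // reinterpret_cast always takes three arguments
            }
            boolean targetTypeIsLiteral = args[1] instanceof Class;
            boolean overflowFlagIsLiteral = args[2] instanceof Boolean;
            return targetTypeIsLiteral && overflowFlagIsLiteral;
        }
    }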

(flink-connector-hbase) branch main updated: [FLINK-33304] Introduce the DeduplicatedMutator to resolve the mutation write conflict problem. This closes #30

2023-11-03 Thread martijnvisser
This is an automated email from the ASF dual-hosted git repository.

martijnvisser pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-hbase.git


The following commit(s) were added to refs/heads/main by this push:
 new e0971c3  [FLINK-33304] Introduce the DeduplicatedMutator to resolve the mutation write conflict problem. This closes #30
e0971c3 is described below

commit e0971c3888db03243b08e5684b7690150276ef2c
Author: Tan-JiaLiang <55388933+tan-jiali...@users.noreply.github.com>
AuthorDate: Fri Nov 3 23:12:14 2023 +0800

[FLINK-33304] Introduce the DeduplicatedMutator to resolve the mutation write conflict problem. This closes #30

Co-authored-by: tanjialiang 
---
 .../connector/hbase1/HBaseConnectorITCase.java | 59 ++
 .../flink/connector/hbase1/util/HBaseTestBase.java |  9 
 .../connector/hbase2/HBaseConnectorITCase.java | 57 +
 .../flink/connector/hbase2/util/HBaseTestBase.java |  9 
 .../connector/hbase/sink/HBaseSinkFunction.java| 48 --
 5 files changed, 179 insertions(+), 3 deletions(-)

diff --git a/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseConnectorITCase.java b/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseConnectorITCase.java
index 35be358..0ea0002 100644
--- a/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseConnectorITCase.java
+++ b/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseConnectorITCase.java
@@ -29,10 +29,13 @@ import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.table.api.Table;
 import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.TableResult;
 import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
 import org.apache.flink.table.functions.ScalarFunction;
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
 import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.Row;
+import org.apache.flink.types.RowKind;
 import org.apache.flink.util.CollectionUtil;
 
 import org.apache.hadoop.hbase.TableName;
@@ -41,11 +44,14 @@ import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.stream.Collectors;
 
 import static org.apache.flink.table.api.Expressions.$;
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
@@ -280,6 +286,59 @@ public class HBaseConnectorITCase extends HBaseTestBase {
 TestBaseUtils.compareResultAsText(results, expected);
 }
 
+@Test
+public void testTableSinkWithChangelog() throws Exception {
+StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
+StreamTableEnvironment tEnv = StreamTableEnvironment.create(execEnv, streamSettings);
+
+// register values table for source
+String dataId =
+TestValuesTableFactory.registerData(
+Arrays.asList(
+Row.ofKind(RowKind.INSERT, 1, Row.of("Hello1")),
+Row.ofKind(RowKind.DELETE, 1, Row.of("Hello2")),
+Row.ofKind(RowKind.INSERT, 2, Row.of("Hello1")),
+Row.ofKind(RowKind.INSERT, 2, Row.of("Hello2")),
+Row.ofKind(RowKind.INSERT, 2, Row.of("Hello3")),
+Row.ofKind(RowKind.DELETE, 2, Row.of("Hello3")),
+Row.ofKind(RowKind.INSERT, 1, Row.of("Hello3";
+tEnv.executeSql(
+"CREATE TABLE source_table ("
++ " rowkey INT,"
++ " family1 ROW,"
++ " PRIMARY KEY (rowkey) NOT ENFORCED"
++ ") WITH ("
++ " 'connector' = 'values',"
++ " 'data-id' = '"
++ dataId
++ "',"
++ " 'changelog-mode'='I,UA,UB,D'"
++ ")");
+
+// register HBase table for sink
+tEnv.executeSql(
+"CREATE TABLE sink_table ("
++ " rowkey INT,"
++ " family1 ROW,"
++ " PRIMARY KEY (rowkey) NOT ENFORCED"
++ ") WITH ("
++ " 'connector' = 'hbase-1.4',"
++ " 'table-name' = '"
++ TEST_TABLE_4
++
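
The test above feeds several mutations for the same rowkey through one changelog, which is the write conflict scenario the DeduplicatedMutator named in the commit title addresses. As a rough sketch of the general idea, inferred from the name rather than from the connector's actual implementation: keep only the most recent buffered mutation per row key before flushing.

    import java.util.LinkedHashMap;
    import java.util.Map;

    final class LastWriteWinsBuffer<K, M> {
        private final Map<K, M> latestByRow = new LinkedHashMap<>();

        void add(K rowKey, M mutation) {
            latestByRow.remove(rowKey);        // drop any earlier mutation for this row
            latestByRow.put(rowKey, mutation); // re-insert so iteration follows latest arrival
        }

        Iterable<M> drainForFlush() {
            return latestByRow.values();
        }
    }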

(flink-connector-hbase) branch v3.0 updated: [FLINK-33304] Introduce the DeduplicatedMutator to resolve the mutation write conflict problem. This closes #30

2023-11-03 Thread martijnvisser
This is an automated email from the ASF dual-hosted git repository.

martijnvisser pushed a commit to branch v3.0
in repository https://gitbox.apache.org/repos/asf/flink-connector-hbase.git


The following commit(s) were added to refs/heads/v3.0 by this push:
 new 3116aa4  [FLINK-33304] Introduce the DeduplicatedMutator to resolve the mutation write conflict problem. This closes #30
3116aa4 is described below

commit 3116aa4a2f6aa423cd8f03a634aefba4b6d373f2
Author: Tan-JiaLiang <55388933+tan-jiali...@users.noreply.github.com>
AuthorDate: Fri Nov 3 23:12:14 2023 +0800

[FLINK-33304] Introduce the DeduplicatedMutator to resolve the mutation write conflict problem. This closes #30

Co-authored-by: tanjialiang 
(cherry picked from commit e0971c3888db03243b08e5684b7690150276ef2c)
---
 .../connector/hbase1/HBaseConnectorITCase.java | 59 ++
 .../flink/connector/hbase1/util/HBaseTestBase.java |  9 
 .../connector/hbase2/HBaseConnectorITCase.java | 57 +
 .../flink/connector/hbase2/util/HBaseTestBase.java |  9 
 .../connector/hbase/sink/HBaseSinkFunction.java| 48 --
 5 files changed, 179 insertions(+), 3 deletions(-)

diff --git 
a/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseConnectorITCase.java
 
b/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseConnectorITCase.java
index 35be358..0ea0002 100644
--- 
a/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseConnectorITCase.java
+++ 
b/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseConnectorITCase.java
@@ -29,10 +29,13 @@ import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.table.api.Table;
 import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.TableResult;
 import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
 import org.apache.flink.table.functions.ScalarFunction;
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
 import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.Row;
+import org.apache.flink.types.RowKind;
 import org.apache.flink.util.CollectionUtil;
 
 import org.apache.hadoop.hbase.TableName;
@@ -41,11 +44,14 @@ import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.stream.Collectors;
 
 import static org.apache.flink.table.api.Expressions.$;
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
@@ -280,6 +286,59 @@ public class HBaseConnectorITCase extends HBaseTestBase {
 TestBaseUtils.compareResultAsText(results, expected);
 }
 
+@Test
+public void testTableSinkWithChangelog() throws Exception {
+StreamExecutionEnvironment execEnv = 
StreamExecutionEnvironment.getExecutionEnvironment();
+StreamTableEnvironment tEnv = StreamTableEnvironment.create(execEnv, 
streamSettings);
+
+// register values table for source
+String dataId =
+TestValuesTableFactory.registerData(
+Arrays.asList(
+Row.ofKind(RowKind.INSERT, 1, 
Row.of("Hello1")),
+Row.ofKind(RowKind.DELETE, 1, 
Row.of("Hello2")),
+Row.ofKind(RowKind.INSERT, 2, 
Row.of("Hello1")),
+Row.ofKind(RowKind.INSERT, 2, 
Row.of("Hello2")),
+Row.ofKind(RowKind.INSERT, 2, 
Row.of("Hello3")),
+Row.ofKind(RowKind.DELETE, 2, 
Row.of("Hello3")),
+Row.ofKind(RowKind.INSERT, 1, 
Row.of("Hello3";
+tEnv.executeSql(
+"CREATE TABLE source_table ("
++ " rowkey INT,"
++ " family1 ROW,"
++ " PRIMARY KEY (rowkey) NOT ENFORCED"
++ ") WITH ("
++ " 'connector' = 'values',"
++ " 'data-id' = '"
++ dataId
++ "',"
++ " 'changelog-mode'='I,UA,UB,D'"
++ ")");
+
+// register HBase table for sink
+tEnv.executeSql(
+"CREATE TABLE sink_table ("
++ " rowkey INT,"
++ " family1 ROW,"
++ " PRIMARY KEY (rowkey) NOT ENFORCED"
++ ") WITH ("
++ " 'connector' = 'hbase-1.4',"
++ " 'table-name

(flink-connector-hbase) branch main updated: [FLINK-33164] Support write option sink.ignore-null-valus. This closes #21

2023-11-03 Thread martijnvisser
This is an automated email from the ASF dual-hosted git repository.

martijnvisser pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-hbase.git


The following commit(s) were added to refs/heads/main by this push:
 new 298d816  [FLINK-33164] Support write option sink.ignore-null-valus. This closes #21
298d816 is described below

commit 298d8164495732f59d18c54d4d40b601b6d44f21
Author: Tan-JiaLiang <55388933+tan-jiali...@users.noreply.github.com>
AuthorDate: Fri Nov 3 23:51:29 2023 +0800

[FLINK-33164] Support write option sink.ignore-null-valus. This closes #21

Co-authored-by: tanjialiang 
---
 .../hbase1/HBase1DynamicTableFactory.java  |  3 ++
 .../hbase1/sink/HBaseDynamicTableSink.java |  5 +-
 .../hbase1/HBaseDynamicTableFactoryTest.java   | 12 +
 .../hbase2/HBase2DynamicTableFactory.java  |  5 +-
 .../hbase2/sink/HBaseDynamicTableSink.java |  5 +-
 .../hbase2/HBaseDynamicTableFactoryTest.java   | 12 +
 .../connector/hbase/options/HBaseWriteOptions.java | 20 +++
 .../hbase/sink/RowDataToMutationConverter.java |  7 ++-
 .../hbase/table/HBaseConnectorOptions.java |  6 +++
 .../hbase/table/HBaseConnectorOptionsUtil.java |  2 +
 .../flink/connector/hbase/util/HBaseSerde.java | 15 ++
 .../flink/connector/hbase/util/HBaseSerdeTest.java | 62 --
 12 files changed, 144 insertions(+), 10 deletions(-)

diff --git a/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java b/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
index fbc793c..5321bf2 100644
--- a/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
+++ b/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
@@ -50,6 +50,7 @@ import static org.apache.flink.connector.hbase.table.HBaseConnectorOptions.NULL_
 import static org.apache.flink.connector.hbase.table.HBaseConnectorOptions.SINK_BUFFER_FLUSH_INTERVAL;
 import static org.apache.flink.connector.hbase.table.HBaseConnectorOptions.SINK_BUFFER_FLUSH_MAX_ROWS;
 import static org.apache.flink.connector.hbase.table.HBaseConnectorOptions.SINK_BUFFER_FLUSH_MAX_SIZE;
+import static org.apache.flink.connector.hbase.table.HBaseConnectorOptions.SINK_IGNORE_NULL_VALUE;
 import static org.apache.flink.connector.hbase.table.HBaseConnectorOptions.SINK_PARALLELISM;
 import static org.apache.flink.connector.hbase.table.HBaseConnectorOptions.TABLE_NAME;
 import static org.apache.flink.connector.hbase.table.HBaseConnectorOptions.ZOOKEEPER_QUORUM;
@@ -149,6 +150,7 @@ public class HBase1DynamicTableFactory
 set.add(SINK_BUFFER_FLUSH_MAX_SIZE);
 set.add(SINK_BUFFER_FLUSH_MAX_ROWS);
 set.add(SINK_BUFFER_FLUSH_INTERVAL);
+set.add(SINK_IGNORE_NULL_VALUE);
 set.add(SINK_PARALLELISM);
 set.add(LOOKUP_ASYNC);
 set.add(LOOKUP_CACHE_MAX_ROWS);
@@ -173,6 +175,7 @@ public class HBase1DynamicTableFactory
 SINK_BUFFER_FLUSH_MAX_SIZE,
 SINK_BUFFER_FLUSH_MAX_ROWS,
 SINK_BUFFER_FLUSH_INTERVAL,
+SINK_IGNORE_NULL_VALUE,
 LOOKUP_CACHE_MAX_ROWS,
 LOOKUP_CACHE_TTL,
 LOOKUP_MAX_RETRIES)
diff --git a/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/sink/HBaseDynamicTableSink.java b/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/sink/HBaseDynamicTableSink.java
index 2b9e87c..0dec937 100644
--- a/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/sink/HBaseDynamicTableSink.java
+++ b/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/sink/HBaseDynamicTableSink.java
@@ -61,7 +61,10 @@ public class HBaseDynamicTableSink implements DynamicTableSink {
 new HBaseSinkFunction<>(
 tableName,
 hbaseConf,
-new RowDataToMutationConverter(hbaseTableSchema, nullStringLiteral),
+new RowDataToMutationConverter(
+hbaseTableSchema,
+nullStringLiteral,
+writeOptions.isIgnoreNullValue()),
 writeOptions.getBufferFlushMaxSizeInBytes(),
 writeOptions.getBufferFlushMaxRows(),
 writeOptions.getBufferFlushIntervalMillis());
diff --git a/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseDynamicTableFactoryTest.java b/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseDynamicTableFactoryTest.java
index 8a8f1d7..7c4ed8e 100644
--- a/flink-connect
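
For readers wondering how the new option surfaces to users: the commit wires a SINK_IGNORE_NULL_VALUE write option through the factory, sink and serde, which (judging by the name) makes the sink skip null fields instead of writing them. A usage sketch in the style of the connector tests above; the key 'sink.ignore-null-value' is assumed from the constant name, so verify it against the connector documentation:

    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.TableEnvironment;

    public class IgnoreNullValueExample {
        public static void main(String[] args) {
            TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
            tEnv.executeSql(
                    "CREATE TABLE hbase_sink ("
                            + " rowkey INT,"
                            + " family1 ROW<col1 STRING>,"
                            + " PRIMARY KEY (rowkey) NOT ENFORCED"
                            + ") WITH ("
                            + " 'connector' = 'hbase-1.4',"
                            + " 'table-name' = 'my_table',"
                            + " 'zookeeper.quorum' = 'localhost:2181',"
                            + " 'sink.ignore-null-value' = 'true'" // assumed key name
                            + ")");
        }
    }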

(flink-connector-hbase) branch v3.0 updated: [FLINK-33164] Support write option sink.ignore-null-valus. This closes #21

2023-11-03 Thread martijnvisser
This is an automated email from the ASF dual-hosted git repository.

martijnvisser pushed a commit to branch v3.0
in repository https://gitbox.apache.org/repos/asf/flink-connector-hbase.git


The following commit(s) were added to refs/heads/v3.0 by this push:
 new 6544697  [FLINK-33164] Support write option sink.ignore-null-valus. This closes #21
6544697 is described below

commit 654469780f0ac85f156525575c8b57559f03
Author: Tan-JiaLiang <55388933+tan-jiali...@users.noreply.github.com>
AuthorDate: Fri Nov 3 23:51:29 2023 +0800

[FLINK-33164] Support write option sink.ignore-null-valus. This closes #21

Co-authored-by: tanjialiang 
(cherry picked from commit 298d8164495732f59d18c54d4d40b601b6d44f21)
---
 .../hbase1/HBase1DynamicTableFactory.java  |  3 ++
 .../hbase1/sink/HBaseDynamicTableSink.java |  5 +-
 .../hbase1/HBaseDynamicTableFactoryTest.java   | 12 +
 .../hbase2/HBase2DynamicTableFactory.java  |  5 +-
 .../hbase2/sink/HBaseDynamicTableSink.java |  5 +-
 .../hbase2/HBaseDynamicTableFactoryTest.java   | 12 +
 .../connector/hbase/options/HBaseWriteOptions.java | 20 +++
 .../hbase/sink/RowDataToMutationConverter.java |  7 ++-
 .../hbase/table/HBaseConnectorOptions.java |  6 +++
 .../hbase/table/HBaseConnectorOptionsUtil.java |  2 +
 .../flink/connector/hbase/util/HBaseSerde.java | 15 ++
 .../flink/connector/hbase/util/HBaseSerdeTest.java | 62 --
 12 files changed, 144 insertions(+), 10 deletions(-)

diff --git 
a/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
 
b/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
index fbc793c..5321bf2 100644
--- 
a/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
+++ 
b/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/HBase1DynamicTableFactory.java
@@ -50,6 +50,7 @@ import static 
org.apache.flink.connector.hbase.table.HBaseConnectorOptions.NULL_
 import static 
org.apache.flink.connector.hbase.table.HBaseConnectorOptions.SINK_BUFFER_FLUSH_INTERVAL;
 import static 
org.apache.flink.connector.hbase.table.HBaseConnectorOptions.SINK_BUFFER_FLUSH_MAX_ROWS;
 import static 
org.apache.flink.connector.hbase.table.HBaseConnectorOptions.SINK_BUFFER_FLUSH_MAX_SIZE;
+import static 
org.apache.flink.connector.hbase.table.HBaseConnectorOptions.SINK_IGNORE_NULL_VALUE;
 import static 
org.apache.flink.connector.hbase.table.HBaseConnectorOptions.SINK_PARALLELISM;
 import static 
org.apache.flink.connector.hbase.table.HBaseConnectorOptions.TABLE_NAME;
 import static 
org.apache.flink.connector.hbase.table.HBaseConnectorOptions.ZOOKEEPER_QUORUM;
@@ -149,6 +150,7 @@ public class HBase1DynamicTableFactory
 set.add(SINK_BUFFER_FLUSH_MAX_SIZE);
 set.add(SINK_BUFFER_FLUSH_MAX_ROWS);
 set.add(SINK_BUFFER_FLUSH_INTERVAL);
+set.add(SINK_IGNORE_NULL_VALUE);
 set.add(SINK_PARALLELISM);
 set.add(LOOKUP_ASYNC);
 set.add(LOOKUP_CACHE_MAX_ROWS);
@@ -173,6 +175,7 @@ public class HBase1DynamicTableFactory
 SINK_BUFFER_FLUSH_MAX_SIZE,
 SINK_BUFFER_FLUSH_MAX_ROWS,
 SINK_BUFFER_FLUSH_INTERVAL,
+SINK_IGNORE_NULL_VALUE,
 LOOKUP_CACHE_MAX_ROWS,
 LOOKUP_CACHE_TTL,
 LOOKUP_MAX_RETRIES)
diff --git 
a/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/sink/HBaseDynamicTableSink.java
 
b/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/sink/HBaseDynamicTableSink.java
index 2b9e87c..0dec937 100644
--- 
a/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/sink/HBaseDynamicTableSink.java
+++ 
b/flink-connector-hbase-1.4/src/main/java/org/apache/flink/connector/hbase1/sink/HBaseDynamicTableSink.java
@@ -61,7 +61,10 @@ public class HBaseDynamicTableSink implements 
DynamicTableSink {
 new HBaseSinkFunction<>(
 tableName,
 hbaseConf,
-new RowDataToMutationConverter(hbaseTableSchema, 
nullStringLiteral),
+new RowDataToMutationConverter(
+hbaseTableSchema,
+nullStringLiteral,
+writeOptions.isIgnoreNullValue()),
 writeOptions.getBufferFlushMaxSizeInBytes(),
 writeOptions.getBufferFlushMaxRows(),
 writeOptions.getBufferFlushIntervalMillis());
diff --git 
a/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseDynamicTableFactoryTest.java
 
b/flink-connector-hbase-1.4/src/test/java/org/apache/flink/connector/hbase1/HBaseDynami

(flink) branch master updated: [FLINK-33447][table-planner] Avoid CompiledPlan recompilation during loading

2023-11-03 Thread twalthr
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 96a142866a0 [FLINK-33447][table-planner] Avoid CompiledPlan recompilation during loading
96a142866a0 is described below

commit 96a142866a042598bfe85407b46b0871a7b8993a
Author: Timo Walther 
AuthorDate: Fri Nov 3 11:02:19 2023 +0100

[FLINK-33447][table-planner] Avoid CompiledPlan recompilation during loading
---
 .../planner/plan/ExecNodeGraphInternalPlan.java| 15 ++
 .../table/planner/delegation/StreamPlanner.scala   | 24 --
 2 files changed, 24 insertions(+), 15 deletions(-)

diff --git a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/ExecNodeGraphInternalPlan.java b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/ExecNodeGraphInternalPlan.java
index 7f138c6517a..b921535b8b8 100644
--- a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/ExecNodeGraphInternalPlan.java
+++ b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/ExecNodeGraphInternalPlan.java
@@ -34,17 +34,21 @@ import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.StandardOpenOption;
 import java.util.List;
+import java.util.function.Supplier;
 import java.util.stream.Collectors;
 
 /** Implementation of {@link CompiledPlan} backed by an {@link ExecNodeGraph}. */
 @Internal
 public class ExecNodeGraphInternalPlan implements InternalPlan {
 
-private final String serializedPlan;
+private final Supplier serializedPlanSupplier;
 private final ExecNodeGraph execNodeGraph;
 
-public ExecNodeGraphInternalPlan(String serializedPlan, ExecNodeGraph execNodeGraph) {
-this.serializedPlan = serializedPlan;
+private String serializedPlan;
+
+public ExecNodeGraphInternalPlan(
+Supplier serializedPlanSupplier, ExecNodeGraph execNodeGraph) {
+this.serializedPlanSupplier = serializedPlanSupplier;
 this.execNodeGraph = execNodeGraph;
 }
 
@@ -54,6 +58,9 @@ public class ExecNodeGraphInternalPlan implements InternalPlan {
 
 @Override
 public String asJsonString() {
+if (serializedPlan == null) {
+serializedPlan = serializedPlanSupplier.get();
+}
 return serializedPlan;
 }
 
@@ -78,7 +85,7 @@ public class ExecNodeGraphInternalPlan implements InternalPlan {
 Files.createDirectories(file.toPath().getParent());
 Files.write(
 file.toPath(),
-serializedPlan.getBytes(StandardCharsets.UTF_8),
+asJsonString().getBytes(StandardCharsets.UTF_8),
 StandardOpenOption.CREATE,
 StandardOpenOption.TRUNCATE_EXISTING,
 StandardOpenOption.WRITE);
diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/delegation/StreamPlanner.scala b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/delegation/StreamPlanner.scala
index 1029475b748..fb32326f117 100644
--- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/delegation/StreamPlanner.scala
+++ b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/delegation/StreamPlanner.scala
@@ -22,7 +22,7 @@ import org.apache.flink.api.dag.Transformation
 import org.apache.flink.configuration.ExecutionOptions
 import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectReader
 import org.apache.flink.streaming.api.graph.StreamGraph
-import org.apache.flink.table.api.{ExplainDetail, ExplainFormat, PlanReference, TableConfig, TableException}
+import org.apache.flink.table.api._
 import org.apache.flink.table.api.PlanReference.{ContentPlanReference, FilePlanReference, ResourcePlanReference}
 import org.apache.flink.table.catalog.{CatalogManager, FunctionCatalog}
 import org.apache.flink.table.delegation.{Executor, InternalPlan}
@@ -190,10 +190,12 @@ class StreamPlanner(
 }
 
 new ExecNodeGraphInternalPlan(
-  JsonSerdeUtil
-.createObjectWriter(ctx)
-.withDefaultPrettyPrinter()
-.writeValueAsString(execNodeGraph),
+  // ensures that the JSON output is always normalized
+  () =>
+JsonSerdeUtil
+  .createObjectWriter(ctx)
+  .withDefaultPrettyPrinter()
+  .writeValueAsString(execNodeGraph),
   execNodeGraph)
   }
 
@@ -204,12 +206,12 @@ class StreamPlanner(
 val execGraph = translateToExecNodeGraph(optimizedRelNodes, isCompiled = true)
 afterTranslation()
 
-new ExecNodeGraphInternalPlan(
-  JsonSerdeUtil
-.createObjectWriter(createSerdeContext)
-.withDefaultPrettyPrinter()
-.wri
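
The diff is truncated above, but the shape of the change is already clear: StreamPlanner now hands ExecNodeGraphInternalPlan a Supplier instead of an eagerly serialized string, and asJsonString() serializes on first access and caches the result, so a loaded plan is not re-serialized unless its JSON form is actually requested. A minimal, non-Flink Java sketch of that lazy memoization:

    import java.util.function.Supplier;

    final class LazyJsonPlan {
        private final Supplier<String> serializer;
        private String cached;

        LazyJsonPlan(Supplier<String> serializer) {
            this.serializer = serializer;
        }

        String asJsonString() {
            if (cached == null) {
                cached = serializer.get(); // serialize once, on first access
            }
            return cached;
        }
    }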