(flink-cdc) branch master updated: [FLINK-35127][cdc][values] Remove HybridSource from ValuesSource to avoid CI failure. (#3237)

2024-04-19 Thread renqs
This is an automated email from the ASF dual-hosted git repository.

renqs pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-cdc.git


The following commit(s) were added to refs/heads/master by this push:
 new d4ed7db8d [FLINK-35127][cdc][values] Remove HybridSource from 
ValuesSource to avoid CI failure. (#3237)
d4ed7db8d is described below

commit d4ed7db8dca18906bb8a1d7738cb158da0222bc3
Author: Kunni 
AuthorDate: Fri Apr 19 15:37:35 2024 +0800

[FLINK-35127][cdc][values] Remove HybridSource from ValuesSource to avoid 
CI failure. (#3237)
---
 .../flink/cdc/connectors/values/source/ValuesDataSource.java  | 7 +--
 .../cdc/connectors/values/source/ValuesDataSourceHelper.java  | 8 +++-
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git 
a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/main/java/org/apache/flink/cdc/connectors/values/source/ValuesDataSource.java
 
b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/main/java/org/apache/flink/cdc/connectors/values/source/ValuesDataSource.java
index 88a723fce..61699a5d0 100644
--- 
a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/main/java/org/apache/flink/cdc/connectors/values/source/ValuesDataSource.java
+++ 
b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/main/java/org/apache/flink/cdc/connectors/values/source/ValuesDataSource.java
@@ -35,7 +35,6 @@ import org.apache.flink.cdc.common.source.EventSourceProvider;
 import org.apache.flink.cdc.common.source.FlinkSourceProvider;
 import org.apache.flink.cdc.common.source.MetadataAccessor;
 import org.apache.flink.cdc.connectors.values.ValuesDatabase;
-import org.apache.flink.connector.base.source.hybrid.HybridSource;
 import org.apache.flink.core.io.InputStatus;
 import org.apache.flink.core.io.SimpleVersionedSerializer;
 import org.apache.flink.core.memory.DataInputViewStreamWrapper;
@@ -73,11 +72,7 @@ public class ValuesDataSource implements DataSource {
 @Override
 public EventSourceProvider getEventSourceProvider() {
 ValuesDataSourceHelper.setSourceEvents(eventSetId);
-HybridSource hybridSource =
-HybridSource.builder(new ValuesSource(failAtPos, eventSetId, 
true))
-.addSource(new ValuesSource(failAtPos, eventSetId, 
false))
-.build();
-return FlinkSourceProvider.of(hybridSource);
+return FlinkSourceProvider.of(new ValuesSource(failAtPos, eventSetId, 
false));
 }
 
 @Override
diff --git 
a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/main/java/org/apache/flink/cdc/connectors/values/source/ValuesDataSourceHelper.java
 
b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/main/java/org/apache/flink/cdc/connectors/values/source/ValuesDataSourceHelper.java
index 431cbaf17..24ba78165 100644
--- 
a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/main/java/org/apache/flink/cdc/connectors/values/source/ValuesDataSourceHelper.java
+++ 
b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/main/java/org/apache/flink/cdc/connectors/values/source/ValuesDataSourceHelper.java
@@ -73,7 +73,13 @@ public class ValuesDataSourceHelper {
 // use default enum of SINGLE_SPLIT_SINGLE_TABLE
 sourceEvents = singleSplitSingleTable();
 }
-return sourceEvents;
+// put all events into one list to avoid CI failure and make sure that 
SchemaChangeEvent are
+// sent in order.
+List mergeEvents = new ArrayList<>();
+for (List events : sourceEvents) {
+mergeEvents.addAll(events);
+}
+return Collections.singletonList(mergeEvents);
 }
 
 /** set sourceEvents using custom events. */



(flink-cdc) branch master updated: [FLINK-34903][cdc][mysql] Add mysql-pipeline-connector with tables.exclude option to exclude unnecessary tables (#3186)

2024-04-19 Thread renqs
This is an automated email from the ASF dual-hosted git repository.

renqs pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-cdc.git


The following commit(s) were added to refs/heads/master by this push:
 new 0d797a602 [FLINK-34903][cdc][mysql] Add mysql-pipeline-connector with 
tables.exclude option to exclude unnecessary tables (#3186)
0d797a602 is described below

commit 0d797a6021ea2f3973f8dc74c0c741b695e85a12
Author: Thorne <46524102+shiy...@users.noreply.github.com>
AuthorDate: Fri Apr 19 15:49:32 2024 +0800

[FLINK-34903][cdc][mysql] Add mysql-pipeline-connector with tables.exclude 
option to exclude unnecessary tables (#3186)
---
 docs/content.zh/docs/connectors/mysql.md   |  8 
 docs/content/docs/connectors/mysql.md  |  8 
 .../mysql/factory/MySqlDataSourceFactory.java  | 27 ++---
 .../mysql/source/MySqlDataSourceOptions.java   | 12 ++
 .../mysql/source/MySqlDataSourceFactoryTest.java   | 45 ++
 5 files changed, 95 insertions(+), 5 deletions(-)

diff --git a/docs/content.zh/docs/connectors/mysql.md 
b/docs/content.zh/docs/connectors/mysql.md
index 564dd3dbf..3410cdea0 100644
--- a/docs/content.zh/docs/connectors/mysql.md
+++ b/docs/content.zh/docs/connectors/mysql.md
@@ -107,6 +107,14 @@ pipeline:
   需要注意的是,点号(.)被视为数据库和表名的分隔符。 
如果需要在正则表达式中使用点(.)来匹配任何字符,必须使用反斜杠对点进行转义。
   例如,db0.\.*, db1.user_table_[0-9]+, db[1-2].[app|web]order_\.*
 
+
+  tables.exclude
+  optional
+  (none)
+  String
+  需要排除的 MySQL 数据库的表名,参数会在tables参数后发生排除作用。表名支持正则表达式,以排除满足正则表达式的多个表。
+  用法和tables参数相同
+
 
   schema-change.enabled
   optional
diff --git a/docs/content/docs/connectors/mysql.md 
b/docs/content/docs/connectors/mysql.md
index c6ef38252..879920fec 100644
--- a/docs/content/docs/connectors/mysql.md
+++ b/docs/content/docs/connectors/mysql.md
@@ -109,6 +109,14 @@ pipeline:
   If there is a need to use a dot (.) in a regular expression to match 
any character, it is necessary to escape the dot with a backslash.
   eg. db0.\.*, db1.user_table_[0-9]+, db[1-2].[app|web]order_\.*
 
+
+  tables.exclude
+  optional
+  (none)
+  String
+  Table name of the MySQL database to exclude, parameter will have an 
exclusion effect after the tables parameter. The table-name also supports 
regular expressions to exclude multiple tables that satisfy the regular 
expressions. 
+  The usage is the same as the tables parameter
+
 
   schema-change.enabled
   optional
diff --git 
a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/factory/MySqlDataSourceFactory.java
 
b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/factory/MySqlDataSourceFactory.java
index 744041d25..dc9972fe0 100644
--- 
a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/factory/MySqlDataSourceFactory.java
+++ 
b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/factory/MySqlDataSourceFactory.java
@@ -42,9 +42,11 @@ import org.slf4j.LoggerFactory;
 import java.time.Duration;
 import java.time.ZoneId;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 import static 
org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.CHUNK_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND;
 import static 
org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.CHUNK_KEY_EVEN_DISTRIBUTION_FACTOR_UPPER_BOUND;
@@ -70,6 +72,7 @@ import static 
org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOption
 import static 
org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.SERVER_ID;
 import static 
org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.SERVER_TIME_ZONE;
 import static 
org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.TABLES;
+import static 
org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.TABLES_EXCLUDE;
 import static 
org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.USERNAME;
 import static 
org.apache.flink.cdc.connectors.mysql.source.utils.ObjectUtils.doubleCompare;
 import static 
org.apache.flink.cdc.debezium.table.DebeziumOptions.getDebeziumProperties;
@@ -93,6 +96,7 @@ public class MySqlDataSourceFactory implements 
DataSourceFactory {
 String username = config.get(USERNAME);
 String password = config.get(PASSWORD);
 String tables = config.get(TABLES);
+String tablesExclude = config.get(TABLES_EXCLUDE);
 
 String serverId = validat

(flink) branch master updated (a312a3bdd25 -> 131358b918b)

2024-04-19 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from a312a3bdd25 [FLINK-35045][state] Support ByteBufferReadable for 
HadoopDataInputStream
 add 10c84df8c56 [hotfix] Delete pointless test
 add 131358b918b [FLINK-35159] Transition ExecutionGraph to RUNNING after 
slot assignment

No new revisions were added by this update.

Summary of changes:
 .../scheduler/adaptive/AdaptiveScheduler.java  |  1 -
 .../scheduler/adaptive/CreatingExecutionGraph.java |  5 +-
 .../scheduler/adaptive/AdaptiveSchedulerTest.java  | 55 --
 .../adaptive/CreatingExecutionGraphTest.java   |  8 +++-
 4 files changed, 10 insertions(+), 59 deletions(-)



Re: [I] flink-sql-connector-oracle-cdc-2.4.0采集出现异常 [flink-cdc]

2024-04-19 Thread via GitHub


Aiden-Rose commented on issue #2234:
URL: https://github.com/apache/flink-cdc/issues/2234#issuecomment-2066176302

   我遇到了同样的问题,后面版本有解决的吗


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@flink.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



(flink-connector-aws) branch main updated: [FLINK-35115][Connectors/Kinesis] Allow kinesis consumer to snapshotState after cancelling operator

2024-04-19 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-aws.git


The following commit(s) were added to refs/heads/main by this push:
 new 8d29147  [FLINK-35115][Connectors/Kinesis] Allow kinesis consumer to 
snapshotState after cancelling operator
8d29147 is described below

commit 8d29147b9e6c0a7d27399662c6023ad634363764
Author: Aleksandr Pilipenko 
AuthorDate: Wed Apr 17 17:05:23 2024 +0100

[FLINK-35115][Connectors/Kinesis] Allow kinesis consumer to snapshotState 
after cancelling operator
---
 .../connectors/kinesis/FlinkKinesisConsumer.java   |  13 +-
 .../kinesis/FlinkKinesisConsumerTest.java  | 304 +++--
 2 files changed, 179 insertions(+), 138 deletions(-)

diff --git 
a/flink-connector-aws/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumer.java
 
b/flink-connector-aws/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumer.java
index c229a1c..06d0acc 100644
--- 
a/flink-connector-aws/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumer.java
+++ 
b/flink-connector-aws/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumer.java
@@ -148,7 +148,17 @@ public class FlinkKinesisConsumer extends 
RichParallelSourceFunction
 private transient HashMap
 sequenceNumsToRestore;
 
+/**
+ * Flag used to control reading from Kinesis: source will read data while 
value is true. Changed
+ * to false after {@link #cancel()} has been called.
+ */
 private volatile boolean running = true;
+/**
+ * Flag identifying that operator had been closed. True only after {@link 
#close()} has been
+ * called. Used to control behaviour of snapshotState: state can be 
persisted after operator has
+ * been cancelled (during stop-with-savepoint workflow), but not after 
operator has been closed.
+ */
+private volatile boolean closed = false;
 
 // 
 //  State for Checkpoint
@@ -419,6 +429,7 @@ public class FlinkKinesisConsumer extends 
RichParallelSourceFunction
 @Override
 public void close() throws Exception {
 cancel();
+closed = true;
 // safe-guard when the fetcher has been interrupted, make sure to not 
leak resources
 // application might be stopped before connector subtask has been 
started
 // so we must check if the fetcher is actually created
@@ -478,7 +489,7 @@ public class FlinkKinesisConsumer extends 
RichParallelSourceFunction
 
 @Override
 public void snapshotState(FunctionSnapshotContext context) throws 
Exception {
-if (!running) {
+if (closed) {
 LOG.debug("snapshotState() called on closed source; returning 
null.");
 } else {
 if (LOG.isDebugEnabled()) {
diff --git 
a/flink-connector-aws/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerTest.java
 
b/flink-connector-aws/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerTest.java
index f8cd5ab..82836df 100644
--- 
a/flink-connector-aws/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerTest.java
+++ 
b/flink-connector-aws/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerTest.java
@@ -39,6 +39,7 @@ import org.apache.flink.streaming.api.TimeCharacteristic;
 import org.apache.flink.streaming.api.functions.source.SourceFunction;
 import 
org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
 import org.apache.flink.streaming.api.operators.StreamSource;
+import 
org.apache.flink.streaming.api.operators.collect.utils.MockFunctionSnapshotContext;
 import org.apache.flink.streaming.api.watermark.Watermark;
 import org.apache.flink.streaming.api.windowing.time.Time;
 import 
org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants;
@@ -71,7 +72,6 @@ import 
com.amazonaws.services.kinesis.model.SequenceNumberRange;
 import com.amazonaws.services.kinesis.model.Shard;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.Matchers;
 import org.mockito.MockedStatic;
 import org.mockito.Mockito;
 import org.powermock.api.mockito.PowerMockito;
@@ -97,10 +97,13 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Supplier;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.mockStatic;
 import static org.mockito.Mockito.never;
 import

svn commit: r68649 - in /dev/flink/flink-connector-aws-4.3.0-rc2: ./ flink-connector-aws-4.3.0-src.tgz flink-connector-aws-4.3.0-src.tgz.asc flink-connector-aws-4.3.0-src.tgz.sha512

2024-04-19 Thread dannycranmer
Author: dannycranmer
Date: Fri Apr 19 09:51:08 2024
New Revision: 68649

Log:
Add flink-connector-aws-4.3.0-rc2

Added:
dev/flink/flink-connector-aws-4.3.0-rc2/
dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz   
(with props)

dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz.asc

dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz.sha512

Added: dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz
==
Binary file - no diff available.

Propchange: 
dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz
--
svn:mime-type = application/octet-stream

Added: 
dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz.asc
==
--- 
dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz.asc 
(added)
+++ 
dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz.asc 
Fri Apr 19 09:51:08 2024
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIzBAABCAAdFiEED3nyr7I1G8KWeFRFkfnB7BJf2NsFAmYiPoYACgkQkfnB7BJf
+2NsemQ/+LUfar2B32/5DibL8FTSDIceAE//oNXxbTtzmOYGJL+kgiavbNBz4eqJC
+xKRP4CXmNs74yFcrHIkNL9Qls54A1H7L0+y0xmwQEubP3t7gQx90wHfdyfiBEmKB
+cB1PLZjoOD/NlzFILZRKE+dVGgfQoaKW3OxQaxBExzLpXlOUtOLMQ424Htuw4s/V
+LJaT/5uHg3XeBYpIVvmq6KkbAGE4WyOa8O+aE1UvvlV/haFvdTMN3NdL459fCjAA
+tO6LY3z6FXG+AQY5rU//3/P9rsvbxfUQQbY4M8tVVK8K7zeXAPuLXH5WnCIiUDmR
+id+uYBg2Pyud5RnOp9gKxVnf+zEJ+GP/w6OWq2LTiU+W8d18Ayvtl69sQl4g/GjN
+mbAxSeZ+ht/g1rN/kyMT08T6khczmjWHlmqyKuv+Yj+tjKpCg26X40gGnqAFtSuD
+BXHfz8TYgty1SX6I9wLUQY0EWV4jx3NjRtq7oRuiPMT9LZd+M0PGhYDkXigiD+B+
+LJm3XWEJZ1SQ7AlTyCeYG+C2aNuN0dWI3uyksBbsHqw80XW04ywsNa3xXS3wOF1s
+LRZKihAhxINwAF8A/5VJE1m/7tU5kNt51jm+8TREA8KmMg2/VDO8dKfUhhg0JL/f
+xijYOPNFHuRh7BUD1JdOPupBve1gq5lSLutO5A9xjeeToOURCzc=
+=KPb8
+-END PGP SIGNATURE-

Added: 
dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz.sha512
==
--- 
dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz.sha512
 (added)
+++ 
dev/flink/flink-connector-aws-4.3.0-rc2/flink-connector-aws-4.3.0-src.tgz.sha512
 Fri Apr 19 09:51:08 2024
@@ -0,0 +1 @@
+dcf27e76b1aab0c2e6c6aad18ae5e8753925c20e4a9186fbaed73d1d745a4900d4861bf7f5ebf0c1e1186d18c6bbc05c757022a590fe9db5d9d44cee3c192285
  flink-connector-aws-4.3.0-src.tgz




(flink-connector-aws) annotated tag v4.3.0-rc2 updated (a68f076 -> 009e133)

2024-04-19 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a change to annotated tag v4.3.0-rc2
in repository https://gitbox.apache.org/repos/asf/flink-connector-aws.git


*** WARNING: tag v4.3.0-rc2 was modified! ***

from a68f076  (commit)
  to 009e133  (tag)
 tagging a68f076d3b94309e7a38dc1829c9bde97e3709d9 (commit)
  by Danny Cranmer
  on Fri Apr 19 10:50:48 2024 +0100

- Log -
v4.3.0-rc2
-BEGIN PGP SIGNATURE-

iQIzBAABCAAdFiEED3nyr7I1G8KWeFRFkfnB7BJf2NsFAmYiPngACgkQkfnB7BJf
2Nv9tw/+P/HMw+wZ8Af7zXDUv0I+ql/8DPQmXntwpDOqEhC1b3ACr9HR7KbFw0Ab
/AWYJH2IGOxm/3EmgJ9L75egOm05b0gKE1GfEqNsqMO2qjTFDGUC0bHflsyzKZnh
9fNpGbRTFkVnm5szylF1CSLCiXHrpFqw0zH++ypt76YPVnvovSHDoM2oW3TRfiTu
B2JZyyECs99kirBLl2MbQRc4YrPzPPhFaUuAu95L7yUU+1q8jdFIuuX8iLrpSDmu
ZXz1Z3LejnAGlUiWJr6bZVUYzkKefqc2vYK1eedK2bYbKY6W0LoEb4ht81jzxftA
bQwZuJDyk2ldPoB708UtgajKUpYRZ1ikQq2udINAQiUuhR8h8Osv9COYk+0s3T+s
LTkIXpu/ybx9VbAjEuwEJk/l3uekURGXfAxFTLSWlFsw9tLPMrWnU0hvdUU2EuAe
3BUpfbf3cA2ZZ1qrUnyGylJKVTU2J/SHFtYawenrrB0Mb2xA84A9zMaRuQA3EPt2
I/dFHnfweqaUHGrXGHeT5hpUIoLWUX5xcHo1RuybzFgRhRR7nyKToSgENtgy2qqV
UT9Qk50/IhCNbYBp9Qjm30CScvVzd6t4kRjJsvKr4TdVd7ig9BnACj43gt3e/MBq
dsfZYaC/PwyT47MbevyXgpg1mG0n/1aD1q7JTaYuSQD7ZIzRx/M=
=vdcp
-END PGP SIGNATURE-
---


No new revisions were added by this update.

Summary of changes:



(flink) branch master updated (131358b918b -> 39779829b88)

2024-04-19 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 131358b918b [FLINK-35159] Transition ExecutionGraph to RUNNING after 
slot assignment
 add 39779829b88 [FLINK-34954][core] Kryo Input bug fix

No new revisions were added by this update.

Summary of changes:
 .../flink/api/java/typeutils/runtime/NoFetchingInput.java | 11 ++-
 1 file changed, 2 insertions(+), 9 deletions(-)



(flink-connector-rabbitmq) branch dependabot/maven/org.apache.commons-commons-compress-1.26.0 deleted (was 107b33c)

2024-04-19 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a change to branch 
dependabot/maven/org.apache.commons-commons-compress-1.26.0
in repository https://gitbox.apache.org/repos/asf/flink-connector-rabbitmq.git


 was 107b33c  Bump org.apache.commons:commons-compress from 1.24.0 to 1.26.0

The revisions that were on this branch are still contained in
other references; therefore, this change does not discard any commits
from the repository.



(flink-connector-rabbitmq) branch main updated: [hotfix] Bump version to 3.1-SNAPSHOT

2024-04-19 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-rabbitmq.git


The following commit(s) were added to refs/heads/main by this push:
 new 1b0e2a8  [hotfix] Bump version to 3.1-SNAPSHOT
1b0e2a8 is described below

commit 1b0e2a8a1e43be38d4fafead0c465ceb68bcc67b
Author: Danny Cranmer 
AuthorDate: Fri Apr 19 11:10:09 2024 +0100

[hotfix] Bump version to 3.1-SNAPSHOT
---
 flink-connector-rabbitmq/pom.xml | 2 +-
 flink-sql-connector-rabbitmq/pom.xml | 2 +-
 pom.xml  | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/flink-connector-rabbitmq/pom.xml b/flink-connector-rabbitmq/pom.xml
index fc6d527..93cb8de 100644
--- a/flink-connector-rabbitmq/pom.xml
+++ b/flink-connector-rabbitmq/pom.xml
@@ -26,7 +26,7 @@ under the License.

org.apache.flink
flink-connector-rabbitmq-parent
-   3.0-SNAPSHOT
+   3.1-SNAPSHOT

 
flink-connector-rabbitmq
diff --git a/flink-sql-connector-rabbitmq/pom.xml 
b/flink-sql-connector-rabbitmq/pom.xml
index ca479f0..68c5779 100644
--- a/flink-sql-connector-rabbitmq/pom.xml
+++ b/flink-sql-connector-rabbitmq/pom.xml
@@ -26,7 +26,7 @@ under the License.

org.apache.flink
flink-connector-rabbitmq-parent
-   3.0-SNAPSHOT
+   3.1-SNAPSHOT

 
flink-sql-connector-rabbitmq
diff --git a/pom.xml b/pom.xml
index 211db42..43381e5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -29,7 +29,7 @@ under the License.
 
 org.apache.flink
 flink-connector-rabbitmq-parent
-3.0-SNAPSHOT
+3.1-SNAPSHOT
 Flink : Connectors : RabbitMQ : Parent
 pom
 2022



(flink-connector-rabbitmq) branch main updated: [FLINK-35142][Connectors/RabbitMQ] Drop support for Flink 1.17

2024-04-19 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-rabbitmq.git


The following commit(s) were added to refs/heads/main by this push:
 new 8e8a022  [FLINK-35142][Connectors/RabbitMQ] Drop support for Flink 1.17
8e8a022 is described below

commit 8e8a02282e248fc73f20f1ac1fe21250629e9c8f
Author: Danny Cranmer 
AuthorDate: Fri Apr 19 11:02:41 2024 +0100

[FLINK-35142][Connectors/RabbitMQ] Drop support for Flink 1.17
---
 .github/workflows/push_pr.yml |  6 ++
 .github/workflows/weekly.yml  | 12 
 2 files changed, 2 insertions(+), 16 deletions(-)

diff --git a/.github/workflows/push_pr.yml b/.github/workflows/push_pr.yml
index f4289dc..da8a577 100644
--- a/.github/workflows/push_pr.yml
+++ b/.github/workflows/push_pr.yml
@@ -28,11 +28,9 @@ jobs:
   compile_and_test:
 strategy:
   matrix:
-flink: [ 1.16-SNAPSHOT, 1.17-SNAPSHOT ]
-jdk: [ '8, 11' ]
+flink: [ 1.18-SNAPSHOT ]
+jdk: [ '8, 11, 17' ]
 include:
-  - flink: 1.18-SNAPSHOT
-jdk: '8, 11, 17'
   - flink: 1.19-SNAPSHOT
 jdk: '8, 11, 17, 21'
 uses: apache/flink-connector-shared-utils/.github/workflows/ci.yml@ci_utils
diff --git a/.github/workflows/weekly.yml b/.github/workflows/weekly.yml
index 4e8372b..8385763 100644
--- a/.github/workflows/weekly.yml
+++ b/.github/workflows/weekly.yml
@@ -30,12 +30,6 @@ jobs:
 strategy:
   matrix:
 flink_branches: [{
-  flink: 1.16-SNAPSHOT,
-  branch: main
-}, {
-  flink: 1.17-SNAPSHOT,
-  branch: main
-}, {
   flink: 1.18-SNAPSHOT,
   jdk: '8, 11, 17',
   branch: main
@@ -43,12 +37,6 @@ jobs:
   flink: 1.19-SNAPSHOT,
   jdk: '8, 11, 17, 21',
   branch: main
-}, {
-  flink: 1.16.3,
-  branch: v3.0
-}, {
-  flink: 1.17.2,
-  branch: v3.0
 }, {
   flink: 1.18.0,
   jdk: '8, 11, 17',



(flink-connector-rabbitmq) 01/02: [hotfix] Update copyright year to 2024

2024-04-19 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-rabbitmq.git

commit ddbfa8f9d66d6a2c20c678382d52b43d977360cf
Author: Hang Ruan 
AuthorDate: Mon Jan 29 13:53:37 2024 +0800

[hotfix] Update copyright year to 2024
---
 flink-sql-connector-rabbitmq/src/main/resources/META-INF/NOTICE | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flink-sql-connector-rabbitmq/src/main/resources/META-INF/NOTICE 
b/flink-sql-connector-rabbitmq/src/main/resources/META-INF/NOTICE
index e4f475f..9fbfdc5 100644
--- a/flink-sql-connector-rabbitmq/src/main/resources/META-INF/NOTICE
+++ b/flink-sql-connector-rabbitmq/src/main/resources/META-INF/NOTICE
@@ -1,5 +1,5 @@
 flink-sql-connector-rabbitmq
-Copyright 2014-2023 The Apache Software Foundation
+Copyright 2014-2024 The Apache Software Foundation
 
 This product includes software developed at
 The Apache Software Foundation (http://www.apache.org/).



(flink-connector-rabbitmq) branch main updated (1b0e2a8 -> 6b8a906)

2024-04-19 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a change to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-rabbitmq.git


from 1b0e2a8  [hotfix] Bump version to 3.1-SNAPSHOT
 new ddbfa8f  [hotfix] Update copyright year to 2024
 new 6b8a906  [hotfix] Fix the dependency convergence rule

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 flink-connector-rabbitmq/pom.xml   |  6 ++
 .../src/main/resources/META-INF/NOTICE |  2 +-
 pom.xml| 14 +-
 3 files changed, 20 insertions(+), 2 deletions(-)



(flink-connector-rabbitmq) 02/02: [hotfix] Fix the dependency convergence rule

2024-04-19 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-rabbitmq.git

commit 6b8a9060629e72fa08c9f78507c26ea5f27c2f3f
Author: Hang Ruan 
AuthorDate: Tue Feb 6 17:15:03 2024 +0800

[hotfix] Fix the dependency convergence rule
---
 flink-connector-rabbitmq/pom.xml |  6 ++
 pom.xml  | 14 +-
 2 files changed, 19 insertions(+), 1 deletion(-)

diff --git a/flink-connector-rabbitmq/pom.xml b/flink-connector-rabbitmq/pom.xml
index 93cb8de..22d0c3a 100644
--- a/flink-connector-rabbitmq/pom.xml
+++ b/flink-connector-rabbitmq/pom.xml
@@ -80,6 +80,12 @@ under the License.
org.testcontainers
rabbitmq
test
+   
+   
+   
com.fasterxml.jackson.core
+   
jackson-annotations
+   
+   

 

diff --git a/pom.xml b/pom.xml
index 43381e5..91701fe 100644
--- a/pom.xml
+++ b/pom.xml
@@ -110,6 +110,12 @@ under the License.
 org.testcontainers
 junit-jupiter
 test
+
+
+com.fasterxml.jackson.core
+jackson-annotations
+
+
 
 
 
@@ -136,6 +142,12 @@ under the License.
 org.apache.flink
 flink-test-utils-junit
 test
+
+
+com.fasterxml.jackson.core
+jackson-annotations
+
+
 
 
 
@@ -400,4 +412,4 @@ under the License.
 
 
 
-
\ No newline at end of file
+



(flink-connector-rabbitmq) branch main updated: [FLINK-35174][Connectors/RabbitMQ] Bump commons-compress to 1.26.1

2024-04-19 Thread dannycranmer
This is an automated email from the ASF dual-hosted git repository.

dannycranmer pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-rabbitmq.git


The following commit(s) were added to refs/heads/main by this push:
 new 66e323a  [FLINK-35174][Connectors/RabbitMQ] Bump commons-compress to 
1.26.1
66e323a is described below

commit 66e323a3e79befc08ae03f2789a8aa94b343d504
Author: Danny Cranmer 
AuthorDate: Fri Apr 19 11:14:21 2024 +0100

[FLINK-35174][Connectors/RabbitMQ] Bump commons-compress to 1.26.1
---
 pom.xml | 16 +++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 91701fe..23c5fd7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -320,7 +320,21 @@ under the License.
 
 org.apache.commons
 commons-compress
-1.24.0
+1.26.1
+
+
+
+
+org.apache.commons
+commons-lang3
+3.14.0
+
+
+
+
+commons-io
+commons-io
+2.15.1
 
 
 



Re: [I] The XML dependency of Flink Oracle CDC conflicts with the XML related classes of Java8:ERROR StatusLogger Caught javax.xml.parsers.ParserConfigurationException [flink-cdc]

2024-04-19 Thread via GitHub


Aiden-Rose commented on issue #3195:
URL: https://github.com/apache/flink-cdc/issues/3195#issuecomment-2066353680

   I also encountered the same problem. Have you resolved it


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@flink.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



(flink) branch release-1.18 updated: [FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment

2024-04-19 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new aacc735806a [FLINK-35159] Transition ExecutionGraph to RUNNING after 
slot assignment
aacc735806a is described below

commit aacc735806acf1d63fa732706e079bc2ca1bb4fc
Author: Chesnay Schepler 
AuthorDate: Thu Apr 18 19:10:42 2024 +0200

[FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment
---
 .../flink/runtime/scheduler/adaptive/AdaptiveScheduler.java   | 1 -
 .../flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java  | 5 -
 .../runtime/scheduler/adaptive/CreatingExecutionGraphTest.java| 8 ++--
 3 files changed, 10 insertions(+), 4 deletions(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
index 4ee22c95848..34539d23e04 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
@@ -1096,7 +1096,6 @@ public class AdaptiveScheduler
 executionGraphWithVertexParallelism.getExecutionGraph();
 
 executionGraph.start(componentMainThreadExecutor);
-executionGraph.transitionToRunning();
 
 executionGraph.setInternalTaskFailuresListener(
 new UpdateSchedulerNgOnInternalFailuresListener(this));
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
index c876fe6ad1d..21055945372 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
@@ -124,7 +124,6 @@ public class CreatingExecutionGraph implements State {
 
operatorCoordinatorHandlerFactory.create(executionGraph, context);
 operatorCoordinatorHandler.initializeOperatorCoordinators(
 context.getMainThreadExecutor());
-operatorCoordinatorHandler.startAllOperatorCoordinators();
 final String updatedPlan =
 JsonPlanGenerator.generatePlan(
 executionGraph.getJobID(),
@@ -138,6 +137,10 @@ public class CreatingExecutionGraph implements State {
 .iterator(),
 
executionGraphWithVertexParallelism.getVertexParallelism());
 executionGraph.setJsonPlan(updatedPlan);
+
+executionGraph.transitionToRunning();
+operatorCoordinatorHandler.startAllOperatorCoordinators();
+
 context.goToExecuting(
 result.getExecutionGraph(),
 executionGraphHandler,
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
index 2375a194206..69e5f589b19 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
@@ -157,8 +157,12 @@ public class CreatingExecutionGraphTest extends TestLogger 
{
 ignored -> 
CreatingExecutionGraph.AssignmentResult.notPossible());
 context.setExpectWaitingForResources();
 
-executionGraphWithVertexParallelismFuture.complete(
-getGraph(new StateTrackingMockExecutionGraph()));
+final StateTrackingMockExecutionGraph executionGraph =
+new StateTrackingMockExecutionGraph();
+
+
executionGraphWithVertexParallelismFuture.complete(getGraph(executionGraph));
+
+
assertThat(executionGraph.getState()).isEqualTo(JobStatus.INITIALIZING);
 }
 }
 



(flink) branch release-1.19 updated: [FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment

2024-04-19 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.19 by this push:
 new 87ed9ccc210 [FLINK-35159] Transition ExecutionGraph to RUNNING after 
slot assignment
87ed9ccc210 is described below

commit 87ed9ccc2103457ba91f6ca45adfd2bfcc75c9ac
Author: Chesnay Schepler 
AuthorDate: Thu Apr 18 19:10:42 2024 +0200

[FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment
---
 .../flink/runtime/scheduler/adaptive/AdaptiveScheduler.java   | 1 -
 .../flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java  | 5 -
 .../runtime/scheduler/adaptive/CreatingExecutionGraphTest.java| 8 ++--
 3 files changed, 10 insertions(+), 4 deletions(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
index 5f6438ce181..238c594fd55 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
@@ -1187,7 +1187,6 @@ public class AdaptiveScheduler
 executionGraphWithVertexParallelism.getExecutionGraph();
 
 executionGraph.start(componentMainThreadExecutor);
-executionGraph.transitionToRunning();
 
 executionGraph.setInternalTaskFailuresListener(
 new UpdateSchedulerNgOnInternalFailuresListener(this));
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
index da90ef1468d..e9b1317e46e 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
@@ -123,7 +123,6 @@ public class CreatingExecutionGraph extends 
StateWithoutExecutionGraph {
 
operatorCoordinatorHandlerFactory.create(executionGraph, context);
 operatorCoordinatorHandler.initializeOperatorCoordinators(
 context.getMainThreadExecutor());
-operatorCoordinatorHandler.startAllOperatorCoordinators();
 final String updatedPlan =
 JsonPlanGenerator.generatePlan(
 executionGraph.getJobID(),
@@ -137,6 +136,10 @@ public class CreatingExecutionGraph extends 
StateWithoutExecutionGraph {
 .iterator(),
 
executionGraphWithVertexParallelism.getVertexParallelism());
 executionGraph.setJsonPlan(updatedPlan);
+
+executionGraph.transitionToRunning();
+operatorCoordinatorHandler.startAllOperatorCoordinators();
+
 context.goToExecuting(
 result.getExecutionGraph(),
 executionGraphHandler,
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
index b831b3bb62f..0f89cdf7e12 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
@@ -93,8 +93,12 @@ class CreatingExecutionGraphTest {
 ignored -> 
CreatingExecutionGraph.AssignmentResult.notPossible());
 context.setExpectWaitingForResources();
 
-executionGraphWithVertexParallelismFuture.complete(
-getGraph(new StateTrackingMockExecutionGraph()));
+final StateTrackingMockExecutionGraph executionGraph =
+new StateTrackingMockExecutionGraph();
+
+
executionGraphWithVertexParallelismFuture.complete(getGraph(executionGraph));
+
+
assertThat(executionGraph.getState()).isEqualTo(JobStatus.INITIALIZING);
 }
 
 @Test



(flink-kubernetes-operator) branch main updated: [FLINK-34574] Add CPU and memory size autoscaler quota

2024-04-19 Thread gyfora
This is an automated email from the ASF dual-hosted git repository.

gyfora pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
 new baad9008 [FLINK-34574] Add CPU and memory size autoscaler quota
baad9008 is described below

commit baad90088ea5b5b240186a530a79b64fb84cc77e
Author: Gabor Somogyi 
AuthorDate: Fri Apr 19 17:36:24 2024 +0200

[FLINK-34574] Add CPU and memory size autoscaler quota
---
 .../generated/auto_scaler_configuration.html   |  12 +
 .../apache/flink/autoscaler/ScalingExecutor.java   | 113 +-
 .../flink/autoscaler/ScalingMetricCollector.java   |  15 +-
 .../flink/autoscaler/config/AutoScalerOptions.java |  17 +
 .../flink/autoscaler/topology/JobTopology.java |  18 +-
 .../flink/autoscaler/topology/VertexInfo.java  |  23 +-
 .../runtime/rest/messages/job/JobDetailsInfo.java  | 448 +
 .../json/SlotSharingGroupIDDeserializer.java   |  44 ++
 .../json/SlotSharingGroupIDSerializer.java |  43 ++
 .../messages/json/SlotSharingGroupIdConverter.java |  42 ++
 .../MetricsCollectionAndEvaluationTest.java|  11 +-
 .../flink/autoscaler/ScalingExecutorTest.java  | 193 -
 .../autoscaler/ScalingMetricCollectorTest.java |  10 +
 .../flink/autoscaler/TestingAutoscalerUtils.java   |   9 +-
 .../flink/autoscaler/topology/JobTopologyTest.java |   2 +-
 .../operator/config/FlinkConfigBuilder.java|  18 +-
 .../kubernetes/operator/utils/EventRecorder.java   |   1 +
 .../operator/config/FlinkConfigBuilderTest.java|   4 +
 18 files changed, 977 insertions(+), 46 deletions(-)

diff --git a/docs/layouts/shortcodes/generated/auto_scaler_configuration.html 
b/docs/layouts/shortcodes/generated/auto_scaler_configuration.html
index b3466b63..d9bed101 100644
--- a/docs/layouts/shortcodes/generated/auto_scaler_configuration.html
+++ b/docs/layouts/shortcodes/generated/auto_scaler_configuration.html
@@ -116,6 +116,18 @@
 Double
 Percentage threshold for switching to observed from busy time 
based true processing rate if the measurement is off by at least the configured 
fraction. For example 0.15 means we switch to observed if the busy time based 
computation is at least 15% higher during catchup.
 
+
+job.autoscaler.quota.cpu
+(none)
+Double
Quota of the CPU count. When scaling would go beyond this 
number the scaling is not going to happen.
+
+
+job.autoscaler.quota.memory
+(none)
+MemorySize
Quota of the memory size. When scaling would go beyond this 
number the scaling is not going to happen.
+
 
 job.autoscaler.restart.time
 5 min
diff --git 
a/flink-autoscaler/src/main/java/org/apache/flink/autoscaler/ScalingExecutor.java
 
b/flink-autoscaler/src/main/java/org/apache/flink/autoscaler/ScalingExecutor.java
index 71efc160..af325371 100644
--- 
a/flink-autoscaler/src/main/java/org/apache/flink/autoscaler/ScalingExecutor.java
+++ 
b/flink-autoscaler/src/main/java/org/apache/flink/autoscaler/ScalingExecutor.java
@@ -33,6 +33,7 @@ import org.apache.flink.autoscaler.utils.ResourceCheckUtils;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.configuration.MemorySize;
 import org.apache.flink.configuration.TaskManagerOptions;
+import org.apache.flink.runtime.instance.SlotSharingGroupId;
 import org.apache.flink.runtime.jobgraph.JobVertexID;
 
 import org.slf4j.Logger;
@@ -67,6 +68,9 @@ public class ScalingExecutor> {
 public static final String HEAP_USAGE_MESSAGE =
 "Heap Usage %s is above the allowed limit for scaling operations. 
Please adjust the available memory manually.";
 
+public static final String RESOURCE_QUOTA_REACHED_MESSAGE =
+"Resource usage is above the allowed limit for scaling operations. 
Please adjust the resource quota manually.";
+
 private static final Logger LOG = 
LoggerFactory.getLogger(ScalingExecutor.class);
 
 private final JobVertexScaler jobVertexScaler;
@@ -129,8 +133,10 @@ public class ScalingExecutor> {
 scalingSummaries,
 autoScalerEventHandler);
 
-if (scalingWouldExceedClusterResources(
-configOverrides.newConfigWithOverrides(conf),
+var memoryTuningEnabled = 
conf.get(AutoScalerOptions.MEMORY_TUNING_ENABLED);
+if (scalingWouldExceedMaxResources(
+memoryTuningEnabled ? 
configOverrides.newConfigWithOverrides(conf) : conf,
+jobTopology,
 evaluatedMetrics,
 scalingSummaries,
 context)) {
@@ -280,6 +286,30 @@ public class ScalingExecutor> {
 return false;
 }
 
+@VisibleForTesting
+protected boolean scalingWouldExceedMaxResourc

(flink) branch master updated (39779829b88 -> 7c4dec63eb4)

2024-04-19 Thread thw
This is an automated email from the ASF dual-hosted git repository.

thw pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 39779829b88 [FLINK-34954][core] Kryo Input bug fix
 add 7c4dec63eb4 [FLINK-28048][connectors] Introduce Source API alternative 
to FiniteTestSource (#23777)

No new revisions were added by this update.

Summary of changes:
 .../f7a4e6fa-e7de-48c9-a61e-c13e83f0c72e   | 118 +++
 flink-connectors/flink-connector-datagen/pom.xml   |  16 +++
 ...tion.java => IndexLookupGeneratorFunction.java} | 145 ---
 .../datagen/source/DataGeneratorSource.java|   2 +-
 ...ittingSourceReaderWithCheckpointsInBetween.java | 158 +
 .../datagen/source/TestDataGenerators.java |  83 +++
 flink-connectors/flink-connector-hive/pom.xml  |   8 ++
 .../flink/connectors/hive/HiveTableSinkITCase.java |  49 ---
 .../connectors/hive/HiveTableSourceITCase.java |  40 +++---
 .../source/lib/util/IteratorSourceReaderBase.java  |  10 +-
 flink-formats/flink-avro/pom.xml   |   8 ++
 .../formats/avro/AvroStreamingFileSinkITCase.java  |  25 +++-
 flink-formats/flink-compress/pom.xml   |   8 ++
 .../formats/compress/CompressionFactoryITCase.java |  10 +-
 flink-formats/flink-csv/pom.xml|   8 ++
 flink-formats/flink-hadoop-bulk/pom.xml|   8 ++
 .../bulk/HadoopPathBasedPartFileWriterITCase.java  |  15 +-
 flink-formats/flink-json/pom.xml   |   8 ++
 flink-formats/flink-orc/pom.xml|   8 ++
 .../flink/orc/writer/OrcBulkWriterITCase.java  |  10 +-
 flink-formats/flink-parquet/pom.xml|   8 ++
 .../avro/AvroParquetStreamingFileSinkITCase.java   |  21 ++-
 .../ParquetProtoStreamingFileSinkITCase.java   |  10 +-
 flink-formats/flink-sequence-file/pom.xml  |   8 ++
 .../SequenceStreamingFileSinkITCase.java   |  12 +-
 flink-table/flink-table-planner/pom.xml|  10 +-
 .../runtime/stream/sql/CompactionITCaseBase.java   |  20 +--
 flink-tests/pom.xml|   2 +
 .../flink/test/streaming/runtime/SinkITCase.java   |  75 +-
 29 files changed, 652 insertions(+), 251 deletions(-)
 copy 
flink-connectors/flink-connector-datagen/src/main/java/org/apache/flink/connector/datagen/functions/{FromElementsGeneratorFunction.java
 => IndexLookupGeneratorFunction.java} (61%)
 create mode 100644 
flink-connectors/flink-connector-datagen/src/main/java/org/apache/flink/connector/datagen/source/DoubleEmittingSourceReaderWithCheckpointsInBetween.java
 create mode 100644 
flink-connectors/flink-connector-datagen/src/test/java/org/apache/flink/connector/datagen/source/TestDataGenerators.java