(flink) 01/02: [hotfix][test] Assert the slot allocation eventually succeeds in dedicated tests of DefaultSlotStatusSyncerTest

2024-02-19 Thread guoyangze
This is an automated email from the ASF dual-hosted git repository.

guoyangze pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 8cf29969d9aec4943713f0a6096b703718ce0dd0
Author: Yangze Guo 
AuthorDate: Mon Feb 19 17:58:06 2024 +0800

[hotfix][test] Assert the slot allocation eventually succeeds in dedicated 
tests of DefaultSlotStatusSyncerTest

Also deduplicate the code of these tests.
---
 .../slotmanager/DefaultSlotStatusSyncerTest.java   | 178 -
 1 file changed, 69 insertions(+), 109 deletions(-)

diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
index cf60a7e5e5f..4c3d2d5a723 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
@@ -19,11 +19,11 @@ package 
org.apache.flink.runtime.resourcemanager.slotmanager;
 
 import org.apache.flink.api.common.JobID;
 import org.apache.flink.api.common.time.Time;
-import org.apache.flink.api.java.tuple.Tuple6;
 import org.apache.flink.runtime.clusterframework.types.AllocationID;
 import org.apache.flink.runtime.clusterframework.types.ResourceID;
 import org.apache.flink.runtime.clusterframework.types.ResourceProfile;
 import org.apache.flink.runtime.clusterframework.types.SlotID;
+import org.apache.flink.runtime.instance.InstanceID;
 import org.apache.flink.runtime.messages.Acknowledge;
 import org.apache.flink.runtime.resourcemanager.ResourceManagerId;
 import 
org.apache.flink.runtime.resourcemanager.registration.TaskExecutorConnection;
@@ -35,15 +35,18 @@ import 
org.apache.flink.runtime.taskexecutor.TestingTaskExecutorGatewayBuilder;
 import org.apache.flink.testutils.TestingUtils;
 import org.apache.flink.testutils.executor.TestExecutorExtension;
 import org.apache.flink.util.concurrent.FutureUtils;
+import org.apache.flink.util.function.QuadConsumer;
 
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.RegisterExtension;
 
 import java.util.Arrays;
 import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeoutException;
 
+import static org.apache.flink.core.testutils.FlinkAssertions.assertThatFuture;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
 
@@ -60,118 +63,18 @@ class DefaultSlotStatusSyncerTest {
 TestingUtils.defaultExecutorExtension();
 
 @Test
-void testAllocateSlot() throws Exception {
-final FineGrainedTaskManagerTracker taskManagerTracker =
-new FineGrainedTaskManagerTracker();
-final CompletableFuture<
-Tuple6<
-SlotID,
-JobID,
-AllocationID,
-ResourceProfile,
-String,
-ResourceManagerId>>
-requestFuture = new CompletableFuture<>();
-final CompletableFuture responseFuture = new 
CompletableFuture<>();
-final TestingTaskExecutorGateway taskExecutorGateway =
-new TestingTaskExecutorGatewayBuilder()
-.setRequestSlotFunction(
-tuple6 -> {
-requestFuture.complete(tuple6);
-return responseFuture;
-})
-.createTestingTaskExecutorGateway();
-final TaskExecutorConnection taskExecutorConnection =
-new TaskExecutorConnection(ResourceID.generate(), 
taskExecutorGateway);
-taskManagerTracker.addTaskManager(
-taskExecutorConnection, ResourceProfile.ANY, 
ResourceProfile.ANY);
-final ResourceTracker resourceTracker = new DefaultResourceTracker();
-final JobID jobId = new JobID();
-final SlotStatusSyncer slotStatusSyncer =
-new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
-slotStatusSyncer.initialize(
-taskManagerTracker,
-resourceTracker,
-ResourceManagerId.generate(),
-EXECUTOR_RESOURCE.getExecutor());
-
-final CompletableFuture allocatedFuture =
-slotStatusSyncer.allocateSlot(
-taskExecutorConnection.getInstanceID(),
-jobId,
-"address",
-ResourceProfile.ANY);
-final AllocationID

(flink) 02/02: [FLINK-34434][slotmanager] Complete the returnedFuture when slot removed before allocation success

2024-02-19 Thread guoyangze
This is an automated email from the ASF dual-hosted git repository.

guoyangze pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 45d4dc10248402757e203aa266b19c95e2e93b46
Author: Yangze Guo 
AuthorDate: Mon Feb 19 10:35:45 2024 +0800

[FLINK-34434][slotmanager] Complete the returnedFuture when slot removed 
before allocation success
---
 .../resourcemanager/slotmanager/DefaultSlotStatusSyncer.java   |  2 +-
 .../slotmanager/DefaultSlotStatusSyncerTest.java   | 10 ++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncer.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncer.java
index 0d687df9ad4..794db8125c2 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncer.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncer.java
@@ -152,7 +152,7 @@ public class DefaultSlotStatusSyncer implements 
SlotStatusSyncer {
 LOG.debug(
 "The slot {} has been removed before. 
Ignore the future.",
 allocationId);
-requestFuture.complete(null);
+returnedFuture.complete(null);
 return null;
 }
 if (acknowledge != null) {
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
index 4c3d2d5a723..ebce5c72b65 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
@@ -119,6 +119,16 @@ class DefaultSlotStatusSyncerTest {
 
assertThat(taskManagerInfo.getAllocatedSlots()).isEmpty());
 }
 
+@Test
+void testAllocationUpdatesIgnoredIfSlotRemoved() throws Exception {
+testSlotAllocation(
+(slotStatusSyncer, taskManagerTracker, instanceID, 
allocationId) -> {
+taskManagerTracker.removeTaskManager(instanceID);
+
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId))
+.isEmpty();
+});
+}
+
 @Test
 void testFreeSlot() {
 final FineGrainedTaskManagerTracker taskManagerTracker =



(flink) branch release-1.19 updated (ec73bc5cf5c -> 45d4dc10248)

2024-02-19 Thread guoyangze
This is an automated email from the ASF dual-hosted git repository.

guoyangze pushed a change to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


from ec73bc5cf5c [FLINK-22765][test] Hardens 
ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError in JDK21
 new 8cf29969d9a [hotfix][test] Assert the slot allocation eventually 
succeeds in dedicated tests of DefaultSlotStatusSyncerTest
 new 45d4dc10248 [FLINK-34434][slotmanager] Complete the returnedFuture 
when slot removed before allocation success

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../slotmanager/DefaultSlotStatusSyncer.java   |   2 +-
 .../slotmanager/DefaultSlotStatusSyncerTest.java   | 188 +
 2 files changed, 80 insertions(+), 110 deletions(-)



(flink) branch master updated: [hotfix][docs] Update the versions of mongodb supported by mongodb-connector

2024-02-19 Thread leonard
This is an automated email from the ASF dual-hosted git repository.

leonard pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 68d8c5b3bd8 [hotfix][docs] Update the versions of mongodb supported by 
mongodb-connector
68d8c5b3bd8 is described below

commit 68d8c5b3bd8c5f1d5febeb68eb961269cc6dd414
Author: Jiabao Sun 
AuthorDate: Tue Feb 20 11:23:15 2024 +0800

[hotfix][docs] Update the versions of mongodb supported by mongodb-connector

This closes #24337.
---
 docs/content.zh/docs/connectors/table/overview.md | 2 +-
 docs/content/docs/connectors/table/overview.md| 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/content.zh/docs/connectors/table/overview.md 
b/docs/content.zh/docs/connectors/table/overview.md
index 8915e2a496b..1bc53c53f43 100644
--- a/docs/content.zh/docs/connectors/table/overview.md
+++ b/docs/content.zh/docs/connectors/table/overview.md
@@ -110,7 +110,7 @@ Flink natively support various connectors. The following 
tables list all availab
 
 
   }}">MongoDB
-  
+  3.6.x & 4.x & 5.x & 6.0.x
   Bounded Scan, Lookup
   Streaming Sink, Batch Sink
 
diff --git a/docs/content/docs/connectors/table/overview.md 
b/docs/content/docs/connectors/table/overview.md
index 3700684a1c4..cf1424ed521 100644
--- a/docs/content/docs/connectors/table/overview.md
+++ b/docs/content/docs/connectors/table/overview.md
@@ -110,7 +110,7 @@ Flink natively support various connectors. The following 
tables list all availab
 
 
   }}">MongoDB
-  
+  3.6.x & 4.x & 5.x & 6.0.x
   Bounded Scan, Lookup
   Streaming Sink, Batch Sink
 



(flink) 02/02: [FLINK-34434][slotmanager] Complete the returnedFuture when slot removed before allocation success

2024-02-19 Thread guoyangze
This is an automated email from the ASF dual-hosted git repository.

guoyangze pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 21cfe998f4fb21afe24ceb8b6f4fef180e89b9e9
Author: Yangze Guo 
AuthorDate: Mon Feb 19 10:35:45 2024 +0800

[FLINK-34434][slotmanager] Complete the returnedFuture when slot removed 
before allocation success
---
 .../resourcemanager/slotmanager/DefaultSlotStatusSyncer.java   |  2 +-
 .../slotmanager/DefaultSlotStatusSyncerTest.java   | 10 ++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncer.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncer.java
index 0d687df9ad4..794db8125c2 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncer.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncer.java
@@ -152,7 +152,7 @@ public class DefaultSlotStatusSyncer implements 
SlotStatusSyncer {
 LOG.debug(
 "The slot {} has been removed before. 
Ignore the future.",
 allocationId);
-requestFuture.complete(null);
+returnedFuture.complete(null);
 return null;
 }
 if (acknowledge != null) {
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
index 4c3d2d5a723..ebce5c72b65 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
@@ -119,6 +119,16 @@ class DefaultSlotStatusSyncerTest {
 
assertThat(taskManagerInfo.getAllocatedSlots()).isEmpty());
 }
 
+@Test
+void testAllocationUpdatesIgnoredIfSlotRemoved() throws Exception {
+testSlotAllocation(
+(slotStatusSyncer, taskManagerTracker, instanceID, 
allocationId) -> {
+taskManagerTracker.removeTaskManager(instanceID);
+
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId))
+.isEmpty();
+});
+}
+
 @Test
 void testFreeSlot() {
 final FineGrainedTaskManagerTracker taskManagerTracker =



(flink) 01/02: [hotfix][test] Assert the slot allocation eventually succeed in dedicated tests of DefaultSlotStatusSyncerTest

2024-02-19 Thread guoyangze
This is an automated email from the ASF dual-hosted git repository.

guoyangze pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git

commit e95cb6e73900fbbc2039407be1bd87271b2a950b
Author: Yangze Guo 
AuthorDate: Mon Feb 19 17:58:06 2024 +0800

[hotfix][test] Assert the slot allocation eventually succeeds in dedicated 
tests of DefaultSlotStatusSyncerTest

Also deduplicate the code of these tests.
---
 .../slotmanager/DefaultSlotStatusSyncerTest.java   | 178 -
 1 file changed, 69 insertions(+), 109 deletions(-)

diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
index cf60a7e5e5f..4c3d2d5a723 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/DefaultSlotStatusSyncerTest.java
@@ -19,11 +19,11 @@ package 
org.apache.flink.runtime.resourcemanager.slotmanager;
 
 import org.apache.flink.api.common.JobID;
 import org.apache.flink.api.common.time.Time;
-import org.apache.flink.api.java.tuple.Tuple6;
 import org.apache.flink.runtime.clusterframework.types.AllocationID;
 import org.apache.flink.runtime.clusterframework.types.ResourceID;
 import org.apache.flink.runtime.clusterframework.types.ResourceProfile;
 import org.apache.flink.runtime.clusterframework.types.SlotID;
+import org.apache.flink.runtime.instance.InstanceID;
 import org.apache.flink.runtime.messages.Acknowledge;
 import org.apache.flink.runtime.resourcemanager.ResourceManagerId;
 import 
org.apache.flink.runtime.resourcemanager.registration.TaskExecutorConnection;
@@ -35,15 +35,18 @@ import 
org.apache.flink.runtime.taskexecutor.TestingTaskExecutorGatewayBuilder;
 import org.apache.flink.testutils.TestingUtils;
 import org.apache.flink.testutils.executor.TestExecutorExtension;
 import org.apache.flink.util.concurrent.FutureUtils;
+import org.apache.flink.util.function.QuadConsumer;
 
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.RegisterExtension;
 
 import java.util.Arrays;
 import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeoutException;
 
+import static org.apache.flink.core.testutils.FlinkAssertions.assertThatFuture;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
 
@@ -60,118 +63,18 @@ class DefaultSlotStatusSyncerTest {
 TestingUtils.defaultExecutorExtension();
 
 @Test
-void testAllocateSlot() throws Exception {
-final FineGrainedTaskManagerTracker taskManagerTracker =
-new FineGrainedTaskManagerTracker();
-final CompletableFuture<
-Tuple6<
-SlotID,
-JobID,
-AllocationID,
-ResourceProfile,
-String,
-ResourceManagerId>>
-requestFuture = new CompletableFuture<>();
-final CompletableFuture responseFuture = new 
CompletableFuture<>();
-final TestingTaskExecutorGateway taskExecutorGateway =
-new TestingTaskExecutorGatewayBuilder()
-.setRequestSlotFunction(
-tuple6 -> {
-requestFuture.complete(tuple6);
-return responseFuture;
-})
-.createTestingTaskExecutorGateway();
-final TaskExecutorConnection taskExecutorConnection =
-new TaskExecutorConnection(ResourceID.generate(), 
taskExecutorGateway);
-taskManagerTracker.addTaskManager(
-taskExecutorConnection, ResourceProfile.ANY, 
ResourceProfile.ANY);
-final ResourceTracker resourceTracker = new DefaultResourceTracker();
-final JobID jobId = new JobID();
-final SlotStatusSyncer slotStatusSyncer =
-new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
-slotStatusSyncer.initialize(
-taskManagerTracker,
-resourceTracker,
-ResourceManagerId.generate(),
-EXECUTOR_RESOURCE.getExecutor());
-
-final CompletableFuture allocatedFuture =
-slotStatusSyncer.allocateSlot(
-taskExecutorConnection.getInstanceID(),
-jobId,
-"address",
-ResourceProfile.ANY);
-final AllocationID

(flink) branch release-1.18 updated (a9ce499e678 -> 21cfe998f4f)

2024-02-19 Thread guoyangze
This is an automated email from the ASF dual-hosted git repository.

guoyangze pushed a change to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


from a9ce499e678 [FLINK-22765][test] Hardens 
ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError
 new e95cb6e7390 [hotfix][test] Assert the slot allocation eventually 
succeeds in dedicated tests of DefaultSlotStatusSyncerTest
 new 21cfe998f4f [FLINK-34434][slotmanager] Complete the returnedFuture 
when slot removed before allocation success

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../slotmanager/DefaultSlotStatusSyncer.java   |   2 +-
 .../slotmanager/DefaultSlotStatusSyncerTest.java   | 188 +
 2 files changed, 80 insertions(+), 110 deletions(-)



(flink) branch master updated (e2e3de2d48e -> 1a494bc1f04)

2024-02-19 Thread guoyangze
This is an automated email from the ASF dual-hosted git repository.

guoyangze pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from e2e3de2d48e [FLINK-34336][test] Fix the bug that AutoRescalingITCase 
may hang sometimes
 add 15af3e49ca4 [hotfix][test] Assert the slot allocation eventually 
succeeds in dedicated tests of DefaultSlotStatusSyncerTest
 add 1a494bc1f04 [FLINK-34434][slotmanager] Complete the returnedFuture 
when slot removed before allocation success

No new revisions were added by this update.

Summary of changes:
 .../slotmanager/DefaultSlotStatusSyncer.java   |   2 +-
 .../slotmanager/DefaultSlotStatusSyncerTest.java   | 188 +
 2 files changed, 80 insertions(+), 110 deletions(-)



(flink) branch master updated: [FLINK-34336][test] Fix the bug that AutoRescalingITCase may hang sometimes

2024-02-19 Thread fanrui
This is an automated email from the ASF dual-hosted git repository.

fanrui pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new e2e3de2d48e [FLINK-34336][test] Fix the bug that AutoRescalingITCase 
may hang sometimes
e2e3de2d48e is described below

commit e2e3de2d48e3f02b746bdbdcb4da7b0477986a11
Author: Rui Fan <1996fan...@gmail.com>
AuthorDate: Fri Feb 2 13:08:01 2024 +0800

[FLINK-34336][test] Fix the bug that AutoRescalingITCase may hang sometimes
---
 .../org/apache/flink/test/checkpointing/AutoRescalingITCase.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git 
a/flink-tests/src/test/java/org/apache/flink/test/checkpointing/AutoRescalingITCase.java
 
b/flink-tests/src/test/java/org/apache/flink/test/checkpointing/AutoRescalingITCase.java
index ba13faca294..404e129eb4e 100644
--- 
a/flink-tests/src/test/java/org/apache/flink/test/checkpointing/AutoRescalingITCase.java
+++ 
b/flink-tests/src/test/java/org/apache/flink/test/checkpointing/AutoRescalingITCase.java
@@ -351,7 +351,7 @@ public class AutoRescalingITCase extends TestLogger {
 
 restClusterClient.updateJobResourceRequirements(jobID, 
builder.build()).join();
 
-waitForRunningTasks(restClusterClient, jobID, parallelism2);
+waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);
 waitForAvailableSlots(restClusterClient, totalSlots - 
parallelism2);
 
 StateSourceBase.canFinishLatch.countDown();
@@ -441,7 +441,8 @@ public class AutoRescalingITCase extends TestLogger {
 
 restClusterClient.updateJobResourceRequirements(jobID, 
builder.build()).join();
 
-waitForRunningTasks(restClusterClient, jobID, parallelism2);
+// Source is parallelism, the flatMapper & Sink is parallelism2
+waitForRunningTasks(restClusterClient, jobID, parallelism + 
parallelism2);
 waitForAvailableSlots(restClusterClient, totalSlots - 
parallelism2);
 
 SubtaskIndexSource.SOURCE_LATCH.trigger();



(flink) branch master updated: [FLINK-34160][table] Migration of FlinkCalcMergeRule to java

2024-02-19 Thread snuyanzin
This is an automated email from the ASF dual-hosted git repository.

snuyanzin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new c2eac7ec85b [FLINK-34160][table] Migration of FlinkCalcMergeRule to 
java
c2eac7ec85b is described below

commit c2eac7ec85bef93fe2b61c028984e704c5a9d126
Author: Sergey Nuyanzin 
AuthorDate: Mon Feb 19 23:30:28 2024 +0100

[FLINK-34160][table] Migration of FlinkCalcMergeRule to java
---
 .../plan/rules/logical/FlinkCalcMergeRule.java | 117 +
 .../plan/rules/logical/FlinkCalcMergeRule.scala|  83 ---
 .../PushFilterInCalcIntoTableSourceRuleTest.java   |   2 +-
 3 files changed, 118 insertions(+), 84 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkCalcMergeRule.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkCalcMergeRule.java
new file mode 100644
index 000..f82a1bcf188
--- /dev/null
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkCalcMergeRule.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.plan.rules.logical;
+
+import 
org.apache.flink.table.planner.plan.nodes.physical.stream.StreamPhysicalCalc;
+import org.apache.flink.table.planner.plan.utils.FlinkRelUtil;
+
+import org.apache.calcite.plan.RelOptRuleCall;
+import org.apache.calcite.plan.RelRule;
+import org.apache.calcite.rel.core.Calc;
+import org.apache.calcite.rel.core.RelFactories;
+import org.apache.calcite.rex.RexOver;
+import org.apache.calcite.rex.RexProgram;
+import org.immutables.value.Value;
+
+/**
+ * This rule is copied from Calcite's 
[[org.apache.calcite.rel.rules.CalcMergeRule]].
+ *
+ * Modification: - Condition in the merged program will be simplified if it 
exists. - If the two
+ * [[Calc]] can merge into one, each non-deterministic [[RexNode]] of bottom 
[[Calc]] should appear
+ * at most once in the project list and filter list of top [[Calc]].
+ */
+
+/**
+ * Planner rule that merges a [[Calc]] onto a [[Calc]].
+ *
+ * The resulting [[Calc]] has the same project list as the upper [[Calc]], 
but expressed in terms
+ * of the lower [[Calc]]'s inputs.
+ */
+@Value.Enclosing
+public class FlinkCalcMergeRule extends 
RelRule {
+
+public static final FlinkCalcMergeRule INSTANCE = 
FlinkCalcMergeRuleConfig.DEFAULT.toRule();
+public static final FlinkCalcMergeRule STREAM_PHYSICAL_INSTANCE =
+FlinkCalcMergeRuleConfig.STREAM_PHYSICAL.toRule();
+
+protected FlinkCalcMergeRule(FlinkCalcMergeRuleConfig config) {
+super(config);
+}
+
+public boolean matches(RelOptRuleCall call) {
+Calc topCalc = call.rel(0);
+Calc bottomCalc = call.rel(1);
+
+// Don't merge a calc which contains windowed aggregates onto a
+// calc. That would effectively be pushing a windowed aggregate down
+// through a filter.
+RexProgram topProgram = topCalc.getProgram();
+if (RexOver.containsOver(topProgram)) {
+return false;
+}
+
+return FlinkRelUtil.isMergeable(topCalc, bottomCalc);
+}
+
+public void onMatch(RelOptRuleCall call) {
+Calc topCalc = call.rel(0);
+Calc bottomCalc = call.rel(1);
+
+Calc newCalc = FlinkRelUtil.merge(topCalc, bottomCalc);
+if (newCalc.getDigest() == bottomCalc.getDigest()) {
+// newCalc is equivalent to bottomCalc,
+// which means that topCalc
+// must be trivial. Take it out of the game.
+call.getPlanner().prune(topCalc);
+}
+call.transformTo(newCalc);
+}
+
+/** Rule configuration. */
+@Value.Immutable(singleton = false)
+public interface FlinkCalcMergeRuleConfig extends RelRule.Config {
+FlinkCalcMergeRule.FlinkCalcMergeRuleConfig DEFAULT =
+ImmutableFlinkCalcMergeRule.FlinkCalcMergeRuleConfig.builder()
+.build()
+   

(flink) branch master updated: [FLINK-27891][Table] Add ARRAY_APPEND and ARRAY_PREPEND functions

2024-02-19 Thread snuyanzin
This is an automated email from the ASF dual-hosted git repository.

snuyanzin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new e644beac8e5 [FLINK-27891][Table] Add ARRAY_APPEND and ARRAY_PREPEND 
functions
e644beac8e5 is described below

commit e644beac8e5ffe71d9b6185c06ed31050e7c5268
Author: Sergey Nuyanzin 
AuthorDate: Mon Feb 19 23:06:21 2024 +0100

[FLINK-27891][Table] Add ARRAY_APPEND and ARRAY_PREPEND functions
---
 docs/data/sql_functions.yml|  6 ++
 docs/data/sql_functions_zh.yml |  6 ++
 flink-python/pyflink/table/expression.py   | 18 +
 .../flink/table/api/internal/BaseExpressions.java  | 26 +++
 .../functions/BuiltInFunctionDefinitions.java  | 29 
 .../strategies/ArrayAppendPrependTypeStrategy.java | 55 +++
 .../strategies/SpecificTypeStrategies.java |  4 ++
 .../ArrayAppendPrependTypeStrategyTest.java| 56 +++
 .../functions/CollectionFunctionsITCase.java   | 80 ++
 .../functions/scalar/ArrayAppendFunction.java  | 56 +++
 .../functions/scalar/ArrayPrependFunction.java | 56 +++
 11 files changed, 392 insertions(+)

diff --git a/docs/data/sql_functions.yml b/docs/data/sql_functions.yml
index 7ebf715e279..7f06199ecb0 100644
--- a/docs/data/sql_functions.yml
+++ b/docs/data/sql_functions.yml
@@ -628,6 +628,9 @@ collection:
   - sql: map ‘[’ value ‘]’
 table: MAP.at(ANY)
 description: Returns the value specified by key value in map.
+  - sql: ARRAY_APPEND(array, element)
+table: array.arrayAppend(element)
+description: Appends an element to the end of the array and returns the 
result. If the array itself is null, the function will return null. If an 
element to add is null, the null element will be added to the end of the array.
   - sql: ARRAY_CONTAINS(haystack, needle)
 table: haystack.arrayContains(needle)
 description: Returns whether the given element exists in an array. 
Checking for null elements in the array is supported. If the array itself is 
null, the function will return null. The given element is cast implicitly to 
the array's element type if necessary.
@@ -637,6 +640,9 @@ collection:
   - sql: ARRAY_POSITION(haystack, needle)
 table: haystack.arrayPosition(needle)
 description: Returns the position of the first occurrence of element in 
the given array as int. Returns 0 if the given value could not be found in the 
array. Returns null if either of the arguments are null. And this is not zero 
based, but 1-based index. The first element in the array has index 1.
+  - sql: ARRAY_PREPEND(array, element)
+table: array.arrayPrepend(element)
+description: Appends an element to the beginning of the array and returns 
the result. If the array itself is null, the function will return null. If an 
element to add is null, the null element will be added to the beginning of the 
array.
   - sql: ARRAY_REMOVE(haystack, needle)
 table: haystack.arrayRemove(needle)
 description: Removes all elements that equal to element from array. If the 
array itself is null, the function will return null. Keeps ordering of elements.
diff --git a/docs/data/sql_functions_zh.yml b/docs/data/sql_functions_zh.yml
index b0ba79f12b3..b67ac949835 100644
--- a/docs/data/sql_functions_zh.yml
+++ b/docs/data/sql_functions_zh.yml
@@ -763,6 +763,9 @@ collection:
   - sql: map ‘[’ value ‘]’
 table: MAP.at(ANY)
 description: 返回 map 中指定 key 对应的值。
+  - sql: ARRAY_APPEND(array, element)
+table: array.arrayAppend(element)
+description: Appends an element to the end of the array and returns the 
result. If the array itself is null, the function will return null. If an 
element to add is null, the null element will be added to the end of the array.
   - sql: ARRAY_CONTAINS(haystack, needle)
 table: haystack.arrayContains(needle)
 description: 返回是否数组 haystack 中包含指定元素 needle。支持检查数组中是否存在 null。 
如果数组本身是null,函数会返回 null。如果需要,指定元素会隐式转换为数组的元素类型。
@@ -772,6 +775,9 @@ collection:
   - sql: ARRAY_POSITION(haystack, needle)
 table: haystack.arrayPosition(needle)
 description: 返回数组中第一次出现 needle 元素的位置,返回类型为 int。如果数组中不存在该元素则返回 
0。如果两个参数中任何一个参数为 null,则返回 null。序号不是从 0 开始,而是从 1 开始,第一个元素的序号为 1。
+  - sql: ARRAY_PREPEND(array, element)
+table: array.arrayPrepend(element)
+description: Appends an element to the beginning of the array and returns 
the result. If the array itself is null, the function will return null. If an 
element to add is null, the null element will be added to the beginning of the 
array.
   - sql: ARRAY_REMOVE(haystack, needle)
 table: haystack.arrayRemove(needle)
 description: 删除数组中所有和元素 needle 相等的元素。如果数组是 null,则返回 null。函数会保留数组中元素的顺序。
diff --git a/flink-python/pyflink/table/expression.py 
b/flink-python/pyflink/table/expression.py
index 84772c173

svn commit: r67433 - /dev/flink/flink-connector-mongodb-1.1.0-rc1/

2024-02-19 Thread leonard
Author: leonard
Date: Mon Feb 19 16:07:00 2024
New Revision: 67433

Log:
[mongodb] Deleted mongodb 1.1.0 RC1

Removed:
dev/flink/flink-connector-mongodb-1.1.0-rc1/



(flink-web) branch asf-site updated (c3653a465 -> f13c0e0ef)

2024-02-19 Thread leonard
This is an automated email from the ASF dual-hosted git repository.

leonard pushed a change to branch asf-site
in repository https://gitbox.apache.org/repos/asf/flink-web.git


from c3653a465 Rebuild website
 new d502bb2bb Add MongoDB connector v1.1.0 release
 new f13c0e0ef Rebuild website

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../2014/08/26/apache-flink-0.6-available/index.html|  2 +-
 .../2014/09/26/apache-flink-0.6.1-available/index.html  |  2 +-
 content/2014/10/03/upcoming-events/index.html   |  2 +-
 .../2014/11/04/apache-flink-0.7.0-available/index.html  |  2 +-
 .../2014/11/18/hadoop-compatibility-in-flink/index.html |  2 +-
 .../06/december-2014-in-the-flink-community/index.html  |  2 +-
 .../2015/01/21/apache-flink-0.8.0-available/index.html  |  2 +-
 .../04/january-2015-in-the-flink-community/index.html   |  2 +-
 .../2015/02/09/introducing-flink-streaming/index.html   |  2 +-
 .../02/february-2015-in-the-flink-community/index.html  |  2 +-
 .../peeking-into-apache-flinks-engine-room/index.html   |  2 +-
 .../04/07/march-2015-in-the-flink-community/index.html  |  2 +-
 .../index.html  |  2 +-
 .../2015/05/11/juggling-with-bits-and-bytes/index.html  |  2 +-
 .../05/14/april-2015-in-the-flink-community/index.html  |  2 +-
 .../2015/06/24/announcing-apache-flink-0.9.0/index.html |  2 +-
 .../index.html  |  2 +-
 .../2015/09/01/apache-flink-0.9.1-available/index.html  |  2 +-
 .../2015/09/03/announcing-flink-forward-2015/index.html |  2 +-
 .../index.html  |  2 +-
 .../11/16/announcing-apache-flink-0.10.0/index.html |  2 +-
 content/2015/11/27/flink-0.10.1-released/index.html |  2 +-
 .../index.html  |  2 +-
 .../index.html  |  2 +-
 .../index.html  |  2 +-
 content/2016/02/11/flink-0.10.2-released/index.html |  2 +-
 .../2016/03/08/announcing-apache-flink-1.0.0/index.html |  2 +-
 content/2016/04/06/flink-1.0.1-released/index.html  |  2 +-
 .../index.html  |  2 +-
 .../index.html  |  2 +-
 content/2016/04/22/flink-1.0.2-released/index.html  |  2 +-
 content/2016/05/11/flink-1.0.3-released/index.html  |  2 +-
 .../index.html  |  2 +-
 .../2016/08/04/announcing-apache-flink-1.1.0/index.html |  2 +-
 content/2016/08/04/flink-1.1.1-released/index.html  |  2 +-
 .../index.html  |  2 +-
 .../2016/09/05/apache-flink-1.1.2-released/index.html   |  2 +-
 .../2016/10/12/apache-flink-1.1.3-released/index.html   |  2 +-
 .../19/apache-flink-in-2016-year-in-review/index.html   |  2 +-
 .../2016/12/21/apache-flink-1.1.4-released/index.html   |  2 +-
 .../2017/02/06/announcing-apache-flink-1.2.0/index.html |  2 +-
 .../2017/03/23/apache-flink-1.1.5-released/index.html   |  2 +-
 .../index.html  |  2 +-
 .../30/continuous-queries-on-dynamic-tables/index.html  |  2 +-
 .../2017/04/26/apache-flink-1.2.1-released/index.html   |  2 +-
 .../index.html  |  2 +-
 .../apache-flink-1.3.0-release-announcement/index.html  |  2 +-
 .../2017/06/23/apache-flink-1.3.1-released/index.html   |  2 +-
 .../index.html  |  2 +-
 .../2017/08/05/apache-flink-1.3.2-released/index.html   |  2 +-
 .../index.html  |  2 +-
 .../apache-flink-1.4.0-release-announcement/index.html  |  2 +-
 .../21/apache-flink-in-2017-year-in-review/index.html   |  2 +-
 .../index.html  |  2 +-
 .../2018/02/15/apache-flink-1.4.1-released/index.html   |  2 +-
 .../index.html  |  2 +-
 .../2018/03/08/apache-flink-1.4.2-released/index.html   |  2 +-
 .../2018/03/15/apache-flink-1.3.3-released/index.html   |  2 +-
 .../apache-flink-1.5.0-release-announcement/index.html  |  2 +-
 .../2018/07/12/apache-flink-1.5.1-released/index.html   |  2 +-
 .../2018/07/31/apache-flink-1.5.2-released/index.html   |  2 +-
 .../apache-flink-1.6.0-release-announcement/index.html  |  2 +-
 .../2018/08/21/apache-flink-1.5.3-released/index.html   |  2 +-
 .../2018/09/20/apache-flink-1.5.4-released/index.html   |  2 +-
 .../2018/09/20/apache-flink-1.6.1-released/index.html   |  2 +-
 .../2018/10/29/apache-flink-1.5.5-released/index.html   |  2 +-
 .../2018/10/29/apache-flink-1.6.2-released/index.html   |  2 +-
 .../apache-flink-1.7.0-release-announcement/index.html  |  2 +-
 .../2018/12/21/apache

(flink-web) 01/02: Add MongoDB connector v1.1.0 release

2024-02-19 Thread leonard
This is an automated email from the ASF dual-hosted git repository.

leonard pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/flink-web.git

commit d502bb2bbc8100c2015e477f1e990b073a8a52de
Author: Leonard Xu 
AuthorDate: Mon Jan 29 20:30:25 2024 +0800

Add MongoDB connector v1.1.0 release
---
 docs/data/flink_connectors.yml | 10 +-
 docs/data/release_archive.yml  |  5 +
 2 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/docs/data/flink_connectors.yml b/docs/data/flink_connectors.yml
index 4a44865bb..99f30628f 100644
--- a/docs/data/flink_connectors.yml
+++ b/docs/data/flink_connectors.yml
@@ -93,8 +93,8 @@ rabbitmq:
   compatibility: ["1.16.x", "1.17.x"]
 
 mongodb:
-  name: "Apache Flink MongoDB Connector 1.0.2"
-  source_release_url: 
"https://www.apache.org/dyn/closer.lua/flink/flink-connector-mongodb-1.0.2/flink-connector-mongodb-1.0.2-src.tgz";
-  source_release_asc_url: 
"https://downloads.apache.org/flink/flink-connector-mongodb-1.0.2/flink-connector-mongodb-1.0.2-src.tgz.asc";
-  source_release_sha512_url: 
"https://downloads.apache.org/flink/flink-connector-mongodb-1.0.2/flink-connector-mongodb-1.0.2-src.tgz.sha512";
-  compatibility: ["1.16.x", "1.17.x"]
+  name: "Apache Flink MongoDB Connector 1.1.0"
+  source_release_url: 
"https://www.apache.org/dyn/closer.lua/flink/flink-connector-mongodb-1.1.0/flink-connector-mongodb-1.1.0-src.tgz";
+  source_release_asc_url: 
"https://downloads.apache.org/flink/flink-connector-mongodb-1.1.0/flink-connector-mongodb-1.1.0-src.tgz.asc";
+  source_release_sha512_url: 
"https://downloads.apache.org/flink/flink-connector-mongodb-1.1.0/flink-connector-mongodb-1.1.0-src.tgz.sha512";
+  compatibility: ["1.17.x", "1.18.x"]
diff --git a/docs/data/release_archive.yml b/docs/data/release_archive.yml
index 9ea3b7f1f..611b89b01 100644
--- a/docs/data/release_archive.yml
+++ b/docs/data/release_archive.yml
@@ -569,6 +569,11 @@ release_archive:
   version: 3.1.0
   release_date: 2024-02-07
   filename: "kafka"
+- name: "Flink MongoDB Connector"
+  connector: "mongodb"
+  version: 1.1.0
+  release_date: 2024-02-19
+  filename: "mongodb"
 
   flink_shaded:
 -



(flink) branch release-1.18 updated (349e8705135 -> a9ce499e678)

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a change to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


from 349e8705135 [hotfix] Integrate mongodb v1.1 docs (#24332)
 new 943eee6a441 [hotfix][test] Removes duplicate Apache header
 new a9ce499e678 [FLINK-22765][test] Hardens 
ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../apache/flink/testutils/ClassLoaderUtils.java   | 41 +-
 .../flink/runtime/util/ExceptionUtilsITCase.java   | 87 +++---
 .../org.junit.jupiter.api.extension.Extension  | 18 -
 3 files changed, 99 insertions(+), 47 deletions(-)



(flink) 02/02: [FLINK-22765][test] Hardens ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git

commit a9ce499e6780dd1fc5c6a19bf0d8b584b2204379
Author: Matthias Pohl 
AuthorDate: Thu Feb 15 15:19:15 2024 +0100

[FLINK-22765][test] Hardens 
ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError

This test started to fail quite regularly in JDK21 (but rarely also 
appeared with other JDKs). The problem was that the low heap size could have 
caused an OutOfMemoryError to appear when compiling the dummy classes. An OOM 
in the compilation phase results in a different error message being printed to 
stdout that wasn't captured by the test.

The solution is to pre-compile the classes upfront (with the normal heap 
size). The test main method will only load the classes. No compilation is 
necessary.
---
 .../apache/flink/testutils/ClassLoaderUtils.java   | 41 +-
 .../flink/runtime/util/ExceptionUtilsITCase.java   | 87 +++---
 2 files changed, 99 insertions(+), 29 deletions(-)

diff --git 
a/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java 
b/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
index 25f5ea1cef7..7ece6f58046 100644
--- a/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
+++ b/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
@@ -18,6 +18,8 @@
 
 package org.apache.flink.testutils;
 
+import org.apache.flink.util.Preconditions;
+
 import javax.annotation.Nullable;
 import javax.tools.JavaCompiler;
 import javax.tools.ToolProvider;
@@ -40,6 +42,7 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.UUID;
 
 /** Utilities to create class loaders. */
@@ -142,6 +145,15 @@ public class ClassLoaderUtils {
 return this;
 }
 
+public ClassLoaderBuilder addClass(String className) {
+Preconditions.checkState(
+new File(root, className + ".java").exists(),
+"The class %s was added without any source code being 
present.",
+className);
+
+return addClass(className, null);
+}
+
 public ClassLoaderBuilder addClass(String className, String source) {
 String oldValue = classes.putIfAbsent(className, source);
 
@@ -158,7 +170,7 @@ public class ClassLoaderUtils {
 return this;
 }
 
-public URLClassLoader build() throws IOException {
+public void generateSourcesAndCompile() throws IOException {
 for (Map.Entry classInfo : classes.entrySet()) {
 writeAndCompile(root, createFileName(classInfo.getKey()), 
classInfo.getValue());
 }
@@ -171,10 +183,37 @@ public class ClassLoaderUtils {
 for (Map.Entry resource : resources.entrySet()) {
 writeSourceFile(root, resource.getKey(), resource.getValue());
 }
+}
+
+public URLClassLoader buildWithoutCompilation() throws 
MalformedURLException {
+final int generatedSourceClassesCount =
+Objects.requireNonNull(
+root.listFiles(
+(dir, name) -> {
+if (!name.endsWith(".java")) {
+return false;
+}
+final String derivedClassName =
+name.substring(0, 
name.lastIndexOf('.'));
+return 
classes.containsKey(derivedClassName);
+}))
+.length;
+Preconditions.checkState(
+generatedSourceClassesCount == classes.size(),
+"The generated Java sources in %s (%s) do not match the 
classes in this %s (%s).",
+root.getAbsolutePath(),
+generatedSourceClassesCount,
+ClassLoaderBuilder.class.getSimpleName(),
+classes.size());
 
 return createClassLoader(root, parent);
 }
 
+public URLClassLoader build() throws IOException {
+generateSourcesAndCompile();
+return buildWithoutCompilation();
+}
+
 private String createFileName(String className) {
 return className + ".java";
 }
diff --git 
a/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java
 
b/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java
index ef151d100b4..e383f7667be 100644
--- 
a/flink-tests/src/test/java/org/apache/flink

(flink) 01/02: [hotfix][test] Removes duplicate Apache header

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 943eee6a441077fdfce4780c7132642df6784966
Author: Matthias Pohl 
AuthorDate: Thu Feb 15 14:20:31 2024 +0100

[hotfix][test] Removes duplicate Apache header
---
 .../services/org.junit.jupiter.api.extension.Extension | 18 --
 1 file changed, 18 deletions(-)

diff --git 
a/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
 
b/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
index b1cb0eb2363..0b74fd4603c 100644
--- 
a/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
+++ 
b/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
@@ -1,21 +1,3 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.



(flink) 02/02: [FLINK-22765][test] Hardens ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError in JDK21

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit ec73bc5cf5cd1e37354e0dfe19ed778019bec638
Author: Matthias Pohl 
AuthorDate: Thu Feb 15 15:19:15 2024 +0100

[FLINK-22765][test] Hardens 
ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError in JDK21

This test started to fail quite regularly in JDK21. The problem was that 
the low heap size could have caused an OutOfMemoryError to appear when 
compiling the dummy classes. An OOM in the compilation phase results in a 
different error message being printed to stdout that wasn't captured by the 
test.

The solution is to pre-compile the classes upfront (with the normal heap 
size). The test main method will only load the classes. No compilation is 
necessary.
---
 .../apache/flink/testutils/ClassLoaderUtils.java   | 41 +-
 .../flink/runtime/util/ExceptionUtilsITCase.java   | 87 +++---
 2 files changed, 99 insertions(+), 29 deletions(-)

diff --git 
a/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java 
b/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
index 25f5ea1cef7..7ece6f58046 100644
--- a/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
+++ b/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
@@ -18,6 +18,8 @@
 
 package org.apache.flink.testutils;
 
+import org.apache.flink.util.Preconditions;
+
 import javax.annotation.Nullable;
 import javax.tools.JavaCompiler;
 import javax.tools.ToolProvider;
@@ -40,6 +42,7 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.UUID;
 
 /** Utilities to create class loaders. */
@@ -142,6 +145,15 @@ public class ClassLoaderUtils {
 return this;
 }
 
+public ClassLoaderBuilder addClass(String className) {
+Preconditions.checkState(
+new File(root, className + ".java").exists(),
+"The class %s was added without any source code being 
present.",
+className);
+
+return addClass(className, null);
+}
+
 public ClassLoaderBuilder addClass(String className, String source) {
 String oldValue = classes.putIfAbsent(className, source);
 
@@ -158,7 +170,7 @@ public class ClassLoaderUtils {
 return this;
 }
 
-public URLClassLoader build() throws IOException {
+public void generateSourcesAndCompile() throws IOException {
 for (Map.Entry classInfo : classes.entrySet()) {
 writeAndCompile(root, createFileName(classInfo.getKey()), 
classInfo.getValue());
 }
@@ -171,10 +183,37 @@ public class ClassLoaderUtils {
 for (Map.Entry resource : resources.entrySet()) {
 writeSourceFile(root, resource.getKey(), resource.getValue());
 }
+}
+
+public URLClassLoader buildWithoutCompilation() throws 
MalformedURLException {
+final int generatedSourceClassesCount =
+Objects.requireNonNull(
+root.listFiles(
+(dir, name) -> {
+if (!name.endsWith(".java")) {
+return false;
+}
+final String derivedClassName =
+name.substring(0, 
name.lastIndexOf('.'));
+return 
classes.containsKey(derivedClassName);
+}))
+.length;
+Preconditions.checkState(
+generatedSourceClassesCount == classes.size(),
+"The generated Java sources in %s (%s) do not match the 
classes in this %s (%s).",
+root.getAbsolutePath(),
+generatedSourceClassesCount,
+ClassLoaderBuilder.class.getSimpleName(),
+classes.size());
 
 return createClassLoader(root, parent);
 }
 
+public URLClassLoader build() throws IOException {
+generateSourcesAndCompile();
+return buildWithoutCompilation();
+}
+
 private String createFileName(String className) {
 return className + ".java";
 }
diff --git 
a/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java
 
b/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java
index ef151d100b4..e383f7667be 100644
--- 
a/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase

(flink) 01/02: [hotfix][test] Removes duplicate Apache header

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit f903ce24a93d72b64e91f132c7e935b67b4967bf
Author: Matthias Pohl 
AuthorDate: Thu Feb 15 14:20:31 2024 +0100

[hotfix][test] Removes duplicate Apache header
---
 .../services/org.junit.jupiter.api.extension.Extension | 18 --
 1 file changed, 18 deletions(-)

diff --git 
a/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
 
b/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
index b1cb0eb2363..0b74fd4603c 100644
--- 
a/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
+++ 
b/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
@@ -1,21 +1,3 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.



(flink) branch release-1.19 updated (c6c1f7984d1 -> ec73bc5cf5c)

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a change to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


from c6c1f7984d1 [FLINK-34418][ci] Mounts /mnt folder to /root
 new f903ce24a93 [hotfix][test] Removes duplicate Apache header
 new ec73bc5cf5c [FLINK-22765][test] Hardens 
ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError in JDK21

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../apache/flink/testutils/ClassLoaderUtils.java   | 41 +-
 .../flink/runtime/util/ExceptionUtilsITCase.java   | 87 +++---
 .../org.junit.jupiter.api.extension.Extension  | 18 -
 3 files changed, 99 insertions(+), 47 deletions(-)



(flink) branch master updated (6c9ac5c6ae0 -> 4645880039a)

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 6c9ac5c6ae0 [FLINK-33495][FLINK-33496] Add DISTRIBUTED BY clause for 
CREATE TABLE
 new 7e260aa781c [hotfix][test] Removes duplicate Apache header
 new f43972aa839 [hotfix][test] Replaces String#replaceAll with 
String#replace in ClassLoaderUtils to avoid regex compilation
 new ac910542ccb [FLINK-22765][test] Hardens 
ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError in JDK21
 new 4645880039a [hotfix][test] Migrates ExceptionUtilsITCase to 
JUnit5/AssertJ

The 4 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../apache/flink/testutils/ClassLoaderUtils.java   |  43 +++-
 .../flink/runtime/util/ExceptionUtilsITCase.java   | 111 +
 .../org.junit.jupiter.api.extension.Extension  |  18 
 3 files changed, 110 insertions(+), 62 deletions(-)



(flink) 04/04: [hotfix][test] Migrates ExceptionUtilsITCase to JUnit5/AssertJ

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 4645880039a3a9ef6fe7ee83999cfe5e56d54f94
Author: Matthias Pohl 
AuthorDate: Thu Feb 15 16:59:05 2024 +0100

[hotfix][test] Migrates ExceptionUtilsITCase to JUnit5/AssertJ
---
 .../flink/runtime/util/ExceptionUtilsITCase.java   | 50 ++
 1 file changed, 23 insertions(+), 27 deletions(-)

diff --git 
a/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java
 
b/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java
index e383f7667be..4dde1f61a3d 100644
--- 
a/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java
+++ 
b/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java
@@ -23,11 +23,9 @@ import 
org.apache.flink.test.util.TestProcessBuilder.TestProcess;
 import org.apache.flink.testutils.ClassLoaderUtils;
 import org.apache.flink.testutils.ClassLoaderUtils.ClassLoaderBuilder;
 import org.apache.flink.util.ExceptionUtils;
-import org.apache.flink.util.TestLogger;
 
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
 
 import java.io.File;
 import java.io.IOException;
@@ -37,38 +35,34 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.text.IsEmptyString.isEmptyString;
-import static org.junit.Assert.assertThat;
+import static org.assertj.core.api.Assertions.assertThat;
 
 /** Tests for {@link ExceptionUtils} which require to spawn JVM process and 
set JVM memory args. */
-public class ExceptionUtilsITCase extends TestLogger {
+class ExceptionUtilsITCase {
 private static final int DIRECT_MEMORY_SIZE = 10 * 1024; // 10Kb
 private static final int DIRECT_MEMORY_ALLOCATION_PAGE_SIZE = 1024; // 1Kb
 private static final int DIRECT_MEMORY_PAGE_NUMBER =
 DIRECT_MEMORY_SIZE / DIRECT_MEMORY_ALLOCATION_PAGE_SIZE;
 private static final long INITIAL_BIG_METASPACE_SIZE = 128 * (1 << 20); // 
128Mb
 
-@ClassRule public static final TemporaryFolder TEMPORARY_FOLDER = new 
TemporaryFolder();
-
 @Test
-public void testIsDirectOutOfMemoryError() throws IOException, 
InterruptedException {
+void testIsDirectOutOfMemoryError() throws IOException, 
InterruptedException {
 String className = DummyDirectAllocatingProgram.class.getName();
 RunResult result = run(className, DIRECT_MEMORY_SIZE, -1);
-assertThat(result.getErrorOut() + "|" + result.getStandardOut(), 
is("|"));
+assertThat(result.getStandardOut()).isEmpty();
+assertThat(result.getErrorOut()).isEmpty();
 }
 
 @Test
-public void testIsMetaspaceOutOfMemoryError() throws IOException, 
InterruptedException {
-String className = DummyClassLoadingProgram.class.getName();
-final File compiledClassesFolder = TEMPORARY_FOLDER.getRoot();
+void testIsMetaspaceOutOfMemoryError(@TempDir File 
temporaryFolderForCompiledClasses)
+throws IOException, InterruptedException {
 final int classCount = 10;
 
 // compile the classes first
 final String sourcePattern =
 "public class %s { @Override public String toString() { return 
\"dummy\"; } }";
 final ClassLoaderBuilder classLoaderBuilder =
-ClassLoaderUtils.withRoot(compiledClassesFolder);
+ClassLoaderUtils.withRoot(temporaryFolderForCompiledClasses);
 for (int i = 0; i < classCount; i++) {
 final String dummyClassName = "DummyClass" + i;
 final String source = String.format(sourcePattern, dummyClassName);
@@ -77,33 +71,35 @@ public class ExceptionUtilsITCase extends TestLogger {
 classLoaderBuilder.generateSourcesAndCompile();
 
 // load only one class and record required Metaspace
-RunResult normalOut =
+final String className = DummyClassLoadingProgram.class.getName();
+final RunResult initialRun =
 run(
 className,
 -1,
 INITIAL_BIG_METASPACE_SIZE,
 1,
-compiledClassesFolder.getAbsolutePath());
+temporaryFolderForCompiledClasses.getAbsolutePath());
 
 // multiply the Metaspace size to stabilize the test - relying solely 
on the Metaspace size
 // of the initial run might cause OOMs to appear in the main thread 
(due to JVM-specific
 // artifacts being loaded)
-long okMetaspace = 3 * Long.parseLong(normalOut.getStandardOut());
-assertThat("No error is expected here.", normalOut.getErrorOut(), 
is(""));
+long okMetaspace =

(flink) 02/04: [hotfix][test] Replaces String#replaceAll with String#replace in ClassLoaderUtils to avoid regex compilation

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit f43972aa839aa955b7aec12a6dbfd5d9d32699ea
Author: Matthias Pohl 
AuthorDate: Mon Feb 19 09:58:33 2024 +0100

[hotfix][test] Replaces String#replaceAll with String#replace in 
ClassLoaderUtils to avoid regex compilation
---
 .../src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git 
a/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java 
b/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
index 25f5ea1cef7..2a3f320e107 100644
--- a/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
+++ b/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
@@ -47,7 +47,7 @@ public class ClassLoaderUtils {
 
 public static URLClassLoader compileAndLoadJava(File root, String 
filename, String source)
 throws IOException {
-return withRoot(root).addClass(filename.replaceAll("\\.java", ""), 
source).build();
+return withRoot(root).addClass(filename.replace(".java", ""), 
source).build();
 }
 
 private static URLClassLoader createClassLoader(File root, ClassLoader 
parent)



(flink) 03/04: [FLINK-22765][test] Hardens ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError in JDK21

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit ac910542ccb78108cb130fe2368e97557afd9cc6
Author: Matthias Pohl 
AuthorDate: Thu Feb 15 15:19:15 2024 +0100

[FLINK-22765][test] Hardens 
ExceptionUtilsITCase#testIsMetaspaceOutOfMemoryError in JDK21

This test started to fail quite regularly in JDK21. The problem was that 
the low heap size could have caused an OutOfMemoryError to appear when 
compiling the dummy classes. An OOM in the compilation phase results in a 
different error message being printed to stdout that wasn't captured by the 
test.

The solution is to pre-compile the classes upfront (with the normal heap 
size). The test main method will only load the classes. No compilation is 
necessary.
---
 .../apache/flink/testutils/ClassLoaderUtils.java   | 41 +-
 .../flink/runtime/util/ExceptionUtilsITCase.java   | 87 +++---
 2 files changed, 99 insertions(+), 29 deletions(-)

diff --git 
a/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java 
b/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
index 2a3f320e107..c4ca8a2b529 100644
--- a/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
+++ b/flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
@@ -18,6 +18,8 @@
 
 package org.apache.flink.testutils;
 
+import org.apache.flink.util.Preconditions;
+
 import javax.annotation.Nullable;
 import javax.tools.JavaCompiler;
 import javax.tools.ToolProvider;
@@ -40,6 +42,7 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.UUID;
 
 /** Utilities to create class loaders. */
@@ -142,6 +145,15 @@ public class ClassLoaderUtils {
 return this;
 }
 
+public ClassLoaderBuilder addClass(String className) {
+Preconditions.checkState(
+new File(root, className + ".java").exists(),
+"The class %s was added without any source code being 
present.",
+className);
+
+return addClass(className, null);
+}
+
 public ClassLoaderBuilder addClass(String className, String source) {
 String oldValue = classes.putIfAbsent(className, source);
 
@@ -158,7 +170,7 @@ public class ClassLoaderUtils {
 return this;
 }
 
-public URLClassLoader build() throws IOException {
+public void generateSourcesAndCompile() throws IOException {
 for (Map.Entry classInfo : classes.entrySet()) {
 writeAndCompile(root, createFileName(classInfo.getKey()), 
classInfo.getValue());
 }
@@ -171,10 +183,37 @@ public class ClassLoaderUtils {
 for (Map.Entry resource : resources.entrySet()) {
 writeSourceFile(root, resource.getKey(), resource.getValue());
 }
+}
+
+public URLClassLoader buildWithoutCompilation() throws 
MalformedURLException {
+final int generatedSourceClassesCount =
+Objects.requireNonNull(
+root.listFiles(
+(dir, name) -> {
+if (!name.endsWith(".java")) {
+return false;
+}
+final String derivedClassName =
+name.substring(0, 
name.lastIndexOf('.'));
+return 
classes.containsKey(derivedClassName);
+}))
+.length;
+Preconditions.checkState(
+generatedSourceClassesCount == classes.size(),
+"The generated Java sources in %s (%s) do not match the 
classes in this %s (%s).",
+root.getAbsolutePath(),
+generatedSourceClassesCount,
+ClassLoaderBuilder.class.getSimpleName(),
+classes.size());
 
 return createClassLoader(root, parent);
 }
 
+public URLClassLoader build() throws IOException {
+generateSourcesAndCompile();
+return buildWithoutCompilation();
+}
+
 private String createFileName(String className) {
 return className + ".java";
 }
diff --git 
a/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java
 
b/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java
index ef151d100b4..e383f7667be 100644
--- 
a/flink-tests/src/test/java/org/apache/flink/runtime/util/ExceptionUtilsITCase.java

(flink) 01/04: [hotfix][test] Removes duplicate Apache header

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 7e260aa781c9d13e445eb59e94e9e981f77166a4
Author: Matthias Pohl 
AuthorDate: Thu Feb 15 14:20:31 2024 +0100

[hotfix][test] Removes duplicate Apache header
---
 .../services/org.junit.jupiter.api.extension.Extension | 18 --
 1 file changed, 18 deletions(-)

diff --git 
a/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
 
b/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
index b1cb0eb2363..0b74fd4603c 100644
--- 
a/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
+++ 
b/flink-tests/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension
@@ -1,21 +1,3 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.



(flink-connector-mongodb) 02/02: [hotfix][test][connectors/mongodb] Update MongoWriterITCase to be compatible with updated SinkV2 interfaces

2024-02-19 Thread martijnvisser
This is an automated email from the ASF dual-hosted git repository.

martijnvisser pushed a commit to branch v1.0
in repository https://gitbox.apache.org/repos/asf/flink-connector-mongodb.git

commit e2aa8b332552853f2e59f7f46af3cbe9f7573741
Author: Jiabao Sun 
AuthorDate: Mon Jan 29 09:50:00 2024 +0800

[hotfix][test][connectors/mongodb] Update MongoWriterITCase to be 
compatible with updated SinkV2 interfaces

This closes #22.
---
 .../mongodb/sink/writer/MongoWriterITCase.java | 33 +++---
 1 file changed, 16 insertions(+), 17 deletions(-)

diff --git 
a/flink-connector-mongodb/src/test/java/org/apache/flink/connector/mongodb/sink/writer/MongoWriterITCase.java
 
b/flink-connector-mongodb/src/test/java/org/apache/flink/connector/mongodb/sink/writer/MongoWriterITCase.java
index 84767e8..bd3ca66 100644
--- 
a/flink-connector-mongodb/src/test/java/org/apache/flink/connector/mongodb/sink/writer/MongoWriterITCase.java
+++ 
b/flink-connector-mongodb/src/test/java/org/apache/flink/connector/mongodb/sink/writer/MongoWriterITCase.java
@@ -17,8 +17,9 @@
 
 package org.apache.flink.connector.mongodb.sink.writer;
 
+import org.apache.flink.connector.base.DeliveryGuarantee;
 import org.apache.flink.connector.base.sink.writer.TestSinkInitContext;
-import org.apache.flink.connector.mongodb.common.config.MongoConnectionOptions;
+import org.apache.flink.connector.mongodb.sink.MongoSink;
 import org.apache.flink.connector.mongodb.sink.config.MongoWriteOptions;
 import org.apache.flink.connector.mongodb.sink.writer.context.MongoSinkContext;
 import 
org.apache.flink.connector.mongodb.sink.writer.serializer.MongoSerializationSchema;
@@ -49,6 +50,7 @@ import org.testcontainers.containers.MongoDBContainer;
 import org.testcontainers.junit.jupiter.Container;
 import org.testcontainers.junit.jupiter.Testcontainers;
 
+import java.io.IOException;
 import java.util.Optional;
 
 import static 
org.apache.flink.connector.mongodb.testutils.MongoTestUtil.assertThatIdsAreNotWritten;
@@ -235,7 +237,7 @@ public class MongoWriterITCase {
 MongoWriteOptions.builder()
 .setBatchSize(batchSize)
 .setBatchIntervalMs(batchIntervalMs)
-.setMaxRetries(0)
+.setDeliveryGuarantee(DeliveryGuarantee.NONE)
 .build();
 
 MongoSerializationSchema testSerializationSchema =
@@ -269,7 +271,8 @@ public class MongoWriterITCase {
 }
 
 private static MongoWriter createWriter(
-String collection, int batchSize, long batchIntervalMs, boolean 
flushOnCheckpoint) {
+String collection, int batchSize, long batchIntervalMs, boolean 
flushOnCheckpoint)
+throws IOException {
 return createWriter(
 collection,
 batchSize,
@@ -283,28 +286,24 @@ public class MongoWriterITCase {
 int batchSize,
 long batchIntervalMs,
 boolean flushOnCheckpoint,
-MongoSerializationSchema serializationSchema) {
+MongoSerializationSchema serializationSchema)
+throws IOException {
 
-MongoConnectionOptions connectionOptions =
-MongoConnectionOptions.builder()
+MongoSink mongoSink =
+MongoSink.builder()
 .setUri(MONGO_CONTAINER.getConnectionString())
 .setDatabase(TEST_DATABASE)
 .setCollection(collection)
-.build();
-
-MongoWriteOptions writeOptions =
-MongoWriteOptions.builder()
 .setBatchSize(batchSize)
 .setBatchIntervalMs(batchIntervalMs)
-.setMaxRetries(0)
+.setDeliveryGuarantee(
+flushOnCheckpoint
+? DeliveryGuarantee.AT_LEAST_ONCE
+: DeliveryGuarantee.NONE)
+.setSerializationSchema(serializationSchema)
 .build();
 
-return new MongoWriter<>(
-connectionOptions,
-writeOptions,
-flushOnCheckpoint,
-sinkInitContext,
-serializationSchema);
+return (MongoWriter) mongoSink.createWriter(sinkInitContext);
 }
 
 private static Document buildMessage(int id) {



(flink-connector-mongodb) 01/02: [FLINK-33899][connectors/mongodb] Support Java 17 and Java 21 for mongodb connector

2024-02-19 Thread martijnvisser
This is an automated email from the ASF dual-hosted git repository.

martijnvisser pushed a commit to branch v1.0
in repository https://gitbox.apache.org/repos/asf/flink-connector-mongodb.git

commit aa7a9094e527fb83524fe681fcf089ffa0a3c794
Author: Jiabao Sun 
AuthorDate: Mon Jan 29 17:18:32 2024 +0800

[FLINK-33899][connectors/mongodb] Support Java 17 and Java 21 for mongodb 
connector

This closes #21.
---
 .github/workflows/push_pr.yml   |  9 -
 .github/workflows/weekly.yml| 13 +++--
 flink-connector-mongodb/pom.xml |  9 +
 pom.xml |  3 +++
 4 files changed, 27 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/push_pr.yml b/.github/workflows/push_pr.yml
index fd4e0af..9d349c0 100644
--- a/.github/workflows/push_pr.yml
+++ b/.github/workflows/push_pr.yml
@@ -25,7 +25,14 @@ jobs:
   compile_and_test:
 strategy:
   matrix:
-flink: [1.16-SNAPSHOT, 1.17-SNAPSHOT, 1.18-SNAPSHOT, 1.19-SNAPSHOT]
+flink: [ 1.16-SNAPSHOT, 1.17-SNAPSHOT ]
+jdk: [ '8, 11' ]
+include:
+  - flink: 1.18-SNAPSHOT
+jdk: '8, 11, 17'
+  - flink: 1.19-SNAPSHOT
+jdk: '8, 11, 17, 21'
 uses: apache/flink-connector-shared-utils/.github/workflows/ci.yml@ci_utils
 with:
   flink_version: ${{ matrix.flink }}
+  jdk_version: ${{ matrix.jdk }}
diff --git a/.github/workflows/weekly.yml b/.github/workflows/weekly.yml
index e2db295..a70c3b8 100644
--- a/.github/workflows/weekly.yml
+++ b/.github/workflows/weekly.yml
@@ -34,25 +34,26 @@ jobs:
   branch: main
 }, {
   flink: 1.18-SNAPSHOT,
+  jdk: '8, 11, 17',
   branch: main
 }, {
   flink: 1.19-SNAPSHOT,
+  jdk: '8, 11, 17, 21',
   branch: main
 }, {
-  flink: 1.16.2,
+  flink: 1.16.3,
   branch: v1.0
 }, {
-  flink: 1.17.1,
-  branch: v1.0
-},{
-  flink: 1.18.0,
+  flink: 1.17.2,
   branch: v1.0
 }, {
-  flink: 1.19-SNAPSHOT,
+  flink: 1.18.1,
+  jdk: '8, 11, 17',
   branch: v1.0
 }]
 uses: apache/flink-connector-shared-utils/.github/workflows/ci.yml@ci_utils
 with:
   flink_version: ${{ matrix.flink_branches.flink }}
   connector_branch: ${{ matrix.flink_branches.branch }}
+  jdk_version: ${{ matrix.flink_branches.jdk || '8, 11' }}
   run_dependency_convergence: false
diff --git a/flink-connector-mongodb/pom.xml b/flink-connector-mongodb/pom.xml
index 06d7d1e..1df7516 100644
--- a/flink-connector-mongodb/pom.xml
+++ b/flink-connector-mongodb/pom.xml
@@ -32,6 +32,15 @@ under the License.
 
jar
 
+   
+   
+   
+   --add-opens=java.base/java.util=ALL-UNNAMED
+   
+   --add-opens=java.base/java.lang=ALL-UNNAMED
+   
+   
+


 
diff --git a/pom.xml b/pom.xml
index ba474ec..f9f05a8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -67,6 +67,9 @@ under the License.
2.17.2
 

flink-connector-mongodb-parent
+   
+   -XX:+UseG1GC -Xms256m 
-XX:+IgnoreUnrecognizedVMOptions 
${surefire.module.config}
+   

 




(flink-connector-mongodb) branch v1.0 updated (78fe3ed -> e2aa8b3)

2024-02-19 Thread martijnvisser
This is an automated email from the ASF dual-hosted git repository.

martijnvisser pushed a change to branch v1.0
in repository https://gitbox.apache.org/repos/asf/flink-connector-mongodb.git


from 78fe3ed  [FLINK-33567][Documentation] Add Flink compatibility matrix 
for MongoDB 1.0.1
 new aa7a909  [FLINK-33899][connectors/mongodb] Support Java 17 and Java 21 
for mongodb connector
 new e2aa8b3  [hotfix][test][connectors/mongodb] Update MongoWriterITCase 
to be compatible with updated SinkV2 interfaces

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .github/workflows/push_pr.yml  |  9 +-
 .github/workflows/weekly.yml   | 13 +
 flink-connector-mongodb/pom.xml|  9 ++
 .../mongodb/sink/writer/MongoWriterITCase.java | 33 +++---
 pom.xml|  3 ++
 5 files changed, 43 insertions(+), 24 deletions(-)



svn commit: r67432 - in /dev/flink/flink-connector-parent-1.1.0-rc2: ./ flink-connector-parent-1.1.0-src.tgz flink-connector-parent-1.1.0-src.tgz.asc flink-connector-parent-1.1.0-src.tgz.sha512

2024-02-19 Thread echauchot
Author: echauchot
Date: Mon Feb 19 14:45:30 2024
New Revision: 67432

Log:
Add flink-connector-parent-1.1.0-rc2

Added:
dev/flink/flink-connector-parent-1.1.0-rc2/

dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz 
  (with props)

dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz.asc

dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz.sha512

Added: 
dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz
==
Binary file - no diff available.

Propchange: 
dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz
--
svn:mime-type = application/octet-stream

Added: 
dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz.asc
==
--- 
dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz.asc
 (added)
+++ 
dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz.asc
 Mon Feb 19 14:45:30 2024
@@ -0,0 +1,11 @@
+-BEGIN PGP SIGNATURE-
+
+iQEzBAABCgAdFiEE0adroZ1ilN0AM/aEOgGfC43RY+oFAmXTaYUACgkQOgGfC43R
+Y+pG7gf8CRIYRLLWvb8zLtIVM6ry5i3hlPuFHWAEOLWSmAPzJPJBShl0ybvftXqv
+5Gh2alVcma6IFOc8z6bfzAmlFDfat1EPonf7y8wDPVtQKux+BVFHhXXYJ9VuB9Fr
+qvaTe/SL0wD2MNY9yPVwjXHm6buAS/MR4/bILP8RVKizciIxJk45v6nh1F9Dskmc
+RyX5LM0uLGZWL34BUVDaxstyOrIz9omn7v67RFLsuxelKVyAtOrqUXRKh2iQeKCa
+l9+52qutEDy9Fxu0F1O0XUayQg+7M4z3vVldOTVpFGNY0IdNG7MxCVLDl9E1o1mI
+QN8CRALQCmhxprQEfEt0ElgdwLxZbw==
+=4HUO
+-END PGP SIGNATURE-

Added: 
dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz.sha512
==
--- 
dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz.sha512
 (added)
+++ 
dev/flink/flink-connector-parent-1.1.0-rc2/flink-connector-parent-1.1.0-src.tgz.sha512
 Mon Feb 19 14:45:30 2024
@@ -0,0 +1 @@
+04fb539d45c0d0f8c39e687226a961fdd1ff363d23b1c5b1d78ee25b6b5ab05ca35f80f5d82b95cc2d318efecfc0fd80f53d1c4329d9a84b7f9179f876927d94
  flink-connector-parent-1.1.0-src.tgz




(flink-connector-shared-utils) annotated tag v1.1.0-rc2 updated (c806d46 -> e0306a7)

2024-02-19 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a change to annotated tag v1.1.0-rc2
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git


*** WARNING: tag v1.1.0-rc2 was modified! ***

from c806d46  (commit)
  to e0306a7  (tag)
 tagging c806d46ef06c8e46d28a6a8b5db3f5104cfe53bc (commit)
 replaces v1.1.0-rc1
  by Etienne Chauchot
  on Mon Feb 19 15:44:22 2024 +0100

- Log -
v1.1.0-rc2
-BEGIN PGP SIGNATURE-

iQEzBAABCgAdFiEE0adroZ1ilN0AM/aEOgGfC43RY+oFAmXTaUYACgkQOgGfC43R
Y+qzgAf/XfRPZEIcLCqguMi49z53EJ1Ha9CbmBdimjqyz5WRfF9pOf5bAzMUUE/6
vZwU8wYMHotTDzWHnsX89voMyEnQDVDEyg+QEnBXGsJZrz/PnkJXp2sNjzch1Dge
WYhJqbfIvyOwU3xtm/AZh0Lw6zNsZvkhmmN/ehwrTA0MoHwYU8yyMKdWm1Hqn+9v
XjRu4fDokhSIEnhOSGxm75Hj+C1oVZeRTmZFpXQ+NxARr60fdeZCwXgnh1f4FufO
IQjZsHOAf+kpg2q/nE3EdvQiq0Iumr+oc5SR8+CjrjwuAoJvM/MaV6oaxajjlmzc
lyf2kmpYYtn+TZ5Vu8F9ZdH01yCAdg==
=57NQ
-END PGP SIGNATURE-
---


No new revisions were added by this update.

Summary of changes:



(flink) branch release-1.17 updated: [hotfix] Integrate mongodb v1.1 docs (#24331)

2024-02-19 Thread jiabaosun
This is an automated email from the ASF dual-hosted git repository.

jiabaosun pushed a commit to branch release-1.17
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.17 by this push:
 new fb34768b0f3 [hotfix] Integrate mongodb v1.1 docs (#24331)
fb34768b0f3 is described below

commit fb34768b0f3cf6ba2690743e2b5bac8f9e0d35aa
Author: Leonard Xu 
AuthorDate: Mon Feb 19 22:04:02 2024 +0800

[hotfix] Integrate mongodb v1.1 docs (#24331)
---
 docs/setup_docs.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/setup_docs.sh b/docs/setup_docs.sh
index 2e1ea0ef858..89623c75741 100755
--- a/docs/setup_docs.sh
+++ b/docs/setup_docs.sh
@@ -50,7 +50,7 @@ integrate_connector_docs pulsar v4.1
 integrate_connector_docs jdbc v3.1
 integrate_connector_docs rabbitmq v3.0
 integrate_connector_docs gcp-pubsub v3.0
-integrate_connector_docs mongodb v1.0
+integrate_connector_docs mongodb v1.1
 integrate_connector_docs opensearch v1.1
 integrate_connector_docs hbase v3.0
 integrate_connector_docs kafka v3.1



(flink) branch release-1.18 updated: [hotfix] Integrate mongodb v1.1 docs (#24332)

2024-02-19 Thread jiabaosun
This is an automated email from the ASF dual-hosted git repository.

jiabaosun pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new 349e8705135 [hotfix] Integrate mongodb v1.1 docs (#24332)
349e8705135 is described below

commit 349e87051359f1f0eb0a93c0f1fe62a81bd0a897
Author: Leonard Xu 
AuthorDate: Mon Feb 19 20:32:01 2024 +0800

[hotfix] Integrate mongodb v1.1 docs (#24332)
---
 docs/setup_docs.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/setup_docs.sh b/docs/setup_docs.sh
index 50209e93cb1..c945abe2f38 100755
--- a/docs/setup_docs.sh
+++ b/docs/setup_docs.sh
@@ -50,7 +50,7 @@ integrate_connector_docs pulsar v4.1
 integrate_connector_docs jdbc v3.1
 integrate_connector_docs rabbitmq v3.0
 integrate_connector_docs gcp-pubsub v3.0
-integrate_connector_docs mongodb v1.0
+integrate_connector_docs mongodb v1.1
 integrate_connector_docs opensearch v1.1
 integrate_connector_docs kafka v3.1
 integrate_connector_docs hbase v3.0



(flink) branch master updated: [hotfix] Integrate mongodb v1.1 docs (#24330)

2024-02-19 Thread jiabaosun
This is an automated email from the ASF dual-hosted git repository.

jiabaosun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 64463d4f62b [hotfix] Integrate mongodb v1.1 docs (#24330)
64463d4f62b is described below

commit 64463d4f62b31b478c4374fd1f999a15112cadcc
Author: Leonard Xu 
AuthorDate: Mon Feb 19 20:06:48 2024 +0800

[hotfix] Integrate mongodb v1.1 docs (#24330)
---
 docs/setup_docs.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/setup_docs.sh b/docs/setup_docs.sh
index 439249c7266..bd186e1006b 100755
--- a/docs/setup_docs.sh
+++ b/docs/setup_docs.sh
@@ -50,7 +50,7 @@ integrate_connector_docs pulsar v4.0
 integrate_connector_docs jdbc v3.1
 integrate_connector_docs rabbitmq v3.0
 integrate_connector_docs gcp-pubsub v3.0
-integrate_connector_docs mongodb v1.0
+integrate_connector_docs mongodb v1.1
 integrate_connector_docs opensearch v1.1
 integrate_connector_docs kafka v3.0
 integrate_connector_docs hbase v3.0



(flink) 01/02: [hotfix][ci] Removes obsolete line

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit b7c935dc5d9df88ee0f8dc952dd206ee263fd220
Author: Matthias Pohl 
AuthorDate: Tue Feb 13 18:11:52 2024 +0100

[hotfix][ci] Removes obsolete line
---
 .github/actions/job_init/action.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/actions/job_init/action.yml 
b/.github/actions/job_init/action.yml
index 550e7b62fd4..561b23ffeb0 100644
--- a/.github/actions/job_init/action.yml
+++ b/.github/actions/job_init/action.yml
@@ -67,7 +67,6 @@ runs:
 echo "[INFO] The directory '${dependency_path}' doesn't exist. 
${dependency_name} won't be removed."
   fi
 done
-android_sdk_path="/usr/local/lib/android"
 
 - name: "Set JDK version to ${{ inputs.jdk_version }}"
   shell: bash



(flink) branch release-1.19 updated (d039c6f8ace -> c6c1f7984d1)

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a change to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


from d039c6f8ace [FLINK-33960][Scheduler] Fix the bug that Adaptive 
Scheduler doesn't respect the lowerBound when one flink job has more than 1 
tasks
 new b7c935dc5d9 [hotfix][ci] Removes obsolete line
 new c6c1f7984d1 [FLINK-34418][ci] Mounts /mnt folder to /root

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .github/actions/job_init/action.yml |  1 -
 .github/workflows/template.flink-ci.yml | 14 ++
 2 files changed, 14 insertions(+), 1 deletion(-)



(flink) 01/02: [hotfix][ci] Removes obsolete line

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 39a2043845c8e988581b5070f31ad4491b5cdce6
Author: Matthias Pohl 
AuthorDate: Tue Feb 13 18:11:52 2024 +0100

[hotfix][ci] Removes obsolete line
---
 .github/actions/job_init/action.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/actions/job_init/action.yml 
b/.github/actions/job_init/action.yml
index 550e7b62fd4..561b23ffeb0 100644
--- a/.github/actions/job_init/action.yml
+++ b/.github/actions/job_init/action.yml
@@ -67,7 +67,6 @@ runs:
 echo "[INFO] The directory '${dependency_path}' doesn't exist. 
${dependency_name} won't be removed."
   fi
 done
-android_sdk_path="/usr/local/lib/android"
 
 - name: "Set JDK version to ${{ inputs.jdk_version }}"
   shell: bash



(flink) branch release-1.18 updated (35c560312ef -> 10c8943e49f)

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a change to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


from 35c560312ef [FLINK-34333][k8s] Upgrade k8s client to v6.9.2 to cover 
client issue #5464 (cherry-picked from FLINK-34007)
 new 39a2043845c [hotfix][ci] Removes obsolete line
 new 10c8943e49f [FLINK-34418][ci] Mounts /mnt folder to /root

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .github/actions/job_init/action.yml |  1 -
 .github/workflows/template.flink-ci.yml | 14 ++
 2 files changed, 14 insertions(+), 1 deletion(-)



(flink) branch master updated (dcb30f934a7 -> 4cc04a0b791)

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from dcb30f934a7 [FLINK-25015][Table SQL] Use SQL string as jobName for DQL 
jobs submitted by sql-gateway
 add 05544c663cc [hotfix][ci] Removes obsolete line
 add 4cc04a0b791 [FLINK-34418][ci] Mounts /mnt folder to /root

No new revisions were added by this update.

Summary of changes:
 .github/actions/job_init/action.yml |  1 -
 .github/workflows/template.flink-ci.yml | 14 ++
 2 files changed, 14 insertions(+), 1 deletion(-)



(flink) 02/02: [FLINK-34418][ci] Mounts /mnt folder to /root

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit c6c1f7984d101a59e923a163fddbd0128dea117a
Author: Matthias Pohl 
AuthorDate: Fri Feb 16 13:16:55 2024 +0100

[FLINK-34418][ci] Mounts /mnt folder to /root
---
 .github/workflows/template.flink-ci.yml | 14 ++
 1 file changed, 14 insertions(+)

diff --git a/.github/workflows/template.flink-ci.yml 
b/.github/workflows/template.flink-ci.yml
index ee7629e5acd..1ac63916ddd 100644
--- a/.github/workflows/template.flink-ci.yml
+++ b/.github/workflows/template.flink-ci.yml
@@ -177,6 +177,9 @@ jobs:
   # --init makes the process in the container being started as an init 
process which will clean up any daemon processes during shutdown
   # --privileged allows writing coredumps in docker (FLINK-16973)
   options: --init --privileged
+  # the /mnt folder is a separate disk mounted to the host filesystem with 
more free disk space that can be utilized
+  volumes:
+- /mnt:/root
 env:
   # timeout in minutes - this environment variable is required by 
uploading_watchdog.sh
   GHA_JOB_TIMEOUT: 240
@@ -251,6 +254,17 @@ jobs:
   ${{ inputs.environment }} PROFILE="$PROFILE -Pgithub-actions" 
./tools/azure-pipelines/uploading_watchdog.sh \
   ./tools/ci/test_controller.sh ${{ matrix.module }}
 
+  - name: "Post-build Disk Info"
+if: ${{ always() }}
+shell: bash
+run: df -h
+
+  - name: "Top 15 biggest directories in terms of used disk space"
+if: ${{ always() }}
+shell: bash
+run: |
+  du -ah --exclude="proc" -t100M . | sort -h -r | head -n 15
+
   - name: "Post-process build artifacts"
 working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
 run: find ${{ steps.test-run.outputs.debug-files-output-dir }} -type f 
-exec rename 's/[:<>|*?]/-/' {} \;



(flink) 02/02: [FLINK-34418][ci] Mounts /mnt folder to /root

2024-02-19 Thread mapohl
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 10c8943e49f324abb22c8254e415d9d666c3a6a3
Author: Matthias Pohl 
AuthorDate: Fri Feb 16 13:16:55 2024 +0100

[FLINK-34418][ci] Mounts /mnt folder to /root
---
 .github/workflows/template.flink-ci.yml | 14 ++
 1 file changed, 14 insertions(+)

diff --git a/.github/workflows/template.flink-ci.yml 
b/.github/workflows/template.flink-ci.yml
index ee7629e5acd..1ac63916ddd 100644
--- a/.github/workflows/template.flink-ci.yml
+++ b/.github/workflows/template.flink-ci.yml
@@ -177,6 +177,9 @@ jobs:
   # --init makes the process in the container being started as an init 
process which will clean up any daemon processes during shutdown
   # --privileged allows writing coredumps in docker (FLINK-16973)
   options: --init --privileged
+  # the /mnt folder is a separate disk mounted to the host filesystem with 
more free disk space that can be utilized
+  volumes:
+- /mnt:/root
 env:
   # timeout in minutes - this environment variable is required by 
uploading_watchdog.sh
   GHA_JOB_TIMEOUT: 240
@@ -251,6 +254,17 @@ jobs:
   ${{ inputs.environment }} PROFILE="$PROFILE -Pgithub-actions" 
./tools/azure-pipelines/uploading_watchdog.sh \
   ./tools/ci/test_controller.sh ${{ matrix.module }}
 
+  - name: "Post-build Disk Info"
+if: ${{ always() }}
+shell: bash
+run: df -h
+
+  - name: "Top 15 biggest directories in terms of used disk space"
+if: ${{ always() }}
+shell: bash
+run: |
+  du -ah --exclude="proc" -t100M . | sort -h -r | head -n 15
+
   - name: "Post-process build artifacts"
 working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
 run: find ${{ steps.test-run.outputs.debug-files-output-dir }} -type f 
-exec rename 's/[:<>|*?]/-/' {} \;



(flink-connector-mongodb) branch v1.1 updated: [hotfix] Update compatibility data for mongodb 1.1.0 (#25)

2024-02-19 Thread jiabaosun
This is an automated email from the ASF dual-hosted git repository.

jiabaosun pushed a commit to branch v1.1
in repository https://gitbox.apache.org/repos/asf/flink-connector-mongodb.git


The following commit(s) were added to refs/heads/v1.1 by this push:
 new 50bbbf0  [hotfix] Update compatibility data for mongodb 1.1.0 (#25)
50bbbf0 is described below

commit 50bbbf00d79cfa12edfad8f41466c1dd89bd3f36
Author: Leonard Xu 
AuthorDate: Mon Feb 19 19:09:46 2024 +0800

[hotfix] Update compatibility data for mongodb 1.1.0 (#25)
---
 docs/data/mongodb.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docs/data/mongodb.yml b/docs/data/mongodb.yml
index 52ced6d..c6345ea 100644
--- a/docs/data/mongodb.yml
+++ b/docs/data/mongodb.yml
@@ -16,7 +16,8 @@
 # limitations under the License.
 

 
-version: 1.1.0-SNAPSHOT
+version: 1.1.0
+flink_compatibility: [1.17, 1.18]
 variants:
   - maven: flink-connector-mongodb
 sql_url: 
https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-mongodb/$full_version/flink-sql-connector-mongodb-$full_version.jar



(flink-connector-mongodb) branch main updated: [hotfix] Update compatibility data for mongodb main branch

2024-02-19 Thread jiabaosun
This is an automated email from the ASF dual-hosted git repository.

jiabaosun pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-mongodb.git


The following commit(s) were added to refs/heads/main by this push:
 new 7515644  [hotfix] Update compatibility data for mongodb main branch
7515644 is described below

commit 7515644a6425105e5a8c4d1beb97c6a0dba95c86
Author: Leonard Xu 
AuthorDate: Mon Feb 19 18:18:54 2024 +0800

[hotfix] Update compatibility data for mongodb main branch
---
 docs/data/mongodb.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/data/mongodb.yml b/docs/data/mongodb.yml
index 52ced6d..6671322 100644
--- a/docs/data/mongodb.yml
+++ b/docs/data/mongodb.yml
@@ -16,7 +16,7 @@
 # limitations under the License.
 

 
-version: 1.1.0-SNAPSHOT
+version: 1.2.0-SNAPSHOT
 variants:
   - maven: flink-connector-mongodb
 sql_url: 
https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-mongodb/$full_version/flink-sql-connector-mongodb-$full_version.jar



(flink-connector-mongodb) 02/02: [build] Remove Flink 1.16.x support

2024-02-19 Thread jiabaosun
This is an automated email from the ASF dual-hosted git repository.

jiabaosun pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-mongodb.git

commit 384190c049eae5b9bbd73bdbf1877b72e7d9c435
Author: Leonard Xu 
AuthorDate: Mon Feb 19 18:42:39 2024 +0800

[build] Remove Flink 1.16.x support
---
 .github/workflows/push_pr.yml | 2 +-
 .github/workflows/weekly.yml  | 8 +---
 2 files changed, 2 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/push_pr.yml b/.github/workflows/push_pr.yml
index 9d349c0..fdd1377 100644
--- a/.github/workflows/push_pr.yml
+++ b/.github/workflows/push_pr.yml
@@ -25,7 +25,7 @@ jobs:
   compile_and_test:
 strategy:
   matrix:
-flink: [ 1.16-SNAPSHOT, 1.17-SNAPSHOT ]
+flink: [ 1.17-SNAPSHOT ]
 jdk: [ '8, 11' ]
 include:
   - flink: 1.18-SNAPSHOT
diff --git a/.github/workflows/weekly.yml b/.github/workflows/weekly.yml
index 8599352..07fbae5 100644
--- a/.github/workflows/weekly.yml
+++ b/.github/workflows/weekly.yml
@@ -26,10 +26,7 @@ jobs:
 if: github.repository_owner == 'apache'
 strategy:
   matrix:
-flink_branches: [{
-  flink: 1.16-SNAPSHOT,
-  branch: main
-}, {
+flink_branches: [ {
   flink: 1.17-SNAPSHOT,
   branch: main
 }, {
@@ -40,9 +37,6 @@ jobs:
   flink: 1.19-SNAPSHOT,
   jdk: '8, 11, 17, 21',
   branch: main
-}, {
-  flink: 1.16.3,
-  branch: v1.1
 }, {
   flink: 1.17.2,
   branch: v1.1



(flink-connector-mongodb) 01/02: [hotfix] Update nightly builds against the latest released v1.1 branch

2024-02-19 Thread jiabaosun
This is an automated email from the ASF dual-hosted git repository.

jiabaosun pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-mongodb.git

commit aaf3867b2a72a61a0511f250c36580842623b6bc
Author: Leonard Xu 
AuthorDate: Mon Feb 19 18:38:29 2024 +0800

[hotfix] Update nightly builds against the latest released v1.1 branch
---
 .github/workflows/weekly.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/weekly.yml b/.github/workflows/weekly.yml
index a70c3b8..8599352 100644
--- a/.github/workflows/weekly.yml
+++ b/.github/workflows/weekly.yml
@@ -42,14 +42,14 @@ jobs:
   branch: main
 }, {
   flink: 1.16.3,
-  branch: v1.0
+  branch: v1.1
 }, {
   flink: 1.17.2,
-  branch: v1.0
+  branch: v1.1
 }, {
   flink: 1.18.1,
   jdk: '8, 11, 17',
-  branch: v1.0
+  branch: v1.1
 }]
 uses: apache/flink-connector-shared-utils/.github/workflows/ci.yml@ci_utils
 with:



(flink-connector-mongodb) branch main updated (7f49d10 -> 384190c)

2024-02-19 Thread jiabaosun
This is an automated email from the ASF dual-hosted git repository.

jiabaosun pushed a change to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-mongodb.git


from 7f49d10  [hotfix][Connector/MongoDB] Add release tools as submodule
 new aaf3867  [hotfix] Update nightly builds against the latest released 
v1.1 branch
 new 384190c  [build] Remove Flink 1.16.x support

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .github/workflows/push_pr.yml |  2 +-
 .github/workflows/weekly.yml  | 12 +++-
 2 files changed, 4 insertions(+), 10 deletions(-)



(flink-connector-mongodb) annotated tag v1.1.0 updated (6c620cd -> 31f1bf9)

2024-02-19 Thread leonard
This is an automated email from the ASF dual-hosted git repository.

leonard pushed a change to annotated tag v1.1.0
in repository https://gitbox.apache.org/repos/asf/flink-connector-mongodb.git


*** WARNING: tag v1.1.0 was modified! ***

from 6c620cd  (commit)
  to 31f1bf9  (tag)
 tagging 6c620cd4d1b5140df6cae9a7f5c93ffa160bde2e (commit)
  by Leonard Xu
  on Mon Feb 19 17:42:37 2024 +0800

- Log -
v1.1.0
-BEGIN PGP SIGNATURE-

iHUEABYKAB0WIQRbL2YIcyOJrrZzMfWxl+HxEImYrQUCZdMijgAKCRCxl+HxEImY
rasxAP4yjCkICeJLlgdeLEoNi+Ngp0okyMWHzULsNlGJWYmMmgD/cGXE9QnLJGeN
OlRqbwMOOm5+skc4fX8drfvfzwhgkgQ=
=t/LJ
-END PGP SIGNATURE-
---


No new revisions were added by this update.

Summary of changes:



svn commit: r67425 - /dev/flink/flink-connector-mongodb-1.1.0-rc2/ /release/flink/flink-connector-mongodb-1.1.0/

2024-02-19 Thread leonard
Author: leonard
Date: Mon Feb 19 09:38:30 2024
New Revision: 67425

Log:
Release flink-connector-mongodb 1.1.0

Added:
release/flink/flink-connector-mongodb-1.1.0/
  - copied from r67424, dev/flink/flink-connector-mongodb-1.1.0-rc2/
Removed:
dev/flink/flink-connector-mongodb-1.1.0-rc2/



(flink-connector-shared-utils) branch release_utils updated: [hotfix] Update README.md with tools directory exclusion

2024-02-19 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch release_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git


The following commit(s) were added to refs/heads/release_utils by this push:
 new ba9a8f6  [hotfix] Update README.md with tools directory exclusion
ba9a8f6 is described below

commit ba9a8f6e0ae75985f6e382a28384da08b31b1993
Author: Etienne Chauchot 
AuthorDate: Mon Feb 19 09:36:45 2024 +0100

[hotfix] Update README.md with tools directory exclusion
---
 README.md | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 259bf6b..97de8df 100644
--- a/README.md
+++ b/README.md
@@ -30,7 +30,9 @@ Updates the version in the poms of the current branch to 
`${NEW_VERSION}`.
 ## stage_source_release.sh
 
 Creates a source release from the current branch and pushes it via `svn`
-to [dist.apache.org](https://dist.apache.org/repos/dist/dev/flink).  
+to [dist.apache.org](https://dist.apache.org/repos/dist/dev/flink). 
+This will exclude the tools directory where the release utils are mounted. 
Please ensure that this directory
+does not contain anything that is needed in the source release.  
 The project name is automatically determined from the repository name, but can 
be overridden via `${PROJECT}`.
 The version is automatically determined from the version in the pom.  
 The created `svn` directory will contain a `-rc${RC_NUM}` suffix.