(flink) branch release-1.19 updated: [FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment

2024-04-19 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.19 by this push:
 new 87ed9ccc210 [FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment
87ed9ccc210 is described below

commit 87ed9ccc2103457ba91f6ca45adfd2bfcc75c9ac
Author: Chesnay Schepler 
AuthorDate: Thu Apr 18 19:10:42 2024 +0200

[FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment
---
 .../flink/runtime/scheduler/adaptive/AdaptiveScheduler.java       | 1 -
 .../flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java  | 5 ++++-
 .../runtime/scheduler/adaptive/CreatingExecutionGraphTest.java    | 8 ++++++--
 3 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
index 5f6438ce181..238c594fd55 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
@@ -1187,7 +1187,6 @@ public class AdaptiveScheduler
 executionGraphWithVertexParallelism.getExecutionGraph();
 
 executionGraph.start(componentMainThreadExecutor);
-executionGraph.transitionToRunning();
 
 executionGraph.setInternalTaskFailuresListener(
 new UpdateSchedulerNgOnInternalFailuresListener(this));
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
index da90ef1468d..e9b1317e46e 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
@@ -123,7 +123,6 @@ public class CreatingExecutionGraph extends StateWithoutExecutionGraph {
 
operatorCoordinatorHandlerFactory.create(executionGraph, context);
 operatorCoordinatorHandler.initializeOperatorCoordinators(
 context.getMainThreadExecutor());
-operatorCoordinatorHandler.startAllOperatorCoordinators();
 final String updatedPlan =
 JsonPlanGenerator.generatePlan(
 executionGraph.getJobID(),
@@ -137,6 +136,10 @@ public class CreatingExecutionGraph extends StateWithoutExecutionGraph {
 .iterator(),
 executionGraphWithVertexParallelism.getVertexParallelism());
 executionGraph.setJsonPlan(updatedPlan);
+
+executionGraph.transitionToRunning();
+operatorCoordinatorHandler.startAllOperatorCoordinators();
+
 context.goToExecuting(
 result.getExecutionGraph(),
 executionGraphHandler,
diff --git a/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java b/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
index b831b3bb62f..0f89cdf7e12 100644
--- a/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
+++ b/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
@@ -93,8 +93,12 @@ class CreatingExecutionGraphTest {
 ignored -> CreatingExecutionGraph.AssignmentResult.notPossible());
 context.setExpectWaitingForResources();
 
-executionGraphWithVertexParallelismFuture.complete(
-getGraph(new StateTrackingMockExecutionGraph()));
+final StateTrackingMockExecutionGraph executionGraph =
+new StateTrackingMockExecutionGraph();
+
+executionGraphWithVertexParallelismFuture.complete(getGraph(executionGraph));
+
+assertThat(executionGraph.getState()).isEqualTo(JobStatus.INITIALIZING);
 }
 
 @Test
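
The net effect of this change: the ExecutionGraph used to be flipped to RUNNING in AdaptiveScheduler directly after start(), before any slots had been assigned; CreatingExecutionGraph now performs the transition only once slot assignment has succeeded, immediately before the operator coordinators start, which is what the updated test pins down by asserting INITIALIZING while assignment is still impossible. A minimal sketch of the resulting sequence, with method names taken from the diffs above and everything else simplified:

    executionGraph.start(componentMainThreadExecutor);   // graph stays INITIALIZING here
    // ... slot assignment happens while the graph is still INITIALIZING, so a
    // failed assignment can fall back to WaitingForResources without ever
    // having reported RUNNING ...
    executionGraph.setJsonPlan(updatedPlan);
    executionGraph.transitionToRunning();                // only after assignment succeeded
    operatorCoordinatorHandler.startAllOperatorCoordinators();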



(flink) branch release-1.18 updated: [FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment

2024-04-19 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new aacc735806a [FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment
aacc735806a is described below

commit aacc735806acf1d63fa732706e079bc2ca1bb4fc
Author: Chesnay Schepler 
AuthorDate: Thu Apr 18 19:10:42 2024 +0200

[FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment
---
 .../flink/runtime/scheduler/adaptive/AdaptiveScheduler.java       | 1 -
 .../flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java  | 5 ++++-
 .../runtime/scheduler/adaptive/CreatingExecutionGraphTest.java    | 8 ++++++--
 3 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
index 4ee22c95848..34539d23e04 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
@@ -1096,7 +1096,6 @@ public class AdaptiveScheduler
 executionGraphWithVertexParallelism.getExecutionGraph();
 
 executionGraph.start(componentMainThreadExecutor);
-executionGraph.transitionToRunning();
 
 executionGraph.setInternalTaskFailuresListener(
 new UpdateSchedulerNgOnInternalFailuresListener(this));
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
index c876fe6ad1d..21055945372 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraph.java
@@ -124,7 +124,6 @@ public class CreatingExecutionGraph implements State {
 
operatorCoordinatorHandlerFactory.create(executionGraph, context);
 operatorCoordinatorHandler.initializeOperatorCoordinators(
 context.getMainThreadExecutor());
-operatorCoordinatorHandler.startAllOperatorCoordinators();
 final String updatedPlan =
 JsonPlanGenerator.generatePlan(
 executionGraph.getJobID(),
@@ -138,6 +137,10 @@ public class CreatingExecutionGraph implements State {
 .iterator(),
 executionGraphWithVertexParallelism.getVertexParallelism());
 executionGraph.setJsonPlan(updatedPlan);
+
+executionGraph.transitionToRunning();
+operatorCoordinatorHandler.startAllOperatorCoordinators();
+
 context.goToExecuting(
 result.getExecutionGraph(),
 executionGraphHandler,
diff --git a/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java b/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
index 2375a194206..69e5f589b19 100644
--- a/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
+++ b/flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/CreatingExecutionGraphTest.java
@@ -157,8 +157,12 @@ public class CreatingExecutionGraphTest extends TestLogger {
 ignored -> CreatingExecutionGraph.AssignmentResult.notPossible());
 context.setExpectWaitingForResources();
 
-executionGraphWithVertexParallelismFuture.complete(
-getGraph(new StateTrackingMockExecutionGraph()));
+final StateTrackingMockExecutionGraph executionGraph =
+new StateTrackingMockExecutionGraph();
+
+executionGraphWithVertexParallelismFuture.complete(getGraph(executionGraph));
+
+assertThat(executionGraph.getState()).isEqualTo(JobStatus.INITIALIZING);
 }
 }
 



(flink) branch master updated (a312a3bdd25 -> 131358b918b)

2024-04-19 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from a312a3bdd25 [FLINK-35045][state] Support ByteBufferReadable for HadoopDataInputStream
 add 10c84df8c56 [hotfix] Delete pointless test
 add 131358b918b [FLINK-35159] Transition ExecutionGraph to RUNNING after slot assignment

No new revisions were added by this update.

Summary of changes:
 .../scheduler/adaptive/AdaptiveScheduler.java  |  1 -
 .../scheduler/adaptive/CreatingExecutionGraph.java |  5 +-
 .../scheduler/adaptive/AdaptiveSchedulerTest.java  | 55 --
 .../adaptive/CreatingExecutionGraphTest.java   |  8 +++-
 4 files changed, 10 insertions(+), 59 deletions(-)



(flink) branch release-1.18 updated (94d1363c27e -> 20c506d76c9)

2024-03-28 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


from 94d1363c27e [FLINK-34933][test] Fixes JobMasterServiceLeadershipRunnerTest#testResultFutureCompletionOfOutdatedLeaderIsIgnored
 new fcb581f0039 [FLINK-34922][rest] Support concurrent global failure
 new 20c506d76c9 [FLINK-34922] Adds ITCase for GlobalFailureOnRestart

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../rest/handler/job/JobExceptionsHandler.java |  12 ++
 .../rest/handler/job/JobExceptionsHandlerTest.java |  53 +-
 .../test/scheduling/AdaptiveSchedulerITCase.java   | 197 +++--
 3 files changed, 247 insertions(+), 15 deletions(-)



(flink) 02/02: [FLINK-34922] Adds ITCase for GlobalFailureOnRestart

2024-03-28 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 20c506d76c99c2e6c3f30a039acd0366d3448c87
Author: Panagiotis Garefalakis 
AuthorDate: Wed Mar 27 22:23:48 2024 -0700

[FLINK-34922] Adds ITCase for GlobalFailureOnRestart

Add an ITCase where a global failure is triggered while the scheduler is
restarting, and assert that this failure is handled such that it can be
retrieved via the REST API.
---
 .../test/scheduling/AdaptiveSchedulerITCase.java   | 197 +++--
 1 file changed, 183 insertions(+), 14 deletions(-)

diff --git a/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java b/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
index a4124dfe08c..d15f3ae7ef4 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
@@ -18,11 +18,14 @@
 
 package org.apache.flink.test.scheduling;
 
+import org.apache.flink.api.common.ExecutionConfig;
+import org.apache.flink.api.common.JobID;
 import org.apache.flink.api.common.JobStatus;
 import org.apache.flink.api.common.restartstrategy.RestartStrategies;
 import org.apache.flink.api.common.state.CheckpointListener;
 import org.apache.flink.api.common.state.ListState;
 import org.apache.flink.api.common.state.ListStateDescriptor;
+import org.apache.flink.api.common.time.Time;
 import org.apache.flink.client.program.rest.RestClusterClient;
 import org.apache.flink.configuration.ClusterOptions;
 import org.apache.flink.configuration.Configuration;
@@ -30,9 +33,20 @@ import org.apache.flink.configuration.HeartbeatManagerOptions;
 import org.apache.flink.configuration.JobManagerOptions;
 import org.apache.flink.core.execution.JobClient;
 import org.apache.flink.core.execution.SavepointFormatType;
+import org.apache.flink.runtime.execution.Environment;
+import org.apache.flink.runtime.jobgraph.JobGraph;
+import org.apache.flink.runtime.jobgraph.JobGraphBuilder;
+import org.apache.flink.runtime.jobgraph.JobVertex;
+import org.apache.flink.runtime.jobgraph.JobVertexID;
+import org.apache.flink.runtime.jobgraph.OperatorID;
+import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
+import org.apache.flink.runtime.minicluster.MiniCluster;
+import org.apache.flink.runtime.operators.coordination.OperatorCoordinator;
+import org.apache.flink.runtime.operators.coordination.OperatorEvent;
 import org.apache.flink.runtime.rest.messages.EmptyRequestBody;
 import org.apache.flink.runtime.rest.messages.JobExceptionsHeaders;
 import org.apache.flink.runtime.rest.messages.JobExceptionsInfoWithHistory;
+import org.apache.flink.runtime.rest.messages.JobExceptionsInfoWithHistory.RootExceptionInfo;
 import org.apache.flink.runtime.rest.messages.job.JobExceptionsMessageParameters;
 import org.apache.flink.runtime.scheduler.stopwithsavepoint.StopWithSavepointStoppingException;
 import org.apache.flink.runtime.state.FunctionInitializationContext;
@@ -48,8 +62,10 @@ import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunctio
 import org.apache.flink.test.util.MiniClusterWithClientResource;
 import org.apache.flink.util.FlinkException;
 import org.apache.flink.util.Preconditions;
+import org.apache.flink.util.SerializedValue;
 import org.apache.flink.util.TestLogger;
 
+import org.assertj.core.api.Assertions;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -62,11 +78,13 @@ import javax.annotation.Nullable;
 import java.io.File;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.stream.Collectors;
 
+import static org.apache.flink.core.testutils.FlinkAssertions.assertThatFuture;
 import static org.apache.flink.core.testutils.FlinkMatchers.containsCause;
 import static org.apache.flink.util.ExceptionUtils.assertThrowable;
 import static org.hamcrest.CoreMatchers.containsString;
@@ -266,25 +284,78 @@ public class AdaptiveSchedulerITCase extends TestLogger {
 final JobClient jobClient = env.executeAsync();
 CommonTestUtils.waitUntilCondition(
 () -> {
-final RestClusterClient restClusterClient =
-MINI_CLUSTER_WITH_CLIENT_RESOURCE.getRestClusterClient();
-final JobExceptionsMessageParameters params =
-new JobExceptionsMessageParameters();
-params.jobPathParameter.resolve(jobClient.getJobID());
-final CompletableFuture<JobExceptionsInfoWithHistory> exceptionsFuture =
-restClusterClient.sendRequ
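
The hunk is cut off by the archiver above; the pattern it builds on, polling the job's exception history through the REST client until the expected failure appears, looks roughly like the sketch below. This is an assumption-based reconstruction of the idea, not the verbatim continuation; sendRequest, JobExceptionsHeaders, and EmptyRequestBody are existing Flink REST APIs, while the loop body itself is illustrative:

    // Sketch: poll the REST API until the failure shows up in the history.
    CommonTestUtils.waitUntilCondition(
            () -> {
                final JobExceptionsMessageParameters params = new JobExceptionsMessageParameters();
                params.jobPathParameter.resolve(jobClient.getJobID());
                final JobExceptionsInfoWithHistory exceptions =
                        MINI_CLUSTER_WITH_CLIENT_RESOURCE
                                .getRestClusterClient()
                                .sendRequest(
                                        JobExceptionsHeaders.getInstance(),
                                        params,
                                        EmptyRequestBody.getInstance())
                                .get();
                return !exceptions.getExceptionHistory().getEntries().isEmpty();
            });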

(flink) 01/02: [FLINK-34922][rest] Support concurrent global failure

2024-03-28 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git

commit fcb581f0039f9704b6eaf15a2fabaa4e05d79048
Author: Chesnay Schepler 
AuthorDate: Wed Mar 27 09:33:08 2024 +0100

[FLINK-34922][rest] Support concurrent global failure
---
 .../rest/handler/job/JobExceptionsHandler.java | 12 +
 .../rest/handler/job/JobExceptionsHandlerTest.java | 53 +-
 2 files changed, 64 insertions(+), 1 deletion(-)

diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java
index 5ece82a2671..84140c8c007 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java
@@ -248,6 +248,18 @@ public class JobExceptionsHandler
 
 private static JobExceptionsInfoWithHistory.ExceptionInfo createExceptionInfo(
 ExceptionHistoryEntry exceptionHistoryEntry) {
+
+if (exceptionHistoryEntry.isGlobal()) {
+return new JobExceptionsInfoWithHistory.ExceptionInfo(
+exceptionHistoryEntry.getException().getOriginalErrorClassName(),
+exceptionHistoryEntry.getExceptionAsString(),
+exceptionHistoryEntry.getTimestamp(),
+exceptionHistoryEntry.getFailureLabels(),
+null,
+null,
+null);
+}
+
 assertLocalExceptionInfo(exceptionHistoryEntry);
 
 return new JobExceptionsInfoWithHistory.ExceptionInfo(
diff --git a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java
index efce7903686..c7699c6f951 100644
--- a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java
+++ b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java
@@ -55,6 +55,7 @@ import org.apache.flink.util.SerializedThrowable;
 import org.apache.flink.util.TestLogger;
 import org.apache.flink.util.concurrent.Executors;
 
+import org.assertj.core.api.Assertions;
 import org.hamcrest.Description;
 import org.hamcrest.Matcher;
 import org.hamcrest.TypeSafeDiagnosingMatcher;
@@ -64,6 +65,7 @@ import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -73,6 +75,7 @@ import java.util.concurrent.ExecutionException;
 import java.util.function.Function;
 
 import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.createExecutionAttemptId;
+import static org.assertj.core.api.HamcrestCondition.matching;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.collection.IsEmptyCollection.empty;
@@ -214,6 +217,47 @@ public class JobExceptionsHandlerTest extends TestLogger {
 assertFalse(response.getExceptionHistory().isTruncated());
 }
 
+@Test
+public void testWithExceptionHistoryAndConcurrentGlobalFailure()
+throws HandlerRequestException, ExecutionException, InterruptedException {
+final ExceptionHistoryEntry otherFailure =
+ExceptionHistoryEntry.createGlobal(
+new RuntimeException("exception #1"),
+CompletableFuture.completedFuture(Collections.emptyMap()));
+final RootExceptionHistoryEntry rootCause =
+fromGlobalFailure(
+new RuntimeException("exception #0"),
+System.currentTimeMillis(),
+Collections.singleton(otherFailure));
+
+final ExecutionGraphInfo executionGraphInfo = createExecutionGraphInfo(rootCause);
+final HandlerRequest request =
+createRequest(executionGraphInfo.getJobId(), 10);
+final JobExceptionsInfoWithHistory response =
+testInstance.handleRequest(request, executionGraphInfo);
+
+Assertions.assertThat(response.getExceptionHistory().getEntries())
+.hasSize(1)
+.satisfies(
+matching(
+contains(
+historyContainsGlobalFailure(
+rootCause.getException(),
+rootCause.getTimestamp(),
+  
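
A note on the handler change at the top of this message: an ExceptionInfo built through the isGlobal() branch carries only the error class name, the stringified exception, the timestamp, and the failure labels, while the task-level arguments passed as null stay absent from the REST payload. A client reading the history therefore has to treat those fields as optional; a small sketch, with getter names assumed to mirror the constructor arguments rather than confirmed API:

    // Sketch: telling global failures apart from task-level ones in the response.
    // Getter names are assumptions based on the constructor arguments above.
    for (JobExceptionsInfoWithHistory.RootExceptionInfo info :
            response.getExceptionHistory().getEntries()) {
        if (info.getTaskName() == null) {
            // global failure: no failing task or task manager location attached
        }
    }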

(flink) 02/02: [FLINK-34922] Adds ITCase for GlobalFailureOnRestart

2024-03-28 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit b54edc886ce5a533bafe74fa3629657b6266cad5
Author: Panagiotis Garefalakis 
AuthorDate: Wed Mar 27 22:23:48 2024 -0700

[FLINK-34922] Adds ITCase for GlobalFailureOnRestart

Add an ITCase where a global failure is triggered while the scheduler is
restarting, and assert that this failure is handled such that it can be
retrieved via the REST API.
---
 .../test/scheduling/AdaptiveSchedulerITCase.java   | 197 +++--
 1 file changed, 183 insertions(+), 14 deletions(-)

diff --git a/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java b/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
index 0cbfdd950bd..e63b2891039 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
@@ -18,11 +18,14 @@
 
 package org.apache.flink.test.scheduling;
 
+import org.apache.flink.api.common.ExecutionConfig;
+import org.apache.flink.api.common.JobID;
 import org.apache.flink.api.common.JobStatus;
 import org.apache.flink.api.common.restartstrategy.RestartStrategies;
 import org.apache.flink.api.common.state.CheckpointListener;
 import org.apache.flink.api.common.state.ListState;
 import org.apache.flink.api.common.state.ListStateDescriptor;
+import org.apache.flink.api.common.time.Time;
 import org.apache.flink.client.program.rest.RestClusterClient;
 import org.apache.flink.configuration.ClusterOptions;
 import org.apache.flink.configuration.Configuration;
@@ -30,9 +33,20 @@ import org.apache.flink.configuration.HeartbeatManagerOptions;
 import org.apache.flink.configuration.JobManagerOptions;
 import org.apache.flink.core.execution.JobClient;
 import org.apache.flink.core.execution.SavepointFormatType;
+import org.apache.flink.runtime.execution.Environment;
+import org.apache.flink.runtime.jobgraph.JobGraph;
+import org.apache.flink.runtime.jobgraph.JobGraphBuilder;
+import org.apache.flink.runtime.jobgraph.JobVertex;
+import org.apache.flink.runtime.jobgraph.JobVertexID;
+import org.apache.flink.runtime.jobgraph.OperatorID;
+import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
+import org.apache.flink.runtime.minicluster.MiniCluster;
+import org.apache.flink.runtime.operators.coordination.OperatorCoordinator;
+import org.apache.flink.runtime.operators.coordination.OperatorEvent;
 import org.apache.flink.runtime.rest.messages.EmptyRequestBody;
 import org.apache.flink.runtime.rest.messages.JobExceptionsHeaders;
 import org.apache.flink.runtime.rest.messages.JobExceptionsInfoWithHistory;
+import org.apache.flink.runtime.rest.messages.JobExceptionsInfoWithHistory.RootExceptionInfo;
 import org.apache.flink.runtime.rest.messages.job.JobExceptionsMessageParameters;
 import org.apache.flink.runtime.scheduler.stopwithsavepoint.StopWithSavepointStoppingException;
 import org.apache.flink.runtime.state.FunctionInitializationContext;
@@ -48,8 +62,10 @@ import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunctio
 import org.apache.flink.test.util.MiniClusterWithClientResource;
 import org.apache.flink.util.FlinkException;
 import org.apache.flink.util.Preconditions;
+import org.apache.flink.util.SerializedValue;
 import org.apache.flink.util.TestLogger;
 
+import org.assertj.core.api.Assertions;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -62,11 +78,13 @@ import javax.annotation.Nullable;
 import java.io.File;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.stream.Collectors;
 
+import static org.apache.flink.core.testutils.FlinkAssertions.assertThatFuture;
 import static org.apache.flink.core.testutils.FlinkMatchers.containsCause;
 import static org.apache.flink.util.ExceptionUtils.assertThrowable;
 import static org.hamcrest.CoreMatchers.containsString;
@@ -270,25 +288,78 @@ public class AdaptiveSchedulerITCase extends TestLogger {
 final JobClient jobClient = env.executeAsync();
 CommonTestUtils.waitUntilCondition(
 () -> {
-final RestClusterClient restClusterClient =
-MINI_CLUSTER_WITH_CLIENT_RESOURCE.getRestClusterClient();
-final JobExceptionsMessageParameters params =
-new JobExceptionsMessageParameters();
-params.jobPathParameter.resolve(jobClient.getJobID());
-final CompletableFuture<JobExceptionsInfoWithHistory> exceptionsFuture =
-restClusterClient.sendRequ

(flink) branch release-1.19 updated (c11656a2406 -> b54edc886ce)

2024-03-28 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


from c11656a2406 [FLINK-34933][test] Fixes JobMasterServiceLeadershipRunnerTest#testResultFutureCompletionOfOutdatedLeaderIsIgnored
 new faa880c703c [FLINK-34922][rest] Support concurrent global failure
 new b54edc886ce [FLINK-34922] Adds ITCase for GlobalFailureOnRestart

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../rest/handler/job/JobExceptionsHandler.java |  13 ++
 .../rest/handler/job/JobExceptionsHandlerTest.java |  51 +-
 .../test/scheduling/AdaptiveSchedulerITCase.java   | 197 +++--
 3 files changed, 246 insertions(+), 15 deletions(-)



(flink) 01/02: [FLINK-34922][rest] Support concurrent global failure

2024-03-28 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit faa880c703cadba4521fc8ef885a242ded4b2ac7
Author: Chesnay Schepler 
AuthorDate: Wed Mar 27 09:33:08 2024 +0100

[FLINK-34922][rest] Support concurrent global failure
---
 .../rest/handler/job/JobExceptionsHandler.java | 13 ++
 .../rest/handler/job/JobExceptionsHandlerTest.java | 51 +-
 2 files changed, 63 insertions(+), 1 deletion(-)

diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java
index 55c7875e85c..6d5f49d55b4 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java
@@ -249,6 +249,19 @@ public class JobExceptionsHandler
 
 private static JobExceptionsInfoWithHistory.ExceptionInfo createExceptionInfo(
 ExceptionHistoryEntry exceptionHistoryEntry) {
+
+if (exceptionHistoryEntry.isGlobal()) {
+return new JobExceptionsInfoWithHistory.ExceptionInfo(
+exceptionHistoryEntry.getException().getOriginalErrorClassName(),
+exceptionHistoryEntry.getExceptionAsString(),
+exceptionHistoryEntry.getTimestamp(),
+exceptionHistoryEntry.getFailureLabels(),
+null,
+null,
+null,
+null);
+}
+
 assertLocalExceptionInfo(exceptionHistoryEntry);
 
 return new JobExceptionsInfoWithHistory.ExceptionInfo(
diff --git a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java
index f5354e5ce90..761881d1624 100644
--- a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java
+++ b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java
@@ -63,6 +63,7 @@ import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -215,6 +216,47 @@ class JobExceptionsHandlerTest {
 assertThat(response.getExceptionHistory().isTruncated()).isFalse();
 }
 
+@Test
+void testWithExceptionHistoryAndConcurrentGlobalFailure()
+throws HandlerRequestException, ExecutionException, InterruptedException {
+final ExceptionHistoryEntry otherFailure =
+ExceptionHistoryEntry.createGlobal(
+new RuntimeException("exception #1"),
+CompletableFuture.completedFuture(Collections.emptyMap()));
+final RootExceptionHistoryEntry rootCause =
+fromGlobalFailure(
+new RuntimeException("exception #0"),
+System.currentTimeMillis(),
+Collections.singleton(otherFailure));
+
+final ExecutionGraphInfo executionGraphInfo = createExecutionGraphInfo(rootCause);
+final HandlerRequest request =
+createRequest(executionGraphInfo.getJobId(), 10);
+final JobExceptionsInfoWithHistory response =
+testInstance.handleRequest(request, executionGraphInfo);
+
+assertThat(response.getExceptionHistory().getEntries())
+.hasSize(1)
+.satisfies(
+matching(
+contains(
+historyContainsGlobalFailure(
+rootCause.getException(),
+rootCause.getTimestamp(),
+matchesFailure(
+otherFailure.getException(),
+otherFailure.getTimestamp(),
+otherFailure.getFailureLabelsFuture(),
+otherFailure.getFailingTaskName(),
+JobExceptionsHandler.toString(
+otherFailure
+.getTaskManagerLocation()),
+JobExceptionsHandler.toTaskManagerId(
+

(flink) 02/02: [FLINK-34922] Adds ITCase for GlobalFailureOnRestart

2024-03-28 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit f4c945cb9ca882ae485c2e58c74825938f154119
Author: Panagiotis Garefalakis 
AuthorDate: Wed Mar 27 22:23:48 2024 -0700

[FLINK-34922] Adds ITCase for GlobalFailureOnRestart

Add an ITCase where a global failure is triggered while the scheduler is
restarting, and assert that this failure is handled such that it can be
retrieved via the REST API.
---
 .../test/scheduling/AdaptiveSchedulerITCase.java   | 197 +++--
 1 file changed, 183 insertions(+), 14 deletions(-)

diff --git a/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java b/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
index 2836ff82371..f6c31658f7d 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
@@ -18,11 +18,14 @@
 
 package org.apache.flink.test.scheduling;
 
+import org.apache.flink.api.common.ExecutionConfig;
+import org.apache.flink.api.common.JobID;
 import org.apache.flink.api.common.JobStatus;
 import org.apache.flink.api.common.restartstrategy.RestartStrategies;
 import org.apache.flink.api.common.state.CheckpointListener;
 import org.apache.flink.api.common.state.ListState;
 import org.apache.flink.api.common.state.ListStateDescriptor;
+import org.apache.flink.api.common.time.Time;
 import org.apache.flink.client.program.rest.RestClusterClient;
 import org.apache.flink.configuration.ClusterOptions;
 import org.apache.flink.configuration.Configuration;
@@ -31,9 +34,20 @@ import org.apache.flink.configuration.JobManagerOptions;
 import org.apache.flink.core.execution.CheckpointingMode;
 import org.apache.flink.core.execution.JobClient;
 import org.apache.flink.core.execution.SavepointFormatType;
+import org.apache.flink.runtime.execution.Environment;
+import org.apache.flink.runtime.jobgraph.JobGraph;
+import org.apache.flink.runtime.jobgraph.JobGraphBuilder;
+import org.apache.flink.runtime.jobgraph.JobVertex;
+import org.apache.flink.runtime.jobgraph.JobVertexID;
+import org.apache.flink.runtime.jobgraph.OperatorID;
+import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
+import org.apache.flink.runtime.minicluster.MiniCluster;
+import org.apache.flink.runtime.operators.coordination.OperatorCoordinator;
+import org.apache.flink.runtime.operators.coordination.OperatorEvent;
 import org.apache.flink.runtime.rest.messages.EmptyRequestBody;
 import org.apache.flink.runtime.rest.messages.JobExceptionsHeaders;
 import org.apache.flink.runtime.rest.messages.JobExceptionsInfoWithHistory;
+import org.apache.flink.runtime.rest.messages.JobExceptionsInfoWithHistory.RootExceptionInfo;
 import org.apache.flink.runtime.rest.messages.job.JobExceptionsMessageParameters;
 import org.apache.flink.runtime.scheduler.stopwithsavepoint.StopWithSavepointStoppingException;
 import org.apache.flink.runtime.state.FunctionInitializationContext;
@@ -48,8 +62,10 @@ import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunctio
 import org.apache.flink.test.util.MiniClusterWithClientResource;
 import org.apache.flink.util.FlinkException;
 import org.apache.flink.util.Preconditions;
+import org.apache.flink.util.SerializedValue;
 import org.apache.flink.util.TestLogger;
 
+import org.assertj.core.api.Assertions;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -62,11 +78,13 @@ import javax.annotation.Nullable;
 import java.io.File;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.stream.Collectors;
 
+import static org.apache.flink.core.testutils.FlinkAssertions.assertThatFuture;
 import static org.apache.flink.core.testutils.FlinkMatchers.containsCause;
 import static org.apache.flink.util.ExceptionUtils.assertThrowable;
 import static org.hamcrest.CoreMatchers.containsString;
@@ -270,25 +288,78 @@ public class AdaptiveSchedulerITCase extends TestLogger {
 final JobClient jobClient = env.executeAsync();
 CommonTestUtils.waitUntilCondition(
 () -> {
-final RestClusterClient restClusterClient =
-MINI_CLUSTER_WITH_CLIENT_RESOURCE.getRestClusterClient();
-final JobExceptionsMessageParameters params =
-new JobExceptionsMessageParameters();
-params.jobPathParameter.resolve(jobClient.getJobID());
-final CompletableFuture<JobExceptionsInfoWithHistory> exceptionsFuture =
-restClusterClient.sendRequ

(flink) 01/02: [FLINK-34922][rest] Support concurrent global failure

2024-03-28 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit dc957bfdc3aa6a8e3bce603cfc68c5c553c72220
Author: Chesnay Schepler 
AuthorDate: Wed Mar 27 09:33:08 2024 +0100

[FLINK-34922][rest] Support concurrent global failure
---
 .../rest/handler/job/JobExceptionsHandler.java | 13 ++
 .../rest/handler/job/JobExceptionsHandlerTest.java | 51 +-
 2 files changed, 63 insertions(+), 1 deletion(-)

diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java
index 55c7875e85c..6d5f49d55b4 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java
@@ -249,6 +249,19 @@ public class JobExceptionsHandler
 
 private static JobExceptionsInfoWithHistory.ExceptionInfo createExceptionInfo(
 ExceptionHistoryEntry exceptionHistoryEntry) {
+
+if (exceptionHistoryEntry.isGlobal()) {
+return new JobExceptionsInfoWithHistory.ExceptionInfo(
+exceptionHistoryEntry.getException().getOriginalErrorClassName(),
+exceptionHistoryEntry.getExceptionAsString(),
+exceptionHistoryEntry.getTimestamp(),
+exceptionHistoryEntry.getFailureLabels(),
+null,
+null,
+null,
+null);
+}
+
 assertLocalExceptionInfo(exceptionHistoryEntry);
 
 return new JobExceptionsInfoWithHistory.ExceptionInfo(
diff --git a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java
index a377ec83bb0..c40bdb2c8e8 100644
--- a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java
+++ b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandlerTest.java
@@ -64,6 +64,7 @@ import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -216,6 +217,47 @@ class JobExceptionsHandlerTest {
 assertThat(response.getExceptionHistory().isTruncated()).isFalse();
 }
 
+@Test
+void testWithExceptionHistoryAndConcurrentGlobalFailure()
+throws HandlerRequestException, ExecutionException, InterruptedException {
+final ExceptionHistoryEntry otherFailure =
+ExceptionHistoryEntry.createGlobal(
+new RuntimeException("exception #1"),
+CompletableFuture.completedFuture(Collections.emptyMap()));
+final RootExceptionHistoryEntry rootCause =
+fromGlobalFailure(
+new RuntimeException("exception #0"),
+System.currentTimeMillis(),
+Collections.singleton(otherFailure));
+
+final ExecutionGraphInfo executionGraphInfo = createExecutionGraphInfo(rootCause);
+final HandlerRequest request =
+createRequest(executionGraphInfo.getJobId(), 10);
+final JobExceptionsInfoWithHistory response =
+testInstance.handleRequest(request, executionGraphInfo);
+
+assertThat(response.getExceptionHistory().getEntries())
+.hasSize(1)
+.satisfies(
+matching(
+contains(
+historyContainsGlobalFailure(
+rootCause.getException(),
+rootCause.getTimestamp(),
+matchesFailure(
+otherFailure.getException(),
+otherFailure.getTimestamp(),
+otherFailure.getFailureLabelsFuture(),
+otherFailure.getFailingTaskName(),
+JobExceptionsHandler.toString(
+otherFailure
+.getTaskManagerLocation()),
+JobExceptionsHandler.toTaskManagerId(
+

(flink) branch master updated (83f82ab0c86 -> f4c945cb9ca)

2024-03-28 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 83f82ab0c86 [FLINK-33376][coordination] Extend ZooKeeper Curator configurations
 new dc957bfdc3a [FLINK-34922][rest] Support concurrent global failure
 new f4c945cb9ca [FLINK-34922] Adds ITCase for GlobalFailureOnRestart

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../rest/handler/job/JobExceptionsHandler.java |  13 ++
 .../rest/handler/job/JobExceptionsHandlerTest.java |  51 +-
 .../test/scheduling/AdaptiveSchedulerITCase.java   | 197 +++--
 3 files changed, 246 insertions(+), 15 deletions(-)



svn commit: r67623 - /dev/flink/flink-connector-parent-1.1.0-rc2/ /release/flink/flink-connector-parent-1.1.0/

2024-02-28 Thread chesnay
Author: chesnay
Date: Wed Feb 28 11:16:12 2024
New Revision: 67623

Log:
Release flink-connector-parent 1.1.0

Added:
release/flink/flink-connector-parent-1.1.0/
  - copied from r67622, dev/flink/flink-connector-parent-1.1.0-rc2/
Removed:
dev/flink/flink-connector-parent-1.1.0-rc2/



(flink) branch master updated (2d78c102112 -> 6c8f3a0799c)

2024-02-23 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 2d78c102112 [FLINK-34496] Break circular dependency in static initialization
 add 6c8f3a0799c [FLINK-34496] Remove unused method

No new revisions were added by this update.

Summary of changes:
 .../planner/plan/nodes/exec/serde/JsonSerdeUtil.java  | 15 ---
 1 file changed, 15 deletions(-)



(flink) branch release-1.19 updated: [FLINK-34496] Break circular dependency in static initialization

2024-02-23 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.19 by this push:
 new dd77ee5a250 [FLINK-34496] Break circular dependency in static initialization
dd77ee5a250 is described below

commit dd77ee5a2501a6750387126c347cf540f3fb172b
Author: Chesnay Schepler 
AuthorDate: Fri Feb 23 10:26:45 2024 +0100

[FLINK-34496] Break circular dependency in static initialization
---
 .../planner/plan/nodes/exec/serde/JsonSerdeUtil.java  | 15 ---
 .../planner/plan/utils/ExecNodeMetadataUtil.java  | 19 +--
 .../planner/plan/utils/ExecNodeMetadataUtilTest.java  |  3 +--
 3 files changed, 18 insertions(+), 19 deletions(-)

diff --git a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/JsonSerdeUtil.java b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/JsonSerdeUtil.java
index b55fccbff28..fb3a723d9ca 100644
--- a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/JsonSerdeUtil.java
+++ b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/JsonSerdeUtil.java
@@ -42,7 +42,6 @@ import org.apache.flink.table.types.logical.LogicalType;
 import org.apache.flink.table.types.logical.RowType;
 import org.apache.flink.util.jackson.JacksonMapperFactory;
 
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator;
 import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonParser;
 import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.ObjectCodec;
@@ -69,26 +68,12 @@ import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexWindowBound;
 
 import java.io.IOException;
-import java.lang.annotation.Annotation;
-import java.lang.reflect.Constructor;
 import java.util.Optional;
 
 /** A utility class that provide abilities for JSON serialization and deserialization. */
 @Internal
 public class JsonSerdeUtil {
 
-/** Return true if the given class's constructors have @JsonCreator annotation, else false. */
-public static boolean hasJsonCreatorAnnotation(Class<?> clazz) {
-for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
-for (Annotation annotation : constructor.getAnnotations()) {
-if (annotation instanceof JsonCreator) {
-return true;
-}
-}
-}
-return false;
-}
-
 /**
  * Object mapper shared instance to serialize and deserialize the plan. Note that creating and
  * copying of object mappers is expensive and should be avoided.
diff --git a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java
index efded08a82e..a7e1d2840ad 100644
--- a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java
+++ b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java
@@ -28,7 +28,6 @@ import org.apache.flink.table.api.config.TableConfigOptions;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNode;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeMetadata;
 import org.apache.flink.table.planner.plan.nodes.exec.MultipleExecNodeMetadata;
-import org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeUtil;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecAsyncCalc;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecCalc;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecChangelogNormalize;
@@ -79,8 +78,12 @@ import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecWindowJoi
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecWindowRank;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecWindowTableFunction;
 
+import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator;
+
 import javax.annotation.Nullable;
 
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Constructor;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -232,7 +235,7 @@ public final class ExecNodeMetadataUtil {
 }
 
 private static void addToLookupMap(Class<? extends ExecNode<?>> execNodeClass) {
-if (!JsonSerdeUtil.hasJsonCreatorAnnotation(execNodeClass)) {
+if (!hasJsonCreatorAnnotation(execNodeClass)) {
 throw new IllegalStateExcept
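
Why relocating one static helper fixes anything: if class A's static initializer forces class B to initialize, and B's static initializer in turn reads A's statics, the JVM permits the re-entrant access and B observes A half-initialized, with reference fields still null. Moving hasJsonCreatorAnnotation into ExecNodeMetadataUtil makes the dependency one-way, so neither class touches the other while initializing. A self-contained sketch of the failure mode, using illustrative classes rather than the Flink ones:

    // Running this prints "a:B(null)": B's initializer ran while A was mid-initialization.
    class A {
        static final String VALUE = "a:" + B.NAME;       // initializing A forces B to initialize
    }

    class B {
        static final String NAME = "B(" + A.VALUE + ")"; // re-enters A: A.VALUE is still null here
    }

    public class StaticCycleDemo {
        public static void main(String[] args) {
            System.out.println(A.VALUE);                 // "a:B(null)"
        }
    }

The commit applies the standard remedy: break the back-edge by moving the shared helper into the class that needs it during initialization.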

(flink) branch release-1.18 updated: [FLINK-34496] Break circular dependency in static initialization

2024-02-23 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new 39ed3cf279d [FLINK-34496] Break circular dependency in static initialization
39ed3cf279d is described below

commit 39ed3cf279d61e4472e1c30a17927992236df467
Author: Chesnay Schepler 
AuthorDate: Fri Feb 23 10:26:45 2024 +0100

[FLINK-34496] Break circular dependency in static initialization
---
 .../planner/plan/nodes/exec/serde/JsonSerdeUtil.java  | 15 ---
 .../planner/plan/utils/ExecNodeMetadataUtil.java  | 19 +--
 .../planner/plan/utils/ExecNodeMetadataUtilTest.java  |  3 +--
 3 files changed, 18 insertions(+), 19 deletions(-)

diff --git a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/JsonSerdeUtil.java b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/JsonSerdeUtil.java
index dffd134e97a..2d34c710a76 100644
--- a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/JsonSerdeUtil.java
+++ b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/JsonSerdeUtil.java
@@ -42,7 +42,6 @@ import org.apache.flink.table.types.logical.LogicalType;
 import org.apache.flink.table.types.logical.RowType;
 import org.apache.flink.util.jackson.JacksonMapperFactory;
 
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator;
 import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonParser;
 import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.ObjectCodec;
@@ -68,26 +67,12 @@ import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexWindowBound;
 
 import java.io.IOException;
-import java.lang.annotation.Annotation;
-import java.lang.reflect.Constructor;
 import java.util.Optional;
 
 /** A utility class that provide abilities for JSON serialization and deserialization. */
 @Internal
 public class JsonSerdeUtil {
 
-/** Return true if the given class's constructors have @JsonCreator annotation, else false. */
-public static boolean hasJsonCreatorAnnotation(Class<?> clazz) {
-for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
-for (Annotation annotation : constructor.getAnnotations()) {
-if (annotation instanceof JsonCreator) {
-return true;
-}
-}
-}
-return false;
-}
-
 /**
  * Object mapper shared instance to serialize and deserialize the plan. Note that creating and
  * copying of object mappers is expensive and should be avoided.
diff --git a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java
index 5c467f29583..84ce3fbf2a9 100644
--- a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java
+++ b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java
@@ -28,7 +28,6 @@ import org.apache.flink.table.api.config.TableConfigOptions;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNode;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeMetadata;
 import org.apache.flink.table.planner.plan.nodes.exec.MultipleExecNodeMetadata;
-import org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeUtil;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecCalc;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecChangelogNormalize;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecCorrelate;
@@ -78,8 +77,12 @@ import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecWindowJoi
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecWindowRank;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecWindowTableFunction;
 
+import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator;
+
 import javax.annotation.Nullable;
 
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Constructor;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -230,7 +233,7 @@ public final class ExecNodeMetadataUtil {
 }
 
 private static void addToLookupMap(Class<? extends ExecNode<?>> execNodeClass) {
-if (!JsonSerdeUtil.hasJsonCreatorAnnotation(execNodeClass)) {
+if (!hasJsonCreatorAnnotation(execNodeClass)) {
 throw new IllegalStateExcept

(flink) branch master updated: [FLINK-34496] Break circular dependency in static initialization

2024-02-23 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 2d78c102112 [FLINK-34496] Break circular dependency in static initialization
2d78c102112 is described below

commit 2d78c10211272a264712e86192c4dfc59c6a5521
Author: Chesnay Schepler 
AuthorDate: Fri Feb 23 10:26:45 2024 +0100

[FLINK-34496] Break circular dependency in static initialization
---
 .../planner/plan/utils/ExecNodeMetadataUtil.java  | 19 +--
 .../planner/plan/utils/ExecNodeMetadataUtilTest.java  |  3 +--
 2 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java
index efded08a82e..a7e1d2840ad 100644
--- a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java
+++ b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java
@@ -28,7 +28,6 @@ import org.apache.flink.table.api.config.TableConfigOptions;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNode;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeMetadata;
 import org.apache.flink.table.planner.plan.nodes.exec.MultipleExecNodeMetadata;
-import org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeUtil;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecAsyncCalc;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecCalc;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecChangelogNormalize;
@@ -79,8 +78,12 @@ import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecWindowJoi
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecWindowRank;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecWindowTableFunction;
 
+import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator;
+
 import javax.annotation.Nullable;
 
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Constructor;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -232,7 +235,7 @@ public final class ExecNodeMetadataUtil {
 }
 
 private static void addToLookupMap(Class<? extends ExecNode<?>> execNodeClass) {
-if (!JsonSerdeUtil.hasJsonCreatorAnnotation(execNodeClass)) {
+if (!hasJsonCreatorAnnotation(execNodeClass)) {
 throw new IllegalStateException(
 String.format(
 "ExecNode: %s does not implement @JsonCreator 
annotation on "
@@ -366,4 +369,16 @@ public final class ExecNodeMetadataUtil {
 return Objects.hash(name, version);
 }
 }
+
+/** Return true if the given class's constructors have @JsonCreator annotation, else false. */
+static boolean hasJsonCreatorAnnotation(Class<?> clazz) {
+for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
+for (Annotation annotation : constructor.getAnnotations()) {
+if (annotation instanceof JsonCreator) {
+return true;
+}
+}
+}
+return false;
+}
 }
diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtilTest.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtilTest.java
index 323a037f310..3e2a8c3275b 100644
--- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtilTest.java
+++ b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtilTest.java
@@ -32,7 +32,6 @@ import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeContext;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeMetadata;
 import org.apache.flink.table.planner.plan.nodes.exec.InputProperty;
 import org.apache.flink.table.planner.plan.nodes.exec.MultipleExecNodeMetadata;
-import org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeUtil;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecNode;
 import org.apache.flink.table.types.logical.LogicalType;
 
@@ -239,7 +238,7 @@ class ExecNodeMetadataUtilTest {
 List<Class<? extends ExecNode<?>>> classesWithJsonCreatorInUnsupportedList =
 new ArrayList<>();
 for (Class<? extends ExecNode<?>> clazz : subClasses) {
-boolean hasJsonCreator = JsonSerdeUtil.hasJsonCreatorAnnotation(clazz);
+boolean hasJsonCreator = ExecNodeMetadataUtil.hasJsonCreatorAnnotation(clazz);
   

(flink) branch master updated (e622205d0b7 -> cf5bb80b5fd)

2024-02-22 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from e622205d0b7 [FLINK-34476][table-planner] Consider assignment operator during TVF column expansion
 add 6a938c9550d [hotfix][docs] Fix a bunch of broken references
 add cf5bb80b5fd [FLINK-34485] Add URI/Configuration constructor to DynamicTemporaryAWSCredentialsProvider

No new revisions were added by this update.

Summary of changes:
 docs/content.zh/docs/deployment/filesystems/s3.md  |  9 +++
 docs/content.zh/docs/deployment/overview.md|  2 +-
 .../resource-providers/standalone/overview.md  |  8 +++---
 docs/content/docs/deployment/filesystems/s3.md |  9 +++
 docs/content/docs/deployment/overview.md   |  2 +-
 .../DynamicTemporaryAWSCredentialsProvider.java|  7 +
 .../flink/fs/s3presto/PrestoS3FileSystemTest.java  | 30 ++
 7 files changed, 56 insertions(+), 11 deletions(-)
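
Background on the FLINK-34485 commit in this push: Hadoop-style S3 file systems instantiate credential providers reflectively and conventionally prefer a constructor taking the file system URI and the Hadoop Configuration, falling back to a no-arg constructor; a provider that offers neither signature can fail to load. A sketch of that loading convention, as an illustrative helper rather than the actual Hadoop or Flink code:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;

    final class CredentialProviderLoader {
        // Illustrative: try the (URI, Configuration) constructor first,
        // then fall back to the no-arg constructor.
        static Object instantiate(Class<?> providerClass, URI fsUri, Configuration conf)
                throws Exception {
            try {
                return providerClass
                        .getConstructor(URI.class, Configuration.class)
                        .newInstance(fsUri, conf);
            } catch (NoSuchMethodException e) {
                return providerClass.getConstructor().newInstance();
            }
        }
    }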



(flink) 03/03: [hotfix] Rename ProtobufSQLITCaseTest -> ProtobufSQLITCase

2024-02-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit e7ac9887f92c670d8584457c4b83948409ec67fe
Author: Chesnay Schepler 
AuthorDate: Mon Feb 12 11:50:39 2024 +0100

[hotfix] Rename ProtobufSQLITCaseTest -> ProtobufSQLITCase
---
 .../protobuf/{ProtobufSQLITCaseTest.java => ProtobufSQLITCase.java} | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java b/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCase.java
similarity index 99%
rename from flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
rename to flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCase.java
index ea869241ea2..422574115e8 100644
--- a/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
+++ b/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCase.java
@@ -40,7 +40,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
 /** Integration SQL test for protobuf. */
-public class ProtobufSQLITCaseTest extends BatchTestBase {
+public class ProtobufSQLITCase extends BatchTestBase {
 
 private MapTest getProtoTestObject() {
 MapTest.InnerMessageTest innerMessageTest =



(flink) 01/03: [FLINK-34422] Migrate BatchTestBase subclass to jUnit5

2024-02-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 994850d33a32f1ac27cee755f976b86208f911e3
Author: Chesnay Schepler 
AuthorDate: Sun Feb 11 16:06:59 2024 +0100

[FLINK-34422] Migrate BatchTestBase subclass to jUnit5

Usually this should've been done _before_ you ban jUnit4 annotations in 
BatchTestBase...
---
 .../java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git 
a/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
 
b/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
index 1cc1a200d3a..ea869241ea2 100644
--- 
a/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
+++ 
b/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
@@ -27,7 +27,7 @@ import 
org.apache.flink.table.planner.runtime.utils.BatchTestBase;
 import org.apache.flink.types.Row;
 import org.apache.flink.util.CloseableIterator;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.Map;
 import java.util.concurrent.ExecutionException;


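The mechanical part of such a migration is the import swap shown above; the subtler part is that jUnit 5 relaxes visibility rules and drops runner annotations. A minimal sketch with a hypothetical test class (not from the patch):

    import org.junit.jupiter.api.Test;

    import static org.assertj.core.api.Assertions.assertThat;

    // jUnit 5: class and methods may be package-private; no runner annotation needed
    class ExampleMigratedTest {

        @Test
        void addsNumbers() {
            assertThat(1 + 1).isEqualTo(2);
        }
    }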

(flink) branch release-1.19 updated (04d3b1b1423 -> e7ac9887f92)

2024-02-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


from 04d3b1b1423 [FLINK-33958] Fix IntervalJoin restore test flakiness
 new 994850d33a3 [FLINK-34422] Migrate BatchTestBase subclass to jUnit5
 new 3fcbe3df489 [FLINK-34422][test] BatchTestBase uses MiniClusterExtension
 new e7ac9887f92 [hotfix] Rename ProtobufSQLITCaseTest -> ProtobufSQLITCase

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 ...ufSQLITCaseTest.java => ProtobufSQLITCase.java} |  4 ++--
 .../batch/sql/CompactManagedTableITCase.java   |  6 +++--
 .../planner/runtime/utils/BatchTestBase.scala  | 28 ++
 3 files changed, 24 insertions(+), 14 deletions(-)
 rename 
flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/{ProtobufSQLITCaseTest.java
 => ProtobufSQLITCase.java} (99%)



(flink) 02/03: [FLINK-34422][test] BatchTestBase uses MiniClusterExtension

2024-02-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 3fcbe3df48904d10ae29a35800474b18af9e7172
Author: Chesnay Schepler 
AuthorDate: Sun Feb 11 01:38:45 2024 +0100

[FLINK-34422][test] BatchTestBase uses MiniClusterExtension
---
 .../batch/sql/CompactManagedTableITCase.java   |  6 +++--
 .../planner/runtime/utils/BatchTestBase.scala  | 28 ++
 2 files changed, 22 insertions(+), 12 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
index 4974b14feda..d4b3cbce27c 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
@@ -61,8 +61,7 @@ import static org.assertj.core.api.Assertions.fail;
 /** IT Case for testing managed table compaction. */
 class CompactManagedTableITCase extends BatchTestBase {
 
-private final ObjectIdentifier tableIdentifier =
-ObjectIdentifier.of(tEnv().getCurrentCatalog(), 
tEnv().getCurrentDatabase(), "MyTable");
+private ObjectIdentifier tableIdentifier;
 private final Map> collectedElements = 
new HashMap<>();
 
 private Path rootPath;
@@ -73,6 +72,9 @@ class CompactManagedTableITCase extends BatchTestBase {
 @BeforeEach
 public void before() throws Exception {
 super.before();
+tableIdentifier =
+ObjectIdentifier.of(
+tEnv().getCurrentCatalog(), 
tEnv().getCurrentDatabase(), "MyTable");
 MANAGED_TABLES.put(tableIdentifier, new AtomicReference<>());
 referenceOfManagedTableFileEntries = new AtomicReference<>();
 MANAGED_TABLE_FILE_ENTRIES.put(tableIdentifier, 
referenceOfManagedTableFileEntries);
diff --git 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
index fb5a9a058ca..cb509321f34 100644
--- 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
+++ 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
@@ -57,15 +57,11 @@ import org.junit.jupiter.api.{AfterEach, BeforeEach}
 class BatchTestBase extends BatchAbstractTestBase {
 
   protected var settings = 
EnvironmentSettings.newInstance().inBatchMode().build()
-  protected var testingTableEnv: TestingTableEnvironment = 
TestingTableEnvironment
-.create(settings, catalogManager = None, TableConfig.getDefault)
-  protected var tEnv: TableEnvironment = testingTableEnv
-  tEnv.getConfig.set(BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_ENABLED, 
Boolean.box(false))
-  protected var planner =
-
tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
-  protected var env: StreamExecutionEnvironment = planner.getExecEnv
-  env.getConfig.enableObjectReuse()
-  protected var tableConfig: TableConfig = tEnv.getConfig
+  protected var testingTableEnv: TestingTableEnvironment = _
+  protected var tEnv: TableEnvironment = _
+  protected var planner: PlannerBase = _
+  protected var env: StreamExecutionEnvironment = _
+  protected var tableConfig: TableConfig = _
 
   val LINE_COL_PATTERN: Pattern = Pattern.compile("At line ([0-9]+), column 
([0-9]+)")
   val LINE_COL_TWICE_PATTERN: Pattern = Pattern.compile(
@@ -74,10 +70,22 @@ class BatchTestBase extends BatchAbstractTestBase {
 
   @throws(classOf[Exception])
   @BeforeEach
-  def before(): Unit = {
+  def setupEnv(): Unit = {
+testingTableEnv = TestingTableEnvironment
+  .create(settings, catalogManager = None, TableConfig.getDefault)
+tEnv = testingTableEnv
+
tEnv.getConfig.set(BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_ENABLED, 
Boolean.box(false))
+planner = 
tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
+env = planner.getExecEnv
+env.getConfig.enableObjectReuse()
+tableConfig = tEnv.getConfig
 BatchTestBase.configForMiniCluster(tableConfig)
   }
 
+  @throws(classOf[Exception])
+  @BeforeEach
+  def before(): Unit = {}
+
   @AfterEach
   def after(): Unit = {
 TestValuesTableFactory.clearAllData()

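Why the field initialization moves: field initializers run at construction time, before any jUnit extension has started, so the MiniClusterExtension-managed cluster would not be up when the table environment is created. Deferring the setup to a @BeforeEach method fixes that ordering, and the now-empty before() remains as an override hook for subclasses. The registration pattern this relies on looks roughly like the sketch below; the actual wiring lives in BatchAbstractTestBase and may differ:

    import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
    import org.apache.flink.test.junit5.MiniClusterExtension;
    import org.junit.jupiter.api.extension.RegisterExtension;

    class ExampleMiniClusterTest {

        // started before each test method, and therefore before any @BeforeEach setup
        @RegisterExtension
        static final MiniClusterExtension MINI_CLUSTER =
                new MiniClusterExtension(
                        new MiniClusterResourceConfiguration.Builder()
                                .setNumberTaskManagers(1)
                                .setNumberSlotsPerTaskManager(4)
                                .build());
    }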


(flink) 03/03: [hotfix] Rename ProtobufSQLITCaseTest -> ProtobufSQLITCase

2024-02-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 9caa3bbb042901aabb84b33098c18af13e026a57
Author: Chesnay Schepler 
AuthorDate: Mon Feb 12 11:50:39 2024 +0100

[hotfix] Rename ProtobufSQLITCaseTest -> ProtobufSQLITCase
---
 .../protobuf/{ProtobufSQLITCaseTest.java => ProtobufSQLITCase.java} | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git 
a/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
 
b/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCase.java
similarity index 99%
rename from 
flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
rename to 
flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCase.java
index ea869241ea2..422574115e8 100644
--- 
a/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
+++ 
b/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCase.java
@@ -40,7 +40,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
 /** Integration SQL test for protobuf. */
-public class ProtobufSQLITCaseTest extends BatchTestBase {
+public class ProtobufSQLITCase extends BatchTestBase {
 
 private MapTest getProtoTestObject() {
 MapTest.InnerMessageTest innerMessageTest =



(flink) 02/03: [FLINK-34422][test] BatchTestBase uses MiniClusterExtension

2024-02-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 4c4643c3251c284260c96a2110f4b78c8a369723
Author: Chesnay Schepler 
AuthorDate: Sun Feb 11 01:38:45 2024 +0100

[FLINK-34422][test] BatchTestBase uses MiniClusterExtension
---
 .../batch/sql/CompactManagedTableITCase.java   |  6 +++--
 .../planner/runtime/utils/BatchTestBase.scala  | 28 ++
 2 files changed, 22 insertions(+), 12 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
index 4974b14feda..d4b3cbce27c 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
@@ -61,8 +61,7 @@ import static org.assertj.core.api.Assertions.fail;
 /** IT Case for testing managed table compaction. */
 class CompactManagedTableITCase extends BatchTestBase {
 
-private final ObjectIdentifier tableIdentifier =
-ObjectIdentifier.of(tEnv().getCurrentCatalog(), 
tEnv().getCurrentDatabase(), "MyTable");
+private ObjectIdentifier tableIdentifier;
 private final Map> collectedElements = 
new HashMap<>();
 
 private Path rootPath;
@@ -73,6 +72,9 @@ class CompactManagedTableITCase extends BatchTestBase {
 @BeforeEach
 public void before() throws Exception {
 super.before();
+tableIdentifier =
+ObjectIdentifier.of(
+tEnv().getCurrentCatalog(), 
tEnv().getCurrentDatabase(), "MyTable");
 MANAGED_TABLES.put(tableIdentifier, new AtomicReference<>());
 referenceOfManagedTableFileEntries = new AtomicReference<>();
 MANAGED_TABLE_FILE_ENTRIES.put(tableIdentifier, 
referenceOfManagedTableFileEntries);
diff --git 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
index fb5a9a058ca..cb509321f34 100644
--- 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
+++ 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
@@ -57,15 +57,11 @@ import org.junit.jupiter.api.{AfterEach, BeforeEach}
 class BatchTestBase extends BatchAbstractTestBase {
 
   protected var settings = 
EnvironmentSettings.newInstance().inBatchMode().build()
-  protected var testingTableEnv: TestingTableEnvironment = 
TestingTableEnvironment
-.create(settings, catalogManager = None, TableConfig.getDefault)
-  protected var tEnv: TableEnvironment = testingTableEnv
-  tEnv.getConfig.set(BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_ENABLED, 
Boolean.box(false))
-  protected var planner =
-
tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
-  protected var env: StreamExecutionEnvironment = planner.getExecEnv
-  env.getConfig.enableObjectReuse()
-  protected var tableConfig: TableConfig = tEnv.getConfig
+  protected var testingTableEnv: TestingTableEnvironment = _
+  protected var tEnv: TableEnvironment = _
+  protected var planner: PlannerBase = _
+  protected var env: StreamExecutionEnvironment = _
+  protected var tableConfig: TableConfig = _
 
   val LINE_COL_PATTERN: Pattern = Pattern.compile("At line ([0-9]+), column 
([0-9]+)")
   val LINE_COL_TWICE_PATTERN: Pattern = Pattern.compile(
@@ -74,10 +70,22 @@ class BatchTestBase extends BatchAbstractTestBase {
 
   @throws(classOf[Exception])
   @BeforeEach
-  def before(): Unit = {
+  def setupEnv(): Unit = {
+testingTableEnv = TestingTableEnvironment
+  .create(settings, catalogManager = None, TableConfig.getDefault)
+tEnv = testingTableEnv
+
tEnv.getConfig.set(BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_ENABLED, 
Boolean.box(false))
+planner = 
tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
+env = planner.getExecEnv
+env.getConfig.enableObjectReuse()
+tableConfig = tEnv.getConfig
 BatchTestBase.configForMiniCluster(tableConfig)
   }
 
+  @throws(classOf[Exception])
+  @BeforeEach
+  def before(): Unit = {}
+
   @AfterEach
   def after(): Unit = {
 TestValuesTableFactory.clearAllData()



(flink) branch master updated (25a604a3a94 -> 9caa3bbb042)

2024-02-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 25a604a3a94 [hotfix][build] Wire up spotless.skip property
 new 65727fb9438 [FLINK-34422] Migrate BatchTestBase subclass to jUnit5
 new 4c4643c3251 [FLINK-34422][test] BatchTestBase uses MiniClusterExtension
 new 9caa3bbb042 [hotfix] Rename ProtobufSQLITCaseTest -> ProtobufSQLITCase

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 ...ufSQLITCaseTest.java => ProtobufSQLITCase.java} |  4 ++--
 .../batch/sql/CompactManagedTableITCase.java   |  6 +++--
 .../planner/runtime/utils/BatchTestBase.scala  | 28 ++
 3 files changed, 24 insertions(+), 14 deletions(-)
 rename 
flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/{ProtobufSQLITCaseTest.java
 => ProtobufSQLITCase.java} (99%)



(flink) 01/03: [FLINK-34422] Migrate BatchTestBase subclass to jUnit5

2024-02-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 65727fb943807f1ff5345419ce389c5734df0cb4
Author: Chesnay Schepler 
AuthorDate: Sun Feb 11 16:06:59 2024 +0100

[FLINK-34422] Migrate BatchTestBase subclass to jUnit5

Usually this should've been done _before_ you ban jUnit4 annotations in 
BatchTestBase...
---
 .../java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git 
a/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
 
b/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
index 1cc1a200d3a..ea869241ea2 100644
--- 
a/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
+++ 
b/flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/ProtobufSQLITCaseTest.java
@@ -27,7 +27,7 @@ import 
org.apache.flink.table.planner.runtime.utils.BatchTestBase;
 import org.apache.flink.types.Row;
 import org.apache.flink.util.CloseableIterator;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.Map;
 import java.util.concurrent.ExecutionException;



(flink) branch release-1.18 updated: [FLINK-34422][test] BatchTestBase uses MiniClusterExtension

2024-02-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new d69393678ef [FLINK-34422][test] BatchTestBase uses MiniClusterExtension
d69393678ef is described below

commit d69393678efe7e26bd5168407a1c862cd4a0e148
Author: Chesnay Schepler 
AuthorDate: Sun Feb 11 01:38:45 2024 +0100

[FLINK-34422][test] BatchTestBase uses MiniClusterExtension
---
 .../batch/sql/CompactManagedTableITCase.java   |  6 +++--
 .../planner/runtime/utils/BatchTestBase.scala  | 30 ++
 2 files changed, 24 insertions(+), 12 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
index e5b263652a6..05c1b87654b 100644
--- 
a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
+++ 
b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
@@ -61,8 +61,7 @@ import static org.assertj.core.api.Assertions.fail;
 /** IT Case for testing managed table compaction. */
 public class CompactManagedTableITCase extends BatchTestBase {
 
-private final ObjectIdentifier tableIdentifier =
-ObjectIdentifier.of(tEnv().getCurrentCatalog(), 
tEnv().getCurrentDatabase(), "MyTable");
+private ObjectIdentifier tableIdentifier;
 private final Map> collectedElements = 
new HashMap<>();
 
 private Path rootPath;
@@ -73,6 +72,9 @@ public class CompactManagedTableITCase extends BatchTestBase {
 @Before
 public void before() throws Exception {
 super.before();
+tableIdentifier =
+ObjectIdentifier.of(
+tEnv().getCurrentCatalog(), 
tEnv().getCurrentDatabase(), "MyTable");
 MANAGED_TABLES.put(tableIdentifier, new AtomicReference<>());
 referenceOfManagedTableFileEntries = new AtomicReference<>();
 MANAGED_TABLE_FILE_ENTRIES.put(tableIdentifier, 
referenceOfManagedTableFileEntries);
diff --git 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
index 0645473e57a..9496940db7a 100644
--- 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
+++ 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
@@ -53,20 +53,17 @@ import org.apache.calcite.runtime.CalciteContextException
 import org.apache.calcite.sql.SqlExplainLevel
 import org.apache.calcite.sql.parser.SqlParseException
 import org.assertj.core.api.Assertions.fail
+import org.junit.Before
 import org.junit.jupiter.api.{AfterEach, BeforeEach}
 
 class BatchTestBase extends BatchAbstractTestBase {
 
   protected var settings = 
EnvironmentSettings.newInstance().inBatchMode().build()
-  protected var testingTableEnv: TestingTableEnvironment = 
TestingTableEnvironment
-.create(settings, catalogManager = None, TableConfig.getDefault)
-  protected var tEnv: TableEnvironment = testingTableEnv
-  tEnv.getConfig.set(BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_ENABLED, 
Boolean.box(false))
-  protected var planner =
-
tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
-  protected var env: StreamExecutionEnvironment = planner.getExecEnv
-  env.getConfig.enableObjectReuse()
-  protected var tableConfig: TableConfig = tEnv.getConfig
+  protected var testingTableEnv: TestingTableEnvironment = _
+  protected var tEnv: TableEnvironment = _
+  protected var planner: PlannerBase = _
+  protected var env: StreamExecutionEnvironment = _
+  protected var tableConfig: TableConfig = _
 
   val LINE_COL_PATTERN: Pattern = Pattern.compile("At line ([0-9]+), column 
([0-9]+)")
   val LINE_COL_TWICE_PATTERN: Pattern = Pattern.compile(
@@ -75,10 +72,23 @@ class BatchTestBase extends BatchAbstractTestBase {
 
   @throws(classOf[Exception])
   @BeforeEach
-  def before(): Unit = {
+  @Before
+  def setupEnv(): Unit = {
+testingTableEnv = TestingTableEnvironment
+  .create(settings, catalogManager = None, TableConfig.getDefault)
+tEnv = testingTableEnv
+
tEnv.getConfig.set(BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_ENABLED, 
Boolean.box(false))
+planner = 
tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
+env = planner.getExecEnv
+env.getConfig.enableObjectReuse()
+tableConfig = tEn

(flink) branch master updated (1fbf92dfc9e -> 25a604a3a94)

2024-02-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 1fbf92dfc9e [FLINK-33958] Fix IntervalJoin restore test flakiness
 add 25a604a3a94 [hotfix][build] Wire up spotless.skip property

No new revisions were added by this update.

Summary of changes:
 pom.xml | 3 +++
 1 file changed, 3 insertions(+)



(flink) branch master updated (5277f653ad3 -> 6f4d31f1b79)

2024-02-11 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 5277f653ad3 [FLINK-34410] Correct hadoop.tar.gz download url
 add 6f4d31f1b79 [hotfix][sql-client] Improve logging using placeholders

No new revisions were added by this update.

Summary of changes:
 .../main/java/org/apache/flink/table/client/cli/SqlCompleter.java  | 2 +-
 .../org/apache/flink/table/client/util/ClientClassloaderUtil.java  | 7 +++
 2 files changed, 4 insertions(+), 5 deletions(-)



(flink) branch release-1.19 updated: [FLINK-34420] Correct hadoop.tar.gz download url

2024-02-11 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.19 by this push:
 new 296f140b62f [FLINK-34420] Correct hadoop.tar.gz download url
296f140b62f is described below

commit 296f140b62f73d5547ff710d439dd8068b01520d
Author: Roman 
AuthorDate: Sun Feb 11 13:32:44 2024 +0100

[FLINK-34420] Correct hadoop.tar.gz download url
---
 flink-end-to-end-tests/test-scripts/common_yarn_docker.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh 
b/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh
index ec582c05f88..299c2018684 100755
--- a/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh
+++ b/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh
@@ -99,7 +99,7 @@ function start_hadoop_cluster() {
 function build_image() {
 echo "Pre-downloading Hadoop tarball"
 local cache_path
-cache_path=$(get_artifact "http://archive.apache.org/dist/hadoop/common/hadoop-2.10.2/hadoop-2.10.2.tar.gz")
+cache_path=$(get_artifact "https://archive.apache.org/dist/hadoop/common/hadoop-2.10.2/hadoop-2.10.2.tar.gz")
 ln "${cache_path}" 
"${END_TO_END_DIR}/test-scripts/docker-hadoop-secure-cluster/hadoop/hadoop.tar.gz"
 
 echo "Building Hadoop Docker container"



(flink) branch master updated: [FLINK-34410] Correct hadoop.tar.gz download url

2024-02-11 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 5277f653ad3 [FLINK-34410] Correct hadoop.tar.gz download url
5277f653ad3 is described below

commit 5277f653ad325649d07072ad255f0e20c41f1b42
Author: Roman Khachatryan 
AuthorDate: Fri Feb 9 21:35:41 2024 +0100

[FLINK-34410] Correct hadoop.tar.gz download url
---
 flink-end-to-end-tests/test-scripts/common_yarn_docker.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh 
b/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh
index ec582c05f88..299c2018684 100755
--- a/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh
+++ b/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh
@@ -99,7 +99,7 @@ function start_hadoop_cluster() {
 function build_image() {
 echo "Pre-downloading Hadoop tarball"
 local cache_path
-cache_path=$(get_artifact "http://archive.apache.org/dist/hadoop/common/hadoop-2.10.2/hadoop-2.10.2.tar.gz")
+cache_path=$(get_artifact "https://archive.apache.org/dist/hadoop/common/hadoop-2.10.2/hadoop-2.10.2.tar.gz")
 ln "${cache_path}" 
"${END_TO_END_DIR}/test-scripts/docker-hadoop-secure-cluster/hadoop/hadoop.tar.gz"
 
 echo "Building Hadoop Docker container"



(flink) branch release-1.18 updated: [FLINK-34420] Correct hadoop.tar.gz download url

2024-02-11 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new 8d162fc2b68 [FLINK-34420] Correct hadoop.tar.gz download url
8d162fc2b68 is described below

commit 8d162fc2b68fbeca31b9d4b73dfee188b356bba5
Author: Roman 
AuthorDate: Sun Feb 11 13:32:07 2024 +0100

[FLINK-34420] Correct hadoop.tar.gz download url
---
 flink-end-to-end-tests/test-scripts/common_yarn_docker.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh 
b/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh
index 97fcca09d5a..6b760f9f36f 100755
--- a/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh
+++ b/flink-end-to-end-tests/test-scripts/common_yarn_docker.sh
@@ -99,7 +99,7 @@ function start_hadoop_cluster() {
 function build_image() {
 echo "Pre-downloading Hadoop tarball"
 local cache_path
-cache_path=$(get_artifact "http://archive.apache.org/dist/hadoop/common/hadoop-2.10.2/hadoop-2.10.2.tar.gz")
+cache_path=$(get_artifact "https://archive.apache.org/dist/hadoop/common/hadoop-2.10.2/hadoop-2.10.2.tar.gz")
 ln "${cache_path}" 
"${END_TO_END_DIR}/test-scripts/docker-hadoop-secure-cluster/hadoop/hadoop.tar.gz"
 
 echo "Building Hadoop Docker container"



(flink) branch release-1.17 updated: [FLINK-34343][rpc] Use actor path when rejecting early messages

2024-02-05 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.17
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.17 by this push:
 new 3f22b6363e6 [FLINK-34343][rpc] Use actor path when rejecting early 
messages
3f22b6363e6 is described below

commit 3f22b6363e6cad4352821f42907ec8a2a181e675
Author: Chesnay Schepler 
AuthorDate: Sat Feb 3 11:20:08 2024 +0100

[FLINK-34343][rpc] Use actor path when rejecting early messages
---
 .../src/main/java/org/apache/flink/runtime/rpc/akka/AkkaRpcActor.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git 
a/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/akka/AkkaRpcActor.java
 
b/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/akka/AkkaRpcActor.java
index 8029956bbd9..59d30e16564 100644
--- 
a/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/akka/AkkaRpcActor.java
+++ 
b/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/akka/AkkaRpcActor.java
@@ -179,7 +179,7 @@ class AkkaRpcActor 
extends AbstractActor {
 new EndpointNotStartedException(
 String.format(
 "Discard message %s, because the rpc 
endpoint %s has not been started yet.",
-message, rpcEndpoint.getAddress())));
+message, getSelf().path())));
 }
 }
 



(flink) branch release-1.18 updated: [FLINK-34343][rpc] Use actor path when rejecting early messages

2024-02-05 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new ee9945cd785 [FLINK-34343][rpc] Use actor path when rejecting early 
messages
ee9945cd785 is described below

commit ee9945cd785577d0f68092823b71abbb53d127f8
Author: Chesnay Schepler 
AuthorDate: Sat Feb 3 11:20:08 2024 +0100

[FLINK-34343][rpc] Use actor path when rejecting early messages
---
 .../src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git 
a/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java
 
b/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java
index f3724fba0e5..dc4e342f35a 100644
--- 
a/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java
+++ 
b/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java
@@ -179,7 +179,7 @@ class PekkoRpcActor 
extends AbstractActor {
 new EndpointNotStartedException(
 String.format(
 "Discard message %s, because the rpc 
endpoint %s has not been started yet.",
-message, rpcEndpoint.getAddress())));
+message, getSelf().path())));
 }
 }
 



(flink) branch master updated: [FLINK-34343][rpc] Use actor path when rejecting early messages

2024-02-05 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 839f298c383 [FLINK-34343][rpc] Use actor path when rejecting early 
messages
839f298c383 is described below

commit 839f298c3838f2f4981e271554b82fae770747d8
Author: Chesnay Schepler 
AuthorDate: Sat Feb 3 11:20:08 2024 +0100

[FLINK-34343][rpc] Use actor path when rejecting early messages
---
 .../src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git 
a/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java
 
b/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java
index f3724fba0e5..dc4e342f35a 100644
--- 
a/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java
+++ 
b/flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java
@@ -179,7 +179,7 @@ class PekkoRpcActor 
extends AbstractActor {
 new EndpointNotStartedException(
 String.format(
 "Discard message %s, because the rpc 
endpoint %s has not been started yet.",
-message, rpcEndpoint.getAddress())));
+message, getSelf().path())));
 }
 }
 

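The point of the change across all three branches above: the endpoint address may not be fully initialized before the endpoint has started, which is exactly when these messages are rejected, whereas the actor's own path is always available once the actor exists. A sketch of the pattern with a hypothetical actor (Pekko Java API, as used in the master/1.18 diffs; not the actual PekkoRpcActor code):

    import org.apache.pekko.actor.AbstractActor;

    class ExampleEarlyMessageActor extends AbstractActor {

        private boolean started = false;

        @Override
        public Receive createReceive() {
            return receiveBuilder()
                    .matchAny(
                            message -> {
                                if (!started) {
                                    // the actor path is valid even before the endpoint starts
                                    getSender()
                                            .tell(
                                                    String.format(
                                                            "Discard message %s, because the rpc endpoint %s has not been started yet.",
                                                            message, getSelf().path()),
                                                    getSelf());
                                }
                            })
                    .build();
        }
    }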


(flink) branch master updated (0ba3e76b147 -> b5a2ee4e988)

2024-01-31 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 0ba3e76b147 [docs] Fix typo in ide_setup.md (#20877)
 add b5a2ee4e988 [FLINK-34286][k8s] Attach cluster config map labels at 
creation time

No new revisions were added by this update.

Summary of changes:
 .../KubernetesLeaderElectionDriver.java| 10 --
 .../resources/KubernetesLeaderElector.java | 16 +++--
 .../KubernetesLeaderElectionDriverTest.java| 23 
 .../resources/KubernetesLeaderElectorITCase.java   | 42 ++
 4 files changed, 56 insertions(+), 35 deletions(-)



(flink) branch master updated: [FLINK-34097] Remove JobMasterGateway#requestJobDetails

2024-01-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 1ffb48f [FLINK-34097] Remove JobMasterGateway#requestJobDetails
1ffb48f is described below

commit 1ffb48f658b699702357921a48e914d13caf
Author: Chesnay Schepler 
AuthorDate: Mon Jan 15 16:20:55 2024 +0100

[FLINK-34097] Remove JobMasterGateway#requestJobDetails
---
 .../java/org/apache/flink/runtime/jobmaster/JobMaster.java |  6 --
 .../apache/flink/runtime/jobmaster/JobMasterGateway.java   |  9 -
 .../org/apache/flink/runtime/scheduler/SchedulerBase.java  |  7 ---
 .../org/apache/flink/runtime/scheduler/SchedulerNG.java|  3 ---
 .../runtime/scheduler/adaptive/AdaptiveScheduler.java  |  6 --
 .../runtime/jobmaster/utils/TestingJobMasterGateway.java   | 14 --
 .../jobmaster/utils/TestingJobMasterGatewayBuilder.java| 12 ++--
 .../apache/flink/runtime/scheduler/TestingSchedulerNG.java |  7 ---
 8 files changed, 10 insertions(+), 54 deletions(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMaster.java 
b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMaster.java
index bfea710db66..59455b787a6 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMaster.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMaster.java
@@ -71,7 +71,6 @@ import 
org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
 import org.apache.flink.runtime.messages.Acknowledge;
 import org.apache.flink.runtime.messages.FlinkJobNotFoundException;
 import org.apache.flink.runtime.messages.checkpoint.DeclineCheckpoint;
-import org.apache.flink.runtime.messages.webmonitor.JobDetails;
 import org.apache.flink.runtime.metrics.groups.JobManagerJobMetricGroup;
 import org.apache.flink.runtime.operators.coordination.CoordinationRequest;
 import org.apache.flink.runtime.operators.coordination.CoordinationResponse;
@@ -861,11 +860,6 @@ public class JobMaster extends FencedRpcEndpoint<JobMasterId>
 return resourceManagerHeartbeatManager.requestHeartbeat(resourceID, null);
 }
 
-@Override
-public CompletableFuture<JobDetails> requestJobDetails(Time timeout) {
-return CompletableFuture.completedFuture(schedulerNG.requestJobDetails());
-}
-
 @Override
 public CompletableFuture<JobStatus> requestJobStatus(Time timeout) {
 return CompletableFuture.completedFuture(schedulerNG.requestJobStatus());
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMasterGateway.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMasterGateway.java
index 6c1b79568a8..02c3c7d501a 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMasterGateway.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMasterGateway.java
@@ -39,7 +39,6 @@ import 
org.apache.flink.runtime.jobgraph.JobResourceRequirements;
 import org.apache.flink.runtime.jobgraph.JobVertexID;
 import org.apache.flink.runtime.jobgraph.OperatorID;
 import org.apache.flink.runtime.messages.Acknowledge;
-import org.apache.flink.runtime.messages.webmonitor.JobDetails;
 import org.apache.flink.runtime.operators.coordination.CoordinationRequest;
 import org.apache.flink.runtime.operators.coordination.CoordinationResponse;
 import org.apache.flink.runtime.registration.RegistrationResponse;
@@ -183,14 +182,6 @@ public interface JobMasterGateway
  */
 CompletableFuture<Void> heartbeatFromResourceManager(final ResourceID resourceID);
 
-/**
- * Request the details of the executed job.
- *
- * @param timeout for the rpc call
- * @return Future details of the executed job
- */
-CompletableFuture<JobDetails> requestJobDetails(@RpcTimeout Time timeout);
-
 /**
  * Requests the current job status.
  *
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/SchedulerBase.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/SchedulerBase.java
index c11d7b2ca86..7f4ba383e43 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/SchedulerBase.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/SchedulerBase.java
@@ -78,7 +78,6 @@ import 
org.apache.flink.runtime.jobmanager.PartitionProducerDisposedException;
 import org.apache.flink.runtime.jobmaster.SerializedInputSplit;
 import org.apache.flink.runtime.messages.FlinkJobNotFoundException;
 import org.apache.flink.runtime.messages.checkpoint.DeclineCheckpoint;
-import org.apache.flink.runtime.messages.webmonitor.JobDetails;
 import org.apache.flink.runtime.metrics.MetricNames;
 import org.apache.flink.runtime.metrics.groups.JobManagerJobMetricGroup;
 import

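With requestJobDetails removed, requestJobStatus (visible in the surviving context above) covers the common caller-side case. A hedged sketch against the gateway signature kept by this change; the helper class, method name, and timeout value are assumptions for illustration:

    import java.util.concurrent.CompletableFuture;

    import org.apache.flink.api.common.JobStatus;
    import org.apache.flink.api.common.time.Time;
    import org.apache.flink.runtime.jobmaster.JobMasterGateway;

    final class JobStatusQuery {
        // sketch: query the remaining status RPC instead of the removed details RPC
        static CompletableFuture<JobStatus> queryStatus(JobMasterGateway gateway) {
            return gateway.requestJobStatus(Time.seconds(10));
        }
    }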
(flink) 01/02: [FLINK-28229][connectors] Introduce FLIP-27 alternative to StreamExecutionEnvironment#fromCollection()

2023-12-05 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit e44efbff8070dca3489550fdeadc5e1ce31e68c1
Author: Alexander Fedulov <1492164+afedu...@users.noreply.github.com>
AuthorDate: Fri Oct 20 00:58:03 2023 +0200

[FLINK-28229][connectors] Introduce FLIP-27 alternative to 
StreamExecutionEnvironment#fromCollection()
---
 .../datagen/source/DataGeneratorSource.java|  3 +-
 .../environment/StreamExecutionEnvironment.java| 85 --
 pom.xml|  2 +
 3 files changed, 67 insertions(+), 23 deletions(-)

diff --git 
a/flink-connectors/flink-connector-datagen/src/main/java/org/apache/flink/connector/datagen/source/DataGeneratorSource.java
 
b/flink-connectors/flink-connector-datagen/src/main/java/org/apache/flink/connector/datagen/source/DataGeneratorSource.java
index a344eb635ad..3d2416c1e16 100644
--- 
a/flink-connectors/flink-connector-datagen/src/main/java/org/apache/flink/connector/datagen/source/DataGeneratorSource.java
+++ 
b/flink-connectors/flink-connector-datagen/src/main/java/org/apache/flink/connector/datagen/source/DataGeneratorSource.java
@@ -150,7 +150,8 @@ public class DataGeneratorSource
 this.sourceReaderFactory = checkNotNull(sourceReaderFactory);
 this.generatorFunction = checkNotNull(generatorFunction);
 this.typeInfo = checkNotNull(typeInfo);
-this.numberSource = new NumberSequenceSource(0, count - 1);
+long to = count > 0 ? count - 1 : 0; // a noop source (0 elements) is used in Table tests
+this.numberSource = new NumberSequenceSource(0, to);
 ClosureCleaner.clean(
 generatorFunction, 
ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
 ClosureCleaner.clean(
diff --git 
a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironment.java
 
b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironment.java
index 9069b3a0d3c..18dc49d3895 100644
--- 
a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironment.java
+++ 
b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironment.java
@@ -1222,7 +1222,23 @@ public class StreamExecutionEnvironment implements 
AutoCloseable {
 return fromData(Arrays.asList(data), typeInfo);
 }
 
-private <OUT> DataStreamSource<OUT> fromData(
+/**
+ * Creates a new data stream that contains the given elements. The 
elements must all be of the
+ * same type, for example, all of the {@link String} or {@link Integer}.
+ *
+ * The framework will try and determine the exact type from the 
elements. In case of generic
+ * elements, it may be necessary to manually supply the type information 
via {@link
+ * #fromData(org.apache.flink.api.common.typeinfo.TypeInformation, 
OUT...)}.
+ *
+ * NOTE: This creates a non-parallel data stream source by default 
(parallelism of one).
+ * Adjustment of parallelism is supported via {@code setParallelism()} on 
the result.
+ *
+ * @param data The collection of elements to create the data stream from.
+ * @param typeInfo The type information of the elements.
+ * @param <OUT> The generic type of the returned data stream.
+ * @return The data stream representing the given collection
+ */
+public <OUT> DataStreamSource<OUT> fromData(
 Collection<OUT> data, TypeInformation<OUT> typeInfo) {
 Preconditions.checkNotNull(data, "Collection must not be null");
 
@@ -1273,6 +1289,51 @@ public class StreamExecutionEnvironment implements 
AutoCloseable {
 return fromData(Arrays.asList(data), typeInfo);
 }
 
+/**
+ * Creates a new data stream that contains the given elements. The type of the data stream is
+ * that of the elements in the collection.
+ *
+ * The framework will try and determine the exact type from the 
collection elements. In case
+ * of generic elements, it may be necessary to manually supply the type 
information via {@link
+ * #fromData(java.util.Collection, 
org.apache.flink.api.common.typeinfo.TypeInformation)}.
+ *
+ * NOTE: This creates a non-parallel data stream source by default 
(parallelism of one).
+ * Adjustment of parallelism is supported via {@code setParallelism()} on 
the result.
+ *
+ * @param data The collection of elements to create the data stream from.
+ * @param <OUT> The generic type of the returned data stream.
+ * @return The data stream representing the given collection
+ */
+public <OUT> DataStreamSource<OUT> fromData(Collection<OUT> data) {
+TypeInformation<OUT> typeInfo = extractTypeInfoFromCollection(data);
+return fromData(data, typeInfo);
+}
+
+private static <OUT> TypeInformation<OUT> extractTyp

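Per the javadoc added above, the new fromData variants mirror the old collection-based factory methods. A usage sketch (not from the patch; the class and method names are placeholders):

    import java.util.Arrays;

    import org.apache.flink.api.common.typeinfo.Types;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    final class FromDataExample {
        static void run() throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            // element type inferred from the collection
            env.fromData(Arrays.asList(1, 2, 3)).print();
            // generic elements: supply the type information explicitly
            env.fromData(Arrays.asList("a", "b"), Types.STRING).print();
            env.execute("fromData example");
        }
    }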
(flink) branch master updated (bc6c2cec37c -> 18c03f2e6c5)

2023-12-05 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from bc6c2cec37c [FLINK-33663] Serialize CallExpressions into SQL (#23811)
 new e44efbff807 [FLINK-28229][connectors] Introduce FLIP-27 alternative to 
StreamExecutionEnvironment#fromCollection()
 new 18c03f2e6c5 [FLINK-28229][connectors] Deprecate 
StreamExecutionEnvironment#fromCollection()

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../program/StreamContextEnvironmentTest.java  |  16 ++--
 .../client/testjar/ForbidConfigurationJob.java |   2 +-
 .../datagen/source/DataGeneratorSource.java|   3 +-
 .../flink/streaming/examples/join/WindowJoin.java  |  16 ++--
 .../examples/join/WindowJoinSampleData.java|  75 +---
 .../examples/java/basics/StreamSQLExample.java |   4 +-
 .../flink/formats/protobuf/ProtobufTestHelper.java |   2 +-
 .../apache/flink/state/api/SavepointWriter.java|   4 +-
 .../flink/state/api/SavepointWriterITCase.java |  12 +--
 .../api/SavepointWriterUidModificationITCase.java  |   4 +-
 .../state/api/SavepointWriterWindowITCase.java |   8 +-
 .../flink/state/api/WritableSavepointITCase.java   |   8 +-
 .../flink/python/util/PythonConfigUtilTest.java|   2 +-
 .../environment/StreamExecutionEnvironment.java| 100 -
 .../StreamExecutionEnvironmentTest.java|  57 +---
 .../ExecutorDiscoveryAndJobClientTest.java |   2 +-
 .../MultipleInputNodeCreationProcessorTest.java|  14 ++-
 .../runtime/stream/sql/DataStreamJavaITCase.java   |   6 +-
 .../stream/table/TimeAttributesITCase.scala|  15 +++-
 .../expressions/utils/ExpressionTestBase.scala |   2 +-
 .../flink/table/planner/utils/TableTestBase.scala  |   4 +-
 .../table/planner/utils/testTableSourceSinks.scala |  10 +--
 .../multipleinput/MultipleInputTestBase.java   |   2 +-
 .../testframe/testsuites/SinkTestSuiteBase.java|   2 +-
 .../test/accumulators/AccumulatorLiveITCase.java   |   2 +-
 .../streaming/runtime/BroadcastStateITCase.java|   4 +-
 .../streaming/runtime/DataStreamPojoITCase.java|   8 +-
 .../test/streaming/runtime/IterateITCase.java  |   4 +-
 .../streaming/runtime/LatencyMarkerITCase.java |  10 +--
 .../test/streaming/runtime/PartitionerITCase.java  |   2 +-
 .../test/streaming/runtime/SideOutputITCase.java   |  46 +-
 .../flink/test/streaming/runtime/SinkITCase.java   |   6 +-
 .../flink/test/streaming/runtime/SinkV2ITCase.java |   2 +-
 pom.xml|   2 +
 tools/maven/checkstyle.xml |   2 +-
 35 files changed, 241 insertions(+), 217 deletions(-)



(flink) 02/02: [FLINK-28229][connectors] Deprecate StreamExecutionEnvironment#fromCollection()

2023-12-05 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 18c03f2e6c593a772f64cdb5c089e2911d3cbc89
Author: Alexander Fedulov <1492164+afedu...@users.noreply.github.com>
AuthorDate: Mon Nov 27 23:29:27 2023 +0100

[FLINK-28229][connectors] Deprecate 
StreamExecutionEnvironment#fromCollection()
---
 .../program/StreamContextEnvironmentTest.java  | 16 ++---
 .../client/testjar/ForbidConfigurationJob.java |  2 +-
 .../flink/streaming/examples/join/WindowJoin.java  | 16 +++--
 .../examples/join/WindowJoinSampleData.java| 75 ++
 .../examples/java/basics/StreamSQLExample.java |  4 +-
 .../flink/formats/protobuf/ProtobufTestHelper.java |  2 +-
 .../apache/flink/state/api/SavepointWriter.java|  4 +-
 .../flink/state/api/SavepointWriterITCase.java | 12 ++--
 .../api/SavepointWriterUidModificationITCase.java  |  4 +-
 .../state/api/SavepointWriterWindowITCase.java |  8 +--
 .../flink/state/api/WritableSavepointITCase.java   |  8 +--
 .../flink/python/util/PythonConfigUtilTest.java|  2 +-
 .../environment/StreamExecutionEnvironment.java| 15 +
 .../StreamExecutionEnvironmentTest.java| 57 ++--
 .../ExecutorDiscoveryAndJobClientTest.java |  2 +-
 .../MultipleInputNodeCreationProcessorTest.java| 14 +++-
 .../runtime/stream/sql/DataStreamJavaITCase.java   |  6 +-
 .../stream/table/TimeAttributesITCase.scala| 15 +++--
 .../expressions/utils/ExpressionTestBase.scala |  2 +-
 .../flink/table/planner/utils/TableTestBase.scala  |  4 +-
 .../table/planner/utils/testTableSourceSinks.scala | 10 +--
 .../multipleinput/MultipleInputTestBase.java   |  2 +-
 .../testframe/testsuites/SinkTestSuiteBase.java|  2 +-
 .../test/accumulators/AccumulatorLiveITCase.java   |  2 +-
 .../streaming/runtime/BroadcastStateITCase.java|  4 +-
 .../streaming/runtime/DataStreamPojoITCase.java|  8 +--
 .../test/streaming/runtime/IterateITCase.java  |  4 +-
 .../streaming/runtime/LatencyMarkerITCase.java | 10 +--
 .../test/streaming/runtime/PartitionerITCase.java  |  2 +-
 .../test/streaming/runtime/SideOutputITCase.java   | 46 ++---
 .../flink/test/streaming/runtime/SinkITCase.java   |  6 +-
 .../flink/test/streaming/runtime/SinkV2ITCase.java |  2 +-
 tools/maven/checkstyle.xml |  2 +-
 33 files changed, 174 insertions(+), 194 deletions(-)

diff --git 
a/flink-clients/src/test/java/org/apache/flink/client/program/StreamContextEnvironmentTest.java
 
b/flink-clients/src/test/java/org/apache/flink/client/program/StreamContextEnvironmentTest.java
index 1af115a402a..ac157b04f93 100644
--- 
a/flink-clients/src/test/java/org/apache/flink/client/program/StreamContextEnvironmentTest.java
+++ 
b/flink-clients/src/test/java/org/apache/flink/client/program/StreamContextEnvironmentTest.java
@@ -78,7 +78,7 @@ class StreamContextEnvironmentTest {
 // Add/mutate values in the configuration
 environment.configure(programConfig);
 
-environment.fromCollection(Collections.singleton(1)).sinkTo(new 
DiscardingSink<>());
+environment.fromData(Collections.singleton(1)).sinkTo(new 
DiscardingSink<>());
 assertThatThrownBy(() -> executor.accept(environment))
 .isInstanceOf(MutatedConfigurationException.class)
 .hasMessageContainingAll(
@@ -106,7 +106,7 @@ class StreamContextEnvironmentTest {
 // Change the CheckpointConfig
 environment.getCheckpointConfig().setCheckpointStorage(disallowedPath);
 
-environment.fromCollection(Collections.singleton(1)).sinkTo(new 
DiscardingSink<>());
+environment.fromData(Collections.singleton(1)).sinkTo(new 
DiscardingSink<>());
 assertThatThrownBy(() -> executor.accept(environment))
 .isInstanceOf(MutatedConfigurationException.class)
 .hasMessageContainingAll(
@@ -114,7 +114,7 @@ class StreamContextEnvironmentTest {
 
 environment.getCheckpointConfig().setCheckpointStorage(new 
JobManagerCheckpointStorage());
 
-environment.fromCollection(Collections.singleton(1)).sinkTo(new 
DiscardingSink<>());
+environment.fromData(Collections.singleton(1)).sinkTo(new 
DiscardingSink<>());
 assertThatThrownBy(() -> executor.accept(environment))
 .isInstanceOf(MutatedConfigurationException.class)
 .hasMessageContainingAll(
@@ -143,7 +143,7 @@ class StreamContextEnvironmentTest {
 false,
 Collections.emptyList());
 
-environment.fromCollection(Collections.singleton(1)).sinkTo(new 
DiscardingSink<>());
+environment.fromData(Collections.singleton(1)).sinkTo(new 
DiscardingSink<>());
 assertThatThrownBy(() 

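As the test updates above show, the migration is a drop-in rename of the factory method; for example:

    // before (now deprecated)
    environment.fromCollection(Collections.singleton(1)).sinkTo(new DiscardingSink<>());
    // after
    environment.fromData(Collections.singleton(1)).sinkTo(new DiscardingSink<>());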
(flink) 01/04: [FLINK-28050][connectors] Move DataGeneratorSource tests into a separate module

2023-11-27 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 68437b937a60c647abe1a4104289849c006b8fe7
Author: Alexander Fedulov <1492164+afedu...@users.noreply.github.com>
AuthorDate: Mon Oct 16 23:39:51 2023 +0200

[FLINK-28050][connectors] Move DataGeneratorSource tests into a separate 
module

This is required to avoid circular dependency that arises when 
DataGeneratorSource gets used in flink-streaming-java
---
 .../pom.xml| 18 +++-
 .../architecture/TestCodeArchitectureTest.java |  0
 .../datagen/source/DataGeneratorSourceITCase.java  |  0
 .../datagen/source/DataGeneratorSourceTest.java|  0
 .../src/test/resources/archunit.properties |  0
 .../src/test/resources/log4j2-test.properties  |  0
 .../0fbe3123-5829-4891-93a5-a99bd8413fd9   |  0
 .../5a661a23-5b47-407c-9994-b6215a46c45c   |  0
 .../archunit-violations/stored.rules   |  4 ---
 flink-connectors/flink-connector-datagen/pom.xml   | 33 --
 flink-connectors/pom.xml   |  1 +
 11 files changed, 18 insertions(+), 38 deletions(-)

diff --git a/flink-connectors/flink-connector-datagen/pom.xml 
b/flink-connectors/flink-connector-datagen-test/pom.xml
similarity index 83%
copy from flink-connectors/flink-connector-datagen/pom.xml
copy to flink-connectors/flink-connector-datagen-test/pom.xml
index 8063058aca4..5b89683ba20 100644
--- a/flink-connectors/flink-connector-datagen/pom.xml
+++ b/flink-connectors/flink-connector-datagen-test/pom.xml
@@ -28,7 +28,7 @@
 	<version>1.19-SNAPSHOT</version>
 	</parent>
 
-	<artifactId>flink-connector-datagen</artifactId>
+	<artifactId>flink-connector-datagen-tests</artifactId>
 	<name>Flink : Connectors : Datagen</name>
 
 	<packaging>jar</packaging>
@@ -65,6 +65,11 @@
test-jar

 
+	<dependency>
+		<groupId>org.junit.jupiter</groupId>
+		<artifactId>junit-jupiter</artifactId>
+		<scope>test</scope>
+	</dependency>
 

 
@@ -73,6 +78,17 @@
flink-architecture-tests-test
test

+
+	<dependency>
+		<groupId>org.apache.flink</groupId>
+		<artifactId>flink-connector-datagen</artifactId>
+		<version>${project.version}</version>
+	</dependency>
+
+	<dependency>
+		<groupId>org.apache.flink</groupId>
+		<artifactId>flink-test-utils-junit</artifactId>
+	</dependency>

 
 
diff --git 
a/flink-connectors/flink-connector-datagen/src/test/java/org/apache/flink/architecture/TestCodeArchitectureTest.java
 
b/flink-connectors/flink-connector-datagen-test/src/test/java/org/apache/flink/architecture/TestCodeArchitectureTest.java
similarity index 100%
rename from 
flink-connectors/flink-connector-datagen/src/test/java/org/apache/flink/architecture/TestCodeArchitectureTest.java
rename to 
flink-connectors/flink-connector-datagen-test/src/test/java/org/apache/flink/architecture/TestCodeArchitectureTest.java
diff --git 
a/flink-connectors/flink-connector-datagen/src/test/java/org/apache/flink/connector/datagen/source/DataGeneratorSourceITCase.java
 
b/flink-connectors/flink-connector-datagen-test/src/test/java/org/apache/flink/connector/datagen/source/DataGeneratorSourceITCase.java
similarity index 100%
rename from 
flink-connectors/flink-connector-datagen/src/test/java/org/apache/flink/connector/datagen/source/DataGeneratorSourceITCase.java
rename to 
flink-connectors/flink-connector-datagen-test/src/test/java/org/apache/flink/connector/datagen/source/DataGeneratorSourceITCase.java
diff --git 
a/flink-connectors/flink-connector-datagen/src/test/java/org/apache/flink/connector/datagen/source/DataGeneratorSourceTest.java
 
b/flink-connectors/flink-connector-datagen-test/src/test/java/org/apache/flink/connector/datagen/source/DataGeneratorSourceTest.java
similarity index 100%
rename from 
flink-connectors/flink-connector-datagen/src/test/java/org/apache/flink/connector/datagen/source/DataGeneratorSourceTest.java
rename to 
flink-connectors/flink-connector-datagen-test/src/test/java/org/apache/flink/connector/datagen/source/DataGeneratorSourceTest.java
diff --git 
a/flink-connectors/flink-connector-datagen/src/test/resources/archunit.properties
 
b/flink-connectors/flink-connector-datagen-test/src/test/resources/archunit.properties
similarity index 100%
rename from 
flink-connectors/flink-connector-datagen/src/test/resources/archunit.properties
rename to 
flink-connectors/flink-connector-datagen-test/src/test/resources/archunit.properties
diff --git 
a/flink-connectors/flink-connector-datagen/src/test/resources/log4j2-test.properties
 
b/flink-connectors/flink-connector-datagen-test/src/test/resources/log4j2-test.properties
similarity index 100%
rename from 
flink-connectors/flink-connector-datagen/src/test/resources/log4j2-test.properties
rename to 
flink-connectors/flink-connector-datagen-test/sr

(flink) 02/04: [FLINK-28050][connectors] Deprecate OutputTypeConfigurable in flink-streaming-java

2023-11-27 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 80fd59ccfc35dc3f6a077c170a26d3252c9e4dee
Author: Alexander Fedulov <1492164+afedu...@users.noreply.github.com>
AuthorDate: Thu Oct 12 21:17:49 2023 +0200

[FLINK-28050][connectors] Deprecate OutputTypeConfigurable in 
flink-streaming-java

Reason for this change: OutputTypeConfigurable needs to be used in 
DataGeneratorSource (see 
StreamExecutionEnvironmentTest#testFromElementsPostConstructionType). 
OutputTypeConfigurable is located in StreamExecutionEnvironment. When 
DataGeneratorSource gets added into flink-streaming-java, this creates a cycle. 
Marker interfaces should ideally reside in flink-core (InputTypeConfigurable 
already does).
- Deprecates OutputTypeConfigurable in flink-streaming-java
- Adds a new marker OutputTypeConfigurable interface to flink-core
- Modifies operators that use this interface to support both versions
---
 .../java/typeutils}/OutputTypeConfigurable.java| 12 ++--
 .../api/operators/OutputTypeConfigurable.java  |  3 +++
 .../api/operators/SimpleOperatorFactory.java   | 13 +++--
 .../api/operators/SourceOperatorFactory.java   | 22 ++
 4 files changed, 42 insertions(+), 8 deletions(-)

diff --git 
a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/OutputTypeConfigurable.java
 
b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/OutputTypeConfigurable.java
similarity index 86%
copy from 
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/OutputTypeConfigurable.java
copy to 
flink-core/src/main/java/org/apache/flink/api/java/typeutils/OutputTypeConfigurable.java
index a296fa7d5cb..2ff3ceabcd6 100644
--- 
a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/OutputTypeConfigurable.java
+++ 
b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/OutputTypeConfigurable.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.flink.streaming.api.operators;
+package org.apache.flink.api.java.typeutils;
 
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.ExecutionConfig;
@@ -24,7 +24,7 @@ import org.apache.flink.api.common.typeinfo.TypeInformation;
 
 /**
  * Stream operators can implement this interface if they need access to the 
output type information
- * at {@link org.apache.flink.streaming.api.graph.StreamGraph} generation. 
This can be useful for
+ * at {@code org.apache.flink.streaming.api.graph.StreamGraph} generation. 
This can be useful for
  * cases where the output type is specified by the returns method and, thus, 
after the stream
  * operator has been created.
  */
@@ -32,13 +32,13 @@ import org.apache.flink.api.common.typeinfo.TypeInformation;
 public interface OutputTypeConfigurable<OUT> {
 
 /**
- * Is called by the {@link 
org.apache.flink.streaming.api.graph.StreamGraph#addOperator(Integer,
- * String, StreamOperator, TypeInformation, TypeInformation, String)} 
method when the {@link
+ * Is called by the {@code 
org.apache.flink.streaming.api.graph.StreamGraph#addOperator(Integer,
+ * String, StreamOperator, TypeInformation, TypeInformation, String)} 
method when the {@code
  * org.apache.flink.streaming.api.graph.StreamGraph} is generated. The 
method is called with the
- * output {@link TypeInformation} which is also used for the {@link
+ * output {@link TypeInformation} which is also used for the {@code
  * org.apache.flink.streaming.runtime.tasks.StreamTask} output serializer.
  *
- * @param outTypeInfo Output type information of the {@link
+ * @param outTypeInfo Output type information of the {@code
  * org.apache.flink.streaming.runtime.tasks.StreamTask}
  * @param executionConfig Execution configuration
  */
diff --git 
a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/OutputTypeConfigurable.java
 
b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/OutputTypeConfigurable.java
index a296fa7d5cb..f358aea0c92 100644
--- 
a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/OutputTypeConfigurable.java
+++ 
b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/OutputTypeConfigurable.java
@@ -27,7 +27,10 @@ import org.apache.flink.api.common.typeinfo.TypeInformation;
  * at {@link org.apache.flink.streaming.api.graph.StreamGraph} generation. 
This can be useful for
  * cases where the output type is specified by the returns method and, thus, 
after the stream
  * operator has been created.
+ *
+ * @deprecated Use {@link 
org.apache.flink.api.java.typeutils.OutputTypeConfigurable} instead
  */
+@Deprecated
 @PublicEvolving
 public interface OutputTypeConfigurable {
 
diff --git 
a/flink-str

(flink) 03/04: [FLINK-28050][connectors] Introduce FLIP-27 alternative to StreamExecutionEnvironment#fromElements()

2023-11-27 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit ad64e72cf3eaede9a4db6313e72f22340a133b00
Author: Alexander Fedulov <1492164+afedu...@users.noreply.github.com>
AuthorDate: Thu Jun 22 20:06:10 2023 +0200

[FLINK-28050][connectors] Introduce FLIP-27 alternative to 
StreamExecutionEnvironment#fromElements()
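
A hedged sketch of how the added classes fit together (constructor shapes are 
inferred from this commit and may differ; `env` is an existing 
StreamExecutionEnvironment):

    import org.apache.flink.api.common.eventtime.WatermarkStrategy;
    import org.apache.flink.api.common.typeinfo.Types;
    import org.apache.flink.connector.datagen.functions.FromElementsGeneratorFunction;
    import org.apache.flink.connector.datagen.source.DataGeneratorSource;

    // FLIP-27 style: wrap the elements in a generator function and feed it to a
    // bounded DataGeneratorSource instead of calling fromElements() directly.
    FromElementsGeneratorFunction<String> generator =
            new FromElementsGeneratorFunction<>(Types.STRING, "hello", "world");
    DataGeneratorSource<String> source =
            new DataGeneratorSource<>(generator, 2, Types.STRING);
    env.fromSource(source, WatermarkStrategy.noWatermarks(), "from-elements").print();
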
---
 .../e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5   |   8 +-
 .../f7a4e6fa-e7de-48c9-a61e-c13e83f0c72e   |  87 +
 .../flink-architecture-tests-production/pom.xml|   5 +
 flink-architecture-tests/pom.xml   |   7 +
 .../6dae736c-6957-4d04-93bf-d7ebc5ca97ab   |   0
 .../f2b2daa5-8ca8-4a74-bb17-cb9f49e3e0e2   |   0
 .../archunit-violations/stored.rules   |   4 +
 .../flink-connector-datagen-test/pom.xml   |  49 -
 .../FromElementsGeneratorSourceITCase.java | 122 +
 .../datagen/source/DataGeneratorSourceITCase.java  |  56 +-
 .../src/test/resources/avro/user.avsc  |   9 +
 .../functions/FromElementsGeneratorFunction.java   | 200 +
 .../datagen/source/DataGeneratorSource.java|  35 +++-
 .../api/operators}/OutputTypeConfigurable.java |   8 +-
 ...st_stream_execution_environment_completeness.py |   2 +-
 flink-streaming-java/pom.xml   |   6 +
 .../streaming/api/datastream/DataStreamSource.java |   2 +-
 .../environment/StreamExecutionEnvironment.java| 110 
 .../api/operators/OutputTypeConfigurable.java  |  49 -
 .../api/operators/SimpleOperatorFactory.java   |  13 +-
 .../api/operators/SourceOperatorFactory.java   |   9 +-
 .../apache/flink/streaming/api/DataStreamTest.java |   7 -
 .../StreamExecutionEnvironmentTest.java|  57 --
 .../api/graph/StreamGraphGeneratorTest.java|  19 +-
 .../api/graph/StreamingJobGraphGeneratorTest.java  |  16 +-
 .../MultipleInputNodeCreationProcessorTest.java|   4 +-
 flink-tests/pom.xml|  45 -
 .../completeness/TypeInfoTestCoverageTest.java |   6 +-
 .../TypeSerializerTestCoverageTest.java|   7 +-
 .../api/StreamExecutionEnvironmentITCase.java  |  87 +++--
 flink-tests/src/test/resources/avro/user.avsc  |   9 +
 pom.xml|   3 +
 32 files changed, 871 insertions(+), 170 deletions(-)

diff --git 
a/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5
 
b/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5
index 1fea7816ed1..dbca0076986 100644
--- 
a/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5
+++ 
b/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5
@@ -17,10 +17,10 @@ Method 

 calls method 

 in (EmbeddedHaServicesWithLeadershipControl.java:102)
 Method 

 calls method 
 in (ResourceManagerPartitionTrackerImpl.java:286)
 Method 

 calls method 

 in (RecreateOnResetOperatorCoordinator.java:361)
-Method 
 calls method 
 in (TaskManagerConfiguration.java:244)
-Method 
 calls method 

 in (TaskManagerConfiguration.java:246)
-Method 
 calls method 
 in (TaskManagerServices.java:433)
-Method 
 calls method 

 in (TaskManagerServices.java:431)
+Method 
 calls method 
 in (TaskManagerConfiguration.java:241)
+Method 
 calls method 

 in (TaskManagerConfiguration.java:243)
+Method 
 calls method 
 in (TaskManagerServices.java:450)
+Method 
 calls method 

 in (TaskManagerServices.java:448)
 Method 
 
calls method 
 
in (SourceOperator.java:295)
 Method 
 calls method 
 
in (SourceOperator.java:301)
 Method 
 
calls method 
 in 
(SourceOperatorStreamTask.java:95)
diff --git 
a/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/f7a4e6fa-e7de-48c9-a61e-c13e83f0c72e
 
b/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/f7a4e6fa-e7de-48c9-a61e-c13e83f0c72e
index 0c69f24bc8b..95c466ee545 100644
--- 
a/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/f7a4e6fa-e7de-48c9-a61e-c13e83f0c72e
+++ 
b/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/f7a4e6fa-e7de-48c9-a61e-c13e83f0c72e
@@ -64,6 +64,18 @@ Constructor 
(int, 
org.apache.flink.connector.base.source.reader.synchronization.FutureCompletingBlockingQueue,
 org.apache.flink.connector.base.source.reader.splitreader.SplitReader, 
java.util.function.Consumer, java.lang.Runnable, java.util.function.Consumer, 
boolean)> calls method 
 in 
(SplitFetcher.java:97)
 Constructor 
(org.apache.flink.connector.base.source.reader.synchronization.FutureCompletingBlockingQueue,
 java.util.function

(flink) branch master updated (f6f785fd1a9 -> d351c5bd9c1)

2023-11-27 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from f6f785fd1a9 [FLINK-33418][test] Uses getHost()
 new 68437b937a6 [FLINK-28050][connectors] Move DataGeneratorSource tests 
into a separate module
 new 80fd59ccfc3 [FLINK-28050][connectors] Deprecate OutputTypeConfigurable 
in flink-streaming-java
 new ad64e72cf3e [FLINK-28050][connectors] Introduce FLIP-27 alternative to 
StreamExecutionEnvironment#fromElements()
 new d351c5bd9c1 [FLINK-28050][connectors] Deprecate 
StreamExecutionEnvironment#fromElements()

The 4 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5   |   8 +-
 .../f7a4e6fa-e7de-48c9-a61e-c13e83f0c72e   |  87 +
 .../flink-architecture-tests-production/pom.xml|   5 +
 flink-architecture-tests/pom.xml   |   7 +
 .../program/PackagedProgramUtilsPipelineTest.java  |   2 +-
 .../org/apache/flink/client/testjar/TestJob.java   |   2 +-
 .../6dae736c-6957-4d04-93bf-d7ebc5ca97ab}  |   0
 .../f2b2daa5-8ca8-4a74-bb17-cb9f49e3e0e2}  |   0
 .../archunit-violations/stored.rules   |   6 +-
 .../pom.xml|  67 ++-
 .../architecture/TestCodeArchitectureTest.java |   0
 .../FromElementsGeneratorSourceITCase.java | 122 +
 .../datagen/source/DataGeneratorSourceITCase.java  |  56 +-
 .../datagen/source/DataGeneratorSourceTest.java|   0
 .../src/test/resources/archunit.properties |   0
 .../src/test/resources/avro/user.avsc  |   9 +
 .../src/test/resources/log4j2-test.properties  |   0
 flink-connectors/flink-connector-datagen/pom.xml   |  33 
 .../functions/FromElementsGeneratorFunction.java   | 200 +
 .../datagen/source/DataGeneratorSource.java|  35 +++-
 flink-connectors/pom.xml   |   1 +
 .../api/operators/OutputTypeConfigurable.java  |  16 +-
 .../tests/DistributedCacheViaBlobTestProgram.java  |   2 +-
 .../java/org/apache/flink/tests/scala/JavaJob.java |   2 +-
 .../tests/scala/JavaJobWithKryoSerializer.java |   2 +-
 .../tests/FailureEnricherTestProgram.java  |   2 +-
 .../streaming/tests/ClassLoaderTestProgram.java|   2 +-
 .../examples/sideoutput/SideOutputExample.java |   2 +-
 .../examples/windowing/WindowWordCount.java|   2 +-
 .../streaming/examples/wordcount/WordCount.java|   2 +-
 .../streaming/test/StreamingExamplesITCase.java|   4 +-
 .../java/basics/TemporalJoinSQLExample.java|   4 +-
 .../flink/table/runtime/batch/AvroTypesITCase.java |   2 +-
 .../flink/hdfstests/DistributedCacheDfsTest.java   |   2 +-
 .../test/java/org/apache/flink/cep/CEPITCase.java  |  30 ++--
 .../flink/state/api/SavepointDeepCopyTest.java |   2 +-
 .../flink/state/api/SavepointWriterITCase.java |   2 +-
 .../api/StateBootstrapTransformationTest.java  |  10 +-
 .../state/api/runtime/OperatorIDGeneratorTest.java |   2 +-
 ...st_stream_execution_environment_completeness.py |   2 +-
 .../java/org/apache/flink/client/cli/TestJob.java  |   2 +-
 .../client/python/PythonFunctionFactoryTest.java   |   2 +-
 .../PythonScalarFunctionOperatorTestBase.java  |   2 +-
 .../webmonitor/history/HistoryServerTest.java  |   2 +-
 flink-streaming-java/pom.xml   |   6 +
 .../streaming/api/datastream/DataStreamSource.java |   2 +-
 .../environment/StreamExecutionEnvironment.java| 116 
 .../api/operators/SourceOperatorFactory.java   |  15 ++
 .../apache/flink/streaming/api/DataStreamTest.java |  39 ++--
 .../flink/streaming/api/RestartStrategyTest.java   |   6 +-
 .../api/datastream/CoGroupedStreamsTest.java   |   4 +-
 .../api/datastream/DataStreamSinkTest.java |   4 +-
 .../api/datastream/JoinedStreamsTest.java  |   4 +-
 .../StreamExecutionEnvironmentTest.java|  73 ++--
 .../SinkV1TransformationTranslatorITCase.java  |   4 +-
 .../StreamGraphGeneratorBatchExecutionTest.java|  46 ++---
 .../api/graph/StreamGraphGeneratorTest.java|  65 +++
 .../api/graph/StreamingJobGraphGeneratorTest.java  |  90 ++
 ...hGeneratorWithGlobalStreamExchangeModeTest.java |   4 +-
 .../api/operators/StateDescriptorPassingTest.java  |  12 +-
 .../api/operators/async/AsyncWaitOperatorTest.java |   2 +-
 .../operators/StreamOperatorChainingTest.java  |   4 +-
 .../windowing/AllWindowTranslationTest.java|  56 +++---
 .../windowing/TimeWindowTranslationTest.java   |   6 +-
 .../operators/windowing/WindowTranslationTest.java |  62 +++
 .../scala/StreamingScalaAPICo

(flink) 02/02: [hotfix] Rename `TestJvmProcess#getJvmArgs` to `TestJvmProcess#getMainMethodArgs`

2023-11-22 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 7f2818bea1c50d9a092c5360a3d4de3a86c411b2
Author: Sergey Nuyanzin 
AuthorDate: Fri Nov 3 18:03:39 2023 +0100

[hotfix] Rename `TestJvmProcess#getJvmArgs` to 
`TestJvmProcess#getMainMethodArgs`
---
 .../entrypoint/ClusterUncaughtExceptionHandlerITCase.java  |  2 +-
 .../flink/runtime/io/disk/FileChannelManagerImplTest.java  |  2 +-
 .../org/apache/flink/runtime/testutils/DispatcherProcess.java  |  2 +-
 .../org/apache/flink/runtime/testutils/TestJvmProcess.java | 10 +-
 .../runtime/testutils/TestingClusterEntrypointProcess.java |  2 +-
 .../org/apache/flink/runtime/util/BlockingShutdownTest.java|  2 +-
 .../apache/flink/runtime/util/FlinkSecurityManagerITCase.java  |  2 +-
 .../org/apache/flink/runtime/util/JvmExitOnFatalErrorTest.java |  2 +-
 8 files changed, 12 insertions(+), 12 deletions(-)

diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/entrypoint/ClusterUncaughtExceptionHandlerITCase.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/entrypoint/ClusterUncaughtExceptionHandlerITCase.java
index 520c8d32591..ae5178310b2 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/entrypoint/ClusterUncaughtExceptionHandlerITCase.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/entrypoint/ClusterUncaughtExceptionHandlerITCase.java
@@ -130,7 +130,7 @@ public class ClusterUncaughtExceptionHandlerITCase extends 
TestLogger {
 }
 
 @Override
-public String[] getJvmArgs() {
+public String[] getMainMethodArgs() {
 return new String[0];
 }
 
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/io/disk/FileChannelManagerImplTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/io/disk/FileChannelManagerImplTest.java
index 17ab746d41a..73b8c565d16 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/io/disk/FileChannelManagerImplTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/io/disk/FileChannelManagerImplTest.java
@@ -185,7 +185,7 @@ class FileChannelManagerImplTest {
 }
 
 @Override
-public String[] getJvmArgs() {
+public String[] getMainMethodArgs() {
 return new String[] {Boolean.toString(callerHasHook), 
tmpDirectories, signalFilePath};
 }
 
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/DispatcherProcess.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/DispatcherProcess.java
index 66e7f710546..7f0a247dc21 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/DispatcherProcess.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/DispatcherProcess.java
@@ -78,7 +78,7 @@ public class DispatcherProcess extends TestJvmProcess {
 }
 
 @Override
-public String[] getJvmArgs() {
+public String[] getMainMethodArgs() {
 return jvmArgs;
 }
 
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestJvmProcess.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestJvmProcess.java
index c174f66f411..e9ca964350e 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestJvmProcess.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestJvmProcess.java
@@ -101,12 +101,12 @@ public abstract class TestJvmProcess {
  *
  * These can be parsed by the main method of the entry point class.
  */
-public abstract String[] getJvmArgs();
+public abstract String[] getMainMethodArgs();
 
 /**
  * Returns the name of the class to run.
  *
- * Arguments to the main method can be specified via {@link 
#getJvmArgs()}.
+ * Arguments to the main method can be specified via {@link 
#getMainMethodArgs()}.
  */
 public abstract String getEntryPointClassName();
 
@@ -151,10 +151,10 @@ public abstract class TestJvmProcess {
 
 cmd = ArrayUtils.add(cmd, getEntryPointClassName());
 
-String[] jvmArgs = getJvmArgs();
+String[] mainMethodArgs = getMainMethodArgs();
 
-if (jvmArgs != null && jvmArgs.length > 0) {
-cmd = ArrayUtils.addAll(cmd, jvmArgs);
+if (mainMethodArgs != null && mainMethodArgs.length > 0) {
+cmd = ArrayUtils.addAll(cmd, mainMethodArgs);
 }
 
 synchronized (createDestroyLock) {
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestingClusterEntrypointProcess.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestingClusterEntrypointProcess.java
index ef9f674a72d..81dd2043ac9 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestingClusterEntrypointProcess.java
+++ 
b/flink-run

(flink) branch master updated (2378babf86c -> 7f2818bea1c)

2023-11-22 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 2378babf86c [FLINK-29452][test] Allow unit tests to be executed 
individually
 new 905d5998093 [FLINK-33311] `surefire.module.config` args should be 
before entry point classname
 new 7f2818bea1c [hotfix] Rename `TestJvmProcess#getJvmArgs` to 
`TestJvmProcess#getMainMethodArgs`

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../ClusterUncaughtExceptionHandlerITCase.java |  2 +-
 .../io/disk/FileChannelManagerImplTest.java|  2 +-
 .../flink/runtime/testutils/DispatcherProcess.java |  2 +-
 .../flink/runtime/testutils/TestJvmProcess.java| 23 +++---
 .../testutils/TestingClusterEntrypointProcess.java |  2 +-
 .../flink/runtime/util/BlockingShutdownTest.java   |  2 +-
 .../runtime/util/FlinkSecurityManagerITCase.java   |  2 +-
 .../runtime/util/JvmExitOnFatalErrorTest.java  |  2 +-
 8 files changed, 19 insertions(+), 18 deletions(-)



(flink) 01/02: [FLINK-33311] `surefire.module.config` args should be before entry point classname

2023-11-22 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 905d5998093127b145c9aa7d32a1667c4b45e850
Author: Sergey Nuyanzin 
AuthorDate: Fri Nov 3 17:57:49 2023 +0100

[FLINK-33311] `surefire.module.config` args should be before entry point 
classname
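
The ordering matters because the java launcher treats everything after the 
entry-point class name as arguments to main(), not as JVM options. A minimal 
sketch of the corrected assembly (class name and option values are 
illustrative):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    public class LaunchOrderSketch {
        public static void main(String[] args) throws IOException {
            List<String> cmd = new ArrayList<>();
            cmd.add("java");
            cmd.add("-Xmx512m");                                    // JVM option
            cmd.add("--add-opens=java.base/java.util=ALL-UNNAMED"); // surefire.module.config content: JVM options
            cmd.add("org.example.EntryPoint");                      // entry point class
            cmd.add("--port=8081");                                 // main-method argument: must come after the class
            new ProcessBuilder(cmd).inheritIO().start();
        }
    }
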
---
 .../apache/flink/runtime/testutils/TestJvmProcess.java| 15 ---
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestJvmProcess.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestJvmProcess.java
index 78cebd083f3..c174f66f411 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestJvmProcess.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/testutils/TestJvmProcess.java
@@ -141,21 +141,22 @@ public abstract class TestJvmProcess {
 "-Xmx" + jvmMemoryInMb + "m",
 "-classpath",
 getCurrentClasspath(),
-"-XX:+IgnoreUnrecognizedVMOptions",
-getEntryPointClassName()
+"-XX:+IgnoreUnrecognizedVMOptions"
 };
 
+final String moduleConfig = 
System.getProperty("surefire.module.config");
+if (moduleConfig != null) {
+cmd = ArrayUtils.addAll(cmd, moduleConfig.trim().split("\\s+"));
+}
+
+cmd = ArrayUtils.add(cmd, getEntryPointClassName());
+
 String[] jvmArgs = getJvmArgs();
 
 if (jvmArgs != null && jvmArgs.length > 0) {
 cmd = ArrayUtils.addAll(cmd, jvmArgs);
 }
 
-final String moduleConfig = 
System.getProperty("surefire.module.config");
-if (moduleConfig != null) {
-cmd = ArrayUtils.addAll(cmd, moduleConfig.split(" "));
-}
-
 synchronized (createDestroyLock) {
 checkState(process == null, "process already started");
 



(flink) branch master updated: [FLINK-18356] Update CI image

2023-11-15 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 499e56f138f [FLINK-18356] Update CI image
499e56f138f is described below

commit 499e56f138fb4e81cbb8810385cfb393d16ea454
Author: Chesnay Schepler 
AuthorDate: Wed Nov 15 10:48:56 2023 +0100

[FLINK-18356] Update CI image
---
 .github/workflows/docs.yml  | 2 +-
 tools/azure-pipelines/build-apache-repo.yml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 26754d2c1e6..ed71b759ead 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -48,7 +48,7 @@ jobs:
   fi
   - name: Build documentation
 run: |
-  docker run --rm --volume "$PWD:/root/flink" 
chesnay/flink-ci:java_8_11_17_maven_386_v2 bash -c "cd /root/flink && 
./.github/workflows/docs.sh"
+  docker run --rm --volume "$PWD:/root/flink" 
chesnay/flink-ci:java_8_11_17_maven_386_v3 bash -c "cd /root/flink && 
./.github/workflows/docs.sh"
   - name: Upload documentation
 uses: burnett01/rsync-deployments@5.2
 with:
diff --git a/tools/azure-pipelines/build-apache-repo.yml 
b/tools/azure-pipelines/build-apache-repo.yml
index 892d2a519dd..15551710402 100644
--- a/tools/azure-pipelines/build-apache-repo.yml
+++ b/tools/azure-pipelines/build-apache-repo.yml
@@ -39,7 +39,7 @@ resources:
   # Container with Maven 3.8.6, SSL to have the same environment everywhere.
   # see https://github.com/apache/flink-connector-shared-utils/tree/ci_utils
   - container: flink-build-container
-image: chesnay/flink-ci:java_8_11_17_maven_386_v2
+image: chesnay/flink-ci:java_8_11_17_maven_386_v3
 
 variables:
   MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository



(flink-connector-shared-utils) branch parent_pom updated: [FLINK-32563] add additionalExcludes property to add exclusions to surefire tests

2023-11-15 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch parent_pom
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git


The following commit(s) were added to refs/heads/parent_pom by this push:
 new 808184a  [FLINK-32563] add additionalExcludes property to add 
exclusions to surefire tests
808184a is described below

commit 808184ab723d678a4966ae4c45096c4bd6d3e756
Author: Etienne Chauchot 
AuthorDate: Tue Nov 14 10:30:05 2023 +0100

[FLINK-32563] add additionalExcludes property to add exclusions to surefire 
tests
---
 pom.xml | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 00c9a9b..0d88817 100644
--- a/pom.xml
+++ b/pom.xml
@@ -27,7 +27,7 @@ under the License.
 
 org.apache.flink
 flink-connector-parent
-1.0.0
+1.0.1
 pom
 
 https://flink.apache.org
@@ -80,6 +80,10 @@ under the License.
 
 
 
+
+
+
 
 
 
@@ -570,6 +574,9 @@ under the License.
 
 ${test.unit.pattern}
 
+
+${additionalExcludes}
+
 
${flink.forkCountUnitTest}
 ${flink.surefire.baseArgLine} 
-Xmx${flink.XmxUnitTest}
 
@@ -587,6 +594,7 @@ under the License.
 
 
 ${test.unit.pattern}
+${additionalExcludes}
 
 ${flink.forkCountITCase}
 ${flink.surefire.baseArgLine} 
-Xmx${flink.XmxITCase}



(flink) branch release-1.18 updated: [hotfix] Move permission fix to correct line

2023-11-14 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new f0980495180 [hotfix] Move permission fix to correct line
f0980495180 is described below

commit f09804951809f7dbaa748b07ac8dc9837fa7babf
Author: Chesnay Schepler 
AuthorDate: Tue Nov 14 10:50:20 2023 +0100

[hotfix] Move permission fix to correct line
---
 .github/workflows/docs.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/docs.sh b/.github/workflows/docs.sh
index 0304b78e8e4..179a8dbe64a 100755
--- a/.github/workflows/docs.sh
+++ b/.github/workflows/docs.sh
@@ -30,11 +30,11 @@ if ! curl --fail -OL $HUGO_REPO ; then
exit 1
 fi
 tar -zxvf $HUGO_ARTIFACT -C /usr/local/bin
+# workaround for a git security patch
+git config --global --add safe.directory /root/flink
 git submodule update --init --recursive
 # Setup the external documentation modules
 cd docs
-# workaround for a git security patch
-git config --global --add safe.directory /root/flink
 source setup_docs.sh
 cd ..
 # Build the docs



(flink) branch release-1.16 updated: [hotfix][docs] Fix git permission issue attempt

2023-11-14 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.16
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.16 by this push:
 new b02cbc30ade [hotfix][docs] Fix git permission issue attempt
b02cbc30ade is described below

commit b02cbc30ade1ac4e40b15a3bbcbf81c6e7444ce5
Author: Chesnay Schepler 
AuthorDate: Wed Nov 1 16:58:01 2023 +0100

[hotfix][docs] Fix git permission issue attempt
---
 .github/workflows/docs.sh | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/docs.sh b/.github/workflows/docs.sh
index fbc03f1f99d..6671f0ed7b4 100755
--- a/.github/workflows/docs.sh
+++ b/.github/workflows/docs.sh
@@ -30,6 +30,8 @@ if ! curl --fail -OL $HUGO_REPO ; then
exit 1
 fi
 tar -zxvf $HUGO_ARTIFACT -C /usr/local/bin
+# workaround for a git security patch
+git config --global --add safe.directory /root/flink
 git submodule update --init --recursive
 # Setup the external documentation modules
 cd docs



(flink) branch release-1.17 updated: [hotfix][docs] Fix git permission issue attempt

2023-11-14 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.17
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.17 by this push:
 new 18e1506c92d [hotfix][docs] Fix git permission issue attempt
18e1506c92d is described below

commit 18e1506c92dc818d5ed4c9055b25f071693d72bc
Author: Chesnay Schepler 
AuthorDate: Wed Nov 1 16:58:01 2023 +0100

[hotfix][docs] Fix git permission issue attempt
---
 .github/workflows/docs.sh | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/docs.sh b/.github/workflows/docs.sh
index 34aa227d8c9..179a8dbe64a 100755
--- a/.github/workflows/docs.sh
+++ b/.github/workflows/docs.sh
@@ -30,6 +30,8 @@ if ! curl --fail -OL $HUGO_REPO ; then
exit 1
 fi
 tar -zxvf $HUGO_ARTIFACT -C /usr/local/bin
+# workaround for a git security patch
+git config --global --add safe.directory /root/flink
 git submodule update --init --recursive
 # Setup the external documentation modules
 cd docs



(flink) branch release-1.18 updated: [hotfix][docs] Fix git permission issue attempt

2023-11-14 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new 7475a175782 [hotfix][docs] Fix git permission issue attempt
7475a175782 is described below

commit 7475a1757821b59dff1762043e6dc5041f66044b
Author: Chesnay Schepler 
AuthorDate: Wed Nov 1 16:58:01 2023 +0100

[hotfix][docs] Fix git permission issue attempt
---
 .github/workflows/docs.sh | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/docs.sh b/.github/workflows/docs.sh
index 34aa227d8c9..0304b78e8e4 100755
--- a/.github/workflows/docs.sh
+++ b/.github/workflows/docs.sh
@@ -33,6 +33,8 @@ tar -zxvf $HUGO_ARTIFACT -C /usr/local/bin
 git submodule update --init --recursive
 # Setup the external documentation modules
 cd docs
+# workaround for a git security patch
+git config --global --add safe.directory /root/flink
 source setup_docs.sh
 cd ..
 # Build the docs



(flink) branch master updated: [FLINK-33309] Set `-Djava.security.manager=allow` on JDK 18+

2023-11-03 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 7295c3bcf92 [FLINK-33309] Set `-Djava.security.manager=allow` on JDK 
18+
7295c3bcf92 is described below

commit 7295c3bcf92c0e106d3e91c57b6492030c760a25
Author: Sergey Nuyanzin 
AuthorDate: Fri Nov 3 12:51:42 2023 +0100

[FLINK-33309] Set `-Djava.security.manager=allow` on JDK 18+
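
For context: since JDK 18 the runtime disallows installing a security manager 
at run time unless the JVM was started with -Djava.security.manager=allow, 
which is what the config.sh change below enables. A minimal probe (class name 
is illustrative):

    public class SecurityManagerProbe {
        public static void main(String[] args) {
            // Throws UnsupportedOperationException on JDK 18+ unless the JVM
            // was launched with -Djava.security.manager=allow.
            System.setSecurityManager(new SecurityManager());
            System.out.println("Installed: " + System.getSecurityManager());
        }
    }
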
---
 flink-core/pom.xml  | 21 -
 flink-dist/src/main/flink-bin/bin/config.sh |  6 ++
 flink-runtime/pom.xml   | 21 -
 flink-streaming-java/pom.xml| 21 -
 4 files changed, 66 insertions(+), 3 deletions(-)

diff --git a/flink-core/pom.xml b/flink-core/pom.xml
index ce90b2a1e6d..aee94432815 100644
--- a/flink-core/pom.xml
+++ b/flink-core/pom.xml
@@ -34,7 +34,14 @@ under the License.
jar
 

-
+   
+${surefire.module.config.jdk21} --add-exports=java.rmi/sun.rmi.registry=ALL-UNNAMED 

+   
-Djava.security.manager=allow
+   
+   

 
diff --git a/flink-dist/src/main/flink-bin/bin/config.sh 
b/flink-dist/src/main/flink-bin/bin/config.sh
index f09c68d493f..dcd48a256f7 100755
--- a/flink-dist/src/main/flink-bin/bin/config.sh
+++ b/flink-dist/src/main/flink-bin/bin/config.sh
@@ -334,6 +334,12 @@ if [ -z "${FLINK_ENV_JAVA_OPTS}" ]; then
 
 # Remove leading and ending double quotes (if present) of value
 FLINK_ENV_JAVA_OPTS="-XX:+IgnoreUnrecognizedVMOptions $( echo 
"${FLINK_ENV_JAVA_OPTS}" | sed -e 's/^"//'  -e 's/"$//' )"
+
+JAVA_SPEC_VERSION=`${JAVA_HOME}/bin/java -XshowSettings:properties 2>&1 | 
grep "java.specification.version" | cut -d "=" -f 2 | tr -d '[:space:]' | rev | 
cut -d "." -f 1 | rev`
+if [[ $(( $JAVA_SPEC_VERSION > 17 )) == 1 ]]; then
+  # set security manager property to allow calls to 
System.setSecurityManager() at runtime
+  FLINK_ENV_JAVA_OPTS="$FLINK_ENV_JAVA_OPTS -Djava.security.manager=allow"
+fi
 fi
 
 if [ -z "${FLINK_ENV_JAVA_OPTS_JM}" ]; then
diff --git a/flink-runtime/pom.xml b/flink-runtime/pom.xml
index 359a0e181cb..c6bd2e90964 100644
--- a/flink-runtime/pom.xml
+++ b/flink-runtime/pom.xml
@@ -34,7 +34,14 @@ under the License.
jar
 

-   
+   
+${surefire.module.config.jdk21} --add-opens=java.base/java.util=ALL-UNNAMED 
+   
-Djava.security.manager=allow
+   
+   

 
diff --git a/flink-streaming-java/pom.xml b/flink-streaming-java/pom.xml
index f66ed3b48b8..4a7644dfb45 100644
--- a/flink-streaming-java/pom.xml
+++ b/flink-streaming-java/pom.xml
@@ -34,7 +34,14 @@ under the License.
jar
 

-   
+   
+${surefire.module.config.jdk21} --add-opens=java.base/java.lang=ALL-UNNAMED 
+   
-Djava.security.manager=allow
+   
+   

 



(flink) branch master updated: [hotfix][docs] Fix git permission issue attempt #2

2023-11-01 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 24c1954cebc [hotfix][docs] Fix git permission issue attempt #2
24c1954cebc is described below

commit 24c1954cebc0e62655a9621d5a2adef81db84b5d
Author: Chesnay Schepler 
AuthorDate: Wed Nov 1 16:58:01 2023 +0100

[hotfix][docs] Fix git permission issue attempt #2
---
 .github/workflows/docs.sh  | 2 ++
 .github/workflows/docs.yml | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/docs.sh b/.github/workflows/docs.sh
index b8a088d31d1..1d08bbbafef 100755
--- a/.github/workflows/docs.sh
+++ b/.github/workflows/docs.sh
@@ -22,6 +22,8 @@ mvn --version
 java -version
 javadoc -J-version
 
+# workaround for a git security patch
+git config --global --add safe.directory /root/flink
 git submodule update --init --recursive
 
 cd docs
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 17d927a0eec..26754d2c1e6 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -48,7 +48,7 @@ jobs:
   fi
   - name: Build documentation
 run: |
-  docker run --user 1001 --rm --volume "$PWD:/root/flink" 
chesnay/flink-ci:java_8_11_17_maven_386_v2 bash -c "cd /root/flink && 
./.github/workflows/docs.sh"
+  docker run --rm --volume "$PWD:/root/flink" 
chesnay/flink-ci:java_8_11_17_maven_386_v2 bash -c "cd /root/flink && 
./.github/workflows/docs.sh"
   - name: Upload documentation
 uses: burnett01/rsync-deployments@5.2
 with:



(flink) branch master updated: [hotfix][docs][ci] Fix typo

2023-11-01 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new d5b24795d54 [hotfix][docs][ci] Fix typo
d5b24795d54 is described below

commit d5b24795d54c9b3a550e5c7b0957631778dfaf6f
Author: Chesnay Schepler 
AuthorDate: Wed Nov 1 16:49:19 2023 +0100

[hotfix][docs][ci] Fix typo
---
 .github/workflows/docs.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index ff59e8cd4f4..17d927a0eec 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -48,7 +48,7 @@ jobs:
   fi
   - name: Build documentation
 run: |
-  docker run -user 1001 --rm --volume "$PWD:/root/flink" 
chesnay/flink-ci:java_8_11_17_maven_386_v2 bash -c "cd /root/flink && 
./.github/workflows/docs.sh"
+  docker run --user 1001 --rm --volume "$PWD:/root/flink" 
chesnay/flink-ci:java_8_11_17_maven_386_v2 bash -c "cd /root/flink && 
./.github/workflows/docs.sh"
   - name: Upload documentation
 uses: burnett01/rsync-deployments@5.2
 with:



(flink) branch master updated: [hotfix][docs] Fix docker/git permission issue

2023-11-01 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 0854084e08b [hotfix][docs] Fix docker/git permission issue
0854084e08b is described below

commit 0854084e08b529a22ce7851630423c1107e7a822
Author: Chesnay Schepler 
AuthorDate: Wed Nov 1 16:47:48 2023 +0100

[hotfix][docs] Fix docker/git permission issue
---
 .github/workflows/docs.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 26b29a1c9ca..ff59e8cd4f4 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -48,7 +48,7 @@ jobs:
   fi
   - name: Build documentation
 run: |
-  docker run  --rm --volume "$PWD:/root/flink" 
chesnay/flink-ci:java_8_11_17_maven_386_v2 bash -c "cd /root/flink && 
./.github/workflows/docs.sh"
+  docker run -user 1001 --rm --volume "$PWD:/root/flink" 
chesnay/flink-ci:java_8_11_17_maven_386_v2 bash -c "cd /root/flink && 
./.github/workflows/docs.sh"
   - name: Upload documentation
 uses: burnett01/rsync-deployments@5.2
 with:



(flink) branch master updated: [hotfix] Update docs build CI image

2023-11-01 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 56b1be46704 [hotfix] Update docs build CI image
56b1be46704 is described below

commit 56b1be467041bbe8c18140083279ccf0416f54fe
Author: Chesnay Schepler 
AuthorDate: Wed Nov 1 10:24:48 2023 +0100

[hotfix] Update docs build CI image
---
 .github/workflows/docs.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 3353fdc693b..26b29a1c9ca 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -48,7 +48,7 @@ jobs:
   fi
   - name: Build documentation
 run: |
-  docker run  --rm --volume "$PWD:/root/flink" 
chesnay/flink-ci:java_8_11 bash -c "cd /root/flink && 
./.github/workflows/docs.sh"
+  docker run  --rm --volume "$PWD:/root/flink" 
chesnay/flink-ci:java_8_11_17_maven_386_v2 bash -c "cd /root/flink && 
./.github/workflows/docs.sh"
   - name: Upload documentation
 uses: burnett01/rsync-deployments@5.2
 with:



(flink) branch master updated: [hotfix][rest] Improve error message

2023-10-31 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 81d559a3bb1 [hotfix][rest] Improve error message
81d559a3bb1 is described below

commit 81d559a3bb11722f5148a9f4ad42e60105ae27a2
Author: Chesnay Schepler 
AuthorDate: Mon Oct 30 14:37:25 2023 +0100

[hotfix][rest] Improve error message
---
 .../org/apache/flink/runtime/rest/messages/MessageHeaders.java | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
index 63c54083493..122429e3c0d 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
@@ -78,12 +78,15 @@ public interface MessageHeaders<
  * @return short description
  */
 default String operationId() {
+final String className = getClass().getSimpleName();
+
 if (getHttpMethod() != HttpMethodWrapper.GET) {
 throw new UnsupportedOperationException(
-"The default implementation is only supported for GET 
calls. Please override 'operationId()'.");
+"The default implementation is only supported for GET 
calls. Please override 'operationId()' in '"
++ className
++ "'.");
 }
 
-final String className = getClass().getSimpleName();
 final int headersSuffixStart = className.lastIndexOf("Headers");
 if (headersSuffixStart == -1) {
 throw new IllegalStateException(



(flink) branch master updated (cc62044efc0 -> 530ebd2f4ef)

2023-10-30 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from cc62044efc0 [FLINK-4][table] Make map entries sorted by keys in 
json plan to have it stable for java21
 add 530ebd2f4ef [FLINK-32182][build] Use original japicmp plugin

No new revisions were added by this update.

Summary of changes:
 flink-scala/pom.xml  | 2 +-
 flink-streaming-scala/pom.xml| 4 ++--
 flink-table/flink-sql-jdbc-driver-bundle/pom.xml | 2 +-
 pom.xml  | 8 
 4 files changed, 8 insertions(+), 8 deletions(-)



(flink) branch master updated: [FLINK-32181][docs] Enforce Maven 3.8.6 as required version

2023-10-30 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 957eaeda496 [FLINK-32181][docs] Enforce Maven 3.8.6 as required version
957eaeda496 is described below

commit 957eaeda496a5a0bc80c86601217a3d643671317
Author: Chesnay Schepler 
AuthorDate: Wed Oct 25 16:05:18 2023 +0200

[FLINK-32181][docs] Enforce Maven 3.8.6 as required version
---
 README.md | 5 +
 pom.xml   | 6 +-
 2 files changed, 2 insertions(+), 9 deletions(-)

diff --git a/README.md b/README.md
index b2691132ffa..a510cc89962 100644
--- a/README.md
+++ b/README.md
@@ -69,7 +69,7 @@ Prerequisites for building Flink:
 
 * Unix-like environment (we use Linux, Mac OS X, Cygwin, WSL)
 * Git
-* Maven (we recommend version 3.8.6 and require at least 3.1.1)
+* Maven (we require version 3.8.6)
 * Java 8 or 11 (Java 9 or 10 may work)
 
 ```
@@ -80,9 +80,6 @@ cd flink
 
 Flink is now installed in `build-target`.
 
-*NOTE: Maven 3.3.x can build Flink, but will not properly shade away certain 
dependencies. Maven 3.1.1 creates the libraries properly.
-To build unit tests with Java 8, use Java 8u51 or above to prevent failures in 
unit tests that use the PowerMock runner.*
-
 ## Developing Flink
 
 The Flink committers use IntelliJ IDEA to develop the Flink codebase.
diff --git a/pom.xml b/pom.xml
index 3de7cb078b5..9d3176551dc 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1396,9 +1396,6 @@ under the License.





-   

-   
[3.8.6]
-   




[1.8.0,1.8.1)


@@ -1799,8 +1796,7 @@ under the License.




-   
-   
[3.1.1,)
+   
[3.8.6]





${target.java.version}



(flink) branch release-1.18 updated: [FLINK-33369] Use Java 17 docker image for e2e tests on Java 17

2023-10-27 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new 371d9ab2f88 [FLINK-33369] Use Java 17 docker image for e2e tests on 
Java 17
371d9ab2f88 is described below

commit 371d9ab2f88e977b3ad0d80faacd466828b31272
Author: Chesnay Schepler 
AuthorDate: Fri Oct 27 10:23:42 2023 +0200

[FLINK-33369] Use Java 17 docker image for e2e tests on Java 17
---
 flink-end-to-end-tests/test-scripts/common_docker.sh | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/flink-end-to-end-tests/test-scripts/common_docker.sh 
b/flink-end-to-end-tests/test-scripts/common_docker.sh
index 462ea3f4e9d..d37d1cc9e5c 100644
--- a/flink-end-to-end-tests/test-scripts/common_docker.sh
+++ b/flink-end-to-end-tests/test-scripts/common_docker.sh
@@ -54,6 +54,9 @@ function build_image() {
 if [[ ${PROFILE} == *"jdk11"* ]]; then
 java_version=11
 fi
+if [[ ${PROFILE} == *"jdk17"* ]]; then
+java_version=17
+fi
 
 cd flink-docker
 ./add-custom.sh -u ${file_server_address}:/flink.tgz -n ${image_name} 
-j ${java_version}



(flink) branch master updated: [FLINK-33369] Use Java 17 docker image for e2e tests on Java 17

2023-10-27 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new f68ae194a6f [FLINK-33369] Use Java 17 docker image for e2e tests on 
Java 17
f68ae194a6f is described below

commit f68ae194a6fda868c4d3ff3e3027cfc519c9017a
Author: Chesnay Schepler 
AuthorDate: Fri Oct 27 10:23:42 2023 +0200

[FLINK-33369] Use Java 17 docker image for e2e tests on Java 17
---
 flink-end-to-end-tests/test-scripts/common_docker.sh | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/flink-end-to-end-tests/test-scripts/common_docker.sh 
b/flink-end-to-end-tests/test-scripts/common_docker.sh
index 43c387c1fd8..e38f20359ba 100644
--- a/flink-end-to-end-tests/test-scripts/common_docker.sh
+++ b/flink-end-to-end-tests/test-scripts/common_docker.sh
@@ -54,6 +54,9 @@ function build_image() {
 if [[ ${PROFILE} == *"jdk11"* ]]; then
 java_version=11
 fi
+if [[ ${PROFILE} == *"jdk17"* ]]; then
+java_version=17
+fi
 
 cd flink-docker
 ./add-custom.sh -u ${file_server_address}:/flink.tgz -n ${image_name} 
-j ${java_version}



[flink] branch release-1.17 updated: [FLINK-33352][rest][docs] Add schema mappings to discriminator properties

2023-10-25 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.17
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.17 by this push:
 new 2aee7df2d79 [FLINK-33352][rest][docs] Add schema mappings to 
discriminator properties
2aee7df2d79 is described below

commit 2aee7df2d7904ffb38a99363d8b14d3ef2f1950f
Author: Chesnay Schepler 
AuthorDate: Tue Oct 24 16:36:10 2023 +0200

[FLINK-33352][rest][docs] Add schema mappings to discriminator properties
---
 docs/static/generated/rest_v1_dispatcher.yml |  7 +++
 .../rest/messages/checkpoints/CheckpointStatistics.java  | 16 
 .../checkpoints/SubtaskCheckpointStatistics.java | 14 ++
 3 files changed, 37 insertions(+)

diff --git a/docs/static/generated/rest_v1_dispatcher.yml 
b/docs/static/generated/rest_v1_dispatcher.yml
index d02d40d6937..a3377fa903f 100644
--- a/docs/static/generated/rest_v1_dispatcher.yml
+++ b/docs/static/generated/rest_v1_dispatcher.yml
@@ -1774,6 +1774,10 @@ components:
   format: int64
   discriminator:
 propertyName: className
+mapping:
+  completed: '#/components/schemas/CompletedCheckpointStatistics'
+  failed: '#/components/schemas/FailedCheckpointStatistics'
+  in_progress: '#/components/schemas/PendingCheckpointStatistics'
 CheckpointStatisticsSummary:
   type: object
   properties:
@@ -2854,6 +2858,9 @@ components:
   type: string
   discriminator:
 propertyName: className
+mapping:
+  completed: 
'#/components/schemas/CompletedSubtaskCheckpointStatistics'
+  pending: '#/components/schemas/PendingSubtaskCheckpointStatistics'
 SubtaskExecutionAttemptAccumulatorsInfo:
   type: object
   properties:
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
index a8a05ea5f9d..44e42a8990d 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
@@ -40,6 +40,9 @@ import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonTyp
 import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
+import io.swagger.v3.oas.annotations.media.DiscriminatorMapping;
+import io.swagger.v3.oas.annotations.media.Schema;
+
 import javax.annotation.Nullable;
 
 import java.util.Collection;
@@ -64,6 +67,19 @@ import java.util.Objects;
 value = CheckpointStatistics.PendingCheckpointStatistics.class,
 name = "in_progress")
 })
+@Schema(
+discriminatorProperty = "className",
+discriminatorMapping = {
+@DiscriminatorMapping(
+value = "completed",
+schema = 
CheckpointStatistics.CompletedCheckpointStatistics.class),
+@DiscriminatorMapping(
+value = "failed",
+schema = 
CheckpointStatistics.FailedCheckpointStatistics.class),
+@DiscriminatorMapping(
+value = "in_progress",
+schema = 
CheckpointStatistics.PendingCheckpointStatistics.class),
+})
 public class CheckpointStatistics implements ResponseBody {
 
 public static final String FIELD_NAME_ID = "id";
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/SubtaskCheckpointStatistics.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/SubtaskCheckpointStatistics.java
index 642509a965c..613916eb3d7 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/SubtaskCheckpointStatistics.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/SubtaskCheckpointStatistics.java
@@ -23,6 +23,9 @@ import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonPro
 import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonSubTypes;
 import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonTypeInfo;
 
+import io.swagger.v3.oas.annotations.media.DiscriminatorMapping;
+import io.swagger.v3.oas.annotations.media.Schema;
+
 import java.util.Objects;
 
 /** Checkpoint statistics for a subtask. */
@@ -38,6 +41,17 @@ import java.util.Objects;
 value = 
SubtaskCheckpointStatistics.PendingSubtaskCheckpointStatistics.class,
 name = "pe

[flink] branch release-1.18 updated: [FLINK-33352][rest][docs] Add schema mappings to discriminator properties

2023-10-25 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new ef950f86af3 [FLINK-33352][rest][docs] Add schema mappings to 
discriminator properties
ef950f86af3 is described below

commit ef950f86af3f9a3a585529dad788c466e437
Author: Chesnay Schepler 
AuthorDate: Tue Oct 24 16:36:10 2023 +0200

[FLINK-33352][rest][docs] Add schema mappings to discriminator properties
---
 docs/static/generated/rest_v1_dispatcher.yml |  7 +++
 .../rest/messages/checkpoints/CheckpointStatistics.java  | 16 
 .../checkpoints/SubtaskCheckpointStatistics.java | 14 ++
 3 files changed, 37 insertions(+)

diff --git a/docs/static/generated/rest_v1_dispatcher.yml 
b/docs/static/generated/rest_v1_dispatcher.yml
index 4c510e93742..bf0c9056bf9 100644
--- a/docs/static/generated/rest_v1_dispatcher.yml
+++ b/docs/static/generated/rest_v1_dispatcher.yml
@@ -1819,6 +1819,10 @@ components:
   format: int64
   discriminator:
 propertyName: className
+mapping:
+  completed: '#/components/schemas/CompletedCheckpointStatistics'
+  failed: '#/components/schemas/FailedCheckpointStatistics'
+  in_progress: '#/components/schemas/PendingCheckpointStatistics'
 CheckpointStatisticsSummary:
   type: object
   properties:
@@ -2935,6 +2939,9 @@ components:
   type: string
   discriminator:
 propertyName: className
+mapping:
+  completed: 
'#/components/schemas/CompletedSubtaskCheckpointStatistics'
+  pending: '#/components/schemas/PendingSubtaskCheckpointStatistics'
 SubtaskExecutionAttemptAccumulatorsInfo:
   type: object
   properties:
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
index fd8ed6f087b..b38f57c9878 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
@@ -41,6 +41,9 @@ import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonTyp
 import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
+import io.swagger.v3.oas.annotations.media.DiscriminatorMapping;
+import io.swagger.v3.oas.annotations.media.Schema;
+
 import javax.annotation.Nullable;
 
 import java.util.Collection;
@@ -64,6 +67,19 @@ import java.util.Objects;
 value = CheckpointStatistics.PendingCheckpointStatistics.class,
 name = "in_progress")
 })
+@Schema(
+discriminatorProperty = "className",
+discriminatorMapping = {
+@DiscriminatorMapping(
+value = "completed",
+schema = 
CheckpointStatistics.CompletedCheckpointStatistics.class),
+@DiscriminatorMapping(
+value = "failed",
+schema = 
CheckpointStatistics.FailedCheckpointStatistics.class),
+@DiscriminatorMapping(
+value = "in_progress",
+schema = 
CheckpointStatistics.PendingCheckpointStatistics.class),
+})
 public class CheckpointStatistics implements ResponseBody {
 
 public static final String FIELD_NAME_ID = "id";
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/SubtaskCheckpointStatistics.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/SubtaskCheckpointStatistics.java
index 642509a965c..613916eb3d7 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/SubtaskCheckpointStatistics.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/SubtaskCheckpointStatistics.java
@@ -23,6 +23,9 @@ import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonPro
 import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonSubTypes;
 import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonTypeInfo;
 
+import io.swagger.v3.oas.annotations.media.DiscriminatorMapping;
+import io.swagger.v3.oas.annotations.media.Schema;
+
 import java.util.Objects;
 
 /** Checkpoint statistics for a subtask. */
@@ -38,6 +41,17 @@ import java.util.Objects;
 value = 
SubtaskCheckpointStatistics.PendingSubtaskCheckpointStatistics.class,
 name = "pe

[flink] branch master updated (d722bf7d1fa -> 100cc20f40b)

2023-10-25 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from d722bf7d1fa [FLINK-33346][runtime][test] Removes timeout
 add e9bf7b54999 [hotfix] Update OpenAPI spec version
 add 100cc20f40b [FLINK-33352][rest][docs] Add schema mappings to 
discriminator properties

No new revisions were added by this update.

Summary of changes:
 docs/static/generated/rest_v1_dispatcher.yml |  9 -
 docs/static/generated/rest_v1_sql_gateway.yml|  2 +-
 docs/static/generated/rest_v2_sql_gateway.yml|  2 +-
 .../rest/messages/checkpoints/CheckpointStatistics.java  | 16 
 .../checkpoints/SubtaskCheckpointStatistics.java | 14 ++
 5 files changed, 40 insertions(+), 3 deletions(-)



[flink] branch master updated: [FLINK-33308] Upgrade lombok to 1.18.30

2023-10-20 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 9471f81a2fb [FLINK-33308] Upgrade lombok to 1.18.30
9471f81a2fb is described below

commit 9471f81a2fb53f5c8274bbbe99161d8f1b27bafc
Author: Sergey Nuyanzin 
AuthorDate: Tue Oct 17 23:18:42 2023 +0200

[FLINK-33308] Upgrade lombok to 1.18.30
---
 flink-core/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flink-core/pom.xml b/flink-core/pom.xml
index 598f0ed4995..ce90b2a1e6d 100644
--- a/flink-core/pom.xml
+++ b/flink-core/pom.xml
@@ -147,7 +147,7 @@ under the License.

org.projectlombok
lombok
-   1.18.22
+   1.18.30
test

 



[flink] branch master updated: [FLINK-33307] Disable spotless on Java 21

2023-10-20 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 72e302310ba [FLINK-33307] Disable spotless on Java 21
72e302310ba is described below

commit 72e302310ba55bb5f35966ed448243aae36e193e
Author: Sergey Nuyanzin 
AuthorDate: Wed Oct 18 22:28:43 2023 +0200

[FLINK-33307] Disable spotless on Java 21
---
 pom.xml | 24 
 1 file changed, 24 insertions(+)

diff --git a/pom.xml b/pom.xml
index 8a9c3f0e3a5..5a806eb4dc6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1124,6 +1124,30 @@ under the License.


 
+   
+   java21
+   
+   [21,)
+   
+
+   
+   
+   
+   
+   
com.diffplug.spotless
+   
spotless-maven-plugin
+   
+   
+   
true
+   
+   
+   
+   
+   
+   
+

fast




[flink] branch master updated (35a2257af5d -> 71c407a0b94)

2023-10-12 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from 35a2257af5d [FLINK-33248] Fix error message for CURRENT_WATERMARK 
without arguments
 add 71c407a0b94 [FLINK-33199][tests] Use fully qualified class names 
instead of Class objects in ArchitectureTests

No new revisions were added by this update.

Summary of changes:
 .../architecture/common/JavaFieldPredicates.java   | 57 ++---
 .../flink/architecture/common/Predicates.java  | 93 +++---
 .../src/test/java/PredicatesTest.java  | 48 +++
 .../flink/architecture/rules/TableApiRules.java|  8 +-
 .../flink-architecture-tests-test/pom.xml  | 18 -
 .../flink/architecture/rules/ITCaseRules.java  | 57 -
 6 files changed, 202 insertions(+), 79 deletions(-)
 create mode 100644 
flink-architecture-tests/flink-architecture-tests-base/src/test/java/PredicatesTest.java



[flink-connector-kafka] 01/02: fix jackson mismatch

2023-10-10 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch remove-shaded-jackson
in repository https://gitbox.apache.org/repos/asf/flink-connector-kafka.git

commit e5426a8a5337882fa0f7524ba81d9bc8eae4ddd9
Author: Chesnay Schepler 
AuthorDate: Tue Oct 10 13:06:41 2023 +0200

fix jackson mismatch
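
For context: JsonDeserializationSchema produces Flink's relocated ("shaded") 
Jackson ObjectNode, which the compiler treats as unrelated to the unshaded 
com.fasterxml type despite the identical simple name. A minimal sketch of the 
mismatch this commit works around (class name is illustrative):

    public class ShadedJacksonSketch {
        public static void main(String[] args) {
            org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode shaded = null;
            com.fasterxml.jackson.databind.node.ObjectNode unshaded = null;
            // unshaded = shaded;  // does not compile: unrelated types
            System.out.println(shaded + " / " + unshaded);
        }
    }
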
---
 .../KafkaRecordDeserializationSchemaTest.java | 19 ++-
 1 file changed, 14 insertions(+), 5 deletions(-)

diff --git 
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
 
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
index d61b7f83..a2a7a355 100644
--- 
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
+++ 
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
@@ -81,15 +81,24 @@ public class KafkaRecordDeserializationSchemaTest {
     @Test
     public void testKafkaValueDeserializationSchemaWrapper() throws Exception {
         final ConsumerRecord<byte[], byte[]> consumerRecord = getConsumerRecord();
-        KafkaRecordDeserializationSchema<ObjectNode> schema =
-                KafkaRecordDeserializationSchema.valueOnly(
-                        new JsonDeserializationSchema<>(ObjectNode.class));
+        KafkaRecordDeserializationSchema<
+                        org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node
+                                .ObjectNode>
+                schema =
+                        KafkaRecordDeserializationSchema.valueOnly(
+                                new JsonDeserializationSchema<>(
+                                        org.apache.flink.shaded.jackson2.com.fasterxml.jackson
+                                                .databind.node.ObjectNode.class));
         schema.open(new DummyInitializationContext());
-        SimpleCollector<ObjectNode> collector = new SimpleCollector<>();
+        SimpleCollector<
+                        org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node
+                                .ObjectNode>
+                collector = new SimpleCollector<>();
         schema.deserialize(consumerRecord, collector);
 
         assertThat(collector.list).hasSize(1);
-        ObjectNode deserializedValue = collector.list.get(0);
+        org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode
+                deserializedValue = collector.list.get(0);
 
         assertThat(deserializedValue.get("word").asText()).isEqualTo("world");
         assertThat(deserializedValue.get("key")).isNull();



[flink-connector-kafka] branch remove-shaded-jackson created (now ead6bc44)

2023-10-10 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch remove-shaded-jackson
in repository https://gitbox.apache.org/repos/asf/flink-connector-kafka.git


  at ead6bc44 make me pretty

This branch includes the following new commits:

 new e5426a8a fix jackson mismatch
 new ead6bc44 make me pretty

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.




[flink-connector-kafka] 02/02: make me pretty

2023-10-10 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch remove-shaded-jackson
in repository https://gitbox.apache.org/repos/asf/flink-connector-kafka.git

commit ead6bc4494c8b4d0580e6f221e9bdee0b0d2a714
Author: Chesnay Schepler 
AuthorDate: Tue Oct 10 13:17:24 2023 +0200

make me pretty
---
 .../KafkaRecordDeserializationSchemaTest.java | 15 ---
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git 
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
 
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
index a2a7a355..28560cf5 100644
--- 
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
+++ 
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
@@ -96,13 +96,14 @@ public class KafkaRecordDeserializationSchemaTest {
                 collector = new SimpleCollector<>();
         schema.deserialize(consumerRecord, collector);
 
-        assertThat(collector.list).hasSize(1);
-        org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode
-                deserializedValue = collector.list.get(0);
-
-        assertThat(deserializedValue.get("word").asText()).isEqualTo("world");
-        assertThat(deserializedValue.get("key")).isNull();
-        assertThat(deserializedValue.get("metadata")).isNull();
+        assertThat(collector.list)
+                .hasSize(1)
+                .allSatisfy(
+                        deserializedValue -> {
+                            assertThat(deserializedValue.get("word").asText()).isEqualTo("world");
+                            assertThat(deserializedValue.get("key")).isNull();
+                            assertThat(deserializedValue.get("metadata")).isNull();
+                        });
     }
 
     @Test


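The reshaped assertion above leans on AssertJ's `allSatisfy`, which applies a block of assertions to every element of an iterable. A minimal self-contained sketch (assumes assertj-core on the classpath; the data is illustrative):

```java
import static org.assertj.core.api.Assertions.assertThat;

import java.util.Collections;
import java.util.List;

public class AllSatisfyDemo {
    public static void main(String[] args) {
        List<String> words = Collections.singletonList("world");

        // One chained assertion: a size check plus a group of per-element checks.
        assertThat(words)
                .hasSize(1)
                .allSatisfy(word -> assertThat(word).startsWith("w").endsWith("d"));
    }
}
```
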

[flink-connector-kafka] branch remove-shaded-jackson created (now e5426a8a)

2023-10-10 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch remove-shaded-jackson
in repository https://gitbox.apache.org/repos/asf/flink-connector-kafka.git


  at e5426a8a fix jackson mismatch

This branch includes the following new commits:

 new e5426a8a fix jackson mismatch

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.




[flink-connector-kafka] 01/01: fix jackson mismatch

2023-10-10 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch remove-shaded-jackson
in repository https://gitbox.apache.org/repos/asf/flink-connector-kafka.git

commit e5426a8a5337882fa0f7524ba81d9bc8eae4ddd9
Author: Chesnay Schepler 
AuthorDate: Tue Oct 10 13:06:41 2023 +0200

fix jackson mismatch
---
 .../KafkaRecordDeserializationSchemaTest.java | 19 ++-
 1 file changed, 14 insertions(+), 5 deletions(-)

diff --git 
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
 
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
index d61b7f83..a2a7a355 100644
--- 
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
+++ 
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/reader/deserializer/KafkaRecordDeserializationSchemaTest.java
@@ -81,15 +81,24 @@ public class KafkaRecordDeserializationSchemaTest {
     @Test
     public void testKafkaValueDeserializationSchemaWrapper() throws Exception {
         final ConsumerRecord<byte[], byte[]> consumerRecord = getConsumerRecord();
-        KafkaRecordDeserializationSchema<ObjectNode> schema =
-                KafkaRecordDeserializationSchema.valueOnly(
-                        new JsonDeserializationSchema<>(ObjectNode.class));
+        KafkaRecordDeserializationSchema<
+                        org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node
+                                .ObjectNode>
+                schema =
+                        KafkaRecordDeserializationSchema.valueOnly(
+                                new JsonDeserializationSchema<>(
+                                        org.apache.flink.shaded.jackson2.com.fasterxml.jackson
+                                                .databind.node.ObjectNode.class));
         schema.open(new DummyInitializationContext());
-        SimpleCollector<ObjectNode> collector = new SimpleCollector<>();
+        SimpleCollector<
+                        org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node
+                                .ObjectNode>
+                collector = new SimpleCollector<>();
         schema.deserialize(consumerRecord, collector);
 
         assertThat(collector.list).hasSize(1);
-        ObjectNode deserializedValue = collector.list.get(0);
+        org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode
+                deserializedValue = collector.list.get(0);
 
         assertThat(deserializedValue.get("word").asText()).isEqualTo("world");
         assertThat(deserializedValue.get("key")).isNull();



[flink] branch release-1.18 updated: [FLINK-15736][docs] Add Java compatibility page

2023-09-20 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new 9c1318ca7fa [FLINK-15736][docs] Add Java compatibility page
9c1318ca7fa is described below

commit 9c1318ca7fa5b2e7b11827068ad1288483aaa464
Author: Chesnay Schepler 
AuthorDate: Wed Sep 20 22:07:04 2023 +0200

[FLINK-15736][docs] Add Java compatibility page
---
 .../docs/deployment/java_compatibility.md  | 77 ++
 docs/content/docs/deployment/java_compatibility.md | 77 ++
 docs/content/docs/deployment/memory/_index.md  |  2 +-
 3 files changed, 155 insertions(+), 1 deletion(-)

diff --git a/docs/content.zh/docs/deployment/java_compatibility.md 
b/docs/content.zh/docs/deployment/java_compatibility.md
new file mode 100644
index 000..f2e4cb93eb4
--- /dev/null
+++ b/docs/content.zh/docs/deployment/java_compatibility.md
@@ -0,0 +1,77 @@
+---
+title: Java Compatibility
+weight: 2
+type: docs
+---
+
+
+# Java compatibility
+
+This page lists which Java versions Flink supports and what limitations apply 
(if any).
+
+## Java 8 (deprecated)
+
+Support for Java 8 has been deprecated in 1.15.0.
+It is recommended to migrate to Java 11.
+
+## Java 11
+
+Support for Java 11 was added in 1.10.0 and is the recommended Java version to 
run Flink on.
+
+This is the default version for docker images.
+
+### Untested Flink features
+
+The following Flink features have not been tested with Java 11:
+
+* Hive connector
+* Hbase 1.x connector
+
+### Untested language features
+
+* Modularized user jars have not been tested.
+
+## Java 17
+
+Experimental support for Java 17 was added in 1.18.0. 
([FLINK-15736](https://issues.apache.org/jira/browse/FLINK-15736))
+
+### Untested Flink features
+
+These Flink features have not been tested with Java 17:
+
+* Hive connector
+* Hbase 1.x connector
+
+### JDK modularization
+
+Starting with Java 16, Java applications have to fully cooperate with the JDK modularization, also known as [Project Jigsaw](https://openjdk.org/projects/jigsaw/).
+This means that access to JDK classes/internals must be explicitly allowed by the application when it is started, on a per-module basis, in the form of --add-opens/--add-exports JVM arguments.
+
+Since Flink uses reflection for serializing user-defined functions and data (via Kryo), this means that if your UDFs or data types use JDK classes you may have to allow access to these JDK classes.
+
+These should be configured via the [env.java.opts.all]({{< ref "docs/deployment/config" >}}#env-java-opts-all) option.
+
+In the default configuration in the Flink distribution this option is configured such that Flink itself works on Java 17.
+The list of configured arguments must not be shortened, but only extended.
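+
+As an illustration only (the `--add-opens` entry below is an assumed example for a UDF that reflects over `java.util`; the defaults shipped with the distribution must stay in front of it):
+
+```
+env.java.opts.all: "<defaults from the distribution> --add-opens=java.base/java.util=ALL-UNNAMED"
+```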
+
+### Known issues
+
+* Java records are not supported. See 
[FLINK-32380](https://issues.apache.org/jira/browse/FLINK-32380) for updates.
+* SIGSEGV in C2 Compiler thread: Early Java 17 builds are affected by a bug 
where the JVM can fail abruptly. Update your Java 17 installation to resolve 
the issue. See [JDK-8277529](https://bugs.openjdk.org/browse/JDK-8277529) for 
details.
diff --git a/docs/content/docs/deployment/java_compatibility.md 
b/docs/content/docs/deployment/java_compatibility.md
new file mode 100644
index 000..f2e4cb93eb4
--- /dev/null
+++ b/docs/content/docs/deployment/java_compatibility.md
@@ -0,0 +1,77 @@
+---
+title: Java Compatibility
+weight: 2
+type: docs
+---
+
+
+# Java compatibility
+
+This page lists which Java versions Flink supports and what limitations apply 
(if any).
+
+## Java 8 (deprecated)
+
+Support for Java 8 has been deprecated in 1.15.0.
+It is recommended to migrate to Java 11.
+
+## Java 11
+
+Support for Java 11 was added in 1.10.0 and is the recommended Java version to 
run Flink on.
+
+This is the default version for docker images.
+
+### Untested Flink features
+
+The following Flink features have not been tested with Java 11:
+
+* Hive connector
+* Hbase 1.x connector
+
+### Untested language features
+
+* Modularized user jars have not been tested.
+
+## Java 17
+
+Experimental support for Java 17 was added in 1.18.0. 
([FLINK-15736](https://issues.apache.org/jira/browse/FLINK-15736))
+
+### Untested Flink features
+
+These Flink features have not been tested with Java 17:
+
+* Hive connector
+* Hbase 1.x connector
+
+### JDK modularization
+
+Starting with Java 16, Java applications have to fully cooperate with the JDK modularization, also known as [Project Jigsaw](https://openjdk.org/projects/jigsaw/).
+This means that access to JDK classes/internals must be explicitly allowed by the application when it is started, on a per-module basis, in the form of --add-opens/--add-exports JVM arguments.
+
+Since Flink uses reflection for serializing user-defined functions and data (via Kryo), this means that if your UDFs or data types use JDK classes you may have to allow access to these JDK classes.

[flink] branch master updated: [FLINK-15736][docs] Add Java compatibility page

2023-09-20 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 388601f1b75 [FLINK-15736][docs] Add Java compatibility page
388601f1b75 is described below

commit 388601f1b75abd443e149aaa4584d366072a1b0e
Author: Chesnay Schepler 
AuthorDate: Wed Sep 20 22:07:04 2023 +0200

[FLINK-15736][docs] Add Java compatibility page
---
 .../docs/deployment/java_compatibility.md  | 77 ++
 docs/content/docs/deployment/java_compatibility.md | 77 ++
 docs/content/docs/deployment/memory/_index.md  |  2 +-
 3 files changed, 155 insertions(+), 1 deletion(-)

diff --git a/docs/content.zh/docs/deployment/java_compatibility.md 
b/docs/content.zh/docs/deployment/java_compatibility.md
new file mode 100644
index 000..f2e4cb93eb4
--- /dev/null
+++ b/docs/content.zh/docs/deployment/java_compatibility.md
@@ -0,0 +1,77 @@
+---
+title: Java Compatibility
+weight: 2
+type: docs
+---
+
+
+# Java compatibility
+
+This page lists which Java versions Flink supports and what limitations apply 
(if any).
+
+## Java 8 (deprecated)
+
+Support for Java 8 has been deprecated in 1.15.0.
+It is recommended to migrate to Java 11.
+
+## Java 11
+
+Support for Java 11 was added in 1.10.0 and is the recommended Java version to 
run Flink on.
+
+This is the default version for docker images.
+
+### Untested Flink features
+
+The following Flink features have not been tested with Java 11:
+
+* Hive connector
+* Hbase 1.x connector
+
+### Untested language features
+
+* Modularized user jars have not been tested.
+
+## Java 17
+
+Experimental support for Java 17 was added in 1.18.0. 
([FLINK-15736](https://issues.apache.org/jira/browse/FLINK-15736))
+
+### Untested Flink features
+
+These Flink features have not been tested with Java 17:
+
+* Hive connector
+* Hbase 1.x connector
+
+### JDK modularization
+
+Starting with Java 16, Java applications have to fully cooperate with the JDK modularization, also known as [Project Jigsaw](https://openjdk.org/projects/jigsaw/).
+This means that access to JDK classes/internals must be explicitly allowed by the application when it is started, on a per-module basis, in the form of --add-opens/--add-exports JVM arguments.
+
+Since Flink uses reflection for serializing user-defined functions and data 
(via Kryo), this means that if your UDFs or data types use JDK classes you may 
have to allow access to these JDK classes.
+
+These should be configured via the [env.java.opts.all]({{< ref 
"docs/deployment/config" >}}#env-java-opts-all) option.
+
+In the default configuration in the Flink distribution this option is 
configured such that Flink itself works on Java 17.  
+The list of configured arguments must not be shortened, but only extended.
+
+### Known issues
+
+* Java records are not supported. See 
[FLINK-32380](https://issues.apache.org/jira/browse/FLINK-32380) for updates.
+* SIGSEGV in C2 Compiler thread: Early Java 17 builds are affected by a bug 
where the JVM can fail abruptly. Update your Java 17 installation to resolve 
the issue. See [JDK-8277529](https://bugs.openjdk.org/browse/JDK-8277529) for 
details.
diff --git a/docs/content/docs/deployment/java_compatibility.md 
b/docs/content/docs/deployment/java_compatibility.md
new file mode 100644
index 000..f2e4cb93eb4
--- /dev/null
+++ b/docs/content/docs/deployment/java_compatibility.md
@@ -0,0 +1,77 @@
+---
+title: Java Compatibility
+weight: 2
+type: docs
+---
+
+
+# Java compatibility
+
+This page lists which Java versions Flink supports and what limitations apply 
(if any).
+
+## Java 8 (deprecated)
+
+Support for Java 8 has been deprecated in 1.15.0.
+It is recommended to migrate to Java 11.
+
+## Java 11
+
+Support for Java 11 was added in 1.10.0 and is the recommended Java version to 
run Flink on.
+
+This is the default version for docker images.
+
+### Untested Flink features
+
+The following Flink features have not been tested with Java 11:
+
+* Hive connector
+* Hbase 1.x connector
+
+### Untested language features
+
+* Modularized user jars have not been tested.
+
+## Java 17
+
+Experimental support for Java 17 was added in 1.18.0. 
([FLINK-15736](https://issues.apache.org/jira/browse/FLINK-15736))
+
+### Untested Flink features
+
+These Flink features have not been tested with Java 17:
+
+* Hive connector
+* Hbase 1.x connector
+
+### JDK modularization
+
+Starting with Java 16, Java applications have to fully cooperate with the JDK modularization, also known as [Project Jigsaw](https://openjdk.org/projects/jigsaw/).
+This means that access to JDK classes/internals must be explicitly allowed by the application when it is started, on a per-module basis, in the form of --add-opens/--add-exports JVM arguments.
+
+Since Flink uses reflection for serializing user-defined functions and data (via Kryo), this means that if your UDFs or data types use JDK classes you may have to allow access to these JDK classes.

[flink] branch release-1.18 updated: [FLINK-33086] Protect failure enrichment against unhandled exceptions

2023-09-14 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new 206609e822a [FLINK-33086] Protect failure enrichment against unhandled 
exceptions
206609e822a is described below

commit 206609e822a8029a78245e6eef7ab5d88a0f370b
Author: Panagiotis Garefalakis 
AuthorDate: Wed Sep 13 19:07:25 2023 -0700

[FLINK-33086] Protect failure enrichment against unhandled exceptions

Co-authored-by: Chesnay Schepler 
---
 .../runtime/failure/FailureEnricherUtils.java  |  8 
 .../runtime/failure/FailureEnricherUtilsTest.java  | 46 ++
 2 files changed, 54 insertions(+)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/failure/FailureEnricherUtils.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/failure/FailureEnricherUtils.java
index f704a1ddbd7..d9b4c2278df 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/failure/FailureEnricherUtils.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/failure/FailureEnricherUtils.java
@@ -200,6 +200,14 @@ public class FailureEnricherUtils {
                                         }
                                     });
                             return validLabels;
+                        })
+                .exceptionally(
+                        t -> {
+                            LOG.warn(
+                                    "Enricher {} threw an exception.",
+                                    enricher.getClass(),
+                                    t);
+                            return Collections.emptyMap();
                         }));
         }
         // combine all CompletableFutures into a single CompletableFuture containing a Map of labels
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/failure/FailureEnricherUtilsTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/failure/FailureEnricherUtilsTest.java
index 8eedf2d0be2..d5b6ef334d3 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/failure/FailureEnricherUtilsTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/failure/FailureEnricherUtilsTest.java
@@ -202,6 +202,38 @@ class FailureEnricherUtilsTest {
         assertThatFuture(result).eventuallySucceeds().satisfies(labels -> labels.isEmpty());
     }
 
+    @Test
+    public void testLabelFailureWithValidAndThrowingEnricher() {
+        // A failing enricher shouldn't affect remaining enrichers with valid labels
+        final Throwable cause = new RuntimeException("test exception");
+        final FailureEnricher validEnricher = new TestEnricher("enricherKey");
+        final FailureEnricher throwingEnricher = new ThrowingEnricher("throwingKey");
+
+        final Set<FailureEnricher> enrichers =
+                new HashSet<FailureEnricher>() {
+                    {
+                        add(validEnricher);
+                        add(throwingEnricher);
+                    }
+                };
+
+        final CompletableFuture<Map<String, String>> result =
+                FailureEnricherUtils.labelFailure(
+                        cause,
+                        null,
+                        ComponentMainThreadExecutorServiceAdapter.forMainThread(),
+                        enrichers);
+
+        assertThatFuture(result)
+                .eventuallySucceeds()
+                .satisfies(
+                        labels -> {
+                            assertThat(labels).hasSize(1);
+                            assertThat(labels).containsKey("enricherKey");
+                            assertThat(labels).containsValue("enricherKeyValue");
+                        });
+    }
+
     @Test
     public void testLabelFailureMergeException() {
         // Throwing exception labelFailure when merging duplicate keys
@@ -253,6 +285,20 @@ class FailureEnricherUtilsTest {
         }
     }
 
+    private static class ThrowingEnricher extends TestEnricher {
+        ThrowingEnricher(String... outputKeys) {
+            super(outputKeys);
+        }
+
+        @Override
+        public CompletableFuture<Map<String, String>> processFailure(
+                Throwable cause, Context context) {
+            final CompletableFuture<Map<String, String>> future = new CompletableFuture<>();
+            future.completeExceptionally(new RuntimeException("test failure"));
+            return future;
+        }
+    }
+
     private static class AndAnotherTestEnricher extends TestEnricher {
         AndAnotherTestEnricher(String... outputKeys) {
             super(outputKeys);


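Reduced to a standalone sketch (names are illustrative, not the Flink API), the guard added above is the classic `exceptionally` fallback on a `CompletableFuture`:

```java
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.CompletableFuture;

public class ExceptionallyFallback {
    public static void main(String[] args) {
        // Stand-in for an enricher future that fails with an unhandled exception.
        CompletableFuture<Map<String, String>> failing = new CompletableFuture<>();
        failing.completeExceptionally(new RuntimeException("test failure"));

        // exceptionally() replaces the failure with a harmless default, so code
        // that later combines many such futures never observes the exception.
        CompletableFuture<Map<String, String>> guarded =
                failing.exceptionally(t -> Collections.emptyMap());

        System.out.println(guarded.join()); // prints: {}
    }
}
```
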

[flink] branch master updated: [FLINK-33086] Protect failure enrichment against unhandled exceptions

2023-09-14 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new e58a718d04e [FLINK-33086] Protect failure enrichment against unhandled 
exceptions
e58a718d04e is described below

commit e58a718d04e3ec6e2a43da8a868e5515916c0eea
Author: Panagiotis Garefalakis 
AuthorDate: Wed Sep 13 19:07:25 2023 -0700

[FLINK-33086] Protect failure enrichment against unhandled exceptions

Co-authored-by: Chesnay Schepler 
---
 .../runtime/failure/FailureEnricherUtils.java  |  8 
 .../runtime/failure/FailureEnricherUtilsTest.java  | 46 ++
 2 files changed, 54 insertions(+)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/failure/FailureEnricherUtils.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/failure/FailureEnricherUtils.java
index f704a1ddbd7..d9b4c2278df 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/failure/FailureEnricherUtils.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/failure/FailureEnricherUtils.java
@@ -200,6 +200,14 @@ public class FailureEnricherUtils {
                                         }
                                     });
                             return validLabels;
+                        })
+                .exceptionally(
+                        t -> {
+                            LOG.warn(
+                                    "Enricher {} threw an exception.",
+                                    enricher.getClass(),
+                                    t);
+                            return Collections.emptyMap();
                         }));
         }
         // combine all CompletableFutures into a single CompletableFuture containing a Map of labels
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/failure/FailureEnricherUtilsTest.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/failure/FailureEnricherUtilsTest.java
index 8eedf2d0be2..d5b6ef334d3 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/failure/FailureEnricherUtilsTest.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/failure/FailureEnricherUtilsTest.java
@@ -202,6 +202,38 @@ class FailureEnricherUtilsTest {
         assertThatFuture(result).eventuallySucceeds().satisfies(labels -> labels.isEmpty());
     }
 
+    @Test
+    public void testLabelFailureWithValidAndThrowingEnricher() {
+        // A failing enricher shouldn't affect remaining enrichers with valid labels
+        final Throwable cause = new RuntimeException("test exception");
+        final FailureEnricher validEnricher = new TestEnricher("enricherKey");
+        final FailureEnricher throwingEnricher = new ThrowingEnricher("throwingKey");
+
+        final Set<FailureEnricher> enrichers =
+                new HashSet<FailureEnricher>() {
+                    {
+                        add(validEnricher);
+                        add(throwingEnricher);
+                    }
+                };
+
+        final CompletableFuture<Map<String, String>> result =
+                FailureEnricherUtils.labelFailure(
+                        cause,
+                        null,
+                        ComponentMainThreadExecutorServiceAdapter.forMainThread(),
+                        enrichers);
+
+        assertThatFuture(result)
+                .eventuallySucceeds()
+                .satisfies(
+                        labels -> {
+                            assertThat(labels).hasSize(1);
+                            assertThat(labels).containsKey("enricherKey");
+                            assertThat(labels).containsValue("enricherKeyValue");
+                        });
+    }
+
     @Test
     public void testLabelFailureMergeException() {
         // Throwing exception labelFailure when merging duplicate keys
@@ -253,6 +285,20 @@ class FailureEnricherUtilsTest {
         }
     }
 
+    private static class ThrowingEnricher extends TestEnricher {
+        ThrowingEnricher(String... outputKeys) {
+            super(outputKeys);
+        }
+
+        @Override
+        public CompletableFuture<Map<String, String>> processFailure(
+                Throwable cause, Context context) {
+            final CompletableFuture<Map<String, String>> future = new CompletableFuture<>();
+            future.completeExceptionally(new RuntimeException("test failure"));
+            return future;
+        }
+    }
+
     private static class AndAnotherTestEnricher extends TestEnricher {
         AndAnotherTestEnricher(String... outputKeys) {
             super(outputKeys);



[flink] branch master updated: [FLINK-31889][docs] Add documentation for implementing/loading enrichers

2023-09-11 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 484da993e22 [FLINK-31889][docs] Add documentation for 
implementing/loading enrichers
484da993e22 is described below

commit 484da993e22c30b0c9e2bcf747036a79443519fc
Author: Panagiotis Garefalakis 
AuthorDate: Sun Apr 23 19:07:20 2023 -0700

[FLINK-31889][docs] Add documentation for implementing/loading enrichers
---
 .../docs/deployment/advanced/failure_enrichers.md  | 110 +
 .../docs/deployment/advanced/failure_enrichers.md  | 110 +
 2 files changed, 220 insertions(+)

diff --git a/docs/content.zh/docs/deployment/advanced/failure_enrichers.md 
b/docs/content.zh/docs/deployment/advanced/failure_enrichers.md
new file mode 100644
index 000..0bbf45efd68
--- /dev/null
+++ b/docs/content.zh/docs/deployment/advanced/failure_enrichers.md
@@ -0,0 +1,110 @@
+---
+title: "Failure Enrichers"
+nav-title: failure-enrichers
+nav-parent_id: advanced
+nav-pos: 3
+---
+
+
+## Custom failure enrichers
+Flink provides a pluggable interface for users to register their custom logic 
and enrich failures with extra metadata labels (string key-value pairs).
+This enables users to implement their own failure enrichment plugins to 
categorize job failures, expose custom metrics, or make calls to external 
notification systems.
+
+FailureEnrichers are triggered every time an exception is reported at runtime 
by the JobManager.
+Every FailureEnricher may asynchronously return labels associated with the 
failure that are then exposed via the JobManager's REST API (e.g., a 
'type:System' label implying the failure is categorized as a system error).
+
+
+### Implement a plugin for your custom enricher
+
+To implement a custom FailureEnricher plugin, you need to:
+
+- Add your own FailureEnricher by implementing the {{< gh_link 
file="/flink-core/src/main/java/org/apache/flink/core/failure/FailureEnricher.java"
 name="FailureEnricher" >}} interface.
+
+- Add your own FailureEnricherFactory by implementing the {{< gh_link 
file="/flink-core/src/main/java/org/apache/flink/core/failure/FailureEnricherFactory.java"
 name="FailureEnricherFactory" >}} interface.
+
+- Add a service entry. Create a file 
`META-INF/services/org.apache.flink.core.failure.FailureEnricherFactory` which 
contains the class name of your failure enricher factory class (see [Java 
Service 
Loader](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/util/ServiceLoader.html)
 docs for more details).
+
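+As a sketch, for the `TestFailureEnricherFactory` shown further below the service file would contain a single line, the factory's fully qualified class name:
+
+```
+org.apache.flink.test.plugin.jar.failure.TestFailureEnricherFactory
+```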
+
+Then, create a jar which includes your `FailureEnricher`, 
`FailureEnricherFactory`, `META-INF/services/` and all external dependencies.
+Make a directory in `plugins/` of your Flink distribution with an arbitrary 
name, e.g. "failure-enrichment", and put the jar into this directory.
+See [Flink Plugin]({% link deployment/filesystems/plugins.md %}) for more 
details.
+
+{{< hint warning >}}
+Note that every FailureEnricher should define a set of {{< gh_link file="/flink-core/src/main/java/org/apache/flink/core/failure/FailureEnricher.java" name="output keys" >}} that may be associated with values. This set of keys has to be unique; otherwise all enrichers with overlapping keys will be ignored.
+{{< /hint >}}
+
+FailureEnricherFactory example:
+
+``` java
+package org.apache.flink.test.plugin.jar.failure;
+
+public class TestFailureEnricherFactory implements FailureEnricherFactory {
+
+   @Override
+   public FailureEnricher createFailureEnricher(Configuration conf) {
+return new CustomEnricher();
+   }
+}
+```
+
+FailureEnricher example:
+
+``` java
+package org.apache.flink.test.plugin.jar.failure;
+
+public class CustomEnricher implements FailureEnricher {
+private final Set outputKeys;
+
+public CustomEnricher() {
+this.outputKeys = Collections.singleton("labelKey");
+}
+
+@Override
+public Set getOutputKeys() {
+return outputKeys;
+}
+
+@Override
+public CompletableFuture> processFailure(
+Throwable cause, Context context) {
+return 
CompletableFuture.completedFuture(Collections.singletonMap("labelKey", 
"labelValue"));
+}
+}
+```
+
+### Configuration
+
+The JobManager loads FailureEnricher plugins at startup. To make sure your FailureEnrichers are loaded, all class names should be defined as part of the [jobmanager.failure-enrichers configuration]({{< ref "docs/deployment/config#jobmanager-failure-enrichers" >}}).
+If this configuration is empty, NO enrichers will be started. Example:
+```
+jobmanager.failure-enrichers = 
org.apache.flink.test.plugin.jar.failure.CustomEnricher
+```
+

[flink] branch release-1.18 updated: [FLINK-31889][docs] Add documentation for implementing/loading enrichers

2023-09-11 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.18
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.18 by this push:
 new 7339a5359cb [FLINK-31889][docs] Add documentation for 
implementing/loading enrichers
7339a5359cb is described below

commit 7339a5359cb1ea1e718c00037bc322b02cc523e2
Author: Panagiotis Garefalakis 
AuthorDate: Sun Apr 23 19:07:20 2023 -0700

[FLINK-31889][docs] Add documentation for implementing/loading enrichers
---
 .../docs/deployment/advanced/failure_enrichers.md  | 110 +
 .../docs/deployment/advanced/failure_enrichers.md  | 110 +
 2 files changed, 220 insertions(+)

diff --git a/docs/content.zh/docs/deployment/advanced/failure_enrichers.md 
b/docs/content.zh/docs/deployment/advanced/failure_enrichers.md
new file mode 100644
index 000..0bbf45efd68
--- /dev/null
+++ b/docs/content.zh/docs/deployment/advanced/failure_enrichers.md
@@ -0,0 +1,110 @@
+---
+title: "Failure Enrichers"
+nav-title: failure-enrichers
+nav-parent_id: advanced
+nav-pos: 3
+---
+
+
+## Custom failure enrichers
+Flink provides a pluggable interface for users to register their custom logic 
and enrich failures with extra metadata labels (string key-value pairs).
+This enables users to implement their own failure enrichment plugins to 
categorize job failures, expose custom metrics, or make calls to external 
notification systems.
+
+FailureEnrichers are triggered every time an exception is reported at runtime 
by the JobManager.
+Every FailureEnricher may asynchronously return labels associated with the 
failure that are then exposed via the JobManager's REST API (e.g., a 
'type:System' label implying the failure is categorized as a system error).
+
+
+### Implement a plugin for your custom enricher
+
+To implement a custom FailureEnricher plugin, you need to:
+
+- Add your own FailureEnricher by implementing the {{< gh_link 
file="/flink-core/src/main/java/org/apache/flink/core/failure/FailureEnricher.java"
 name="FailureEnricher" >}} interface.
+
+- Add your own FailureEnricherFactory by implementing the {{< gh_link 
file="/flink-core/src/main/java/org/apache/flink/core/failure/FailureEnricherFactory.java"
 name="FailureEnricherFactory" >}} interface.
+
+- Add a service entry. Create a file 
`META-INF/services/org.apache.flink.core.failure.FailureEnricherFactory` which 
contains the class name of your failure enricher factory class (see [Java 
Service 
Loader](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/util/ServiceLoader.html)
 docs for more details).
+
+
+Then, create a jar which includes your `FailureEnricher`, 
`FailureEnricherFactory`, `META-INF/services/` and all external dependencies.
+Make a directory in `plugins/` of your Flink distribution with an arbitrary 
name, e.g. "failure-enrichment", and put the jar into this directory.
+See [Flink Plugin]({% link deployment/filesystems/plugins.md %}) for more 
details.
+
+{{< hint warning >}}
+Note that every FailureEnricher should define a set of {{< gh_link file="/flink-core/src/main/java/org/apache/flink/core/failure/FailureEnricher.java" name="output keys" >}} that may be associated with values. This set of keys has to be unique; otherwise all enrichers with overlapping keys will be ignored.
+{{< /hint >}}
+
+FailureEnricherFactory example:
+
+``` java
+package org.apache.flink.test.plugin.jar.failure;
+
+public class TestFailureEnricherFactory implements FailureEnricherFactory {
+
+   @Override
+   public FailureEnricher createFailureEnricher(Configuration conf) {
+return new CustomEnricher();
+   }
+}
+```
+
+FailureEnricher example:
+
+``` java
+package org.apache.flink.test.plugin.jar.failure;
+
+public class CustomEnricher implements FailureEnricher {
+private final Set outputKeys;
+
+public CustomEnricher() {
+this.outputKeys = Collections.singleton("labelKey");
+}
+
+@Override
+public Set getOutputKeys() {
+return outputKeys;
+}
+
+@Override
+public CompletableFuture> processFailure(
+Throwable cause, Context context) {
+return 
CompletableFuture.completedFuture(Collections.singletonMap("labelKey", 
"labelValue"));
+}
+}
+```
+
+### Configuration
+
+The JobManager loads FailureEnricher plugins at startup. To make sure your FailureEnrichers are loaded, all class names should be defined as part of the [jobmanager.failure-enrichers configuration]({{< ref "docs/deployment/config#jobmanager-failure-enrichers" >}}).
+If this configuration is empty, NO enrichers will be started. Example:
+```
+jobmanager.failure-enrichers = 
org.apache.flink.test.plugin.jar.failure.CustomEnricher
+```
+

[flink] 01/02: [FLINK-32888] Expose file comparison assertion

2023-08-18 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.16
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 333a6e67b4bbe78b8c5695f8cd52ea2ea7dc0b20
Author: Chesnay Schepler 
AuthorDate: Thu Aug 17 11:11:34 2023 +0200

[FLINK-32888] Expose file comparison assertion
---
 .../runtime/rest/MultipartUploadResource.java  | 53 +++---
 1 file changed, 26 insertions(+), 27 deletions(-)

diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadResource.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadResource.java
index 19939b2dce9..9109424b177 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadResource.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadResource.java
@@ -134,36 +134,35 @@ public class MultipartUploadResource extends ExternalResource {
                 (request, restfulGateway) -> {
                     // the default verifier checks for identity (i.e. same name and content) of all
                     // uploaded files
-                    List<Path> expectedFiles =
-                            getFilesToUpload().stream()
-                                    .map(File::toPath)
-                                    .collect(Collectors.toList());
-                    List<Path> uploadedFiles =
-                            request.getUploadedFiles().stream()
-                                    .map(File::toPath)
-                                    .collect(Collectors.toList());
-
-                    assertEquals(expectedFiles.size(), uploadedFiles.size());
-
-                    List<Path> expectedList = new ArrayList<>(expectedFiles);
-                    List<Path> actualList = new ArrayList<>(uploadedFiles);
-                    expectedList.sort(Comparator.comparing(Path::toString));
-                    actualList.sort(Comparator.comparing(Path::toString));
-
-                    for (int x = 0; x < expectedList.size(); x++) {
-                        Path expected = expectedList.get(x);
-                        Path actual = actualList.get(x);
-
-                        assertEquals(
-                                expected.getFileName().toString(), actual.getFileName().toString());
-
-                        byte[] originalContent = Files.readAllBytes(expected);
-                        byte[] receivedContent = Files.readAllBytes(actual);
-                        assertArrayEquals(originalContent, receivedContent);
-                    }
+                    assertUploadedFilesEqual(request, getFilesToUpload());
                 });
     }
 
+    public static void assertUploadedFilesEqual(HandlerRequest request, Collection<File> files)
+            throws IOException {
+        List<Path> expectedFiles = files.stream().map(File::toPath).collect(Collectors.toList());
+        List<Path> uploadedFiles =
+                request.getUploadedFiles().stream().map(File::toPath).collect(Collectors.toList());
+
+        assertEquals(expectedFiles.size(), uploadedFiles.size());
+
+        List<Path> expectedList = new ArrayList<>(expectedFiles);
+        List<Path> actualList = new ArrayList<>(uploadedFiles);
+        expectedList.sort(Comparator.comparing(Path::toString));
+        actualList.sort(Comparator.comparing(Path::toString));
+
+        for (int x = 0; x < expectedList.size(); x++) {
+            Path expected = expectedList.get(x);
+            Path actual = actualList.get(x);
+
+            assertEquals(expected.getFileName().toString(), actual.getFileName().toString());
+
+            byte[] originalContent = Files.readAllBytes(expected);
+            byte[] receivedContent = Files.readAllBytes(actual);
+            assertArrayEquals(originalContent, receivedContent);
+        }
+    }
+
     public void setFileUploadVerifier(
             BiConsumerWithException<
                     HandlerRequest, RestfulGateway, Exception>



[flink] branch release-1.16 updated (5157ac5921d -> 2d3c142eb03)

2023-08-18 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch release-1.16
in repository https://gitbox.apache.org/repos/asf/flink.git


from 5157ac5921d [FLINK-31139][state/changelog] not upload empty state 
changelog file
 new 333a6e67b4b [FLINK-32888] Expose file comparison assertion
 new 2d3c142eb03 [FLINK-32888] Handle hasNext() throwing 
EndOfDataDecoderException

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../flink/runtime/rest/FileUploadHandler.java  | 12 -
 .../runtime/rest/FileUploadHandlerITCase.java  | 58 ++
 .../runtime/rest/MultipartUploadResource.java  | 53 ++--
 3 files changed, 95 insertions(+), 28 deletions(-)



[flink] 02/02: [FLINK-32888] Handle hasNext() throwing EndOfDataDecoderException

2023-08-18 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.16
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 2d3c142eb036f2cf65b0a4f81caddd7e4c943fd5
Author: Chesnay Schepler 
AuthorDate: Fri Aug 18 14:13:28 2023 +0200

[FLINK-32888] Handle hasNext() throwing EndOfDataDecoderException
---
 .../flink/runtime/rest/FileUploadHandler.java  | 12 -
 .../runtime/rest/FileUploadHandlerITCase.java  | 58 ++
 2 files changed, 69 insertions(+), 1 deletion(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
index c6dda2cdfb2..d60f48d68ee 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
@@ -148,7 +148,7 @@ public class FileUploadHandler extends SimpleChannelInboundHandler<HttpObject> {
             currentHttpPostRequestDecoder.offer(httpContent);
 
             while (httpContent != LastHttpContent.EMPTY_LAST_CONTENT
-                    && currentHttpPostRequestDecoder.hasNext()) {
+                    && hasNext(currentHttpPostRequestDecoder)) {
                 final InterfaceHttpData data = currentHttpPostRequestDecoder.next();
                 if (data.getHttpDataType() == InterfaceHttpData.HttpDataType.FileUpload) {
                     final DiskFileUpload fileUpload = (DiskFileUpload) data;
@@ -214,6 +214,16 @@ public class FileUploadHandler extends SimpleChannelInboundHandler<HttpObject> {
         }
     }
 
+    private static boolean hasNext(HttpPostRequestDecoder decoder) {
+        try {
+            return decoder.hasNext();
+        } catch (HttpPostRequestDecoder.EndOfDataDecoderException e) {
+            // this can occur if the final chunk wasn't empty, but didn't contain any attribute data
+            // unfortunately the Netty APIs don't give us any way to check this
+            return false;
+        }
+    }
+
     private void handleError(
             ChannelHandlerContext ctx,
             String errorMessage,
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
index 34d17955922..480b58da26e 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
@@ -24,6 +24,7 @@ import org.apache.flink.runtime.rest.messages.MessageHeaders;
 import org.apache.flink.runtime.rest.messages.RequestBody;
 import org.apache.flink.runtime.rest.util.RestMapperUtils;
 import org.apache.flink.runtime.webmonitor.RestfulGateway;
+import org.apache.flink.testutils.junit.utils.TempDirUtils;
 import org.apache.flink.util.FileUtils;
 import org.apache.flink.util.TestLogger;
 import org.apache.flink.util.function.BiConsumerWithException;
@@ -39,13 +40,16 @@ import okhttp3.Response;
 import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.RandomAccessFile;
 import java.io.StringWriter;
 import java.lang.reflect.Field;
 import java.nio.file.Path;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
@@ -71,6 +75,8 @@ public class FileUploadHandlerITCase extends TestLogger {
     @Rule
     public final MultipartUploadResource multipartUpdateResource = new MultipartUploadResource();
 
+    @Rule public final TemporaryFolder tmp = new TemporaryFolder();
+
     private static final ObjectMapper OBJECT_MAPPER = RestMapperUtils.getStrictObjectMapper();
 
 @ClassRule
@@ -115,6 +121,15 @@ public class FileUploadHandlerITCase extends TestLogger {
 return finalizeRequest(builder, headerUrl);
 }
 
+    private Request buildMixedRequest(
+            String headerUrl, MultipartUploadResource.TestRequestBody json, File file)
+            throws IOException {
+        MultipartBody.Builder builder = new MultipartBody.Builder();
+        builder = addJsonPart(builder, json, FileUploadHandler.HTTP_ATTRIBUTE_REQUEST);
+        builder = addFilePart(builder, file, file.getName());
+        return finalizeRequest(builder, headerUrl);
+    }
+
     private Request buildMixedRequest(
             String headerUrl, MultipartUploadResource.TestRequestBody json) throws IOException {
         MultipartBody.Builder builder = new MultipartBody.Builder();
@@ -219,6 +234,49 @@ public class FileUploadHandlerITCase extends TestLogger {
 verifyNoFileIsRegisteredToDeleteOnExitHook();
 }
 
+/**
+ * This test checks for a specific multipart request chunk layout using a magic number.

[flink] 02/02: [FLINK-32888] Handle hasNext() throwing EndOfDataDecoderException

2023-08-18 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.17
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 3e1e32bd89a2ed871e79cd16634c6f66d5ff3db8
Author: Chesnay Schepler 
AuthorDate: Fri Aug 18 14:13:28 2023 +0200

[FLINK-32888] Handle hasNext() throwing EndOfDataDecoderException
---
 .../flink/runtime/rest/FileUploadHandler.java  | 12 -
 .../runtime/rest/FileUploadHandlerITCase.java  | 58 ++
 2 files changed, 69 insertions(+), 1 deletion(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
index c3b797bcf72..c9e1fd78d74 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
@@ -148,7 +148,7 @@ public class FileUploadHandler extends SimpleChannelInboundHandler<HttpObject> {
             currentHttpPostRequestDecoder.offer(httpContent);
 
             while (httpContent != LastHttpContent.EMPTY_LAST_CONTENT
-                    && currentHttpPostRequestDecoder.hasNext()) {
+                    && hasNext(currentHttpPostRequestDecoder)) {
                 final InterfaceHttpData data = currentHttpPostRequestDecoder.next();
                 if (data.getHttpDataType() == InterfaceHttpData.HttpDataType.FileUpload) {
                     final DiskFileUpload fileUpload = (DiskFileUpload) data;
@@ -212,6 +212,16 @@ public class FileUploadHandler extends SimpleChannelInboundHandler<HttpObject> {
         }
     }
 
+    private static boolean hasNext(HttpPostRequestDecoder decoder) {
+        try {
+            return decoder.hasNext();
+        } catch (HttpPostRequestDecoder.EndOfDataDecoderException e) {
+            // this can occur if the final chunk wasn't empty, but didn't contain any attribute data
+            // unfortunately the Netty APIs don't give us any way to check this
+            return false;
+        }
+    }
+
     private void handleError(
             ChannelHandlerContext ctx,
             String errorMessage,
diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
index 34d17955922..480b58da26e 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
@@ -24,6 +24,7 @@ import org.apache.flink.runtime.rest.messages.MessageHeaders;
 import org.apache.flink.runtime.rest.messages.RequestBody;
 import org.apache.flink.runtime.rest.util.RestMapperUtils;
 import org.apache.flink.runtime.webmonitor.RestfulGateway;
+import org.apache.flink.testutils.junit.utils.TempDirUtils;
 import org.apache.flink.util.FileUtils;
 import org.apache.flink.util.TestLogger;
 import org.apache.flink.util.function.BiConsumerWithException;
@@ -39,13 +40,16 @@ import okhttp3.Response;
 import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.RandomAccessFile;
 import java.io.StringWriter;
 import java.lang.reflect.Field;
 import java.nio.file.Path;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
@@ -71,6 +75,8 @@ public class FileUploadHandlerITCase extends TestLogger {
     @Rule
     public final MultipartUploadResource multipartUpdateResource = new MultipartUploadResource();
 
+    @Rule public final TemporaryFolder tmp = new TemporaryFolder();
+
     private static final ObjectMapper OBJECT_MAPPER = RestMapperUtils.getStrictObjectMapper();
 
 @ClassRule
@@ -115,6 +121,15 @@ public class FileUploadHandlerITCase extends TestLogger {
 return finalizeRequest(builder, headerUrl);
 }
 
+    private Request buildMixedRequest(
+            String headerUrl, MultipartUploadResource.TestRequestBody json, File file)
+            throws IOException {
+        MultipartBody.Builder builder = new MultipartBody.Builder();
+        builder = addJsonPart(builder, json, FileUploadHandler.HTTP_ATTRIBUTE_REQUEST);
+        builder = addFilePart(builder, file, file.getName());
+        return finalizeRequest(builder, headerUrl);
+    }
+
     private Request buildMixedRequest(
             String headerUrl, MultipartUploadResource.TestRequestBody json) throws IOException {
         MultipartBody.Builder builder = new MultipartBody.Builder();
@@ -219,6 +234,49 @@ public class FileUploadHandlerITCase extends TestLogger {
 verifyNoFileIsRegisteredToDeleteOnExitHook();
 }
 
+/**
+ * This test checks for a specific multipart request chunk layout using a magic number.

[flink] branch release-1.17 updated (38b9c280128 -> 3e1e32bd89a)

2023-08-18 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch release-1.17
in repository https://gitbox.apache.org/repos/asf/flink.git


from 38b9c280128 [FLINK-32876][runtime] Prevent 
ExecutionTimeBasedSlowTaskDetector from identifying tasks in CREATED state as 
slow tasks.
 new 4a368707162 [FLINK-32888] Expose file comparison assertion
 new 3e1e32bd89a [FLINK-32888] Handle hasNext() throwing 
EndOfDataDecoderException

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../flink/runtime/rest/FileUploadHandler.java  | 12 -
 .../runtime/rest/FileUploadHandlerITCase.java  | 58 ++
 .../runtime/rest/MultipartUploadResource.java  | 53 ++--
 3 files changed, 95 insertions(+), 28 deletions(-)



[flink] 01/02: [FLINK-32888] Expose file comparison assertion

2023-08-18 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch release-1.17
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 4a368707162760fc39208d4a4d4bac2c6c728802
Author: Chesnay Schepler 
AuthorDate: Thu Aug 17 11:11:34 2023 +0200

[FLINK-32888] Expose file comparison assertion
---
 .../runtime/rest/MultipartUploadResource.java  | 53 +++---
 1 file changed, 26 insertions(+), 27 deletions(-)

diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadResource.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadResource.java
index 19939b2dce9..9109424b177 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadResource.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadResource.java
@@ -134,36 +134,35 @@ public class MultipartUploadResource extends ExternalResource {
                 (request, restfulGateway) -> {
                     // the default verifier checks for identity (i.e. same name and content) of all
                     // uploaded files
-                    List<Path> expectedFiles =
-                            getFilesToUpload().stream()
-                                    .map(File::toPath)
-                                    .collect(Collectors.toList());
-                    List<Path> uploadedFiles =
-                            request.getUploadedFiles().stream()
-                                    .map(File::toPath)
-                                    .collect(Collectors.toList());
-
-                    assertEquals(expectedFiles.size(), uploadedFiles.size());
-
-                    List<Path> expectedList = new ArrayList<>(expectedFiles);
-                    List<Path> actualList = new ArrayList<>(uploadedFiles);
-                    expectedList.sort(Comparator.comparing(Path::toString));
-                    actualList.sort(Comparator.comparing(Path::toString));
-
-                    for (int x = 0; x < expectedList.size(); x++) {
-                        Path expected = expectedList.get(x);
-                        Path actual = actualList.get(x);
-
-                        assertEquals(
-                                expected.getFileName().toString(), actual.getFileName().toString());
-
-                        byte[] originalContent = Files.readAllBytes(expected);
-                        byte[] receivedContent = Files.readAllBytes(actual);
-                        assertArrayEquals(originalContent, receivedContent);
-                    }
+                    assertUploadedFilesEqual(request, getFilesToUpload());
                 });
     }
 
+    public static void assertUploadedFilesEqual(HandlerRequest request, Collection<File> files)
+            throws IOException {
+        List<Path> expectedFiles = files.stream().map(File::toPath).collect(Collectors.toList());
+        List<Path> uploadedFiles =
+                request.getUploadedFiles().stream().map(File::toPath).collect(Collectors.toList());
+
+        assertEquals(expectedFiles.size(), uploadedFiles.size());
+
+        List<Path> expectedList = new ArrayList<>(expectedFiles);
+        List<Path> actualList = new ArrayList<>(uploadedFiles);
+        expectedList.sort(Comparator.comparing(Path::toString));
+        actualList.sort(Comparator.comparing(Path::toString));
+
+        for (int x = 0; x < expectedList.size(); x++) {
+            Path expected = expectedList.get(x);
+            Path actual = actualList.get(x);
+
+            assertEquals(expected.getFileName().toString(), actual.getFileName().toString());
+
+            byte[] originalContent = Files.readAllBytes(expected);
+            byte[] receivedContent = Files.readAllBytes(actual);
+            assertArrayEquals(originalContent, receivedContent);
+        }
+    }
+
     public void setFileUploadVerifier(
             BiConsumerWithException<
                     HandlerRequest, RestfulGateway, Exception>



[flink] 01/02: [FLINK-32888] Expose file comparison assertion

2023-08-18 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 329ee55e2e3ef5a6cf21009f6e321a99b1c91452
Author: Chesnay Schepler 
AuthorDate: Thu Aug 17 11:11:34 2023 +0200

[FLINK-32888] Expose file comparison assertion
---
 .../runtime/rest/MultipartUploadExtension.java | 53 +++---
 1 file changed, 26 insertions(+), 27 deletions(-)

diff --git 
a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadExtension.java
 
b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadExtension.java
index 672afe442ae..494eb212260 100644
--- 
a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadExtension.java
+++ 
b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadExtension.java
@@ -138,36 +138,35 @@ public class MultipartUploadExtension implements CustomExtension {
                 (request, restfulGateway) -> {
                     // the default verifier checks for identity (i.e. same name and content) of all
                     // uploaded files
-                    List<Path> expectedFiles =
-                            getFilesToUpload().stream()
-                                    .map(File::toPath)
-                                    .collect(Collectors.toList());
-                    List<Path> uploadedFiles =
-                            request.getUploadedFiles().stream()
-                                    .map(File::toPath)
-                                    .collect(Collectors.toList());
-
-                    assertThat(uploadedFiles).hasSameSizeAs(expectedFiles);
-
-                    List<Path> expectedList = new ArrayList<>(expectedFiles);
-                    List<Path> actualList = new ArrayList<>(uploadedFiles);
-                    expectedList.sort(Comparator.comparing(Path::toString));
-                    actualList.sort(Comparator.comparing(Path::toString));
-
-                    for (int x = 0; x < expectedList.size(); x++) {
-                        Path expected = expectedList.get(x);
-                        Path actual = actualList.get(x);
-
-                        assertThat(actual.getFileName())
-                                .hasToString(expected.getFileName().toString());
-
-                        byte[] originalContent = Files.readAllBytes(expected);
-                        byte[] receivedContent = Files.readAllBytes(actual);
-                        assertThat(receivedContent).isEqualTo(originalContent);
-                    }
+                    assertUploadedFilesEqual(request, getFilesToUpload());
                 });
     }
 
+    public static void assertUploadedFilesEqual(HandlerRequest request, Collection<File> files)
+            throws IOException {
+        List<Path> expectedFiles = files.stream().map(File::toPath).collect(Collectors.toList());
+        List<Path> uploadedFiles =
+                request.getUploadedFiles().stream().map(File::toPath).collect(Collectors.toList());
+
+        assertThat(uploadedFiles).hasSameSizeAs(expectedFiles);
+
+        List<Path> expectedList = new ArrayList<>(expectedFiles);
+        List<Path> actualList = new ArrayList<>(uploadedFiles);
+        expectedList.sort(Comparator.comparing(Path::toString));
+        actualList.sort(Comparator.comparing(Path::toString));
+
+        for (int x = 0; x < expectedList.size(); x++) {
+            Path expected = expectedList.get(x);
+            Path actual = actualList.get(x);
+
+            assertThat(actual.getFileName()).hasToString(expected.getFileName().toString());
+
+            byte[] originalContent = Files.readAllBytes(expected);
+            byte[] receivedContent = Files.readAllBytes(actual);
+            assertThat(receivedContent).isEqualTo(originalContent);
+        }
+    }
+
     public void setFileUploadVerifier(
             BiConsumerWithException<
                     HandlerRequest, RestfulGateway, Exception>



[flink] 02/02: [FLINK-32888] Handle hasNext() throwing EndOfDataDecoderException

2023-08-18 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 9546f8243a24e7b45582b6de6702f819f1d73f97
Author: Chesnay Schepler 
AuthorDate: Thu Aug 17 10:46:54 2023 +0200

[FLINK-32888] Handle hasNext() throwing EndOfDataDecoderException

This _probably_ happens when a non-empty http content is received that does 
not contain any attribute data.
---
 .../flink/runtime/rest/FileUploadHandler.java  | 12 -
 .../runtime/rest/FileUploadHandlerITCase.java  | 56 ++
 2 files changed, 67 insertions(+), 1 deletion(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
index c3b797bcf72..c9e1fd78d74 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java
@@ -148,7 +148,7 @@ public class FileUploadHandler extends SimpleChannelInboundHandler<HttpObject> {
             currentHttpPostRequestDecoder.offer(httpContent);
 
             while (httpContent != LastHttpContent.EMPTY_LAST_CONTENT
-                    && currentHttpPostRequestDecoder.hasNext()) {
+                    && hasNext(currentHttpPostRequestDecoder)) {
                 final InterfaceHttpData data = currentHttpPostRequestDecoder.next();
                 if (data.getHttpDataType() == InterfaceHttpData.HttpDataType.FileUpload) {
                     final DiskFileUpload fileUpload = (DiskFileUpload) data;
@@ -212,6 +212,16 @@ public class FileUploadHandler extends SimpleChannelInboundHandler<HttpObject> {
         }
     }
 
+    private static boolean hasNext(HttpPostRequestDecoder decoder) {
+        try {
+            return decoder.hasNext();
+        } catch (HttpPostRequestDecoder.EndOfDataDecoderException e) {
+            // this can occur if the final chunk wasn't empty, but didn't contain any attribute data
+            // unfortunately the Netty APIs don't give us any way to check this
+            return false;
+        }
+    }
+
     private void handleError(
             ChannelHandlerContext ctx,
             String errorMessage,
diff --git a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
index 75879a9cea7..45d14f1a444 100644
--- a/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
+++ b/flink-runtime/src/test/java/org/apache/flink/runtime/rest/FileUploadHandlerITCase.java
@@ -25,6 +25,7 @@ import org.apache.flink.runtime.rest.messages.MessageHeaders;
 import org.apache.flink.runtime.rest.messages.RequestBody;
 import org.apache.flink.runtime.rest.util.RestMapperUtils;
 import org.apache.flink.runtime.webmonitor.RestfulGateway;
+import org.apache.flink.testutils.junit.utils.TempDirUtils;
 import org.apache.flink.util.FileUtils;
 import org.apache.flink.util.function.BiConsumerWithException;
 
@@ -42,10 +43,12 @@ import org.junit.jupiter.api.io.TempDir;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.RandomAccessFile;
 import java.io.StringWriter;
 import java.lang.reflect.Field;
 import java.nio.file.Path;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
@@ -123,6 +126,15 @@ class FileUploadHandlerITCase {
 return finalizeRequest(builder, headerUrl);
 }
 
+private Request buildMixedRequest(
+String headerUrl, MultipartUploadExtension.TestRequestBody json, File file)
+throws IOException {
+MultipartBody.Builder builder = new MultipartBody.Builder();
+builder = addJsonPart(builder, json, FileUploadHandler.HTTP_ATTRIBUTE_REQUEST);
+builder = addFilePart(builder, file, file.getName());
+return finalizeRequest(builder, headerUrl);
+}
+
 private Request buildMixedRequest(
 String headerUrl, MultipartUploadExtension.TestRequestBody json) throws IOException {
 MultipartBody.Builder builder = new MultipartBody.Builder();
@@ -227,6 +239,50 @@ class FileUploadHandlerITCase {
 verifyNoFileIsRegisteredToDeleteOnExitHook();
 }
 
+/**
+ * This test checks for a specific multipart request chunk layout using a magic number.
+ *
+ * These things are very susceptible to interference from other requests or parts of the payload;
+ * for example if the JSON payload increases by a single byte it can already break the number.
+ * Do not reuse the client.
+ *
+ * To find the magic number you can define a static counter, and loop the test in the IDE
+ * (without forking!) while incremen

[flink] branch master updated (b93216f7855 -> 9546f8243a2)

2023-08-18 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from b93216f7855 [FLINK-32788][runtime] Support the handling of exception when slow task detector notify slow tasks
 new 329ee55e2e3 [FLINK-32888] Expose file comparison assertion
 new 9546f8243a2 [FLINK-32888] Handle hasNext() throwing EndOfDataDecoderException

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../flink/runtime/rest/FileUploadHandler.java  | 12 -
 .../runtime/rest/FileUploadHandlerITCase.java  | 56 ++
 .../runtime/rest/MultipartUploadExtension.java | 53 ++--
 3 files changed, 93 insertions(+), 28 deletions(-)



[flink] 13/15: [FLINK-32834] Forward actual Maven error code instead of 1

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 3e5eca702d264d87964022c6cebe7dc95fbd8f9b
Author: Chesnay Schepler 
AuthorDate: Thu Aug 17 11:29:11 2023 +0200

[FLINK-32834] Forward actual Maven error code instead of 1
---
 tools/ci/verify_bundled_optional.sh |  4 ++--
 tools/ci/verify_scala_suffixes.sh   | 14 +++---
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/tools/ci/verify_bundled_optional.sh b/tools/ci/verify_bundled_optional.sh
index 890b1a5acf5..276d95eb634 100755
--- a/tools/ci/verify_bundled_optional.sh
+++ b/tools/ci/verify_bundled_optional.sh
@@ -56,7 +56,7 @@ if [ $EXIT_CODE != 0 ]; then
 echo "=============================================================================="
 echo "Optional Check failed. The dependency tree could not be determined. See previous output for details."
 echo "=============================================================================="
-exit 1
+exit $EXIT_CODE
 fi
 
 cat "${dependency_plugin_output}"
@@ -68,7 +68,7 @@ if [ $EXIT_CODE != 0 ]; then
 echo "=============================================================================="
 echo "Optional Check failed. See previous output for details."
 echo "=============================================================================="
-exit 1
+exit $EXIT_CODE
 fi
 
 exit 0
diff --git a/tools/ci/verify_scala_suffixes.sh b/tools/ci/verify_scala_suffixes.sh
index 45fca80a842..9747066b4c8 100755
--- a/tools/ci/verify_scala_suffixes.sh
+++ b/tools/ci/verify_scala_suffixes.sh
@@ -71,18 +71,18 @@ if [ $EXIT_CODE != 0 ]; then
 echo "=============================================================================="
 echo "Suffix Check failed. The dependency tree could not be determined. See previous output for details."
 echo "=============================================================================="
-exit 1
+exit $EXIT_CODE
 fi
 
 $MVN -pl tools/ci/flink-ci-tools exec:java exec:java -Dexec.mainClass=org.apache.flink.tools.ci.suffixcheck.ScalaSuffixChecker -Dexec.args="${dependency_plugin_output} $(pwd)"
 EXIT_CODE=$?
 
-if [ $EXIT_CODE == 0 ]; then
-exit 0
+if [ $EXIT_CODE != 0 ]; then
+echo "=============================================================================="
+echo "Suffix Check failed. See previous output for details."
+echo "=============================================================================="
+exit $EXIT_CODE
 fi
 
-echo "=============================================================================="
-echo "Suffix Check failed. See previous output for details."
-echo "=============================================================================="
-exit 1
+exit 0
 
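Aside: the pattern in these hunks depends on capturing the exit status into EXIT_CODE immediately after the command runs, since the very next command overwrites $?; forwarding that value keeps Maven's real error code visible to CI. A minimal sketch under assumed names (some_check_tool and the output path are illustrative, not from the scripts above):

    #!/usr/bin/env bash
    some_check_tool > /tmp/check.out   # stand-in for e.g. a 'mvn dependency:tree' call
    EXIT_CODE=$?                       # capture immediately; any later command clobbers $?

    if [ $EXIT_CODE != 0 ]; then
        echo "Check failed. See previous output for details."
        exit $EXIT_CODE                # forward the real status instead of a hardcoded 1
    fi
    exit 0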



[flink] 01/15: [FLINK-32834] Run all compile scripts from root directory

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 5d163dd39d6179c3618dcada86d42b2b332569f3
Author: Chesnay Schepler 
AuthorDate: Fri Aug 11 10:01:06 2023 +0200

[FLINK-32834] Run all compile scripts from root directory

- make it easier to work with relative paths
  - specifically, useful to have the scripts rely on mvnw by default (follow-up)
---
 tools/ci/license_check.sh   | 4 +---
 tools/ci/verify_bundled_optional.sh | 4 +---
 tools/ci/verify_scala_suffixes.sh   | 4 +---
 3 files changed, 3 insertions(+), 9 deletions(-)

diff --git a/tools/ci/license_check.sh b/tools/ci/license_check.sh
index 79c96416113..9bbcadab96a 100755
--- a/tools/ci/license_check.sh
+++ b/tools/ci/license_check.sh
@@ -24,9 +24,7 @@ FLINK_DEPLOYED_ROOT=$4
 
 source "${CI_DIR}/maven-utils.sh"
 
-cd $CI_DIR/flink-ci-tools/
-
-run_mvn exec:java -Dexec.mainClass=org.apache.flink.tools.ci.licensecheck.LicenseChecker -Dexec.args=\"$MVN_CLEAN_COMPILE_OUT $FLINK_ROOT $FLINK_DEPLOYED_ROOT\"
+run_mvn -pl tools/ci/flink-ci-tools exec:java -Dexec.mainClass=org.apache.flink.tools.ci.licensecheck.LicenseChecker -Dexec.args=\"$MVN_CLEAN_COMPILE_OUT $FLINK_ROOT $FLINK_DEPLOYED_ROOT\"
 EXIT_CODE=$?
 
 if [ $EXIT_CODE != 0 ]; then
diff --git a/tools/ci/verify_bundled_optional.sh b/tools/ci/verify_bundled_optional.sh
index db43b320249..40e761ed3e6 100755
--- a/tools/ci/verify_bundled_optional.sh
+++ b/tools/ci/verify_bundled_optional.sh
@@ -32,9 +32,7 @@ run_mvn dependency:tree -B > "${dependency_plugin_output}"
 
 cat "${dependency_plugin_output}"
 
-cd "${CI_DIR}/flink-ci-tools/" || exit
-
-run_mvn exec:java -Dexec.mainClass=org.apache.flink.tools.ci.optional.ShadeOptionalChecker -Dexec.args=\""${MVN_CLEAN_COMPILE_OUT}" "${dependency_plugin_output}"\"
+run_mvn -pl tools/ci/flink-ci-tools exec:java -Dexec.mainClass=org.apache.flink.tools.ci.optional.ShadeOptionalChecker -Dexec.args=\""${MVN_CLEAN_COMPILE_OUT}" "${dependency_plugin_output}"\"
 EXIT_CODE=$?
 
 if [ $EXIT_CODE != 0 ]; then
diff --git a/tools/ci/verify_scala_suffixes.sh b/tools/ci/verify_scala_suffixes.sh
index 53b9edaf08e..756a7503bee 100755
--- a/tools/ci/verify_scala_suffixes.sh
+++ b/tools/ci/verify_scala_suffixes.sh
@@ -61,9 +61,7 @@ if [ $EXIT_CODE != 0 ]; then
 exit 1
 fi
 
-cd "${CI_DIR}/flink-ci-tools/" || exit
-
-run_mvn exec:java -Dexec.mainClass=org.apache.flink.tools.ci.suffixcheck.ScalaSuffixChecker -Dexec.args=\""${dependency_plugin_output}" "${FLINK_ROOT}"\"
+run_mvn -pl tools/ci/flink-ci-tools exec:java exec:java -Dexec.mainClass=org.apache.flink.tools.ci.suffixcheck.ScalaSuffixChecker -Dexec.args=\""${dependency_plugin_output}" "${FLINK_ROOT}"\"
 EXIT_CODE=$?
 
 if [ $EXIT_CODE == 0 ]; then
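Aside: the essence of this change is swapping a 'cd' into the tooling module for Maven's -pl selector, so the scripts keep the repository root as their working directory and relative paths in -Dexec.args resolve against it. A hedged sketch (MODULE and the exec.mainClass value are placeholders for illustration, assuming the mvnw wrapper sits in the root):

    #!/usr/bin/env bash
    MODULE="tools/ci/flink-ci-tools"

    # before: cd "${MODULE}" && mvn exec:java ...  (relative args resolved against the module)
    # after: select the module with -pl and stay in the repository root
    ./mvnw -pl "${MODULE}" exec:java -Dexec.mainClass=SomeChecker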



[flink] 07/15: [FLINK-32834] Streamline CI_DIR detection

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 003eae50c360001cfdbd38c34013f7ed3704bf7a
Author: Chesnay Schepler 
AuthorDate: Fri Aug 11 11:11:47 2023 +0200

[FLINK-32834] Streamline CI_DIR detection

Copied from the Flink connector release scripts
---
 tools/ci/compile.sh | 7 +--
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/tools/ci/compile.sh b/tools/ci/compile.sh
index 271501033e4..8d1e6fbfce0 100755
--- a/tools/ci/compile.sh
+++ b/tools/ci/compile.sh
@@ -21,12 +21,7 @@
 # This file contains tooling for compiling Flink
 #
 
-HERE="`dirname \"$0\"`" # relative
-HERE="`( cd \"$HERE\" && pwd )`"# absolutized and normalized
-if [ -z "$HERE" ] ; then
-exit 1  # fail
-fi
-CI_DIR="$HERE/../ci"
+CI_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
 MVN_CLEAN_COMPILE_OUT="/tmp/clean_compile.out"
 
 # Deploy into this directory, to run license checks on all jars staged for deployment.
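Aside: the new one-liner resolves the directory containing the script itself regardless of where the caller's shell happens to be, replacing the three-step dirname/cd/pwd dance it removes. A small illustrative sketch (demo.sh is a hypothetical file name):

    #!/usr/bin/env bash
    # demo.sh - prints its own directory no matter where it is invoked from
    SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
    echo "script lives in: ${SCRIPT_DIR}"

    # e.g. 'cd /tmp && /path/to/demo.sh' still prints: script lives in: /path/to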



[flink] 15/15: [FLINK-32834] Use descriptive output file names

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 5bf5003f5c7baf19b0164f78558e495d8bb62b04
Author: Chesnay Schepler 
AuthorDate: Thu Aug 17 11:41:33 2023 +0200

[FLINK-32834] Use descriptive output file names
---
 tools/ci/verify_bundled_optional.sh | 2 +-
 tools/ci/verify_scala_suffixes.sh   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/tools/ci/verify_bundled_optional.sh b/tools/ci/verify_bundled_optional.sh
index e0f5a22255d..9b926bd559b 100755
--- a/tools/ci/verify_bundled_optional.sh
+++ b/tools/ci/verify_bundled_optional.sh
@@ -46,7 +46,7 @@ MVN_CLEAN_COMPILE_OUT=$1
 
 MVN=${MVN:-./mvnw}
 
-dependency_plugin_output=/tmp/optional_dep.txt
+dependency_plugin_output=/tmp/dependency_tree_optional.txt
 
 # run with -T1 because our maven output parsers don't support multi-threaded builds
 $MVN dependency:tree -B -T1 > "${dependency_plugin_output}"
diff --git a/tools/ci/verify_scala_suffixes.sh b/tools/ci/verify_scala_suffixes.sh
index f6aae040731..bf7dce5b9e1 100755
--- a/tools/ci/verify_scala_suffixes.sh
+++ b/tools/ci/verify_scala_suffixes.sh
@@ -61,7 +61,7 @@ echo "--- Flink Scala Dependency Analyzer ---"
 echo "Analyzing modules for Scala dependencies using 'mvn dependency:tree'."
 echo "If you haven't built the project, please do so first by running \"mvn clean install -DskipTests\""
 
-dependency_plugin_output=/tmp/dep.txt
+dependency_plugin_output=/tmp/dependency_tree_scala.txt
 
+# run with -T1 because our maven output parsers don't support multi-threaded builds
+$MVN dependency:tree -Dincludes=org.scala-lang,:*_2.1*:: ${MAVEN_ARGUMENTS} -T1 > "${dependency_plugin_output}"



[flink] 03/15: [FLINK-32834] Write all tmp files to /tmp/

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit d08356a1afe9a86b06c3978c04b0ef467d1685a6
Author: Chesnay Schepler 
AuthorDate: Fri Aug 11 10:13:04 2023 +0200

[FLINK-32834] Write all tmp files to /tmp/

- prevent files from being picked up by git/rat
- don't use /target because mvn clean would interfere
- tmp dirs under tools/ci would be neat, but we lack a central place to create it
---
 tools/ci/compile.sh | 14 +-
 tools/ci/shade.sh   | 30 --
 tools/ci/verify_bundled_optional.sh |  2 +-
 tools/ci/verify_scala_suffixes.sh   |  2 +-
 4 files changed, 27 insertions(+), 21 deletions(-)

diff --git a/tools/ci/compile.sh b/tools/ci/compile.sh
index 92b8d331755..49743999707 100755
--- a/tools/ci/compile.sh
+++ b/tools/ci/compile.sh
@@ -70,24 +70,28 @@ fi
 
 echo " Checking Javadocs "
 
+javadoc_output=/tmp/javadoc.out
+
 # use the same invocation as .github/workflows/docs.sh
-run_mvn javadoc:aggregate -DadditionalJOption='-Xdoclint:none' \
+$MVN javadoc:aggregate -DadditionalJOption='-Xdoclint:none' \
   -Dmaven.javadoc.failOnError=false -Dcheckstyle.skip=true -Denforcer.skip=true -Dspotless.skip=true -Drat.skip=true \
-  -Dheader=someTestHeader > javadoc.out
+  -Dheader=someTestHeader > ${javadoc_output}
 EXIT_CODE=$?
 if [ $EXIT_CODE != 0 ] ; then
   echo "ERROR in Javadocs. Printing full output:"
-  cat javadoc.out ; rm javadoc.out
+  cat ${javadoc_output}
   exit $EXIT_CODE
 fi
 
 echo " Checking Scaladocs "
 
-run_mvn scala:doc -Dcheckstyle.skip=true -Denforcer.skip=true -Dspotless.skip=true -pl flink-scala 2> scaladoc.out
+scaladoc_output=/tmp/scaladoc.out
+
+$MVN scala:doc -Dcheckstyle.skip=true -Denforcer.skip=true -Dspotless.skip=true -pl flink-scala 2> ${scaladoc_output}
 EXIT_CODE=$?
 if [ $EXIT_CODE != 0 ] ; then
   echo "ERROR in Scaladocs. Printing full output:"
-  cat scaladoc.out ; rm scaladoc.out
+  cat ${scaladoc_output}
   exit $EXIT_CODE
 fi
 
diff --git a/tools/ci/shade.sh b/tools/ci/shade.sh
index 70e99251280..51ea11e7540 100755
--- a/tools/ci/shade.sh
+++ b/tools/ci/shade.sh
@@ -17,10 +17,12 @@
 # limitations under the License.
 

 
+jarContents=/tmp/allClasses
+
 # Check the final fat jar for illegal or missing artifacts
 check_shaded_artifacts() {
-   jar tf build-target/lib/flink-dist*.jar > allClasses
-   ASM=`cat allClasses | grep '^org/objectweb/asm/' | wc -l`
+   jar tf build-target/lib/flink-dist*.jar > ${jarContents}
+   ASM=`cat ${jarContents} | grep '^org/objectweb/asm/' | wc -l`
if [ "$ASM" != "0" ]; then
echo "=============================================================================="
echo "Detected '$ASM' unshaded asm dependencies in fat jar"
@@ -28,7 +30,7 @@ check_shaded_artifacts() {
return 1
fi
 
-   GUAVA=`cat allClasses | grep '^com/google/common' | wc -l`
+   GUAVA=`cat ${jarContents} | grep '^com/google/common' | wc -l`
if [ "$GUAVA" != "0" ]; then
echo "=============================================================================="
echo "Detected '$GUAVA' guava dependencies in fat jar"
@@ -36,7 +38,7 @@ check_shaded_artifacts() {
return 1
fi
 
-   CODEHAUS_JACKSON=`cat allClasses | grep '^org/codehaus/jackson' | wc -l`
+   CODEHAUS_JACKSON=`cat ${jarContents} | grep '^org/codehaus/jackson' | wc -l`
if [ "$CODEHAUS_JACKSON" != "0" ]; then
echo "=============================================================================="
echo "Detected '$CODEHAUS_JACKSON' unshaded org.codehaus.jackson classes in fat jar"
@@ -44,7 +46,7 @@ check_shaded_artifacts() {
return 1
fi
 
-   FASTERXML_JACKSON=`cat allClasses | grep '^com/fasterxml/jackson' | wc -l`
+   FASTERXML_JACKSON=`cat ${jarContents} | grep '^com/fasterxml/jackson' | wc -l`
if [ "$FASTERXML_JACKSON" != "0" ]; then
echo "=============================================================================="
echo "Detected '$FASTERXML_JACKSON' unshaded com.fasterxml.jackson classes in fat jar"
@@ -52,7 +54,7 @@ check_shaded_artifacts() {
return 1
fi
 
-   SNAPPY=`cat allClasses | grep '^org/xerial/snappy' | wc -l`
+   SNAPPY=`cat ${jarContents} | grep '^org/xerial/snappy' | wc -l`
if [ "$SNAPPY" = "0" ]; t

[flink] 09/15: [FLINK-32834] Forward any additional args to maven

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 109c7ff261be834825e1c78c2c776c2ecb314a8a
Author: Chesnay Schepler 
AuthorDate: Fri Aug 11 11:32:43 2023 +0200

[FLINK-32834] Forward any additional args to maven

For example: 'tools/ci/compile.sh -Dfast'
---
 tools/ci/compile.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/ci/compile.sh b/tools/ci/compile.sh
index 5fc899dd00b..bc349e3d53a 100755
--- a/tools/ci/compile.sh
+++ b/tools/ci/compile.sh
@@ -44,7 +44,7 @@ echo "=============================================================================="
 EXIT_CODE=0
 
 $MVN clean deploy -DaltDeploymentRepository=validation_repository::default::file:$MVN_VALIDATION_DIR -Dflink.convergence.phase=install -Pcheck-convergence \
--Dmaven.javadoc.skip=true -U -DskipTests | tee $MVN_CLEAN_COMPILE_OUT
+-Dmaven.javadoc.skip=true -U -DskipTests "${@}" | tee $MVN_CLEAN_COMPILE_OUT
 
 EXIT_CODE=${PIPESTATUS[0]}
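Aside: two bash details carry this hunk: quoted "${@}" re-expands every forwarded argument as its own word, so arguments containing spaces survive intact, and because the Maven call is piped into tee, the exit status must be read from ${PIPESTATUS[0]} rather than $?, which would report tee's status. A minimal sketch (build_command is a hypothetical stand-in):

    #!/usr/bin/env bash
    build_command() { echo "args: $#"; return 7; }

    # forward all script arguments, preserving word boundaries
    build_command "${@}" | tee /tmp/build.out

    # $? is tee's status here; PIPESTATUS[0] is build_command's (7)
    EXIT_CODE=${PIPESTATUS[0]}
    exit $EXIT_CODE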
 



[flink] 05/15: [FLINK-32834] Remove FLINK_ROOT parameter

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 4bcbc7caed977b0c2c53c5b549b7feed4d430643
Author: Chesnay Schepler 
AuthorDate: Fri Aug 11 10:55:39 2023 +0200

[FLINK-32834] Remove FLINK_ROOT parameter

Ease direct manual usage by reducing the number of parameters.
---
 tools/ci/compile.sh | 6 +++---
 tools/ci/license_check.sh   | 5 ++---
 tools/ci/verify_bundled_optional.sh | 3 ---
 tools/ci/verify_scala_suffixes.sh   | 5 +
 4 files changed, 6 insertions(+), 13 deletions(-)

diff --git a/tools/ci/compile.sh b/tools/ci/compile.sh
index 49743999707..11e1e22e9e6 100755
--- a/tools/ci/compile.sh
+++ b/tools/ci/compile.sh
@@ -97,11 +97,11 @@ fi
 
 echo " Checking bundled dependencies marked as optional "
 
-${CI_DIR}/verify_bundled_optional.sh $MVN_CLEAN_COMPILE_OUT "$CI_DIR" "$(pwd)" || exit $?
+${CI_DIR}/verify_bundled_optional.sh $MVN_CLEAN_COMPILE_OUT "$CI_DIR" || exit $?
 
 echo " Checking scala suffixes "
 
-${CI_DIR}/verify_scala_suffixes.sh "$CI_DIR" "$(pwd)" || exit $?
+${CI_DIR}/verify_scala_suffixes.sh "$CI_DIR" || exit $?
 
 echo " Checking shaded dependencies "
 
@@ -117,7 +117,7 @@ echo " Run license check "
 find $MVN_VALIDATION_DIR
 # We use a different Scala version with Java 17
 if [[ ${PROFILE} != *"jdk17"* ]]; then
-  ${CI_DIR}/license_check.sh $MVN_CLEAN_COMPILE_OUT $CI_DIR $(pwd) $MVN_VALIDATION_DIR || exit $?
+  ${CI_DIR}/license_check.sh $MVN_CLEAN_COMPILE_OUT $CI_DIR $MVN_VALIDATION_DIR || exit $?
 fi
 
 exit $EXIT_CODE
diff --git a/tools/ci/license_check.sh b/tools/ci/license_check.sh
index 7ba98c88eae..3b3e02603c0 100755
--- a/tools/ci/license_check.sh
+++ b/tools/ci/license_check.sh
@@ -19,12 +19,11 @@
 
 MVN_CLEAN_COMPILE_OUT=$1
 CI_DIR=$2
-FLINK_ROOT=$3
-FLINK_DEPLOYED_ROOT=$4
+FLINK_DEPLOYED_ROOT=$3
 
 source "${CI_DIR}/maven-utils.sh"
 
-run_mvn -pl tools/ci/flink-ci-tools exec:java -Dexec.mainClass=org.apache.flink.tools.ci.licensecheck.LicenseChecker -Dexec.args="$MVN_CLEAN_COMPILE_OUT $FLINK_ROOT $FLINK_DEPLOYED_ROOT"
+run_mvn -pl tools/ci/flink-ci-tools exec:java -Dexec.mainClass=org.apache.flink.tools.ci.licensecheck.LicenseChecker -Dexec.args="$MVN_CLEAN_COMPILE_OUT $(pwd) $FLINK_DEPLOYED_ROOT"
 EXIT_CODE=$?
 
 if [ $EXIT_CODE != 0 ]; then
diff --git a/tools/ci/verify_bundled_optional.sh b/tools/ci/verify_bundled_optional.sh
index d2c34e638db..bcacc705f43 100755
--- a/tools/ci/verify_bundled_optional.sh
+++ b/tools/ci/verify_bundled_optional.sh
@@ -20,12 +20,9 @@
 ## Checks that all bundled dependencies are marked as optional in the poms
 MVN_CLEAN_COMPILE_OUT=$1
 CI_DIR=$2
-FLINK_ROOT=$3
 
 source "${CI_DIR}/maven-utils.sh"
 
-cd "$FLINK_ROOT" || exit
-
 dependency_plugin_output=/tmp/optional_dep.txt
 
 run_mvn dependency:tree -B > "${dependency_plugin_output}"
diff --git a/tools/ci/verify_scala_suffixes.sh b/tools/ci/verify_scala_suffixes.sh
index b827a1c19f5..37581b472b3 100755
--- a/tools/ci/verify_scala_suffixes.sh
+++ b/tools/ci/verify_scala_suffixes.sh
@@ -38,7 +38,6 @@
 # The script uses 'mvn dependency:tree -Dincludes=org.scala-lang' to list Scala
 # dependent modules.
 CI_DIR=$1
-FLINK_ROOT=$2
 
 echo "--- Flink Scala Dependency Analyzer ---"
 echo "Analyzing modules for Scala dependencies using 'mvn dependency:tree'."
@@ -46,8 +45,6 @@ echo "If you haven't built the project, please do so first by running \"mvn clea
 
 source "${CI_DIR}/maven-utils.sh"
 
-cd "$FLINK_ROOT" || exit
-
 dependency_plugin_output=/tmp/dep.txt
 
 run_mvn dependency:tree -Dincludes=org.scala-lang,:*_2.1*:: ${MAVEN_ARGUMENTS} > "${dependency_plugin_output}"
@@ -61,7 +58,7 @@ if [ $EXIT_CODE != 0 ]; then
 exit 1
 fi
 
-run_mvn -pl tools/ci/flink-ci-tools exec:java exec:java -Dexec.mainClass=org.apache.flink.tools.ci.suffixcheck.ScalaSuffixChecker -Dexec.args="${dependency_plugin_output} ${FLINK_ROOT}"
+run_mvn -pl tools/ci/flink-ci-tools exec:java exec:java -Dexec.mainClass=org.apache.flink.tools.ci.suffixcheck.ScalaSuffixChecker -Dexec.args="${dependency_plugin_output} $(pwd)"
 EXIT_CODE=$?
 
 if [ $EXIT_CODE == 0 ]; then



[flink] 11/15: [FLINK-32834] Add usage information and -h option to low-level scripts

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit ed31bca4c9efb89ad7b0a2242d51360ab5188e35
Author: Chesnay Schepler 
AuthorDate: Tue Aug 15 17:57:29 2023 +0200

[FLINK-32834] Add usage information and -h option to low-level scripts
---
 tools/ci/license_check.sh   | 26 ++
 tools/ci/verify_bundled_optional.sh | 24 
 tools/ci/verify_scala_suffixes.sh   | 18 ++
 3 files changed, 68 insertions(+)

diff --git a/tools/ci/license_check.sh b/tools/ci/license_check.sh
index 4f2aebbde00..f31a6665061 100755
--- a/tools/ci/license_check.sh
+++ b/tools/ci/license_check.sh
@@ -17,6 +17,32 @@
 # limitations under the License.
 

 
+usage() {
+  echo "Usage: $0 <mvn-build-output> <deployed-artifacts-dir>"
+  echo "  <mvn-build-output> A file containing the output of the Maven build."
+  echo "  <deployed-artifacts-dir> A directory containing a Maven repository into which the Flink artifacts were deployed."
+  echo ""
+  echo "Example preparation:"
+  echo "mvnw clean deploy -DaltDeploymentRepository=validation_repository::default::file:<deployed-artifacts-dir> > <mvn-build-output>"
+  echo ""
+  echo "The environment variable MVN is used to specify the Maven binaries; defaults to 'mvnw'."
+  echo "See further details in the JavaDoc of LicenseChecker."
+}
+
+while getopts 'h' o; do
+  case "${o}" in
+h)
+  usage
+  exit 0
+  ;;
+  esac
+done
+
+if [[ "$#" != "2" ]]; then
+  usage
+  exit 1
+fi
+
 MVN_CLEAN_COMPILE_OUT=$1
 FLINK_DEPLOYED_ROOT=$2
 
diff --git a/tools/ci/verify_bundled_optional.sh b/tools/ci/verify_bundled_optional.sh
index 34fff60ca2d..0d3dbdf41ba 100755
--- a/tools/ci/verify_bundled_optional.sh
+++ b/tools/ci/verify_bundled_optional.sh
@@ -17,6 +17,30 @@
 # limitations under the License.
 #
 
+usage() {
+  echo "Usage: $0 <mvn-build-output>"
+  echo "  <mvn-build-output> A file containing the output of the Maven build."
+  echo ""
+  echo "mvnw clean package > <mvn-build-output>"
+  echo ""
+  echo "The environment variable MVN is used to specify the Maven binaries; defaults to 'mvnw'."
+  echo "See further details in the JavaDoc of ShadeOptionalChecker."
+}
+
+while getopts 'h' o; do
+  case "${o}" in
+h)
+  usage
+  exit 0
+  ;;
+  esac
+done
+
+if [[ "$#" != "1" ]]; then
+  usage
+  exit 1
+fi
+
 ## Checks that all bundled dependencies are marked as optional in the poms
 MVN_CLEAN_COMPILE_OUT=$1
 
diff --git a/tools/ci/verify_scala_suffixes.sh b/tools/ci/verify_scala_suffixes.sh
index 4c5cc389eb3..45fca80a842 100755
--- a/tools/ci/verify_scala_suffixes.sh
+++ b/tools/ci/verify_scala_suffixes.sh
@@ -37,6 +37,24 @@
 #
 # The script uses 'mvn dependency:tree -Dincludes=org.scala-lang' to list Scala
 # dependent modules.
+
+
+usage() {
+  echo "Usage: $0"
+  echo ""
+  echo "The environment variable MVN is used to specify the Maven binaries; defaults to 'mvnw'."
+  echo "See further details in the JavaDoc of ScalaSuffixChecker."
+}
+
+while getopts 'h' o; do
+  case "${o}" in
+h)
+  usage
+  exit 0
+  ;;
+  esac
+done
+
 MVN=${MVN:-./mvnw}
 
 echo "--- Flink Scala Dependency Analyzer ---"
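Aside: the -h handling added across these scripts is the standard getopts loop; 'h' without a trailing colon declares an option that takes no argument. A stripped-down sketch of the pattern (usage text abbreviated):

    #!/usr/bin/env bash
    usage() {
      echo "Usage: $0 [-h]"
    }

    # parse options; unrecognized options fall through untouched here
    while getopts 'h' o; do
      case "${o}" in
        h)
          usage
          exit 0
          ;;
      esac
    done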



[flink] 10/15: [FLINK-32834] Add documentation to the scripts

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit b38cd662bcfe66dd467bfc88c4ec313f444c48fd
Author: Chesnay Schepler 
AuthorDate: Fri Aug 11 12:36:55 2023 +0200

[FLINK-32834] Add documentation to the scripts
---
 tools/ci/compile.sh| 17 -
 tools/ci/compile_ci.sh |  2 +-
 2 files changed, 17 insertions(+), 2 deletions(-)

diff --git a/tools/ci/compile.sh b/tools/ci/compile.sh
index bc349e3d53a..ee4ba13d940 100755
--- a/tools/ci/compile.sh
+++ b/tools/ci/compile.sh
@@ -18,7 +18,22 @@
 

 
 #
-# This file contains tooling for compiling Flink
+# This script compiles Flink and runs all QA checks apart from tests.
+#
+# This script should not contain any CI-specific logic; put these into compile_ci.sh instead.
+#
+# Usage: [MVN=/path/to/maven] tools/ci/compile.sh [additional maven args]
+# - Use the MVN environment variable to point the script to another maven installation.
+# - Any script argument is forwarded to the Flink maven build. Use it to skip/modify parts of the build process.
+#
+# Tips:
+# - '-Pskip-webui-build' skips the WebUI build.
+# - '-Dfast' skips Maven QA checks.
+# - '-Dmaven.clean.skip' skips recompilation of classes.
+# Example: tools/ci/compile.sh -Dmaven.clean.skip -Dfast -Pskip-webui-build, use -Dmaven.clean.skip to avoid recompiling classes.
+#
+# Warnings:
+# - Skipping modules via '-pl [!]<module>' is not recommended because checks may assume/require a full build.
 #
 
 CI_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
diff --git a/tools/ci/compile_ci.sh b/tools/ci/compile_ci.sh
index d9b3f68a49a..d88cce6848d 100755
--- a/tools/ci/compile_ci.sh
+++ b/tools/ci/compile_ci.sh
@@ -18,7 +18,7 @@
 

 
 #
-# This file contains tooling for compiling Flink
+# This script is the CI entrypoint for compiling Flink and running QA checks that don't require tests.
 #
 
 CI_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)



[flink] 14/15: [FLINK-32834] Force parallelism of 1

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 6faf3680d4d558f7f5577f9997ebaea1d9bc7b77
Author: Chesnay Schepler 
AuthorDate: Thu Aug 17 11:32:27 2023 +0200

[FLINK-32834] Force parallelism of 1

The maven output parsers rely on a certain order of messages, which can be broken by multi-threaded builds.
---
 tools/ci/compile.sh | 3 ++-
 tools/ci/verify_bundled_optional.sh | 3 ++-
 tools/ci/verify_scala_suffixes.sh   | 3 ++-
 3 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/tools/ci/compile.sh b/tools/ci/compile.sh
index ee4ba13d940..0603e7b7e48 100755
--- a/tools/ci/compile.sh
+++ b/tools/ci/compile.sh
@@ -58,8 +58,9 @@ echo "=============================================================================="
 
 EXIT_CODE=0
 
+# run with -T1 because our maven output parsers don't support multi-threaded builds
 $MVN clean deploy -DaltDeploymentRepository=validation_repository::default::file:$MVN_VALIDATION_DIR -Dflink.convergence.phase=install -Pcheck-convergence \
--Dmaven.javadoc.skip=true -U -DskipTests "${@}" | tee $MVN_CLEAN_COMPILE_OUT
+-Dmaven.javadoc.skip=true -U -DskipTests "${@}" -T1 | tee $MVN_CLEAN_COMPILE_OUT
 
 EXIT_CODE=${PIPESTATUS[0]}
 
diff --git a/tools/ci/verify_bundled_optional.sh b/tools/ci/verify_bundled_optional.sh
index 276d95eb634..e0f5a22255d 100755
--- a/tools/ci/verify_bundled_optional.sh
+++ b/tools/ci/verify_bundled_optional.sh
@@ -48,7 +48,8 @@ MVN=${MVN:-./mvnw}
 
 dependency_plugin_output=/tmp/optional_dep.txt
 
-$MVN dependency:tree -B > "${dependency_plugin_output}"
+# run with -T1 because our maven output parsers don't support multi-threaded builds
+$MVN dependency:tree -B -T1 > "${dependency_plugin_output}"
 EXIT_CODE=$?
 
 if [ $EXIT_CODE != 0 ]; then
diff --git a/tools/ci/verify_scala_suffixes.sh b/tools/ci/verify_scala_suffixes.sh
index 9747066b4c8..f6aae040731 100755
--- a/tools/ci/verify_scala_suffixes.sh
+++ b/tools/ci/verify_scala_suffixes.sh
@@ -63,7 +63,8 @@ echo "If you haven't built the project, please do so first by running \"mvn clea
 
 dependency_plugin_output=/tmp/dep.txt
 
-$MVN dependency:tree -Dincludes=org.scala-lang,:*_2.1*:: ${MAVEN_ARGUMENTS} > "${dependency_plugin_output}"
+# run with -T1 because our maven output parsers don't support multi-threaded builds
+$MVN dependency:tree -Dincludes=org.scala-lang,:*_2.1*:: ${MAVEN_ARGUMENTS} -T1 > "${dependency_plugin_output}"
 EXIT_CODE=$?
 
 if [ $EXIT_CODE != 0 ]; then



[flink] 12/15: [FLINK-32834] Fail early if dependency plugin fails

2023-08-17 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 6663b3982444a3ee3c67e68289b24b31aac15c02
Author: Chesnay Schepler 
AuthorDate: Wed Aug 16 13:02:41 2023 +0200

[FLINK-32834] Fail early if dependency plugin fails
---
 tools/ci/verify_bundled_optional.sh | 9 +
 1 file changed, 9 insertions(+)

diff --git a/tools/ci/verify_bundled_optional.sh b/tools/ci/verify_bundled_optional.sh
index 0d3dbdf41ba..890b1a5acf5 100755
--- a/tools/ci/verify_bundled_optional.sh
+++ b/tools/ci/verify_bundled_optional.sh
@@ -49,6 +49,15 @@ MVN=${MVN:-./mvnw}
 dependency_plugin_output=/tmp/optional_dep.txt
 
 $MVN dependency:tree -B > "${dependency_plugin_output}"
+EXIT_CODE=$?
+
+if [ $EXIT_CODE != 0 ]; then
+cat ${dependency_plugin_output}
+echo "=============================================================================="
+echo "Optional Check failed. The dependency tree could not be determined. See previous output for details."
+echo "=============================================================================="
+exit 1
+fi
 
 cat "${dependency_plugin_output}"
 


