[flink] branch release-1.12 updated: [FLINK-20410] Retry querying for schema in the schema registry e2e test.

2020-11-30 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch release-1.12
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.12 by this push:
 new bbf68c6  [FLINK-20410] Retry querying for schema in the schema registry e2e test.
bbf68c6 is described below

commit bbf68c656a352129ab621efacdf0ab561071d3de
Author: Dawid Wysakowicz 
AuthorDate: Mon Nov 30 10:36:25 2020 +0100

[FLINK-20410] Retry querying for schema in the schema registry e2e test.
---
 .../util/kafka/SQLClientSchemaRegistryITCase.java | 19 ++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/test/java/org/apache/flink/tests/util/kafka/SQLClientSchemaRegistryITCase.java b/flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/test/java/org/apache/flink/tests/util/kafka/SQLClientSchemaRegistryITCase.java
index 1906b58..34500d3 100644
--- a/flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/test/java/org/apache/flink/tests/util/kafka/SQLClientSchemaRegistryITCase.java
+++ b/flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/test/java/org/apache/flink/tests/util/kafka/SQLClientSchemaRegistryITCase.java
@@ -18,6 +18,7 @@
 
 package org.apache.flink.tests.util.kafka;
 
+import org.apache.flink.api.common.time.Deadline;
 import org.apache.flink.tests.util.TestUtils;
 import org.apache.flink.tests.util.categories.TravisGroup1;
 import org.apache.flink.tests.util.flink.FlinkContainer;
@@ -26,6 +27,7 @@ import org.apache.flink.tests.util.kafka.containers.SchemaRegistryContainer;
 
 import io.confluent.kafka.schemaregistry.avro.AvroSchema;
 import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
 import io.confluent.kafka.serializers.KafkaAvroDeserializer;
 import io.confluent.kafka.serializers.KafkaAvroSerializer;
 import org.apache.avro.Schema;
@@ -42,6 +44,7 @@ import org.testcontainers.utility.DockerImageName;
 
 import java.io.IOException;
 import java.nio.file.Path;
+import java.time.Duration;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -192,7 +195,7 @@ public class SQLClientSchemaRegistryITCase {
 
executeSqlStatements(sqlLines);
 
-        List<Integer> versions = registryClient.getAllVersions(behaviourSubject);
+        List<Integer> versions = getAllVersions(behaviourSubject);
assertThat(versions.size(), equalTo(1));
List userBehaviors = kafkaClient.readMessages(
1,
@@ -217,6 +220,20 @@ public class SQLClientSchemaRegistryITCase {
));
}
 
+    private List<Integer> getAllVersions(String behaviourSubject) throws Exception {
+        Deadline deadline = Deadline.fromNow(Duration.ofSeconds(30));
+        Exception ex = new IllegalStateException(
+            "Could not query schema registry. Negative deadline provided.");
+        while (deadline.hasTimeLeft()) {
+            try {
+                return registryClient.getAllVersions(behaviourSubject);
+            } catch (RestClientException e) {
+                ex = e;
+            }
+        }
+        throw ex;
+    }
+
     private void executeSqlStatements(List<String> sqlLines) throws Exception {
         flink.submitSQLJob(new SQLJobSubmission.SQLJobSubmissionBuilder(sqlLines)
             .addJars(sqlAvroJar, sqlAvroRegistryJar, sqlConnectorKafkaJar, sqlToolBoxJar)
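
For readers skimming the patch: the retry logic added above amounts to "keep querying the registry until a deadline passes, remembering the last failure". A minimal self-contained sketch of that pattern is shown below; it uses plain java.time instead of Flink's Deadline, and retryUntilDeadline and the Callable are illustrative names, not part of the test code.

    import java.time.Duration;
    import java.time.Instant;
    import java.util.concurrent.Callable;

    public final class RetryUntilDeadline {

        // Keep invoking the action until it succeeds or the deadline passes;
        // if every attempt fails, rethrow the last exception that was seen.
        static <T> T retryUntilDeadline(Callable<T> action, Duration timeout) throws Exception {
            Instant deadline = Instant.now().plus(timeout);
            Exception last = new IllegalStateException("Negative deadline provided.");
            while (Instant.now().isBefore(deadline)) {
                try {
                    return action.call();
                } catch (Exception e) {
                    last = e; // remember the failure and retry
                }
            }
            throw last;
        }

        public static void main(String[] args) throws Exception {
            // Example: poll a flaky lookup for up to 30 seconds, as the test does.
            System.out.println(retryUntilDeadline(() -> "versions=[1]", Duration.ofSeconds(30)));
        }
    }

Like the patch, the sketch retries in a tight loop without back-off, which is acceptable for a short-lived e2e test.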



[flink] branch master updated: [FLINK-20410] Retry querying for schema in the schema registry e2e test.

2020-11-30 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 6f46458  [FLINK-20410] Retry querying for schema in the schema registry e2e test.
6f46458 is described below

commit 6f46458dccd490ddc93b24df0c34a0e0ff700830
Author: Dawid Wysakowicz 
AuthorDate: Mon Nov 30 10:36:25 2020 +0100

[FLINK-20410] Retry querying for schema in the schema registry e2e test.
---
 .../util/kafka/SQLClientSchemaRegistryITCase.java | 19 ++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/test/java/org/apache/flink/tests/util/kafka/SQLClientSchemaRegistryITCase.java b/flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/test/java/org/apache/flink/tests/util/kafka/SQLClientSchemaRegistryITCase.java
index 1906b58..34500d3 100644
--- a/flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/test/java/org/apache/flink/tests/util/kafka/SQLClientSchemaRegistryITCase.java
+++ b/flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/test/java/org/apache/flink/tests/util/kafka/SQLClientSchemaRegistryITCase.java
@@ -18,6 +18,7 @@
 
 package org.apache.flink.tests.util.kafka;
 
+import org.apache.flink.api.common.time.Deadline;
 import org.apache.flink.tests.util.TestUtils;
 import org.apache.flink.tests.util.categories.TravisGroup1;
 import org.apache.flink.tests.util.flink.FlinkContainer;
@@ -26,6 +27,7 @@ import org.apache.flink.tests.util.kafka.containers.SchemaRegistryContainer;
 
 import io.confluent.kafka.schemaregistry.avro.AvroSchema;
 import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
 import io.confluent.kafka.serializers.KafkaAvroDeserializer;
 import io.confluent.kafka.serializers.KafkaAvroSerializer;
 import org.apache.avro.Schema;
@@ -42,6 +44,7 @@ import org.testcontainers.utility.DockerImageName;
 
 import java.io.IOException;
 import java.nio.file.Path;
+import java.time.Duration;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -192,7 +195,7 @@ public class SQLClientSchemaRegistryITCase {
 
executeSqlStatements(sqlLines);
 
-        List<Integer> versions = registryClient.getAllVersions(behaviourSubject);
+        List<Integer> versions = getAllVersions(behaviourSubject);
assertThat(versions.size(), equalTo(1));
List userBehaviors = kafkaClient.readMessages(
1,
@@ -217,6 +220,20 @@ public class SQLClientSchemaRegistryITCase {
));
}
 
+    private List<Integer> getAllVersions(String behaviourSubject) throws Exception {
+        Deadline deadline = Deadline.fromNow(Duration.ofSeconds(30));
+        Exception ex = new IllegalStateException(
+            "Could not query schema registry. Negative deadline provided.");
+        while (deadline.hasTimeLeft()) {
+            try {
+                return registryClient.getAllVersions(behaviourSubject);
+            } catch (RestClientException e) {
+                ex = e;
+            }
+        }
+        throw ex;
+    }
+
     private void executeSqlStatements(List<String> sqlLines) throws Exception {
         flink.submitSQLJob(new SQLJobSubmission.SQLJobSubmissionBuilder(sqlLines)
             .addJars(sqlAvroJar, sqlAvroRegistryJar, sqlConnectorKafkaJar, sqlToolBoxJar)



[flink] 02/02: [FLINK-20391] Set FORWARD_EDGES_PIPELINED for BATCH ExecutionMode

2020-11-30 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch release-1.12
in repository https://gitbox.apache.org/repos/asf/flink.git

commit be972ef13157783ab6ff1b6282e34cfd5f1e1d61
Author: Dawid Wysakowicz 
AuthorDate: Fri Nov 27 11:43:04 2020 +0100

[FLINK-20391] Set FORWARD_EDGES_PIPELINED for BATCH ExecutionMode

This closes #14249
---
 .../streaming/api/graph/StreamGraphGenerator.java  |  2 +-
 ...amGraphGeneratorExecutionModeDetectionTest.java | 18 ++--
 .../api/graph/StreamingJobGraphGeneratorTest.java  | 49 ++
 .../datastream/DataStreamBatchExecutionITCase.java | 32 +-
 4 files changed, 96 insertions(+), 5 deletions(-)

diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamGraphGenerator.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamGraphGenerator.java
index 0d64872..6aca57b 100644
--- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamGraphGenerator.java
+++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamGraphGenerator.java
@@ -284,7 +284,7 @@ public class StreamGraphGenerator {
checkpointConfig.disableCheckpointing();
}
 
-        graph.setGlobalDataExchangeMode(GlobalDataExchangeMode.POINTWISE_EDGES_PIPELINED);
+        graph.setGlobalDataExchangeMode(GlobalDataExchangeMode.FORWARD_EDGES_PIPELINED);
         graph.setScheduleMode(ScheduleMode.LAZY_FROM_SOURCES_WITH_BATCH_SLOT_REQUEST);
setDefaultBufferTimeout(-1);
setBatchStateBackendAndTimerService(graph);
diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorExecutionModeDetectionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorExecutionModeDetectionTest.java
index 582a82a..65fb375 100644
--- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorExecutionModeDetectionTest.java
+++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorExecutionModeDetectionTest.java
@@ -126,7 +126,7 @@ public class StreamGraphGeneratorExecutionModeDetectionTest extends TestLogger {
         assertThat(
             streamGraph,
             hasProperties(
-                GlobalDataExchangeMode.POINTWISE_EDGES_PIPELINED,
+                GlobalDataExchangeMode.FORWARD_EDGES_PIPELINED,
                 ScheduleMode.LAZY_FROM_SOURCES_WITH_BATCH_SLOT_REQUEST,
                 false));
     }
@@ -189,7 +189,7 @@ public class StreamGraphGeneratorExecutionModeDetectionTest extends TestLogger {
         assertThat(
             graph,
             hasProperties(
-                GlobalDataExchangeMode.POINTWISE_EDGES_PIPELINED,
+                GlobalDataExchangeMode.FORWARD_EDGES_PIPELINED,
                 ScheduleMode.LAZY_FROM_SOURCES_WITH_BATCH_SLOT_REQUEST,
                 false));
     }
@@ -238,7 +238,7 @@ public class StreamGraphGeneratorExecutionModeDetectionTest extends TestLogger {
         assertThat(
             graph,
             hasProperties(
-                GlobalDataExchangeMode.POINTWISE_EDGES_PIPELINED,
+                GlobalDataExchangeMode.FORWARD_EDGES_PIPELINED,
                 ScheduleMode.LAZY_FROM_SOURCES_WITH_BATCH_SLOT_REQUEST,
                 false));
 
@@ -299,6 +299,18 @@ public class StreamGraphGeneratorExecutionModeDetectionTest extends TestLogger {
                 .appendValue(scheduleMode)
                 .appendText("'");
         }
+
+    @Override
+    protected void describeMismatchSafely(
+            StreamGraph item,
+            Description mismatchDescription) {
+        mismatchDescription.appendText("was ")
+            .appendText("a StreamGraph with exchangeMode='")
+            .appendValue(item.getGlobalDataExchangeMode())
+            .appendText("' and scheduleMode='")
+
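
Side note: the FORWARD_EDGES_PIPELINED exchange mode above only takes effect for DataStream jobs that run in BATCH execution mode. A minimal sketch of how a user selects that mode in Flink 1.12 (the job body is illustrative only):

    import org.apache.flink.api.common.RuntimeExecutionMode;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class BatchModeExample {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            // BATCH runtime mode routes the job through the batch branch of
            // StreamGraphGenerator, where the exchange mode changed by this commit is set.
            env.setRuntimeMode(RuntimeExecutionMode.BATCH);

            env.fromElements(1, 2, 3).print();

            env.execute("batch-mode-example");
        }
    }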

[flink] branch release-1.12 updated (bc870f6 -> be972ef)

2020-11-30 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch release-1.12
in repository https://gitbox.apache.org/repos/asf/flink.git.


from bc870f6  [FLINK-20396][checkpointing] Add a 'subtaskReset()' method to the OperatorCoordinator.
 new b21af8b  [FLINK-20245][hive][docs] Document how to create a Hive catalog from DDL
 new be972ef  [FLINK-20391] Set FORWARD_EDGES_PIPELINED for BATCH ExecutionMode

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 docs/dev/table/connectors/hive/index.md| 88 +++---
 docs/dev/table/connectors/hive/index.zh.md | 87 ++---
 .../streaming/api/graph/StreamGraphGenerator.java  |  2 +-
 ...amGraphGeneratorExecutionModeDetectionTest.java | 18 -
 .../api/graph/StreamingJobGraphGeneratorTest.java  | 49 
 .../datastream/DataStreamBatchExecutionITCase.java | 32 +++-
 6 files changed, 248 insertions(+), 28 deletions(-)



[flink] 01/02: [FLINK-20245][hive][docs] Document how to create a Hive catalog from DDL

2020-11-30 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch release-1.12
in repository https://gitbox.apache.org/repos/asf/flink.git

commit b21af8bc1b9a2da05c2e7d84cfb8aefa0c13e106
Author: Rui Li 
AuthorDate: Thu Nov 26 14:34:08 2020 +0800

[FLINK-20245][hive][docs] Document how to create a Hive catalog from DDL

This closes #14227
---
 docs/dev/table/connectors/hive/index.md| 88 ++
 docs/dev/table/connectors/hive/index.zh.md | 87 +
 2 files changed, 152 insertions(+), 23 deletions(-)

diff --git a/docs/dev/table/connectors/hive/index.md b/docs/dev/table/connectors/hive/index.md
index 9cdc5fd..a994926 100644
--- a/docs/dev/table/connectors/hive/index.md
+++ b/docs/dev/table/connectors/hive/index.md
@@ -289,15 +289,9 @@ You're supposed to add dependencies as stated above at runtime.
 Connect to an existing Hive installation using the [catalog interface]({{ site.baseurl }}/dev/table/catalogs.html) 
 and [HiveCatalog]({{ site.baseurl }}/dev/table/connectors/hive/hive_catalog.html) through the table environment or YAML configuration.
 
-If the `hive-conf/hive-site.xml` file is stored in remote storage system, users should download 
-the hive configuration file to their local environment first. 
-
 Please note while HiveCatalog doesn't require a particular planner, reading/writing Hive tables only works with blink planner.
 Therefore it's highly recommended that you use blink planner when connecting to your Hive warehouse.
 
-`HiveCatalog` is capable of automatically detecting the Hive version in use. It's recommended **NOT** to specify the Hive
-version, unless the automatic detection fails.
-
 Following is an example of how to connect to Hive:
 
 
@@ -305,12 +299,12 @@ Following is an example of how to connect to Hive:
 
 {% highlight java %}
 
-EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
+EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().build();
 TableEnvironment tableEnv = TableEnvironment.create(settings);
 
 String name= "myhive";
 String defaultDatabase = "mydatabase";
-String hiveConfDir = "/opt/hive-conf"; // a local path
+String hiveConfDir = "/opt/hive-conf";
 
 HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);
 tableEnv.registerCatalog("myhive", hive);
@@ -323,12 +317,12 @@ tableEnv.useCatalog("myhive");
 
 {% highlight scala %}
 
-val settings = EnvironmentSettings.newInstance().inBatchMode().build()
+val settings = EnvironmentSettings.newInstance().useBlinkPlanner().build()
 val tableEnv = TableEnvironment.create(settings)
 
 val name= "myhive"
 val defaultDatabase = "mydatabase"
-val hiveConfDir = "/opt/hive-conf" // a local path
+val hiveConfDir = "/opt/hive-conf"
 
 val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir)
 tableEnv.registerCatalog("myhive", hive)
@@ -342,12 +336,12 @@ tableEnv.useCatalog("myhive")
 from pyflink.table import *
 from pyflink.table.catalog import HiveCatalog
 
-settings = EnvironmentSettings.new_instance().in_batch_mode().use_blink_planner().build()
+settings = EnvironmentSettings.new_instance().use_blink_planner().build()
 t_env = BatchTableEnvironment.create(environment_settings=settings)
 
 catalog_name = "myhive"
 default_database = "mydatabase"
-hive_conf_dir = "/opt/hive-conf"  # a local path
+hive_conf_dir = "/opt/hive-conf"
 
 hive_catalog = HiveCatalog(catalog_name, default_database, hive_conf_dir)
 t_env.register_catalog("myhive", hive_catalog)
@@ -371,7 +365,77 @@ catalogs:
  hive-conf-dir: /opt/hive-conf
 {% endhighlight %}
 
+
+{% highlight sql %}
+
+CREATE CATALOG myhive WITH (
+'type' = 'hive',
+'default-database' = 'mydatabase',
+'hive-conf-dir' = '/opt/hive-conf'
+);
+-- set the HiveCatalog as the current catalog of the session
+USE CATALOG myhive;
+{% endhighlight %}
 
+
+
+Below are the options supported when creating a `HiveCatalog` instance with YAML file or DDL.
+
+  Option         | Required | Default | Type   | Description
+  ---------------|----------|---------|--------|------------
+  type           | Yes      | (none)  | String | Type of the catalog. Must be set to 'hive' when creating a HiveCatalog.
+  name           | Yes      | (none)  | String | The unique name of the catalog. Only applicable to YAML file.
+  hive-conf-dir  | No       | (none)  | String | URI to your Hive conf dir containing hive-site.xml. The URI needs to be supported by Hadoop FileSystem. If the URI is relative, i.e. without a scheme, local file system is assumed. If t

[flink] branch master updated (89f1754 -> ffb538b)

2020-11-30 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 89f1754  [FLINK-20344][dist] Change example value for default savepoint path flink-conf template.
 add ffb538b  [FLINK-20391] Set FORWARD_EDGES_PIPELINED for BATCH ExecutionMode

No new revisions were added by this update.

Summary of changes:
 .../streaming/api/graph/StreamGraphGenerator.java  |  2 +-
 ...amGraphGeneratorExecutionModeDetectionTest.java | 18 ++--
 .../api/graph/StreamingJobGraphGeneratorTest.java  | 49 ++
 .../datastream/DataStreamBatchExecutionITCase.java | 32 +-
 4 files changed, 96 insertions(+), 5 deletions(-)



[flink] branch release-1.11 updated: [FLINK-20175] Avro Confluent Registry SQL format does not support adding nullable columns

2020-11-29 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch release-1.11
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.11 by this push:
 new d2f24e8  [FLINK-20175] Avro Confluent Registry SQL format does not support adding nullable columns
d2f24e8 is described below

commit d2f24e8177f05fd41c8804f2bb962a1a7500b095
Author: Dawid Wysakowicz 
AuthorDate: Mon Nov 16 20:28:29 2020 +0100

[FLINK-20175] Avro Confluent Registry SQL format does not support adding nullable columns
---
 .../avro/typeutils/AvroSchemaConverter.java|  75 ++-
 .../avro/AvroRowDataDeSerializationSchemaTest.java |   6 +-
 .../avro/typeutils/AvroSchemaConverterTest.java| 139 ++---
 3 files changed, 146 insertions(+), 74 deletions(-)

diff --git a/flink-formats/flink-avro/src/main/java/org/apache/flink/formats/avro/typeutils/AvroSchemaConverter.java b/flink-formats/flink-avro/src/main/java/org/apache/flink/formats/avro/typeutils/AvroSchemaConverter.java
index c129ea5..76aa5f2 100644
--- a/flink-formats/flink-avro/src/main/java/org/apache/flink/formats/avro/typeutils/AvroSchemaConverter.java
+++ b/flink-formats/flink-avro/src/main/java/org/apache/flink/formats/avro/typeutils/AvroSchemaConverter.java
@@ -181,27 +181,35 @@ public class AvroSchemaConverter {
 
	public static Schema convertToSchema(LogicalType logicalType, String rowName) {
int precision;
+   boolean isNullable = logicalType.isNullable();
switch (logicalType.getTypeRoot()) {
case NULL:
return SchemaBuilder.builder().nullType();
case BOOLEAN:
-				return getNullableBuilder(logicalType).booleanType();
+				Schema booleanType = SchemaBuilder.builder().booleanType();
+				return isNullable ? nullableSchema(booleanType) : booleanType;
 			case TINYINT:
 			case SMALLINT:
 			case INTEGER:
-				return getNullableBuilder(logicalType).intType();
+				Schema intType = SchemaBuilder.builder().intType();
+				return isNullable ? nullableSchema(intType) : intType;
 			case BIGINT:
-				return getNullableBuilder(logicalType).longType();
+				Schema longType = SchemaBuilder.builder().longType();
+				return isNullable ? nullableSchema(longType) : longType;
 			case FLOAT:
-				return getNullableBuilder(logicalType).floatType();
+				Schema floatType = SchemaBuilder.builder().floatType();
+				return isNullable ? nullableSchema(floatType) : floatType;
 			case DOUBLE:
-				return getNullableBuilder(logicalType).doubleType();
+				Schema doubleType = SchemaBuilder.builder().doubleType();
+				return isNullable ? nullableSchema(doubleType) : doubleType;
 			case CHAR:
 			case VARCHAR:
-				return getNullableBuilder(logicalType).stringType();
+				Schema stringType = SchemaBuilder.builder().stringType();
+				return isNullable ? nullableSchema(stringType) : stringType;
 			case BINARY:
 			case VARBINARY:
-				return getNullableBuilder(logicalType).bytesType();
+				Schema bytesType = SchemaBuilder.builder().bytesType();
+				return isNullable ? nullableSchema(bytesType) : bytesType;
 			case TIMESTAMP_WITHOUT_TIME_ZONE:
 				// use long to represents Timestamp
 				final TimestampType timestampType = (TimestampType) logicalType;
@@ -213,10 +221,12 @@ public class AvroSchemaConverter {
 					throw new IllegalArgumentException("Avro does not support TIMESTAMP type " +
 						"with precision: " + precision + ", it only supports precision less than 3.");
 				}
-				return avroLogicalType.addToSchema(SchemaBuilder.builder().longType());
+				Schema timestampeType = avroLogicalType.addToSchema(SchemaBuilder.builder().longType());
+				return isNullable ? nullableSchema(timestampeType) : timestampeType;
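
The nullableSchema(...) helper used throughout this hunk is not shown in the excerpt. Conceptually it wraps a schema into a union with null so the corresponding column can hold NULL values; a plausible stand-alone sketch using the plain Avro API (names and structure are assumptions, not the actual AvroSchemaConverter code):

    import java.util.Arrays;

    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;

    public class NullableSchemaSketch {

        // Wrap a non-null Avro schema into the union ["null", schema].
        static Schema nullableSchema(Schema schema) {
            return Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), schema));
        }

        public static void main(String[] args) {
            Schema stringType = SchemaBuilder.builder().stringType();
            System.out.println(nullableSchema(stringType)); // prints ["null","string"]
        }
    }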
   

[flink] branch master updated (af1b2cf -> cf9bbfd)

2020-11-26 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from af1b2cf  [FLINK-20365][python] The native k8s cluster could not be unregistered when executing Python DataStream jobs in attach mode
 add cf9bbfd  [FLINK-20245][hive][docs] Document how to create a Hive catalog from DDL

No new revisions were added by this update.

Summary of changes:
 docs/dev/table/connectors/hive/index.md| 88 ++
 docs/dev/table/connectors/hive/index.zh.md | 87 +
 2 files changed, 152 insertions(+), 23 deletions(-)



[flink] branch master updated (01d8f99 -> 40e6e16)

2020-11-25 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 01d8f99  [FLINK-20302] Recommend DataStream API with BATCH execution mode in DataSet docs
 add 9abe38e  [hotfix] Shade avro in sql-avro jars
 add efc12ca  [FLINK-20175] Avro Confluent Registry SQL format does not support adding nullable columns
 add 40e6e16  [FLINK-19997] Implement an e2e test for sql-client with Confluent Registry Avro format

No new revisions were added by this update.

Summary of changes:
 .../flink-end-to-end-tests-common-kafka/pom.xml|  62 
 .../tests/util/kafka/KafkaContainerClient.java | 131 +
 .../util/kafka/SQLClientSchemaRegistryITCase.java  | 225 ++
 .../kafka/containers/SchemaRegistryContainer.java  |  50 
 .../src/test/resources/log4j2-test.properties  |   6 +
 .../flink-end-to-end-tests-common/pom.xml  |   5 +
 .../flink/tests/util/flink/FlinkContainer.java | 300 +++
 .../flink/LocalStandaloneFlinkResourceFactory.java |  83 +-
 .../flink/tests/util/flink/SQLJobSubmission.java   |   7 +
 .../apache/flink/tests/util/util/FileUtils.java|  88 ++
 flink-end-to-end-tests/pom.xml |  13 +
 .../formats/avro/RowDataToAvroConverters.java  | 189 
 .../avro/typeutils/AvroSchemaConverter.java|  14 +-
 .../avro/typeutils/AvroSchemaConverterTest.java| 322 +
 .../flink-sql-avro-confluent-registry/pom.xml  |  20 +-
 flink-formats/flink-sql-avro/pom.xml   |   4 +
 16 files changed, 1251 insertions(+), 268 deletions(-)
 create mode 100644 flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/main/java/org/apache/flink/tests/util/kafka/KafkaContainerClient.java
 create mode 100644 flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/test/java/org/apache/flink/tests/util/kafka/SQLClientSchemaRegistryITCase.java
 create mode 100644 flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/src/test/java/org/apache/flink/tests/util/kafka/containers/SchemaRegistryContainer.java
 create mode 100644 flink-end-to-end-tests/flink-end-to-end-tests-common/src/main/java/org/apache/flink/tests/util/flink/FlinkContainer.java



[flink] branch master updated (8cf28ee -> 33a2a40)

2020-11-24 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 8cf28ee  [FLINK-18027][table] Improve ROW constructor documentation
 add 33a2a40  [FLINK-20307][doc] Strength the document about temporal table join syntax

No new revisions were added by this update.

Summary of changes:
 docs/dev/table/sql/queries.md| 9 -
 docs/dev/table/sql/queries.zh.md | 9 -
 2 files changed, 16 insertions(+), 2 deletions(-)



[flink] branch release-1.11 updated: [FLINK-20262] Building flink-dist docker image does not work without python2

2020-11-23 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch release-1.11
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.11 by this push:
 new 4b74c06  [FLINK-20262] Building flink-dist docker image does not work without python2
4b74c06 is described below

commit 4b74c060557642945a28a91d2273da141508c4aa
Author: Dawid Wysakowicz 
AuthorDate: Mon Nov 23 09:44:52 2020 +0100

[FLINK-20262] Building flink-dist docker image does not work without python2
---
 flink-end-to-end-tests/test-scripts/common_docker.sh  | 2 +-
 flink-end-to-end-tests/test-scripts/python3_fileserver.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/flink-end-to-end-tests/test-scripts/common_docker.sh b/flink-end-to-end-tests/test-scripts/common_docker.sh
index 4cc3a16..4914011a 100644
--- a/flink-end-to-end-tests/test-scripts/common_docker.sh
+++ b/flink-end-to-end-tests/test-scripts/common_docker.sh
@@ -63,7 +63,7 @@ function start_file_server() {
 
 command -v python3 >/dev/null 2>&1
 if [[ $? -eq 0 ]]; then
-  python ${TEST_INFRA_DIR}/python3_fileserver.py &
+  python3 ${TEST_INFRA_DIR}/python3_fileserver.py &
   return
 fi
 
diff --git a/flink-end-to-end-tests/test-scripts/python3_fileserver.py b/flink-end-to-end-tests/test-scripts/python3_fileserver.py
index d9b3345..bfd444e 100644
--- a/flink-end-to-end-tests/test-scripts/python3_fileserver.py
+++ b/flink-end-to-end-tests/test-scripts/python3_fileserver.py
@@ -22,7 +22,7 @@ import socketserver
 handler = http.server.SimpleHTTPRequestHandler
 
 # azure says that ports are still in use if this is not set
-SocketServer.TCPServer.allow_reuse_address = True
+socketserver.TCPServer.allow_reuse_address = True
 httpd = socketserver.TCPServer(("", ), handler)
 
 try:



[flink] branch master updated (d878cac -> a4cc8c2)

2020-11-23 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from d878cac  [FLINK-20145][tests] Add ITCases for UC with union gates and multiple input operators.
 add a4cc8c2  [FLINK-20262] Building flink-dist docker image does not work without python2

No new revisions were added by this update.

Summary of changes:
 flink-end-to-end-tests/test-scripts/common_docker.sh  | 2 +-
 flink-end-to-end-tests/test-scripts/python3_fileserver.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)



[flink] branch release-1.11 updated (f33c30f -> ef42b84)

2020-11-17 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch release-1.11
in repository https://gitbox.apache.org/repos/asf/flink.git.


from f33c30f  [FLINK-19300] Fix input stream read to prevent heap based timer loss
 add ef42b84  [FLINK-20142][doc] Update the document for CREATE TABLE LIKE that source table from different catalog is supported

No new revisions were added by this update.

Summary of changes:
 docs/dev/table/sql/create.md| 7 ++-
 docs/dev/table/sql/create.zh.md | 5 +
 2 files changed, 11 insertions(+), 1 deletion(-)



[flink] branch master updated (e33fa27 -> 23863e2)

2020-11-17 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from e33fa27  Revert "[FLINK-18500][table] Make the legacy planner exception more clear when resolving computed columns types for schema"
 add 23863e2  [FLINK-20142][doc] Update the document for CREATE TABLE LIKE that source table from different catalog is supported

No new revisions were added by this update.

Summary of changes:
 docs/dev/table/sql/create.md| 7 ++-
 docs/dev/table/sql/create.zh.md | 5 +
 2 files changed, 11 insertions(+), 1 deletion(-)



[flink] branch master updated (958fc80 -> b878f54)

2020-11-16 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 958fc80  [FLINK-20141] Update Connectors main doc page for FS sinks
 add b878f54  [FLINK-20169] Move emitting MAX_WATERMARK out of the SourceOperator processing loop

No new revisions were added by this update.

Summary of changes:
 .../streaming/api/operators/SourceOperator.java| 13 ++---
 .../runtime/tasks/SourceOperatorStreamTask.java|  8 ++
 .../api/operators/SourceOperatorTest.java  | 20 +-
 .../source/SourceOperatorEventTimeTest.java|  4 +--
 .../tasks/SourceOperatorStreamTaskTest.java| 32 +-
 .../tasks/StreamTaskMailboxTestHarness.java| 13 ++---
 6 files changed, 53 insertions(+), 37 deletions(-)



[flink-web] branch asf-site updated: [FLINK-20093] Link to SQL components download page (#391)

2020-11-13 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/flink-web.git


The following commit(s) were added to refs/heads/asf-site by this push:
 new 589b9fb  [FLINK-20093] Link to SQL components download page (#391)
589b9fb is described below

commit 589b9fbe939ebcc1a424f985448759d7785925ce
Author: Dawid Wysakowicz 
AuthorDate: Fri Nov 13 10:09:05 2020 +0100

[FLINK-20093] Link to SQL components download page (#391)
---
 downloads.md | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/downloads.md b/downloads.md
index c7714d3..50e0b98 100644
--- a/downloads.md
+++ b/downloads.md
@@ -51,9 +51,11 @@ file system connector), please check out the [Hadoop Integration]({{ site.DOCS_B
 
 {% endif %}
 
-{% if flink_release.optional_components %}
+{% if flink_release.optional_components or flink_release.sql_components_url %}
  Optional components
+{% endif %}
 
+{% if flink_release.optional_components %}
 {% assign components = flink_release.optional_components | | sort: 'name' %}
 {% for component in components %}
 
@@ -81,6 +83,12 @@ file system connector), please check out the [Hadoop Integration]({{ site.DOCS_B
 
 {% endif %}
 
+{% if flink_release.sql_components_url != nil %}
+
+SQL components download page
+
+{% endif %} 
+
 {% if flink_release.alternative_binaries %}
  Alternative Binaries
 



[flink] branch release-1.11 updated (57845ce -> 4c91f8a)

2020-11-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch release-1.11
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 57845ce  [FLINK-20079][task] Initialize operator chain before upstream partition request
 add 4c91f8a  [FLINK-20077] Fix creating a view with MATCH_RECOGNIZE clause

No new revisions were added by this update.

Summary of changes:
 .../org/apache/calcite/sql/SqlMatchRecognize.java  | 408 +
 .../operations/SqlToOperationConverterTest.java|  41 +++
 2 files changed, 449 insertions(+)
 create mode 100644 flink-table/flink-table-planner-blink/src/main/java/org/apache/calcite/sql/SqlMatchRecognize.java



[flink] branch master updated (b9b9ff3 -> 530c1f0)

2020-11-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from b9b9ff3  [FLINK-20046][python] Fix the unstable test StreamTableAggregateTests.test_map_view_iterate
 add 530c1f0  [FLINK-20077] Fix creating a view with MATCH_RECOGNIZE clause

No new revisions were added by this update.

Summary of changes:
 .../org/apache/calcite/sql/SqlMatchRecognize.java  | 400 +
 .../operations/SqlToOperationConverterTest.java|  41 +++
 2 files changed, 441 insertions(+)
 create mode 100644 flink-table/flink-table-planner-blink/src/main/java/org/apache/calcite/sql/SqlMatchRecognize.java



[flink] branch release-1.10 updated: [FLINK-20018] Allow escaping in 'pipeline.cached-files' and 'pipeline.default-kryo-serializers'

2020-11-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch release-1.10
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.10 by this push:
 new e3beef3  [FLINK-20018] Allow escaping in 'pipeline.cached-files' and 'pipeline.default-kryo-serializers'
e3beef3 is described below

commit e3beef341bbe72d040a2fdabfc30350da56017af
Author: Dawid Wysakowicz 
AuthorDate: Fri Nov 6 11:07:31 2020 +0100

[FLINK-20018] Allow escaping in 'pipeline.cached-files' and 'pipeline.default-kryo-serializers'

This commit enables escaping in options that expect a map of
string-string entries. It lets users pass options such as e.g.

pipeline.cached-files=name:file1,path:'oss://bucket/file1'
---
 .../apache/flink/api/common/ExecutionConfig.java   | 12 +---
 .../flink/api/common/cache/DistributedCache.java   | 11 +---
 .../flink/configuration/ConfigurationUtils.java| 24 
 .../configuration/StructuredOptionsSplitter.java   |  2 -
 .../StructuredOptionsSplitterTest.java |  5 +-
 ...ecutionEnvironmentComplexConfigurationTest.java | 66 +-
 6 files changed, 93 insertions(+), 27 deletions(-)

diff --git a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
index 73ff232..8526997 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
@@ -22,6 +22,7 @@ import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.ConfigurationUtils;
 import org.apache.flink.configuration.CoreOptions;
 import org.apache.flink.configuration.ExecutionOptions;
 import org.apache.flink.configuration.MetricOptions;
@@ -33,7 +34,6 @@ import org.apache.flink.util.Preconditions;
 import com.esotericsoftware.kryo.Serializer;
 
 import java.io.Serializable;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
@@ -1234,15 +1234,7 @@ public class ExecutionConfig implements Serializable, 
Archiveable kryoSerializers) {
return kryoSerializers.stream()
-   .map(v -> Arrays.stream(v.split(","))
-   .map(p -> p.split(":"))
-   .collect(
-   Collectors.toMap(
-   arr -> arr[0], // entry key
-   arr -> arr[1] // entry value
-   )
-   )
-   )
+   .map(ConfigurationUtils::parseMap)
			.collect(Collectors.toMap(
				m -> loadClass(m.get("class"), classLoader, "Could not load class for kryo serialization"),
				m -> loadClass(m.get("serializer"), classLoader, "Could not load serializer's class"),
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java b/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
index 079cfab..8b58b97 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
@@ -21,6 +21,7 @@ package org.apache.flink.api.common.cache;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.ConfigurationUtils;
 import org.apache.flink.core.fs.Path;
 
 import java.io.File;
@@ -197,15 +198,7 @@ public class DistributedCache {
 */
	public static List> parseCachedFilesFromString(List files) {
return files.stream()
-   .map(v -> Arrays.stream(v.split(","))
-   .map(p -> p.split(":"))
-   .collect(
-   Collectors.toMap(
-   arr -> arr[0], // key name
-   arr -> arr[1] // value
-   )
-   )
-   )
+   .map(ConfigurationUtils::parseMap)
.map(m -> Tuple2.of(
m.get("
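
As the commit message above explains, values in these map-style options may themselves contain ':' and ',' (for example the path 'oss://bucket/file1'), so a naive split(",") followed by split(":") no longer works and single-quote escaping is needed. The sketch below only illustrates that quoting idea; it is an assumption-laden stand-in, not the actual StructuredOptionsSplitter/ConfigurationUtils.parseMap implementation.

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Illustrative parser for map-style options such as
    //   name:file1,path:'oss://bucket/file1'
    // Single quotes protect ':' and ',' inside a value. Not Flink's implementation.
    public class MapOptionSketch {

        static Map<String, String> parseMap(String option) {
            Map<String, String> result = new LinkedHashMap<>();
            StringBuilder current = new StringBuilder();
            String key = null;
            boolean quoted = false;
            for (char c : option.toCharArray()) {
                if (c == '\'') {
                    quoted = !quoted;         // toggle quoting, drop the quote itself
                } else if (c == ':' && !quoted && key == null) {
                    key = current.toString(); // first unquoted ':' separates key and value
                    current.setLength(0);
                } else if (c == ',' && !quoted) {
                    result.put(key, current.toString());
                    key = null;
                    current.setLength(0);
                } else {
                    current.append(c);
                }
            }
            result.put(key, current.toString());
            return result;
        }

        public static void main(String[] args) {
            // Prints {name=file1, path=oss://bucket/file1}
            System.out.println(parseMap("name:file1,path:'oss://bucket/file1'"));
        }
    }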

[flink] branch release-1.10 updated: [FLINK-20018] Allow escaping in 'pipeline.cached-files' and 'pipeline.default-kryo-serializers'

2020-11-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch release-1.10
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.10 by this push:
 new e3beef3  [FLINK-20018] Allow escaping in 'pipeline.cached-files' and 
'pipeline.default-kryo-serializers'
e3beef3 is described below

commit e3beef341bbe72d040a2fdabfc30350da56017af
Author: Dawid Wysakowicz 
AuthorDate: Fri Nov 6 11:07:31 2020 +0100

[FLINK-20018] Allow escaping in 'pipeline.cached-files' and 
'pipeline.default-kryo-serializers'

This commit enables escaping in options that expect a map of
string-string entries. It lets users pass options such as e.g.

pipeline.cached-files=name:file1,path:'oss://bucket/file1'
---
 .../apache/flink/api/common/ExecutionConfig.java   | 12 +---
 .../flink/api/common/cache/DistributedCache.java   | 11 +---
 .../flink/configuration/ConfigurationUtils.java| 24 
 .../configuration/StructuredOptionsSplitter.java   |  2 -
 .../StructuredOptionsSplitterTest.java |  5 +-
 ...ecutionEnvironmentComplexConfigurationTest.java | 66 +-
 6 files changed, 93 insertions(+), 27 deletions(-)

diff --git 
a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java 
b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
index 73ff232..8526997 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
@@ -22,6 +22,7 @@ import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.ConfigurationUtils;
 import org.apache.flink.configuration.CoreOptions;
 import org.apache.flink.configuration.ExecutionOptions;
 import org.apache.flink.configuration.MetricOptions;
@@ -33,7 +34,6 @@ import org.apache.flink.util.Preconditions;
 import com.esotericsoftware.kryo.Serializer;
 
 import java.io.Serializable;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
@@ -1234,15 +1234,7 @@ public class ExecutionConfig implements Serializable, 
Archiveable kryoSerializers) {
return kryoSerializers.stream()
-   .map(v -> Arrays.stream(v.split(","))
-   .map(p -> p.split(":"))
-   .collect(
-   Collectors.toMap(
-   arr -> arr[0], // entry key
-   arr -> arr[1] // entry value
-   )
-   )
-   )
+   .map(ConfigurationUtils::parseMap)
.collect(Collectors.toMap(
m -> loadClass(m.get("class"), classLoader, 
"Could not load class for kryo serialization"),
m -> loadClass(m.get("serializer"), 
classLoader, "Could not load serializer's class"),
diff --git 
a/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
 
b/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
index 079cfab..8b58b97 100644
--- 
a/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
+++ 
b/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
@@ -21,6 +21,7 @@ package org.apache.flink.api.common.cache;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.ConfigurationUtils;
 import org.apache.flink.core.fs.Path;
 
 import java.io.File;
@@ -197,15 +198,7 @@ public class DistributedCache {
 */
public static List> 
parseCachedFilesFromString(List files) {
return files.stream()
-   .map(v -> Arrays.stream(v.split(","))
-   .map(p -> p.split(":"))
-   .collect(
-   Collectors.toMap(
-   arr -> arr[0], // key name
-   arr -> arr[1] // value
-   )
-   )
-   )
+   .map(ConfigurationUtils::parseMap)
.map(m -> Tuple2.of(
m.get("

[flink] branch release-1.10 updated: [FLINK-20018] Allow escaping in 'pipeline.cached-files' and 'pipeline.default-kryo-serializers'

2020-11-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch release-1.10
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.10 by this push:
 new e3beef3  [FLINK-20018] Allow escaping in 'pipeline.cached-files' and 
'pipeline.default-kryo-serializers'
e3beef3 is described below

commit e3beef341bbe72d040a2fdabfc30350da56017af
Author: Dawid Wysakowicz 
AuthorDate: Fri Nov 6 11:07:31 2020 +0100

[FLINK-20018] Allow escaping in 'pipeline.cached-files' and 
'pipeline.default-kryo-serializers'

This commit enables escaping in options that expect a map of
string-string entries. It lets users pass options such as e.g.

pipeline.cached-files=name:file1,path:'oss://bucket/file1'
---
 .../apache/flink/api/common/ExecutionConfig.java   | 12 +---
 .../flink/api/common/cache/DistributedCache.java   | 11 +---
 .../flink/configuration/ConfigurationUtils.java| 24 
 .../configuration/StructuredOptionsSplitter.java   |  2 -
 .../StructuredOptionsSplitterTest.java |  5 +-
 ...ecutionEnvironmentComplexConfigurationTest.java | 66 +-
 6 files changed, 93 insertions(+), 27 deletions(-)

diff --git a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
index 73ff232..8526997 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
@@ -22,6 +22,7 @@ import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.ConfigurationUtils;
 import org.apache.flink.configuration.CoreOptions;
 import org.apache.flink.configuration.ExecutionOptions;
 import org.apache.flink.configuration.MetricOptions;
@@ -33,7 +34,6 @@ import org.apache.flink.util.Preconditions;
 import com.esotericsoftware.kryo.Serializer;
 
 import java.io.Serializable;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
@@ -1234,15 +1234,7 @@ public class ExecutionConfig implements Serializable, 
Archiveable kryoSerializers) {
return kryoSerializers.stream()
-   .map(v -> Arrays.stream(v.split(","))
-   .map(p -> p.split(":"))
-   .collect(
-   Collectors.toMap(
-   arr -> arr[0], // entry key
-   arr -> arr[1] // entry value
-   )
-   )
-   )
+   .map(ConfigurationUtils::parseMap)
.collect(Collectors.toMap(
m -> loadClass(m.get("class"), classLoader, 
"Could not load class for kryo serialization"),
m -> loadClass(m.get("serializer"), 
classLoader, "Could not load serializer's class"),
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java b/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
index 079cfab..8b58b97 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
@@ -21,6 +21,7 @@ package org.apache.flink.api.common.cache;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.ConfigurationUtils;
 import org.apache.flink.core.fs.Path;
 
 import java.io.File;
@@ -197,15 +198,7 @@ public class DistributedCache {
 */
public static List> 
parseCachedFilesFromString(List files) {
return files.stream()
-   .map(v -> Arrays.stream(v.split(","))
-   .map(p -> p.split(":"))
-   .collect(
-   Collectors.toMap(
-   arr -> arr[0], // key name
-   arr -> arr[1] // value
-   )
-   )
-   )
+   .map(ConfigurationUtils::parseMap)
.map(m -> Tuple2.of(
m.get("

[flink] branch release-1.11 updated (18e4c1b -> 0ec1c4d)

2020-11-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch release-1.11
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 18e4c1b  [FLINK-20068] Enhance the topic creation guarantee to ensure 
all the brokers receive the metadata update.
 add 0ec1c4d  [FLINK-20018] Allow escaping in 'pipeline.cached-files' and 
'pipeline.default-kryo-serializers'

No new revisions were added by this update.

Summary of changes:
 .../apache/flink/api/common/ExecutionConfig.java   | 12 +---
 .../flink/api/common/cache/DistributedCache.java   | 11 +---
 .../flink/configuration/ConfigurationUtils.java| 24 
 .../configuration/StructuredOptionsSplitter.java   |  2 -
 .../StructuredOptionsSplitterTest.java |  5 +-
 ...ecutionEnvironmentComplexConfigurationTest.java | 66 +-
 6 files changed, 93 insertions(+), 27 deletions(-)



[flink] branch release-1.11 updated: [FLINK-20018] Allow escaping in 'pipeline.cached-files' and 'pipeline.default-kryo-serializers'

2020-11-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch release-1.11
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.11 by this push:
 new 0ec1c4d  [FLINK-20018] Allow escaping in 'pipeline.cached-files' and 
'pipeline.default-kryo-serializers'
0ec1c4d is described below

commit 0ec1c4d6ccf7c811b0328cf00e0833ae588aa112
Author: Dawid Wysakowicz 
AuthorDate: Fri Nov 6 11:07:31 2020 +0100

[FLINK-20018] Allow escaping in 'pipeline.cached-files' and 
'pipeline.default-kryo-serializers'

This commit enables escaping in options that expect a map of
string-string entries. It lets users pass options such as e.g.

pipeline.cached-files=name:file1,path:'oss://bucket/file1'
---
 .../apache/flink/api/common/ExecutionConfig.java   | 12 +---
 .../flink/api/common/cache/DistributedCache.java   | 11 +---
 .../flink/configuration/ConfigurationUtils.java| 24 
 .../configuration/StructuredOptionsSplitter.java   |  2 -
 .../StructuredOptionsSplitterTest.java |  5 +-
 ...ecutionEnvironmentComplexConfigurationTest.java | 66 +-
 6 files changed, 93 insertions(+), 27 deletions(-)

diff --git a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
index 73ff232..8526997 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
@@ -22,6 +22,7 @@ import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.ConfigurationUtils;
 import org.apache.flink.configuration.CoreOptions;
 import org.apache.flink.configuration.ExecutionOptions;
 import org.apache.flink.configuration.MetricOptions;
@@ -33,7 +34,6 @@ import org.apache.flink.util.Preconditions;
 import com.esotericsoftware.kryo.Serializer;
 
 import java.io.Serializable;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
@@ -1234,15 +1234,7 @@ public class ExecutionConfig implements Serializable, 
Archiveable kryoSerializers) {
return kryoSerializers.stream()
-   .map(v -> Arrays.stream(v.split(","))
-   .map(p -> p.split(":"))
-   .collect(
-   Collectors.toMap(
-   arr -> arr[0], // entry key
-   arr -> arr[1] // entry value
-   )
-   )
-   )
+   .map(ConfigurationUtils::parseMap)
.collect(Collectors.toMap(
m -> loadClass(m.get("class"), classLoader, 
"Could not load class for kryo serialization"),
m -> loadClass(m.get("serializer"), 
classLoader, "Could not load serializer's class"),
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java b/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
index 079cfab..8b58b97 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/cache/DistributedCache.java
@@ -21,6 +21,7 @@ package org.apache.flink.api.common.cache;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.ConfigurationUtils;
 import org.apache.flink.core.fs.Path;
 
 import java.io.File;
@@ -197,15 +198,7 @@ public class DistributedCache {
 */
public static List> 
parseCachedFilesFromString(List files) {
return files.stream()
-   .map(v -> Arrays.stream(v.split(","))
-   .map(p -> p.split(":"))
-   .collect(
-   Collectors.toMap(
-   arr -> arr[0], // key name
-   arr -> arr[1] // value
-   )
-   )
-   )
+   .map(ConfigurationUtils::parseMap)
.map(m -> Tuple2.of(
m.get("

[flink] branch master updated (0c36c84 -> 5af1d00)

2020-11-11 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 0c36c84  [FLINK-20035][tests] Use random port for rest endpoint in 
BlockingShuffleITCase and ShuffleCompressionITCase
 add 5af1d00  [FLINK-20018] Allow escaping in 'pipeline.cached-files' and 
'pipeline.default-kryo-serializers'

No new revisions were added by this update.

Summary of changes:
 .../apache/flink/api/common/ExecutionConfig.java   | 12 +
 .../flink/api/common/cache/DistributedCache.java   | 11 +---
 .../flink/configuration/ConfigurationUtils.java| 23 
 .../configuration/StructuredOptionsSplitter.java   |  2 -
 .../StructuredOptionsSplitterTest.java |  5 +-
 ...ecutionEnvironmentComplexConfigurationTest.java | 63 --
 6 files changed, 89 insertions(+), 27 deletions(-)



[flink] branch master updated (dc56273 -> c037dcb)

2020-11-07 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from dc56273  [FLINK-19544][k8s] Implement CheckpointRecoveryFactory based 
on Kubernetes API
 add c037dcb  [FLINK-20002] Add a 
StreamExecutionEnvironment#getExecutionEnvironment(Configuration) method

No new revisions were added by this update.

Summary of changes:
 .../client/program/StreamContextEnvironment.java   | 17 ---
 .../client/program/StreamPlanEnvironment.java  |  5 +-
 .../api/java/ScalaShellStreamEnvironment.java  |  2 +-
 .../api/environment/LocalStreamEnvironment.java|  4 --
 .../environment/StreamExecutionEnvironment.java| 53 ++
 .../StreamExecutionEnvironmentFactory.java |  5 +-
 ...ecutionEnvironmentComplexConfigurationTest.java | 34 ++
 .../streaming/util/TestStreamEnvironment.java  |  6 ++-
 .../datastream/DataStreamBatchExecutionITCase.java |  7 +--
 9 files changed, 108 insertions(+), 25 deletions(-)
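
A minimal sketch of the new overload added by FLINK-20002,
StreamExecutionEnvironment#getExecutionEnvironment(Configuration). The
option set here (CoreOptions.DEFAULT_PARALLELISM) and the tiny pipeline
are illustrative only.

    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.configuration.CoreOptions;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class ConfiguredEnvironmentSketch {
        public static void main(String[] args) throws Exception {
            // Build the configuration up front instead of mutating the
            // environment after it has been created.
            Configuration conf = new Configuration();
            conf.setInteger(CoreOptions.DEFAULT_PARALLELISM, 4);

            // The configuration is taken into account when the environment
            // is created.
            StreamExecutionEnvironment env =
                    StreamExecutionEnvironment.getExecutionEnvironment(conf);

            env.fromElements(1, 2, 3).print();
            env.execute("configured-environment-sketch");
        }
    }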



[flink] branch master updated (4370139 -> 15fa170)

2020-11-06 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 4370139  [FLINK-19699][e2e] Collect dmesg output after tests
 add 15fa170  [FLINK-18546] Upgrade to Kafka Schema Registry Client 5.5.2

No new revisions were added by this update.

Summary of changes:
 .../dev/table/connectors/formats/avro-confluent.md |   2 +-
 .../flink-confluent-schema-registry/pom.xml|   1 -
 .../flink-avro-confluent-registry/pom.xml  |  75 +--
 .../src/main/resources/META-INF/NOTICE |  14 -
 .../pom.xml|  51 +-
 .../src/main/resources/META-INF/NOTICE |  25 +
 .../licenses/LICENSE.jakarta-annotation-api| 637 +
 .../META-INF/licenses/LICENSE.jakarta-inject   | 637 +
 .../META-INF/licenses/LICENSE.jakarta-ws-rs-api| 637 +
 .../licenses/LICENSE.osgi-resource-locator | 637 +
 flink-formats/pom.xml  |   1 +
 11 files changed, 2631 insertions(+), 86 deletions(-)
 delete mode 100644 
flink-formats/flink-avro-confluent-registry/src/main/resources/META-INF/NOTICE
 copy flink-formats/{flink-sql-avro => 
flink-sql-avro-confluent-registry}/pom.xml (51%)
 create mode 100644 
flink-formats/flink-sql-avro-confluent-registry/src/main/resources/META-INF/NOTICE
 create mode 100644 
flink-formats/flink-sql-avro-confluent-registry/src/main/resources/META-INF/licenses/LICENSE.jakarta-annotation-api
 create mode 100644 
flink-formats/flink-sql-avro-confluent-registry/src/main/resources/META-INF/licenses/LICENSE.jakarta-inject
 create mode 100644 
flink-formats/flink-sql-avro-confluent-registry/src/main/resources/META-INF/licenses/LICENSE.jakarta-ws-rs-api
 create mode 100644 
flink-formats/flink-sql-avro-confluent-registry/src/main/resources/META-INF/licenses/LICENSE.osgi-resource-locator



[flink] branch master updated (a2267fa -> a6c064e)

2020-11-06 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from a2267fa  [FLINK-19533][checkpoint] Add InflightDataRescalingDescriptor 
during rescaling.
 add a6c064e  [FLINK-19837][DataStream] Don't emit intermediate watermarks 
from watermark operators in BATCH execution mode

No new revisions were added by this update.

Summary of changes:
 .../flink/streaming/api/datastream/DataStream.java | 18 ++---
 .../streaming/api/graph/StreamGraphGenerator.java  |  3 +
 .../streaming/api/operators/SourceOperator.java|  2 +-
 .../api/operators/SourceOperatorFactory.java   |  2 +-
 ... => TimestampsAndWatermarksTransformation.java} | 71 ++--
 .../operators/TimestampsAndWatermarksOperator.java | 20 --
 .../SourceTransformationTranslator.java|  4 +-
 ...tampsAndWatermarksTransformationTranslator.java | 77 ++
 .../TimestampsAndWatermarksOperatorTest.java   | 65 --
 9 files changed, 205 insertions(+), 57 deletions(-)
 copy 
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/transformations/{SourceTransformation.java
 => TimestampsAndWatermarksTransformation.java} (56%)
 create mode 100644 
flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/translators/TimestampsAndWatermarksTransformationTranslator.java
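
A hedged sketch of the kind of pipeline the FLINK-19837 change applies to:
a bounded job run in BATCH execution mode with a timestamps/watermarks
operator, which under this change no longer forwards intermediate
watermarks while records are flowing. Using the element values as
timestamps is illustrative only.

    import org.apache.flink.api.common.RuntimeExecutionMode;
    import org.apache.flink.api.common.eventtime.WatermarkStrategy;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class BatchWatermarksSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env =
                    StreamExecutionEnvironment.getExecutionEnvironment();
            // Bounded input + BATCH mode: the watermark operator suppresses
            // per-record (intermediate) watermarks.
            env.setRuntimeMode(RuntimeExecutionMode.BATCH);

            env.fromElements(1L, 2L, 3L)
                    .assignTimestampsAndWatermarks(
                            WatermarkStrategy.<Long>forMonotonousTimestamps()
                                    .withTimestampAssigner((value, ts) -> value))
                    .print();

            env.execute("batch-watermarks-sketch");
        }
    }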



[flink] branch master updated (36c3326 -> 7f5dd53)

2020-11-04 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 36c3326  [FLINK-19867][table-common] Validation fails for UDF that 
accepts var-args
 add 7f5dd53  [FLINK-19952][core] Replace deprecated ConfigOption builder 
in SecurityOptions.

No new revisions were added by this update.

Summary of changes:
 .../flink/configuration/SecurityOptions.java   | 76 --
 1 file changed, 56 insertions(+), 20 deletions(-)
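
For context, a small sketch of the builder migration FLINK-19952 performs
in SecurityOptions: the deprecated untyped defaultValue() call is replaced
by the typed ConfigOptions builder. The option key used here is
illustrative and is not an actual SecurityOptions entry.

    import org.apache.flink.configuration.ConfigOption;
    import org.apache.flink.configuration.ConfigOptions;

    public class TypedConfigOptionSketch {

        // Deprecated builder: the option type is inferred from the default value.
        @SuppressWarnings("deprecation")
        public static final ConfigOption<Boolean> OLD_STYLE =
                ConfigOptions.key("example.feature.enabled")
                        .defaultValue(false)
                        .withDescription("Illustrative option only.");

        // Replacement builder: the type is declared explicitly before the default.
        public static final ConfigOption<Boolean> NEW_STYLE =
                ConfigOptions.key("example.feature.enabled")
                        .booleanType()
                        .defaultValue(false)
                        .withDescription("Illustrative option only.");
    }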



[flink] branch master updated (477d37d -> 5b4a370)

2020-11-03 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 477d37d  [FLINK-19901][metrics] Fix caching offset for variables
 add 5468fab  [hotfix] Add Operator suffix to StreamGroupedReduce
 add 5b4a370  [FLINK-19931] Do not emit intermediate results for reduce 
operation BATCH execution mode

No new revisions were added by this update.

Summary of changes:
 .../streaming/api/datastream/KeyedStream.java  |  20 ++--
 .../streaming/api/graph/StreamGraphGenerator.java  |   3 +
 ...Reduce.java => BatchGroupedReduceOperator.java} |  44 ++---
 ...educe.java => StreamGroupedReduceOperator.java} |   7 +-
 .../api/transformations/ReduceTransformation.java  | 101 +
 ... AbstractOneInputTransformationTranslator.java} |  81 +++--
 .../OneInputTransformationTranslator.java  |  76 
 .../ReduceTransformationTranslator.java|  82 +
 .../streaming/api/AggregationFunctionTest.java |  30 +++---
 ...t.java => StreamGroupedReduceOperatorTest.java} |  10 +-
 .../flink/streaming/api/scala/KeyedStream.scala|   7 +-
 .../datastream/DataStreamBatchExecutionITCase.java |  43 +
 .../UdfStreamOperatorCheckpointingITCase.java  |   4 +-
 13 files changed, 354 insertions(+), 154 deletions(-)
 copy 
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/{StreamGroupedReduce.java
 => BatchGroupedReduceOperator.java} (61%)
 rename 
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/{StreamGroupedReduce.java
 => StreamGroupedReduceOperator.java} (89%)
 create mode 100644 
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/transformations/ReduceTransformation.java
 copy 
flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/translators/{OneInputTransformationTranslator.java
 => AbstractOneInputTransformationTranslator.java} (51%)
 create mode 100644 
flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/translators/ReduceTransformationTranslator.java
 rename 
flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/{StreamGroupedReduceTest.java
 => StreamGroupedReduceOperatorTest.java} (93%)
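
A hedged sketch of the keyed reduce that FLINK-19931 affects: in BATCH
execution mode the grouped reduce emits one final result per key once its
input is exhausted, rather than an updated value for every incoming
record. The tuples and key used here are illustrative.

    import org.apache.flink.api.common.RuntimeExecutionMode;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class BatchReduceSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env =
                    StreamExecutionEnvironment.getExecutionEnvironment();
            env.setRuntimeMode(RuntimeExecutionMode.BATCH);

            // Each key produces a single sum in BATCH mode instead of an
            // intermediate result per input element.
            env.fromElements(
                            Tuple2.of("a", 1), Tuple2.of("a", 2), Tuple2.of("b", 10))
                    .keyBy(t -> t.f0)
                    .reduce((t1, t2) -> Tuple2.of(t1.f0, t1.f1 + t2.f1))
                    .print();

            env.execute("batch-reduce-sketch");
        }
    }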



[flink] branch master updated (bfc8a9e -> f84acf6)

2020-11-02 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from bfc8a9e  [hotfix][table-runtime] Temporarily disable unstable tests in 
TemporalJoinITCase
 add f84acf6  [FLINK-19915][cep] fix comments bug of cep test.

No new revisions were added by this update.

Summary of changes:
 .../test/java/org/apache/flink/cep/nfa/compiler/NFACompilerTest.java| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)



[flink] branch master updated (6b5d4c0 -> 46b0f8b)

2020-11-02 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 6b5d4c0  [FLINK-19855][network] Specify channel AND gate in 
resume/block consumption()
 add 46b0f8b  [FLINK-19900] Remove surefire log4j configuration from pom.xml

No new revisions were added by this update.

Summary of changes:
 flink-connectors/flink-connector-kafka/pom.xml | 2 +-
 pom.xml| 2 --
 2 files changed, 1 insertion(+), 3 deletions(-)



[flink] branch master updated (ddc163a -> da67eeb)

2020-10-30 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from ddc163a  [FLINK-19762][WebUI] Improve content selection when 
double-clicking IDs
 add da67eeb  [FLINK-18363] Add user classloader to context in 
DeSerializationSchema

No new revisions were added by this update.

Summary of changes:
 .../connectors/gcp/pubsub/PubSubSink.java  |   6 +-
 .../connectors/gcp/pubsub/PubSubSource.java|   6 +-
 .../connectors/kafka/FlinkKafkaConsumerBase.java   |   8 +-
 .../connectors/kafka/FlinkKafkaProducer.java   |   6 +-
 .../connectors/kafka/FlinkKafkaProducerBase.java   |   6 +-
 .../connectors/kinesis/FlinkKinesisProducer.java   |   6 +-
 .../kinesis/internals/KinesisDataFetcher.java  |  26 +++--
 .../streaming/connectors/rabbitmq/RMQSink.java |   6 +-
 .../streaming/connectors/rabbitmq/RMQSource.java   |   6 +-
 .../streaming/connectors/rabbitmq/RMQSinkTest.java |   5 +
 .../serialization/DeserializationSchema.java   |  10 ++
 ...untimeContextInitializationContextAdapters.java | 127 +
 .../common/serialization/SerializationSchema.java  |  10 ++
 .../java/connectors/SocketSourceFunction.java  |   5 +-
 14 files changed, 216 insertions(+), 17 deletions(-)
 create mode 100644 
flink-core/src/main/java/org/apache/flink/api/common/serialization/RuntimeContextInitializationContextAdapters.java



[flink] branch master updated (fada6fb -> 146269d)

2020-10-29 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from fada6fb  [FLINK-18820] Emit MAX_WATERMARK at the end in SourceOperator
 add f570d04  [FLINK-19779][avro] Remove the "record_" field name prefix 
for Avro format deserialization
 add 146269d  [FLINK-19786][avro] Fix the nullability and precision for 
Avro format deserialization

No new revisions were added by this update.

Summary of changes:
 .../connector/hbase/util/HBaseTableSchema.java |  31 ++-
 .../flink/connectors/hive/HiveTableSinkITCase.java |   2 +-
 .../RegistryAvroRowDataSeDeSchemaTest.java |   5 +-
 .../avro/typeutils/AvroSchemaConverter.java| 106 +---
 .../avro/AvroRowDataDeSerializationSchemaTest.java |  10 +-
 .../avro/typeutils/AvroSchemaConverterTest.java| 287 +++--
 flink-python/pyflink/table/table_environment.py|   2 +-
 .../pyflink/table/tests/test_pandas_conversion.py  |   2 +-
 .../pyflink/table/tests/test_table_schema.py   |   3 +-
 .../client/gateway/utils/SimpleCatalogFactory.java |   4 +-
 .../org/apache/flink/table/api/TableSchema.java|   9 +-
 .../table/factories/FileSystemFormatFactory.java   |   2 +
 .../apache/flink/table/sources/TableSource.java|   2 +-
 .../apache/flink/table/types/logical/RowType.java  |   6 +-
 .../apache/flink/table/api/TableSchemaTest.java|  46 +++-
 .../plan/nodes/dataset/BatchTableSourceScan.scala  |   3 +-
 .../nodes/datastream/StreamTableSourceScan.scala   |   3 +-
 .../table/filesystem/FileSystemTableSource.java|   3 +-
 18 files changed, 442 insertions(+), 84 deletions(-)



[flink] branch master updated (1e42bb4 -> fada6fb)

2020-10-29 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 1e42bb4  [FLINK-19712][Coordination] Do not restart CREATED executions 
in RestartPipelinedRegionFailoverStrategy
 add fada6fb  [FLINK-18820] Emit MAX_WATERMARK at the end in SourceOperator

No new revisions were added by this update.

Summary of changes:
 .../streaming/api/operators/SourceOperator.java| 13 -
 .../runtime/tasks/SourceOperatorStreamTask.java|  8 ++-
 .../api/operators/SourceOperatorTest.java  | 20 +++-
 .../source/SourceOperatorEventTimeTest.java| 12 +++--
 .../tasks/SourceOperatorStreamTaskTest.java| 57 --
 5 files changed, 87 insertions(+), 23 deletions(-)



[flink] branch master updated (2242358 -> e178e4f)

2020-10-27 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 2242358  [minor] Fix warnings in KeyedStream
 add e178e4f  [hotfix] Log interrupted exception on debug when closing 
threads in ExternalSorter

No new revisions were added by this update.

Summary of changes:
 .../runtime/operators/sort/ExternalSorter.java | 32 --
 1 file changed, 17 insertions(+), 15 deletions(-)



[flink] branch master updated (a889332 -> 2ff3b77)

2020-10-16 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from a889332  [FLINK-19324][yarn] Match requested and allocated containers 
with priority rather than resources.
 add daa54cb  [hotfix] Fixes in CEP operator
 add 2ff3b77  [FLINK-19640] Enable sorting inputs for batch

No new revisions were added by this update.

Summary of changes:
 .../flink/configuration/ExecutionOptions.java  |  22 +
 .../org/apache/flink/cep/operator/CepOperator.java |  19 +-
 .../environment/StreamExecutionEnvironment.java|   8 +-
 .../streaming/api/graph/StreamGraphGenerator.java  |  54 ++-
 .../flink/streaming/api/graph/StreamNode.java  |   3 +-
 .../api/graph/TransformationTranslator.java|   6 +
 .../api/operators/InternalTimeServiceManager.java  |   4 +-
 .../runtime/translators/BatchExecutionUtils.java   |  98 
 .../MultiInputTransformationTranslator.java|   7 +-
 .../OneInputTransformationTranslator.java  |   8 +-
 .../TwoInputTransformationTranslator.java  |   8 +-
 .../StreamGraphGeneratorBatchExecutionTest.java| 497 +
 12 files changed, 709 insertions(+), 25 deletions(-)
 create mode 100644 
flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/translators/BatchExecutionUtils.java
 create mode 100644 
flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorBatchExecutionTest.java



[flink] branch master updated (318c7b6 -> 1dbdd64)

2020-10-14 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 318c7b6  [FLINK-17073][checkpointing][refactor] Remove callback from 
CheckpointsCleaner
 add ab20285  [hotfix] Drop unnecessary PowerMockRunner from RMQSourceTest
 add 1dbdd64  [FLINK-17502] [flink-connector-rabbitmq] RMQSource custom 
deserialization schema

No new revisions were added by this update.

Summary of changes:
 .../rabbitmq/RMQDeserializationSchema.java | 100 
 .../rabbitmq/RMQDeserializationSchemaWrapper.java  |  64 
 .../streaming/connectors/rabbitmq/RMQSource.java   | 172 +++--
 .../connectors/rabbitmq/RMQSourceTest.java | 102 ++--
 4 files changed, 375 insertions(+), 63 deletions(-)
 create mode 100644 
flink-connectors/flink-connector-rabbitmq/src/main/java/org/apache/flink/streaming/connectors/rabbitmq/RMQDeserializationSchema.java
 create mode 100644 
flink-connectors/flink-connector-rabbitmq/src/main/java/org/apache/flink/streaming/connectors/rabbitmq/RMQDeserializationSchemaWrapper.java



[flink] branch master updated: [FLINK-19475] Implement a time service for the batch execution mode

2020-10-14 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new eda2111  [FLINK-19475] Implement a time service for the batch 
execution mode
eda2111 is described below

commit eda211154f6735eca3e0c9c70ae617c388ef057c
Author: Dawid Wysakowicz 
AuthorDate: Wed Sep 23 14:52:22 2020 +0200

[FLINK-19475] Implement a time service for the batch execution mode

I introduce a BatchExecutionInternalTimeServiceManager and
BatchExecutionInternalTimeService which can be used in the batch
execution mode along with the BatchExecutionStateBackend. These services
only ever keep state for a single key at a time. They assume a perfect
Watermark and fire timers only upon switching the current key. Therefore
they require the input to be sorted/grouped by the key.
---
 .../flink/streaming/api/graph/StreamConfig.java|  20 +
 .../flink/streaming/api/graph/StreamGraph.java |  10 +
 .../api/graph/StreamingJobGraphGenerator.java  |   1 +
 .../operators/InternalTimeServiceManagerImpl.java  |   5 -
 .../operators/StreamTaskStateInitializerImpl.java  |  18 +-
 .../state/BatchExecutionInternalTimeService.java   | 151 +++
 .../BatchExecutionInternalTimeServiceManager.java  | 115 +
 .../flink/streaming/runtime/tasks/StreamTask.java  |  10 +-
 .../BatchExecutionInternalTimeServiceTest.java | 487 +
 .../util/AbstractStreamOperatorTestHarness.java|  57 ++-
 .../restore/StreamOperatorSnapshotRestoreTest.java |  43 +-
 11 files changed, 852 insertions(+), 65 deletions(-)

diff --git 
a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamConfig.java
 
b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamConfig.java
index b035195..67f025d 100644
--- 
a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamConfig.java
+++ 
b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamConfig.java
@@ -33,6 +33,7 @@ import 
org.apache.flink.runtime.util.config.memory.ManagedMemoryUtils;
 import org.apache.flink.streaming.api.CheckpointingMode;
 import org.apache.flink.streaming.api.TimeCharacteristic;
 import 
org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions;
+import org.apache.flink.streaming.api.operators.InternalTimeServiceManager;
 import org.apache.flink.streaming.api.operators.SimpleOperatorFactory;
 import org.apache.flink.streaming.api.operators.StreamOperator;
 import org.apache.flink.streaming.api.operators.StreamOperatorFactory;
@@ -94,6 +95,7 @@ public class StreamConfig implements Serializable {
private static final String CHECKPOINT_MODE = "checkpointMode";
 
private static final String STATE_BACKEND = "statebackend";
+   private static final String TIMER_SERVICE_PROVIDER = "timerservice";
private static final String STATE_PARTITIONER = "statePartitioner";
 
private static final String STATE_KEY_SERIALIZER = "statekeyser";
@@ -536,6 +538,24 @@ public class StreamConfig implements Serializable {
}
}
 
+   public void setTimerServiceProvider(InternalTimeServiceManager.Provider 
timerServiceProvider) {
+   if (timerServiceProvider != null) {
+   try {
+   
InstantiationUtil.writeObjectToConfig(timerServiceProvider, this.config, 
TIMER_SERVICE_PROVIDER);
+   } catch (Exception e) {
+   throw new StreamTaskException("Could not 
serialize timer service provider.", e);
+   }
+   }
+   }
+
+   public InternalTimeServiceManager.Provider 
getTimerServiceProvider(ClassLoader cl) {
+   try {
+   return 
InstantiationUtil.readObjectFromConfig(this.config, TIMER_SERVICE_PROVIDER, cl);
+   } catch (Exception e) {
+   throw new StreamTaskException("Could not instantiate 
timer service provider.", e);
+   }
+   }
+
public void setStatePartitioner(int input, KeySelector 
partitioner) {
try {
InstantiationUtil.writeObjectToConfig(partitioner, 
this.config, STATE_PARTITIONER + input);
diff --git 
a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamGraph.java
 
b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamGraph.java
index 8f91fdd..7bab18a 100644
--- 
a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamGraph.java
+++ 
b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamGraph.java
@@ -40,6 +40,7 @@ import 
org.apache.flink.runtime.jobgraph.tasks.AbstractI
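
The commit message above captures the core idea: in BATCH execution mode the
input is sorted/grouped by key, so the time service only needs timer state for
the key currently being processed and can fire all of that key's timers as soon
as the input switches to the next key, as if a perfect watermark had arrived.
The sketch below only illustrates that idea; it is not the
BatchExecutionInternalTimeService added by this commit, and every name in it
(SingleKeyTimerSketch, registerTimer, setCurrentKey) is hypothetical.

    import java.util.PriorityQueue;
    import java.util.function.BiConsumer;

    // Conceptual sketch, not the Flink implementation: keeps timers for a
    // single key at a time and fires them when the current key changes.
    public final class SingleKeyTimerSketch<K> {
        private final PriorityQueue<Long> timers = new PriorityQueue<>();
        private final BiConsumer<K, Long> onTimer; // invoked once per fired timer
        private K currentKey;

        public SingleKeyTimerSketch(BiConsumer<K, Long> onTimer) {
            this.onTimer = onTimer;
        }

        // Register an event-time timer for the key currently being processed.
        public void registerTimer(long timestamp) {
            timers.add(timestamp);
        }

        // Called when the grouped input moves on to the next key. The previous
        // key is then complete, so all of its timers fire immediately, which is
        // what "assume a perfect Watermark" amounts to in this setting.
        public void setCurrentKey(K newKey) {
            if (currentKey != null && !currentKey.equals(newKey)) {
                while (!timers.isEmpty()) {
                    onTimer.accept(currentKey, timers.poll());
                }
            }
            currentKey = newKey;
        }
    }

At end of input a real implementation would also have to flush the timers of
the last key; that detail is omitted in this sketch.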

[flink] branch master updated (c3fedf3 -> 77e4e3b)

2020-10-12 Thread dwysakowicz
This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from c3fedf3  [hotfix] Make Transformation.getTransitivePredecessors() 
return a list
 add 0c0dbf5  [refactor] Reuse StreamOneInputProcessor in 
StreamMultiProcessor
 add f87a6ef  [refactor] Reuse StreamOneInputProcessor in 
StreamTwoInputProcessor
 add 4c4f6a7  [refactor] Remove SourceInputProcessor from 
StreamMultipleInputProcessor
 add b1317fa  [refactor] Extract StreamMultipleInputProcessor factory method
 add 2aa8345  [refactor] Use BoundedMultiInput instead of OperatorChain in 
InputProcessors
 add e8f500b  [refactor] Call nextSelection after each emitNext
 add fa43b75  [FLINK-19473] Implement multi inputs sorting DataInput
 add 24708d0  [minor] Minor fixes in javadocs and an exception message in 
SortingDataInput
 add 4ddf954  [refactor] Extract StreamTwoInputProcessor factory method
 add 574edb3  [FLINK-19473] Enable two input sorting in TwoInputStreamTask
 add 77e4e3b  [FLINK-19473] Enable sorting inputs in MultiInputTasks

No new revisions were added by this update.

Summary of changes:
 .../streaming/api/operators/StreamSource.java  |   2 +-
 .../operators/sort/MultiInputSortingDataInput.java | 463 +
 .../api/operators/sort/SortingDataInput.java   |   7 +-
 .../streaming/runtime/io/StreamInputProcessor.java |   4 +
 .../runtime/io/StreamMultipleInputProcessor.java   | 260 +---
 .../io/StreamMultipleInputProcessorFactory.java| 321 ++
 .../runtime/io/StreamOneInputProcessor.java|  10 +-
 .../runtime/io/StreamTaskSourceInput.java  |  12 +-
 .../runtime/io/StreamTwoInputProcessor.java| 235 ++-
 .../runtime/io/StreamTwoInputProcessorFactory.java | 287 +
 .../runtime/tasks/AbstractTwoInputStreamTask.java  |  14 +-
 .../runtime/tasks/MultipleInputStreamTask.java |  25 +-
 .../streaming/runtime/tasks/OperatorChain.java |   8 +-
 .../runtime/tasks/SourceOperatorStreamTask.java|   2 +-
 .../runtime/tasks/TwoInputStreamTask.java  |  24 +-
 .../{source => sort}/CollectingDataOutput.java |   3 +-
 .../api/operators/sort/CollectionDataInput.java|  82 
 ...TCase.java => LargeSortingDataInputITCase.java} |  84 +++-
 .../sort/MultiInputSortingDataInputsTest.java  | 211 ++
 .../api/operators/sort/SortingDataInputTest.java   |  88 +---
 .../runtime/SortingBoundedInputITCase.java | 283 -
 21 files changed, 1801 insertions(+), 624 deletions(-)
 create mode 100644 
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/sort/MultiInputSortingDataInput.java
 create mode 100644 
flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/io/StreamMultipleInputProcessorFactory.java
 create mode 100644 
flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/io/StreamTwoInputProcessorFactory.java
 copy 
flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/{source
 => sort}/CollectingDataOutput.java (97%)
 create mode 100644 
flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/CollectionDataInput.java
 rename 
flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/{SortingDataInputITCase.java
 => LargeSortingDataInputITCase.java} (71%)
 create mode 100644 
flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/MultiInputSortingDataInputsTest.java


