(flink) 02/03: [hotfix] Fix the StateTtlConfig#newBuilder doc from Time to Duration

2024-03-01 Thread tangyun
This is an automated email from the ASF dual-hosted git repository.

tangyun pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 697d2b62a5a070ce074550b8767fbc33859db83d
Author: Rui Fan <1996fan...@gmail.com>
AuthorDate: Tue Feb 27 13:55:19 2024 +0800

[hotfix] Fix the StateTtlConfig#newBuilder doc from Time to Duration
---
 .../docs/dev/datastream/fault-tolerance/state.md | 20 ++--
 .../docs/dev/datastream/fault-tolerance/state.md | 20 ++--
 2 files changed, 20 insertions(+), 20 deletions(-)
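
For readers following the doc fix end to end, this is the pattern the corrected snippets now document; a self-contained sketch (the class wrapper and the state descriptor are illustrative, not part of the commit):

```java
import java.time.Duration;

import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;

public class TtlConfigExample {
    public static void main(String[] args) {
        // As of Flink 1.19, newBuilder takes java.time.Duration instead of
        // the deprecated org.apache.flink.api.common.time.Time.
        StateTtlConfig ttlConfig = StateTtlConfig
                .newBuilder(Duration.ofSeconds(1))
                .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
                .build();

        // TTL is activated by attaching the config to a state descriptor.
        ValueStateDescriptor<String> descriptor =
                new ValueStateDescriptor<>("text-state", Types.STRING);
        descriptor.enableTimeToLive(ttlConfig);
    }
}
```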

diff --git a/docs/content.zh/docs/dev/datastream/fault-tolerance/state.md 
b/docs/content.zh/docs/dev/datastream/fault-tolerance/state.md
index 80f444b13c5..2635c11f1ec 100644
--- a/docs/content.zh/docs/dev/datastream/fault-tolerance/state.md
+++ b/docs/content.zh/docs/dev/datastream/fault-tolerance/state.md
@@ -305,7 +305,7 @@ import org.apache.flink.api.common.state.ValueStateDescriptor;
 import org.apache.flink.api.common.time.Time;
 
 StateTtlConfig ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
 .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
 .build();
@@ -321,7 +321,7 @@ import org.apache.flink.api.common.state.ValueStateDescriptor
 import org.apache.flink.api.common.time.Time
 
 val ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
 .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
 .build
@@ -399,7 +399,7 @@ The heap state backend additionally stores a Java object containing the user state and a timestamp
 import org.apache.flink.api.common.state.StateTtlConfig;
 
 StateTtlConfig ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .disableCleanupInBackground()
 .build();
 ```
@@ -409,7 +409,7 @@ StateTtlConfig ttlConfig = StateTtlConfig
 import org.apache.flink.api.common.state.StateTtlConfig
 
 val ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .disableCleanupInBackground
 .build
 ```
@@ -441,7 +441,7 @@ import org.apache.flink.api.common.state.StateTtlConfig;
 import org.apache.flink.api.common.time.Time;
 
 StateTtlConfig ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .cleanupFullSnapshot()
 .build();
 ```
@@ -452,7 +452,7 @@ import org.apache.flink.api.common.state.StateTtlConfig
 import org.apache.flink.api.common.time.Time
 
 val ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .cleanupFullSnapshot
 .build
 ```
@@ -487,7 +487,7 @@ ttl_config = StateTtlConfig \
 ```java
 import org.apache.flink.api.common.state.StateTtlConfig;
  StateTtlConfig ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .cleanupIncrementally(10, true)
 .build();
 ```
@@ -496,7 +496,7 @@ import org.apache.flink.api.common.state.StateTtlConfig;
 ```scala
 import org.apache.flink.api.common.state.StateTtlConfig
 val ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .cleanupIncrementally(10, true)
 .build
 ```
@@ -537,7 +537,7 @@ The RocksDB compaction filter provided by Flink filters out expired entries during compaction
 import org.apache.flink.api.common.state.StateTtlConfig;
 
 StateTtlConfig ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .cleanupInRocksdbCompactFilter(1000, Duration.ofHours(1))
 .build();
 ```
@@ -547,7 +547,7 @@ StateTtlConfig ttlConfig = StateTtlConfig
 import org.apache.flink.api.common.state.StateTtlConfig
 
 val ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .cleanupInRocksdbCompactFilter(1000, Duration.ofHours(1))
 .build
 ```
diff --git a/docs/content/docs/dev/datastream/fault-tolerance/state.md 
b/docs/content/docs/dev/datastream/fault-tolerance/state.md
index 0d5705ee196..233f9bdc01b 100644
--- a/docs/content/docs/dev/datastream/fault-tolerance/state.md
+++ b/docs/content/docs/dev/datastream/fault-tolerance/state.md
@@ -344,7 +344,7 @@ import org.apache.flink.api.common.state.ValueStateDescriptor;
 import org.apache.flink.api.common.time.Time;
 
 StateTtlConfig ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
 .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
 .build();
@@ -360,7 +360,7 @@ import org.apache.flink.api.common.state.ValueStateDescriptor
 import org.apache.flink.api.common.time.Time
 
 val ttlConfig = StateTtlConfig
-.newBuilder(Time.seconds(1))
+.newBuilder(Duration.ofSeconds(1))
 

(flink) 03/03: [Hotfix] Fix Duration class can't load for pyflink

2024-03-01 Thread tangyun
This is an automated email from the ASF dual-hosted git repository.

tangyun pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 7618bdeeab06c09219136a04a62262148c677134
Author: Roc Marshal 
AuthorDate: Fri Mar 1 09:57:48 2024 +0800

[Hotfix] Fix Duration class can't load for pyflink
---
 flink-python/pyflink/datastream/state.py | 10 ++
 1 file changed, 6 insertions(+), 4 deletions(-)
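
The commit title points at Python's eager evaluation of default arguments: `periodic_compaction_time=Duration.of_days(30)` runs once at class-definition (module-import) time, which is where the `Duration` class failed to load, so the patch below switches to a `None` sentinel resolved at call time. A Java transliteration of that defaulting rule, as a sketch (names hypothetical, for illustration only):

```java
import java.time.Duration;

// Sketch of the rule the pyflink fix implements: resolve the 30-day default
// when the method is called, not when the class is loaded.
final class CompactFilterDefaults {

    private static final Duration DEFAULT_PERIODIC_COMPACTION = Duration.ofDays(30);

    // null plays the role of Python's None sentinel default.
    static Duration resolvePeriodicCompactionTime(Duration requested) {
        return requested != null ? requested : DEFAULT_PERIODIC_COMPACTION;
    }

    public static void main(String[] args) {
        System.out.println(resolvePeriodicCompactionTime(null));                // PT720H (30 days)
        System.out.println(resolvePeriodicCompactionTime(Duration.ofHours(1))); // PT1H
    }
}
```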

diff --git a/flink-python/pyflink/datastream/state.py 
b/flink-python/pyflink/datastream/state.py
index a3dc9f6df84..4216561299c 100644
--- a/flink-python/pyflink/datastream/state.py
+++ b/flink-python/pyflink/datastream/state.py
@@ -809,7 +809,7 @@ class StateTtlConfig(object):
 def cleanup_in_rocksdb_compact_filter(
 self,
 query_time_after_num_entries,
-periodic_compaction_time=Duration.of_days(30)) -> \
+periodic_compaction_time=None) -> \
 'StateTtlConfig.Builder':
 """
 Cleanup expired state while Rocksdb compaction is running.
@@ -833,7 +833,8 @@ class StateTtlConfig(object):
 self._strategies[
 StateTtlConfig.CleanupStrategies.Strategies.ROCKSDB_COMPACTION_FILTER] = \
 StateTtlConfig.CleanupStrategies.RocksdbCompactFilterCleanupStrategy(
-query_time_after_num_entries, periodic_compaction_time)
+query_time_after_num_entries,
+periodic_compaction_time if periodic_compaction_time else Duration.of_days(30))
 return self
 
 def disable_cleanup_in_background(self) -> 'StateTtlConfig.Builder':
@@ -925,9 +926,10 @@ class StateTtlConfig(object):
 
 def __init__(self,
  query_time_after_num_entries: int,
- periodic_compaction_time=Duration.of_days(30)):
+ periodic_compaction_time=None):
 self._query_time_after_num_entries = query_time_after_num_entries
-self._periodic_compaction_time = periodic_compaction_time
+self._periodic_compaction_time = periodic_compaction_time \
+if periodic_compaction_time else Duration.of_days(30)
 
 def get_query_time_after_num_entries(self) -> int:
 return self._query_time_after_num_entries



(flink) branch release-1.19 updated (12ea64c0e2a -> 7618bdeeab0)

2024-03-01 Thread tangyun
This is an automated email from the ASF dual-hosted git repository.

tangyun pushed a change to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


from 12ea64c0e2a [FLINK-33436][docs] Add the docs of built-in async-profiler
 new 161defe0bb2 [FLINK-34522][core] Changing the Time to Duration for 
StateTtlConfig.Builder.cleanupInRocksdbCompactFilter
 new 697d2b62a5a [hotfix] Fix the StateTtlConfig#newBuilder doc from Time 
to Duration
 new 7618bdeeab0 [Hotfix] Fix Duration class can't load for pyflink

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../docs/dev/datastream/fault-tolerance/state.md   | 29 +++---
 .../docs/dev/datastream/fault-tolerance/state.md   | 29 +++---
 .../flink/api/common/state/StateTtlConfig.java | 12 -
 .../flink/api/common/state/StateTtlConfigTest.java |  3 ++-
 flink-python/pyflink/datastream/state.py   | 14 ++-
 .../flink/streaming/api/utils/ProtoUtilsTest.java  |  3 ++-
 .../state/ttl/RocksDbTtlCompactFiltersManager.java |  2 +-
 7 files changed, 49 insertions(+), 43 deletions(-)



(flink) 01/03: [FLINK-34522][core] Changing the Time to Duration for StateTtlConfig.Builder.cleanupInRocksdbCompactFilter

2024-03-01 Thread tangyun
This is an automated email from the ASF dual-hosted git repository.

tangyun pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 161defe0bb2dc8136133e07699b6ac433d52dc65
Author: Rui Fan <1996fan...@gmail.com>
AuthorDate: Tue Feb 27 13:17:25 2024 +0800

[FLINK-34522][core] Changing the Time to Duration for 
StateTtlConfig.Builder.cleanupInRocksdbCompactFilter
---
 docs/content.zh/docs/dev/datastream/fault-tolerance/state.md |  9 +
 docs/content/docs/dev/datastream/fault-tolerance/state.md|  9 +
 .../org/apache/flink/api/common/state/StateTtlConfig.java| 12 ++--
 .../apache/flink/api/common/state/StateTtlConfigTest.java|  3 ++-
 flink-python/pyflink/datastream/state.py |  8 
 .../org/apache/flink/streaming/api/utils/ProtoUtilsTest.java |  3 ++-
 .../streaming/state/ttl/RocksDbTtlCompactFiltersManager.java |  2 +-
 7 files changed, 25 insertions(+), 21 deletions(-)
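
For user code, the migration is mechanical: the deprecated `Time` arguments become `java.time.Duration`. A before/after sketch (the TTL and cleanup values are illustrative):

```java
import java.time.Duration;

import org.apache.flink.api.common.state.StateTtlConfig;

public class CompactFilterMigration {
    public static void main(String[] args) {
        // Pre-1.19 style, now deprecated:
        //   StateTtlConfig.newBuilder(Time.seconds(1))
        //       .cleanupInRocksdbCompactFilter(1000, Time.hours(1))
        //       .build();
        StateTtlConfig ttlConfig = StateTtlConfig
                .newBuilder(Duration.ofSeconds(1))
                // run the filter after every 1000 processed state entries and
                // pick files for compaction at least once per hour
                .cleanupInRocksdbCompactFilter(1000, Duration.ofHours(1))
                .build();
        System.out.println(ttlConfig);
    }
}
```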

diff --git a/docs/content.zh/docs/dev/datastream/fault-tolerance/state.md 
b/docs/content.zh/docs/dev/datastream/fault-tolerance/state.md
index c7aaaf0c27f..80f444b13c5 100644
--- a/docs/content.zh/docs/dev/datastream/fault-tolerance/state.md
+++ b/docs/content.zh/docs/dev/datastream/fault-tolerance/state.md
@@ -538,7 +538,7 @@ import org.apache.flink.api.common.state.StateTtlConfig;
 
 StateTtlConfig ttlConfig = StateTtlConfig
 .newBuilder(Time.seconds(1))
-.cleanupInRocksdbCompactFilter(1000, Time.hours(1))
+.cleanupInRocksdbCompactFilter(1000, Duration.ofHours(1))
 .build();
 ```
 {{< /tab >}}
@@ -548,18 +548,19 @@ import org.apache.flink.api.common.state.StateTtlConfig
 
 val ttlConfig = StateTtlConfig
 .newBuilder(Time.seconds(1))
-.cleanupInRocksdbCompactFilter(1000, Time.hours(1))
+.cleanupInRocksdbCompactFilter(1000, Duration.ofHours(1))
 .build
 ```
 {{< /tab >}}
 {{< tab "Python" >}}
 ```python
+from pyflink.common import Duration
 from pyflink.common.time import Time
 from pyflink.datastream.state import StateTtlConfig
 
 ttl_config = StateTtlConfig \
   .new_builder(Time.seconds(1)) \
-  .cleanup_in_rocksdb_compact_filter(1000, Time.hours(1)) \
+  .cleanup_in_rocksdb_compact_filter(1000, Duration.of_hours(1)) \
   .build()
 ```
 {{< /tab >}}
@@ -573,7 +574,7 @@ The default background cleanup of the RocksDB backend runs the filter once every 1000 entries processed
 Periodic compaction can speed up the cleanup of expired state entries, especially state entries that are rarely accessed.
 Files older than this value will be picked up for compaction and re-written to the same level as they were before.
 This feature ensures that files are periodically passed through the compaction filter.
-You can set the periodic compaction time via the `StateTtlConfig.newBuilder(...).cleanupInRocksdbCompactFilter(long queryTimeAfterNumEntries, Time periodicCompactionTime)`
+You can set the periodic compaction time via the `StateTtlConfig.newBuilder(...).cleanupInRocksdbCompactFilter(long queryTimeAfterNumEntries, Duration periodicCompactionTime)`
 method.
 The default periodic compaction time is 30 days.
 You can set it to 0 to turn off periodic compaction, or set a smaller value to speed up the cleanup of expired state entries, but this will trigger more compactions.
diff --git a/docs/content/docs/dev/datastream/fault-tolerance/state.md 
b/docs/content/docs/dev/datastream/fault-tolerance/state.md
index 24f14953b1c..0d5705ee196 100644
--- a/docs/content/docs/dev/datastream/fault-tolerance/state.md
+++ b/docs/content/docs/dev/datastream/fault-tolerance/state.md
@@ -601,7 +601,7 @@ import org.apache.flink.api.common.state.StateTtlConfig;
 
 StateTtlConfig ttlConfig = StateTtlConfig
 .newBuilder(Time.seconds(1))
-.cleanupInRocksdbCompactFilter(1000, Time.hours(1))
+.cleanupInRocksdbCompactFilter(1000, Duration.ofHours(1))
 .build();
 ```
 {{< /tab >}}
@@ -611,18 +611,19 @@ import org.apache.flink.api.common.state.StateTtlConfig
 
 val ttlConfig = StateTtlConfig
 .newBuilder(Time.seconds(1))
-.cleanupInRocksdbCompactFilter(1000, Time.hours(1))
+.cleanupInRocksdbCompactFilter(1000, Duration.ofHours(1))
 .build
 ```
 {{< /tab >}}
 {{< tab "Python" >}}
 ```python
+from pyflink.common import Duration
 from pyflink.common.time import Time
 from pyflink.datastream.state import StateTtlConfig
 
 ttl_config = StateTtlConfig \
   .new_builder(Time.seconds(1)) \
-  .cleanup_in_rocksdb_compact_filter(1000, Time.hours(1)) \
+  .cleanup_in_rocksdb_compact_filter(1000, Duration.of_hours(1)) \
   .build()
 ```
 {{< /tab >}}
@@ -640,7 +641,7 @@ Periodic compaction could speed up expired state entries cleanup, especially for
 Files older than this value will be picked up for compaction, and re-written to the same level as they were before. 
 It makes sure a file goes through compaction filters periodically.
 You can change it and pass a custom value to
-`StateTtlConfig.newBuilder(...).cleanupInRocksdbCompactFilter(long queryTimeAfterNumEntries, Time periodicCompactionTime)` method.
+`StateTtlConfig.newBuilder(...).cleanupInRocksdbCompactFilter(long queryTimeAfterNumEntries, Duration periodicCompactionTime)` method.
 The default value of Periodic compaction seconds is 30 days.
 You could set it to 0 to turn off periodic compaction or set a small value to speed up expired state entries cleanup, but it would trigger more compactions.
diff --git 

(flink-connector-shared-utils) 01/09: [FLINK-34137] Setup CI and archunit for archunit tests skipping

2024-03-01 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch ci_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git

commit 30d0b106eb1df8db2875f24d2353c28920af468e
Author: Etienne Chauchot 
AuthorDate: Mon Oct 9 16:28:45 2023 +0200

[FLINK-34137] Setup CI and archunit for archunit tests skipping
---
 .github/workflows/_testing.yml | 21 --
 .github/workflows/ci.yml   | 10 +
 .../06adde03-b226-46a1-880e-e3f9d0fbcfcb   |  0
 .../2b408df1-117c-43ee-8829-11a33f2f4c3d   |  0
 .../47150079-df0b-4320-b1a9-117804d1d2b9   |  0
 .../61e2247d-4ef0-4327-ad7f-2dc5cd881a4d   |  0
 .../737d7692-7d8e-4a4e-94d4-61e573619ec4   |  0
 .../9193bee2-f400-432f-b664-7ed218451dc9   |  0
 .../a6ff808a-1aeb-4ff3-8fe6-3a5cb49fba71   |  0
 .../acfbc678-b208-4e82-b2c9-09b53dc0eec5   |  0
 archunit-violations/stored.rules   | 10 +
 pom.xml| 36 -
 .../apache/flink/connector/testing/SomeClass.java  | 11 +-
 .../ProductionCodeArchitectureTest.java| 42 
 .../architecture/TestCodeArchitectureTest.java | 46 ++
 src/test/resources/archunit.properties | 38 ++
 16 files changed, 209 insertions(+), 5 deletions(-)
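
The new `skip_archunit_tests` input works purely by test-name convention: surefire drops any class whose name ends in `ArchitectureTest` (first via `-Dtest='!*ArchitectureTest'`, later in this series via the parent pom's `additionalExcludes` property); per the workflow comments, the tests are only meaningful against the Flink version the connector was built with. For context, a minimal sketch of the kind of ArchUnit test that convention matches — the package and rule here are illustrative, not the ones shipped by this commit:

```java
import com.tngtech.archunit.core.domain.JavaClasses;
import com.tngtech.archunit.core.importer.ClassFileImporter;
import com.tngtech.archunit.lang.ArchRule;
import org.junit.jupiter.api.Test;

import static com.tngtech.archunit.lang.syntax.ArchRuleDefinition.noClasses;

// The *ArchitectureTest suffix is what the CI exclusion patterns match on.
public class SomeCodeArchitectureTest {

    @Test
    void productionCodeMustNotDependOnTestUtilities() {
        JavaClasses classes =
                new ClassFileImporter().importPackages("org.apache.flink.connector.testing");
        ArchRule rule = noClasses()
                .that().resideInAPackage("..connector..")
                .should().dependOnClassesThat().resideInAPackage("..testutils..");
        rule.check(classes);
    }
}
```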

diff --git a/.github/workflows/_testing.yml b/.github/workflows/_testing.yml
index aca4d74..84eb71d 100644
--- a/.github/workflows/_testing.yml
+++ b/.github/workflows/_testing.yml
@@ -25,13 +25,28 @@ jobs:
   specific-version:
 uses: ./.github/workflows/ci.yml
 with:
-  flink_version: 1.16.1
+  flink_version: 1.17.1
   connector_branch: ci_utils
-  snapshot-version:
+  # we can test connectors against flink snapshots but enable archunit tests only when tested against
+  # the flink version used for the connector build.
+  snapshot-without-archunit-tests:
 uses: ./.github/workflows/ci.yml
 with:
   flink_version: 1.16-SNAPSHOT
-  connector_branch: ci_utils
+  skip_archunit_tests: true
+  # we need to test the connectors against last 2 flink major versions.
+  # let's say the connector under test was built against flink 1.17.1, disable the archunit tests when tested against 1.16.2
+  non-main-version-without-archunit-tests:
+uses: ./.github/workflows/ci.yml
+with:
+  flink_version: 1.16.2
+  skip_archunit_tests: true
+  # we need to test the connectors against last 2 flink major versions.
+  # let's say the connector under test was built against flink 1.17.1, leave the archunit tests enabled when tested against 1.17.1
+  main-version-with-archunit-tests:
+uses: ./.github/workflows/ci.yml
+with:
+  flink_version: 1.17.1
   flink118-java17-version:
 uses: ./.github/workflows/ci.yml
 with:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 418998d..bc12766 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -51,6 +51,11 @@ on:
 required: false
 type: boolean
 default: true
+  skip_archunit_tests:
+description: "Whether to skip the archunit tests"
+required: false
+type: boolean
+default: false
   connector_branch:
 description: "Branch that need to be checked out"
 required: false
@@ -101,6 +106,10 @@ jobs:
 if: ${{ inputs.optional_maven_profiles }}
 run: echo "MVN_COMMON_OPTIONS=${MVN_COMMON_OPTIONS} -P ${{ inputs.optional_maven_profiles }}" >> $GITHUB_ENV
 
+  - name: "Disable archunit tests"
+if: ${{ inputs.skip_archunit_tests }}
-run: echo "MVN_ARCHUNIT_TESTS=-Dtest='!*ArchitectureTest'" >> $GITHUB_ENV
+
   - name: "Determine Flink binary url"
 run: |
   binary_url=${{ inputs.flink_url }}
@@ -159,6 +168,7 @@ jobs:
 -Dscala-2.12 \
 -Prun-end-to-end-tests -DdistDir=${{ env.FLINK_CACHE_DIR }}/flink-${{ inputs.flink_version }} \
 ${{ env.MVN_DEPENDENCY_CONVERGENCE }} \
+${{ env.MVN_ARCHUNIT_TESTS }} \
 ${{ env.MVN_CONNECTION_OPTIONS }} \
 -Dlog4j.configurationFile=file://$(pwd)/tools/ci/log4j.properties \
 | tee ${{ env.MVN_BUILD_OUTPUT_FILE }}
diff --git a/archunit-violations/06adde03-b226-46a1-880e-e3f9d0fbcfcb 
b/archunit-violations/06adde03-b226-46a1-880e-e3f9d0fbcfcb
new file mode 100644
index 000..e69de29
diff --git a/archunit-violations/2b408df1-117c-43ee-8829-11a33f2f4c3d 
b/archunit-violations/2b408df1-117c-43ee-8829-11a33f2f4c3d
new file mode 100644
index 000..e69de29
diff --git a/archunit-violations/47150079-df0b-4320-b1a9-117804d1d2b9 
b/archunit-violations/47150079-df0b-4320-b1a9-117804d1d2b9
new file mode 100644
index 000..e69de29
diff --git a/archunit-violations/61e2247d-4ef0-4327-ad7f-2dc5cd881a4d 

(flink-connector-shared-utils) 02/09: [FLINK-34137] Update test-project parent to regular flink-connector-parent

2024-03-01 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch ci_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git

commit 4c3b7bb7dc35970c7e8a35c49596795af5d22a1b
Author: Etienne Chauchot 
AuthorDate: Mon Nov 6 13:22:58 2023 +0100

[FLINK-34137] Update test-project parent to regular flink-connector-parent
---
 pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pom.xml b/pom.xml
index 7544839..a32ad20 100644
--- a/pom.xml
+++ b/pom.xml
@@ -20,9 +20,9 @@ under the License.
 xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">

 	<parent>
-		<groupId>io.github.zentol.flink</groupId>
+		<groupId>org.apache.flink</groupId>
 		<artifactId>flink-connector-parent</artifactId>
-		<version>1.0</version>
+		<version>1.0.0</version>
 	</parent>

 	<modelVersion>4.0.0</modelVersion>



(flink-connector-shared-utils) 05/09: [FLINK-34137] Add a simple non-violated test rule for production and test code and simplify dependencies

2024-03-01 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch ci_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git

commit 5786a05331ec1916078c80117599869eccf0da37
Author: Etienne Chauchot 
AuthorDate: Tue Nov 21 16:16:04 2023 +0100

[FLINK-34137] Add a simple non-violated test rule for production and test 
code and simplify dependencies
---
 module-with-no-tests/pom.xml   |  1 +
 .../apache/flink/connector/testing/SomeClass.java  |  4 +-
 ...bf549e => 09379350-830e-42a3-8b31-afde18cf1eb4} |  0
 .../25d7c950-4d90-4419-b9af-78a9001796ce   |  0
 .../2c5e2ae5-0e19-423f-9501-d00b090b9d0e   |  0
 .../3e3429f1-b715-4057-91ce-f859c4c07245   |  0
 .../662c8ab6-14a2-4f59-bacf-3e7a09427662   |  0
 .../a2f52e44-eb11-4165-8a96-418e754ac1c6   |  0
 .../b5a693ee-ab65-41b7-9764-cbe027c4ee12   |  0
 .../f3351a4e-bc84-4164-aba1-3d729855e431   |  0
 module-with-tests/archunit-violations/stored.rules | 11 +---
 module-with-tests/pom.xml  | 19 ++
 .../connector/testing/SomeConnectorClass.java  |  5 +-
 ...eClassTest.java => SomeConnectorClassTest.java} |  2 +-
 ...> ConnectorProductionCodeArchitectureTest.java} | 27 
 ...java => ConnectorTestCodeArchitectureTest.java} | 25 
 pom.xml| 75 ++
 17 files changed, 89 insertions(+), 80 deletions(-)

diff --git a/module-with-no-tests/pom.xml b/module-with-no-tests/pom.xml
index da0c156..ba65984 100644
--- a/module-with-no-tests/pom.xml
+++ b/module-with-no-tests/pom.xml
@@ -3,6 +3,7 @@
 xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
+	
 	<parent>
 		<groupId>org.apache.flink</groupId>
 		<artifactId>test-project-parent</artifactId>
diff --git 
a/module-with-tests/src/main/java/org/apache/flink/connector/testing/SomeClass.java
 
b/module-with-no-tests/src/main/java/org/apache/flink/connector/testing/SomeClass.java
similarity index 92%
rename from 
module-with-tests/src/main/java/org/apache/flink/connector/testing/SomeClass.java
rename to 
module-with-no-tests/src/main/java/org/apache/flink/connector/testing/SomeClass.java
index f29e5e0..36da836 100644
--- 
a/module-with-tests/src/main/java/org/apache/flink/connector/testing/SomeClass.java
+++ 
b/module-with-no-tests/src/main/java/org/apache/flink/connector/testing/SomeClass.java
@@ -19,6 +19,4 @@
 package org.apache.flink.connector.testing;
 
 /** A dummy class; only exists so that the compile/packaging plugins have 
something to do. */
-public class SomeClass {
-public static void main(String[] args) {}
-}
+public class SomeClass {}
diff --git 
a/module-with-tests/archunit-violations/1758f718-5b06-442d-8b5a-ad347bbf549e 
b/module-with-tests/archunit-violations/09379350-830e-42a3-8b31-afde18cf1eb4
similarity index 100%
rename from 
module-with-tests/archunit-violations/1758f718-5b06-442d-8b5a-ad347bbf549e
rename to 
module-with-tests/archunit-violations/09379350-830e-42a3-8b31-afde18cf1eb4
diff --git 
a/module-with-tests/archunit-violations/25d7c950-4d90-4419-b9af-78a9001796ce 
b/module-with-tests/archunit-violations/25d7c950-4d90-4419-b9af-78a9001796ce
deleted file mode 100644
index e69de29..000
diff --git 
a/module-with-tests/archunit-violations/2c5e2ae5-0e19-423f-9501-d00b090b9d0e 
b/module-with-tests/archunit-violations/2c5e2ae5-0e19-423f-9501-d00b090b9d0e
deleted file mode 100644
index e69de29..000
diff --git 
a/module-with-tests/archunit-violations/3e3429f1-b715-4057-91ce-f859c4c07245 
b/module-with-tests/archunit-violations/3e3429f1-b715-4057-91ce-f859c4c07245
deleted file mode 100644
index e69de29..000
diff --git 
a/module-with-tests/archunit-violations/662c8ab6-14a2-4f59-bacf-3e7a09427662 
b/module-with-tests/archunit-violations/662c8ab6-14a2-4f59-bacf-3e7a09427662
deleted file mode 100644
index e69de29..000
diff --git 
a/module-with-tests/archunit-violations/a2f52e44-eb11-4165-8a96-418e754ac1c6 
b/module-with-tests/archunit-violations/a2f52e44-eb11-4165-8a96-418e754ac1c6
deleted file mode 100644
index e69de29..000
diff --git 
a/module-with-tests/archunit-violations/b5a693ee-ab65-41b7-9764-cbe027c4ee12 
b/module-with-tests/archunit-violations/b5a693ee-ab65-41b7-9764-cbe027c4ee12
deleted file mode 100644
index e69de29..000
diff --git 
a/module-with-tests/archunit-violations/f3351a4e-bc84-4164-aba1-3d729855e431 
b/module-with-tests/archunit-violations/f3351a4e-bc84-4164-aba1-3d729855e431
deleted file mode 100644
index e69de29..000
diff --git a/module-with-tests/archunit-violations/stored.rules 
b/module-with-tests/archunit-violations/stored.rules
index e6fc6f1..bd30533 100644
--- a/module-with-tests/archunit-violations/stored.rules
+++ b/module-with-tests/archunit-violations/stored.rules

(flink-connector-shared-utils) branch ci_utils updated (ec54606 -> b5ad097)

2024-03-01 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a change to branch ci_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git


from ec54606  [FLINK-34314] Update CI Node Actions from NodeJS 16 to NodeJS 
20. This closes  (#35)
 new 30d0b10  [FLINK-34137] Setup CI and archunit for archunit tests 
skipping
 new 4c3b7bb  [FLINK-34137] Update test-project parent to regular 
flink-connector-parent
 new 866c7b4  [FLINK-34137] Use additionalExcludes property to exclude 
archunit tests
 new 2868ea1  [FLINK-34137] Create a parent and 2 sub modules: one with 
tests, and one with no tests
 new 5786a05  [FLINK-34137] Add a simple non-violated test rule for 
production and test code and simplify dependencies
 new e6e2136  [FLINK-34137] Bump to flink 1.17.2
 new e9f50c1  [FLINK-34137] Use flink-connector-parent v1.1.0 with surefire 
tests exclusions
 new ad16ea8  [FLINK-34137] exclude junit-platform-engine archunit 
transitive dep for dependency convergence
 new b5ad097  [FLINK-34137] upgrade all flinks versions in CI: 1.16.x -> 
1.17.x, 1.17.x -> 1.18.x

The 9 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .github/workflows/_testing.yml | 33 
 .github/workflows/ci.yml   | 10 
 .gitignore |  4 +-
 .gitignore => module-with-no-tests/.gitignore  |  5 --
 module-with-no-tests/pom.xml   | 14 +
 .../apache/flink/connector/testing/SomeClass.java  |  0
 .gitignore => module-with-tests/.gitignore |  5 --
 .../09379350-830e-42a3-8b31-afde18cf1eb4   |  0
 module-with-tests/archunit-violations/stored.rules |  3 ++
 module-with-tests/pom.xml  | 33 
 .../connector/testing/SomeConnectorClass.java  |  2 +-
 .../connector/testing/SomeConnectorClassTest.java  |  2 +-
 .../ConnectorProductionCodeArchitectureTest.java   | 47 
 .../ConnectorTestCodeArchitectureTest.java | 44 +++
 .../src/test/resources/archunit.properties | 38 +
 pom.xml| 62 +-
 16 files changed, 268 insertions(+), 34 deletions(-)
 copy .gitignore => module-with-no-tests/.gitignore (82%)
 create mode 100644 module-with-no-tests/pom.xml
 copy {src => 
module-with-no-tests/src}/main/java/org/apache/flink/connector/testing/SomeClass.java
 (100%)
 copy .gitignore => module-with-tests/.gitignore (82%)
 create mode 100644 
module-with-tests/archunit-violations/09379350-830e-42a3-8b31-afde18cf1eb4
 create mode 100644 module-with-tests/archunit-violations/stored.rules
 create mode 100644 module-with-tests/pom.xml
 rename src/main/java/org/apache/flink/connector/testing/SomeClass.java => 
module-with-tests/src/main/java/org/apache/flink/connector/testing/SomeConnectorClass.java
 (96%)
 rename src/test/java/org/apache/flink/connector/testing/SomeClassTest.java => 
module-with-tests/src/test/java/org/apache/flink/connector/testing/SomeConnectorClassTest.java
 (96%)
 create mode 100644 
module-with-tests/src/test/java/org/apache/flink/connector/testing/architecture/ConnectorProductionCodeArchitectureTest.java
 create mode 100644 
module-with-tests/src/test/java/org/apache/flink/connector/testing/architecture/ConnectorTestCodeArchitectureTest.java
 create mode 100644 module-with-tests/src/test/resources/archunit.properties



(flink-connector-shared-utils) 03/09: [FLINK-34137] Use additionalExcludes property to exclude archunit tests

2024-03-01 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch ci_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git

commit 866c7b426506a30f1cdc11a6e0c9d247c2eb06d1
Author: Etienne Chauchot 
AuthorDate: Mon Nov 6 17:22:52 2023 +0100

[FLINK-34137] Use additionalExcludes property to exclude archunit tests
---
 .github/workflows/ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bc12766..462dade 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -108,7 +108,7 @@ jobs:
 
   - name: "Disable archunit tests"
 if: ${{ inputs.skip_archunit_tests }}
-run: echo "MVN_ARCHUNIT_TESTS=-Dtest='!*ArchitectureTest'" >> $GITHUB_ENV
+run: echo "MVN_ARCHUNIT_TESTS=-DadditionalExcludes=**/*ArchitectureTest.java" >> $GITHUB_ENV
 
   - name: "Determine Flink binary url"
 run: |



(flink-connector-shared-utils) 04/09: [FLINK-34137] Create a parent and 2 sub modules: one with tests, and one with no tests

2024-03-01 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch ci_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git

commit 2868ea13a2a287f11b65417ec308382d974faa81
Author: Etienne Chauchot 
AuthorDate: Mon Nov 13 16:41:15 2023 +0100

[FLINK-34137] Create a parent and 2 sub modules: one with tests, and one 
with no tests
---
 .gitignore |  4 +++-
 archunit-violations/stored.rules   | 10 --
 .gitignore => module-with-no-tests/.gitignore  |  5 -
 module-with-no-tests/pom.xml   | 13 +
 .../org/apache/flink/connector/testing/SomeClass2.java |  8 +++-
 .gitignore => module-with-tests/.gitignore |  5 -
 .../1758f718-5b06-442d-8b5a-ad347bbf549e   |  0
 .../25d7c950-4d90-4419-b9af-78a9001796ce   |  0
 .../2c5e2ae5-0e19-423f-9501-d00b090b9d0e   |  0
 .../3e3429f1-b715-4057-91ce-f859c4c07245   |  0
 .../662c8ab6-14a2-4f59-bacf-3e7a09427662   |  0
 .../a2f52e44-eb11-4165-8a96-418e754ac1c6   |  0
 .../b5a693ee-ab65-41b7-9764-cbe027c4ee12   |  0
 .../f3351a4e-bc84-4164-aba1-3d729855e431   |  0
 module-with-tests/archunit-violations/stored.rules | 10 ++
 module-with-tests/pom.xml  | 14 ++
 .../java/org/apache/flink/connector/testing/SomeClass.java |  9 +
 .../org/apache/flink/connector/testing/SomeClassTest.java  |  0
 .../architecture/ProductionCodeArchitectureTest.java   |  0
 .../testing/architecture/TestCodeArchitectureTest.java |  7 +--
 .../src}/test/resources/archunit.properties|  0
 pom.xml|  8 ++--
 22 files changed, 51 insertions(+), 42 deletions(-)

diff --git a/.gitignore b/.gitignore
index d6073bc..4e77b1e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,8 @@ scalastyle-output.xml
 .idea
 !.idea/vcs.xml
 .metadata
+module-with-no-tests/.metadata
+module-with-tests/.metadata
 .settings
 .project
 .version.properties
@@ -35,4 +37,4 @@ out/
 tools/flink
 tools/flink-*
 tools/releasing/release
-tools/japicmp-output
\ No newline at end of file
+tools/japicmp-output
diff --git a/archunit-violations/stored.rules b/archunit-violations/stored.rules
deleted file mode 100644
index 4c2eeb9..000
--- a/archunit-violations/stored.rules
+++ /dev/null
@@ -1,10 +0,0 @@
-#
-#Wed Oct 11 15:35:58 CEST 2023
-Return\ and\ argument\ types\ of\ methods\ annotated\ with\ @Public\ must\ be\ 
annotated\ with\ @Public.=2b408df1-117c-43ee-8829-11a33f2f4c3d
-Connector\ production\ code\ must\ not\ depend\ on\ non-public\ API\ outside\ 
of\ connector\ packages=47150079-df0b-4320-b1a9-117804d1d2b9
-ITCASE\ tests\ should\ use\ a\ MiniCluster\ resource\ or\ 
extension=acfbc678-b208-4e82-b2c9-09b53dc0eec5
-Production\ code\ must\ not\ call\ methods\ annotated\ with\ 
@VisibleForTesting=a6ff808a-1aeb-4ff3-8fe6-3a5cb49fba71
-Tests\ inheriting\ from\ AbstractTestBase\ should\ have\ name\ ending\ with\ 
ITCase=06adde03-b226-46a1-880e-e3f9d0fbcfcb
-Options\ for\ connectors\ and\ formats\ should\ reside\ in\ a\ consistent\ 
package\ and\ be\ public\ API.=737d7692-7d8e-4a4e-94d4-61e573619ec4
-Return\ and\ argument\ types\ of\ methods\ annotated\ with\ @PublicEvolving\ 
must\ be\ annotated\ with\ 
@Public(Evolving).=61e2247d-4ef0-4327-ad7f-2dc5cd881a4d
-Classes\ in\ API\ packages\ should\ have\ at\ least\ one\ API\ visibility\ 
annotation.=9193bee2-f400-432f-b664-7ed218451dc9
diff --git a/.gitignore b/module-with-no-tests/.gitignore
similarity index 82%
copy from .gitignore
copy to module-with-no-tests/.gitignore
index d6073bc..78fd2bd 100644
--- a/.gitignore
+++ b/module-with-no-tests/.gitignore
@@ -4,7 +4,6 @@ scalastyle-output.xml
 .classpath
 .idea
 !.idea/vcs.xml
-.metadata
 .settings
 .project
 .version.properties
@@ -32,7 +31,3 @@ out/
 /docs/.jekyll-metadata
 *.ipr
 *.iws
-tools/flink
-tools/flink-*
-tools/releasing/release
-tools/japicmp-output
\ No newline at end of file
diff --git a/module-with-no-tests/pom.xml b/module-with-no-tests/pom.xml
new file mode 100644
index 000..da0c156
--- /dev/null
+++ b/module-with-no-tests/pom.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+		xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+		xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>org.apache.flink</groupId>
+		<artifactId>test-project-parent</artifactId>
+		<version>1.0</version>
+	</parent>
+
+	<artifactId>module-with-no-tests</artifactId>
+</project>
diff --git 
a/src/test/java/org/apache/flink/connector/testing/SomeClassTest.java 
b/module-with-no-tests/src/main/java/org/apache/flink/connector/testing/SomeClass2.java
similarity index 83%

(flink-connector-shared-utils) 08/09: [FLINK-34137] exclude junit-platform-engine archunit transitive dep for dependency convergence

2024-03-01 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch ci_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git

commit ad16ea8211f9c28a7540b9bf98a27a92860e4b39
Author: Etienne Chauchot 
AuthorDate: Thu Feb 29 11:06:40 2024 +0100

[FLINK-34137] exclude junit-platform-engine archunit transitive dep for 
dependency convergence
---
 pom.xml | 7 +++
 1 file changed, 7 insertions(+)

diff --git a/pom.xml b/pom.xml
index 534ef1d..2fc1e1b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -87,6 +87,13 @@ under the License.
 	<artifactId>archunit-junit5</artifactId>
 	<version>${archunit.version}</version>
 	<scope>test</scope>
+
+	<exclusions>
+		<exclusion>
+			<groupId>org.junit.platform</groupId>
+			<artifactId>junit-platform-engine</artifactId>
+		</exclusion>
+	</exclusions>

 




(flink-connector-shared-utils) 06/09: [FLINK-34137] Bump to flink 1.17.2

2024-03-01 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch ci_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git

commit e6e2136fb44c01e5ebe2e2ac57eb4361073343f2
Author: Etienne Chauchot 
AuthorDate: Wed Nov 29 10:59:12 2023 +0100

[FLINK-34137] Bump to flink 1.17.2
---
 .github/workflows/_testing.yml | 10 +-
 pom.xml|  2 +-
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/_testing.yml b/.github/workflows/_testing.yml
index 84eb71d..f7b2534 100644
--- a/.github/workflows/_testing.yml
+++ b/.github/workflows/_testing.yml
@@ -25,7 +25,7 @@ jobs:
   specific-version:
 uses: ./.github/workflows/ci.yml
 with:
-  flink_version: 1.17.1
+  flink_version: 1.17.2
   connector_branch: ci_utils
   # we can test connectors against flink snapshots but enable archunit tests only when tested against
   # the flink version used for the connector build.
@@ -35,18 +35,18 @@ jobs:
   flink_version: 1.16-SNAPSHOT
   skip_archunit_tests: true
   # we need to test the connectors against last 2 flink major versions.
-  # let's say the connector under test was built against flink 1.17.1, disable the archunit tests when tested against 1.16.2
+  # let's say the connector under test was built against flink 1.17.2, disable the archunit tests when tested against 1.16.2
   non-main-version-without-archunit-tests:
 uses: ./.github/workflows/ci.yml
 with:
   flink_version: 1.16.2
   skip_archunit_tests: true
   # we need to test the connectors against last 2 flink major versions.
-  # let's say the connector under test was built against flink 1.17.1, leave the archunit tests enabled when tested against 1.17.1
+  # let's say the connector under test was built against flink 1.17.2, leave the archunit tests enabled when tested against 1.17.2
   main-version-with-archunit-tests:
 uses: ./.github/workflows/ci.yml
 with:
-  flink_version: 1.17.1
+  flink_version: 1.17.2
   flink118-java17-version:
 uses: ./.github/workflows/ci.yml
 with:
@@ -74,7 +74,7 @@ jobs:
   flink: 1.16.1
 #  By not specifying a branch, it should default to the branch that triggers this workflow
 }, {
-  flink: 1.17.1,
+  flink: 1.17.2,
 #  By specifying this branch, it should check out this specified branch
   branch: ci_utils
 }, {
diff --git a/pom.xml b/pom.xml
index 046c5e1..8de083e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -38,7 +38,7 @@ under the License.
pom
 

-   1.17.1
+   1.17.2
1.16.0
1.0.0
1.7.36



(flink-connector-shared-utils) 07/09: [FLINK-34137] Use flink-connector-parent v1.1.0 with surefire tests exclusions

2024-03-01 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch ci_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git

commit e9f50c1de5918f525bf913bc438f3bee9fb07b08
Author: Etienne Chauchot 
AuthorDate: Thu Jan 18 11:42:49 2024 +0100

[FLINK-34137] Use flink-connector-parent v1.1.0 with surefire tests 
exclusions
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 8de083e..534ef1d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,7 +22,7 @@ under the License.
 	<parent>
 		<groupId>org.apache.flink</groupId>
 		<artifactId>flink-connector-parent</artifactId>
-		<version>1.0.0</version>
+		<version>1.1.0</version>
 	</parent>
 




(flink-connector-shared-utils) 09/09: [FLINK-34137] upgrade all flinks versions in CI: 1.16.x -> 1.17.x, 1.17.x -> 1.18.x

2024-03-01 Thread echauchot
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch ci_utils
in repository 
https://gitbox.apache.org/repos/asf/flink-connector-shared-utils.git

commit b5ad097df25973ad05d0d6af0f988b65b0d8cd22
Author: Etienne Chauchot 
AuthorDate: Fri Mar 1 11:23:54 2024 +0100

[FLINK-34137] upgrade all flinks versions in CI: 1.16.x -> 1.17.x, 1.17.x 
-> 1.18.x
---
 .github/workflows/_testing.yml | 18 +-
 pom.xml|  2 +-
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/.github/workflows/_testing.yml b/.github/workflows/_testing.yml
index f7b2534..d45b664 100644
--- a/.github/workflows/_testing.yml
+++ b/.github/workflows/_testing.yml
@@ -32,21 +32,21 @@ jobs:
   snapshot-without-archunit-tests:
 uses: ./.github/workflows/ci.yml
 with:
-  flink_version: 1.16-SNAPSHOT
+  flink_version: 1.17-SNAPSHOT
   skip_archunit_tests: true
   # we need to test the connectors against last 2 flink major versions.
-  # let's say the connector under test was built against flink 1.17.2, disable the archunit tests when tested against 1.16.2
+  # let's say the connector under test was built against flink 1.18.0, disable the archunit tests when tested against 1.17.2
   non-main-version-without-archunit-tests:
 uses: ./.github/workflows/ci.yml
 with:
-  flink_version: 1.16.2
+  flink_version: 1.17.2
   skip_archunit_tests: true
   # we need to test the connectors against last 2 flink major versions.
-  # let's say the connector under test was built against flink 1.17.2, leave the archunit tests enabled when tested against 1.17.2
+  # let's say the connector under test was built against flink 1.18.0, leave the archunit tests enabled when tested against 1.18.0
   main-version-with-archunit-tests:
 uses: ./.github/workflows/ci.yml
 with:
-  flink_version: 1.17.2
+  flink_version: 1.18.0
   flink118-java17-version:
 uses: ./.github/workflows/ci.yml
 with:
@@ -57,13 +57,13 @@ jobs:
   disable-convergence:
 uses: ./.github/workflows/ci.yml
 with:
-  flink_version: 1.16.1
+  flink_version: 1.17.2
   connector_branch: ci_utils
   run_dependency_convergence: false
   optional_maven_profile:
 uses: ./.github/workflows/ci.yml
 with:
-  flink_version: 1.16-SNAPSHOT
+  flink_version: 1.17-SNAPSHOT
   connector_branch: ci_utils
   jdk_version: 11
   optional_maven_profiles: "java11,java11-target"
@@ -71,14 +71,14 @@ jobs:
 strategy:
   matrix:
 flink_branches: [{
-  flink: 1.16.1
+  flink: 1.17.2
 #  By not specifying a branch, it should default to the branch that triggers this workflow
 }, {
   flink: 1.17.2,
 #  By specifying this branch, it should check out this specified branch
   branch: ci_utils
 }, {
-  flink: 1.16.1,
+  flink: 1.17.2,
 #  By specifying a different branch then on L49, it should check out 
this specified branch
   branch: test_project
 }]
diff --git a/pom.xml b/pom.xml
index 2fc1e1b..0a4d4c8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -38,7 +38,7 @@ under the License.
pom
 

-   1.17.2
+   1.18.0
1.16.0
1.0.0
1.7.36



(flink) branch master updated: [FLINK-34492][table] Fix comment link when migrate calcite rules from scala to java

2024-03-01 Thread jiabaosun
This is an automated email from the ASF dual-hosted git repository.

jiabaosun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 46cbf22147d [FLINK-34492][table] Fix comment link when migrate calcite 
rules from scala to java
46cbf22147d is described below

commit 46cbf22147d783fb68f77fad95161dc5ef036c96
Author: Jacky Lau 
AuthorDate: Fri Mar 1 17:53:41 2024 +0800

[FLINK-34492][table] Fix comment link when migrate calcite rules from scala 
to java
---
 .../planner/plan/rules/logical/FlinkCalcMergeRule.java| 15 +--
 .../planner/plan/rules/logical/FlinkLimit0RemoveRule.java |  2 +-
 .../logical/JoinTableFunctionScanToCorrelateRule.java |  5 -
 3 files changed, 14 insertions(+), 8 deletions(-)
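
The underlying issue: these classes were ported from Scala, and scaladoc's `[[Type]]` link syntax means nothing to javadoc, which needs `{@link}` references. A minimal sketch of the corrected style (the class here is illustrative):

```java
/**
 * Planner rule that merges a {@link org.apache.calcite.rel.core.Calc} onto a
 * {@link org.apache.calcite.rel.core.Calc}. (A scaladoc-style [[Calc]] link
 * would render as literal brackets in generated javadoc.)
 */
public class ExampleMergeRule {}
```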

diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkCalcMergeRule.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkCalcMergeRule.java
index f82a1bcf188..61e01715b50 100644
--- 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkCalcMergeRule.java
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkCalcMergeRule.java
@@ -30,18 +30,21 @@ import org.apache.calcite.rex.RexProgram;
 import org.immutables.value.Value;
 
 /**
- * This rule is copied from Calcite's 
[[org.apache.calcite.rel.rules.CalcMergeRule]].
+ * This rule is copied from Calcite's {@link 
org.apache.calcite.rel.rules.CalcMergeRule}.
  *
  * Modification: - Condition in the merged program will be simplified if it 
exists. - If the two
- * [[Calc]] can merge into one, each non-deterministic [[RexNode]] of bottom 
[[Calc]] should appear
- * at most once in the project list and filter list of top [[Calc]].
+ * {@link org.apache.calcite.rel.core.Calc} can merge into one, each 
non-deterministic {@link
+ * org.apache.calcite.rex.RexNode} of bottom {@link 
org.apache.calcite.rel.core.Calc} should appear
+ * at most once in the project list and filter list of top {@link 
org.apache.calcite.rel.core.Calc}.
  */
 
 /**
- * Planner rule that merges a [[Calc]] onto a [[Calc]].
+ * Planner rule that merges a {@link org.apache.calcite.rel.core.Calc} onto a 
{@link
+ * org.apache.calcite.rel.core.Calc}.
  *
- * The resulting [[Calc]] has the same project list as the upper [[Calc]], 
but expressed in terms
- * of the lower [[Calc]]'s inputs.
+ * The resulting {@link org.apache.calcite.rel.core.Calc} has the same 
project list as the upper
+ * {@link org.apache.calcite.rel.core.Calc}, but expressed in terms of the 
lower {@link
+ * org.apache.calcite.rel.core.Calc}'s inputs.
  */
 @Value.Enclosing
 public class FlinkCalcMergeRule extends 
RelRule {
diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkLimit0RemoveRule.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkLimit0RemoveRule.java
index 59d2b1d9591..bb9654f5df5 100644
--- 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkLimit0RemoveRule.java
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkLimit0RemoveRule.java
@@ -25,7 +25,7 @@ import org.apache.calcite.rel.core.Sort;
 import org.apache.calcite.rex.RexLiteral;
 import org.immutables.value.Value;
 
-/** Planner rule that rewrites `limit 0` to empty 
[[org.apache.calcite.rel.core.Values]]. */
+/** Planner rule that rewrites `limit 0` to empty {@link 
org.apache.calcite.rel.core.Values}. */
 @Value.Enclosing
 public class FlinkLimit0RemoveRule
 extends RelRule {
diff --git 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/JoinTableFunctionScanToCorrelateRule.java
 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/JoinTableFunctionScanToCorrelateRule.java
index d6f8752b352..b14054906c1 100644
--- 
a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/JoinTableFunctionScanToCorrelateRule.java
+++ 
b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/JoinTableFunctionScanToCorrelateRule.java
@@ -25,7 +25,10 @@ import org.apache.calcite.rel.logical.LogicalJoin;
 import org.apache.calcite.rel.logical.LogicalTableFunctionScan;
 import org.immutables.value.Value;
 
-/** Rule that rewrites Join on TableFunctionScan to Correlate. */
+/**
+ * Rule that rewrites {@link org.apache.calcite.rel.core.Join} on {@link
+ * org.apache.calcite.rel.core.TableFunctionScan} to {@link 
org.apache.calcite.rel.core.Correlate}.
+ */
 @Value.Enclosing
 public class 

(flink) branch master updated: [FLINK-34546] Emit span with failure labels on failure.

2024-03-01 Thread srichter
This is an automated email from the ASF dual-hosted git repository.

srichter pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 7c8e3f5a0c3 [FLINK-34546] Emit span with failure labels on failure.
7c8e3f5a0c3 is described below

commit 7c8e3f5a0c39f9a82c5549925035344c5d27cb98
Author: Stefan Richter 
AuthorDate: Thu Feb 29 10:26:02 2024 +0100

[FLINK-34546] Emit span with failure labels on failure.
---
 .../apache/flink/configuration/TraceOptions.java   | 14 +
 .../failover/ExecutionFailureHandler.java  | 65 +-
 .../flink/runtime/scheduler/DefaultScheduler.java  |  4 +-
 .../failover/ExecutionFailureHandlerTest.java  | 39 -
 4 files changed, 117 insertions(+), 5 deletions(-)
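
`REPORT_EVENTS_AS_SPANS` is a plain boolean `ConfigOption`, so it can be set and read like any other Flink option; per the diff below, the failure handler snapshots it from the job master configuration at construction time. A small sketch of the option round-trip (the surrounding class is illustrative):

```java
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.TraceOptions;

public class ReadTraceOption {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Temporary flag, per the diff: report failure events as spans until
        // first-class event reporting exists.
        conf.set(TraceOptions.REPORT_EVENTS_AS_SPANS, true);
        boolean reportEventsAsSpans = conf.get(TraceOptions.REPORT_EVENTS_AS_SPANS);
        System.out.println(reportEventsAsSpans); // true
    }
}
```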

diff --git 
a/flink-core/src/main/java/org/apache/flink/configuration/TraceOptions.java 
b/flink-core/src/main/java/org/apache/flink/configuration/TraceOptions.java
index 1aee746e210..a7e84192dea 100644
--- a/flink-core/src/main/java/org/apache/flink/configuration/TraceOptions.java
+++ b/flink-core/src/main/java/org/apache/flink/configuration/TraceOptions.java
@@ -56,6 +56,20 @@ public class TraceOptions {
 + " any of the names in the list will be 
started. Otherwise, all reporters that could be found in"
 + " the configuration will be started.");
 
+/**
+ * Temporary option to report events as span. This option will be removed 
once we support
+ * reporting events.
+ */
+@Deprecated
+public static final ConfigOption<Boolean> REPORT_EVENTS_AS_SPANS =
+key("traces.report-events-as-spans")
+.booleanType()
+.defaultValue(false)
+.withDescription(
+"Whether to report events as spans. This is a 
temporary parameter that "
++ "is in place until we have support for 
reporting events. "
++ "In the meantime, this can be activated 
to report them as spans instead.");
+
 /**
  * Returns a view over the given configuration via which options can be 
set/retrieved for the
  * given reporter.
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/failover/ExecutionFailureHandler.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/failover/ExecutionFailureHandler.java
index aed330de522..3d36a9e6bff 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/failover/ExecutionFailureHandler.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/failover/ExecutionFailureHandler.java
@@ -17,8 +17,11 @@
 
 package org.apache.flink.runtime.executiongraph.failover;
 
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.TraceOptions;
 import org.apache.flink.core.failure.FailureEnricher;
 import org.apache.flink.core.failure.FailureEnricher.Context;
+import org.apache.flink.metrics.MetricGroup;
 import org.apache.flink.runtime.JobException;
 import org.apache.flink.runtime.concurrent.ComponentMainThreadExecutor;
 import org.apache.flink.runtime.executiongraph.Execution;
@@ -28,6 +31,8 @@ import org.apache.flink.runtime.scheduler.strategy.SchedulingExecutionVertex;
 import org.apache.flink.runtime.scheduler.strategy.SchedulingTopology;
 import org.apache.flink.runtime.throwable.ThrowableClassifier;
 import org.apache.flink.runtime.throwable.ThrowableType;
+import org.apache.flink.traces.Span;
+import org.apache.flink.traces.SpanBuilder;
 import org.apache.flink.util.IterableUtils;
 
 import javax.annotation.Nullable;
@@ -47,6 +52,8 @@ import static org.apache.flink.util.Preconditions.checkNotNull;
  */
 public class ExecutionFailureHandler {
 
+public static final String FAILURE_LABEL_ATTRIBUTE_PREFIX = 
"failureLabel.";
+
 private final SchedulingTopology schedulingTopology;
 
 /** Strategy to judge which tasks should be restarted. */
@@ -62,6 +69,9 @@ public class ExecutionFailureHandler {
 private final Context globalFailureCtx;
 private final Collection<FailureEnricher> failureEnrichers;
 private final ComponentMainThreadExecutor mainThreadExecutor;
+private final MetricGroup metricGroup;
+
+private final boolean reportEventsAsSpans;
 
 /**
  * Creates the handler to deal with task failures.
@@ -76,13 +86,15 @@ public class ExecutionFailureHandler {
  * @param globalFailureCtx Global failure Context used by FailureEnrichers
  */
 public ExecutionFailureHandler(
+final Configuration jobMasterConfig,
 final SchedulingTopology schedulingTopology,
 final FailoverStrategy failoverStrategy,
 final RestartBackoffTimeStrategy restartBackoffTimeStrategy,
 final ComponentMainThreadExecutor 

(flink) branch release-1.19 updated: [FLINK-33436][docs] Add the docs of built-in async-profiler

2024-03-01 Thread tangyun
This is an automated email from the ASF dual-hosted git repository.

tangyun pushed a commit to branch release-1.19
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.19 by this push:
 new 12ea64c0e2a [FLINK-33436][docs] Add the docs of built-in async-profiler
12ea64c0e2a is described below

commit 12ea64c0e2a56da3c5f6a656b23a2f2ac54f19d5
Author: Yu Chen 
AuthorDate: Wed Feb 28 21:19:17 2024 +0800

[FLINK-33436][docs] Add the docs of built-in async-profiler

This closes #24403.
---
 docs/content.zh/docs/ops/debugging/profiler.md |  81 +
 docs/content/docs/ops/debugging/profiler.md|  81 +
 docs/static/fig/profiler_instance.png  | Bin 0 -> 181106 bytes
 3 files changed, 162 insertions(+)
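
The docs below enable the feature by setting `rest.profiling.enabled: true` in the Flink configuration file. For local experiments, a hedged sketch of flipping the same key programmatically on a mini cluster with the Web UI (assumes `flink-runtime-web` is on the classpath; the pipeline is a throwaway placeholder):

```java
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class LocalProfilerPlayground {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Same key the docs have you put into the Flink configuration file.
        conf.setString("rest.profiling.enabled", "true");
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);
        // A long-running placeholder job so there is something to profile
        // from the TaskManager's Profiler tab.
        env.fromSequence(1, Long.MAX_VALUE).print();
        env.execute("profiler-playground");
    }
}
```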

diff --git a/docs/content.zh/docs/ops/debugging/profiler.md 
b/docs/content.zh/docs/ops/debugging/profiler.md
new file mode 100644
index 000..ab5c79a21aa
--- /dev/null
+++ b/docs/content.zh/docs/ops/debugging/profiler.md
@@ -0,0 +1,81 @@
+---
+title: "Profiler"
+weight: 3
+type: docs
+aliases:
+  - /ops/debugging/profiler.html
+  - /ops/debugging/profiler
+---
+
+
+# Profiler
+
+Since Flink 1.19, we support profiling the JobManager/TaskManager process interactively with [async-profiler](https://github.com/async-profiler/async-profiler) via the Flink Web UI, which allows users to create a profiling instance with arbitrary intervals and event modes, e.g. ITIMER, CPU, Lock, Wall-Clock and Allocation.
+
+- **CPU**: In this mode the profiler collects stack trace samples that include Java methods, native calls, JVM code and kernel functions.
+- **ALLOCATION**: In allocation profiling mode, the top frame of every call trace is the class of the allocated object, and the counter is the heap pressure (the total size of allocated TLABs or objects outside TLAB).
+- **Wall-clock**: The Wall-Clock option tells async-profiler to sample all threads equally every given period of time, regardless of thread status: Running, Sleeping or Blocked. For instance, this can be helpful when profiling application start-up time.
+- **Lock**: In lock profiling mode the top frame is the class of the lock/monitor, and the counter is the number of nanoseconds it took to enter this lock/monitor.
+- **ITIMER**: You can fall back to itimer profiling mode. It is similar to CPU mode, but does not require perf_events support. As a drawback, there will be no kernel stack traces.
+
+{{< hint warning >}}
+
+Any measurement process in and of itself inevitably affects the subject of measurement. In order to prevent unintended impacts on production environments, the Profiler is currently available as an opt-in feature. To enable it, you'll need to set [`rest.profiling.enabled: true`]({{< ref "docs/deployment/config">}}#rest-profiling-enabled) in the [Flink configuration file]({{< ref "docs/deployment/config#flink-configuration-file" >}}). We recommend enabling it in development and pre-production env [...]
+
+{{< /hint >}}
+
+
+## Requirements
+Since the Profiler is powered by async-profiler, it only works on platforms that async-profiler supports.
+
+|           | Officially maintained builds | Other available ports                     |
+|-----------|------------------------------|-------------------------------------------|
+| **Linux** | x64, arm64                   | x86, arm32, ppc64le, riscv64, loongarch64 |
+| **macOS** | x64, arm64                   |                                           |
+
+Profiling on platforms beyond those listed above will fail with an error message in the `Message` column.
+
+
+## Usage
+Flink users can conveniently submit profiling requests and export the results via the Flink Web UI.
+
+For example,
+- First, identify the candidate TaskManager/JobManager with the performance bottleneck, and switch to the corresponding TaskManager/JobManager page (Profiler tab).
+- Submit a profiling instance with a specified duration and mode by simply clicking the **Create Profiling Instance** button. (A description of each profiling mode is shown when hovering over it.)
+- Once the profiling instance is complete, you can download the interactive HTML file by clicking on the link.
+
+{{< img src="/fig/profiler_instance.png" class="img-fluid" width="90%" >}}
+{{% center %}}
+Profiling Instance
+{{% /center %}}
+
+
+## Troubleshooting
+1. **Failed to profiling in CPU mode: No access to perf events. Try --fdtransfer or --all-user option or 'sysctl kernel.perf_event_paranoid=1'** \
+   This means the `perf_event_open()` syscall has failed. By default, Docker containers restrict access to the `perf_event_open` syscall. The recommended solution is to fall back to ITIMER profiling mode, which is similar to CPU mode but does not require `perf_events` support. As a drawback, there will be no kernel stack 

(flink) branch master updated: [FLINK-33436][docs] Add the docs of built-in async-profiler

2024-03-01 Thread tangyun
This is an automated email from the ASF dual-hosted git repository.

tangyun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 5f06ce76525 [FLINK-33436][docs] Add the docs of built-in async-profiler
5f06ce76525 is described below

commit 5f06ce765256b375945b9e69db2f16123b53f194
Author: Yu Chen 
AuthorDate: Wed Feb 28 21:19:17 2024 +0800

[FLINK-33436][docs] Add the docs of built-in async-profiler

This closes #24403.
---
 docs/content.zh/docs/ops/debugging/profiler.md |  81 +
 docs/content/docs/ops/debugging/profiler.md|  81 +
 docs/static/fig/profiler_instance.png  | Bin 0 -> 181106 bytes
 3 files changed, 162 insertions(+)

diff --git a/docs/content.zh/docs/ops/debugging/profiler.md 
b/docs/content.zh/docs/ops/debugging/profiler.md
new file mode 100644
index 000..ab5c79a21aa
--- /dev/null
+++ b/docs/content.zh/docs/ops/debugging/profiler.md
@@ -0,0 +1,81 @@
+---
+title: "Profiler"
+weight: 3
+type: docs
+aliases:
+  - /ops/debugging/profiler.html
+  - /ops/debugging/profiler
+---
+
+
+# Profiler
+
+Since Flink 1.19, we support profiling the JobManager/TaskManager process interactively with [async-profiler](https://github.com/async-profiler/async-profiler) via the Flink Web UI, which allows users to create a profiling instance with arbitrary intervals and event modes, e.g. ITIMER, CPU, Lock, Wall-Clock and Allocation.
+
+- **CPU**: In this mode the profiler collects stack trace samples that include Java methods, native calls, JVM code and kernel functions.
+- **ALLOCATION**: In allocation profiling mode, the top frame of every call trace is the class of the allocated object, and the counter is the heap pressure (the total size of allocated TLABs or objects outside TLAB).
+- **Wall-clock**: The Wall-Clock option tells async-profiler to sample all threads equally every given period of time, regardless of thread status: Running, Sleeping or Blocked. For instance, this can be helpful when profiling application start-up time.
+- **Lock**: In lock profiling mode the top frame is the class of the lock/monitor, and the counter is the number of nanoseconds it took to enter this lock/monitor.
+- **ITIMER**: You can fall back to itimer profiling mode. It is similar to CPU mode, but does not require perf_events support. As a drawback, there will be no kernel stack traces.
+
+{{< hint warning >}}
+
+Any measurement process in and of itself inevitably affects the subject of 
measurement. In order to prevent unintended impacts on production environments, 
Profiler are currently available as an opt-in feature. To enable it, you'll 
need to set [`rest.profiling.enabled: true`]({{< ref 
"docs/deployment/config">}}#rest-profiling-enabled) in [Flink configuration 
file]({{< ref "docs/deployment/config#flink-configuration-file" >}}). We 
recommend enabling it in development and pre-production env [...]
+
+{{< /hint >}}
+
+
+## Requirements
+Since the Profiler is powered by async-profiler, it only works on platforms that async-profiler supports.
+
+|           | Officially maintained builds | Other available ports                     |
+|-----------|------------------------------|-------------------------------------------|
+| **Linux** | x64, arm64                   | x86, arm32, ppc64le, riscv64, loongarch64 |
+| **macOS** | x64, arm64                   |                                           |
+
+Profiling on platforms beyond those listed above will fail with an error 
message in the `Message` column.
+
+
+## Usage
+Flink users can conveniently submit a profiling request and export the results via the Flink Web UI.
+
+For example:
+- First, identify the candidate TaskManager/JobManager with the performance bottleneck to profile, and switch to the corresponding TaskManager/JobManager page (profiler tab).
+- Submit a profiling instance with a specified duration and mode by clicking the **Create Profiling Instance** button. (A description of each profiling mode is shown when hovering over it; a scripted variant of this step is sketched after the figure below.)
+- Once the profiling instance is complete, you can download the interactive HTML file by clicking on the link.
+
+{{< img src="/fig/profiler_instance.png" class="img-fluid" width="90%" >}}
+{{% center %}}
+Profiling Instance
+{{% /center %}}
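As referenced in the list above, the submission can in principle also be scripted against the REST API that backs the Web UI. The endpoint path, field names and values below are illustrative assumptions, not taken from this commit; verify them against your cluster's REST API documentation before relying on them:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ProfilingSubmitter {
    public static void main(String[] args) throws Exception {
        // Hypothetical endpoint shape: POST /taskmanagers/<id>/profiler on the REST port,
        // with the profiling mode and duration (in seconds) as a JSON body.
        String restAddress = "http://localhost:8081";
        String taskManagerId = "taskmanager-1"; // placeholder; copy the real id from the Web UI
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(restAddress + "/taskmanagers/" + taskManagerId + "/profiler"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString("{\"mode\":\"CPU\",\"duration\":30}"))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}
```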
+
+
+## Troubleshooting
+1. **Failed to profiling in CPU mode: No access to perf events. Try --fdtransfer or --all-user option or 'sysctl kernel.perf_event_paranoid=1'** \
+   This means the `perf_event_open()` syscall has failed. By default, Docker containers restrict access to the `perf_event_open` syscall. The recommended solution is to fall back to the ITIMER profiling mode. It is similar to CPU mode, but does not require `perf_events` support. As a drawback, there will be no kernel stack traces.
+
+2. 

(flink) branch master updated (f88f7506834 -> f99e2e4fd6f)

2024-03-01 Thread rmetzger
This is an automated email from the ASF dual-hosted git repository.

rmetzger pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


from f88f7506834 [hotfix][runtime] Remove unneeded 
requestTaskManagerFileUploadByName (#24386)
 add f99e2e4fd6f [hotfix][table] Fix typo in maven shade configuration

No new revisions were added by this update.

Summary of changes:
 flink-table/flink-sql-jdbc-driver-bundle/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)



(flink-connector-jdbc) branch main updated: [FLINK-32714] Add dialect for OceanBase database. This closes #72

2024-03-01 Thread martijnvisser
This is an automated email from the ASF dual-hosted git repository.

martijnvisser pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-jdbc.git


The following commit(s) were added to refs/heads/main by this push:
 new 786bd159 [FLINK-32714] Add dialect for OceanBase database. This closes 
#72
786bd159 is described below

commit 786bd15951887a98f3a4962ba95e4e99b7214d7a
Author: He Wang 
AuthorDate: Fri Mar 1 16:21:17 2024 +0800

[FLINK-32714] Add dialect for OceanBase database. This closes #72
---
 docs/content.zh/docs/connectors/table/jdbc.md  |  83 +++--
 docs/content/docs/connectors/table/jdbc.md |  70 ++-
 flink-connector-jdbc/pom.xml   |   8 ++
 .../flink/connector/jdbc/catalog/JdbcCatalog.java  |  17 ++-
 .../connector/jdbc/catalog/JdbcCatalogUtils.java   |   5 +-
 .../jdbc/catalog/factory/JdbcCatalogFactory.java   |   5 +-
 .../catalog/factory/JdbcCatalogFactoryOptions.java |   3 +
 .../oceanbase/dialect/OceanBaseDialect.java| 121 +++
 .../oceanbase/dialect/OceanBaseDialectFactory.java |  46 +++
 .../oceanbase/dialect/OceanBaseRowConverter.java   | 134 +
 .../connector/jdbc/dialect/JdbcDialectFactory.java |  15 +++
 .../connector/jdbc/dialect/JdbcDialectLoader.java  |   9 +-
 .../options/InternalJdbcConnectionOptions.java |   9 +-
 .../connector/jdbc/table/JdbcConnectorOptions.java |   6 +
 .../jdbc/table/JdbcDynamicTableFactory.java|  24 +++-
 ...flink.connector.jdbc.dialect.JdbcDialectFactory |   1 +
 .../oceanbase/OceanBaseMysqlTestBase.java  |  42 +++
 .../oceanbase/OceanBaseOracleTestBase.java |  44 +++
 .../oceanbase/dialect/OceanBaseDialectTest.java|  52 
 .../dialect/OceanBaseMysqlDialectTypeTest.java |  77 
 .../dialect/OceanBaseOracleDialectTypeTest.java|  71 +++
 .../OceanBaseMySqlDynamicTableSinkITCase.java  |  90 ++
 .../OceanBaseMySqlDynamicTableSourceITCase.java|  74 
 .../OceanBaseOracleDynamicTableSinkITCase.java | 121 +++
 .../OceanBaseOracleDynamicTableSourceITCase.java   |  91 ++
 .../oceanbase/table/OceanBaseTableRow.java |  48 
 .../catalog/factory/JdbcCatalogFactoryTest.java|   3 +-
 .../jdbc/dialect/JdbcDialectTypeTest.java  |   4 +-
 .../jdbc/table/JdbcDynamicTableSinkITCase.java |   9 +-
 .../connector/jdbc/table/JdbcOutputFormatTest.java |  35 ++
 .../databases/oceanbase/OceanBaseContainer.java|  74 
 .../databases/oceanbase/OceanBaseDatabase.java |  72 +++
 .../databases/oceanbase/OceanBaseImages.java   |  27 +
 .../databases/oceanbase/OceanBaseMetadata.java |  85 +
 .../databases/oceanbase/OceanBaseTestDatabase.java |  25 
 35 files changed, 1567 insertions(+), 33 deletions(-)

diff --git a/docs/content.zh/docs/connectors/table/jdbc.md 
b/docs/content.zh/docs/connectors/table/jdbc.md
index 20447e16..cf4e6384 100644
--- a/docs/content.zh/docs/connectors/table/jdbc.md
+++ b/docs/content.zh/docs/connectors/table/jdbc.md
@@ -58,7 +58,7 @@ The JDBC connector is not part of the binary distribution. See [here]({{< ref "
 | CrateDB    | `io.crate`         | `crate-jdbc`       | [Download](https://repo1.maven.org/maven2/io/crate/crate-jdbc/) |
 | Db2        | `com.ibm.db2.jcc`  | `db2jcc`           | [Download](https://www.ibm.com/support/pages/download-db2-fix-packs-version-db2-linux-unix-and-windows) |
 | Trino      | `io.trino`         | `trino-jdbc`       | [Download](https://repo1.maven.org/maven2/io/trino/trino-jdbc/) |
-
+| OceanBase  | `com.oceanbase`    | `oceanbase-client` | [Download](https://repo1.maven.org/maven2/com/oceanbase/oceanbase-client/) |
 
 Currently, the JDBC connector and drivers are not included in the Flink binary distribution. See [here]({{< ref "docs/dev/configuration/overview" >}}) for how to make them available when running on a cluster.
 
@@ -141,6 +141,13 @@ ON myTopic.key = MyUserTable.id;
   String
  The class name of the JDBC driver used to connect to this URL. If not set, it will be derived from the URL automatically.
 
+
+  compatible-mode
+  optional
+  (none)
+  String
+  The compatible mode of the database.
+
 
   username
  optional
@@ -654,7 +661,7 @@ SELECT * FROM `custom_schema.test_table2`;
 
 Data Type Mapping
 
-Flink supports connecting to several databases through their dialects, such as MySQL, Oracle, PostgreSQL, CrateDB, Derby, Db2, SQL Server, etc. Among them, Derby is usually used for testing purposes. The table below lists the type mappings from relational-database data types to Flink SQL data types; these mappings make it easier to define JDBC tables in Flink.
+Flink supports connecting to several databases through their dialects, such as MySQL, Oracle, PostgreSQL, CrateDB, Derby, Db2, SQL Server, OceanBase, etc. Among them, Derby is usually used for testing purposes. The table below lists the type mappings from relational-database data types to Flink SQL data types; these mappings make it easier to define JDBC tables in Flink.
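To make the new dialect concrete, here is a hedged Flink SQL sketch of a JDBC table backed by OceanBase. The `jdbc:oceanbase://` URL scheme, host/port and option values are illustrative assumptions based on the option table above, not examples taken from this commit:

```sql
-- Illustrative only: URL scheme, host/port and credentials are assumptions.
CREATE TABLE MyUserTable (
  id BIGINT,
  name STRING,
  PRIMARY KEY (id) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:oceanbase://localhost:2881/mydatabase',
  'table-name' = 'users',
  'compatible-mode' = 'mysql', -- the diffstat also shows Oracle-mode test bases
  'username' = 'root',
  'password' = 'secret'
);
```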
 
 
 
@@ -666,6 +673,8 @@ Flink supports connecting to several databases through their dialects, such as MySQL, O