This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow-publish.git


The following commit(s) were added to refs/heads/main by this push:
     new 40d7ef0  Add providers workflow (#5)
40d7ef0 is described below

commit 40d7ef0fdc2e0313219d63c402cf244fad7d35b9
Author: GPK <[email protected]>
AuthorDate: Wed Dec 18 15:33:51 2024 +0000

    Add providers workflow (#5)
    
    * add providers workflow
    
    * add pre commit
    
    * updated changes to align with svn repo structure
    
    * print package list in tabular format
---
 .github/workflows/airflow-publish.yml              |  37 +-
 .github/workflows/ci.yml                           |   2 +-
 .../{airflow-publish.yml => providers-publish.yml} |  39 +-
 .github/workflows/requirements.txt                 |   7 +
 .../workflows/ci.yml => .pre-commit-config.yaml    |  50 +--
 README.md                                          | 117 ++----
 airflow-rc-config.yml                              |  60 +++
 artifacts/__init__.py                              |   2 +-
 artifacts/action.yml                               |  43 +--
 artifacts/publish_packages_finder.py               | 242 +++---------
 artifacts/test_publish_packages_finder.py          | 412 +++++----------------
 checksum/__init__.py                               |   2 +-
 checksum/checksum_check.py                         |   8 +-
 checksum/test_checksum_check.py                    |   8 +-
 providers-pypi-config.yml                          |  55 +++
 providers-rc-config.yml                            |  55 +++
 read-config/action.yml                             |   4 +-
 read-config/config_parser.py                       |   8 +-
 read-config/release-config-schema.yml.schema.json  |  49 +--
 release-config.yml                                 |  48 ---
 signature/__init__.py                              |   2 +-
 signature/signature_check.py                       |  12 +-
 svn/__init__.py                                    |   2 +-
 svn/svn_check.py                                   |  12 +-
 24 files changed, 441 insertions(+), 835 deletions(-)

diff --git a/.github/workflows/airflow-publish.yml 
b/.github/workflows/airflow-publish.yml
index abe8bcc..f469dbe 100644
--- a/.github/workflows/airflow-publish.yml
+++ b/.github/workflows/airflow-publish.yml
@@ -25,52 +25,37 @@ on:
       release-config:
         description: "Path to the release config file"
         required: true
-        default: "release-config.yml"
+        default: "airflow-rc-config.yml"
+        type: choice
+        options:
+          - "airflow-rc-config.yml"
+          - "airflow-pypi-config.yml"
       temp-dir:
         description: >
-          Checkout directory of svn repo, this is used to checkout the svn 
repo.
+          Temporary directory to checkout the svn repo.
         required: false
         default: "asf-dist"
       mode:
         description: >
-          Mode to run the action, The default mode is 'VERIFY' which will only 
verify the packages and displays the what will be published.
-          to publish the packages to PyPI set the mode to 'RELEASE'.
+          Mode to run the action, set mode to 'RELEASE' to publish the 
packages to PyPI.
         required: false
         default: "VERIFY"
       if-no-files-found:
         description: >
-          The desired behavior if no files are found using the provided path.
-  
-          Available Options:
-            warn: Output a warning but do not fail the action
-            error: Fail the action with an error message
-            ignore: Do not output any warnings or errors, the action does not 
fail
+          upload artifacts action behavior if no files are found using the 
provided path.
         default: 'warn'
       retention-days:
         description: >
           Duration after which artifact will expire in days. 0 means using 
default retention.
-  
-          Minimum 1 day.
-          Maximum 90 days unless changed from the repository settings page.
         default: '5'
       compression-level:
         description: >
-          The level of compression for Zlib to be applied to the artifact 
archive.
-          The value can range from 0 to 9:
-          - 0: No compression
-          - 1: Best speed
-          - 6: Default compression (same as GNU Gzip)
-          - 9: Best compression
-          Higher levels will result in better compression, but will take 
longer to complete.
-          For large files that are not easily compressed, a value of 0 is 
recommended for significantly faster uploads.
+          The level of compression for artifact upload.
         default: '6'
       overwrite:
         description: >
-          If true, an artifact with a matching name will be deleted before a 
new one is uploaded.
-          If false, the action will fail if an artifact for the given name 
already exists.
-          Does not fail if the artifact does not exist.
+          Overwrite the existing artifact with the same name.
         default: 'false'
-
       artifact-name:
         description: >
           The name of the artifact to be uploaded.
@@ -135,7 +120,7 @@ jobs:
        id: "upload-artifacts"
        uses: ./artifacts
        with:
-        publish-config: ${{ steps.config-parser.outputs.checks-publish }}
+        artifact-config: ${{ steps.config-parser.outputs.checks-artifact }}
         temp-dir: ${{ inputs.temp-dir }}
         mode: ${{ inputs.mode }}
         publisher-name: ${{ steps.config-parser.outputs.publisher-name }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3d7809b..0095239 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -37,5 +37,5 @@ jobs:
           python-version: '3.11'
       - name: "Run tests"
         run: |
-            python3 -m pip install uv pytest rich python-gnupg requests 
pytest-unordered
+            python3 -m pip install -r .github/workflows/requirements.txt
             uv run pytest -vv
\ No newline at end of file
diff --git a/.github/workflows/airflow-publish.yml 
b/.github/workflows/providers-publish.yml
similarity index 74%
copy from .github/workflows/airflow-publish.yml
copy to .github/workflows/providers-publish.yml
index abe8bcc..c1e2666 100644
--- a/.github/workflows/airflow-publish.yml
+++ b/.github/workflows/providers-publish.yml
@@ -16,7 +16,7 @@
 # under the License.
 #
 ---
-name: Dry run publish airflow packages
+name: Dry run publish airflow provider packages
 description: "Publish or verify svn artifacts"
 
 on:
@@ -25,50 +25,36 @@ on:
       release-config:
         description: "Path to the release config file"
         required: true
-        default: "release-config.yml"
+        default: "providers-rc-config.yml"
+        type: choice
+        options:
+          - "providers-rc-config.yml"
+          - "providers-pypi-config.yml"
       temp-dir:
         description: >
-          Checkout directory of svn repo, this is used to checkout the svn 
repo.
+          Temporary directory to checkout the svn repo.
         required: false
         default: "asf-dist"
       mode:
         description: >
-          Mode to run the action, The default mode is 'VERIFY' which will only 
verify the packages and displays the what will be published.
-          to publish the packages to PyPI set the mode to 'RELEASE'.
+          Mode to run the action, set mode to 'RELEASE' to publish the 
packages to PyPI.
         required: false
         default: "VERIFY"
       if-no-files-found:
         description: >
-          The desired behavior if no files are found using the provided path.
-  
-          Available Options:
-            warn: Output a warning but do not fail the action
-            error: Fail the action with an error message
-            ignore: Do not output any warnings or errors, the action does not 
fail
+          upload artifacts action behavior if no files are found using the 
provided path.
         default: 'warn'
       retention-days:
         description: >
           Duration after which artifact will expire in days. 0 means using 
default retention.
-  
-          Minimum 1 day.
-          Maximum 90 days unless changed from the repository settings page.
         default: '5'
       compression-level:
         description: >
-          The level of compression for Zlib to be applied to the artifact 
archive.
-          The value can range from 0 to 9:
-          - 0: No compression
-          - 1: Best speed
-          - 6: Default compression (same as GNU Gzip)
-          - 9: Best compression
-          Higher levels will result in better compression, but will take 
longer to complete.
-          For large files that are not easily compressed, a value of 0 is 
recommended for significantly faster uploads.
+          The level of compression for artifact upload.
         default: '6'
       overwrite:
         description: >
-          If true, an artifact with a matching name will be deleted before a 
new one is uploaded.
-          If false, the action will fail if an artifact for the given name 
already exists.
-          Does not fail if the artifact does not exist.
+          Overwrite the existing artifact with the same name.
         default: 'false'
 
       artifact-name:
@@ -135,7 +121,7 @@ jobs:
        id: "upload-artifacts"
        uses: ./artifacts
        with:
-        publish-config: ${{ steps.config-parser.outputs.checks-publish }}
+        artifact-config: ${{ steps.config-parser.outputs.checks-artifact }}
         temp-dir: ${{ inputs.temp-dir }}
         mode: ${{ inputs.mode }}
         publisher-name: ${{ steps.config-parser.outputs.publisher-name }}
@@ -146,7 +132,6 @@ jobs:
         overwrite: ${{ inputs.overwrite }}
         artifact-name: ${{ inputs.artifact-name }}
 
-
   publish-to-pypi:
     name: Publish svn packages to PyPI
     runs-on: ubuntu-20.04
diff --git a/.github/workflows/requirements.txt 
b/.github/workflows/requirements.txt
new file mode 100644
index 0000000..edb89a0
--- /dev/null
+++ b/.github/workflows/requirements.txt
@@ -0,0 +1,7 @@
+uv
+pytest
+rich
+python-gnupg
+requests
+pytest-unordered
+tabulate
\ No newline at end of file
diff --git a/.github/workflows/ci.yml b/.pre-commit-config.yaml
similarity index 50%
copy from .github/workflows/ci.yml
copy to .pre-commit-config.yaml
index 3d7809b..bc64758 100644
--- a/.github/workflows/ci.yml
+++ b/.pre-commit-config.yaml
@@ -14,28 +14,30 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
 ---
-name: Unit test action scripts
-on:
-  workflow_dispatch:
-  pull_request:
-    branches:
-      - main
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-
-      - name: Setup Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: '3.11'
-      - name: "Run tests"
-        run: |
-            python3 -m pip install uv pytest rich python-gnupg requests 
pytest-unordered
-            uv run pytest -vv
\ No newline at end of file
+default_stages: [pre-commit, pre-push]
+default_language_version:
+  python: python3
+  node: 22.2.0
+minimum_pre_commit_version: '3.2.0'
+repos:
+  - repo: https://github.com/asottile/blacken-docs
+    rev: 1.19.1
+    hooks:
+      - id: blacken-docs
+        name: Run black on docs
+        args:
+          - --line-length=110
+        alias: blacken-docs
+        additional_dependencies: [black==24.10.0]
+  - repo: local
+    hooks:
+      - id: ruff
+        name: Run 'ruff' for extremely fast Python linting
+        description: "Run 'ruff' for extremely fast Python linting"
+        entry: ruff check --force-exclude
+        language: python
+        types_or: [ python, pyi ]
+        args: [ --fix ]
+        require_serial: true
+        additional_dependencies: [ "ruff==0.8.1" ]
\ No newline at end of file
diff --git a/README.md b/README.md
index b926822..69d9f17 100644
--- a/README.md
+++ b/README.md
@@ -20,28 +20,25 @@ This action reads the release configuration file and writes 
output to `GITHUB_OU
 
 ```yaml
 project:
-  name: example-project
-  description: "Example project for publishing to PyPI"
+  name: airflow
+  description: "Publish rc provider packages to PyPI"
 publisher:
-  name: providers
-  url: "https://dist.apache.org/repos/dist/dev/airflow"
-  path: "providers/"
+  name: airflow
+  url: https://dist.apache.org/repos/dist/dev/airflow/
+  path: providers/pypi-rc/
 checks:
   svn:
     - id: extension
       description: "Validate svn package extensions"
       identifiers:
         - type: regex
-          pattern: 
".*(py3-none-any.whl|tar.gz.sha512|tar.gz.asc|tar.gz|py3-none-any.whl.asc|py3-none-any.whl.sha512)$"
+          pattern: 
".*(py3-none-any.whl|py3-none-any.whl.asc|py3-none-any.whl.sha512|tar.gz|tar.gz.asc|tar.gz.sha512)$"
 
     - id: package_name
       description: "Validate svn package names"
       identifiers:
         - type: regex
-          pattern: ".*(apache_airflow.*)$"
-
-        - type: regex
-          pattern: ".*(apache-airflow.*)$"
+          pattern: ".*(apache_airflow_providers.*)$"
 
   checksum:
     - id: checksum
@@ -54,19 +51,12 @@ checks:
       method: gpg
      keys: "https://dist.apache.org/repos/dist/release/airflow/KEYS"
 
-  publish:
-    id: publish
-    description: "Publish provider packages to PyPI"
-    release-type: "RC_VERSION"
-    exclude_extensions:
+  artifact:
+    id: artifact
+    description: "Find providers artifacts to publish to PyPI"
+    exclude:
       - type: regex
         pattern: ".*(.asc|.sha512)$"
-    compare:
-      url: "https://dist.apache.org/repos/dist/release/airflow/"
-      path: "providers/"
-      package_names:
-        - type: regex
-          pattern: "(apache_airflow_providers.*?)(?=rc)"
 ```
 #### Publisher
 This section contains the publisher details like `name`, `url`, and `path` to 
identify the repository in SVN.
@@ -227,43 +217,22 @@ This action uses the `publish` section from the 
`release-config.yml` to publish
 
 ```yaml
 checks:
-  publish:
-    id: publish
-    description: "Publish provider packages to PyPI"
-    release-type: "RC_VERSION"
-    exclude_extensions:
+    artifact:
+    id: artifact
+    description: "Find providers artifacts to publish to PyPI"
+    exclude:
       - type: regex
         pattern: ".*(.asc|.sha512)$"
-    compare:
-      url: "https://dist.apache.org/repos/dist/release/airflow/"
-      path: "providers/"
-      package_names:
-       - type: regex
-         pattern: "(apache_airflow_providers.*?)(?=rc)"
 ```
-#### Release Configuration
-The `release-type` and `compare` sections are part of the validation and 
publishing configuration.
-
-##### `release-type`
-- **`RC_VERSION`**:  
-  It will consider packages from the `dev/` folder and publish to PyPI.  
-
-- **`PYPI_VERSION`**:  
-  It will consider packages from the `release/` folder and publish to PyPI.
-
 ---
 
-#### `compare`
-This section contains the release svn folder configuration, 
-it compares the packages in the `dev/` folder with release folder and only 
matching packages will be published to PyPI.
-
 ### Usage
 ```yaml
 - name: "Find ${{ steps.config-parser.outputs.publisher-name }} packages"
   id: "upload-artifacts"
   uses: ./artifacts
   with:
-    publish-config: ${{ steps.config-parser.outputs.checks-publish }}
+    artifact-config: ${{ steps.config-parser.outputs.checks-artifact }}
     temp-dir: ${{ inputs.temp-dir }}
     mode: ${{ inputs.mode }}
     publisher-name: ${{ steps.config-parser.outputs.publisher-name }}
@@ -278,8 +247,8 @@ it compares the packages in the `dev/` folder with release 
folder and only match
 A sample github workflow file to use the composite actions is shown below:
 
 ```yaml
-name: Tes gh-svn-pypi-publisher
-description: "Publish to PyPI"
+name: Dry run publish airflow provider packages
+description: "Publish or verify svn artifacts"
 
 on:
   workflow_dispatch:
@@ -287,48 +256,36 @@ on:
       release-config:
         description: "Path to the release config file"
         required: true
-        default: "release-config.yml"
+        default: "providers-rc-config.yml"
+        type: choice
+        options:
+          - "providers-rc-config.yml"
+          - "providers-pypi-config.yml"
       temp-dir:
         description: >
-          Checkout directory of svn repo, this is used to checkout the svn 
repo.
+          Temporary directory to checkout the svn repo.
         required: false
         default: "asf-dist"
       mode:
-        description: "Mode to run the action"
+        description: >
+          Mode to run the action, set mode to 'RELEASE' to publish the 
packages to PyPI.
         required: false
         default: "VERIFY"
       if-no-files-found:
         description: >
-          The desired behavior if no files are found using the provided path.
-  
-          Available Options:
-            warn: Output a warning but do not fail the action
-            error: Fail the action with an error message
-            ignore: Do not output any warnings or errors, the action does not 
fail
+          upload artifacts action behavior if no files are found using the 
provided path.
         default: 'warn'
       retention-days:
         description: >
           Duration after which artifact will expire in days. 0 means using 
default retention.
-  
-          Minimum 1 day.
-          Maximum 90 days unless changed from the repository settings page.
         default: '5'
       compression-level:
         description: >
-          The level of compression for Zlib to be applied to the artifact 
archive.
-          The value can range from 0 to 9:
-          - 0: No compression
-          - 1: Best speed
-          - 6: Default compression (same as GNU Gzip)
-          - 9: Best compression
-          Higher levels will result in better compression, but will take 
longer to complete.
-          For large files that are not easily compressed, a value of 0 is 
recommended for significantly faster uploads.
+          The level of compression for artifact upload.
         default: '6'
       overwrite:
         description: >
-          If true, an artifact with a matching name will be deleted before a 
new one is uploaded.
-          If false, the action will fail if an artifact for the given name 
already exists.
-          Does not fail if the artifact does not exist.
+          Overwrite the existing artifact with the same name.
         default: 'false'
 
       artifact-name:
@@ -337,13 +294,11 @@ on:
         required: false
         default: "pypi-packages"
 
-
-
 jobs:
   release-checks:
     outputs:
       publisher-name: ${{ steps.config-parser.outputs.publisher-name }}
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     steps:
      - name: Checkout Code
        uses: actions/checkout@v4
@@ -351,7 +306,7 @@ jobs:
          persist-credentials: false
 
      - name: Setup Python
-       uses: actions/setup-python@v4
+       uses: actions/setup-python@v5
        with:
          python-version: "3.11"
 
@@ -397,7 +352,7 @@ jobs:
        id: "upload-artifacts"
        uses: ./artifacts
        with:
-        publish-config: ${{ steps.config-parser.outputs.checks-publish }}
+        artifact-config: ${{ steps.config-parser.outputs.checks-artifact }}
         temp-dir: ${{ inputs.temp-dir }}
         mode: ${{ inputs.mode }}
         publisher-name: ${{ steps.config-parser.outputs.publisher-name }}
@@ -406,15 +361,14 @@ jobs:
         retention-days: ${{ inputs.retention-days }}
         compression-level: ${{ inputs.compression-level }}
         overwrite: ${{ inputs.overwrite }}
-
+        artifact-name: ${{ inputs.artifact-name }}
 
   publish-to-pypi:
     name: Publish svn packages to PyPI
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
+    if: inputs.mode == 'RELEASE' && success()
     needs:
       - release-checks
-    environment:
-      name: test
     permissions:
       id-token: write  # IMPORTANT: mandatory for trusted publishing
 
@@ -428,7 +382,6 @@ jobs:
 
       - name: "Publishing ${{ needs.release-checks.outputs.publisher-name }} 
to PyPI"
         uses: pypa/gh-action-pypi-publish@release/v1
-        if: inputs.mode == 'RELEASE'
         with:
           packages-dir: "./dist"
 ```
diff --git a/airflow-rc-config.yml b/airflow-rc-config.yml
new file mode 100644
index 0000000..1662f19
--- /dev/null
+++ b/airflow-rc-config.yml
@@ -0,0 +1,60 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+project:
+  name: airflow
+  description: "Publish rc airflow packages to PyPI"
+publisher:
+  name: airflow
+  url: https://dist.apache.org/repos/dist/dev/airflow
+  path: 2.10.4rc1/
+checks:
+  svn:
+    - id: extension
+      description: "Validate svn package extensions"
+      identifiers:
+        - type: regex
+          pattern: 
".*(py3-none-any.whl|tar.gz.sha512|tar.gz.asc|tar.gz|py3-none-any.whl.asc|py3-none-any.whl.sha512)$"
+
+    - id: package_name
+      description: "Validate svn package names"
+      identifiers:
+        - type: regex
+          pattern: ".*(apache_airflow.*)$"
+
+        - type: regex
+          pattern: ".*(apache-airflow.*)$"
+
+  checksum:
+    - id: checksum
+      description: "Validate check sum with SHA512"
+      algorithm: "sha512"
+
+  signature:
+    - id: signature
+      description: "Validate signatures with GPG of packages"
+      method: gpg
+      keys: "https://dist.apache.org/repos/dist/release/airflow/KEYS"
+
+  artifact:
+    id: artifact
+    description: "Find airflow artifacts to publish to PyPI"
+    exclude:
+      - type: regex
+        pattern: ".*(.asc|.sha512)$"
+      - type: regex
+        pattern: "(apache-airflow-.*?)$"
\ No newline at end of file
diff --git a/artifacts/__init__.py b/artifacts/__init__.py
index b4646d1..d0a78df 100644
--- a/artifacts/__init__.py
+++ b/artifacts/__init__.py
@@ -14,4 +14,4 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
\ No newline at end of file
+#
diff --git a/artifacts/action.yml b/artifacts/action.yml
index cc3e782..05e36d6 100644
--- a/artifacts/action.yml
+++ b/artifacts/action.yml
@@ -19,27 +19,15 @@ name: "Upload as artifacts"
 description: "Uploads the release distributions as artifacts"
 
 inputs:
-  publish-config:
+  artifact-config:
     description: >
-      Json config for publishing packages. it contains all the information 
required to publish the packages to PyPI. eg:
-      publish:
-        id: publish
-        description: "Publish provider packages to PyPI"
-        release-type: "RC_VERSION"
-        exclude_extensions:
+      Json config to find publishing packages. it contains all the information 
required to publish the packages to PyPI. eg:
+      artifact:
+        id: artifact
+        description: "Find artifacts to publish to PyPI"
+        exclude:
           - type: regex
             pattern: ".*(.asc|.sha512)$"
-        compare:
-          url: "https://dist.apache.org/repos/dist/release/airflow/"
-          path: "providers/"
-          package_names:
-           - type: regex
-             pattern: "(apache_airflow_providers.*?)(?=rc)"
-      
-      release-type: RC_VERSION It will consider packages from dev/ folder and 
publish to PyPI.
-      release-type: PYPI_VERSION It will consider packages from release/ 
folder and publish to PyPI.
-      compare: Section contains the release packages, which is used to compare 
the packages in dev folder and only matching 
-      packages will be published to PyPI.
     required: true
 
   temp-dir:
@@ -50,13 +38,14 @@ inputs:
 
   repo-path:
     description: >
-      Path to the svn repo. Lets say to publish the packages from the dev 
folder.
+      Path to the svn repo. Lets say to publish the packages from the dev pypi 
folder.
       eg: svn repo structure is https://dist.apache.org/repos/dist/
-        dev/airflow/providers
-        release/airflow/providers
-      now to publish the packages from dev providers folder, set url and path 
like below in the release-config.yml
+        dev/airflow/providers/pypi-rc/
+      
+      now to publish the packages from dev pypi providers folder, set url and 
path like below in the release-config.yml
+      
       url: https://dist.apache.org/repos/dist/dev/airflow
-      repo-path: providers/
+      repo-path: providers/pypi-rc/
     required: true
 
   mode:
@@ -119,13 +108,13 @@ runs:
       with:
         python-version: "3.11"
 
-    - name: "Move release distributions to dist directory"
+    - name: "Find artifacts to publish"
       shell: bash
-      id: move-release-dists
+      id: find-artifacts
       env:
-        PUBLISH_PACKAGES_CONFIG: ${{ inputs.publish-config }}
-        DIST_PATH: "${{ github.workspace }}/${{ inputs.temp-dir }}/dist"
+        ARTIFACTS_CONFIG: ${{ inputs.artifact-config }}
         MODE: ${{ inputs.mode }}
+        DIST_PATH: "${{ github.workspace }}/${{ inputs.temp-dir }}/dist"
       run: |
         python3 -m pip install uv
         uv run $GITHUB_ACTION_PATH/publish_packages_finder.py
diff --git a/artifacts/publish_packages_finder.py 
b/artifacts/publish_packages_finder.py
index bff6757..a0d2ab0 100644
--- a/artifacts/publish_packages_finder.py
+++ b/artifacts/publish_packages_finder.py
@@ -19,6 +19,7 @@
 # requires-python = ">=3.11"
 # dependencies = [
 #     "rich",
+#     "tabulate",
 # ]
 # ///
 
@@ -27,11 +28,11 @@ import os
 import re
 import subprocess
 import sys
-import tempfile
 from functools import cached_property
 from typing import Any
 
 from rich.console import Console
+from tabulate import tabulate
 
 console = Console(width=400, color_system="standard")
 
@@ -39,134 +40,48 @@ console = Console(width=400, color_system="standard")
 # the current working directory will be providers/
 # publisher:
 #   name: providers
-#   url: https://dist.apache.org/repos/dist/dev/airflow/";
-#   path: providers/
+#   url: https://dist.apache.org/repos/dist/dev/airflow/
+#   path: providers/pypi-rc/
 
 
 class PublishPackagesFinder:
     final_packages_to_publish: list[str] = []
-    matched_packages_between_dev_and_release: list[str] = []
-    publish_config = json.loads(os.environ.get("PUBLISH_PACKAGES_CONFIG", 
"{}"))
-    temp_svn_dist_release_dir = tempfile.TemporaryDirectory()
 
     @cached_property
-    def dev_svn_files(self):
-        return os.listdir()
+    def artifacts_config(self):
+        return json.loads(os.environ.get("ARTIFACTS_CONFIG", "{}"))
 
     @cached_property
-    def svn_dist_release_dir(self):
-        return self.temp_svn_dist_release_dir.name
+    def dev_svn_files(self):
+        """
+        Get the list of files in the current directory
+        :return:
+        """
+        files = [file for file in os.listdir() if os.path.isfile(file)]
+
+        if not files:
+            console.print(f"[red]No packages found in the {os.getcwd()}[/]")
+            sys.exit(1)
+        return files
 
     @staticmethod
-    def is_extension_matched(file: str, pattern: str) -> bool:
+    def is_matched(file: str, pattern: str) -> bool:
         match = re.match(pattern, file)
-        return match and file.endswith(match.group(1))
+        if match and file.endswith(match.group(1)):
+            return True
+        return False
 
     @cached_property
     def dist_path(self):
         # Path where the final packages will be moved and pushed to artifactory
-        if not os.path.exists(os.environ.get("DIST_PATH")):
-            os.makedirs(os.environ.get("DIST_PATH"))
-        return os.environ.get("DIST_PATH")
-
-    @cached_property
-    def release_type(self):
-        return self.publish_config.get("release-type")
-
-    @cached_property
-    def extension_exclude_config(self):
-        return self.publish_config.get("exclude_extensions")
+        dist_path = os.environ.get("DIST_PATH")
+        if not os.path.exists(dist_path):
+            os.makedirs(dist_path)
+        return dist_path
 
     @cached_property
-    def github_workspace(self):
-        return os.environ.get("GITHUB_WORKSPACE")
-
-    @staticmethod
-    def extract_package_names(
-        package_name_config: list[dict[str, Any]], lookup_packages: list[str]
-    ) -> list[str]:
-        """
-        Extract the package names based on the regex pattern provided in the 
package_names config
-        :param package_name_config:
-
-              package_names:
-                   - type: regex
-                     pattern: "(apache_airflow_providers.*?)(?=rc)"
-
-             eg: for a rc package apache-airflow-1.0.0rc1.targ.gz it will 
extract the package name as "apache-airflow-providers-1.0.0"
-        :param lookup_packages: List of packages to check for the package names
-        :return: matched package names
-        """
-        package_names: set[str] = set()
-
-        for package_name_config in package_name_config:
-            if package_name_config.get("type") == "regex":
-                regex_pattern = package_name_config.get("pattern")
-                package_names.update(
-                    match.group(1)
-                    for file in lookup_packages
-                    if (match := re.match(regex_pattern, file))
-                )
-
-        return list(package_names)
-
-    def find_matched_packages_between_dev_and_release(
-        self, compare_config: dict[str, Any]
-    ):
-        """
-        Find the matched packages between dev and release folder based on the 
package names. the comparison works with config provided in compare
-         section of the release config.
-             compare:
-              url: "https://dist.apache.org/repos/dist/release/airflow/";
-              path: "providers/"
-              package_names:
-               - type: regex
-                 pattern: "(apache_airflow_providers.*?)(?=rc)"
-
-         Here the package names are extracted based on the regex pattern 
provided, here in this case for a rc package apache-airflow-1.0.0rc1.targ.gz
-         it will extract the package name as "apache-airflow-providers-1.0.0" 
and compare with the release folder packages. below it used startswith
-         to compare the package names. if it matches it considers the package 
to final publish list.
-
-        :param compare_config: Configuration to compare the packages between 
dev and release folder, likely the dist
-        :return: None
-        """
-
-        # This dev_package_names contains all the packages without rc or based 
on regex pattern extracted name
-        # if dev package name is "apache-airflow-1.0.0rc1.targ.gz" and
-        # extract_package_names function returns package name like 
"apache-airflow-1.0.0"
-        # (it depends on regex pattern provided in package_names)
-
-        dev_package_names = self.extract_package_names(
-            compare_config.get("package_names"), self.dev_svn_files
-        )
-
-        if not dev_package_names:
-            console.print(
-                f"[red]No package names found in {os.getcwd()} with 
{compare_config.get('package_names')} [/]"
-            )
-            sys.exit(1)
-
-        inner_path = compare_config.get("path")
-        path_to_lookup = os.path.join(self.svn_dist_release_dir, inner_path)
-
-        release_folder_packages = os.listdir(path=path_to_lookup)
-        self.matched_packages_between_dev_and_release = [
-            package
-            for package in release_folder_packages
-            if any(
-                package.startswith(package_name) for package_name in 
dev_package_names
-            )
-        ]
-
-        if not self.matched_packages_between_dev_and_release:
-            svn_full_path = os.path.join(
-                self.publish_config.get("compare").get("url"), inner_path
-            ).strip()
-
-            console.print(
-                f"[red]No matched packages found between {os.getcwd()} and 
{svn_full_path}[/]"
-            )
-            sys.exit(1)
+    def exclude_config(self):
+        return self.artifacts_config.get("exclude")
 
     def exclude_packages_to_publish(
         self, packages: list[str], exclude_config: list[dict[str, Any]]
@@ -186,7 +101,7 @@ class PublishPackagesFinder:
                 [
                     exclude_packages.add(package)
                     for package in packages
-                    if self.is_extension_matched(package, regex_pattern)
+                    if self.is_matched(package, regex_pattern)
                 ]
         if exclude_packages:
             console.print("[blue]Following packages excluded: [/]")
@@ -195,21 +110,6 @@ class PublishPackagesFinder:
 
         return list(set(packages) - exclude_packages)
 
-    def filter_rc_packages_to_publish(
-        self, exclude_extensions_config: list[dict[str, Any]]
-    ):
-        """
-        Filter the packages to publish based on the release type RC_VERSION, 
for rc release we directly consider
-        packages from dev svn folder path provided in the release config
-
-        :param exclude_extensions_config:  Configuration to exclude the final 
publish packages based on the extension, eg: .asc, .sha512
-        :return:
-        """
-        packages_to_publish = self.exclude_packages_to_publish(
-            packages=self.dev_svn_files, 
exclude_config=exclude_extensions_config
-        )
-        self.final_packages_to_publish.extend(packages_to_publish)
-
     def move_packages_to_dist_folder(self, packages_path: str):
         """
         Move the packages to dist folder
@@ -219,92 +119,40 @@ class PublishPackagesFinder:
         """
 
         if not self.final_packages_to_publish:
-            console.print("[red]No packages found to publish[/]")
+            console.print("[red]No packages found to move[/]")
             sys.exit(1)
 
         for package_name in self.final_packages_to_publish:
             full_path = os.path.join(packages_path, package_name)
             subprocess.run(["mv", full_path, self.dist_path], check=True)
 
-    def filter_pypi_version_packages_to_publish(
-        self,
-        compare_config: dict[str, Any],
-        extension_exclude_config: list[dict[str, Any]],
-    ):
-        """
-        :param compare_config: Configuration to compare the packages between 
dev and release folder, likely the dist
-            release svn folder
-            {
-              "url": "https://dist.apache.org/repos/dist/release/airflow/";,
-              "path": "providers/",
-              "package_names": [
-                {
-                  "type": "regex",
-                  "pattern": "(apache_airflow_providers.*?)(?=rc)"
-                }
-              ]
-            }
-        :param extension_exclude_config:  Configuration to exclude the final 
publish packages based on the extension, eg: .asc, .sha512
-        :return: None
-        """
-
-        self.find_matched_packages_between_dev_and_release(compare_config)
-
-        # self.matched_packages_between_dev_and_release
-        # package names contains all the packages without
-        # rc or based on regex pattern extracted name
-
-        self.final_packages_to_publish.extend(
-            self.exclude_packages_to_publish(
-                self.matched_packages_between_dev_and_release, 
extension_exclude_config
-            )
-        )
-
-    @staticmethod
-    def checkout_svn_repo(repo_url: str, path_to_checkout: str):
-        console.print(
-            f"[blue]Checking out files from {repo_url} to 
{path_to_checkout}[/]"
-        )
-        subprocess.run(["svn", "co", repo_url, path_to_checkout], check=True)
-
     def run(self):
         try:
-            if self.release_type == "RC_VERSION":
-                
self.filter_rc_packages_to_publish(self.extension_exclude_config)
+            
console.print(f"[blue]{self.artifacts_config.get('description')}[/]")
+            console.print()
 
-                # For RC release we directly move the packages from the 
provided source path.
-                # also the current working directory is the source path
-                self.move_packages_to_dist_folder(os.getcwd())
-
-            elif self.release_type == "PYPI_VERSION":
-                compare_config = self.publish_config.get("compare")
-                repo_url = compare_config.get("url")
-                self.checkout_svn_repo(repo_url, self.svn_dist_release_dir)
-                self.filter_pypi_version_packages_to_publish(
-                    compare_config, self.extension_exclude_config
-                )
-
-                # For PYPI_VERSION release we move the packages from the 
release folder to dist folder,
-                # only matched packages between dev and release folder 
packages will be moved to dist folder for final publishing
+            self.final_packages_to_publish = self.exclude_packages_to_publish(
+                self.dev_svn_files, self.exclude_config
+            )
 
-                release_files_path = os.path.join(
-                    self.svn_dist_release_dir, compare_config.get("path")
-                )
-                self.move_packages_to_dist_folder(release_files_path)
-            else:
-                console.print(f"[red]Invalid release type 
{self.release_type}[/]")
-                sys.exit(1)
+            self.move_packages_to_dist_folder(os.getcwd())
 
-            if os.environ.get("MODE") == "VERIFY":
+            if os.environ.get("MODE", "VERIFY") == "VERIFY":
                 console.print(
                     "[blue]To publish these packages to PyPI, set the 
mode=RELEASE in workflow and run[/]"
                 )
             else:
-                console.print("[blue]Following packages will be published to 
PyPI[/]")
+                console.print("[blue]Following packages will be published to 
PyPI.[/]")
 
-            for package in self.final_packages_to_publish:
-                console.print(f"[blue]{package}[/]")
+            packages_tabulate_format = [[item] for item in 
self.final_packages_to_publish]
 
+            console.print(
+                tabulate(
+                    packages_tabulate_format,
+                    headers=["Packages"],
+                    tablefmt="grid"
+                )
+            )
         except Exception as e:
             console.print(f"[red]Error: {e}[/]")
             sys.exit(1)
diff --git a/artifacts/test_publish_packages_finder.py 
b/artifacts/test_publish_packages_finder.py
index 2208d65..6880fe1 100644
--- a/artifacts/test_publish_packages_finder.py
+++ b/artifacts/test_publish_packages_finder.py
@@ -15,6 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 #
+import json
 import os.path
 import tempfile
 
@@ -78,145 +79,87 @@ class TestPublishPackagesFinder:
         )
         assert after_exclude_packages == unordered(expected)
 
-    #
+    def test_dev_svn_files(self):
+        publish_packages_finder = PublishPackagesFinder()
+        with tempfile.TemporaryDirectory() as temp_dir:
+            files = [
+                "file1.tar.gz",
+                "file2.tar.gz.asc",
+                "file3.py3-none-any.whl.sha512",
+            ]
+            write_data(files, temp_dir)
+            os.chdir(temp_dir)
+            assert publish_packages_finder.dev_svn_files == unordered(files)
+
+    def test_dev_svn_files_empty(self):
+        publish_packages_finder = PublishPackagesFinder()
+        with tempfile.TemporaryDirectory() as temp_dir:
+            os.chdir(temp_dir)
+            with pytest.raises(SystemExit):
+                publish_packages_finder.dev_svn_files()
+
     @pytest.mark.parametrize(
-        "packages, exclude_config, expected",
+        "file, pattern, expected",
         [
             pytest.param(
-                [
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz",
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.asc",
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.sha512",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.sha512",
-                ],
-                [
-                    {
-                        "type": "regex",
-                        "pattern": r".*(.asc|.sha512)$",
-                    },
-                ],
-                [
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl",
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz",
-                ],
-                id="return_rc_packages",
+                "apache_airflow_providers_amazon-9.1.0rc1.tar.gz",
+                ".*(.asc|.sha512)$",
+                False,
             ),
             pytest.param(
-                [
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.asc",
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.sha512",
-                ],
-                [
-                    {
-                        "type": "regex",
-                        "pattern": r".*(.asc|.sha512)$",
-                    },
-                ],
-                [],
-                id="no_rc_packages",
+                "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.asc",
+                ".*(.asc|.sha512)$",
+                True,
             ),
-        ],
-    )
-    def test_filter_rc_packages_to_publish(self, packages, exclude_config, 
expected):
-        publish_packages_finder = PublishPackagesFinder()
-        publish_packages_finder.final_packages_to_publish.clear()
-
-        # Write some files to temporary dev svn folder
-        temp_dev_svn_folder = tempfile.TemporaryDirectory()
-        os.chdir(temp_dev_svn_folder.name)
-        write_data(packages, temp_dev_svn_folder.name)
-        publish_packages_finder.filter_rc_packages_to_publish(
-            exclude_extensions_config=exclude_config
-        )
-
-        assert publish_packages_finder.final_packages_to_publish == 
unordered(expected)
-
-    @pytest.mark.parametrize(
-        "packages, package_name_config, expected",
-        [
             pytest.param(
-                [
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz",
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.asc",
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.sha512",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0rc1.tar.gz",
-                    "apache_airflow_providers_airbyte-10.1.0rc1.tar.gz.asc",
-                    "apache_airflow_providers_airbyte-10.1.0rc1.tar.gz.sha512",
-                    
"apache_airflow_providers_airbyte-10.1.0rc1-py3-none-any.whl",
-                    
"apache_airflow_providers_airbyte-10.1.0rc1-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_airbyte-10.1.0rc1-py3-none-any.whl.sha512",
-                ],
-                [
-                    {
-                        "type": "regex",
-                        "pattern": "(apache_airflow_providers.*?)(?=rc)",
-                    },
-                ],
-                [
-                    "apache_airflow_providers_amazon-9.1.0",
-                    "apache_airflow_providers_airbyte-10.1.0",
-                ],
-                id="return_package_name_without_rc",
+                "apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl",
+                ".*(.asc|.sha512)$",
+                False,
             ),
             pytest.param(
-                [
-                    "apache-superset-incubating-0.34.0rc2-source.tar.gz",
-                    "apache-superset-incubating-0.34.0rc2-source.tar.gz.asc",
-                    
"apache-superset-incubating-0.34.0rc2-source.tar.gz.sha512",
-                ],
-                [
-                    {
-                        "type": "regex",
-                        "pattern": "(apache-superset-incubating.*?)(?=rc)",
-                    },
-                ],
-                [
-                    "apache-superset-incubating-0.34.0",
-                ],
-                id="return_superset_package_name_without_rc",
+                
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.sha512",
+                ".*(.asc|.sha512)$",
+                True,
             ),
+            pytest.param("apache_airflow-2.10.4.tar.gz.asc", 
"(apache_airflow-.*?)$", True),
         ],
     )
-    def test_extract_package_names(self, packages, package_name_config, 
expected):
+    def test_is_matched(self, file, pattern, expected):
+        """
+        Test is_matched method of PublishPackagesFinder, which checks if the 
file is matched with the pattern
+
+        """
         publish_packages_finder = PublishPackagesFinder()
-        extracted_names = publish_packages_finder.extract_package_names(
-            package_name_config=package_name_config, lookup_packages=packages
-        )
-        assert extracted_names == unordered(expected)
+        assert publish_packages_finder.is_matched(file, pattern) == expected
+
+    def test_exclude_config(self):
+        publish_packages_finder = PublishPackagesFinder()
+        publish_packages_finder.artifacts_config = {
+            "exclude": [{"type": "regex", "pattern": 
r".*(tar.gz.asc|py3-none-any.whl.sha512)$"}]
+        }
+        assert publish_packages_finder.exclude_config == [
+            {"type": "regex", "pattern": 
r".*(tar.gz.asc|py3-none-any.whl.sha512)$"}
+        ]
+
+    def test_exclude_config_empty(self):
+        publish_packages_finder = PublishPackagesFinder()
+        publish_packages_finder.artifacts_config = {}
+        assert publish_packages_finder.exclude_config is None
+
+    def test_exclude_config_empty_list(self):
+        publish_packages_finder = PublishPackagesFinder()
+        publish_packages_finder.artifacts_config = {"exclude": []}
+        assert publish_packages_finder.exclude_config == []
+
+    def test_run_should_fail_if_no_packages_found(self):
+        publish_packages_finder = PublishPackagesFinder()
+        with pytest.raises(SystemExit):
+            publish_packages_finder.run()
 
     @pytest.mark.parametrize(
-        "compare_config, temp_release_dir_files, temp_dev_svn_files, expected",
+        "temp_packages, exclude_config, expected",
         [
             pytest.param(
-                {
-                    "url": 
"https://dist.apache.org/repos/dist/release/airflow/";,
-                    "path": "airflow/providers/",
-                    "package_names": [
-                        {
-                            "type": "regex",
-                            "pattern": "(apache_airflow_providers.*?)(?=rc)",
-                        }
-                    ],
-                },
-                [
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz",
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz.asc",
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz.sha512",
-                    "apache_airflow_providers_amazon-9.1.0-py3-none-any.whl",
-                    
"apache_airflow_providers_amazon-9.1.0-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_amazon-9.1.0-py3-none-any.whl.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz.asc",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl",
-                    
"apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl.sha512",
-                ],
                 [
                     "apache_airflow_providers_amazon-9.1.0rc1.tar.gz",
                     "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.asc",
@@ -224,215 +167,50 @@ class TestPublishPackagesFinder:
                     
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl",
                     
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.asc",
                     
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0rc1.tar.gz",
-                    "apache_airflow_providers_airbyte-10.1.0rc1.tar.gz.asc",
-                    "apache_airflow_providers_airbyte-10.1.0rc1.tar.gz.sha512",
-                    
"apache_airflow_providers_airbyte-10.1.0rc1-py3-none-any.whl",
-                    
"apache_airflow_providers_airbyte-10.1.0rc1-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_airbyte-10.1.0rc1-py3-none-any.whl.sha512",
-                ],
-                [
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz",
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz.asc",
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz.sha512",
-                    "apache_airflow_providers_amazon-9.1.0-py3-none-any.whl",
-                    
"apache_airflow_providers_amazon-9.1.0-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_amazon-9.1.0-py3-none-any.whl.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz.asc",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl",
-                    
"apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl.sha512",
-                ],
-                id="find_matched_packages_between_dev_and_release",
-            ),
-            pytest.param(
-                {
-                    "url": 
"https://dist.apache.org/repos/dist/release/airflow/";,
-                    "path": "airflow/providers/",
-                    "package_names": [
-                        {
-                            "type": "regex",
-                            "pattern": "(apache_airflow_providers.*?)(?=rc)",
-                        }
-                    ],
-                },
-                [
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz",
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz.asc",
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz.sha512",
-                    "apache_airflow_providers_amazon-9.1.0-py3-none-any.whl",
-                    
"apache_airflow_providers_amazon-9.1.0-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_amazon-9.1.0-py3-none-any.whl.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz.asc",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl",
-                    
"apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl.sha512",
                 ],
+                [{"type": "regex", "pattern": ".*(.asc|.sha512)$"}],
                 [
                     "apache_airflow_providers_amazon-9.1.0rc1.tar.gz",
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.asc",
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.sha512",
                     
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.sha512",
-                ],
-                [
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz",
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz.asc",
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz.sha512",
-                    "apache_airflow_providers_amazon-9.1.0-py3-none-any.whl",
-                    
"apache_airflow_providers_amazon-9.1.0-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_amazon-9.1.0-py3-none-any.whl.sha512",
                 ],
-                
id="find_matched_packages_between_dev_and_release_should_return_one_provider",
-            ),
-        ],
-    )
-    def test_find_matched_packages_between_dev_and_release(
-        self,
-        compare_config,
-        temp_release_dir_files,
-        temp_dev_svn_files,
-        expected,
-    ):
-        publish_packages_finder = PublishPackagesFinder()
-
-        # Write some files to temporary release folder
-        write_data(
-            temp_release_dir_files,
-            os.path.join(
-                publish_packages_finder.svn_dist_release_dir, 
compare_config.get("path")
+                id="exclude_asc_sha512",
             ),
-        )
-
-        # Write some files to temporary dev svn folder
-        temp_dev_svn_folder = tempfile.TemporaryDirectory()
-        os.chdir(temp_dev_svn_folder.name)
-        write_data(temp_dev_svn_files, temp_dev_svn_folder.name)
-
-        publish_packages_finder.find_matched_packages_between_dev_and_release(
-            compare_config
-        )
-        assert (
-            publish_packages_finder.matched_packages_between_dev_and_release
-            == unordered(expected)
-        )
-
-    def 
test_find_matched_packages_between_dev_and_release_when_no_match_should_fail(
-        self,
-    ):
-        publish_packages_finder = PublishPackagesFinder()
-        files = [
-            "apache_airflow_providers_amazon-9.1.0.tar.gz",
-            "apache_airflow_providers_amazon-9.1.0.tar.gz.asc",
-            "apache_airflow_providers_amazon-9.1.0.tar.gz.sha512",
-        ]
-        write_data(files, publish_packages_finder.svn_dist_release_dir)
-
-        temp_dev_svn_folder = tempfile.TemporaryDirectory()
-        os.chdir(temp_dev_svn_folder.name)
-        write_data(
-            [
-                "apache_airflow_providers-airbyte-9.1.0.tar.gz.sha512",
-            ],
-            temp_dev_svn_folder.name,
-        )
-
-        with pytest.raises(SystemExit):
-            
publish_packages_finder.find_matched_packages_between_dev_and_release(
-                compare_config={
-                    "url": "https://someurl/";,
-                    "path": "airflow/providers/",
-                    "package_names": [
-                        {
-                            "type": "regex",
-                            "pattern": "(apache_airflow_providers.*?)(?=rc)",
-                        }
-                    ],
-                }
-            )
-
-    @pytest.mark.parametrize(
-        "compare_config, temp_release_dir_files, temp_dev_svn_files, expected",
-        [
             pytest.param(
-                {
-                    "url": 
"https://dist.apache.org/repos/dist/release/airflow/";,
-                    "path": "airflow/providers/",
-                    "package_names": [
-                        {
-                            "type": "regex",
-                            "pattern": "(apache_airflow_providers.*?)(?=rc)",
-                        }
-                    ],
-                },
                 [
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz",
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz.asc",
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz.sha512",
-                    "apache_airflow_providers_amazon-9.1.0-py3-none-any.whl",
-                    
"apache_airflow_providers_amazon-9.1.0-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_amazon-9.1.0-py3-none-any.whl.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz.asc",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl",
-                    
"apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl.sha512",
+                    "apache-airflow-2.10.4-source.tar.gz",
+                    "apache-airflow-2.10.4-source.tar.gz.asc",
+                    "apache-airflow-2.10.4-source.tar.gz.sha512",
+                    "apache_airflow-2.10.4-py3-none-any.whl",
+                    "apache_airflow-2.10.4-py3-none-any.whl.asc",
+                    "apache_airflow-2.10.4-py3-none-any.whl.sha512",
+                    "apache_airflow-2.10.4.tar.gz",
+                    "apache_airflow-2.10.4.tar.gz.asc",
+                    "apache_airflow-2.10.4.tar.gz.sha512",
                 ],
                 [
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz",
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.asc",
-                    "apache_airflow_providers_amazon-9.1.0rc1.tar.gz.sha512",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_amazon-9.1.0rc1-py3-none-any.whl.sha512",
-                    "apache_airflow_providers_airbyte-10.1.0rc1.tar.gz",
-                    "apache_airflow_providers_airbyte-10.1.0rc1.tar.gz.asc",
-                    "apache_airflow_providers_airbyte-10.1.0rc1.tar.gz.sha512",
-                    
"apache_airflow_providers_airbyte-10.1.0rc1-py3-none-any.whl",
-                    
"apache_airflow_providers_airbyte-10.1.0rc1-py3-none-any.whl.asc",
-                    
"apache_airflow_providers_airbyte-10.1.0rc1-py3-none-any.whl.sha512",
+                    {"type": "regex", "pattern": ".*(.asc|.sha512)$"},
+                    {"type": "regex", "pattern": "(apache-airflow-.*?)$"},
                 ],
                 [
-                    "apache_airflow_providers_amazon-9.1.0.tar.gz",
-                    "apache_airflow_providers_amazon-9.1.0-py3-none-any.whl",
-                    "apache_airflow_providers_airbyte-10.1.0.tar.gz",
-                    "apache_airflow_providers_airbyte-10.1.0-py3-none-any.whl",
+                    "apache_airflow-2.10.4.tar.gz",
+                    "apache_airflow-2.10.4-py3-none-any.whl",
                 ],
-                id="find_matched_packages_between_dev_and_release",
             ),
         ],
     )
-    def test_filter_pypi_version_packages_to_publish(
-        self, compare_config, temp_release_dir_files, temp_dev_svn_files, 
expected
-    ):
-        # Test compare the dev and release packages and filter the packages to 
publish
-        publish_packages_finder = PublishPackagesFinder()
-        publish_packages_finder.final_packages_to_publish.clear()
-
-        # Write some files to temporary dev svn folder
-        temp_dev_svn_folder = tempfile.TemporaryDirectory()
-        os.chdir(temp_dev_svn_folder.name)
-        write_data(temp_dev_svn_files, temp_dev_svn_folder.name)
-
-        dist_folder = tempfile.TemporaryDirectory()
-        os.environ["DIST_PATH"] = dist_folder.name
-
-        # Create temporary release folder files
-        write_data(temp_release_dir_files, 
publish_packages_finder.svn_dist_release_dir)
-
-        publish_packages_finder.filter_pypi_version_packages_to_publish(
-            compare_config=compare_config,
-            extension_exclude_config=[
-                {
-                    "type": "regex",
-                    "pattern": r".*(.asc|.sha512)$",
-                }
-            ],
+    def test_run_should_find_packages(self, monkeypatch, temp_packages, 
exclude_config, expected):
+        monkeypatch.setenv(
+            "ARTIFACTS_CONFIG",
+            json.dumps(
+                {"id": "artifact", "description": "Find publish packages to 
PyPI", "exclude": exclude_config}
+            ),
         )
-        assert publish_packages_finder.final_packages_to_publish == 
unordered(expected)
+        dist_folder = tempfile.TemporaryDirectory()
+        monkeypatch.setenv("DIST_PATH", dist_folder.name)
+        publish_packages_finder = PublishPackagesFinder()
+        with tempfile.TemporaryDirectory() as temp_dir:
+            write_data(temp_packages, temp_dir)
+            os.chdir(temp_dir)
+            publish_packages_finder.run()
+            assert publish_packages_finder.final_packages_to_publish == 
unordered(expected)
+            assert os.listdir(dist_folder.name) == unordered(expected)
diff --git a/checksum/__init__.py b/checksum/__init__.py
index b4646d1..d0a78df 100644
--- a/checksum/__init__.py
+++ b/checksum/__init__.py
@@ -14,4 +14,4 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
\ No newline at end of file
+#
diff --git a/checksum/checksum_check.py b/checksum/checksum_check.py
index 99e37d3..65bb854 100644
--- a/checksum/checksum_check.py
+++ b/checksum/checksum_check.py
@@ -74,9 +74,7 @@ def get_valid_files(algorithm: str, files: list[str]) -> 
list[dict[str, str]]:
 
 
 if __name__ == "__main__":
-    check_sum_config: list[dict[str, Any]] = json.loads(
-        os.environ.get("CHECK_SUM_CONFIG")
-    )
+    check_sum_config: list[dict[str, Any]] = 
json.loads(os.environ.get("CHECK_SUM_CONFIG"))
 
     if not check_sum_config:
         console.print(
@@ -86,9 +84,7 @@ if __name__ == "__main__":
         sys.exit(1)
 
     if not svn_files:
-        console.print(
-            f"[red]Error: No files found in SVN directory at 
{os.environ.get('REPO_PATH')}[/]"
-        )
+        console.print(f"[red]Error: No files found in SVN directory at 
{os.environ.get('REPO_PATH')}[/]")
         sys.exit(1)
 
     for check in check_sum_config:
diff --git a/checksum/test_checksum_check.py b/checksum/test_checksum_check.py
index 910c356..40d790a 100644
--- a/checksum/test_checksum_check.py
+++ b/checksum/test_checksum_check.py
@@ -80,9 +80,7 @@ def test_validate_checksum(mock_file_digest):
     with open(temp_file.name, "wb") as temp_data:
         temp_data.write(b"some random data")
 
-    with open(
-        temp_dir.name + "/apache-airflow-2.10.3-source.tar.gz.sha512", "wb"
-    ) as temp_file:
+    with open(temp_dir.name + "/apache-airflow-2.10.3-source.tar.gz.sha512", 
"wb") as temp_file:
         temp_file.write(
             
b"bbc759357eb1980e7f80ba0b016e9ed02120e26fcd008129b5777baf8086208c45e170e3c98cf35bd96a246d59484bde3220a897e5e6a7f688a69a40bcd451bd
 apache-airflow-2.10.3-source.tar.gz"
         )
@@ -111,9 +109,7 @@ def test_validate_checksum_invalid(mock_file_digest):
     with open(temp_file.name, "wb") as temp_data:
         temp_data.write(b"some random data")
 
-    with open(
-        temp_dir.name + "/apache-airflow-2.10.3-source.tar.gz.sha512", "wb"
-    ) as temp_file:
+    with open(temp_dir.name + "/apache-airflow-2.10.3-source.tar.gz.sha512", 
"wb") as temp_file:
         temp_file.write(
             
b"bbc759357eb1980e7f80ba0b016e9ed02120e26fcd008129b5777baf8086208c45e170e3c98cf35bd96a246d59484bde3220a897e5e6a7f688a69a40bcd451bd
 apache-airflow-2.10.3-source.tar.gz"
         )
diff --git a/providers-pypi-config.yml b/providers-pypi-config.yml
new file mode 100644
index 0000000..a742222
--- /dev/null
+++ b/providers-pypi-config.yml
@@ -0,0 +1,55 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+project:
+  name: airflow
+  description: "Publish provider packages to PyPI"
+publisher:
+  name: airflow
+  url: https://dist.apache.org/repos/dist/dev/airflow/
+  path: providers/
+checks:
+  svn:
+    - id: extension
+      description: "Validate svn package extensions"
+      identifiers:
+        - type: regex
+          pattern: 
".*(py3-none-any.whl|py3-none-any.whl.asc|py3-none-any.whl.sha512|tar.gz|tar.gz.asc|tar.gz.sha512)$"
+
+    - id: package_name
+      description: "Validate svn package names"
+      identifiers:
+        - type: regex
+          pattern: ".*(apache_airflow_providers.*)$"
+
+  checksum:
+    - id: checksum
+      description: "Validate check sum with SHA512"
+      algorithm: "sha512"
+
+  signature:
+    - id: signature
+      description: "Validate signatures with GPG of packages"
+      method: gpg
+      keys: "https://dist.apache.org/repos/dist/release/airflow/KEYS"
+
+  artifact:
+    id: artifact
+    description: "Find providers artifacts to publish to PyPI"
+    exclude:
+      - type: regex
+        pattern: ".*(.asc|.sha512)$"
diff --git a/providers-rc-config.yml b/providers-rc-config.yml
new file mode 100644
index 0000000..3345625
--- /dev/null
+++ b/providers-rc-config.yml
@@ -0,0 +1,55 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+project:
+  name: airflow
+  description: "Publish rc provider packages to PyPI"
+publisher:
+  name: airflow
+  url: https://dist.apache.org/repos/dist/dev/airflow/
+  path: providers/pypi-rc/
+checks:
+  svn:
+    - id: extension
+      description: "Validate svn package extensions"
+      identifiers:
+        - type: regex
+          pattern: 
".*(py3-none-any.whl|py3-none-any.whl.asc|py3-none-any.whl.sha512|tar.gz|tar.gz.asc|tar.gz.sha512)$"
+
+    - id: package_name
+      description: "Validate svn package names"
+      identifiers:
+        - type: regex
+          pattern: ".*(apache_airflow_providers.*)$"
+
+  checksum:
+    - id: checksum
+      description: "Validate check sum with SHA512"
+      algorithm: "sha512"
+
+  signature:
+    - id: signature
+      description: "Validate signatures with GPG of packages"
+      method: gpg
+      keys: "https://dist.apache.org/repos/dist/release/airflow/KEYS"
+
+  artifact:
+    id: artifact
+    description: "Find providers artifacts to publish to PyPI"
+    exclude:
+      - type: regex
+        pattern: ".*(.asc|.sha512)$"
diff --git a/read-config/action.yml b/read-config/action.yml
index da878be..d10ea16 100644
--- a/read-config/action.yml
+++ b/read-config/action.yml
@@ -46,8 +46,8 @@ outputs:
   checks-signature:
     value: ${{ steps.config-parser.outputs.checks-signature }}
     description: "Config for signature checks"
-  checks-publish:
-    value: ${{ steps.config-parser.outputs.checks-publish }}
+  checks-artifact:
+    value: ${{ steps.config-parser.outputs.checks-artifact }}
     description: "Config for finding eligible packages and publishing packages 
pypi"
 
 runs:
diff --git a/read-config/config_parser.py b/read-config/config_parser.py
index 0ab45c4..153de00 100644
--- a/read-config/config_parser.py
+++ b/read-config/config_parser.py
@@ -109,9 +109,5 @@ if __name__ == "__main__":
     console.print("")
     console.print("[blue]Starting validations for:[/]")
     console.print(f"[blue]  Project: 
{yml_config_data.get('project').get('name')}[/]")
-    console.print(
-        f"[blue]  Description: 
{yml_config_data.get('project').get('description')}[/]"
-    )
-    console.print(
-        f"[blue]  Publisher: {yml_config_data.get('publisher').get('name')}[/]"
-    )
+    console.print(f"[blue]  Description: 
{yml_config_data.get('project').get('description')}[/]")
+    console.print(f"[blue]  Publisher: 
{yml_config_data.get('publisher').get('name')}[/]")
diff --git a/read-config/release-config-schema.yml.schema.json 
b/read-config/release-config-schema.yml.schema.json
index bf81859..3c640f6 100644
--- a/read-config/release-config-schema.yml.schema.json
+++ b/read-config/release-config-schema.yml.schema.json
@@ -124,7 +124,7 @@
             ]
           }
         },
-        "publish": {
+        "artifact": {
           "type": "object",
           "properties": {
             "id": {
@@ -133,10 +133,7 @@
             "description": {
               "type": "string"
             },
-            "release-type": {
-              "type": "string"
-            },
-            "exclude_extensions": {
+            "exclude": {
               "type": "array",
               "items": {
                 "type": "object",
@@ -153,54 +150,18 @@
                   "type"
                 ]
               }
-            },
-            "compare": {
-              "type": "object",
-              "properties": {
-                "url": {
-                  "type": "string"
-                },
-                "path": {
-                  "type": "string"
-                },
-                "package_names": {
-                  "type": "array",
-                  "items": {
-                    "type": "object",
-                    "properties": {
-                      "type": {
-                        "type": "string"
-                      },
-                      "pattern": {
-                        "type": "string"
-                      }
-                    },
-                    "required": [
-                      "pattern",
-                      "type"
-                    ]
-                  }
-                }
-              },
-              "required": [
-                "package_names",
-                "path",
-                "url"
-              ]
             }
           },
           "required": [
-            "compare",
             "description",
-            "exclude_extensions",
-            "id",
-            "release-type"
+            "exclude",
+            "id"
           ]
         }
       },
       "required": [
+        "artifact",
         "checksum",
-        "publish",
         "signature",
         "svn"
       ]
diff --git a/release-config.yml b/release-config.yml
deleted file mode 100644
index be1ca34..0000000
--- a/release-config.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-project:
-  name: airflow-publish
-  description: "Publish airflow packages to PyPI"
-publisher:
-  name: airflow
-  url: https://dist.apache.org/repos/dist/dev/airflow
-  path: "2.10.4rc1/"
-checks:
-  svn:
-    - id: extension
-      description: "Validate svn package extensions"
-      identifiers:
-        - type: regex
-          pattern: 
".*(py3-none-any.whl|tar.gz.sha512|tar.gz.asc|tar.gz|py3-none-any.whl.asc|py3-none-any.whl.sha512)$"
-
-    - id: package_name
-      description: "Validate svn package names"
-      identifiers:
-        - type: regex
-          pattern: ".*(apache_airflow.*)$"
-
-        - type: regex
-          pattern: ".*(apache-airflow.*)$"
-
-  checksum:
-    - id: checksum
-      description: "Validate check sum with SHA512"
-      algorithm: "sha512"
-
-  signature:
-    - id: signature
-      description: "Validate signatures with GPG of packages"
-      method: gpg
-      keys: "https://dist.apache.org/repos/dist/release/airflow/KEYS"
-
-  publish:
-    id: publish
-    description: "Publish airflow packages to PyPI"
-    release-type: "RC_VERSION"
-    exclude_extensions:
-      - type: regex
-        pattern: ".*(.asc|.sha512)$"
-    compare:
-      url: https://dist.apache.org/repos/dist/release/airflow/
-      path: "2.10.4"
-      package_names:
-       - type: regex
-         pattern: "(apache_airflow.*?)(?=rc)"
diff --git a/signature/__init__.py b/signature/__init__.py
index b4646d1..d0a78df 100644
--- a/signature/__init__.py
+++ b/signature/__init__.py
@@ -14,4 +14,4 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
\ No newline at end of file
+#
diff --git a/signature/signature_check.py b/signature/signature_check.py
index cbde2b3..4f77ddf 100644
--- a/signature/signature_check.py
+++ b/signature/signature_check.py
@@ -64,9 +64,7 @@ def validate_signature_with_gpg(signature_check: dict[str, 
Any]):
     for file in svn_files:
         if file.endswith(".asc"):
             with open(file, "rb") as singed_file:
-                status = gpg.verify_file(
-                    fileobj_or_path=singed_file, 
data_filename=file.replace(".asc", "")
-                )
+                status = gpg.verify_file(fileobj_or_path=singed_file, 
data_filename=file.replace(".asc", ""))
             if not status.valid:
                 invalid_signature_files.append(
                     {"file": file, "status": status.valid, "problems": 
status.problems}
@@ -76,9 +74,7 @@ def validate_signature_with_gpg(signature_check: dict[str, 
Any]):
 
 
 if __name__ == "__main__":
-    signature_check_config: list[dict[str, Any]] = json.loads(
-        os.environ.get("SIGNATURE_CHECK_CONFIG")
-    )
+    signature_check_config: list[dict[str, Any]] = 
json.loads(os.environ.get("SIGNATURE_CHECK_CONFIG"))
 
     if not signature_check_config:
         console.print(
@@ -88,9 +84,7 @@ if __name__ == "__main__":
         sys.exit(1)
 
     if not svn_files:
-        console.print(
-            f"[red]Error: No files found in SVN directory at 
{os.environ.get('REPO_PATH')}[/]"
-        )
+        console.print(f"[red]Error: No files found in SVN directory at 
{os.environ.get('REPO_PATH')}[/]")
         sys.exit(1)
 
     for check in signature_check_config:
diff --git a/svn/__init__.py b/svn/__init__.py
index b4646d1..d0a78df 100644
--- a/svn/__init__.py
+++ b/svn/__init__.py
@@ -14,4 +14,4 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
\ No newline at end of file
+#
diff --git a/svn/svn_check.py b/svn/svn_check.py
index 7f6ea84..7af79f0 100644
--- a/svn/svn_check.py
+++ b/svn/svn_check.py
@@ -90,9 +90,7 @@ def check_files_with_identifiers(
 
 
 if __name__ == "__main__":
-    svn_check_config: list[dict[str, Any]] = json.loads(
-        os.environ.get("SVN_CHECK_CONFIG")
-    )
+    svn_check_config: list[dict[str, Any]] = 
json.loads(os.environ.get("SVN_CHECK_CONFIG"))
 
     if not svn_check_config:
         console.print(
@@ -102,16 +100,12 @@ if __name__ == "__main__":
         sys.exit(1)
 
     if not svn_files:
-        console.print(
-            f"[red]Error: No files found in SVN directory at 
{os.environ.get('REPO_PATH')}[/]"
-        )
+        console.print(f"[red]Error: No files found in SVN directory at 
{os.environ.get('REPO_PATH')}[/]")
         sys.exit(1)
 
     for check in svn_check_config:
         console.print(f"[blue]{check.get('description')}[/]")
-        check_files_with_identifiers(
-            check.get("identifiers"), svn_files, check.get("id")
-        )
+        check_files_with_identifiers(check.get("identifiers"), svn_files, 
check.get("id"))
 
     exit_code = 0
 

Reply via email to