This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch run-scripts-from-current-version
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 08954f1d6e6636d614ce7644e0f27ad805b98a5a
Author: Jarek Potiuk <[email protected]>
AuthorDate: Tue Feb 17 00:17:31 2026 +0100

    Publish docs to S3 using the current version of workflows/scripts
    
    When we build docs for a past version of Airflow, we check out
    both the current (main) version of the code, to use the latest
    environment, and the tagged version of the code, to build the
    documentation. The workflow mixed up its sub-workflows and ran
    them from the "tag" version of the workflow rather than from the
    current one.
    
    We are moving SBOM building to the publishing job, and we are also
    pre-processing the generated docs to include the right stable
    versions - including Airflow 2 compatibility.
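    
    For illustration, the pre-processing step can be exercised locally
    roughly like this (a sketch, not part of the change itself;
    DOCS_BUILD_DIR and AIRFLOW_ROOT are the optional overrides the script
    reads, shown here with their defaults made explicit):
    
        # run the version-retrieval script against a locally built docs tree
        DOCS_BUILD_DIR=generated/_build/docs AIRFLOW_ROOT="$(pwd)" \
            uv run scripts/ci/docs/store_stable_versions.py
        # for each versioned package this writes <package>/stable.txt next to
        # the built docs and copies stable/ into a <version>/ directory when
        # that directory does not exist yet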
---
 .github/workflows/publish-docs-to-s3.yml           | 121 ++++++-----
 .../commands/release_management_commands.py        |  42 +++-
 .../src/airflow_breeze/utils/docs_publisher.py     |  10 +-
 .../src/airflow_breeze/utils/publish_docs_to_s3.py |   2 +-
 scripts/ci/docs/store_stable_versions.py           | 240 +++++++++++++++++++++
 5 files changed, 351 insertions(+), 64 deletions(-)

diff --git a/.github/workflows/publish-docs-to-s3.yml b/.github/workflows/publish-docs-to-s3.yml
index a8db5f3a5a4..1a9d9ac2dab 100644
--- a/.github/workflows/publish-docs-to-s3.yml
+++ b/.github/workflows/publish-docs-to-s3.yml
@@ -182,6 +182,15 @@ jobs:
         with:
           persist-credentials: false
           path: current-version
+      - name: "Free up disk space"
+        shell: bash
+        run: ./current-version/scripts/tools/free_up_disk_space.sh
+      - name: "Make /mnt writeable"
+        run: ./current-version/scripts/ci/make_mnt_writeable.sh
+      - name: "Move docker to /mnt"
+        run: ./current-version/scripts/ci/move_docker_to_mnt.sh
+      - name: "Copy the version retrieval script"
+        run: cp ./current-version/scripts/ci/docs/store_stable_versions.py /tmp/store_stable_versions.py
       # We are checking repo for both - breeze and docs from the ref provided as input
       # This will take longer as we need to rebuild CI image and it will not use cache
       # but it will build the CI image from the version of Airflow that is used to check out things
@@ -190,15 +199,7 @@ jobs:
         with:
           persist-credentials: false
           ref: ${{ inputs.ref }}
-          fetch-depth: 0
           fetch-tags: true
-      - name: "Free up disk space"
-        shell: bash
-        run: ./scripts/tools/free_up_disk_space.sh
-      - name: "Make /mnt writeable"
-        run: ./scripts/ci/make_mnt_writeable.sh
-      - name: "Move docker to /mnt"
-        run: ./scripts/ci/move_docker_to_mnt.sh
       - name: "Apply patch commits if provided"
         run: |
           if [[ "${APPLY_COMMITS}" != "" ]]; then
@@ -242,63 +243,24 @@ jobs:
           INCLUDE_COMMITS: ${{ startsWith(inputs.ref, 'providers') && 'true' || 'false' }}
         run: >
           breeze build-docs ${INCLUDE_DOCS} --docs-only
-      - name: "Checkout current version to run SBOM generation"
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683  # v4.2.2
-        with:
-          persist-credentials: false
-          fetch-depth: 0
-          fetch-tags: true
-          path: current-version
-        if: inputs.build-sboms
-      - name: "Reinstall breeze from the current version"
-        run: |
-          breeze setup self-upgrade --use-current-airflow-sources
-        if: inputs.build-sboms
-        working-directory: current-version
-      - name: "Make sure SBOM dir exists and has the right permissions"
-        run: |
-          sudo mkdir -vp ./files/sbom
-          sudo chown -R "${USER}" .
-        working-directory: current-version
-        if: inputs.build-sboms
-      - name: "Prepare SBOMs using current version of Breeze"
-        env:
-          AIRFLOW_VERSION: ${{ needs.build-info.outputs.airflow-version }}
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          PYTHON_VERSION: "${{ needs.build-info.outputs.default-python-version }}"
-          FORCE: "true"
-        run: >
-          breeze sbom update-sbom-information
-          --airflow-version ${AIRFLOW_VERSION} --remote-name origin --force
-          --all-combinations --run-in-parallel --airflow-root-path "${GITHUB_WORKSPACE}"
-        working-directory: current-version
-        if: inputs.build-sboms
-      - name: "Generated SBOM files"
-        run: |
-          echo "Generated SBOM files:"
-          find ./generated/_build/docs/apache-airflow/stable/sbom/ -type f | sort
-        if: inputs.build-sboms
-      - name: "Reinstall breeze from ${{ inputs.ref }} reference"
-        run:
-          breeze setup self-upgrade --use-current-airflow-sources
-        if: inputs.build-sboms
-      - name: Check disk space available
-        run: df -H
-      # Here we will create temp airflow-site dir to publish docs
-      - name: Create /mnt/airflow-site directory
+      - name: "Store stable versions"
+        run: uv run /tmp/store_stable_versions.py
+      - name: "Saving build docs folder"
         run: |
-          sudo mkdir -p /mnt/airflow-site && sudo chown -R "${USER}" /mnt/airflow-site
-          echo "AIRFLOW_SITE_DIRECTORY=/mnt/airflow-site/" >> "$GITHUB_ENV"
-      - name: "Publish docs to /mnt/airflow-site directory using ${{ inputs.ref }} reference breeze"
-        env:
-          INCLUDE_DOCS: ${{ needs.build-info.outputs.include-docs }}
-        run: >
-          breeze release-management publish-docs --override-versioned --run-in-parallel ${INCLUDE_DOCS}
+          if [ -d "generated/_build/" ]; then
+            mv "generated/_build/" "/mnt/_build"
+          elif [ -d "docs/_build/" ]; then
+            # handle Airflow 2 case
+            mv "docs/_build/" "/mnt/_build"
+          else
+            echo "No build docs found to save"
+            exit 1
+          fi
       - name: "Upload build docs"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02  # v4.6.2
         with:
           name: airflow-docs
-          path: /mnt/airflow-site
+          path: /mnt/_build
           retention-days: '7'
           if-no-files-found: 'error'
           overwrite: 'true'
@@ -324,10 +286,12 @@ jobs:
         # We are checking repo for both - breeze and docs from the "workflow" branch
         # This will take longer as we need to rebuild CI image and it will not use cache
         # but it will build the CI image from the version of Airflow that is used to check out things
+        # We also fetch the whole history to be able to prepare SBOM files
       - name: "Checkout ${{ inputs.ref }} "
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683  # v4.2.2
         with:
           persist-credentials: false
+          fetch-depth: 0
       - name: "Make /mnt writeable and cleanup"
         shell: bash
         run: ./scripts/ci/make_mnt_writeable.sh
@@ -337,7 +301,42 @@ jobs:
         uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093  # v4.3.0
         with:
           name: airflow-docs
-          path: /mnt/airflow-site
+          path: /mnt/_build
+      - name: "Move docs to generated folder"
+        run: mv /mnt/_build generated/_build
+      - name: "Make sure SBOM dir exists and has the right permissions"
+        run: |
+          sudo mkdir -vp ./files/sbom
+          sudo chown -R "${USER}" .
+        if: inputs.build-sboms
+      - name: "Prepare SBOMs"
+        env:
+          AIRFLOW_VERSION: ${{ needs.build-info.outputs.airflow-version }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          PYTHON_VERSION: "${{ needs.build-info.outputs.default-python-version }}"
+          FORCE: "true"
+        run: >
+          breeze sbom update-sbom-information
+          --airflow-version ${AIRFLOW_VERSION} --remote-name origin --force
+          --all-combinations --run-in-parallel --airflow-root-path "${GITHUB_WORKSPACE}"
+        if: inputs.build-sboms
+      - name: "Generated SBOM files"
+        run: |
+          echo "Generated SBOM files:"
+          find ./generated/_build/docs/apache-airflow/stable/sbom/ -type f | sort
+        if: inputs.build-sboms
+      - name: Check disk space available
+        run: df -H
+      # Here we will create temp airflow-site dir to publish docs
+      - name: Create /mnt/airflow-site directory
+        run: |
+          sudo mkdir -p /mnt/airflow-site && sudo chown -R "${USER}" /mnt/airflow-site
+          echo "AIRFLOW_SITE_DIRECTORY=/mnt/airflow-site/" >> "$GITHUB_ENV"
+      - name: "Publish docs to /mnt/airflow-site directory using ${{ inputs.ref }} reference breeze"
+        env:
+          INCLUDE_DOCS: ${{ needs.build-info.outputs.include-docs }}
+        run: >
+          breeze release-management publish-docs --override-versioned --run-in-parallel ${INCLUDE_DOCS}
       - name: Check disk space available
         run: df -H
       - name: "Update watermarks"
diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index b9296d83fe1..6a395dfd5c7 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -101,6 +101,9 @@ from airflow_breeze.global_constants import (
     DESTINATION_LOCATIONS,
     MULTI_PLATFORM,
     UV_VERSION,
+    get_airflow_version,
+    get_airflowctl_version,
+    get_task_sdk_version,
 )
 from airflow_breeze.params.build_ci_params import BuildCiParams
 from airflow_breeze.params.shell_params import ShellParams
@@ -133,6 +136,7 @@ from airflow_breeze.utils.docker_command_utils import (
     fix_ownership_using_docker,
     perform_environment_checks,
 )
+from airflow_breeze.utils.helm_chart_utils import chart_version
 from airflow_breeze.utils.packages import (
     PackageSuspendedException,
     apply_version_suffix_to_non_provider_pyproject_tomls,
@@ -142,6 +146,7 @@ from airflow_breeze.utils.packages import (
     get_available_distributions,
     get_provider_details,
     get_provider_distributions_metadata,
+    get_short_package_name,
     make_sure_remote_apache_exists_and_fetch,
 )
 from airflow_breeze.utils.parallel import (
@@ -1826,6 +1831,40 @@ def run_publish_docs_in_parallel(
             get_console().print(f"[warning]{entry}")
 
 
+def get_package_version_possibly_from_stable_txt(package_name: str) -> str:
+    """
+    Get version for a package, trying stable.txt first, then falling back to source files.
+
+    :param package_name: The package name (e.g., 'apache-airflow', 'apache-airflow-providers-amazon')
+    :return: The version string
+    """
+    # Try to read from stable.txt file first
+    stable_txt_path = AIRFLOW_ROOT_PATH / "generated" / "_build" / "docs" / package_name / "stable.txt"
+    if stable_txt_path.exists():
+        return stable_txt_path.read_text().strip()
+
+    # Fall back to reading from source files based on package type
+    if package_name == "apache-airflow":
+        return get_airflow_version()
+
+    if package_name == "apache-airflow-ctl":
+        return get_airflowctl_version()
+
+    if package_name == "task-sdk":
+        return get_task_sdk_version()
+
+    if package_name == "helm-chart":
+        return chart_version()
+
+    if package_name.startswith("apache-airflow-providers-"):
+        provider = get_provider_distributions_metadata().get(get_short_package_name(package_name))
+        if provider and "versions" in provider and provider["versions"]:
+            return provider["versions"][0]
+        raise SystemExit(f"Could not determine version for provider: {package_name}")
+
+    raise SystemExit(f"Unsupported package: {package_name}")
+
+
 @release_management_group.command(
     name="publish-docs",
     help="Command to publish generated documentation to airflow-site",
@@ -1898,7 +1937,8 @@ def publish_docs(
     )
     print(f"Publishing docs for {len(current_packages)} package(s)")
     for pkg in current_packages:
-        print(f" - {pkg}")
+        version = get_package_version_possibly_from_stable_txt(pkg)
+        print(f" - {pkg}: {version}")
     print()
     if run_in_parallel:
         run_publish_docs_in_parallel(
diff --git a/dev/breeze/src/airflow_breeze/utils/docs_publisher.py b/dev/breeze/src/airflow_breeze/utils/docs_publisher.py
index 861a7730ded..01621681669 100644
--- a/dev/breeze/src/airflow_breeze/utils/docs_publisher.py
+++ b/dev/breeze/src/airflow_breeze/utils/docs_publisher.py
@@ -61,6 +61,14 @@ class DocsPublisher:
                 "Make sure to add version in `provider.yaml` for the package."
             )
             raise RuntimeError(msg)
+
+        # Read version from stable.txt file
+        stable_txt_path = f"{GENERATED_PATH}/_build/docs/{self.package_name}/stable.txt"
+        if os.path.exists(stable_txt_path):
+            with open(stable_txt_path) as f:
+                return f.read().strip()
+
+        # Fallback to reading from source files if stable.txt doesn't exist
         if self.package_name == "apache-airflow":
             return get_airflow_version()
         if self.package_name.startswith("apache-airflow-providers-"):
@@ -103,6 +111,6 @@ class DocsPublisher:
         shutil.copytree(self._build_dir, output_dir)
         if self.is_versioned:
             with open(os.path.join(output_dir, "..", "stable.txt"), "w") as stable_file:
-                stable_file.write(self._current_version)
+                stable_file.write(self._current_version + "\n")
         get_console(output=self.output).print()
         return 0, f"Docs published: {self.package_name}"
diff --git a/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py b/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py
index 66f54472b32..fe385f7c88b 100644
--- a/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py
+++ b/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py
@@ -157,7 +157,7 @@ class S3DocsPublish:
                 stable_file_path = f"{self.source_dir_path}/{doc}/stable.txt"
                 if os.path.exists(stable_file_path):
                     with open(stable_file_path) as stable_file:
-                        stable_version = stable_file.read()
+                        stable_version = stable_file.read().strip()
                         get_console().print(f"[info]Stable version: {stable_version} for {doc}\n")
                 else:
                     get_console().print(
diff --git a/scripts/ci/docs/store_stable_versions.py b/scripts/ci/docs/store_stable_versions.py
new file mode 100755
index 00000000000..6579d614dfe
--- /dev/null
+++ b/scripts/ci/docs/store_stable_versions.py
@@ -0,0 +1,240 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# /// script
+# requires-python = ">=3.8"
+# dependencies = [
+#     "pyyaml>=6.0",
+# ]
+# ///
+
+"""
+This script retrieves versions from versioned doc packages built and stores them in stable.txt files.
+
+It should be run after building docs but before saving/uploading the build artifacts.
+"""
+
+from __future__ import annotations
+
+import os
+import re
+import shutil
+import sys
+from pathlib import Path
+
+import yaml
+
+
+def get_airflow_version(airflow_root: Path) -> str | None:
+    """Get Airflow version from airflow/__init__.py."""
+    # Try Airflow 3.x location first
+    init_file = airflow_root / "airflow-core" / "src" / "airflow" / "__init__.py"
+    if not init_file.exists():
+        # Fallback to Airflow 2.x location
+        init_file = airflow_root / "airflow" / "__init__.py"
+
+    if not init_file.exists():
+        return None
+
+    content = init_file.read_text()
+    match = re.search(r'^__version__\s*=\s*["\']([^"\']+)["\']', content, re.MULTILINE)
+    if match:
+        return match.group(1)
+    return None
+
+
+def get_version_from_provider_yaml(provider_yaml_path: Path) -> str | None:
+    """Get version from provider.yaml file (first version in the versions list)."""
+    if not provider_yaml_path.exists():
+        return None
+
+    try:
+        with open(provider_yaml_path) as f:
+            data = yaml.safe_load(f)
+            if "versions" in data and len(data["versions"]) > 0:
+                # versions is a list of version strings, get the first one
+                return str(data["versions"][0])
+    except Exception:
+        pass
+    return None
+
+
+def get_version_from_pyproject_toml(pyproject_path: Path) -> str | None:
+    """Get version from pyproject.toml file."""
+    if not pyproject_path.exists():
+        return None
+
+    content = pyproject_path.read_text()
+    match = re.search(r'^version\s*=\s*["\']([^"\']+)["\']', content, re.MULTILINE)
+    if match:
+        return match.group(1)
+    return None
+
+
+def get_helm_chart_version(chart_yaml_path: Path) -> str | None:
+    """Get version from Chart.yaml file."""
+    if not chart_yaml_path.exists():
+        return None
+
+    content = chart_yaml_path.read_text()
+    match = re.search(r"^version:\s*(.+)$", content, re.MULTILINE)
+    if match:
+        return match.group(1).strip()
+    return None
+
+
+def get_package_version(package_name: str, airflow_root: Path) -> str | None:
+    """Get version for a package based on its type and metadata location."""
+    if package_name == "apache-airflow":
+        return get_airflow_version(airflow_root)
+
+    if package_name == "apache-airflow-ctl":
+        # Try provider.yaml first
+        provider_yaml = airflow_root / "airflow-ctl" / "src" / "airflow_ctl" / "provider.yaml"
+        version = get_version_from_provider_yaml(provider_yaml)
+        if version:
+            return version
+        # Fallback to pyproject.toml
+        pyproject = airflow_root / "airflow-ctl" / "pyproject.toml"
+        return get_version_from_pyproject_toml(pyproject)
+
+    if package_name == "task-sdk":
+        # Try provider.yaml first
+        provider_yaml = airflow_root / "task-sdk" / "src" / "task_sdk" / "provider.yaml"
+        version = get_version_from_provider_yaml(provider_yaml)
+        if version:
+            return version
+        # Fallback to pyproject.toml
+        pyproject = airflow_root / "task-sdk" / "pyproject.toml"
+        return get_version_from_pyproject_toml(pyproject)
+
+    if package_name == "helm-chart":
+        chart_yaml = airflow_root / "chart" / "Chart.yaml"
+        return get_helm_chart_version(chart_yaml)
+
+    if package_name.startswith("apache-airflow-providers-"):
+        # Get provider version from provider.yaml
+        provider_short_name = package_name.replace("apache-airflow-providers-", "").replace("-", "/")
+
+        # Try Airflow 3.x location first (providers/{provider}/provider.yaml)
+        provider_yaml = airflow_root / "providers" / provider_short_name / "provider.yaml"
+        version = get_version_from_provider_yaml(provider_yaml)
+        if version:
+            return version
+
+        # Fallback to Airflow 2.x location (airflow/providers/{provider}/provider.yaml)
+        provider_yaml = airflow_root / "airflow" / "providers" / provider_short_name / "provider.yaml"
+        return get_version_from_provider_yaml(provider_yaml)
+
+    print(f"Unknown package type: {package_name}")
+    return None
+
+
+def main() -> int:
+    """Main function to process all documentation packages."""
+    # Get configuration from environment or defaults
+    docs_build_dir = Path(os.environ.get("DOCS_BUILD_DIR", "generated/_build/docs"))
+    airflow_root = Path(os.environ.get("AIRFLOW_ROOT", os.getcwd()))
+
+    # Change to airflow root directory
+    os.chdir(airflow_root)
+
+    print("=" * 42)
+    print("Storing stable versions for built docs")
+    print("=" * 42)
+
+    # Check if docs build directory exists
+    if not docs_build_dir.exists():
+        print(f"Error: Docs build directory not found at {docs_build_dir}")
+        # Try alternate location for Airflow 2 compatibility
+        alt_docs_dir = Path("docs/_build/docs")
+        if alt_docs_dir.exists():
+            docs_build_dir = alt_docs_dir
+            print(f"Found alternate location at {docs_build_dir}")
+        else:
+            print("No docs build directory found, exiting")
+            return 1
+
+    # Non-versioned packages to skip
+    non_versioned_packages = {"apache-airflow-providers", "docker-stack"}
+
+    stable_files_created = []
+
+    # Process each package in the docs build directory
+    for package_dir in sorted(docs_build_dir.iterdir()):
+        if not package_dir.is_dir():
+            continue
+
+        package_name = package_dir.name
+
+        # Skip non-versioned packages
+        if package_name in non_versioned_packages:
+            print(f"Skipping non-versioned package: {package_name}")
+            continue
+
+        # Check if this package has a stable directory (indicating it's versioned)
+        stable_dir = package_dir / "stable"
+        if not stable_dir.exists() or not stable_dir.is_dir():
+            print(f"Skipping non-versioned package (no stable dir): {package_name}")
+            continue
+
+        print(f"Processing versioned package: {package_name}")
+
+        # Get the version for this package
+        version = get_package_version(package_name, airflow_root)
+
+        if not version:
+            print(f"  Warning: Could not determine version for {package_name}, skipping")
+            continue
+
+        print(f"  Version: {version}")
+
+        # Create stable.txt file
+        stable_file = package_dir / "stable.txt"
+        stable_file.write_text(version + "\n")
+        print(f"  Created: {stable_file}")
+        stable_files_created.append((package_name, version))
+
+        # Also create a version-specific copy of the stable docs
+        version_dir = package_dir / version
+        if not version_dir.exists():
+            print(f"  Copying stable docs to versioned directory: {version_dir}")
+            shutil.copytree(stable_dir, version_dir)
+        else:
+            print(f"  Version directory already exists: {version_dir}")
+
+    print()
+    print("=" * 42)
+    print("Stable version files created successfully")
+    print("=" * 42)
+    print()
+
+    if stable_files_created:
+        print("Summary of stable.txt files:")
+        for package_name, version in stable_files_created:
+            print(f"  {package_name}: {version}")
+    else:
+        print("No stable.txt files created")
+
+    print()
+    print("Done!")
+
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
