This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch add-watermark-script
in repository https://gitbox.apache.org/repos/asf/airflow-site-archive.git
commit 3b2b94f1a134b6686b04ecfae7a3ece057df2054
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed May 14 20:50:40 2025 +0100

    Add watermark to processing scripts css files

    In all the flows where we publish files to the staging site we should add the watermark to the css
---
 .github/workflows/build.yml        |  9 ++----
 .github/workflows/github-to-s3.yml | 56 +++++++++++++++++++++++++-------------
 .github/workflows/s3-to-github.yml | 46 +++++++++++++++++++------------
 docs                               |  1 +
 4 files changed, 70 insertions(+), 42 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 5c7d3b3be1..5220ced030 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -16,10 +16,8 @@ jobs:
           sudo rm -rf /opt/ghc
           sudo rm -rf "/usr/local/share/boost"
           sudo rm -rf "$AGENT_TOOLSDIRECTORY"
-      - name: 🐍 Setup Python
-        uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0  # v4.6.1
-        with:
-          python-version: '3.9'
+      - name: "Install uv"
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} ) for scripts"
         uses: actions/checkout@v4
         # Checkout only workflow and scripts directory to run scripts from
@@ -29,6 +27,5 @@ jobs:
             scripts
       - name: 🔎 Lint
         run: |
-          pip install pre-commit
-          pip install pre-commit-uv
+          uv tool install pre-commit --with pre-commit-uv
           pre-commit run --all-files --color always
diff --git a/.github/workflows/github-to-s3.yml b/.github/workflows/github-to-s3.yml
index fc92366cb2..a8dcab16f9 100644
--- a/.github/workflows/github-to-s3.yml
+++ b/.github/workflows/github-to-s3.yml
@@ -20,14 +20,14 @@ name: Sync GitHub to S3
 on:  # yamllint disable-line rule:truthy
   workflow_dispatch:
     inputs:
-      destination-location:
+      destination:
         description: "The destination location in S3"
         required: true
         type: choice
         options:
-          - s3://live-docs-airflow-apache-org/docs/
-          - s3://staging-docs-airflow-apache-org/docs/
-        default: "s3://live-docs-airflow-apache-org/docs/"
+          - live
+          - staging
+        default: live
       sync-type:
         description: "Perform a full sync or just sync the last commit"
         required: false
@@ -57,17 +57,26 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Summarize parameters
+        id: parameters
+        env:
+          DOCUMENT_FOLDERS: ${{ inputs.document-folders }}
+          SYNC_TYPE: ${{ inputs.sync-type }}
+          COMMIT_SHA: ${{ inputs.commit-sha }}
+          PROCESSES: ${{ inputs.processes }}
+          DESTINATION: ${{ inputs.destination }}
         run: |
-          echo "Destination location: ${{ inputs.destination-location }}"
-          echo "Document folders: ${{ inputs.document-folders }}"
-          echo "Sync type: ${{ inputs.sync-type }}"
-          echo "Commit SHA: ${{ inputs.commit-sha }}"
-          echo "Processes: ${{ inputs.processes }}"
-
-      - name: Setup Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: '3.11'
+          echo "Input parameters summary"
+          echo "========================="
+          echo "Document folders: ${DOCUMENT_FOLDERS}"
+          echo "Sync type: ${SYNC_TYPE}"
+          echo "Commit SHA: ${COMMIT_SHA}"
+          echo "Processes: ${PROCESSES}"
+          echo "Destination: ${DESTINATION}"
+          if [[ "${DESTINATION}" == "live" ]]; then
+            echo "destination-location=s3://live-docs-airflow-apache-org/docs/" >> ${GITHUB_OUTPUT}
+          else
+            echo "destination-location=s3://staging-docs-airflow-apache-org/docs/" >> ${GITHUB_OUTPUT}
+          fi

       - uses: actions/checkout@v4
         # Checkout only workflow and scripts directory to run scripts from
@@ -151,7 +160,6 @@ jobs:
             ${{ steps.docs-folders-processed.outputs.sparse-checkout }}
           ref: ${{ inputs.commit-sha || github.sha }}
         if: steps.docs-folders-processed.outputs.docs-folders-processed != 'all'
-
       - name: >
           Checkout (${{ inputs.commit-sha || github.sha }}) to /mnt/cloned-airflow-site-archive (whole repo)
         uses: actions/checkout@v4
@@ -160,9 +168,20 @@ jobs:
           fetch-depth: 2
           ref: ${{ inputs.commit-sha || github.sha }}
         if: steps.docs-folders-processed.outputs.docs-folders-processed == 'all'
-
+      - name: "Install uv"
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
+      - name: Add staging watermarks
+        # yamllint disable rule:line-length
+        run: |
+          curl -sSf -o add_watermark.py https://raw.githubusercontent.com/apache/airflow-site/refs/heads/main/.github/scripts/add_watermark.py
+          chmod a+x add_watermark.py
+          mkdir -p images
+          curl -sSf -o images/staging.png https://raw.githubusercontent.com/apache/airflow-site/refs/heads/main/.github/scripts/images/staging.png
+          uv run ./add_watermark.py --pattern 'main.min.*.css' --folder docs-archive \
+            --image-directory images --url-prefix /docs/images
+        if: inputs.destination == 'staging'
       - name: >
-          Syncing ${{ inputs.commit-sha || github.sha }}: ${{ inputs.destination-location }}:
+          Syncing ${{ inputs.commit-sha || github.sha }}: ${{ inputs.destination }}:
           ${{ inputs.sync-type }} ${{ steps.docs-folders-processed.outputs.docs-folders-processed }}
           with parallel aws cli methods = ${{ inputs.processes }}
         env:
@@ -170,7 +189,7 @@ jobs:
           SYNC_TYPE: ${{ inputs.sync-type }}
           PROCESSES: ${{ inputs.processes }}
           DOCUMENTS_FOLDERS: ${{ steps.docs-folders-processed.outputs.docs-folders-processed }}
-          DESTINATION_LOCATION: ${{ inputs.destination-location }}
+          DESTINATION_LOCATION: ${{ steps.parameters.outputs.destination-location }}
         run: |
           # show what's being run
           set -x
@@ -180,7 +199,6 @@ jobs:
             echo "Syncing whole repo"
           fi
           ls -la /mnt/cloned-airflow-site-archive/*
-          python3 -m pip install uv
           # we run inputs.processes aws cli commands - each command uploading files in parallel
           # that seems to be the fastest way to upload files to S3
           aws configure set default.s3.max_concurrent_requests 10
diff --git a/.github/workflows/s3-to-github.yml b/.github/workflows/s3-to-github.yml
index 0db534aad5..9440212aec 100644
--- a/.github/workflows/s3-to-github.yml
+++ b/.github/workflows/s3-to-github.yml
@@ -20,14 +20,14 @@ name: Sync S3 to GitHub
 on:  # yamllint disable-line rule:truthy
   workflow_dispatch:
     inputs:
-      source-location:
+      source:
         description: "The source location in S3"
         required: true
         type: choice
         options:
-          - s3://live-docs-airflow-apache-org/docs/
-          - s3://staging-docs-airflow-apache-org/docs/
-        default: "s3://live-docs-airflow-apache-org/docs/"
+          - live
+          - staging
+        default: live
       document-folders:
         description: "Document folders to sync or short package ids (separated with spaces)"
         required: false
@@ -47,18 +47,29 @@ jobs:
   s3-to-github:
     name: S3 to GitHub
     runs-on: ubuntu-latest
+    env:
+      SOURCE: ${{ inputs.source }}
+      DOCUMENT_FOLDERS: ${{ inputs.document-folders }}
+      COMMIT_CHANGES: ${{ inputs.commit-changes }}
+      PROCESSES: ${{ inputs.processes }}
     steps:
       - name: Summarize parameters
+        id: parameters
        run: |
-          echo "Source location: ${{ inputs.source-location }}"
-          echo "Document folders: ${{ inputs.document-folders }}"
-          echo "Commit changes: ${{ inputs.commit-changes }}"
-          echo "Processes: ${{ inputs.processes }}"
+          echo "Input parameters summary"
+          echo "========================="
+          echo "Source: ${SOURCE}"
+          echo "Document folders: ${DOCUMENT_FOLDERS}"
+          echo "Commit changes: ${COMMIT_CHANGES}"
+          echo "Processes: ${PROCESSES}"
+          if [[ "${SOURCE}" == "live" ]]; then
+            echo "source-location=s3://live-docs-airflow-apache-org/docs/" >> ${GITHUB_OUTPUT}
+          else
+            echo "source-location=s3://staging-docs-airflow-apache-org/docs/" >> ${GITHUB_OUTPUT}
+          fi

-      - name: Setup Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: '3.11'
+      - name: "Install uv"
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh

       - name: Install AWS CLI v2
         run: |
@@ -134,18 +145,19 @@ jobs:
       - name: "Check space available"
         run: df -h

-      - name: Syncing ${{inputs.source-location}} (${{ inputs.document-folders }})
+      - name: Syncing ${{ inputs.source }} (${{ inputs.document-folders }})
         env:
           PROCESSES: ${{ inputs.processes }}
+          DOCUMENTS_FOLDERS: ${{ inputs.document-folders }}
+          SOURCE_LOCATION: ${{ steps.parameters.outputs.source-location }}
         run: |
           set -x
-          python3 -m pip install uv
           aws configure set default.s3.max_concurrent_requests 10
           uv run ./scripts/s3_to_github.py \
-            --bucket-path ${{inputs.source-location}} \
+            --bucket-path ${SOURCE_LOCATION} \
             --local-path ./docs-archive \
-            --document-folders "${{inputs.document-folders}}" \
-            --processes "${{inputs.processes}}"
+            --document-folders "${DOCUMENT_FOLDERS}" \
+            --processes "${PROCESSES}"
         working-directory: /mnt/cloned-airflow-site-archive

       - name: Show files that will be committed
diff --git a/docs b/docs
new file mode 120000
index 0000000000..3067c23fe0
--- /dev/null
+++ b/docs
@@ -0,0 +1 @@
+docs-archive
\ No newline at end of file
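
The new "Add staging watermarks" step above fetches add_watermark.py from the apache/airflow-site repository and runs it over the generated main.min.*.css files before they are synced to the staging bucket, so staging pages are visibly marked. The script itself is not part of this commit; the following is only a rough sketch of what a CSS watermarking pass taking those parameters might look like, assuming it simply appends a background-image rule pointing at the uploaded staging image (the helper and the exact CSS rule below are illustrative assumptions, not the real implementation):

# Hypothetical sketch only -- the real add_watermark.py lives in apache/airflow-site.
# Appends a watermark background-image rule to every CSS file matching a glob pattern.
import argparse
from pathlib import Path

def add_watermark(folder: str, pattern: str, image_directory: str, url_prefix: str) -> None:
    # Assumption: a single image (e.g. staging.png) in image_directory is the watermark.
    image_name = next(Path(image_directory).glob("*.png")).name
    rule = (
        f"\nbody {{ background-image: url('{url_prefix}/{image_name}');"
        " background-repeat: repeat; }\n"
    )
    for css_file in Path(folder).rglob(pattern):
        text = css_file.read_text()
        if url_prefix not in text:  # avoid stamping the same file twice
            css_file.write_text(text + rule)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--pattern", required=True)
    parser.add_argument("--folder", required=True)
    parser.add_argument("--image-directory", required=True)
    parser.add_argument("--url-prefix", required=True)
    args = parser.parse_args()
    add_watermark(args.folder, args.pattern, args.image_directory, args.url_prefix)

In the workflow the step is guarded by `if: inputs.destination == 'staging'`, so CSS published to the live bucket is never modified.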