This is an automated email from the ASF dual-hosted git repository.
mbutrovich pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git
The following commit(s) were added to refs/heads/main by this push:
new dcf9f090c minor: Combine two CI workflows for Spark SQL tests (#2727)
dcf9f090c is described below
commit dcf9f090cdabba5704805515b167f583d0a26601
Author: Andy Grove <[email protected]>
AuthorDate: Fri Nov 7 06:56:24 2025 -0700
minor: Combine two CI workflows for Spark SQL tests (#2727)
---
.github/workflows/spark_sql_test.yml | 9 ++--
.github/workflows/spark_sql_test_ansi.yml | 79 -------------------------------
2 files changed, 4 insertions(+), 84 deletions(-)
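For context, a minimal sketch (assumed shape, names taken from the diff below) of the combined matrix this commit produces: the separate ANSI workflow's Spark 4.0 / Java 17 job is folded into the main workflow by carrying the JDK inside each spark-version entry instead of a standalone java-version axis.

    strategy:
      matrix:
        os: [ubuntu-24.04]
        # JDK is pinned per Spark version, so Spark 4.0 can use Java 17
        # while 3.4/3.5 stay on Java 11 (sketch, abridged from the diff).
        spark-version:
          - {short: '3.4', full: '3.4.3', java: 11}
          - {short: '3.5', full: '3.5.7', java: 11}
          - {short: '4.0', full: '4.0.1', java: 17}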
diff --git a/.github/workflows/spark_sql_test.yml b/.github/workflows/spark_sql_test.yml
index e5d177a76..13a06884c 100644
--- a/.github/workflows/spark_sql_test.yml
+++ b/.github/workflows/spark_sql_test.yml
@@ -50,8 +50,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-24.04]
- java-version: [11]
- spark-version: [{short: '3.4', full: '3.4.3'}, {short: '3.5', full: '3.5.7'}]
+ spark-version: [{short: '3.4', full: '3.4.3', java: 11}, {short: '3.5', full: '3.5.7', java: 11}, {short: '4.0', full: '4.0.1', java: 17}]
module:
- {name: "catalyst", args1: "catalyst/test", args2: ""}
- {name: "sql_core-1", args1: "", args2: sql/testOnly * -- -l
org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest}
@@ -61,7 +60,7 @@ jobs:
- {name: "sql_hive-2", args1: "", args2: "hive/testOnly * -- -n
org.apache.spark.tags.ExtendedHiveTest"}
- {name: "sql_hive-3", args1: "", args2: "hive/testOnly * -- -n
org.apache.spark.tags.SlowHiveTest"}
fail-fast: false
- name: spark-sql-${{ matrix.module.name }}/${{ matrix.os }}/spark-${{ matrix.spark-version.full }}/java-${{ matrix.java-version }}
+ name: spark-sql-${{ matrix.module.name }}/${{ matrix.os }}/spark-${{ matrix.spark-version.full }}/java-${{ matrix.spark-version.java }}
runs-on: ${{ matrix.os }}
container:
image: amd64/rust
@@ -71,7 +70,7 @@ jobs:
uses: ./.github/actions/setup-builder
with:
rust-version: ${{env.RUST_VERSION}}
- jdk-version: ${{ matrix.java-version }}
+ jdk-version: ${{ matrix.spark-version.java }}
- name: Setup Spark
uses: ./.github/actions/setup-spark-builder
with:
@@ -92,7 +91,7 @@ jobs:
if: ${{ github.event.inputs.collect-fallback-logs == 'true' }}
uses: actions/upload-artifact@v5
with:
- name: fallback-log-spark-sql-${{ matrix.module.name }}-${{ matrix.os }}-spark-${{ matrix.spark-version.full }}-java-${{ matrix.java-version }}
+ name: fallback-log-spark-sql-${{ matrix.module.name }}-${{ matrix.os }}-spark-${{ matrix.spark-version.full }}-java-${{ matrix.spark-version.java }}
path: "**/fallback.log"
spark-sql-native-native-comet:
diff --git a/.github/workflows/spark_sql_test_ansi.yml b/.github/workflows/spark_sql_test_ansi.yml
deleted file mode 100644
index b97cd7841..000000000
--- a/.github/workflows/spark_sql_test_ansi.yml
+++ /dev/null
@@ -1,79 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: Spark SQL Tests (ANSI mode)
-
-concurrency:
- group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
- cancel-in-progress: true
-
-on:
- push:
- paths-ignore:
- - "docs/**"
- - "**.md"
- pull_request:
- paths-ignore:
- - "docs/**"
- - "**.md"
- # manual trigger
- # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
- workflow_dispatch:
-
-env:
- RUST_VERSION: stable
-
-jobs:
- spark-sql-catalyst:
- strategy:
- matrix:
- os: [ubuntu-24.04]
- java-version: [17]
- spark-version: [{short: '4.0', full: '4.0.1'}]
- module:
- - {name: "catalyst", args1: "catalyst/test", args2: ""}
- - {name: "sql/core-1", args1: "", args2: sql/testOnly * -- -l
org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest}
- - {name: "sql/core-2", args1: "", args2: "sql/testOnly * -- -n
org.apache.spark.tags.ExtendedSQLTest"}
- - {name: "sql/core-3", args1: "", args2: "sql/testOnly * -- -n
org.apache.spark.tags.SlowSQLTest"}
- - {name: "sql/hive-1", args1: "", args2: "hive/testOnly * -- -l
org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest"}
- - {name: "sql/hive-2", args1: "", args2: "hive/testOnly * -- -n
org.apache.spark.tags.ExtendedHiveTest"}
- - {name: "sql/hive-3", args1: "", args2: "hive/testOnly * -- -n
org.apache.spark.tags.SlowHiveTest"}
- fail-fast: false
- name: spark-sql-${{ matrix.module.name }}/${{ matrix.os }}/spark-${{ matrix.spark-version.full }}/java-${{ matrix.java-version }}
- runs-on: ${{ matrix.os }}
- container:
- image: amd64/rust
- steps:
- - uses: actions/checkout@v5
- - name: Setup Rust & Java toolchain
- uses: ./.github/actions/setup-builder
- with:
- rust-version: ${{env.RUST_VERSION}}
- jdk-version: ${{ matrix.java-version }}
- - name: Setup Spark
- uses: ./.github/actions/setup-spark-builder
- with:
- spark-version: ${{ matrix.spark-version.full }}
- spark-short-version: ${{ matrix.spark-version.short }}
- - name: Run Spark tests
- run: |
- cd apache-spark
- rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
- RUST_BACKTRACE=1 ENABLE_COMET=true ENABLE_COMET_ONHEAP=true ENABLE_COMET_ANSI_MODE=true build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
- env:
- LC_ALL: "C.UTF-8"
-
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]