This is an automated email from the ASF dual-hosted git repository.

jonnybot pushed a commit to branch INDY-PERF-EXPLORATION
in repository https://gitbox.apache.org/repos/asf/groovy.git

commit 5774a55ed47d6caf73fa0080f43190abdffbbc9a
Author: Jonny Carter <[email protected]>
AuthorDate: Thu Feb 19 16:24:57 2026 -0600

    Matrix benchmarks
---
 .github/benchmark-groups.json            |  34 +++++
 .github/workflows/groovy-performance.yml | 239 +++++++++++++++++++++++--------
 2 files changed, 213 insertions(+), 60 deletions(-)

diff --git a/.github/benchmark-groups.json b/.github/benchmark-groups.json
new file mode 100644
index 0000000000..cd95eae5a3
--- /dev/null
+++ b/.github/benchmark-groups.json
@@ -0,0 +1,34 @@
+[
+  {
+    "group": "dispatch",
+    "pattern": ".*bench\\.dispatch\\..*"
+  },
+  {
+    "group": "grailslike",
+    "pattern": ".*bench\\.grailslike\\..*"
+  },
+  {
+    "group": "indy",
+    "pattern": ".*bench\\.indy\\..*"
+  },
+  {
+    "group": "memory",
+    "pattern": ".*bench\\.memory\\..*"
+  },
+  {
+    "group": "orm",
+    "pattern": ".*bench\\.orm\\..*"
+  },
+  {
+    "group": "profiling",
+    "pattern": ".*bench\\.profiling\\..*"
+  },
+  {
+    "group": "core",
+    "pattern": ".*(Ackermann|Ary|Fibo|GeneratedHashCode)Bench.*"
+  },
+  {
+    "group": "plugin",
+    "pattern": ".*plugin\\..*"
+  }
+]
diff --git a/.github/workflows/groovy-performance.yml b/.github/workflows/groovy-performance.yml
index d473937a25..d32641d5b3 100644
--- a/.github/workflows/groovy-performance.yml
+++ b/.github/workflows/groovy-performance.yml
@@ -20,7 +20,7 @@ on:
   workflow_dispatch:
     inputs:
       benchmark_filter:
-        description: 'Benchmark filter pattern (e.g., ColdCall, Memory, RequestLifecycle)'
+        description: 'Benchmark group to run (e.g., dispatch, orm). Leave empty for all groups.'
         required: false
         default: ''
       compare_baseline:
@@ -73,14 +73,14 @@ jobs:
           name: benchmark-smoke-results
           path: subprojects/performance/build/results/jmh/
 
-  # Full benchmark suite
-  performance-full:
+  # ============================================================================
+  # Full benchmark suite: build once, fan out into parallel matrix jobs
+  # ============================================================================
+
+  # Step 1: Build the JMH fat jar once
+  build-jmh-jar:
     if: github.event_name != 'pull_request'
     runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        mode: ['indy', 'noindy']
     steps:
       - uses: actions/checkout@v4
       - uses: actions/setup-java@v4
@@ -89,104 +89,223 @@ jobs:
           java-version: '17'
       - uses: gradle/actions/setup-gradle@v4
 
-      - name: Set benchmark filter
-        id: filter
-        run: |
-          if [ -n "${{ github.event.inputs.benchmark_filter }}" ]; then
-            echo "filter=-PbenchInclude=${{ github.event.inputs.benchmark_filter }}" >> $GITHUB_OUTPUT
-          else
-            echo "filter=" >> $GITHUB_OUTPUT
-          fi
-
-      - name: Run benchmarks (${{ matrix.mode }})
-        run: |
-          INDY_FLAG=${{ matrix.mode == 'indy' && 'true' || 'false' }}
-          ./gradlew -Pindy=$INDY_FLAG ${{ steps.filter.outputs.filter }} :performance:jmh
-        timeout-minutes: 120
+      - name: Build JMH fat jar
+        run: ./gradlew :performance:jmhJar
 
-      - name: Upload benchmark results
+      - name: Upload JMH jar
         uses: actions/upload-artifact@v4
         with:
-          name: benchmark-results-${{ matrix.mode }}
-          path: subprojects/performance/build/results/jmh/
+          name: jmh-jar
+          path: subprojects/performance/build/libs/*-jmh.jar
+          retention-days: 1
 
-  # Compare indy vs non-indy
-  performance-compare:
-    needs: performance-full
+  # Step 2: Read benchmark groups and output matrix JSON
+  discover-groups:
     if: github.event_name != 'pull_request'
     runs-on: ubuntu-latest
+    outputs:
+      matrix: ${{ steps.set-matrix.outputs.matrix }}
     steps:
       - uses: actions/checkout@v4
+
+      - name: Build matrix from benchmark groups
+        id: set-matrix
+        run: |
+          FILTER="${{ github.event.inputs.benchmark_filter }}"
+          if [ -n "$FILTER" ]; then
+            # Run only the requested group
+            MATRIX=$(jq -c --arg f "$FILTER" '[.[] | select(.group == $f)]' .github/benchmark-groups.json)
+          else
+            MATRIX=$(jq -c '.' .github/benchmark-groups.json)
+          fi
+          echo "matrix={\"include\":$(echo "$MATRIX" | jq -c '[.[] | {group: .group, pattern: .pattern, indy: true}] + [.[] | {group: .group, pattern: .pattern, indy: false}]')}" >> "$GITHUB_OUTPUT"
+
+  # Step 3: Run benchmarks in parallel (groups x indy modes)
+  benchmark-matrix:
+    needs: [build-jmh-jar, discover-groups]
+    if: github.event_name != 'pull_request'
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    strategy:
+      fail-fast: false
+      matrix: ${{ fromJson(needs.discover-groups.outputs.matrix) }}
+    steps:
       - uses: actions/setup-java@v4
         with:
           distribution: 'zulu'
           java-version: '17'
-      - uses: gradle/actions/setup-gradle@v4
 
-      - name: Download indy results
+      - name: Download JMH jar
         uses: actions/download-artifact@v4
         with:
-          name: benchmark-results-indy
-          path: subprojects/performance/build/results/jmh-compare/
+          name: jmh-jar
+          path: .
 
-      - name: Rename indy results
+      - name: Run benchmarks (${{ matrix.group }}, indy=${{ matrix.indy }})
         run: |
-          mv subprojects/performance/build/results/jmh-compare/results.txt \
-             subprojects/performance/build/results/jmh-compare/indy-results.txt
+          JAR=$(ls *-jmh.jar | head -1)
+          java \
+            -Dgroovy.target.indy=${{ matrix.indy }} \
+            -jar "$JAR" \
+            "${{ matrix.pattern }}" \
+            -f 1 -wi 1 -i 1 -r 2s -w 2s \
+            -rf json -rff results.json
+        timeout-minutes: 45
 
-      - name: Download non-indy results
+      - name: Upload results
+        uses: actions/upload-artifact@v4
+        if: always()
+        with:
+          name: bench-${{ matrix.group }}-indy-${{ matrix.indy }}
+          path: results.json
+          retention-days: 5
+
+  # Step 4: Collect all results and generate comparison report
+  collect-and-compare:
+    needs: benchmark-matrix
+    if: always() && github.event_name != 'pull_request'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Download all benchmark results
         uses: actions/download-artifact@v4
         with:
-          name: benchmark-results-noindy
-          path: subprojects/performance/build/results/jmh-compare/
+          pattern: bench-*
+          path: results/
+          merge-multiple: false
 
-      - name: Rename non-indy results
+      - name: Merge results and generate comparison
         run: |
-          mv subprojects/performance/build/results/jmh-compare/results.txt \
-             subprojects/performance/build/results/jmh-compare/noindy-results.txt
+          python3 << 'PYEOF'
+          import json, os, sys
+          from pathlib import Path
 
-      - name: Generate comparison report
-        run: |
-          ./gradlew :performance:jmhCompare --offline || true
+          indy_results = {}
+          noindy_results = {}
+
+          results_dir = Path("results")
+          for artifact_dir in sorted(results_dir.iterdir()):
+              if not artifact_dir.is_dir():
+                  continue
+              results_file = artifact_dir / "results.json"
+              if not results_file.exists():
+                  print(f"Warning: no results.json in {artifact_dir.name}", file=sys.stderr)
+                  continue
+
+              is_indy = "-indy-true" in artifact_dir.name
+              target = indy_results if is_indy else noindy_results
+
+              with open(results_file) as f:
+                  data = json.load(f)
+
+              for bench in data:
+                  name = bench.get("benchmark", "unknown")
+                  score = bench.get("primaryMetric", {}).get("score", 0)
+                  unit = bench.get("primaryMetric", {}).get("scoreUnit", "")
+                  error = bench.get("primaryMetric", {}).get("scoreError", 0)
+                  target[name] = {"score": score, "unit": unit, "error": error}
+
+          # Save merged results
+          with open("indy-results.json", "w") as f:
+              json.dump(indy_results, f, indent=2)
+          with open("noindy-results.json", "w") as f:
+              json.dump(noindy_results, f, indent=2)
+
+          # Generate comparison report
+          all_benchmarks = sorted(set(list(indy_results.keys()) + list(noindy_results.keys())))
+
+          lines = []
+          lines.append("## Performance Comparison: Indy vs Non-Indy")
+          lines.append("")
+          lines.append(f"| Benchmark | Indy | Non-Indy | Diff |")
+          lines.append("|-----------|------|----------|------|")
+
+          regressions = []
+          improvements = []
+
+          for name in all_benchmarks:
+              short_name = name.split(".")[-1] if "." in name else name
+              indy = indy_results.get(name)
+              noindy = noindy_results.get(name)
+
+              if indy and noindy and noindy["score"] > 0:
+                  diff_pct = ((indy["score"] - noindy["score"]) / noindy["score"]) * 100
+                  diff_str = f"{diff_pct:+.1f}%"
+                  if diff_pct > 10:
+                      diff_str += " :arrow_up:"
+                      improvements.append((short_name, diff_pct))
+                  elif diff_pct < -10:
+                      diff_str += " :arrow_down:"
+                      regressions.append((short_name, diff_pct))
+                  lines.append(f"| {short_name} | {indy['score']:.3f} {indy['unit']} | {noindy['score']:.3f} {noindy['unit']} | {diff_str} |")
+              elif indy:
+                  lines.append(f"| {short_name} | {indy['score']:.3f} {indy['unit']} | N/A | - |")
+              elif noindy:
+                  lines.append(f"| {short_name} | N/A | {noindy['score']:.3f} {noindy['unit']} | - |")
+
+          lines.append("")
+          lines.append(f"**Total benchmarks:** {len(all_benchmarks)} | "
+                       f"**Indy faster (>10%):** {len(improvements)} | "
+                       f"**Non-Indy faster (>10%):** {len(regressions)}")
+
+          report = "\n".join(lines)
+
+          with open("comparison-report.md", "w") as f:
+              f.write(report)
+
+          # Write to GitHub Step Summary
+          summary_path = os.environ.get("GITHUB_STEP_SUMMARY", "")
+          if summary_path:
+              with open(summary_path, "a") as f:
+                  f.write(report + "\n")
+
+          print(report)
+          PYEOF
 
       - name: Upload comparison report
         uses: actions/upload-artifact@v4
+        if: always()
         with:
           name: benchmark-comparison
-          path: subprojects/performance/build/results/jmh-compare/
-
-      - name: Display comparison summary
-        run: |
-          echo "## Performance Comparison: Indy vs Non-Indy" >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
-          if [ -f subprojects/performance/build/results/jmh-compare/comparison-report.txt ]; then
-            echo '```' >> $GITHUB_STEP_SUMMARY
-            cat subprojects/performance/build/results/jmh-compare/comparison-report.txt >> $GITHUB_STEP_SUMMARY
-            echo '```' >> $GITHUB_STEP_SUMMARY
-          fi
+          path: |
+            comparison-report.md
+            indy-results.json
+            noindy-results.json
+          retention-days: 30
 
-  # Memory-focused benchmarks
+  # Memory-focused benchmarks with GC profiler (parallel with main matrix)
   performance-memory:
+    needs: build-jmh-jar
     if: github.event_name != 'pull_request'
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v4
       - uses: actions/setup-java@v4
         with:
           distribution: 'zulu'
           java-version: '17'
-      - uses: gradle/actions/setup-gradle@v4
+
+      - name: Download JMH jar
+        uses: actions/download-artifact@v4
+        with:
+          name: jmh-jar
+          path: .
 
       - name: Run memory benchmarks with GC profiler
         run: |
-          ./gradlew -Pindy=true -PbenchInclude=Memory :performance:jmhGcProfile
-        timeout-minutes: 60
+          JAR=$(ls *-jmh.jar | head -1)
+          java \
+            -Dgroovy.target.indy=true \
+            -jar "$JAR" \
+            ".*bench\.memory\..*" \
+            -f 1 -wi 1 -i 1 -r 2s -w 2s \
+            -prof gc \
+            -rf json -rff gc-profile-results.json
 
       - name: Upload memory profile results
         uses: actions/upload-artifact@v4
         with:
           name: benchmark-memory-profile
-          path: subprojects/performance/build/results/jmh/
+          path: gc-profile-results.json
 
   # Threshold sweep analysis
   performance-threshold-sweep:

Reply via email to