https://gcc.gnu.org/g:c070cfb7b4f7850ace889725b4f788f4ae769840

commit r16-4908-gc070cfb7b4f7850ace889725b4f788f4ae769840
Author: Richard Biener <[email protected]>
Date:   Fri Oct 31 08:57:39 2025 +0100

    tree-optimization/122502 - avoid folding during imm use walk with active ranger
    
    The following works around an unfortunate interaction with ranger
    and immediate use walking.  An actual solution needs more thought.
    
    	PR tree-optimization/122502
    	* tree-scalar-evolution.cc (final_value_replacement_loop):
    	Avoid folding from within FOR_EACH_IMM_USE_STMT due to
    	active ranger.
    
    	* gcc.dg/torture/pr122502.c: New testcase.

Diff:
---
 gcc/testsuite/gcc.dg/torture/pr122502.c | 21 +++++++++++++++++++++
 gcc/tree-scalar-evolution.cc            | 10 ++++++++--
 2 files changed, 29 insertions(+), 2 deletions(-)

diff --git a/gcc/testsuite/gcc.dg/torture/pr122502.c b/gcc/testsuite/gcc.dg/torture/pr122502.c
new file mode 100644
index 000000000000..5e2cb2e8163f
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr122502.c
@@ -0,0 +1,21 @@
+/* { dg-do compile } */
+
+short int *ts;
+
+void
+c2 (unsigned long long int s4, int ns)
+{
+  short int *b2 = (short int *)&ns;
+
+  while (ns != 0)
+    {
+      int xn;
+
+      for (xn = 0; xn < 3; ++xn)
+        for (*b2 = 0; *b2 < 2; ++*b2)
+          s4 += xn;
+      if (s4 != 0)
+        b2 = ts;
+      ++ns;
+    }
+}
diff --git a/gcc/tree-scalar-evolution.cc b/gcc/tree-scalar-evolution.cc
index 7907893b916c..9f82abc4b81f 100644
--- a/gcc/tree-scalar-evolution.cc
+++ b/gcc/tree-scalar-evolution.cc
@@ -3995,11 +3995,17 @@ final_value_replacement_loop (class loop *loop)
 	{
 	  gimple *use_stmt;
 	  imm_use_iterator imm_iter;
+	  auto_vec<gimple *, 4> to_fold;
 	  FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, rslt)
+	    if (!stmt_can_throw_internal (cfun, use_stmt))
+	      to_fold.safe_push (use_stmt);
+	  /* Delay folding until after the immediate use walk is completed
+	     as we have an active ranger and that might walk immediate
+	     uses of rslt again.  See PR122502.  */
+	  for (gimple *use_stmt : to_fold)
 	    {
 	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
-	      if (!stmt_can_throw_internal (cfun, use_stmt)
-		  && fold_stmt (&gsi, follow_all_ssa_edges))
+	      if (fold_stmt (&gsi, follow_all_ssa_edges))
 		update_stmt (gsi_stmt (gsi));
 	    }
 	}
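
Editor's note: the patch follows a collect-then-process discipline: record the
interesting uses while walking the immediate-use list, and only call fold_stmt
after the walk has finished, because folding with an active ranger may itself
walk the immediate uses of rslt.  Below is a minimal standalone sketch of that
discipline, not GCC code; the names Use, uses and process_use are hypothetical
stand-ins for the immediate-use chain, FOR_EACH_IMM_USE_STMT and fold_stmt.

    // Sketch only: collect candidates first, mutate/re-walk afterwards.
    #include <vector>
    #include <iostream>

    struct Use { int id; bool can_throw; };

    // Hypothetical use list; in the patch this is the immediate-use chain
    // of rslt.
    static std::vector<Use> uses = { {1, false}, {2, true}, {3, false} };

    // Processing re-walks 'uses' (as ranger may re-walk immediate uses),
    // so it must not run while an outer walk over 'uses' is still active.
    static void
    process_use (const Use &u)
    {
      for (const Use &other : uses)   // re-entrant walk of the same list
        (void) other;
      std::cout << "processed use " << u.id << "\n";
    }

    int
    main ()
    {
      // First pass: only record the uses that qualify (mirrors the
      // !stmt_can_throw_internal filter done inside the imm-use walk).
      std::vector<Use> to_process;
      for (const Use &u : uses)
        if (!u.can_throw)
          to_process.push_back (u);

      // Second pass: process after the walk has finished, so the
      // re-entrant walk inside process_use cannot disturb an
      // in-progress iteration.
      for (const Use &u : to_process)
        process_use (u);
    }

The auto_vec<gimple *, 4> in the actual patch serves the same role as
to_process here: a small scratch buffer that decouples the two phases.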
