https://gcc.gnu.org/g:2894493adf7434a83b9ead2617468e817b337126
commit r16-8110-g2894493adf7434a83b9ead2617468e817b337126 Author: Richard Biener <[email protected]> Date: Mon Mar 16 09:55:26 2026 +0100 tree-optimization/124528 - UB from SCEV SCEV has a long-standing issue in that it negates the addend of a subtraction within an SSA cycle without considering UB from negating the most negative value. The following tries to rectify this. PR tree-optimization/124528 * tree-scalar-evolution.cc (scev_dfs::add_to_evolution): Perform the negation in an unsigned type if we cannot make sure it will not cause UB. * gcc.dg/tree-ssa/scev-17.c: New testcase. Diff: --- gcc/testsuite/gcc.dg/tree-ssa/scev-17.c | 15 +++++++++++++++ gcc/tree-scalar-evolution.cc | 20 +++++++++++++++++--- 2 files changed, 32 insertions(+), 3 deletions(-) diff --git a/gcc/testsuite/gcc.dg/tree-ssa/scev-17.c b/gcc/testsuite/gcc.dg/tree-ssa/scev-17.c new file mode 100644 index 000000000000..4751a2b91d45 --- /dev/null +++ b/gcc/testsuite/gcc.dg/tree-ssa/scev-17.c @@ -0,0 +1,15 @@ +/* PR124528 */ +/* { dg-options "-Os -fdump-tree-sccp" } */ + +int baz (int n, int m) +{ + int r = 100; + while (n != 0) { + n = n - 1; + r = r - m; + } + return r; +} + +/* Make sure we negate m in unsigned. */ +/* { dg-final { scan-tree-dump "\\\(unsigned int\\\) m_.\\\(D\\\)" "sccp" } } */ diff --git a/gcc/tree-scalar-evolution.cc b/gcc/tree-scalar-evolution.cc index 223015c4a8df..e05a171c1b15 100644 --- a/gcc/tree-scalar-evolution.cc +++ b/gcc/tree-scalar-evolution.cc @@ -880,9 +880,23 @@ scev_dfs::add_to_evolution (tree chrec_before, enum tree_code code, } if (code == MINUS_EXPR) - to_add = chrec_fold_multiply (type, to_add, SCALAR_FLOAT_TYPE_P (type) ? 
build_real (type, dconstm1) - : build_int_cst_type (type, -1)); + { + if (INTEGRAL_TYPE_P (type) + && TYPE_OVERFLOW_UNDEFINED (type) + && !expr_not_equal_to (to_add, + wi::to_wide (TYPE_MIN_VALUE (type)))) + { + tree utype = unsigned_type_for (type); + to_add = chrec_convert_rhs (utype, to_add); + to_add = chrec_fold_multiply (utype, to_add, + build_int_cst_type (utype, -1)); + to_add = chrec_convert_rhs (type, to_add); + } + else + to_add = chrec_fold_multiply (type, to_add, SCALAR_FLOAT_TYPE_P (type) + ? build_real (type, dconstm1) + : build_int_cst_type (type, -1)); + } res = add_to_evolution_1 (chrec_before, to_add, at_stmt);
