This prevents the last_vuse optimization from hindering redundant store
elimination: besides last_vuse, the load is now also recorded under the
original VUSE that was in effect on the statement.
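To make the mechanism concrete, here is a stand-alone toy model of the
double insertion (record, lookup and the integer memory states are
invented for illustration; this is not the actual SCCVN hashtable or
its API):

#include <stdio.h>
#include <string.h>

/* Toy value table keyed by (reference, vuse).  */
struct entry { const char *ref; int vuse; int value; };
static struct entry table[16];
static int n_entries;

static void
record (const char *ref, int vuse, int value)
{
  table[n_entries].ref = ref;
  table[n_entries].vuse = vuse;
  table[n_entries].value = value;
  n_entries++;
}

static int
lookup (const char *ref, int vuse, int *value)
{
  for (int i = 0; i < n_entries; i++)
    if (strcmp (table[i].ref, ref) == 0 && table[i].vuse == vuse)
      {
        *value = table[i].value;
        return 1;
      }
  return 0;
}

int
main (void)
{
  /* The load 'footem = c' sits at memory state 2, but the alias walk
     finds state 1 equivalent for this reference, so last_vuse = 1.  */
  int vuse = 2, last_vuse = 1, loaded = 42;

  record ("c", last_vuse, loaded);   /* what was recorded before */
  if (last_vuse != vuse)
    record ("c", vuse, loaded);      /* the extra insertion of this patch */

  /* The redundant-store check for 'c = footem' queries with the
     store's own VUSE; without the second record this lookup misses
     and the store is kept.  */
  int val;
  if (lookup ("c", vuse, &val) && val == loaded)
    printf ("store of %d to c is redundant\n", val);
  return 0;
}

The real hashtable is keyed on the reference and its VUSE in the same
spirit; the patch simply makes the load visible under both memory
states, at the cost of one extra entry per affected load.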
Bootstrapped and tested on x86_64-unknown-linux-gnu.

I'm still pondering how to avoid the waste of adding the ref twice
and will at least record some statistics for this.

2021-09-27  Richard Biener  <rguent...@suse.de>

	PR tree-optimization/100112
	* tree-ssa-sccvn.c (visit_reference_op_load): Record the
	reference into the hashtable twice in case last_vuse is
	different from the original vuse on the stmt.

	* gcc.dg/tree-ssa/ssa-fre-95.c: New testcase.
---
 gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-95.c | 25 ++++++++++++++++++++++
 gcc/tree-ssa-sccvn.c                       | 17 +++++++++++----
 2 files changed, 38 insertions(+), 4 deletions(-)
 create mode 100644 gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-95.c

diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-95.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-95.c
new file mode 100644
index 00000000000..b0936be5e77
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-95.c
@@ -0,0 +1,25 @@
+/* PR100112 and dups.  */
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-fre1-details -fdump-tree-optimized" } */
+
+int *c, *b;
+void foo()
+{
+  int *tem = b;
+  *tem = 0;
+  int *footem = c;
+  c = footem;
+}
+
+void bar()
+{
+  int *tem = b;
+  int *bartem = c;
+  *tem = 0;
+  c = bartem;
+}
+
+/* We should elide the redundant store in foo; in bar it is not redundant
+   since the *tem = 0 store might alias.  */
+/* { dg-final { scan-tree-dump "Deleted redundant store c = footem" "fre1" } } */
+/* { dg-final { scan-tree-dump "c = bartem" "optimized" } } */
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index e8b1c39184d..416a5252144 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -5125,13 +5125,12 @@ static bool
 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
 {
   bool changed = false;
-  tree last_vuse;
   tree result;
   vn_reference_t res;
 
-  last_vuse = gimple_vuse (stmt);
-  result = vn_reference_lookup (op, gimple_vuse (stmt),
-				default_vn_walk_kind, &res, true, &last_vuse);
+  tree vuse = gimple_vuse (stmt);
+  tree last_vuse = vuse;
+  result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);
 
   /* We handle type-punning through unions by value-numbering based
      on offset and size of the access.  Be prepared to handle a
@@ -5174,6 +5173,16 @@ visit_reference_op_load (tree lhs, tree op, gimple *stmt)
     {
       changed = set_ssa_val_to (lhs, lhs);
       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
+      if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
+	{
+	  if (dump_file && (dump_flags & TDF_DETAILS))
+	    {
+	      fprintf (dump_file, "Using extra use virtual operand ");
+	      print_generic_expr (dump_file, last_vuse);
+	      fprintf (dump_file, "\n");
+	    }
+	  vn_reference_insert (op, lhs, vuse, NULL_TREE);
+	}
     }
 
   return changed;
-- 
2.31.1
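For reference, the effect should be observable by compiling the new
testcase with -O2 -fdump-tree-fre1-details: the fre1 dump is expected
to contain the new "Using extra use virtual operand" note for the load
in foo, followed by the "Deleted redundant store c = footem" line the
testcase scans for, while "c = bartem" stays in the optimized dump.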