On Sat, Aug 16, 2014 at 11:21 PM, Marc Glisse <marc.gli...@inria.fr> wrote:
> Hello,
>
> here is a patch extending DSE to handle calls to memset, memcpy and memmove.
> A number of alias functions didn't have a version taking an ao_ref*, so I
> added those. Instead of naming things _1, _2, etc., I took advantage of C++
> and overloaded them. I kept ref_maybe_used_by_call_p so we are still updating
> the stats, but I am not sure how reliable they are; for instance, there are a
> number of direct calls to refs_may_alias_p_1 that skip the stats in
> refs_may_alias_p (so I didn't add counters to the new refs_may_alias_p
> overload).
>
> The piece of code I removed from dse_optimize_stmt was dead: the test has
> already been done in dse_possible_dead_store_p (and I ran
> bootstrap+testsuite with an assertion to confirm it).
>
> I only listed 3 builtins, but it should be easy to add more. The condition
> about the third argument is unnecessary for now, but it is copied from
> tree-ssa-alias.c to make it easier to add more builtins like strcpy.
>
> Bootstrap+testsuite (really all languages) on x86_64-linux-gnu.

Comments below

> 2014-08-18  Marc Glisse  <marc.gli...@inria.fr>
>
>         PR tree-optimization/62112
> gcc/
>         * tree-ssa-alias.c (ref_may_alias_global_p, refs_may_alias_p,
>         ref_maybe_used_by_stmt_p): New overloads.
>         (ref_maybe_used_by_call_p): Take ao_ref* instead of tree.
>         (stmt_kills_ref_p_1): Rename...
>         (stmt_kills_ref_p): ... to this.
>         * tree-ssa-alias.h (ref_may_alias_global_p, ref_maybe_used_by_stmt_p,
>         stmt_kills_ref_p): Declare.
>         * tree-ssa-dse.c (dse_possible_dead_store_p): New argument, use it.
>         Move the self-assignment case...
>         (dse_optimize_stmt): ... here. Handle builtin calls. Remove dead
>         code.
> gcc/testsuite/
>         * gcc.dg/tree-ssa/pr62112-1.c: New file.
>         * gcc.dg/tree-ssa/pr62112-2.c: Likewise.
>
> --
> Marc Glisse
> Index: gcc/testsuite/gcc.dg/tree-ssa/pr62112-1.c
> ===================================================================
> --- gcc/testsuite/gcc.dg/tree-ssa/pr62112-1.c   (revision 0)
> +++ gcc/testsuite/gcc.dg/tree-ssa/pr62112-1.c   (working copy)
> @@ -0,0 +1,23 @@
> +/* { dg-do compile } */
> +/* { dg-options "-O1 -fdump-tree-dse1-details" } */
> +
> +void f(){
> +  char*p=__builtin_malloc(42);
> +  __builtin_memset(p,3,10);
> +  __builtin_memset(p,7,33);
> +}
> +char*g;
> +void h(){
> +  char*p=__builtin_malloc(42);
> +  g=__builtin_memset(p,3,10);
> +  __builtin_free(p);
> +}
> +char*i(){
> +  char*p=__builtin_malloc(42);
> +  __builtin_memset(p,3,10);
> +  __builtin_memset(p,7,33);
> +  return p;
> +}
> +
> +/* { dg-final { scan-tree-dump-times "Deleted dead store" 4 "dse1" } } */
> +/* { dg-final { cleanup-tree-dump "dse1" } } */
> Index: gcc/testsuite/gcc.dg/tree-ssa/pr62112-2.c
> ===================================================================
> --- gcc/testsuite/gcc.dg/tree-ssa/pr62112-2.c   (revision 0)
> +++ gcc/testsuite/gcc.dg/tree-ssa/pr62112-2.c   (working copy)
> @@ -0,0 +1,17 @@
> +/* { dg-do compile } */
> +/* { dg-options "-O1 -fdump-tree-dse1-details" } */
> +
> +char*g;
> +char* f(){
> +  char*p=__builtin_malloc(42);
> +  __builtin_memset(p,3,33);
> +  __builtin_memset(p,7,10);
> +  return p;
> +}
> +void h(){
> +  char*p=__builtin_malloc(42);
> +  g=__builtin_memset(p,3,10);
> +}
> +
> +/* { dg-final { scan-tree-dump-not "Deleted dead store" "dse1" } } */
> +/* { dg-final { cleanup-tree-dump "dse1" } } */
> Index: gcc/tree-ssa-alias.c
> ===================================================================
> --- gcc/tree-ssa-alias.c        (revision 214066)
> +++ gcc/tree-ssa-alias.c        (working copy)
> @@ -326,31 +326,39 @@ ptr_deref_may_alias_ref_p_1 (tree ptr, a
>      return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
>    else if (DECL_P (base))
>      return ptr_deref_may_alias_decl_p (ptr, base);
>
>    return true;
>  }
>
>  /* Return true whether REF may refer to global memory.  */
>
>  bool
> -ref_may_alias_global_p (tree ref)
> +ref_may_alias_global_p (ao_ref *ref)
>  {
> -  tree base = get_base_address (ref);
> +  tree base = ao_ref_base (ref);
>    if (DECL_P (base))
>      return is_global_var (base);
>    else if (TREE_CODE (base) == MEM_REF
>            || TREE_CODE (base) == TARGET_MEM_REF)
>      return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
>    return true;
>  }
>
> +bool
> +ref_may_alias_global_p (tree ref)
> +{
> +  ao_ref r;
> +  ao_ref_init (&r, ref);
> +  return ref_may_alias_global_p (&r);
> +}
> +

I'd do two wrappers then, since get_base_address is way cheaper
than doing ao_ref_base.  Thus, split out the code doing the
actual job into a static ref_may_alias_global_1?
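
Something along these lines is what I mean (untested sketch; the _1 name
and exact placement are only a suggestion):

static bool
ref_may_alias_global_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
           || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  /* The ao_ref overload can use the (cached) ao_ref_base.  */
  return ref_may_alias_global_1 (ao_ref_base (ref));
}

bool
ref_may_alias_global_p (tree ref)
{
  /* The tree overload keeps using the cheaper get_base_address.  */
  return ref_may_alias_global_1 (get_base_address (ref));
}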

>  /* Return true whether STMT may clobber global memory.  */
>
>  bool
>  stmt_may_clobber_global_p (gimple stmt)
>  {
>    tree lhs;
>
>    if (!gimple_vdef (stmt))
>      return false;
>
> @@ -1406,20 +1414,28 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref
>                                       tbaa_p);
>
>    /* We really do not want to end up here, but returning true is safe.  */
>  #ifdef ENABLE_CHECKING
>    gcc_unreachable ();
>  #else
>    return true;
>  #endif
>  }
>
> +static bool
> +refs_may_alias_p (tree ref1, ao_ref *ref2)
> +{
> +  ao_ref r1;
> +  ao_ref_init (&r1, ref1);
> +  return refs_may_alias_p_1 (&r1, ref2, true);
> +}
> +
>  bool
>  refs_may_alias_p (tree ref1, tree ref2)
>  {
>    ao_ref r1, r2;
>    bool res;
>    ao_ref_init (&r1, ref1);
>    ao_ref_init (&r2, ref2);
>    res = refs_may_alias_p_1 (&r1, &r2, true);
>    if (res)
>      ++alias_stats.refs_may_alias_p_may_alias;
> @@ -1762,39 +1778,37 @@ process_args:
>           ao_ref_init (&r, op);
>           if (refs_may_alias_p_1 (&r, ref, true))
>             return true;
>         }
>      }
>
>    return false;
>  }
>
>  static bool
> -ref_maybe_used_by_call_p (gimple call, tree ref)
> +ref_maybe_used_by_call_p (gimple call, ao_ref *ref)
>  {
> -  ao_ref r;
>    bool res;
> -  ao_ref_init (&r, ref);
> -  res = ref_maybe_used_by_call_p_1 (call, &r);
> +  res = ref_maybe_used_by_call_p_1 (call, ref);
>    if (res)
>      ++alias_stats.ref_maybe_used_by_call_p_may_alias;
>    else
>      ++alias_stats.ref_maybe_used_by_call_p_no_alias;
>    return res;
>  }
>
>
>  /* If the statement STMT may use the memory reference REF return
>     true, otherwise return false.  */
>
>  bool
> -ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
> +ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
>  {
>    if (is_gimple_assign (stmt))
>      {
>        tree rhs;
>
>        /* All memory assign statements are single.  */
>        if (!gimple_assign_single_p (stmt))
>         return false;
>
>        rhs = gimple_assign_rhs1 (stmt);
> @@ -1803,41 +1817,48 @@ ref_maybe_used_by_stmt_p (gimple stmt, t
>           || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
>         return false;
>
>        return refs_may_alias_p (rhs, ref);
>      }
>    else if (is_gimple_call (stmt))
>      return ref_maybe_used_by_call_p (stmt, ref);
>    else if (gimple_code (stmt) == GIMPLE_RETURN)
>      {
>        tree retval = gimple_return_retval (stmt);
> -      tree base;
>        if (retval
>           && TREE_CODE (retval) != SSA_NAME
>           && !is_gimple_min_invariant (retval)
>           && refs_may_alias_p (retval, ref))
>         return true;
>        /* If ref escapes the function then the return acts as a use.  */
> -      base = get_base_address (ref);
> +      tree base = ao_ref_base (ref);
>        if (!base)
>         ;
>        else if (DECL_P (base))
>         return is_global_var (base);
>        else if (TREE_CODE (base) == MEM_REF
>                || TREE_CODE (base) == TARGET_MEM_REF)
>         return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
>        return false;
>      }
>
>    return true;
>  }
>
> +bool
> +ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
> +{
> +  ao_ref r;
> +  ao_ref_init (&r, ref);
> +  return ref_maybe_used_by_stmt_p (stmt, &r);
> +}
> +
>  /* If the call in statement CALL may clobber the memory reference REF
>     return true, otherwise return false.  */
>
>  bool
>  call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
>  {
>    tree base;
>    tree callee;
>
>    /* If the call is pure or const it cannot clobber anything.  */
> @@ -2162,22 +2183,22 @@ bool
>  stmt_may_clobber_ref_p (gimple stmt, tree ref)
>  {
>    ao_ref r;
>    ao_ref_init (&r, ref);
>    return stmt_may_clobber_ref_p_1 (stmt, &r);
>  }
>
>  /* If STMT kills the memory reference REF return true, otherwise
>     return false.  */
>
> -static bool
> -stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
> +bool
> +stmt_kills_ref_p (gimple stmt, ao_ref *ref)
>  {
>    if (!ao_ref_base (ref))
>      return false;
>
>    if (gimple_has_lhs (stmt)
>        && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
>        /* The assignment is not necessarily carried out if it can throw
>          and we can catch it in the current function where we could inspect
>          the previous value.
>          ???  We only need to care about the RHS throwing.  For aggregate
> @@ -2350,21 +2371,21 @@ stmt_kills_ref_p_1 (gimple stmt, ao_ref
>           }
>      }
>    return false;
>  }
>
>  bool
>  stmt_kills_ref_p (gimple stmt, tree ref)
>  {
>    ao_ref r;
>    ao_ref_init (&r, ref);
> -  return stmt_kills_ref_p_1 (stmt, &r);
> +  return stmt_kills_ref_p (stmt, &r);
>  }
>
>
>  /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
>     TARGET or a statement clobbering the memory reference REF in which
>     case false is returned.  The walk starts with VUSE, one argument of PHI.  */
>
>  static bool
>  maybe_skip_until (gimple phi, tree target, ao_ref *ref,
>                   tree vuse, unsigned int *cnt, bitmap *visited,
> Index: gcc/tree-ssa-alias.h
> ===================================================================
> --- gcc/tree-ssa-alias.h        (revision 214066)
> +++ gcc/tree-ssa-alias.h        (working copy)
> @@ -94,31 +94,34 @@ struct ao_ref
>
>
>  /* In tree-ssa-alias.c  */
>  extern void ao_ref_init (ao_ref *, tree);
>  extern void ao_ref_init_from_ptr_and_size (ao_ref *, tree, tree);
>  extern tree ao_ref_base (ao_ref *);
>  extern alias_set_type ao_ref_alias_set (ao_ref *);
>  extern bool ptr_deref_may_alias_global_p (tree);
>  extern bool ptr_derefs_may_alias_p (tree, tree);
>  extern bool ref_may_alias_global_p (tree);
> +extern bool ref_may_alias_global_p (ao_ref *);
>  extern bool refs_may_alias_p (tree, tree);
>  extern bool refs_may_alias_p_1 (ao_ref *, ao_ref *, bool);
>  extern bool refs_anti_dependent_p (tree, tree);
>  extern bool refs_output_dependent_p (tree, tree);
>  extern bool ref_maybe_used_by_stmt_p (gimple, tree);
> +extern bool ref_maybe_used_by_stmt_p (gimple, ao_ref *);
>  extern bool stmt_may_clobber_global_p (gimple);
>  extern bool stmt_may_clobber_ref_p (gimple, tree);
>  extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *);
>  extern bool call_may_clobber_ref_p (gimple, tree);
>  extern bool call_may_clobber_ref_p_1 (gimple, ao_ref *);
>  extern bool stmt_kills_ref_p (gimple, tree);
> +extern bool stmt_kills_ref_p (gimple, ao_ref *);
>  extern tree get_continuation_for_phi (gimple, ao_ref *,
>                                       unsigned int *, bitmap *, bool,
>                                       void *(*)(ao_ref *, tree, void *, bool),
>                                       void *);
>  extern void *walk_non_aliased_vuses (ao_ref *, tree,
>                                      void *(*)(ao_ref *, tree,
>                                                unsigned int, void *),
>                                      void *(*)(ao_ref *, tree, void *, bool),
>                                      void *);
>  extern unsigned int walk_aliased_vdefs (ao_ref *, tree,
> Index: gcc/tree-ssa-dse.c
> ===================================================================
> --- gcc/tree-ssa-dse.c  (revision 214066)
> +++ gcc/tree-ssa-dse.c  (working copy)
> @@ -75,39 +75,32 @@ along with GCC; see the file COPYING3.
>     fact, they are the same transformation applied to different views of
>     the CFG.  */
>
>
>  /* Bitmap of blocks that have had EH statements cleaned.  We should
>     remove their dead edges eventually.  */
>  static bitmap need_eh_cleanup;
>
>
>  /* A helper of dse_optimize_stmt.
> -   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
> -   may prove STMT to be dead.
> +   Given a GIMPLE_ASSIGN in STMT that writes to REF, find a candidate
> +   statement *USE_STMT that may prove STMT to be dead.
>     Return TRUE if the above conditions are met, otherwise FALSE.  */
>
>  static bool
> -dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
> +dse_possible_dead_store_p (ao_ref *ref, gimple stmt, gimple *use_stmt)
>  {
>    gimple temp;
>    unsigned cnt = 0;
>
>    *use_stmt = NULL;
>
> -  /* Self-assignments are zombies.  */
> -  if (operand_equal_p (gimple_assign_rhs1 (stmt), gimple_assign_lhs (stmt), 0))
> -    {
> -      *use_stmt = stmt;
> -      return true;
> -    }
> -
>    /* Find the first dominated statement that clobbers (part of) the
>       memory stmt stores to with no intermediate statement that may use
>       part of the memory stmt stores.  That is, find a store that may
>       prove stmt to be a dead store.  */
>    temp = stmt;
>    do
>      {
>        gimple use_stmt, defvar_def;
>        imm_use_iterator ui;
>        bool fail = false;
> @@ -157,22 +150,21 @@ dse_possible_dead_store_p (gimple stmt,
>               /* Do not consider the PHI as use if it dominates the
>                  stmt defining the virtual operand we are processing,
>                  we have processed it already in this case.  */
>               if (gimple_bb (defvar_def) != gimple_bb (use_stmt)
>                   && !dominated_by_p (CDI_DOMINATORS,
>                                       gimple_bb (defvar_def),
>                                       gimple_bb (use_stmt)))
>                 temp = use_stmt;
>             }
>           /* If the statement is a use the store is not dead.  */
> -         else if (ref_maybe_used_by_stmt_p (use_stmt,
> -                                            gimple_assign_lhs (stmt)))
> +         else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
>             {
>               fail = true;
>               BREAK_FROM_IMM_USE_STMT (ui);
>             }
>           /* If this is a store, remember it or bail out if we have
>              multiple ones (the will be in different CFG parts then).  */
>           else if (gimple_vdef (use_stmt))
>             {
>               if (temp)
>                 {
> @@ -184,29 +176,29 @@ dse_possible_dead_store_p (gimple stmt,
>         }
>
>        if (fail)
>         return false;
>
>        /* If we didn't find any definition this means the store is dead
>           if it isn't a store to global reachable memory.  In this case
>          just pretend the stmt makes itself dead.  Otherwise fail.  */
>        if (!temp)
>         {
> -         if (stmt_may_clobber_global_p (stmt))
> +         if (ref_may_alias_global_p (ref))
>             return false;
>
>           temp = stmt;
>           break;
>         }
>      }
>    /* Continue walking until we reach a kill.  */
> -  while (!stmt_kills_ref_p (temp, gimple_assign_lhs (stmt)));
> +  while (!stmt_kills_ref_p (temp, ref));
>
>    *use_stmt = temp;
>
>    return true;
>  }
>
>
>  /* Attempt to eliminate dead stores in the statement referenced by BSI.
>
>     A dead store is a store into a memory location which will later be
> @@ -221,75 +213,119 @@ dse_possible_dead_store_p (gimple stmt,
>  static void
>  dse_optimize_stmt (gimple_stmt_iterator *gsi)
>  {
>    gimple stmt = gsi_stmt (*gsi);
>
>    /* If this statement has no virtual defs, then there is nothing
>       to do.  */
>    if (!gimple_vdef (stmt))
>      return;
>
> -  /* We know we have virtual definitions.  If this is a GIMPLE_ASSIGN
> -     that's not also a function call, then record it into our table.  */
> -  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
> -    return;
> -
>    /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
>    if (gimple_has_volatile_ops (stmt)
>        && (!gimple_clobber_p (stmt)
>           || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
>      return;
>
> +  /* We know we have virtual definitions.  We can handle assignments and
> +     some builtin calls.  */
> +  if (is_gimple_call (stmt))

  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))

which also does argument verification.
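
I.e. roughly (just a sketch, not the final patch):

  if (is_gimple_call (stmt))
    {
      /* gimple_call_builtin_p checks DECL_BUILT_IN_CLASS and also verifies
         the arguments against the builtin's prototype, so the explicit
         fndecl checks are not needed.  */
      if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        return;
      switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
        {
        case BUILT_IN_MEMCPY:
        case BUILT_IN_MEMMOVE:
        case BUILT_IN_MEMSET:
          /* ... handle the possibly dead call as in the patch ...  */
          break;
        default:
          return;
        }
    }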

> +    {
> +      tree callee = gimple_call_fndecl (stmt);
> +      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
> +       return;
> +      switch (DECL_FUNCTION_CODE (callee))
> +       {
> +         case BUILT_IN_MEMCPY:
> +         case BUILT_IN_MEMMOVE:
> +         case BUILT_IN_MEMSET:
> +           {
> +             gimple use_stmt;
> +             ao_ref ref;
> +             tree size = NULL_TREE;
> +             int nargs = gimple_call_num_args (stmt);
> +             if (nargs < 2)
> +               return;
> +             if (nargs == 3)
> +               size = gimple_call_arg (stmt, 2);
> +             tree ptr = gimple_call_arg (stmt, 0);
> +             ao_ref_init_from_ptr_and_size (&ref, ptr, size);
> +             if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt))
> +               return;
> +
> +             if (dump_file && (dump_flags & TDF_DETAILS))
> +               {
> +                 fprintf (dump_file, "  Deleted dead store '");

This should say "Deleted dead call", since we are removing a call here,
not a plain store.

> +                 print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
> +                 fprintf (dump_file, "'\n");
> +               }
> +
> +             tree lhs = gimple_call_lhs (stmt);
> +             if (lhs)
> +               {
> +                 gimple new_stmt = gimple_build_assign (lhs, ptr);
> +                 unlink_stmt_vdef (stmt);
> +                 gsi_replace (gsi, new_stmt, true);

You may need EH cleanup here as well.

Bonus points if you make gsi_replace return whether
maybe_clean_or_replace_eh_stmt returned true.
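
A hypothetical sketch of what I mean, assuming gsi_replace is changed to
return the result of maybe_clean_or_replace_eh_stmt (it does not do that
today, so this does not compile as-is):

              tree lhs = gimple_call_lhs (stmt);
              if (lhs)
                {
                  gimple new_stmt = gimple_build_assign (lhs, ptr);
                  unlink_stmt_vdef (stmt);
                  /* If replacing the (possibly throwing) call made EH edges
                     dead, request EH cleanup for this block as well.  */
                  if (gsi_replace (gsi, new_stmt, true))
                    bitmap_set_bit (need_eh_cleanup,
                                    gimple_bb (new_stmt)->index);
                }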

> +               }
> +             else
> +               {
> +                 /* Then we need to fix the operand of the consuming stmt.  */
> +                 unlink_stmt_vdef (stmt);
> +
> +                 /* Remove the dead store.  */
> +                 basic_block bb = gimple_bb (stmt);
> +                 if (gsi_remove (gsi, true))
> +                   bitmap_set_bit (need_eh_cleanup, bb->index);
> +               }
> +             break;
> +           }
> +         default:
> +           return;
> +       }
> +    }
> +
>    if (is_gimple_assign (stmt))
>      {
>        gimple use_stmt;
>
> -      if (!dse_possible_dead_store_p (stmt, &use_stmt))
> -       return;
> +      /* Self-assignments are zombies.  */
> +      if (operand_equal_p (gimple_assign_rhs1 (stmt),
> +                          gimple_assign_lhs (stmt), 0))
> +       use_stmt = stmt;
> +      else
> +       {
> +         ao_ref ref;
> +         ao_ref_init (&ref, gimple_assign_lhs (stmt));
> +         if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt))
> +           return;
> +       }
>
>        /* Now we know that use_stmt kills the LHS of stmt.  */
>
>        /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
>          another clobber stmt.  */
>        if (gimple_clobber_p (stmt)
>           && !gimple_clobber_p (use_stmt))
>         return;
>
> -      basic_block bb;
> -
> -      /* If use_stmt is or might be a nop assignment, e.g. for
> -          struct { ... } S a, b, *p; ...
> -          b = a; b = b;
> -        or
> -          b = a; b = *p; where p might be &b,
> -        or
> -           *p = a; *p = b; where p might be &b,
> -        or
> -           *p = *u; *p = *v; where p might be v, then USE_STMT
> -         acts as a use as well as definition, so store in STMT
> -         is not dead.  */
> -      if (stmt != use_stmt
> -         && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
> -       return;
> -

I don't see how you can remove this code?

Otherwise looks good to me.

Thanks,
Richard.

>        if (dump_file && (dump_flags & TDF_DETAILS))
>         {
>           fprintf (dump_file, "  Deleted dead store '");
>           print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
>           fprintf (dump_file, "'\n");
>         }
>
>        /* Then we need to fix the operand of the consuming stmt.  */
>        unlink_stmt_vdef (stmt);
>
>        /* Remove the dead store.  */
> -      bb = gimple_bb (stmt);
> +      basic_block bb = gimple_bb (stmt);
>        if (gsi_remove (gsi, true))
>         bitmap_set_bit (need_eh_cleanup, bb->index);
>
>        /* And release any SSA_NAMEs set in this statement back to the
>          SSA_NAME manager.  */
>        release_defs (stmt);
>      }
>  }
>
>  class dse_dom_walker : public dom_walker
>
