Hi!

On Thu, Nov 13, 2014 at 12:11:08AM +0100, Jan Hubicka wrote:

> Actually I think you want to do this for can_throw, too.
> We probably do not have throwing internal calls, but it is better to be safe.

I'll leave that change to you ;).  As I said in my last mail, it isn't
immediately clear to me what exactly you want to do with internal calls
for can_throw (if anything) and for pure_const/looping.
E.g. for pure_const, internal calls can be memory accesses
(MASK_LOAD/MASK_STORE/LOAD_LANES/STORE_LANES, arguably even ASAN_CHECK
can be considered a memory access).  That said, the first four should
ATM appear only post-IPA, and ASAN_CHECK should just accompany some
normal memory access.
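
If pure_const ever does want to look at internal calls, I'd expect
something along these lines in check_call (just a sketch, not part of
this patch, and as noted above the vectorizer ifns only show up
post-IPA anyway):

  else if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
      case IFN_MASK_STORE:
      case IFN_STORE_LANES:
        /* A store makes the function neither const nor pure.  */
        local->pure_const_state = IPA_NEITHER;
        break;
      case IFN_MASK_LOAD:
      case IFN_LOAD_LANES:
        /* A load alone still allows IPA_PURE.  */
        worse_state (&local->pure_const_state, &local->looping,
                     IPA_PURE, false);
        break;
      default:
        break;
      }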

> The propagation seems fine, but I wonder if we won't get better memory
> locality doing this during the propagation of pure/const?

Done below, though I chose to use a separate w = node; ... w = w_info->next_cycle;
loop to compute can_free, because trying to do it in the same loop as the
pure_const computation, which can stop early etc., would be too ugly.
The update of can_free and the setting of the nonfreeing_fn flag are done
in the same loop that stores the pure_const/looping results.

> You want to walk aliases,
> 
>   cgraph_node::get (fndecl)->function_symbol (&availability)
> 
> > +  if (!n || n->get_availability () <= AVAIL_INTERPOSABLE)
> 
> and use availability here.

Done below.

What about the:
> > I wonder if the nonfreeing_call_p function shouldn't be moved elsewhere
> > though (suggestion where), so that gimple.c doesn't need the cgraph
> > includes.
question, though (maybe that one is more for Richard)?
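
For reference, this is the kind of transformation the nonfreeing_fn
information is meant to help with, e.g. cselim-style conditional store
replacement (purely illustrative, not part of the patch or its testsuite;
the helper is made up and assumed to be analyzed as nonfreeing):

  void helper (void);   /* assume IPA finds it never calls free/munmap */

  void
  f (int *p, int x)
  {
    int tmp = *p;       /* *p proven non-trapping here */
    helper ();
    if (tmp < x)
      *p = x;           /* turning this store unconditional is only safe
                           if helper () cannot make *p trapping again */
  }

If helper () might free or munmap the memory, the knowledge that *p does
not trap has to be dropped at the call; with the patch it survives when
helper () is found to be nonfreeing_fn.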

2014-11-13  Jakub Jelinek  <ja...@redhat.com>

        * ipa-pure-const.c (struct funct_state_d): Add can_free field.
        (varying_state): Add true for can_free.
        (check_call): For builtin or internal !nonfreeing_call_p set
        local->can_free.
        (check_stmt): For asm volatile and asm with "memory" set
        local->can_free.
        (analyze_function): Clear local->can_free initially, continue
        calling check_stmt until all flags are computed, dump can_free
        flag.
        (pure_const_write_summary): Write can_free flag.
        (pure_const_read_summary): Read it back.
        (propagate_pure_const): Propagate also can_free flag, set
        w->nonfreeing_fn if it is false after propagation.
        * cgraph.h (cgraph_node): Add nonfreeing_fn member.
        * gimple.c: Include ipa-ref.h, lto-streamer.h and cgraph.h.
        (nonfreeing_call_p): Return cgraph nonfreeing_fn flag if set.
        * cgraph.c (cgraph_node::dump): Dump nonfreeing_fn flag.
        * lto-cgraph.c (lto_output_node): Write nonfreeing_fn flag.
        (input_overwrite_node): Read it back.

--- gcc/cgraph.c.jj     2014-11-13 00:07:34.271125586 +0100
+++ gcc/cgraph.c        2014-11-13 09:14:41.217090244 +0100
@@ -1986,6 +1986,8 @@ cgraph_node::dump (FILE *f)
     fprintf (f, " tm_clone");
   if (icf_merged)
     fprintf (f, " icf_merged");
+  if (nonfreeing_fn)
+    fprintf (f, " nonfreeing_fn");
   if (DECL_STATIC_CONSTRUCTOR (decl))
     fprintf (f," static_constructor (priority:%i)", get_init_priority ());
   if (DECL_STATIC_DESTRUCTOR (decl))
--- gcc/ipa-pure-const.c.jj     2014-11-13 00:08:37.566011221 +0100
+++ gcc/ipa-pure-const.c        2014-11-13 09:25:02.933637928 +0100
@@ -112,11 +112,15 @@ struct funct_state_d
   bool looping;
 
   bool can_throw;
+
+  /* If function can call free, munmap or otherwise make previously
+     non-trapping memory accesses trapping.  */
+  bool can_free;
 };
 
 /* State used when we know nothing about function.  */
 static struct funct_state_d varying_state
-   = { IPA_NEITHER, IPA_NEITHER, true, true, true };
+   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };
 
 
 typedef struct funct_state_d * funct_state;
@@ -559,6 +563,10 @@ check_call (funct_state local, gimple ca
       enum pure_const_state_e call_state;
       bool call_looping;
 
+      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
+         && !nonfreeing_call_p (call))
+       local->can_free = true;
+
       if (special_builtin_state (&call_state, &call_looping, callee_t))
        {
          worse_state (&local->pure_const_state, &local->looping,
@@ -589,6 +597,8 @@ check_call (funct_state local, gimple ca
            break;
          }
     }
+  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
+    local->can_free = true;
 
   /* When not in IPA mode, we can still handle self recursion.  */
   if (!ipa && callee_t
@@ -753,6 +763,7 @@ check_stmt (gimple_stmt_iterator *gsip,
            fprintf (dump_file, "    memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here. */
          local->pure_const_state = IPA_NEITHER;
+         local->can_free = true;
        }
       if (gimple_asm_volatile_p (stmt))
        {
@@ -760,7 +771,8 @@ check_stmt (gimple_stmt_iterator *gsip,
            fprintf (dump_file, "    volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here. */
          local->pure_const_state = IPA_NEITHER;
-          local->looping = true;
+         local->looping = true;
+         local->can_free = true;
        }
       return;
     default:
@@ -785,6 +797,7 @@ analyze_function (struct cgraph_node *fn
   l->looping_previously_known = true;
   l->looping = false;
   l->can_throw = false;
+  l->can_free = false;
   state_from_flags (&l->state_previously_known, &l->looping_previously_known,
                    flags_from_decl_or_type (fn->decl),
                    fn->cannot_return_p ());
@@ -815,7 +828,10 @@ analyze_function (struct cgraph_node *fn
           gsi_next (&gsi))
        {
          check_stmt (&gsi, l, ipa);
-         if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
+         if (l->pure_const_state == IPA_NEITHER
+             && l->looping
+             && l->can_throw
+             && l->can_free)
            goto end;
        }
     }
@@ -882,6 +898,8 @@ end:
         fprintf (dump_file, "Function is locally const.\n");
       if (l->pure_const_state == IPA_PURE)
         fprintf (dump_file, "Function is locally pure.\n");
+      if (l->can_free)
+       fprintf (dump_file, "Function can locally free.\n");
     }
   return l;
 }
@@ -1021,6 +1039,7 @@ pure_const_write_summary (void)
          bp_pack_value (&bp, fs->looping_previously_known, 1);
          bp_pack_value (&bp, fs->looping, 1);
          bp_pack_value (&bp, fs->can_throw, 1);
+         bp_pack_value (&bp, fs->can_free, 1);
          streamer_write_bitpack (&bp);
        }
     }
@@ -1080,6 +1099,7 @@ pure_const_read_summary (void)
              fs->looping_previously_known = bp_unpack_value (&bp, 1);
              fs->looping = bp_unpack_value (&bp, 1);
              fs->can_throw = bp_unpack_value (&bp, 1);
+             fs->can_free = bp_unpack_value (&bp, 1);
              if (dump_file)
                {
                  int flags = flags_from_decl_or_type (node->decl);
@@ -1102,6 +1122,8 @@ pure_const_read_summary (void)
                    fprintf (dump_file,"  function is previously known 
looping\n");
                  if (fs->can_throw)
                    fprintf (dump_file,"  function is locally throwing\n");
+                 if (fs->can_free)
+                   fprintf (dump_file,"  function can locally free\n");
                }
            }
 
@@ -1347,6 +1369,33 @@ propagate_pure_const (void)
                 pure_const_names [pure_const_state],
                 looping);
 
+      /* Find the worst state of can_free for any node in the cycle.  */
+      bool can_free = false;
+      w = node;
+      while (w && !can_free)
+       {
+         struct cgraph_edge *e;
+         funct_state w_l = get_function_state (w);
+
+         if (w_l->can_free
+             || w->get_availability () == AVAIL_INTERPOSABLE
+             || w->indirect_calls)
+           can_free = true;
+
+         for (e = w->callees; e && !can_free; e = e->next_callee)
+           {
+             enum availability avail;
+             struct cgraph_node *y = e->callee->function_symbol (&avail);
+
+             if (avail > AVAIL_INTERPOSABLE)
+               can_free = get_function_state (y)->can_free;
+             else
+               can_free = true;
+           }
+         w_info = (struct ipa_dfs_info *) w->aux;
+         w = w_info->next_cycle;
+       }
+
       /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
       w = node;
@@ -1356,6 +1405,12 @@ propagate_pure_const (void)
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;
 
+         w_l->can_free = can_free;
+         w->nonfreeing_fn = !can_free;
+         if (!can_free && dump_file)
+           fprintf (dump_file, "Function found not to call free: %s\n",
+                    w->name ());
+
          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            {
--- gcc/cgraph.h.jj     2014-11-13 00:07:34.278125463 +0100
+++ gcc/cgraph.h        2014-11-13 09:14:41.216090285 +0100
@@ -1260,6 +1260,10 @@ public:
   /* True when function is clone created for Pointer Bounds Checker
      instrumentation.  */
   unsigned instrumentation_clone : 1;
+  /* True if call to node can't result in a call to free, munmap or
+     other operation that could make previously non-trapping memory
+     accesses trapping.  */
+  unsigned nonfreeing_fn : 1;
 };
 
 /* A cgraph node set is a collection of cgraph nodes.  A cgraph node
--- gcc/gimple.c.jj     2014-11-13 09:14:28.853595937 +0100
+++ gcc/gimple.c        2014-11-13 09:16:08.111606042 +0100
@@ -58,6 +58,9 @@ along with GCC; see the file COPYING3.
 #include "bitmap.h"
 #include "stringpool.h"
 #include "tree-ssanames.h"
+#include "ipa-ref.h"
+#include "lto-streamer.h"
+#include "cgraph.h"
 
 
 /* All the tuples have their operand vector (if present) at the very bottom
@@ -2542,7 +2545,17 @@ nonfreeing_call_p (gimple call)
           && gimple_call_flags (call) & ECF_LEAF)
     return true;
 
-  return false;
+  tree fndecl = gimple_call_fndecl (call);
+  if (!fndecl)
+    return false;
+  struct cgraph_node *n = cgraph_node::get (fndecl);
+  if (!n)
+    return false;
+  enum availability availability;
+  n = n->function_symbol (&availability);
+  if (!n || availability <= AVAIL_INTERPOSABLE)
+    return false;
+  return n->nonfreeing_fn;
 }
 
 /* Callback for walk_stmt_load_store_ops.
--- gcc/lto-cgraph.c.jj 2014-11-13 00:07:34.306124970 +0100
+++ gcc/lto-cgraph.c    2014-11-13 09:14:41.227089835 +0100
@@ -549,6 +549,7 @@ lto_output_node (struct lto_simple_outpu
   bp_pack_value (&bp, node->tm_clone, 1);
   bp_pack_value (&bp, node->calls_comdat_local, 1);
   bp_pack_value (&bp, node->icf_merged, 1);
+  bp_pack_value (&bp, node->nonfreeing_fn, 1);
   bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
   bp_pack_enum (&bp, ld_plugin_symbol_resolution,
                LDPR_NUM_KNOWN, node->resolution);
@@ -1097,6 +1098,7 @@ input_overwrite_node (struct lto_file_de
   node->tm_clone = bp_unpack_value (bp, 1);
   node->calls_comdat_local = bp_unpack_value (bp, 1);
   node->icf_merged = bp_unpack_value (bp, 1);
+  node->nonfreeing_fn = bp_unpack_value (bp, 1);
   node->thunk.thunk_p = bp_unpack_value (bp, 1);
   node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
                                     LDPR_NUM_KNOWN);


        Jakub
