> > On 07/02/2014 01:18 PM, Jan Hubicka wrote:
> > >We propagate types from places where we know instances are created, across
> > >pointers passed to functions.  Once a non-POD type is created at a given
> > >memory location, one cannot change its type by placement new into something
> > >else.
> > 
> > Hmm.  If the memory location is untyped (i.e. from malloc) or a
> > character array, or a union, you can indeed destroy an object of one
> > type and create an object of a different type in that location.
> > 
> > >Jason, this assumes that one cannot destroy the object and re-construct the
> > >same type at the same spot.
> > 
> > That is an invalid assumption; you can destroy one object and
> > construct a new one in the same location.  Doing it within a method
> > would be unusual, but I don't think there's a rule against it.
> > 
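For concreteness, here is a minimal sketch of the untyped-storage case Jason
describes above.  It is separate from the testcases below, and the class names
are chosen so they do not clash with them:

#include <new>
#include <stdlib.h>

struct D { virtual int f () { return 1; } virtual ~D () {} };
struct E : D { virtual int f () { return 2; } };
struct F : D { virtual int f () { return 3; } };

int
untyped_storage ()
{
  void *buf = malloc (sizeof (E) > sizeof (F) ? sizeof (E) : sizeof (F));
  D *p = new (buf) E;   /* dynamic type of the storage is now E */
  int r = p->f ();      /* calls E::f */
  p->~D ();             /* destroy the E through the virtual destructor */
  p = new (buf) F;      /* OK: the storage is untyped, new dynamic type F */
  r += p->f ();         /* calls F::f */
  p->~D ();
  free (buf);
  return r;
}
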
> Jason,
> I am looking into tracking dynamic types now.  Obviously I need to set very
> exact rules about when these may change.  Can you take a few minutes and tell
> me which of these sequences are valid?
> 
> I think the b variants are invalid; currently we also assume t1 to be invalid,
> but t2 to be valid.
> With placement new, I wonder if we can arrange it to do, before returning:
> ptr = __builtin_placement_new (ptr)
> This builtin would be folded away after IPA, when we no longer need to track
> types, in the same way as __builtin_constant_p.  That way I won't get two
> different dynamic types mixed at one pointer location, since they will look
> like two pointers until after inlining.  But given that C++ allows placement
> new to be written by hand, perhaps this is not possible?
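Concretely, the idea would be for the placement operator new itself to end with
the proposed marker, roughly as in the sketch below (purely hypothetical;
__builtin_placement_new does not exist today and is only the builtin suggested
above):

inline void *
operator new (__SIZE_TYPE__, void *__p) throw ()
{
  /* Hypothetical: return a "fresh" pointer value for the storage, so that the
     object constructed in it is never tracked as the same memory object as
     whatever lived at __p before.  The builtin would be folded to the identity
     after IPA, once type tracking is no longer needed.  */
  return __builtin_placement_new (__p);
}
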

It would be useful to know the rules in these testcases.  I am attaching a WIP
patch for detecting the dynamic type of heap-allocated objects.  It basically
takes Martin's detect_type_change code from ipa-prop and adds discovery of
constructor calls.  I however need to know whether I need to play it extra safe
when propagating these types.
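
To make the intent concrete, the heap case the patch is aimed at looks roughly
like the sketch below (a hypothetical example, not one of the quoted testcases):
either the explicit call to the constructor is recognized by the new
constructor-call detection, or, once the constructor is inlined, the store of
the derived class's vtable address into the object's vtable pointer field is
matched by extr_type_from_vtbl_ptr_store, and the indirect call can be
devirtualized.

struct Base { virtual void foo () {} };
struct Derived : Base { virtual void foo () {} };

void
heap_example ()
{
  struct Base *p = new Derived;  /* either the call Derived::Derived (p) is
                                    detected, or after inlining the store of
                                    Derived's vtable pointer into *p is
                                    visible in the IL */
  p->foo ();                     /* get_dynamic_type should let this call be
                                    devirtualized to Derived::foo */
}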
> 
> #include <stdio.h>
> inline void* operator new(__SIZE_TYPE__, void* __p) throw() { return __p;}
> 
> struct A
> {
>   virtual void foo() {printf ("A\n");}
> };
> struct B: A
> {
>   virtual void foo() {printf ("B\n");}
> };
> struct C: A
> {
>   virtual void foo() {printf ("C\n");}
> };
> 
> struct A *
> type(struct B *a)
> {
>   struct C *b;
>   ((struct B *)a)->~B();
>   b = new (a) C;
>   return b;
> }
> struct A *
> type_back(struct A *a)
> {
>   struct B *b;
>   ((struct C *)a)->~C();
>   b = new (a) B;
>   return b;
> }
> 
> void
> t1()
> {
>   struct B a;
>   struct A *b;
>   a.foo();
>   b=type(&a);
>   b->foo();
>   b=type_back (b);
>   a.foo();
> }
> void
> t1b()
> {
>   struct B a;
>   a.foo();
>   type(&a);
>   ((struct A *)&a)->foo();
>   type_back (&a);
>   ((struct A *)&a)->foo();
> }
> void
> t2()
> {
>   struct B *a = new (B);
>   struct A *b;
>   a->foo();
>   b=type(a);
>   b->foo();
> }
> void
> t2b()
> {
>   struct B *a = new (B);
>   struct A *b;
>   a->foo();
>   type(a);
>   ((struct A *)a)->foo();
> }
> int main()
> {
>   t1();
>   t1b();
>   t2();
>   t2b();
> }

Index: gimple-fold.c
===================================================================
--- gimple-fold.c       (revision 212546)
+++ gimple-fold.c       (working copy)
@@ -372,7 +372,7 @@
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
-           else if (flag_devirtualize && virtual_method_call_p (val))
+           else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
Index: ipa-devirt.c
===================================================================
--- ipa-devirt.c        (revision 212546)
+++ ipa-devirt.c        (working copy)
@@ -2092,6 +2113,26 @@
   return true;
 }
 
+/* See if OP is an SSA name initialized as a copy or by a single assignment.
+   If so, walk the SSA graph up.  */
+
+static tree
+walk_ssa_copies (tree op)
+{
+  STRIP_NOPS (op);
+  while (TREE_CODE (op) == SSA_NAME
+        && !SSA_NAME_IS_DEFAULT_DEF (op)
+        && SSA_NAME_DEF_STMT (op)
+        && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
+    {
+      if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
+       return op;
+      op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
+      STRIP_NOPS (op);
+    }
+  return op;
+}
+
 /* Given REF call in FNDECL, determine class of the polymorphic
    call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
    CALL is optional argument giving the actual statement (usually call) where
@@ -2120,16 +2161,9 @@
   /* Walk SSA for outer object.  */
   do 
     {
-      if (TREE_CODE (base_pointer) == SSA_NAME
-         && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
-         && SSA_NAME_DEF_STMT (base_pointer)
-         && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
+      base_pointer = walk_ssa_copies (base_pointer);
+      if (TREE_CODE (base_pointer) == ADDR_EXPR)
        {
-         base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
-         STRIP_NOPS (base_pointer);
-       }
-      else if (TREE_CODE (base_pointer) == ADDR_EXPR)
-       {
          HOST_WIDE_INT size, max_size;
          HOST_WIDE_INT offset2;
          tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
@@ -2174,7 +2208,7 @@
                                                     context->outer_type,
                                                     call,
                                                     current_function_decl);
-                 return NULL;
+                 return base_pointer;
                }
              else
                break;
@@ -2264,6 +2298,402 @@
   return base_pointer;
 }
 
+/* Structure to be passed in between detect_type_change and
+   check_stmt_for_type_change.  */
+
+struct type_change_info
+{
+  /* Offset into the object where there is the virtual method pointer we are
+     looking for.  */
+  HOST_WIDE_INT offset;
+  /* The declaration or SSA_NAME pointer of the base that we are checking for
+     type change.  */
+  tree instance;
+  /* The reference to virtual table pointer used.  */
+  tree vtbl_ptr_ref;
+  tree otr_type;
+  /* If we actually can tell the type that the object has changed to, it is
+     stored in this field.  Otherwise it remains NULL_TREE.  */
+  tree known_current_type;
+  HOST_WIDE_INT known_current_offset;
+
+  /* Set to true if dynamic type change has been detected.  */
+  bool type_maybe_changed;
+  /* Set to true if multiple types have been encountered.  known_current_type
+     must be disregarded in that case.  */
+  bool multiple_types_encountered;
+};
+
+/* Return true if STMT can modify a virtual method table pointer.
+
+   This function makes special assumptions about both constructors and
+   destructors which are all the functions that are allowed to alter the VMT
+   pointers.  It assumes that destructors begin with assignment into all VMT
+   pointers and that constructors essentially look in the following way:
+
+   1) The very first thing they do is that they call constructors of ancestor
+   sub-objects that have them.
+
+   2) Then VMT pointers of this and all its ancestors is set to new values
+   corresponding to the type corresponding to the constructor.
+
+   3) Only afterwards, other stuff such as constructor of member sub-objects
+   and the code written by the user is run.  Only this may include calling
+   virtual functions, directly or indirectly.
+
+   There is no way to call a constructor of an ancestor sub-object in any
+   other way.
+
+   This means that we do not have to care whether constructors get the correct
+   type information because they will always change it (in fact, if we define
+   the type to be given by the VMT pointer, it is undefined).
+
+   The most important fact to derive from the above is that if, for some
+   statement in the section 3, we try to detect whether the dynamic type has
+   changed, we can safely ignore all calls as we examine the function body
+   backwards until we reach statements in section 2 because these calls cannot
+   be ancestor constructors or destructors (if the input is not bogus) and so
+   do not change the dynamic type (this holds true only for automatically
+   allocated objects but at the moment we devirtualize only these).  We then
+   must detect that statements in section 2 change the dynamic type and can try
+   to derive the new type.  That is enough and we can stop, we will never see
+   the calls into constructors of sub-objects in this code.  Therefore we can
+   safely ignore all call statements that we traverse.
+  */
+
+static bool
+stmt_may_be_vtbl_ptr_store (gimple stmt)
+{
+  if (is_gimple_call (stmt))
+    return false;
+  else if (is_gimple_assign (stmt))
+    {
+      tree lhs = gimple_assign_lhs (stmt);
+
+      if (gimple_clobber_p (stmt))
+       return false;
+      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
+       {
+         if (flag_strict_aliasing
+             && !POINTER_TYPE_P (TREE_TYPE (lhs)))
+           return false;
+
+         if (TREE_CODE (lhs) == COMPONENT_REF
+             && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
+           return false;
+         /* In the future we might want to use get_base_ref_and_offset to find
+            if there is a field corresponding to the offset and if so, proceed
+            almost like if it was a component ref.  */
+       }
+    }
+  return true;
+}
+
+/* If STMT can be proved to be an assignment to the virtual method table
+   pointer of the instance described by TCI and the type associated with the
+   new table can be identified, return that type.  Otherwise return NULL_TREE.  */
+
+static tree
+extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
+                              HOST_WIDE_INT *type_offset)
+{
+  HOST_WIDE_INT offset, size, max_size;
+  tree lhs, rhs, base, binfo;
+
+  if (!gimple_assign_single_p (stmt))
+    return NULL_TREE;
+
+  lhs = gimple_assign_lhs (stmt);
+  rhs = gimple_assign_rhs1 (stmt);
+  if (TREE_CODE (lhs) != COMPONENT_REF
+      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
+    return NULL_TREE;
+
+  if (operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
+    ;
+  else
+    {
+      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
+      if (offset != tci->offset
+         || size != POINTER_SIZE
+         || max_size != POINTER_SIZE)
+       return NULL_TREE;
+      if (TREE_CODE (base) == MEM_REF)
+       {
+         if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
+             || !integer_zerop (TREE_OPERAND (base, 1)))
+           return NULL_TREE;
+       }
+      else if (!operand_equal_p (tci->instance, base, 0)
+              || tci->offset)
+       return NULL_TREE;
+    }
+
+  binfo = vtable_pointer_value_to_binfo (rhs);
+
+  if (!binfo)
+    return NULL;
+  *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
+  if (TYPE_BINFO (BINFO_TYPE (binfo)) == binfo)
+    return BINFO_TYPE (binfo);
+
+  /* TODO: Figure out the type containing BINFO.  */
+  debug_tree (binfo);
+  return NULL;
+}
+
+/* Record dynamic type change of TCI to TYPE.  */
+
+void
+record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
+{
+  if (dump_file)
+    {
+      if (type)
+       {
+          fprintf (dump_file, "  Recording type: ");
+         print_generic_expr (dump_file, type, TDF_SLIM);
+          fprintf (dump_file, " at offset %i\n", (int)offset);
+       }
+     else
+       fprintf (dump_file, "  Recording unknown type\n");
+    }
+  if (tci->type_maybe_changed
+      && (type != tci->known_current_type
+         || offset != tci->known_current_offset))
+    tci->multiple_types_encountered = true;
+  tci->known_current_type = type;
+  tci->known_current_offset = offset;
+  tci->type_maybe_changed = true;
+}
+
+/* Callback of walk_aliased_vdefs and a helper function for
+   detect_type_change to check whether a particular statement may modify
+   the virtual table pointer, and if possible also determine the new type of
+   the (sub-)object.  It stores its result into DATA, which points to a
+   type_change_info structure.  */
+
+static bool
+check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
+{
+  gimple stmt = SSA_NAME_DEF_STMT (vdef);
+  struct type_change_info *tci = (struct type_change_info *) data;
+  tree fn;
+
+  /* If we already gave up, just terminate the rest of the walk.  */
+  if (tci->multiple_types_encountered)
+    return true;
+
+  /* Check for a constructor call.  */
+  if (is_gimple_call (stmt)
+      && (fn = gimple_call_fndecl (stmt)) != NULL_TREE
+      && DECL_CXX_CONSTRUCTOR_P (fn)
+      && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
+      && gimple_call_num_args (stmt))
+    {
+      tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
+      tree type = method_class_type (TREE_TYPE (fn));
+      HOST_WIDE_INT offset, size, max_size;
+
+      if (dump_file)
+       {
+         fprintf (dump_file, "  Checking constructor call: ");
+         print_gimple_stmt (dump_file, stmt, 0, 0);
+       }
+
+      if (TREE_CODE (op) == ADDR_EXPR)
+       {
+         op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
+                                       &offset, &size, &max_size);
+         if (op && TREE_CODE (op) == MEM_REF)
+           {
+             if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
+               return false;
+             offset += tree_to_shwi (TREE_OPERAND (op, 1))
+                       * BITS_PER_UNIT;
+             op = TREE_OPERAND (op, 0);
+           }
+         else
+           return false;
+         op = walk_ssa_copies (op);
+       }
+      if (operand_equal_p (op, tci->instance, 0)
+         && TYPE_SIZE (type)
+         && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
+         && tree_fits_shwi_p (TYPE_SIZE (type))
+         && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
+       {
+         record_known_type (tci, type, tci->offset - offset);
+         return true;
+       }
+    }
+
+  /* Check for inlined virtual table store.  */
+  if (stmt_may_be_vtbl_ptr_store (stmt))
+    {
+      tree type;
+      HOST_WIDE_INT offset = 0;
+      if (dump_file)
+       {
+         fprintf (dump_file, "  Checking vtbl store: ");
+         print_gimple_stmt (dump_file, stmt, 0, 0);
+       }
+
+      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
+      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
+      record_known_type (tci, type, offset);
+      return true;
+    }
+  else
+    return false;
+}
+
+/* CONTEXT is a polymorphic call context obtained from get_polymorphic_call_info
+   and INSTANCE is a pointer to the instance it returned.  If the type of the
+   instance is not fully determined (either OUTER_TYPE is unknown or
+   MAYBE_IN_CONSTRUCTION/MAYBE_DERIVED_TYPE is set), try to walk memory writes
+   and find the actual construction of the instance.
+
+   We do not include this analysis in the context analysis itself, because
+   it needs memory SSA to be fully built and the walk may be expensive.
+   So it is not suitable for use within fold_stmt and similar uses.  */
+
+bool
+get_dynamic_type (tree instance,
+                 ipa_polymorphic_call_context *context,
+                 tree otr_type,
+                 gimple call)
+{
+  struct type_change_info tci;
+  ao_ref ao;
+  bool function_entry_reached = false;
+  tree instance_ref = NULL;
+  gimple stmt = call;
+
+  if (!context->maybe_in_construction && !context->maybe_derived_type)
+    return false;
+
+  /* We need to obtain a reference to the virtual table pointer.  It is better
+     to look it up in the code rather than build our own.  This requires a bit
+     of pattern matching, but we end up verifying that what we found is
+     correct.  */
+  if (gimple_code (call) == GIMPLE_CALL)
+    {
+      tree ref = gimple_call_fn (call);
+      HOST_WIDE_INT offset2, size, max_size;
+
+      if (TREE_CODE (ref) == OBJ_TYPE_REF)
+       {
+         ref = OBJ_TYPE_REF_EXPR (ref);
+         ref = walk_ssa_copies (ref);
+
+         /* Check if definition looks like vtable lookup.  */
+         if (TREE_CODE (ref) == SSA_NAME
+             && !SSA_NAME_IS_DEFAULT_DEF (ref)
+             && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
+             && TREE_CODE (gimple_assign_rhs1
+                            (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
+           {
+             ref = get_base_address
+                    (TREE_OPERAND (gimple_assign_rhs1
+                                    (SSA_NAME_DEF_STMT (ref)), 0));
+             ref = walk_ssa_copies (ref);
+             /* Find base address of the lookup and see if it looks like
+                vptr load.  */
+             if (TREE_CODE (ref) == SSA_NAME
+                 && !SSA_NAME_IS_DEFAULT_DEF (ref)
+                 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
+               {
+                 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
+                 tree base_ref = get_ref_base_and_extent (ref_exp, &offset2, &size, &max_size);
+
+                 /* Finally verify that what we found is based on INSTANCE.  */
+                 if (base_ref
+                     && TREE_CODE (base_ref) == MEM_REF
+                     && TREE_OPERAND (base_ref, 0) == instance)
+                   {
+                     stmt = SSA_NAME_DEF_STMT (ref);
+                     instance_ref = ref_exp;
+                   }
+               }
+           }
+       }
+    }
+ 
+
+  /* If we failed to look up the reference in code, build our own.  */
+  if (!instance_ref)
+    {
+      /* If the statement in question does not use memory, we can't tell
+        anything.  */
+      if (!gimple_vuse (stmt))
+       return false;
+      instance_ref = build2 (MEM_REF,
+                            TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))),
+                            instance,
+                            build_int_cst (ptr_type_node,
+                                           context->offset / BITS_PER_UNIT));
+    }
+
+
+  ao_ref_init (&ao, instance_ref);
+  /*ao.base = instance_ref;
+  ao.offset = context->offset;*/
+  ao.size = POINTER_SIZE;
+  ao.max_size = ao.size;
+
+  if (dump_file)
+    {
+      fprintf (dump_file, "Determining dynamic type for call: ");
+      print_gimple_stmt (dump_file, call, 0, 0);
+      fprintf (dump_file, "  Starting walk at: ");
+      print_gimple_stmt (dump_file, stmt, 0, 0);
+      fprintf (dump_file, "  Instance pointer: ");
+      print_generic_expr (dump_file, instance, TDF_SLIM);
+      fprintf (dump_file, " offset: %i (bits)", (int)context->offset);
+      fprintf (dump_file, " vtbl reference: ");
+      print_generic_expr (dump_file, instance_ref, TDF_SLIM);
+      fprintf (dump_file, "\n");
+    }
+
+  tci.offset = context->offset;
+  tci.instance = instance;
+  tci.vtbl_ptr_ref = instance_ref;
+  gcc_assert (TREE_CODE (instance) != MEM_REF);
+  tci.known_current_type = NULL_TREE;
+  tci.known_current_offset = 0;
+  tci.otr_type = otr_type;
+  tci.type_maybe_changed = false;
+  tci.multiple_types_encountered = false;
+
+  walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
+                     &tci, NULL, &function_entry_reached);
+  if (!tci.type_maybe_changed)
+    {
+      if (context->maybe_in_construction)
+        context->maybe_in_construction = false;
+      if (dump_file)
+       fprintf (dump_file, "  No dynamic type change found.\n");
+      return true;
+    }
+
+  if (tci.known_current_type
+      && !function_entry_reached
+      && !tci.multiple_types_encountered)
+    {
+      context->outer_type = tci.known_current_type;
+      context->offset = tci.known_current_offset;
+      context->maybe_in_construction = false;
+      context->maybe_derived_type = false;
+      if (dump_file)
+       fprintf (dump_file, "  Determined dynamic type.\n");
+    }
+  else if (dump_file)
+    fprintf (dump_file, "  Found multiple types.\n");
+
+  return true;
+}
+
 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
    Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
    and insert them to NODES.
Index: ipa-prop.c
===================================================================
--- ipa-prop.c  (revision 212546)
+++ ipa-prop.c  (working copy)
@@ -2134,7 +2134,36 @@
   struct ipa_node_params *info = fbi->info;
   HOST_WIDE_INT offset;
   bool by_ref;
+  struct cgraph_edge *cs = cgraph_edge (cgraph_get_node (current_function_decl), call);
 
+  if (cs->indirect_info->polymorphic)
+    {
+      tree otr_type;
+      HOST_WIDE_INT otr_token;
+      ipa_polymorphic_call_context context;
+      tree instance;
+      tree target = gimple_call_fn (call);
+
+      instance = get_polymorphic_call_info (current_function_decl,
+                                           target,
+                                           &otr_type, &otr_token,
+                                           &context, call);
+
+      if (get_dynamic_type (instance, &context, otr_type, call))
+       {
+         gcc_assert (TREE_CODE (otr_type) == RECORD_TYPE);
+         cs->indirect_info->polymorphic = true;
+         cs->indirect_info->param_index = -1;
+         cs->indirect_info->otr_token = otr_token;
+         cs->indirect_info->otr_type = otr_type;
+         cs->indirect_info->outer_type = context.outer_type;
+         cs->indirect_info->offset = context.offset;
+         cs->indirect_info->maybe_in_construction
+            = context.maybe_in_construction;
+         cs->indirect_info->maybe_derived_type = context.maybe_derived_type;
+       }
+    }
+
   if (SSA_NAME_IS_DEFAULT_DEF (target))
     {
       tree var = SSA_NAME_VAR (target);
Index: ipa-utils.h
===================================================================
--- ipa-utils.h (revision 212546)
+++ ipa-utils.h (working copy)
@@ -89,6 +89,7 @@
                                HOST_WIDE_INT *,
                                ipa_polymorphic_call_context *,
                                gimple call = NULL);
+bool get_dynamic_type (tree, ipa_polymorphic_call_context *, tree, gimple);
 bool get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *,
                                               tree, tree, HOST_WIDE_INT);
 bool decl_maybe_in_construction_p (tree, tree, gimple, tree);
Index: tree-ssa-pre.c
===================================================================
--- tree-ssa-pre.c      (revision 212546)
+++ tree-ssa-pre.c      (working copy)
@@ -63,6 +63,7 @@
 #include "domwalk.h"
 #include "ipa-prop.h"
 #include "tree-ssa-propagate.h"
+#include "ipa-utils.h"
 
 /* TODO:
 
@@ -4359,12 +4360,34 @@
        {
          tree fn = gimple_call_fn (stmt);
          if (fn
-             && TREE_CODE (fn) == OBJ_TYPE_REF
-             && TREE_CODE (OBJ_TYPE_REF_EXPR (fn)) == SSA_NAME)
+             && flag_devirtualize
+             && virtual_method_call_p (fn))
            {
-             fn = ipa_intraprocedural_devirtualization (stmt);
-             if (fn && dbg_cnt (devirt))
+             tree otr_type;
+             HOST_WIDE_INT otr_token;
+             ipa_polymorphic_call_context context;
+             tree instance;
+             bool final;
+
+             instance = get_polymorphic_call_info (current_function_decl,
+                                                   fn,
+                                                   &otr_type, &otr_token, &context, stmt);
+
+             get_dynamic_type (instance, &context, otr_type, stmt);
+
+             vec <cgraph_node *>targets
+               = possible_polymorphic_call_targets (obj_type_ref_class (fn),
+                                                    tree_to_uhwi
+                                                      (OBJ_TYPE_REF_TOKEN (fn)),
+                                                    context,
+                                                    &final);
+             if (final && targets.length () <= 1 && dbg_cnt (devirt))
                {
+                 tree fn;
+                 if (targets.length () == 1)
+                   fn = targets[0]->decl;
+                 else
+                   fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  if (dump_enabled_p ())
                    {
                      location_t loc = gimple_location_safe (stmt);
@@ -4376,6 +4399,8 @@
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_set_modified (stmt, true);
                }
+             else
+               gcc_assert (!ipa_intraprocedural_devirtualization (stmt));
            }
        }
 
