Hi,

On Mon, Mar 12, 2012 at 04:26:21PM +0100, Michael Matz wrote:
> Hi,
> 
> On Mon, 12 Mar 2012, Martin Jambor wrote:
> 
> > when we expand a misaligned MEM_REF on the LHS, we must not call the
> > code in expand_expr_real_1 if the subsequent patch is applied, because
> > the code generates code extracting the contents of the memory to a
> > register, which is of course bad if the intent is to write into that
> > memory.
> 
> Then expand_expr_real_1 should be called with EXPAND_WRITE modifier, 
> instead of any of the others.  Then it will (or should) return an lvalue.  
> That might still be wrong for alignment reasons, but writing into the so 
> returned rtx will change the original object.

OK, the following patch changes the places where I previously called
the new function so that they call expand_expr with the EXPAND_WRITE
modifier, and it then makes sure that expand_expr_real_1 does not
perform reads into rvalues in the contexts where I need to avoid that.
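
For illustration only, here is a hypothetical example (mine, not part of
the patch or its testsuite; the names v4si, v4si_u and store_unaligned
are made up) of the kind of misaligned MEM_REF store on the LHS this
deals with, assuming a target that provides a movmisalign<mode> pattern
for the vector mode:

/* Hypothetical test case: the store below is a misaligned MEM_REF on
   the LHS, so expand_assignment must obtain a writable MEM (via
   EXPAND_WRITE) instead of letting expand_expr_real_1 load the memory
   into a register.  */
typedef int v4si __attribute__ ((vector_size (16)));
typedef v4si v4si_u __attribute__ ((aligned (4)));  /* under-aligned */

void
store_unaligned (v4si_u *p, v4si x)
{
  *p = x;  /* expanded via movmisalign<mode> when the target has it */
}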

So far the patch has passed bootstrap and testing on x86_64-linux;
bootstraps and testsuite runs on the other platforms are still
underway.  What do you think?

Thanks,

Martin


2012-03-12  Martin Jambor  <mjam...@suse.cz>

        * expr.c (expand_assignment): Use expand_expr with EXPAND_WRITE
        when expanding MEM_REFs, TARGET_MEM_REFs and handled_component
        bases.
        (expand_expr_real_1): Do not handle misalignment if modifier is
        EXPAND_WRITE.

Index: src/gcc/expr.c
===================================================================
--- src.orig/gcc/expr.c
+++ src/gcc/expr.c
@@ -4600,49 +4600,16 @@ expand_assignment (tree to, tree from, b
           != CODE_FOR_nothing)
          || SLOW_UNALIGNED_ACCESS (mode, align)))
     {
-      addr_space_t as
-       = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
-      struct expand_operand ops[2];
-      enum machine_mode address_mode;
-      rtx reg, op0, mem;
+      rtx reg, mem;
 
       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
       reg = force_not_mem (reg);
-
-      if (TREE_CODE (to) == MEM_REF)
-       {
-         tree base = TREE_OPERAND (to, 0);
-         address_mode = targetm.addr_space.address_mode (as);
-         op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
-         op0 = convert_memory_address_addr_space (address_mode, op0, as);
-         if (!integer_zerop (TREE_OPERAND (to, 1)))
-           {
-             rtx off
-               = immed_double_int_const (mem_ref_offset (to), address_mode);
-             op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
-           }
-         op0 = memory_address_addr_space (mode, op0, as);
-         mem = gen_rtx_MEM (mode, op0);
-         set_mem_attributes (mem, to, 0);
-         set_mem_addr_space (mem, as);
-       }
-      else if (TREE_CODE (to) == TARGET_MEM_REF)
-       {
-         struct mem_address addr;
-         get_address_description (to, &addr);
-         op0 = addr_for_mem_ref (&addr, as, true);
-         op0 = memory_address_addr_space (mode, op0, as);
-         mem = gen_rtx_MEM (mode, op0);
-         set_mem_attributes (mem, to, 0);
-         set_mem_addr_space (mem, as);
-       }
-      else
-       gcc_unreachable ();
-      if (TREE_THIS_VOLATILE (to))
-       MEM_VOLATILE_P (mem) = 1;
+      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
 
       if (icode != CODE_FOR_nothing)
        {
+         struct expand_operand ops[2];
+
          create_fixed_operand (&ops[0], mem);
          create_input_operand (&ops[1], reg, mode);
          /* The movmisalign<mode> pattern cannot fail, else the assignment
@@ -4695,31 +4662,11 @@ expand_assignment (tree to, tree from, b
          && ((icode = optab_handler (movmisalign_optab, mode))
              != CODE_FOR_nothing))
        {
-         enum machine_mode address_mode;
-         rtx op0;
          struct expand_operand ops[2];
-         addr_space_t as = TYPE_ADDR_SPACE
-             (TREE_TYPE (TREE_TYPE (TREE_OPERAND (tem, 0))));
-         tree base = TREE_OPERAND (tem, 0);
 
          misalignp = true;
          to_rtx = gen_reg_rtx (mode);
-
-         address_mode = targetm.addr_space.address_mode (as);
-         op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
-         op0 = convert_memory_address_addr_space (address_mode, op0, as);
-         if (!integer_zerop (TREE_OPERAND (tem, 1)))
-           {
-             rtx off = immed_double_int_const (mem_ref_offset (tem),
-                                               address_mode);
-             op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
-           }
-         op0 = memory_address_addr_space (mode, op0, as);
-         mem = gen_rtx_MEM (mode, op0);
-         set_mem_attributes (mem, tem, 0);
-         set_mem_addr_space (mem, as);
-         if (TREE_THIS_VOLATILE (tem))
-           MEM_VOLATILE_P (mem) = 1;
+         mem = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
 
          /* If the misaligned store doesn't overwrite all bits, perform
             rmw cycle on MEM.  */
@@ -4737,7 +4684,7 @@ expand_assignment (tree to, tree from, b
       else
        {
          misalignp = false;
-         to_rtx = expand_normal (tem);
+         to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
        }
 
       /* If the bitfield is volatile, we want to access it in the
@@ -9373,7 +9320,8 @@ expand_expr_real_1 (tree exp, rtx target
        set_mem_attributes (temp, exp, 0);
        set_mem_addr_space (temp, as);
        align = get_object_or_type_alignment (exp);
-       if (mode != BLKmode
+       if (modifier != EXPAND_WRITE
+           && mode != BLKmode
            && align < GET_MODE_ALIGNMENT (mode)
            /* If the target does not have special handling for unaligned
               loads of mode then it can use regular moves for them.  */
@@ -9461,7 +9409,8 @@ expand_expr_real_1 (tree exp, rtx target
        set_mem_addr_space (temp, as);
        if (TREE_THIS_VOLATILE (exp))
          MEM_VOLATILE_P (temp) = 1;
-       if (mode != BLKmode
+       if (modifier != EXPAND_WRITE
+           && mode != BLKmode
            && align < GET_MODE_ALIGNMENT (mode)
            /* If the target does not have special handling for unaligned
               loads of mode then it can use regular moves for them.  */
