This is my first contribution to GCC :) One of the beginner projects
suggested on the website was to add and use RTL type predicates. I
added predicates for addition (PLUS_P), subtraction (MINUS_P) and
multiplication (MULT_P), and went through the tree replacing the
equivalent open-coded GET_CODE checks with them.
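
For context, the new macros follow the style of the existing REG_P /
MEM_P predicates. Roughly (this is only a sketch; the definitive
definitions are in the rtl.h hunk of the patch):

  /* Sketch of the rtl.h additions, modelled on REG_P and friends.  */
  #define PLUS_P(RTX)  (GET_CODE (RTX) == PLUS)
  #define MINUS_P(RTX) (GET_CODE (RTX) == MINUS)
  #define MULT_P(RTX)  (GET_CODE (RTX) == MULT)

A typical call site then changes from

  if (GET_CODE (XEXP (x, 0)) == PLUS)

to

  if (PLUS_P (XEXP (x, 0)))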

I did not add tests because the patch does not add or modify any
behavior. All existing tests pass.

Like I said, this is my first patch. Please let me know if I did
anything wrong or if there's anything I can improve for next time.

Signed-off-by: Charlie Sale <softwaresal...@gmail.com>
---
 gcc/ChangeLog            |  43 +++++++
 gcc/alias.cc             |  30 +++--
 gcc/auto-inc-dec.cc      |  11 +-
 gcc/calls.cc             |   8 +-
 gcc/cfgexpand.cc         |  16 +--
 gcc/combine-stack-adj.cc |  39 +++----
 gcc/combine.cc           | 241 +++++++++++++++++----------------------
 gcc/compare-elim.cc      |   3 +-
 gcc/cse.cc               |  66 +++++------
 gcc/cselib.cc            |  37 +++---
 gcc/dce.cc               |   4 +-
 gcc/dwarf2cfi.cc         |   2 +-
 gcc/dwarf2out.cc         |  11 +-
 gcc/emit-rtl.cc          |   6 +-
 gcc/explow.cc            |  31 ++---
 gcc/expr.cc              |  23 ++--
 gcc/final.cc             |  20 ++--
 gcc/function.cc          |   7 +-
 gcc/fwprop.cc            |   2 +-
 gcc/haifa-sched.cc       |  10 +-
 gcc/ifcvt.cc             |  11 +-
 gcc/ira.cc               |   6 +-
 gcc/loop-doloop.cc       |  70 ++++++------
 gcc/loop-iv.cc           |  21 +---
 gcc/lra-constraints.cc   |  34 +++---
 gcc/lra-eliminations.cc  |  25 ++--
 gcc/lra.cc               |   6 +-
 gcc/modulo-sched.cc      |   2 +-
 gcc/postreload.cc        |  25 ++--
 gcc/reginfo.cc           |  12 +-
 gcc/reload.cc            | 180 +++++++++++++----------------
 gcc/reload1.cc           |  85 ++++++--------
 gcc/reorg.cc             |  12 +-
 gcc/rtl.cc               |   3 +-
 gcc/rtl.h                |  11 ++
 gcc/rtlanal.cc           |  25 ++--
 gcc/sched-deps.cc        |   8 +-
 gcc/simplify-rtx.cc      | 143 +++++++++--------------
 gcc/var-tracking.cc      |  37 +++---
 39 files changed, 595 insertions(+), 731 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index f999e2cba43..1fd2c94c873 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,46 @@
+2022-11-26  Charlie Sale   <softwaresal...@gmail.com>
+
+       * rtl.h (PLUS_P): New predicate for RTL addition.
+       (MINUS_P): New predicate for RTL subtraction.
+       (MULT_P): New predicate for RTL multiplication.
+       * alias.cc: Use the new RTL predicates.
+       * auto-inc-dec.cc: Likewise.
+       * calls.cc: Likewise.
+       * cfgexpand.cc: Likewise.
+       * combine-stack-adj.cc: Likewise.
+       * combine.cc: Likewise.
+       * compare-elim.cc: Likewise.
+       * cse.cc: Likewise.
+       * cselib.cc: Likewise.
+       * dce.cc: Likewise.
+       * dwarf2cfi.cc: Likewise.
+       * dwarf2out.cc: Likewise.
+       * emit-rtl.cc: Likewise.
+       * explow.cc: Likewise.
+       * expr.cc: Likewise.
+       * final.cc: Likewise.
+       * function.cc: Likewise.
+       * fwprop.cc: Likewise.
+       * haifa-sched.cc: Likewise.
+       * ifcvt.cc: Likewise.
+       * ira.cc: Likewise.
+       * loop-doloop.cc: Likewise.
+       * loop-iv.cc: Likewise.
+       * lra-constraints.cc: Likewise.
+       * lra-eliminations.cc: Likewise.
+       * lra.cc: Likewise.
+       * modulo-sched.cc: Likewise.
+       * postreload.cc: Likewise.
+       * reginfo.cc: Likewise.
+       * reload.cc: Likewise.
+       * reload1.cc: Likewise.
+       * reorg.cc: Likewise.
+       * rtl.cc: Likewise.
+       * rtlanal.cc: Likewise.
+       * sched-deps.cc: Likewise.
+       * simplify-rtx.cc: Likewise.
+       * var-tracking.cc: Likewise.
+
 2022-11-25  Sandra Loosemore  <san...@codesourcery.com>
 
        * common.opt (fopenmp-target-simd-clone): New option.
diff --git a/gcc/alias.cc b/gcc/alias.cc
index c62837dd854..2d9bd79fe21 100644
--- a/gcc/alias.cc
+++ b/gcc/alias.cc
@@ -1473,7 +1473,7 @@ find_base_value (rtx src)
         otherwise.  */
       if (copying_arguments
          && (XEXP (src, 0) == arg_pointer_rtx
-             || (GET_CODE (XEXP (src, 0)) == PLUS
+             || (PLUS_P (XEXP (src, 0))
                  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
        return arg_base_value;
       return 0;
@@ -1790,7 +1790,7 @@ canon_rtx (rtx x)
        return canon_rtx (t);
     }
 
-  if (GET_CODE (x) == PLUS)
+  if (PLUS_P (x))
     {
       rtx x0 = canon_rtx (XEXP (x, 0));
       rtx x1 = canon_rtx (XEXP (x, 1));
@@ -2357,19 +2357,17 @@ get_addr (rtx x)
 
   if (GET_CODE (x) != VALUE)
     {
-      if ((GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
-         && GET_CODE (XEXP (x, 0)) == VALUE
+      if ((PLUS_P (x) || MINUS_P (x)) && GET_CODE (XEXP (x, 0)) == VALUE
          && CONST_SCALAR_INT_P (XEXP (x, 1)))
        {
          rtx op0 = get_addr (XEXP (x, 0));
          if (op0 != XEXP (x, 0))
            {
              poly_int64 c;
-             if (GET_CODE (x) == PLUS
-                 && poly_int_rtx_p (XEXP (x, 1), &c))
+             if (PLUS_P (x) && poly_int_rtx_p (XEXP (x, 1), &c))
                return plus_constant (GET_MODE (x), op0, c);
-             return simplify_gen_binary (GET_CODE (x), GET_MODE (x),
-                                         op0, XEXP (x, 1));
+             return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0,
+                                         XEXP (x, 1));
            }
        }
       return x;
@@ -2570,7 +2568,7 @@ memrefs_conflict_p (poly_int64 xsize, rtx x, poly_int64 
ysize, rtx y,
   /* This code used to check for conflicts involving stack references and
      globals but the base address alias code now handles these cases.  */
 
-  if (GET_CODE (x) == PLUS)
+  if (PLUS_P (x))
     {
       /* The fact that X is canonicalized means that this
         PLUS rtx is canonicalized.  */
@@ -2585,7 +2583,7 @@ memrefs_conflict_p (poly_int64 xsize, rtx x, poly_int64 
ysize, rtx y,
        return memrefs_conflict_p (xsize, x0, ysize, const0_rtx, c);
 
       poly_int64 cx1, cy1;
-      if (GET_CODE (y) == PLUS)
+      if (PLUS_P (y))
        {
          /* The fact that Y is canonicalized means that this
             PLUS rtx is canonicalized.  */
@@ -2617,7 +2615,7 @@ memrefs_conflict_p (poly_int64 xsize, rtx x, poly_int64 
ysize, rtx y,
       else if (poly_int_rtx_p (x1, &cx1))
        return memrefs_conflict_p (xsize, x0, ysize, y, c - cx1);
     }
-  else if (GET_CODE (y) == PLUS)
+  else if (PLUS_P (y))
     {
       /* The fact that Y is canonicalized means that this
         PLUS rtx is canonicalized.  */
@@ -3506,13 +3504,13 @@ init_alias_analysis (void)
                                                        XEXP (note, 0)))
                        {
                          set_reg_known_value (regno, XEXP (note, 0));
-                         set_reg_known_equiv_p (regno,
-                                                REG_NOTE_KIND (note) == 
REG_EQUIV);
+                         set_reg_known_equiv_p (regno, REG_NOTE_KIND (note)
+                                                         == REG_EQUIV);
                        }
-                     else if (DF_REG_DEF_COUNT (regno) == 1
-                              && GET_CODE (src) == PLUS
+                     else if (DF_REG_DEF_COUNT (regno) == 1 && PLUS_P (src)
                               && REG_P (XEXP (src, 0))
-                              && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
+                              && (t = get_reg_known_value (
+                                    REGNO (XEXP (src, 0))))
                               && poly_int_rtx_p (XEXP (src, 1), &offset))
                        {
                          t = plus_constant (GET_MODE (src), t, offset);
diff --git a/gcc/auto-inc-dec.cc b/gcc/auto-inc-dec.cc
index 481e7af6895..d4ca5d8954f 100644
--- a/gcc/auto-inc-dec.cc
+++ b/gcc/auto-inc-dec.cc
@@ -882,7 +882,7 @@ parse_add_or_inc (rtx_insn *insn, bool before_mem)
     {
       /* Process a = b + c where c is a const.  */
       inc_insn.reg1_is_const = true;
-      if (GET_CODE (SET_SRC (pat)) == PLUS)
+      if (PLUS_P (SET_SRC (pat)))
        {
          inc_insn.reg1 = XEXP (SET_SRC (pat), 1);
          inc_insn.reg1_val = INTVAL (inc_insn.reg1);
@@ -895,8 +895,7 @@ parse_add_or_inc (rtx_insn *insn, bool before_mem)
       return true;
     }
   else if ((HAVE_PRE_MODIFY_REG || HAVE_POST_MODIFY_REG)
-          && (REG_P (XEXP (SET_SRC (pat), 1)))
-          && GET_CODE (SET_SRC (pat)) == PLUS)
+          && (REG_P (XEXP (SET_SRC (pat), 1))) && PLUS_P (SET_SRC (pat)))
     {
       /* Process a = b + c where c is a reg.  */
       inc_insn.reg1 = XEXP (SET_SRC (pat), 1);
@@ -964,8 +963,7 @@ find_address (rtx *address_of_x, rtx findreg)
       mem_insn.reg1 = GEN_INT (mem_insn.reg1_val);
       return -1;
     }
-  if (code == MEM && findreg == inc_insn.reg_res
-      && GET_CODE (XEXP (x, 0)) == PLUS
+  if (code == MEM && findreg == inc_insn.reg_res && PLUS_P (XEXP (x, 0))
       && rtx_equal_p (XEXP (XEXP (x, 0), 0), inc_insn.reg_res))
     {
       rtx b = XEXP (XEXP (x, 0), 1);
@@ -1357,8 +1355,7 @@ find_mem (rtx *address_of_x)
       if (find_inc (true))
        return true;
     }
-  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
-      && REG_P (XEXP (XEXP (x, 0), 0)))
+  if (code == MEM && PLUS_P (XEXP (x, 0)) && REG_P (XEXP (XEXP (x, 0), 0)))
     {
       rtx reg1 = XEXP (XEXP (x, 0), 1);
       mem_insn.mem_loc = address_of_x;
diff --git a/gcc/calls.cc b/gcc/calls.cc
index 6dd6f73e978..1c00f47c8dd 100644
--- a/gcc/calls.cc
+++ b/gcc/calls.cc
@@ -1782,7 +1782,7 @@ compute_argument_addresses (struct arg_data *args, rtx 
argblock, int num_actuals
       int i;
       poly_int64 arg_offset = 0;
 
-      if (GET_CODE (argblock) == PLUS)
+      if (PLUS_P (argblock))
        {
          arg_reg = XEXP (argblock, 0);
          arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
@@ -1991,7 +1991,7 @@ internal_arg_pointer_based_exp (const_rtx rtl, bool 
toplevel)
     return NULL_RTX;
 
   poly_int64 offset;
-  if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
+  if (PLUS_P (rtl) && poly_int_rtx_p (XEXP (rtl, 1), &offset))
     {
       rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
       if (val == NULL_RTX || val == pc_rtx)
@@ -4900,7 +4900,7 @@ store_one_arg (struct arg_data *arg, rtx argblock, int 
flags,
            {
              /* stack_slot is negative, but we want to index stack_usage_map
                 with positive values.  */
-             if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
+             if (PLUS_P (XEXP (arg->stack_slot, 0)))
                {
                  rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
                  upper_bound = -rtx_to_poly_int64 (offset) + 1;
@@ -4912,7 +4912,7 @@ store_one_arg (struct arg_data *arg, rtx argblock, int 
flags,
            }
          else
            {
-             if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
+             if (PLUS_P (XEXP (arg->stack_slot, 0)))
                {
                  rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
                  lower_bound = rtx_to_poly_int64 (offset);
diff --git a/gcc/cfgexpand.cc b/gcc/cfgexpand.cc
index dd29ffffc03..e839f30378d 100644
--- a/gcc/cfgexpand.cc
+++ b/gcc/cfgexpand.cc
@@ -4277,8 +4277,7 @@ convert_debug_memory_address (scalar_int_mode mode, rtx x,
        case SUBREG:
          if ((SUBREG_PROMOTED_VAR_P (x)
               || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
-              || (GET_CODE (SUBREG_REG (x)) == PLUS
-                  && REG_P (XEXP (SUBREG_REG (x), 0))
+              || (PLUS_P (SUBREG_REG (x)) && REG_P (XEXP (SUBREG_REG (x), 0))
                   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
                   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
              && GET_MODE (SUBREG_REG (x)) == mode)
@@ -4398,12 +4397,10 @@ expand_debug_parm_decl (tree decl)
       return rtl;
     }
 
-  if (incoming
-      && GET_MODE (incoming) != BLKmode
-      && !TREE_ADDRESSABLE (decl)
+  if (incoming && GET_MODE (incoming) != BLKmode && !TREE_ADDRESSABLE (decl)
       && MEM_P (incoming)
       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
-         || (GET_CODE (XEXP (incoming, 0)) == PLUS
+         || (PLUS_P (XEXP (incoming, 0))
              && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
              && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
     return copy_rtx (incoming);
@@ -4687,7 +4684,7 @@ expand_debug_expr (tree exp)
       if (TREE_CODE (exp) == MEM_REF)
        {
          if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
-             || (GET_CODE (op0) == PLUS
+             || (PLUS_P (op0)
                  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
            /* (mem (debug_implicit_ptr)) might confuse aliasing.
               Instead just use get_inner_reference.  */
@@ -5204,11 +5201,10 @@ expand_debug_expr (tree exp)
              && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                 == ADDR_EXPR)
            {
-             op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
-                                                    0));
+             op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
              if (op0 != NULL
                  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
-                     || (GET_CODE (op0) == PLUS
+                     || (PLUS_P (op0)
                          && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
                          && CONST_INT_P (XEXP (op0, 1)))))
                {
diff --git a/gcc/combine-stack-adj.cc b/gcc/combine-stack-adj.cc
index e76d60a8f7e..ce3276513eb 100644
--- a/gcc/combine-stack-adj.cc
+++ b/gcc/combine-stack-adj.cc
@@ -101,8 +101,7 @@ stack_memref_p (rtx x)
 
   if (x == stack_pointer_rtx)
     return 1;
-  if (GET_CODE (x) == PLUS
-      && XEXP (x, 0) == stack_pointer_rtx
+  if (PLUS_P (x) && XEXP (x, 0) == stack_pointer_rtx
       && CONST_INT_P (XEXP (x, 1)))
     return 1;
 
@@ -570,18 +569,16 @@ combine_stack_adjustments_for_block (basic_block bb, 
bitmap live)
          HOST_WIDE_INT this_adjust = 0;
 
          /* Find constant additions to the stack pointer.  */
-         if (dest == stack_pointer_rtx
-             && GET_CODE (src) == PLUS
+         if (dest == stack_pointer_rtx && PLUS_P (src)
              && XEXP (src, 0) == stack_pointer_rtx
              && CONST_INT_P (XEXP (src, 1)))
            this_adjust = INTVAL (XEXP (src, 1));
          /* Or such additions turned by postreload into a store of
             equivalent register.  */
-         else if (dest == stack_pointer_rtx
-                  && REG_P (src)
+         else if (dest == stack_pointer_rtx && REG_P (src)
                   && REGNO (src) != STACK_POINTER_REGNUM)
            if (rtx equal = find_reg_note (insn, REG_EQUAL, NULL_RTX))
-             if (GET_CODE (XEXP (equal, 0)) == PLUS
+             if (PLUS_P (XEXP (equal, 0))
                  && XEXP (XEXP (equal, 0), 0) == stack_pointer_rtx
                  && CONST_INT_P (XEXP (XEXP (equal, 0), 1)))
                this_adjust = INTVAL (XEXP (XEXP (equal, 0), 1));
@@ -713,27 +710,25 @@ combine_stack_adjustments_for_block (basic_block bb, 
bitmap live)
             be an allocation.  */
          if (MEM_P (dest)
              && ((STACK_GROWS_DOWNWARD
-                  ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
-                     && known_eq (last_sp_adjust,
-                                  GET_MODE_SIZE (GET_MODE (dest))))
-                  : (GET_CODE (XEXP (dest, 0)) == PRE_INC
-                     && known_eq (-last_sp_adjust,
-                                  GET_MODE_SIZE (GET_MODE (dest)))))
-                 || ((STACK_GROWS_DOWNWARD
-                      ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
+                    ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
+                       && known_eq (last_sp_adjust,
+                                    GET_MODE_SIZE (GET_MODE (dest))))
+                    : (GET_CODE (XEXP (dest, 0)) == PRE_INC
+                       && known_eq (-last_sp_adjust,
+                                    GET_MODE_SIZE (GET_MODE (dest)))))
+                 || ((STACK_GROWS_DOWNWARD ? last_sp_adjust >= 0
+                                           : last_sp_adjust <= 0)
                      && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
-                     && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
-                     && XEXP (XEXP (XEXP (dest, 0), 1), 0)
-                        == stack_pointer_rtx
+                     && PLUS_P (XEXP (XEXP (dest, 0), 1))
+                     && XEXP (XEXP (XEXP (dest, 0), 1), 0) == stack_pointer_rtx
                      && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
-                        == CONST_INT
+                          == CONST_INT
                      && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
-                        == -last_sp_adjust))
+                          == -last_sp_adjust))
              && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
              && !reg_mentioned_p (stack_pointer_rtx, src)
              && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
-             && try_apply_stack_adjustment (insn, reflist, 0,
-                                            -last_sp_adjust,
+             && try_apply_stack_adjustment (insn, reflist, 0, -last_sp_adjust,
                                             NULL, NULL))
            {
              if (last2_sp_set)
diff --git a/gcc/combine.cc b/gcc/combine.cc
index a5fabf397f7..d63455efd1f 100644
--- a/gcc/combine.cc
+++ b/gcc/combine.cc
@@ -3016,19 +3016,17 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, 
rtx_insn *i0,
   /* See if any of the insns is a MULT operation.  Unless one is, we will
      reject a combination that is, since it must be slower.  Be conservative
      here.  */
-  if (GET_CODE (i2src) == MULT
-      || (i1 != 0 && GET_CODE (i1src) == MULT)
-      || (i0 != 0 && GET_CODE (i0src) == MULT)
-      || (GET_CODE (PATTERN (i3)) == SET
-         && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
+  if (MULT_P (i2src) || (i1 != 0 && MULT_P (i1src))
+      || (i0 != 0 && MULT_P (i0src))
+      || (GET_CODE (PATTERN (i3)) == SET && MULT_P (SET_SRC (PATTERN (i3)))))
     have_mult = 1;
 
-  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
-     We used to do this EXCEPT in one case: I3 has a post-inc in an
-     output operand.  However, that exception can give rise to insns like
-       mov r3,(r3)+
-     which is a famous insn on the PDP-11 where the value of r3 used as the
-     source was model-dependent.  Avoid this sort of thing.  */
+    /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
+       We used to do this EXCEPT in one case: I3 has a post-inc in an
+       output operand.  However, that exception can give rise to insns like
+         mov r3,(r3)+
+       which is a famous insn on the PDP-11 where the value of r3 used as the
+       source was model-dependent.  Avoid this sort of thing.  */
 
 #if 0
   if (!(GET_CODE (PATTERN (i3)) == SET
@@ -3383,15 +3381,12 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, 
rtx_insn *i0,
 
   /* Fail if an autoincrement side-effect has been duplicated.  Be careful
      to count all the ways that I2SRC and I1SRC can be used.  */
-  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
-       && i2_is_used + added_sets_2 > 1)
+  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0 && i2_is_used + added_sets_2 > 1)
       || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
-         && (i1_is_used + added_sets_1 + (added_sets_2 && i1_feeds_i2_n)
-             > 1))
+         && (i1_is_used + added_sets_1 + (added_sets_2 && i1_feeds_i2_n) > 1))
       || (i0 != 0 && FIND_REG_INC_NOTE (i0, NULL_RTX) != 0
-         && (n_occurrences + added_sets_0
-             + (added_sets_1 && i0_feeds_i1_n)
-             + (added_sets_2 && i0_feeds_i2_n)
+         && (n_occurrences + added_sets_0 + (added_sets_1 && i0_feeds_i1_n)
+               + (added_sets_2 && i0_feeds_i2_n)
              > 1))
       /* Fail if we tried to make a new register.  */
       || max_reg_num () != maxreg
@@ -3399,8 +3394,8 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, 
rtx_insn *i0,
       || GET_CODE (newpat) == CLOBBER
       /* Fail if this new pattern is a MULT and we didn't have one before
         at the outer level.  */
-      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
-         && ! have_mult))
+      || (GET_CODE (newpat) == SET && MULT_P (SET_SRC (newpat))
+         && !have_mult))
     {
       undo_all ();
       return 0;
@@ -3766,8 +3761,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, 
rtx_insn *i0,
            }
 
          /* Similarly for (plus (mult FOO (const_int pow2))).  */
-         if (split_code == PLUS
-             && GET_CODE (XEXP (*split, 0)) == MULT
+         if (split_code == PLUS && MULT_P (XEXP (*split, 0))
              && CONST_INT_P (XEXP (XEXP (*split, 0), 1))
              && INTVAL (XEXP (XEXP (*split, 0), 1)) > 0
              && (i = exact_log2 (UINTVAL (XEXP (XEXP (*split, 0), 1)))) >= 0)
@@ -4880,14 +4874,13 @@ find_split_point (rtx *loc, rtx_insn *insn, bool 
set_src)
         the machine-specific way to split large constants.  We use
         the first pseudo-reg (one of the virtual regs) as a placeholder;
         it will not remain in the result.  */
-      if (GET_CODE (XEXP (x, 0)) == PLUS
-         && CONST_INT_P (XEXP (XEXP (x, 0), 1))
-         && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
-                                           MEM_ADDR_SPACE (x)))
+      if (PLUS_P (XEXP (x, 0)) && CONST_INT_P (XEXP (XEXP (x, 0), 1))
+         && !memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
+                                          MEM_ADDR_SPACE (x)))
        {
          rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
-         rtx_insn *seq = combine_split_insns (gen_rtx_SET (reg, XEXP (x, 0)),
-                                              subst_insn);
+         rtx_insn *seq
+           = combine_split_insns (gen_rtx_SET (reg, XEXP (x, 0)), subst_insn);
 
          /* This should have produced two insns, each of which sets our
             placeholder.  If the source of the second is a valid address,
@@ -4935,12 +4928,11 @@ find_split_point (rtx *loc, rtx_insn *insn, bool 
set_src)
          /* If that didn't work and we have a nested plus, like:
             ((REG1 * CONST1) + REG2) + CONST2 and (REG1 + REG2) + CONST2
             is valid address, try to split (REG1 * CONST1).  */
-         if (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
+         if (PLUS_P (XEXP (XEXP (x, 0), 0))
              && !OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 0))
              && OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
-             && ! (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SUBREG
-                   && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0),
-                                                        0), 0)))))
+             && !(GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SUBREG
+                  && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0), 0), 0)))))
            {
              rtx tem = XEXP (XEXP (XEXP (x, 0), 0), 0);
              XEXP (XEXP (XEXP (x, 0), 0), 0) = reg;
@@ -4952,12 +4944,12 @@ find_split_point (rtx *loc, rtx_insn *insn, bool 
set_src)
                }
              XEXP (XEXP (XEXP (x, 0), 0), 0) = tem;
            }
-         else if (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
+         else if (PLUS_P (XEXP (XEXP (x, 0), 0))
                   && OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 0))
                   && !OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
-                  && ! (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == SUBREG
-                        && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0),
-                                                             0), 1)))))
+                  && !(GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == SUBREG
+                       && OBJECT_P (
+                         SUBREG_REG (XEXP (XEXP (XEXP (x, 0), 0), 1)))))
            {
              rtx tem = XEXP (XEXP (XEXP (x, 0), 0), 1);
              XEXP (XEXP (XEXP (x, 0), 0), 1) = reg;
@@ -4982,13 +4974,13 @@ find_split_point (rtx *loc, rtx_insn *insn, bool 
set_src)
 
       /* If we have a PLUS whose first operand is complex, try computing it
          separately by making a split there.  */
-      if (GET_CODE (XEXP (x, 0)) == PLUS
-          && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
-                                           MEM_ADDR_SPACE (x))
-          && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
-          && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
-                && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
-        return &XEXP (XEXP (x, 0), 0);
+      if (PLUS_P (XEXP (x, 0))
+         && !memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
+                                          MEM_ADDR_SPACE (x))
+         && !OBJECT_P (XEXP (XEXP (x, 0), 0))
+         && !(GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
+              && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
+       return &XEXP (XEXP (x, 0), 0);
       break;
 
     case SET:
@@ -5239,7 +5231,7 @@ find_split_point (rtx *loc, rtx_insn *insn, bool set_src)
       /* Canonicalization can produce (minus A (mult B C)), where C is a
         constant.  It may be better to try splitting (plus (mult B -C) A)
         instead if this isn't a multiply by a power of two.  */
-      if (set_src && code == MINUS && GET_CODE (XEXP (x, 1)) == MULT
+      if (set_src && code == MINUS && MULT_P (XEXP (x, 1))
          && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
          && !pow2p_hwi (INTVAL (XEXP (XEXP (x, 1), 1))))
        {
@@ -5259,10 +5251,10 @@ find_split_point (rtx *loc, rtx_insn *insn, bool 
set_src)
          the SET_SRC, we likely do not have such an instruction and it's
          worthless to try this split.  */
       if (!set_src
-         && (GET_CODE (XEXP (x, 0)) == MULT
+         && (MULT_P (XEXP (x, 0))
              || (GET_CODE (XEXP (x, 0)) == ASHIFT
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
-        return loc;
+       return loc;
 
     default:
       break;
@@ -6092,7 +6084,7 @@ combine_simplify_rtx (rtx x, machine_mode op0_mode, int 
in_dest,
         the inverse distributive law to see if things simplify.  This
         occurs mostly in addresses, often when unrolling loops.  */
 
-      if (GET_CODE (XEXP (x, 0)) == PLUS)
+      if (PLUS_P (XEXP (x, 0)))
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
@@ -6584,23 +6576,20 @@ simplify_if_then_else (rtx x)
       scalar_int_mode m = int_mode;
       rtx z = 0, c1 = NULL_RTX;
 
-      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
-          || GET_CODE (t) == IOR || GET_CODE (t) == XOR
-          || GET_CODE (t) == ASHIFT
+      if ((PLUS_P (t) || MINUS_P (t) || GET_CODE (t) == IOR
+          || GET_CODE (t) == XOR || GET_CODE (t) == ASHIFT
           || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
          && rtx_equal_p (XEXP (t, 0), f))
        c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
 
       /* If an identity-zero op is commutative, check whether there
         would be a match if we swapped the operands.  */
-      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
-               || GET_CODE (t) == XOR)
+      else if ((PLUS_P (t) || GET_CODE (t) == IOR || GET_CODE (t) == XOR)
               && rtx_equal_p (XEXP (t, 1), f))
        c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
       else if (GET_CODE (t) == SIGN_EXTEND
-              && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
-              && (GET_CODE (XEXP (t, 0)) == PLUS
-                  || GET_CODE (XEXP (t, 0)) == MINUS
+              && is_a<scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
+              && (PLUS_P (XEXP (t, 0)) || MINUS_P (XEXP (t, 0))
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
@@ -6610,35 +6599,31 @@ simplify_if_then_else (rtx x)
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
-                  > (unsigned int)
-                    (GET_MODE_PRECISION (int_mode)
-                     - GET_MODE_PRECISION (inner_mode))))
+                  > (unsigned int) (GET_MODE_PRECISION (int_mode)
+                                    - GET_MODE_PRECISION (inner_mode))))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = inner_mode;
        }
       else if (GET_CODE (t) == SIGN_EXTEND
-              && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
-              && (GET_CODE (XEXP (t, 0)) == PLUS
-                  || GET_CODE (XEXP (t, 0)) == IOR
+              && is_a<scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
+              && (PLUS_P (XEXP (t, 0)) || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
-                  > (unsigned int)
-                    (GET_MODE_PRECISION (int_mode)
-                     - GET_MODE_PRECISION (inner_mode))))
+                  > (unsigned int) (GET_MODE_PRECISION (int_mode)
+                                    - GET_MODE_PRECISION (inner_mode))))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = inner_mode;
        }
       else if (GET_CODE (t) == ZERO_EXTEND
-              && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
-              && (GET_CODE (XEXP (t, 0)) == PLUS
-                  || GET_CODE (XEXP (t, 0)) == MINUS
+              && is_a<scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
+              && (PLUS_P (XEXP (t, 0)) || MINUS_P (XEXP (t, 0))
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
@@ -6657,9 +6642,8 @@ simplify_if_then_else (rtx x)
          m = inner_mode;
        }
       else if (GET_CODE (t) == ZERO_EXTEND
-              && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
-              && (GET_CODE (XEXP (t, 0)) == PLUS
-                  || GET_CODE (XEXP (t, 0)) == IOR
+              && is_a<scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
+              && (PLUS_P (XEXP (t, 0)) || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && HWI_COMPUTABLE_MODE_P (int_mode)
@@ -7386,8 +7370,7 @@ expand_field_assignment (const_rtx x)
            {
              if (CONST_INT_P (pos))
                pos = GEN_INT (inner_len - len - INTVAL (pos));
-             else if (GET_CODE (pos) == MINUS
-                      && CONST_INT_P (XEXP (pos, 1))
+             else if (MINUS_P (pos) && CONST_INT_P (XEXP (pos, 1))
                       && INTVAL (XEXP (pos, 1)) == inner_len - len)
                /* If position is ADJUST - X, new position is X.  */
                pos = XEXP (pos, 0);
@@ -7541,9 +7524,8 @@ make_extraction (machine_mode mode, rtx inner, 
HOST_WIDE_INT pos,
       if (new_rtx != 0)
        return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
     }
-  else if (GET_CODE (inner) == MULT
-          && CONST_INT_P (XEXP (inner, 1))
-          && pos_rtx == 0 && pos == 0)
+  else if (MULT_P (inner) && CONST_INT_P (XEXP (inner, 1)) && pos_rtx == 0
+          && pos == 0)
     {
       /* We're extracting the least significant bits of an rtx
         (mult X (const_int 2^C)), where LEN > C.  Extract the
@@ -8017,19 +7999,18 @@ make_compound_operation_int (scalar_int_mode mode, rtx 
*x_ptr,
       rhs = XEXP (x, 1);
       lhs = make_compound_operation (lhs, next_code);
       rhs = make_compound_operation (rhs, next_code);
-      if (GET_CODE (lhs) == MULT && GET_CODE (XEXP (lhs, 0)) == NEG)
+      if (MULT_P (lhs) && GET_CODE (XEXP (lhs, 0)) == NEG)
        {
          tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (lhs, 0), 0),
                                     XEXP (lhs, 1));
          new_rtx = simplify_gen_binary (MINUS, mode, rhs, tem);
        }
-      else if (GET_CODE (lhs) == MULT
+      else if (MULT_P (lhs)
               && (CONST_INT_P (XEXP (lhs, 1)) && INTVAL (XEXP (lhs, 1)) < 0))
        {
          tem = simplify_gen_binary (MULT, mode, XEXP (lhs, 0),
                                     simplify_gen_unary (NEG, mode,
-                                                        XEXP (lhs, 1),
-                                                        mode));
+                                                        XEXP (lhs, 1), mode));
          new_rtx = simplify_gen_binary (MINUS, mode, rhs, tem);
        }
       else
@@ -8045,19 +8026,18 @@ make_compound_operation_int (scalar_int_mode mode, rtx 
*x_ptr,
       rhs = XEXP (x, 1);
       lhs = make_compound_operation (lhs, next_code);
       rhs = make_compound_operation (rhs, next_code);
-      if (GET_CODE (rhs) == MULT && GET_CODE (XEXP (rhs, 0)) == NEG)
+      if (MULT_P (rhs) && GET_CODE (XEXP (rhs, 0)) == NEG)
        {
          tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (rhs, 0), 0),
                                     XEXP (rhs, 1));
          return simplify_gen_binary (PLUS, mode, tem, lhs);
        }
-      else if (GET_CODE (rhs) == MULT
+      else if (MULT_P (rhs)
               && (CONST_INT_P (XEXP (rhs, 1)) && INTVAL (XEXP (rhs, 1)) < 0))
        {
          tem = simplify_gen_binary (MULT, mode, XEXP (rhs, 0),
                                     simplify_gen_unary (NEG, mode,
-                                                        XEXP (rhs, 1),
-                                                        mode));
+                                                        XEXP (rhs, 1), mode));
          return simplify_gen_binary (PLUS, mode, tem, lhs);
        }
       else
@@ -9288,11 +9268,10 @@ if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
       /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
         operands is zero when the other is nonzero, and vice-versa,
         and STORE_FLAG_VALUE is 1 or -1.  */
-
       if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == PLUS || code == IOR || code == XOR || code == MINUS
              || code == UMAX)
-         && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
+         && MULT_P (XEXP (x, 0)) && MULT_P (XEXP (x, 1)))
        {
          rtx op0 = XEXP (XEXP (x, 0), 1);
          rtx op1 = XEXP (XEXP (x, 1), 1);
@@ -9300,8 +9279,7 @@ if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);
 
-         if (COMPARISON_P (cond0)
-             && COMPARISON_P (cond1)
+         if (COMPARISON_P (cond0) && COMPARISON_P (cond1)
              && SCALAR_INT_MODE_P (mode)
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
@@ -9310,15 +9288,14 @@ if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
                       == reversed_comparison_code (cond1, NULL))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
-             && ! side_effects_p (x))
+             && !side_effects_p (x))
            {
              *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
-             *pfalse = simplify_gen_binary (MULT, mode,
-                                            (code == MINUS
-                                             ? simplify_gen_unary (NEG, mode,
-                                                                   op1, mode)
-                                             : op1),
-                                             const_true_rtx);
+             *pfalse = simplify_gen_binary (
+               MULT, mode,
+               (code == MINUS ? simplify_gen_unary (NEG, mode, op1, mode)
+                              : op1),
+               const_true_rtx);
              return cond0;
            }
        }
@@ -9327,25 +9304,24 @@ if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
         is always zero.  */
       if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == MULT || code == AND || code == UMIN)
-         && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
-       {
-         cond0 = XEXP (XEXP (x, 0), 0);
-         cond1 = XEXP (XEXP (x, 1), 0);
-
-         if (COMPARISON_P (cond0)
-             && COMPARISON_P (cond1)
-             && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
-                  && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
-                  && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
-                 || ((swap_condition (GET_CODE (cond0))
-                      == reversed_comparison_code (cond1, NULL))
-                     && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
-                     && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
-             && ! side_effects_p (x))
-           {
-             *ptrue = *pfalse = const0_rtx;
-             return cond0;
-           }
+         && MULT_P (XEXP (x, 0)) && MULT_P (XEXP (x, 1)))
+       {
+       cond0 = XEXP (XEXP (x, 0), 0);
+       cond1 = XEXP (XEXP (x, 1), 0);
+
+       if (COMPARISON_P (cond0) && COMPARISON_P (cond1)
+           && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
+                && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
+                && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
+               || ((swap_condition (GET_CODE (cond0))
+                    == reversed_comparison_code (cond1, NULL))
+                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
+                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
+           && !side_effects_p (x))
+         {
+           *ptrue = *pfalse = const0_rtx;
+           return cond0;
+         }
        }
     }
 
@@ -10100,7 +10076,7 @@ simplify_and_const_int_1 (scalar_int_mode mode, rtx 
varop,
      the AND and see if one of the operands simplifies to zero.  If so, we
      may eliminate it.  */
 
-  if (GET_CODE (varop) == PLUS
+  if (PLUS_P (varop)
       && pow2p_hwi (constop + 1))
     {
       rtx o0, o1;
@@ -10939,7 +10915,7 @@ simplify_shift_const_1 (enum rtx_code code, 
machine_mode result_mode,
             and STORE_FLAG_VALUE is 1 or we have a logical shift with
             STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
 
-         if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
+         if (GET_CODE (varop) == IOR && PLUS_P (XEXP (varop, 0))
              && XEXP (XEXP (varop, 0), 1) == constm1_rtx
              && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
              && (code == LSHIFTRT || code == ASHIFTRT)
@@ -10947,8 +10923,7 @@ simplify_shift_const_1 (enum rtx_code code, 
machine_mode result_mode,
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
            {
              count = 0;
-             varop = gen_rtx_LE (int_varop_mode, XEXP (varop, 1),
-                                 const0_rtx);
+             varop = gen_rtx_LE (int_varop_mode, XEXP (varop, 1), const0_rtx);
 
              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
                varop = gen_rtx_NEG (int_varop_mode, varop);
@@ -12402,33 +12377,33 @@ simplify_comparison (enum rtx_code code, rtx *pop0, 
rtx *pop1)
              && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op0)),
                                         &inner_mode)
              && GET_MODE_PRECISION (inner_mode) > mode_width
-             && GET_CODE (SUBREG_REG (op0)) == PLUS
+             && PLUS_P (SUBREG_REG (op0))
              && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
            {
              rtx a = XEXP (SUBREG_REG (op0), 0);
              HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
 
              if ((c1 > 0
-                  && (unsigned HOST_WIDE_INT) c1
-                      < HOST_WIDE_INT_1U << (mode_width - 1)
+                  && (unsigned HOST_WIDE_INT) c1 < HOST_WIDE_INT_1U
+                                                     << (mode_width - 1)
                   && (equality_comparison_p || unsigned_comparison_p)
                   /* (A - C1) zero-extends if it is positive and sign-extends
                      if it is negative, C2 both zero- and sign-extends.  */
-                  && (((nonzero_bits (a, inner_mode)
-                        & ~GET_MODE_MASK (mode)) == 0
+                  && (((nonzero_bits (a, inner_mode) & ~GET_MODE_MASK (mode))
+                         == 0
                        && const_op >= 0)
                       /* (A - C1) sign-extends if it is positive and 1-extends
                          if it is negative, C2 both sign- and 1-extends.  */
                       || (num_sign_bit_copies (a, inner_mode)
-                          > (unsigned int) (GET_MODE_PRECISION (inner_mode)
-                                            - mode_width)
+                            > (unsigned int) (GET_MODE_PRECISION (inner_mode)
+                                              - mode_width)
                           && const_op < 0)))
-                 || ((unsigned HOST_WIDE_INT) c1
-                      < HOST_WIDE_INT_1U << (mode_width - 2)
+                 || ((unsigned HOST_WIDE_INT) c1 < HOST_WIDE_INT_1U
+                                                     << (mode_width - 2)
                      /* (A - C1) always sign-extends, like C2.  */
                      && num_sign_bit_copies (a, inner_mode)
-                        > (unsigned int) (GET_MODE_PRECISION (inner_mode)
-                                          - (mode_width - 1))))
+                          > (unsigned int) (GET_MODE_PRECISION (inner_mode)
+                                            - (mode_width - 1))))
                {
                  op0 = SUBREG_REG (op0);
                  continue;
@@ -12559,7 +12534,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx 
*pop1)
        case IOR:
          /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
             iff X <= 0.  */
-         if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
+         if (sign_bit_comparison_p && PLUS_P (XEXP (op0, 0))
              && XEXP (XEXP (op0, 0), 1) == constm1_rtx
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
@@ -12812,14 +12787,12 @@ simplify_comparison (enum rtx_code code, rtx *pop0, 
rtx *pop1)
          /* Likewise if OP0 is a PLUS of a sign extension with a
             constant, which is usually represented with the PLUS
             between the shifts.  */
-         if (! unsigned_comparison_p
-             && CONST_INT_P (XEXP (op0, 1))
-             && GET_CODE (XEXP (op0, 0)) == PLUS
-             && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
+         if (!unsigned_comparison_p && CONST_INT_P (XEXP (op0, 1))
+             && PLUS_P (XEXP (op0, 0)) && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
              && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
              && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
              && (int_mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), 1)
-                 .exists (&tmode))
+                   .exists (&tmode))
              && (((unsigned HOST_WIDE_INT) const_op
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
                  <= GET_MODE_MASK (tmode)))
diff --git a/gcc/compare-elim.cc b/gcc/compare-elim.cc
index 4a23202f8ff..9427b81dc9e 100644
--- a/gcc/compare-elim.cc
+++ b/gcc/compare-elim.cc
@@ -831,8 +831,7 @@ try_eliminate_compare (struct comparison *cmp)
           (minus:SI (reg:SI) (reg:SI))))
       (set (reg:CC flags)
           (compare:CC (reg:SI) (reg:SI)))] */
-  else if (REG_P (in_b)
-          && GET_CODE (SET_SRC (x)) == MINUS
+  else if (REG_P (in_b) && MINUS_P (SET_SRC (x))
           && rtx_equal_p (XEXP (SET_SRC (x), 0), in_a)
           && rtx_equal_p (XEXP (SET_SRC (x), 1), in_b))
     cmp_a = in_a;
diff --git a/gcc/cse.cc b/gcc/cse.cc
index b13afd4ba72..cf542cbd72c 100644
--- a/gcc/cse.cc
+++ b/gcc/cse.cc
@@ -1264,8 +1264,7 @@ find_reg_offset_for_const (struct table_elt *anchor_elt, 
HOST_WIDE_INT offs,
        return match;
 
       if (REG_P (elt->exp)
-         || (GET_CODE (elt->exp) == PLUS
-             && REG_P (XEXP (elt->exp, 0))
+         || (PLUS_P (elt->exp) && REG_P (XEXP (elt->exp, 0))
              && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
        {
          rtx x;
@@ -1276,9 +1275,8 @@ find_reg_offset_for_const (struct table_elt *anchor_elt, 
HOST_WIDE_INT offs,
 
          x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
          if (REG_P (x)
-             || (GET_CODE (x) == PLUS
-                 && IN_RANGE (INTVAL (XEXP (x, 1)),
-                              -targetm.const_anchor,
+             || (PLUS_P (x)
+                 && IN_RANGE (INTVAL (XEXP (x, 1)), -targetm.const_anchor,
                               targetm.const_anchor - 1)))
            {
              match = x;
@@ -3492,40 +3490,48 @@ fold_rtx (rtx x, rtx_insn *insn)
             ADDR_DIFF_VEC table.  */
          if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
            {
-             rtx y
-               = GET_CODE (folded_arg0) == MINUS ? folded_arg0
-               : lookup_as_function (folded_arg0, MINUS);
+             rtx y = MINUS_P (folded_arg0)
+                       ? folded_arg0
+                       : lookup_as_function (folded_arg0, MINUS);
 
              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
-                 && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg1))
+                 && label_ref_label (XEXP (y, 1))
+                      == label_ref_label (const_arg1))
                return XEXP (y, 0);
 
              /* Now try for a CONST of a MINUS like the above.  */
-             if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
-                       : lookup_as_function (folded_arg0, CONST))) != 0
-                 && GET_CODE (XEXP (y, 0)) == MINUS
+             if ((y = (GET_CODE (folded_arg0) == CONST
+                         ? folded_arg0
+                         : lookup_as_function (folded_arg0, CONST)))
+                   != 0
+                 && MINUS_P (XEXP (y, 0))
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
-                 && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg1))
+                 && label_ref_label (XEXP (XEXP (y, 0), 1))
+                      == label_ref_label (const_arg1))
                return XEXP (XEXP (y, 0), 0);
            }
 
          /* Likewise if the operands are in the other order.  */
          if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
            {
-             rtx y
-               = GET_CODE (folded_arg1) == MINUS ? folded_arg1
-               : lookup_as_function (folded_arg1, MINUS);
+             rtx y = MINUS_P (folded_arg1)
+                       ? folded_arg1
+                       : lookup_as_function (folded_arg1, MINUS);
 
              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
-                 && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg0))
+                 && label_ref_label (XEXP (y, 1))
+                      == label_ref_label (const_arg0))
                return XEXP (y, 0);
 
              /* Now try for a CONST of a MINUS like the above.  */
-             if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
-                       : lookup_as_function (folded_arg1, CONST))) != 0
-                 && GET_CODE (XEXP (y, 0)) == MINUS
+             if ((y = (GET_CODE (folded_arg1) == CONST
+                         ? folded_arg1
+                         : lookup_as_function (folded_arg1, CONST)))
+                   != 0
+                 && MINUS_P (XEXP (y, 0))
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
-                 && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg0))
+                 && label_ref_label (XEXP (XEXP (y, 0), 1))
+                      == label_ref_label (const_arg0))
                return XEXP (XEXP (y, 0), 0);
            }
 
@@ -4817,7 +4823,7 @@ cse_insn (rtx_insn *insn)
              /* Consider (minus (label_ref L1) (label_ref L2)) as
                 "constant" here so we will record it. This allows us
                 to fold switch statements when an ADDR_DIFF_VEC is used.  */
-             || (GET_CODE (src_folded) == MINUS
+             || (MINUS_P (src_folded)
                  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
                  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
        src_const = src_folded, src_const_elt = elt;
@@ -5364,9 +5370,10 @@ cse_insn (rtx_insn *insn)
                   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
                       /* Likewise on IA-64, except without the
                          truncate.  */
-                      || (GET_CODE (XEXP (trial, 0)) == MINUS
+                      || (MINUS_P (XEXP (trial, 0))
                           && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
-                          && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
+                          && GET_CODE (XEXP (XEXP (trial, 0), 1))
+                               == LABEL_REF)))
            /* Do nothing for this case.  */
            ;
 
@@ -5489,14 +5496,9 @@ cse_insn (rtx_insn *insn)
         Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
         which can be created for a reference to a compile time computable
         entry in a jump table.  */
-      if (n_sets == 1
-         && REG_P (dest)
-         && src_const
-         && !REG_P (src_const)
-         && !(GET_CODE (src_const) == SUBREG
-              && REG_P (SUBREG_REG (src_const)))
-         && !(GET_CODE (src_const) == CONST
-              && GET_CODE (XEXP (src_const, 0)) == MINUS
+      if (n_sets == 1 && REG_P (dest) && src_const && !REG_P (src_const)
+         && !(GET_CODE (src_const) == SUBREG && REG_P (SUBREG_REG (src_const)))
+         && !(GET_CODE (src_const) == CONST && MINUS_P (XEXP (src_const, 0))
               && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
               && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)
          && !rtx_equal_p (src, src_const))
diff --git a/gcc/cselib.cc b/gcc/cselib.cc
index 2abc763a3f8..9d7ea8ae320 100644
--- a/gcc/cselib.cc
+++ b/gcc/cselib.cc
@@ -481,8 +481,7 @@ invariant_or_equiv_p (cselib_val *v)
        return true;
 
       /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
-      if (GET_CODE (v->locs->loc) == PLUS
-         && CONST_INT_P (XEXP (v->locs->loc, 1))
+      if (PLUS_P (v->locs->loc) && CONST_INT_P (XEXP (v->locs->loc, 1))
          && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
          && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
        return true;
@@ -547,10 +546,9 @@ cselib_reset_table (unsigned int num)
 
       /* If cfa_base is sp + const_int, need to preserve also the
         SP_DERIVED_VALUE_P value.  */
-      for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
-          l; l = l->next)
-       if (GET_CODE (l->loc) == PLUS
-           && GET_CODE (XEXP (l->loc, 0)) == VALUE
+      for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l;
+          l = l->next)
+       if (PLUS_P (l->loc) && GET_CODE (XEXP (l->loc, 0)) == VALUE
            && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
            && CONST_INT_P (XEXP (l->loc, 1)))
          {
@@ -893,8 +891,7 @@ autoinc_split (rtx x, rtx *off, machine_mode memmode)
              return e->val_rtx;
            }
          for (struct elt_loc_list *l = e->locs; l; l = l->next)
-           if (GET_CODE (l->loc) == PLUS
-               && GET_CODE (XEXP (l->loc, 0)) == VALUE
+           if (PLUS_P (l->loc) && GET_CODE (XEXP (l->loc, 0)) == VALUE
                && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
                && CONST_INT_P (XEXP (l->loc, 1)))
              {
@@ -1015,9 +1012,7 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode 
memmode, int depth)
     return 0;
 
   if (GET_CODE (x) != GET_CODE (y)
-      || (GET_CODE (x) == PLUS
-         && GET_MODE (x) == Pmode
-         && CONST_INT_P (XEXP (x, 1))
+      || (PLUS_P (x) && GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (y, 1))))
     {
       rtx xorig = x, yorig = y;
@@ -1238,10 +1233,9 @@ cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int 
create,
   if (! e)
     return 0;
 
-  if (! SP_DERIVED_VALUE_P (e->val_rtx))
+  if (!SP_DERIVED_VALUE_P (e->val_rtx))
     for (struct elt_loc_list *l = e->locs; l; l = l->next)
-      if (GET_CODE (l->loc) == PLUS
-         && GET_CODE (XEXP (l->loc, 0)) == VALUE
+      if (PLUS_P (l->loc) && GET_CODE (XEXP (l->loc, 0)) == VALUE
          && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
          && CONST_INT_P (XEXP (l->loc, 1)))
        {
@@ -2195,10 +2189,9 @@ cselib_subst_to_values (rtx x, machine_mode memmode)
            {
              if (SP_DERIVED_VALUE_P (t) && XEXP (x, 1) == const0_rtx)
                return t;
-             for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
-                  l; l = l->next)
-               if (GET_CODE (l->loc) == PLUS
-                   && GET_CODE (XEXP (l->loc, 0)) == VALUE
+             for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs; l;
+                  l = l->next)
+               if (PLUS_P (l->loc) && GET_CODE (XEXP (l->loc, 0)) == VALUE
                    && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
                    && CONST_INT_P (XEXP (l->loc, 1)))
                  return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
@@ -2774,8 +2767,7 @@ cselib_record_sp_cfa_base_equiv (HOST_WIDE_INT offset, 
rtx_insn *insn)
        sp_derived_value = l->loc;
        break;
       }
-    else if (GET_CODE (l->loc) == PLUS
-            && GET_CODE (XEXP (l->loc, 0)) == VALUE
+    else if (PLUS_P (l->loc) && GET_CODE (XEXP (l->loc, 0)) == VALUE
             && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
             && CONST_INT_P (XEXP (l->loc, 1)))
       {
@@ -2803,8 +2795,7 @@ cselib_sp_derived_value_p (cselib_val *v)
 {
   if (!SP_DERIVED_VALUE_P (v->val_rtx))
     for (struct elt_loc_list *l = v->locs; l; l = l->next)
-      if (GET_CODE (l->loc) == PLUS
-         && GET_CODE (XEXP (l->loc, 0)) == VALUE
+      if (PLUS_P (l->loc) && GET_CODE (XEXP (l->loc, 0)) == VALUE
          && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
          && CONST_INT_P (XEXP (l->loc, 1)))
        v = CSELIB_VAL_PTR (XEXP (l->loc, 0));
@@ -2813,7 +2804,7 @@ cselib_sp_derived_value_p (cselib_val *v)
   for (struct elt_loc_list *l = v->locs; l; l = l->next)
     if (l->loc == cfa_base_preserved_val->val_rtx)
       return true;
-    else if (GET_CODE (l->loc) == PLUS
+    else if (PLUS_P (l->loc)
             && XEXP (l->loc, 0) == cfa_base_preserved_val->val_rtx
             && CONST_INT_P (XEXP (l->loc, 1)))
       return true;
diff --git a/gcc/dce.cc b/gcc/dce.cc
index 64aa0aa471d..6157f2630f6 100644
--- a/gcc/dce.cc
+++ b/gcc/dce.cc
@@ -300,9 +300,7 @@ sp_based_mem_offset (rtx_call_insn *call_insn, const_rtx 
mem, bool fast)
 {
   HOST_WIDE_INT off = 0;
   rtx addr = XEXP (mem, 0);
-  if (GET_CODE (addr) == PLUS
-      && REG_P (XEXP (addr, 0))
-      && CONST_INT_P (XEXP (addr, 1)))
+  if (PLUS_P (addr) && REG_P (XEXP (addr, 0)) && CONST_INT_P (XEXP (addr, 1)))
     {
       off = INTVAL (XEXP (addr, 1));
       addr = XEXP (addr, 0);
diff --git a/gcc/dwarf2cfi.cc b/gcc/dwarf2cfi.cc
index bef3165e691..457a6487d64 100644
--- a/gcc/dwarf2cfi.cc
+++ b/gcc/dwarf2cfi.cc
@@ -2094,7 +2094,7 @@ dwarf2out_frame_debug_expr (rtx expr)
 
            gcc_assert (REG_P (XEXP (XEXP (dest, 0), 0)));
            offset = rtx_to_poly_int64 (XEXP (XEXP (dest, 0), 1));
-           if (GET_CODE (XEXP (dest, 0)) == MINUS)
+           if (MINUS_P (XEXP (dest, 0)))
              offset = -offset;
 
            regno = dwf_cfa_reg (XEXP (XEXP (dest, 0), 0));
diff --git a/gcc/dwarf2out.cc b/gcc/dwarf2out.cc
index e81044b8c48..92ca8e3b081 100644
--- a/gcc/dwarf2out.cc
+++ b/gcc/dwarf2out.cc
@@ -14906,10 +14906,10 @@ based_loc_descr (rtx reg, poly_int64 offset,
 static inline int
 is_based_loc (const_rtx rtl)
 {
-  return (GET_CODE (rtl) == PLUS
-         && ((REG_P (XEXP (rtl, 0))
-              && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
-              && CONST_INT_P (XEXP (rtl, 1)))));
+  return (
+    PLUS_P (rtl)
+    && ((REG_P (XEXP (rtl, 0)) && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
+        && CONST_INT_P (XEXP (rtl, 1)))));
 }
 
 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
@@ -30686,8 +30686,7 @@ optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
   dw_die_ref ref = NULL;
   tree decl;
 
-  if (GET_CODE (rtl) == CONST
-      && GET_CODE (XEXP (rtl, 0)) == PLUS
+  if (GET_CODE (rtl) == CONST && PLUS_P (XEXP (rtl, 0))
       && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
     {
       offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
diff --git a/gcc/emit-rtl.cc b/gcc/emit-rtl.cc
index f25fb70ab97..54e793bc949 100644
--- a/gcc/emit-rtl.cc
+++ b/gcc/emit-rtl.cc
@@ -2514,10 +2514,8 @@ offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
      being able to recognize the magic around pic_offset_table_rtx.
      This stuff is fragile, and is yet another example of why it is
      bad to expose PIC machinery too early.  */
-  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
-                                    attrs.addrspace)
-      && GET_CODE (addr) == PLUS
-      && XEXP (addr, 0) == pic_offset_table_rtx)
+  if (!memory_address_addr_space_p (GET_MODE (memref), new_rtx, attrs.addrspace)
+      && PLUS_P (addr) && XEXP (addr, 0) == pic_offset_table_rtx)
     {
       addr = force_reg (GET_MODE (addr), addr);
       new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
diff --git a/gcc/explow.cc b/gcc/explow.cc
index 568e0eb1a15..73435df1531 100644
--- a/gcc/explow.cc
+++ b/gcc/explow.cc
@@ -268,11 +268,9 @@ static rtx
 break_out_memory_refs (rtx x)
 {
   if (MEM_P (x)
-      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
-         && GET_MODE (x) != VOIDmode))
+      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x) && GET_MODE (x) != VOIDmode))
     x = force_reg (GET_MODE (x), x);
-  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
-          || GET_CODE (x) == MULT)
+  else if (PLUS_P (x) || MINUS_P (x) || MULT_P (x))
     {
       rtx op0 = break_out_memory_refs (XEXP (x, 0));
       rtx op1 = break_out_memory_refs (XEXP (x, 1));
@@ -363,13 +361,12 @@ convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
         narrower. Inside a CONST RTL, this is safe for both pointers
         zero or sign extended as pointers cannot wrap. */
       if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
-         || (GET_CODE (x) == PLUS
-             && CONST_INT_P (XEXP (x, 1))
+         || (PLUS_P (x) && CONST_INT_P (XEXP (x, 1))
              && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
-                 || XEXP (x, 1) == convert_memory_address_addr_space_1
-                                    (to_mode, XEXP (x, 1), as, in_const,
-                                     no_emit)
-                  || POINTERS_EXTEND_UNSIGNED < 0)))
+                 || XEXP (x, 1)
+                      == convert_memory_address_addr_space_1 (
+                        to_mode, XEXP (x, 1), as, in_const, no_emit)
+                 || POINTERS_EXTEND_UNSIGNED < 0)))
        {
          temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
                                                      as, in_const, no_emit);
@@ -483,7 +480,7 @@ memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
-      if (GET_CODE (x) == PLUS)
+      if (PLUS_P (x))
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
@@ -500,7 +497,7 @@ memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
            }
        }
 
-      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
+      else if (MINUS_P (x) || MULT_P (x))
        x = force_operand (x, NULL_RTX);
 
       /* If we have a register that's an invalid address,
@@ -523,9 +520,7 @@ memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
     return x;
   else if (REG_P (x))
     mark_reg_pointer (x, BITS_PER_UNIT);
-  else if (GET_CODE (x) == PLUS
-          && REG_P (XEXP (x, 0))
-          && CONST_INT_P (XEXP (x, 1)))
+  else if (PLUS_P (x) && REG_P (XEXP (x, 0)) && CONST_INT_P (XEXP (x, 1)))
     mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
 
   /* OLDX may have been the address on a temporary.  Update the address
@@ -572,8 +567,7 @@ use_anchored_address (rtx x)
   /* Split the address into a base and offset.  */
   base = XEXP (x, 0);
   offset = 0;
-  if (GET_CODE (base) == CONST
-      && GET_CODE (XEXP (base, 0)) == PLUS
+  if (GET_CODE (base) == CONST && PLUS_P (XEXP (base, 0))
       && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
     {
       offset += INTVAL (XEXP (XEXP (base, 0), 1));
@@ -711,8 +705,7 @@ force_reg (machine_mode mode, rtx x)
       }
     else if (GET_CODE (x) == LABEL_REF)
       align = BITS_PER_UNIT;
-    else if (GET_CODE (x) == CONST
-            && GET_CODE (XEXP (x, 0)) == PLUS
+    else if (GET_CODE (x) == CONST && PLUS_P (XEXP (x, 0))
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
       {
diff --git a/gcc/expr.cc b/gcc/expr.cc
index d9407432ea5..de6481c1fbe 100644
--- a/gcc/expr.cc
+++ b/gcc/expr.cc
@@ -3865,9 +3865,9 @@ emit_move_resolve_push (machine_mode mode, rtx x)
     {
       rtx expr = XEXP (XEXP (x, 0), 1);
 
-      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
+      gcc_assert (PLUS_P (expr) || MINUS_P (expr));
       poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1));
-      if (GET_CODE (expr) == MINUS)
+      if (MINUS_P (expr))
        val = -val;
       gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val));
       adjust = val;
@@ -4653,7 +4653,7 @@ find_args_size_adjust (rtx_insn *insn)
        case PRE_MODIFY:
        case POST_MODIFY:
          addr = XEXP (addr, 1);
-         gcc_assert (GET_CODE (addr) == PLUS);
+         gcc_assert (PLUS_P (addr));
          gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
          return rtx_to_poly_int64 (XEXP (addr, 1));
        default:
@@ -8170,13 +8170,12 @@ force_operand (rtx value, rtx target)
        }
 
       /* Check for an addition with OP2 a constant integer and our first
-         operand a PLUS of a virtual register and something else.  In that
-         case, we want to emit the sum of the virtual register and the
-         constant first and then add the other value.  This allows virtual
-         register instantiation to simply modify the constant rather than
-         creating another one around this addition.  */
-      if (code == PLUS && CONST_INT_P (op2)
-         && GET_CODE (XEXP (value, 0)) == PLUS
+        operand a PLUS of a virtual register and something else.  In that
+        case, we want to emit the sum of the virtual register and the
+        constant first and then add the other value.  This allows virtual
+        register instantiation to simply modify the constant rather than
+        creating another one around this addition.  */
+      if (code == PLUS && CONST_INT_P (op2) && PLUS_P (XEXP (value, 0))
          && REG_P (XEXP (XEXP (value, 0), 0))
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
@@ -8295,12 +8294,12 @@ safe_from_p (const_rtx x, tree exp, int top_p)
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
-             != INTEGER_CST)
+                  != INTEGER_CST)
          && GET_MODE (x) == BLKmode)
       /* If X is in the outgoing argument area, it is always safe.  */
       || (MEM_P (x)
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
-             || (GET_CODE (XEXP (x, 0)) == PLUS
+             || (PLUS_P (XEXP (x, 0))
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
     return 1;
 
diff --git a/gcc/final.cc b/gcc/final.cc
index eea572238f6..f6d4a62f10f 100644
--- a/gcc/final.cc
+++ b/gcc/final.cc
@@ -3061,13 +3061,14 @@ cleanup_subreg_operands (rtx_insn *insn)
         expression directly.  */
       if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
        {
-         recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
+         recog_data.operand[i]
+           = alter_subreg (recog_data.operand_loc[i], true);
          changed = true;
        }
-      else if (GET_CODE (recog_data.operand[i]) == PLUS
-              || GET_CODE (recog_data.operand[i]) == MULT
+      else if (PLUS_P (recog_data.operand[i]) || MULT_P (recog_data.operand[i])
               || MEM_P (recog_data.operand[i]))
-       recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
+       recog_data.operand[i]
+         = walk_alter_subreg (recog_data.operand_loc[i], &changed);
     }
 
   for (i = 0; i < recog_data.n_dups; i++)
@@ -3077,10 +3078,11 @@ cleanup_subreg_operands (rtx_insn *insn)
          *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
          changed = true;
        }
-      else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
-              || GET_CODE (*recog_data.dup_loc[i]) == MULT
+      else if (PLUS_P (*recog_data.dup_loc[i])
+              || MULT_P (*recog_data.dup_loc[i])
               || MEM_P (*recog_data.dup_loc[i]))
-       *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
+       *recog_data.dup_loc[i]
+         = walk_alter_subreg (recog_data.dup_loc[i], &changed);
     }
   if (changed)
     df_insn_rescan (insn);
@@ -3252,9 +3254,9 @@ get_mem_expr_from_op (rtx op, int *paddressp)
   /* First check if we have a decl for the address, then look at the right side
      if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
      But don't allow the address to itself be indirect.  */
-  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
+  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && !inner_addressp)
     return expr;
-  else if (GET_CODE (op) == PLUS
+  else if (PLUS_P (op)
           && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
     return expr;
 
diff --git a/gcc/function.cc b/gcc/function.cc
index 9c8773bbc59..8ab2f0e7f62 100644
--- a/gcc/function.cc
+++ b/gcc/function.cc
@@ -757,10 +757,10 @@ find_temp_slot_from_address (rtx x)
 
   /* If we have a sum involving a register, see if it points to a temp
      slot.  */
-  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
+  if (PLUS_P (x) && REG_P (XEXP (x, 0))
       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
     return p;
-  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
+  else if (PLUS_P (x) && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
     return p;
 
@@ -1614,8 +1614,7 @@ instantiate_virtual_regs_in_insn (rtx_insn *insn)
       /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
       poly_int64 delta;
-      if (GET_CODE (SET_SRC (set)) == PLUS
-         && recog_data.n_operands >= 3
+      if (PLUS_P (SET_SRC (set)) && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && poly_int_rtx_p (recog_data.operand[2], &delta)
diff --git a/gcc/fwprop.cc b/gcc/fwprop.cc
index fc652ab9a1f..a905aff5f85 100644
--- a/gcc/fwprop.cc
+++ b/gcc/fwprop.cc
@@ -120,7 +120,7 @@ can_simplify_addr (rtx addr)
   if (CONSTANT_ADDRESS_P (addr))
     return false;
 
-  if (GET_CODE (addr) == PLUS)
+  if (PLUS_P (addr))
     reg = XEXP (addr, 0);
   else
     reg = addr;
diff --git a/gcc/haifa-sched.cc b/gcc/haifa-sched.cc
index 4efaa94458d..0569a40fdf1 100644
--- a/gcc/haifa-sched.cc
+++ b/gcc/haifa-sched.cc
@@ -378,12 +378,10 @@ static vec<rtx_insn *> scheduled_insns;
 static int may_trap_exp (const_rtx, int);
 
 /* Nonzero iff the address is comprised from at most 1 register.  */
-#define CONST_BASED_ADDRESS_P(x)                       \
-  (REG_P (x)                                   \
-   || ((GET_CODE (x) == PLUS || GET_CODE (x) == MINUS  \
-       || (GET_CODE (x) == LO_SUM))                    \
-       && (CONSTANT_P (XEXP (x, 0))                    \
-          || CONSTANT_P (XEXP (x, 1)))))
+#define CONST_BASED_ADDRESS_P(x)                                               \
+  (REG_P (x)                                                                   \
+   || ((PLUS_P (x) || MINUS_P (x) || (GET_CODE (x) == LO_SUM))                 \
+       && (CONSTANT_P (XEXP (x, 0)) || CONSTANT_P (XEXP (x, 1)))))
 
 /* Returns a class that insn with GET_DEST(insn)=x may belong to,
    as found by analyzing insn's expression.  */
diff --git a/gcc/ifcvt.cc b/gcc/ifcvt.cc
index eb8efb89a89..d63434c2453 100644
--- a/gcc/ifcvt.cc
+++ b/gcc/ifcvt.cc
@@ -1360,16 +1360,14 @@ noce_try_store_flag_constants (struct noce_if_info *if_info)
   rtx b = if_info->b;
 
   /* Handle cases like x := test ? y + 3 : y + 4.  */
-  if (GET_CODE (a) == PLUS
-      && GET_CODE (b) == PLUS
-      && CONST_INT_P (XEXP (a, 1))
+  if (PLUS_P (a) && PLUS_P (b) && CONST_INT_P (XEXP (a, 1))
       && CONST_INT_P (XEXP (b, 1))
       && rtx_equal_p (XEXP (a, 0), XEXP (b, 0))
       /* Allow expressions that are not using the result or plain
-         registers where we handle overlap below.  */
+        registers where we handle overlap below.  */
       && (REG_P (XEXP (a, 0))
          || (noce_operand_ok (XEXP (a, 0))
-             && ! reg_overlap_mentioned_p (if_info->x, XEXP (a, 0)))))
+             && !reg_overlap_mentioned_p (if_info->x, XEXP (a, 0)))))
     {
       common = XEXP (a, 0);
       a = XEXP (a, 1);
@@ -1572,8 +1570,7 @@ noce_try_addcc (struct noce_if_info *if_info)
   if (!noce_simple_bbs (if_info))
     return FALSE;
 
-  if (GET_CODE (if_info->a) == PLUS
-      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
+  if (PLUS_P (if_info->a) && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
       && noce_reversed_cond_code (if_info) != UNKNOWN)
     {
       rtx cond = if_info->rev_cond;
diff --git a/gcc/ira.cc b/gcc/ira.cc
index d28a67b2546..e16867d4d17 100644
--- a/gcc/ira.cc
+++ b/gcc/ira.cc
@@ -4163,10 +4163,10 @@ setup_reg_equiv (void)
                else if (function_invariant_p (x))
                  {
                    machine_mode mode;
-                   
+
                    mode = GET_MODE (SET_DEST (set));
-                   if (GET_CODE (x) == PLUS
-                       || x == frame_pointer_rtx || x == arg_pointer_rtx)
+                   if (PLUS_P (x) || x == frame_pointer_rtx
+                       || x == arg_pointer_rtx)
                      /* This is PLUS of frame pointer and a constant,
                         or fp, or argp.  */
                      ira_reg_equiv[i].invariant = x;
diff --git a/gcc/loop-doloop.cc b/gcc/loop-doloop.cc
index 30b45c8071a..136776b430f 100644
--- a/gcc/loop-doloop.cc
+++ b/gcc/loop-doloop.cc
@@ -204,52 +204,50 @@ doloop_condition_get (rtx_insn *doloop_pat)
     return 0;
 
   if ((XEXP (condition, 0) == reg)
-      /* For the third case:  */  
-      || ((cc_reg != NULL_RTX)
-         && (XEXP (condition, 0) == cc_reg)
+      /* For the third case:  */
+      || ((cc_reg != NULL_RTX) && (XEXP (condition, 0) == cc_reg)
          && (reg_orig == reg))
-      || (GET_CODE (XEXP (condition, 0)) == PLUS
-         && XEXP (XEXP (condition, 0), 0) == reg))
-   {
-     if (GET_CODE (pattern) != PARALLEL)
-     /*  For the second form we expect:
+      || (PLUS_P (XEXP (condition, 0)) && XEXP (XEXP (condition, 0), 0) == reg))
+    {
+      if (GET_CODE (pattern) != PARALLEL)
+       /*  For the second form we expect:
 
-         (set (reg) (plus (reg) (const_int -1))
-         (set (pc) (if_then_else (reg != 0)
-                                 (label_ref (label))
-                                 (pc))).
+           (set (reg) (plus (reg) (const_int -1))
+           (set (pc) (if_then_else (reg != 0)
+                                   (label_ref (label))
+                                   (pc))).
 
-         is equivalent to the following:
+           is equivalent to the following:
 
-         (parallel [(set (pc) (if_then_else (reg != 1)
-                                            (label_ref (label))
-                                            (pc)))
-                     (set (reg) (plus (reg) (const_int -1)))
-                     (additional clobbers and uses)])
+           (parallel [(set (pc) (if_then_else (reg != 1)
+                                              (label_ref (label))
+                                              (pc)))
+                       (set (reg) (plus (reg) (const_int -1)))
+                       (additional clobbers and uses)])
 
-        For the third form we expect:
+          For the third form we expect:
 
-        (parallel [(set (cc) (compare ((plus (reg) (const_int -1)), 0))
-                   (set (reg) (plus (reg) (const_int -1)))])
-        (set (pc) (if_then_else (cc == NE)
-                                (label_ref (label))
-                                (pc))) 
+          (parallel [(set (cc) (compare ((plus (reg) (const_int -1)), 0))
+                     (set (reg) (plus (reg) (const_int -1)))])
+          (set (pc) (if_then_else (cc == NE)
+                                  (label_ref (label))
+                                  (pc)))
 
-        which is equivalent to the following:
+          which is equivalent to the following:
 
-        (parallel [(set (cc) (compare (reg,  1))
-                   (set (reg) (plus (reg) (const_int -1)))
-                   (set (pc) (if_then_else (NE == cc)
-                                           (label_ref (label))
-                                           (pc))))])
+          (parallel [(set (cc) (compare (reg,  1))
+                     (set (reg) (plus (reg) (const_int -1)))
+                     (set (pc) (if_then_else (NE == cc)
+                                             (label_ref (label))
+                                             (pc))))])
 
-        So we return the second form instead for the two cases.
+          So we return the second form instead for the two cases.
 
-     */
+       */
         condition = gen_rtx_fmt_ee (NE, VOIDmode, inc_src, const1_rtx);
 
     return condition;
-   }
+    }
 
   /* ??? If a machine uses a funny comparison, we could return a
      canonicalized form here.  */
@@ -417,7 +415,7 @@ doloop_simplify_count (class loop *loop, scalar_int_mode mode, rtx count)
   if (GET_CODE (count) == ZERO_EXTEND)
     {
       rtx extop0 = XEXP (count, 0);
-      if (GET_CODE (extop0) == PLUS)
+      if (PLUS_P (extop0))
        {
          rtx addop0 = XEXP (extop0, 0);
          rtx addop1 = XEXP (extop0, 1);
@@ -471,10 +469,10 @@ doloop_modify (class loop *loop, class niter_desc *desc,
   delete_insn (jump_insn);
 
   counter_reg = XEXP (condition, 0);
-  if (GET_CODE (counter_reg) == PLUS)
+  if (PLUS_P (counter_reg))
     counter_reg = XEXP (counter_reg, 0);
   /* These patterns must operate on integer counters.  */
-  mode = as_a <scalar_int_mode> (GET_MODE (counter_reg));
+  mode = as_a<scalar_int_mode> (GET_MODE (counter_reg));
 
   increment_count = false;
   switch (GET_CODE (condition))
diff --git a/gcc/loop-iv.cc b/gcc/loop-iv.cc
index d639336445a..da685f15afc 100644
--- a/gcc/loop-iv.cc
+++ b/gcc/loop-iv.cc
@@ -1543,35 +1543,26 @@ implies_p (rtx a, rtx b)
     }
 
   /* A != N is equivalent to A - (N + 1) <u -1.  */
-  if (GET_CODE (a) == NE
-      && CONST_INT_P (op1)
-      && GET_CODE (b) == LTU
-      && opb1 == constm1_rtx
-      && GET_CODE (opb0) == PLUS
+  if (GET_CODE (a) == NE && CONST_INT_P (op1) && GET_CODE (b) == LTU
+      && opb1 == constm1_rtx && PLUS_P (opb0)
       && CONST_INT_P (XEXP (opb0, 1))
       /* Avoid overflows.  */
       && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
-         != ((unsigned HOST_WIDE_INT)1
-             << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
+         != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
       && INTVAL (XEXP (opb0, 1)) + 1 == -INTVAL (op1))
     return rtx_equal_p (op0, XEXP (opb0, 0));
 
   /* Likewise, A != N implies A - N > 0.  */
-  if (GET_CODE (a) == NE
-      && CONST_INT_P (op1))
+  if (GET_CODE (a) == NE && CONST_INT_P (op1))
     {
-      if (GET_CODE (b) == GTU
-         && GET_CODE (opb0) == PLUS
-         && opb1 == const0_rtx
+      if (GET_CODE (b) == GTU && PLUS_P (opb0) && opb1 == const0_rtx
          && CONST_INT_P (XEXP (opb0, 1))
          /* Avoid overflows.  */
          && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
              != (HOST_WIDE_INT_1U << (HOST_BITS_PER_WIDE_INT - 1)))
          && rtx_equal_p (XEXP (opb0, 0), op0))
        return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
-      if (GET_CODE (b) == GEU
-         && GET_CODE (opb0) == PLUS
-         && opb1 == const1_rtx
+      if (GET_CODE (b) == GEU && PLUS_P (opb0) && opb1 == const1_rtx
          && CONST_INT_P (XEXP (opb0, 1))
          /* Avoid overflows.  */
          && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
diff --git a/gcc/lra-constraints.cc b/gcc/lra-constraints.cc
index d92ab76908c..c20407fceda 100644
--- a/gcc/lra-constraints.cc
+++ b/gcc/lra-constraints.cc
@@ -608,7 +608,7 @@ canonicalize_reload_addr (rtx addr)
   FOR_EACH_SUBRTX_VAR (iter, array, addr, NONCONST)
     {
       rtx x = *iter;
-      if (GET_CODE (x) == MULT && CONST_INT_P (XEXP (x, 1)))
+      if (MULT_P (x) && CONST_INT_P (XEXP (x, 1)))
        {
          const HOST_WIDE_INT ci = INTVAL (XEXP (x, 1));
          const int pwr2 = exact_log2 (ci);
@@ -1763,19 +1763,19 @@ simplify_operand_subreg (int nop, machine_mode reg_mode)
   /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
      if there may be a problem accessing OPERAND in the outer
      mode.  */
-  if ((REG_P (reg)
-       && REGNO (reg) >= FIRST_PSEUDO_REGISTER
+  if ((REG_P (reg) && REGNO (reg) >= FIRST_PSEUDO_REGISTER
        && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
        /* Don't reload paradoxical subregs because we could be looping
          having repeatedly final regno out of hard regs range.  */
        && (hard_regno_nregs (hard_regno, innermode)
           >= hard_regno_nregs (hard_regno, mode))
-       && simplify_subreg_regno (hard_regno, innermode,
-                                SUBREG_BYTE (operand), mode) < 0
+       && simplify_subreg_regno (hard_regno, innermode, SUBREG_BYTE (operand),
+                                mode)
+           < 0
        /* Don't reload subreg for matching reload.  It is actually
          valid subreg in LRA.  */
-       && ! LRA_SUBREG_P (operand))
-      || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
+       && !LRA_SUBREG_P (operand))
+      || CONSTANT_P (reg) || PLUS_P (reg) || MEM_P (reg))
     {
       enum reg_class rclass;
 
@@ -3377,8 +3377,7 @@ equiv_address_substitution (struct address_info *ad)
          *base_term = new_base_reg;
          change_p = true;
        }
-      else if (GET_CODE (new_base_reg) == PLUS
-              && REG_P (XEXP (new_base_reg, 0))
+      else if (PLUS_P (new_base_reg) && REG_P (XEXP (new_base_reg, 0))
               && poly_int_rtx_p (XEXP (new_base_reg, 1), &offset)
               && can_add_disp_p (ad))
        {
@@ -3397,11 +3396,9 @@ equiv_address_substitution (struct address_info *ad)
          *index_term = new_index_reg;
          change_p = true;
        }
-      else if (GET_CODE (new_index_reg) == PLUS
-              && REG_P (XEXP (new_index_reg, 0))
+      else if (PLUS_P (new_index_reg) && REG_P (XEXP (new_index_reg, 0))
               && poly_int_rtx_p (XEXP (new_index_reg, 1), &offset)
-              && can_add_disp_p (ad)
-              && (scale = get_index_scale (ad)))
+              && can_add_disp_p (ad) && (scale = get_index_scale (ad)))
        {
          disp += offset * scale;
          *index_term = XEXP (new_index_reg, 0);
@@ -3720,11 +3717,9 @@ process_address_1 (int nop, bool check_only_p,
       last_insn = get_last_insn ();
       /* If we generated at least two insns, try last insn source as
         an address.  If we succeed, we generate one less insn.  */
-      if (REG_P (new_reg)
-         && last_insn != insns
+      if (REG_P (new_reg) && last_insn != insns
          && (set = single_set (last_insn)) != NULL_RTX
-         && GET_CODE (SET_SRC (set)) == PLUS
-         && REG_P (XEXP (SET_SRC (set), 0))
+         && PLUS_P (SET_SRC (set)) && REG_P (XEXP (SET_SRC (set), 0))
          && CONSTANT_P (XEXP (SET_SRC (set), 1)))
        {
          *ad.inner = SET_SRC (set);
@@ -3839,10 +3834,9 @@ emit_inc (enum reg_class new_rclass, rtx in, rtx value, poly_int64 inc_amount)
 
   if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
     {
-      lra_assert (GET_CODE (XEXP (value, 1)) == PLUS
-                 || GET_CODE (XEXP (value, 1)) == MINUS);
+      lra_assert (PLUS_P (XEXP (value, 1)) || MINUS_P (XEXP (value, 1)));
       lra_assert (rtx_equal_p (XEXP (XEXP (value, 1), 0), XEXP (value, 0)));
-      plus_p = GET_CODE (XEXP (value, 1)) == PLUS;
+      plus_p = PLUS_P (XEXP (value, 1));
       inc = XEXP (XEXP (value, 1), 1);
     }
   else
diff --git a/gcc/lra-eliminations.cc b/gcc/lra-eliminations.cc
index c630ff4af2d..ec81d272401 100644
--- a/gcc/lra-eliminations.cc
+++ b/gcc/lra-eliminations.cc
@@ -213,13 +213,13 @@ form_sum (rtx x, rtx y)
   else if (CONSTANT_P (x))
     std::swap (x, y);
 
-  if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
+  if (PLUS_P (x) && CONSTANT_P (XEXP (x, 1)))
     return form_sum (XEXP (x, 0), form_sum (XEXP (x, 1), y));
 
   /* Note that if the operands of Y are specified in the opposite
      order in the recursive calls below, infinite recursion will
      occur.  */
-  if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
+  if (PLUS_P (y) && CONSTANT_P (XEXP (y, 1)))
     return form_sum (form_sum (x, XEXP (y, 0)), XEXP (y, 1));
 
   /* If both constant, encapsulate sum.         Otherwise, just form sum.  A
@@ -292,8 +292,7 @@ move_plus_up (rtx x)
   subreg_reg = SUBREG_REG (x);
   x_mode = GET_MODE (x);
   subreg_reg_mode = GET_MODE (subreg_reg);
-  if (!paradoxical_subreg_p (x)
-      && GET_CODE (subreg_reg) == PLUS
+  if (!paradoxical_subreg_p (x) && PLUS_P (subreg_reg)
       && CONSTANT_P (XEXP (subreg_reg, 1))
       && GET_MODE_CLASS (x_mode) == MODE_INT
       && GET_MODE_CLASS (subreg_reg_mode) == MODE_INT)
@@ -570,8 +569,7 @@ lra_eliminate_regs_1 (rtx_insn *insn, rtx x, machine_mode mem_mode,
         modified.  LRA has already make sure that this does not
         happen. The only remaining case we need to consider here is
         that the increment value may be an eliminable register.  */
-      if (GET_CODE (XEXP (x, 1)) == PLUS
-         && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
+      if (PLUS_P (XEXP (x, 1)) && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
        {
          rtx new_rtx = lra_eliminate_regs_1 (insn, XEXP (XEXP (x, 1), 1),
                                              mem_mode, subst_p, update_p,
@@ -744,8 +742,7 @@ mark_not_eliminable (rtx x, machine_mode mem_mode)
     case PRE_MODIFY:
       if (XEXP (x, 0) == stack_pointer_rtx
          && ((code != PRE_MODIFY && code != POST_MODIFY)
-             || (GET_CODE (XEXP (x, 1)) == PLUS
-                 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
+             || (PLUS_P (XEXP (x, 1)) && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
                  && poly_int_rtx_p (XEXP (XEXP (x, 1), 1), &offset))))
        {
          poly_int64 size = GET_MODE_SIZE (mem_mode);
@@ -807,8 +804,7 @@ mark_not_eliminable (rtx x, machine_mode mem_mode)
       return;
 
     case SET:
-      if (SET_DEST (x) == stack_pointer_rtx
-         && GET_CODE (SET_SRC (x)) == PLUS
+      if (SET_DEST (x) == stack_pointer_rtx && PLUS_P (SET_SRC (x))
          && XEXP (SET_SRC (x), 0) == SET_DEST (x)
          && poly_int_rtx_p (XEXP (SET_SRC (x), 1), &offset))
        {
@@ -909,7 +905,7 @@ eliminate_regs_in_insn (rtx_insn *insn, bool replace_p, bool first_p,
   poly_int64 offset = 0;
   if (old_set && REG_P (SET_DEST (old_set)))
     {
-      if (GET_CODE (SET_SRC (old_set)) == PLUS)
+      if (PLUS_P (SET_SRC (old_set)))
        plus_src = SET_SRC (old_set);
       /* First see if the source is of the form (plus (...) CST).  */
       if (plus_src && poly_int_rtx_p (XEXP (plus_src, 1), &offset))
@@ -1042,12 +1038,11 @@ eliminate_regs_in_insn (rtx_insn *insn, bool replace_p, bool first_p,
      after all reloads calculating its offset.  This permits to keep register
      pressure under control and helps to avoid LRA cycling in patalogical
      cases.  */
-  if (! replace_p && (set = single_set (insn)) != NULL
-      && GET_CODE (SET_SRC (set)) == PLUS
-      && GET_CODE (XEXP (SET_SRC (set), 0)) == PLUS)
+  if (!replace_p && (set = single_set (insn)) != NULL && PLUS_P (SET_SRC (set))
+      && PLUS_P (XEXP (SET_SRC (set), 0)))
     {
       rtx reg1, reg2, op1, op2;
-      
+
       reg1 = op1 = XEXP (XEXP (SET_SRC (set), 0), 0);
       reg2 = op2 = XEXP (SET_SRC (set), 1);
       if (GET_CODE (reg1) == SUBREG)
diff --git a/gcc/lra.cc b/gcc/lra.cc
index 1444cb75914..cf0edd8b487 100644
--- a/gcc/lra.cc
+++ b/gcc/lra.cc
@@ -355,7 +355,7 @@ lra_emit_add (rtx x, rtx y, rtx z)
   else
     {
       disp = a2 = NULL_RTX;
-      if (GET_CODE (y) == PLUS)
+      if (PLUS_P (y))
        {
          a1 = XEXP (y, 0);
          a2 = XEXP (y, 1);
@@ -370,14 +370,14 @@ lra_emit_add (rtx x, rtx y, rtx z)
            a2 = z;
        }
       index_scale = scale = NULL_RTX;
-      if (GET_CODE (a1) == MULT)
+      if (MULT_P (a1))
        {
          index_scale = a1;
          index = XEXP (a1, 0);
          scale = XEXP (a1, 1);
          base = a2;
        }
-      else if (a2 != NULL_RTX && GET_CODE (a2) == MULT)
+      else if (a2 != NULL_RTX && MULT_P (a2))
        {
          index_scale = a2;
          index = XEXP (a2, 0);
diff --git a/gcc/modulo-sched.cc b/gcc/modulo-sched.cc
index 162de199da6..95f6f805e9e 100644
--- a/gcc/modulo-sched.cc
+++ b/gcc/modulo-sched.cc
@@ -354,7 +354,7 @@ doloop_register_get (rtx_insn *head, rtx_insn *tail)
 
   if (REG_P (XEXP (condition, 0)))
     reg = XEXP (condition, 0);
-  else if (GET_CODE (XEXP (condition, 0)) == PLUS
+  else if (PLUS_P (XEXP (condition, 0))
           && REG_P (XEXP (XEXP (condition, 0), 0)))
     reg = XEXP (XEXP (condition, 0), 0);
   else
diff --git a/gcc/postreload.cc b/gcc/postreload.cc
index 41f61d32648..c1b2a837066 100644
--- a/gcc/postreload.cc
+++ b/gcc/postreload.cc
@@ -89,9 +89,8 @@ reload_cse_simplify (rtx_insn *insn, rtx testreg)
   /* Remember if this insn has been sp += const_int.  */
   rtx sp_set = set_for_reg_notes (insn);
   rtx sp_addend = NULL_RTX;
-  if (sp_set
-      && SET_DEST (sp_set) == stack_pointer_rtx
-      && GET_CODE (SET_SRC (sp_set)) == PLUS
+  if (sp_set && SET_DEST (sp_set) == stack_pointer_rtx
+      && PLUS_P (SET_SRC (sp_set))
       && XEXP (SET_SRC (sp_set), 0) == stack_pointer_rtx
       && CONST_INT_P (XEXP (SET_SRC (sp_set), 1)))
     sp_addend = XEXP (SET_SRC (sp_set), 1);
@@ -897,9 +896,7 @@ try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
   else
     {
       rtx new_set = single_set (use_insn);
-      if (new_set
-         && REG_P (SET_DEST (new_set))
-         && GET_CODE (SET_SRC (new_set)) == PLUS
+      if (new_set && REG_P (SET_DEST (new_set)) && PLUS_P (SET_SRC (new_set))
          && REG_P (XEXP (SET_SRC (new_set), 0))
          && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
        {
@@ -1110,11 +1107,8 @@ reload_combine_recognize_pattern (rtx_insn *insn)
      Also, explicitly check that REGX != REGY; our life information
      does not yet show whether REGY changes in this insn.  */
 
-  if (GET_CODE (src) == PLUS
-      && reg_state[regno].all_offsets_match
-      && last_index_reg != -1
-      && REG_P (XEXP (src, 1))
-      && rtx_equal_p (XEXP (src, 0), reg)
+  if (PLUS_P (src) && reg_state[regno].all_offsets_match && last_index_reg != -1
+      && REG_P (XEXP (src, 1)) && rtx_equal_p (XEXP (src, 0), reg)
       && !rtx_equal_p (XEXP (src, 1), reg)
       && last_label_ruid < reg_state[regno].use_ruid)
     {
@@ -1987,9 +1981,7 @@ reload_cse_move2add (rtx_insn *first)
                  rtx set = NULL_RTX;
                  if (next)
                    set = single_set (next);
-                 if (set
-                     && SET_DEST (set) == reg
-                     && GET_CODE (SET_SRC (set)) == PLUS
+                 if (set && SET_DEST (set) == reg && PLUS_P (SET_SRC (set))
                      && XEXP (SET_SRC (set), 0) == reg
                      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
                    {
@@ -2052,8 +2044,7 @@ reload_cse_move2add (rtx_insn *first)
             ...
             (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
          if ((GET_CODE (src) == SYMBOL_REF
-              || (GET_CODE (src) == CONST
-                  && GET_CODE (XEXP (src, 0)) == PLUS
+              || (GET_CODE (src) == CONST && PLUS_P (XEXP (src, 0))
                   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
                   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
              && dbg_cnt (cse2_move2add))
@@ -2193,7 +2184,7 @@ move2add_note_store (rtx dst, const_rtx set, void *data)
          off = const0_rtx;
        }
       else if (note && GET_CODE (XEXP (note, 0)) == CONST
-              && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
+              && PLUS_P (XEXP (XEXP (note, 0), 0))
               && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
               && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
        {
diff --git a/gcc/reginfo.cc b/gcc/reginfo.cc
index 705779d5c18..92614074676 100644
--- a/gcc/reginfo.cc
+++ b/gcc/reginfo.cc
@@ -1080,12 +1080,9 @@ reg_scan_mark_refs (rtx x, rtx_insn *insn)
             optimizations).  So only set REG_POINTER on the destination
             pseudo if this is the only set of that pseudo.  */
          && DF_REG_DEF_COUNT (REGNO (SET_DEST (x))) == 1
-         && ! REG_USERVAR_P (SET_DEST (x))
-         && ! REG_POINTER (SET_DEST (x))
-         && ((REG_P (SET_SRC (x))
-              && REG_POINTER (SET_SRC (x)))
-             || ((GET_CODE (SET_SRC (x)) == PLUS
-                  || GET_CODE (SET_SRC (x)) == LO_SUM)
+         && !REG_USERVAR_P (SET_DEST (x)) && !REG_POINTER (SET_DEST (x))
+         && ((REG_P (SET_SRC (x)) && REG_POINTER (SET_SRC (x)))
+             || ((PLUS_P (SET_SRC (x)) || GET_CODE (SET_SRC (x)) == LO_SUM)
                  && CONST_INT_P (XEXP (SET_SRC (x), 1))
                  && REG_P (XEXP (SET_SRC (x), 0))
                  && REG_POINTER (XEXP (SET_SRC (x), 0)))
@@ -1096,8 +1093,7 @@ reg_scan_mark_refs (rtx x, rtx_insn *insn)
                  && (GET_CODE (XEXP (SET_SRC (x), 0)) == CONST
                      || GET_CODE (XEXP (SET_SRC (x), 0)) == SYMBOL_REF
                      || GET_CODE (XEXP (SET_SRC (x), 0)) == LABEL_REF))
-             || ((GET_CODE (SET_SRC (x)) == PLUS
-                  || GET_CODE (SET_SRC (x)) == LO_SUM)
+             || ((PLUS_P (SET_SRC (x)) || GET_CODE (SET_SRC (x)) == LO_SUM)
                  && (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST
                      || GET_CODE (XEXP (SET_SRC (x), 1)) == SYMBOL_REF
                      || GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF))
diff --git a/gcc/reload.cc b/gcc/reload.cc
index 3ed901e3944..f7659c2d43e 100644
--- a/gcc/reload.cc
+++ b/gcc/reload.cc
@@ -846,7 +846,7 @@ reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
   inner = SUBREG_REG (x);
 
   /* If INNER is a constant or PLUS, then INNER will need reloading.  */
-  if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
+  if (CONSTANT_P (inner) || PLUS_P (inner))
     return true;
 
   /* If INNER is not a hard register, then INNER will not need reloading.  */
@@ -1058,33 +1058,30 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 
   scalar_int_mode inner_mode;
   if (in != 0 && GET_CODE (in) == SUBREG
-      && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
-                                       inmode, rclass)
+      && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)), inmode,
+                                       rclass)
       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
       && (strict_low
          || (subreg_lowpart_p (in)
-             && (CONSTANT_P (SUBREG_REG (in))
-                 || GET_CODE (SUBREG_REG (in)) == PLUS
+             && (CONSTANT_P (SUBREG_REG (in)) || PLUS_P (SUBREG_REG (in))
                  || (((REG_P (SUBREG_REG (in))
                        && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
                       || MEM_P (SUBREG_REG (in)))
                      && (paradoxical_subreg_p (inmode,
                                                GET_MODE (SUBREG_REG (in)))
                          || (known_le (GET_MODE_SIZE (inmode), UNITS_PER_WORD)
-                             && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG
-                                                                  (in)),
-                                                        &inner_mode)
+                             && is_a<scalar_int_mode> (
+                               GET_MODE (SUBREG_REG (in)), &inner_mode)
                              && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
                              && paradoxical_subreg_p (inmode, inner_mode)
                              && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
                          || (WORD_REGISTER_OPERATIONS
                              && partial_subreg_p (inmode,
                                                   GET_MODE (SUBREG_REG (in)))
-                             && (known_equal_after_align_down
-                                 (GET_MODE_SIZE (inmode) - 1,
-                                  GET_MODE_SIZE (GET_MODE (SUBREG_REG
-                                                           (in))) - 1,
-                                  UNITS_PER_WORD)))))
+                             && (known_equal_after_align_down (
+                               GET_MODE_SIZE (inmode) - 1,
+                               GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1,
+                               UNITS_PER_WORD)))))
                  || (REG_P (SUBREG_REG (in))
                      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
                      /* The case where out is nonzero
@@ -1106,9 +1103,9 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
          || (REG_P (SUBREG_REG (in))
              && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER
              && reg_equiv_mem (REGNO (SUBREG_REG (in)))
-             && (mode_dependent_address_p
-                 (XEXP (reg_equiv_mem (REGNO (SUBREG_REG (in))), 0),
-                  MEM_ADDR_SPACE (reg_equiv_mem (REGNO (SUBREG_REG (in)))))))))
+             && (mode_dependent_address_p (
+               XEXP (reg_equiv_mem (REGNO (SUBREG_REG (in))), 0),
+               MEM_ADDR_SPACE (reg_equiv_mem (REGNO (SUBREG_REG (in)))))))))
     {
 #ifdef LIMIT_RELOAD_CLASS
       in_subreg_loc = inloc;
@@ -1146,11 +1143,9 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
                                                   SUBREG_BYTE (in),
                                                   GET_MODE (in)),
                              REGNO (SUBREG_REG (in)));
-      else if (CONSTANT_P (SUBREG_REG (in))
-               || GET_CODE (SUBREG_REG (in)) == PLUS)
-       subreg_in_class = find_valid_class_1 (inmode,
-                                             GET_MODE (SUBREG_REG (in)),
-                                             rclass);
+      else if (CONSTANT_P (SUBREG_REG (in)) || PLUS_P (SUBREG_REG (in)))
+       subreg_in_class
+         = find_valid_class_1 (inmode, GET_MODE (SUBREG_REG (in)), rclass);
 
       /* This relies on the fact that emit_reload_insns outputs the
         instructions for input reloads of type RELOAD_OTHER in the same
@@ -1244,7 +1239,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 
   /* If IN appears in OUT, we can't share any input-only reload for IN.  */
   if (in != 0 && out != 0 && MEM_P (out)
-      && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
+      && (REG_P (in) || MEM_P (in) || PLUS_P (in))
       && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
     dont_share = 1;
 
@@ -2399,7 +2394,7 @@ decompose (rtx x)
 
        if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
          {
-           if (GET_CODE (XEXP (addr, 1)) == PLUS
+           if (PLUS_P (XEXP (addr, 1))
                && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
                && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
              {
@@ -2416,7 +2411,7 @@ decompose (rtx x)
            addr = XEXP (addr, 0);
            all_const = 1;
          }
-       if (GET_CODE (addr) == PLUS)
+       if (PLUS_P (addr))
          {
            if (CONSTANT_P (XEXP (addr, 0)))
              {
@@ -2437,7 +2432,7 @@ decompose (rtx x)
          }
        if (GET_CODE (offset) == CONST)
          offset = XEXP (offset, 0);
-       if (GET_CODE (offset) == PLUS)
+       if (PLUS_P (offset))
          {
            if (CONST_INT_P (XEXP (offset, 0)))
              {
@@ -2461,7 +2456,7 @@ decompose (rtx x)
            offset = const0_rtx;
          }
 
-       if (all_const && GET_CODE (base) == PLUS)
+       if (all_const && PLUS_P (base))
          base = gen_rtx_CONST (GET_MODE (base), base);
 
        gcc_assert (CONST_INT_P (offset));
@@ -2849,9 +2844,9 @@ find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
             PLUS or MULT or ASHIFT, re-recognize and try again.  */
          if ((OBJECT_P (*recog_data.operand_loc[i])
               || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
-             && (GET_CODE (recog_data.operand[i]) == MULT
+             && (MULT_P (recog_data.operand[i])
                  || GET_CODE (recog_data.operand[i]) == ASHIFT
-                 || GET_CODE (recog_data.operand[i]) == PLUS))
+                 || PLUS_P (recog_data.operand[i])))
            {
              INSN_CODE (insn) = -1;
              retval = find_reloads (insn, replace, ind_levels, live_known,
@@ -3110,7 +3105,7 @@ find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
                     be a problem accessing OPERAND in the outer mode.  */
                  scalar_int_mode inner_mode;
                  if (CONSTANT_P (operand)
-                     || GET_CODE (operand) == PLUS
+                     || PLUS_P (operand)
                      /* We must force a reload of paradoxical SUBREGs
                         of a MEM because the alignment of the inner value
                         may not be enough to do the outer reference.  On
@@ -3118,14 +3113,14 @@ find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
                         the object.
 
                         On machines that extend byte operations and we have a
-                        SUBREG where both the inner and outer modes are no wider
-                        than a word and the inner mode is narrower, is integral,
-                        and gets extended when loaded from memory, combine.cc has
-                        made assumptions about the behavior of the machine in such
-                        register access.  If the data is, in fact, in memory we
-                        must always load using the size assumed to be in the
-                        register and let the insn do the different-sized
-                        accesses.
+                        SUBREG where both the inner and outer modes are no
+                        wider than a word and the inner mode is narrower, is
+                        integral, and gets extended when loaded from memory,
+                        combine.cc has made assumptions about the behavior of
+                        the machine in such register access.  If the data is,
+                        in fact, in memory we must always load using the size
+                        assumed to be in the register and let the insn do the
+                        different-sized accesses.
 
                         This is doubly true if WORD_REGISTER_OPERATIONS.  In
                         this case eliminate_regs has left non-paradoxical
@@ -3139,16 +3134,16 @@ find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
                           || (REG_P (operand)
                               && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
                          && (WORD_REGISTER_OPERATIONS
-                             || (((maybe_lt
-                                   (GET_MODE_BITSIZE (GET_MODE (operand)),
-                                    BIGGEST_ALIGNMENT))
-                                  && (paradoxical_subreg_p
-                                      (operand_mode[i], GET_MODE (operand)))))
+                             || (((maybe_lt (GET_MODE_BITSIZE (
+                                               GET_MODE (operand)),
+                                             BIGGEST_ALIGNMENT))
+                                  && (paradoxical_subreg_p (
+                                    operand_mode[i], GET_MODE (operand)))))
                              || BYTES_BIG_ENDIAN
                              || (known_le (GET_MODE_SIZE (operand_mode[i]),
                                            UNITS_PER_WORD)
-                                 && (is_a <scalar_int_mode>
-                                     (GET_MODE (operand), &inner_mode))
+                                 && (is_a<scalar_int_mode> (GET_MODE (operand),
+                                                            &inner_mode))
                                  && (GET_MODE_SIZE (inner_mode)
                                      <= UNITS_PER_WORD)
                                  && paradoxical_subreg_p (operand_mode[i],
@@ -3163,11 +3158,10 @@ find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
                      || (REG_P (operand)
                          && REGNO (operand) >= FIRST_PSEUDO_REGISTER
                          && reg_equiv_mem (REGNO (operand))
-                         && (mode_dependent_address_p
-                             (XEXP (reg_equiv_mem (REGNO (operand)), 0),
-                              (MEM_ADDR_SPACE
-                               (reg_equiv_mem (REGNO (operand)))))))
-                     )
+                         && (mode_dependent_address_p (
+                           XEXP (reg_equiv_mem (REGNO (operand)), 0),
+                           (MEM_ADDR_SPACE (
+                             reg_equiv_mem (REGNO (operand))))))))
                    force_reload = 1;
                }
 
@@ -3944,7 +3938,7 @@ find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
            mode = GET_MODE (op);
          }
 
-       if (GET_CODE (op) == PLUS)
+       if (PLUS_P (op))
          {
            plus = op;
            op = XEXP (op, 1);
@@ -4927,7 +4921,7 @@ find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
              if (ind_levels > 0
                  && strict_memory_address_addr_space_p (mode, tem, as)
                  && (REG_P (XEXP (tem, 0))
-                     || (GET_CODE (XEXP (tem, 0)) == PLUS
+                     || (PLUS_P (XEXP (tem, 0))
                          && REG_P (XEXP (XEXP (tem, 0), 0))
                          && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
                {
@@ -4980,9 +4974,7 @@ find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
         So do a tree-walk to find and eliminate all such regs.  */
 
       /* But first quickly dispose of a common case.  */
-      if (GET_CODE (ad) == PLUS
-         && CONST_INT_P (XEXP (ad, 1))
-         && REG_P (XEXP (ad, 0))
+      if (PLUS_P (ad) && CONST_INT_P (XEXP (ad, 1)) && REG_P (XEXP (ad, 0))
          && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
        return 0;
 
@@ -5056,12 +5048,11 @@ find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
         taken care of above.  */
 
       if (ind_levels == 0
-         || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
+         || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && !indirect_symref_ok)
          || MEM_P (XEXP (tem, 0))
-         || ! (REG_P (XEXP (tem, 0))
-               || (GET_CODE (XEXP (tem, 0)) == PLUS
-                   && REG_P (XEXP (XEXP (tem, 0), 0))
-                   && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
+         || !(REG_P (XEXP (tem, 0))
+              || (PLUS_P (XEXP (tem, 0)) && REG_P (XEXP (XEXP (tem, 0), 0))
+                  && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
        {
          /* Must use TEM here, not AD, since it is the one that will
             have any subexpressions reloaded, if needed.  */
@@ -5080,8 +5071,7 @@ find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
      Handle all base registers here, not just fp/ap/sp, because on some
      targets (namely SH) we can also get too large displacements from
      big-endian corrections.  */
-  else if (GET_CODE (ad) == PLUS
-          && REG_P (XEXP (ad, 0))
+  else if (PLUS_P (ad) && REG_P (XEXP (ad, 0))
           && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
           && CONST_INT_P (XEXP (ad, 1))
           && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
@@ -5089,8 +5079,8 @@ find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
               /* Similarly, if we were to reload the base register and the
                  mem+offset address is still invalid, then we want to reload
                  the whole address, not just the base register.  */
-              || ! maybe_memory_address_addr_space_p
-                    (mode, ad, as, &(XEXP (ad, 0)))))
+              || !maybe_memory_address_addr_space_p (mode, ad, as,
+                                                     &(XEXP (ad, 0)))))
 
     {
       /* Unshare the MEM rtx so we can safely alter it.  */
@@ -5160,11 +5150,10 @@ find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
       enum rtx_code inner_code;
 
       if (GET_CODE (ad) != PLUS)
-         continue;
+       continue;
 
       inner_code = GET_CODE (XEXP (ad, 0));
-      if (!(GET_CODE (ad) == PLUS
-           && CONST_INT_P (XEXP (ad, 1))
+      if (!(PLUS_P (ad) && CONST_INT_P (XEXP (ad, 1))
            && (inner_code == PLUS || inner_code == LO_SUM)))
        continue;
 
@@ -5188,11 +5177,11 @@ find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
          rtx offset_reg;
          enum reg_class cls;
 
-         offset_reg = plus_constant (GET_MODE (ad), operand,
-                                     INTVAL (XEXP (ad, 1)));
+         offset_reg
+           = plus_constant (GET_MODE (ad), operand, INTVAL (XEXP (ad, 1)));
 
          /* Form the adjusted address.  */
-         if (GET_CODE (XEXP (ad, 0)) == PLUS)
+         if (PLUS_P (XEXP (ad, 0)))
            ad = gen_rtx_PLUS (GET_MODE (ad),
                               op_index == 0 ? offset_reg : addend,
                               op_index == 0 ? addend : offset_reg);
@@ -5220,7 +5209,7 @@ find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
      in a sum is replaced.  */
 
   tem = ad;
-  if (GET_CODE (ad) == PLUS)
+  if (PLUS_P (ad))
     tem = subst_indexed_address (ad);
   if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
     {
@@ -5358,12 +5347,12 @@ form_sum (machine_mode mode, rtx x, rtx y)
   else if (CONSTANT_P (x))
     tem = x, x = y, y = tem;
 
-  if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
+  if (PLUS_P (x) && CONSTANT_P (XEXP (x, 1)))
     return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
 
   /* Note that if the operands of Y are specified in the opposite
      order in the recursive calls below, infinite recursion will occur.  */
-  if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
+  if (PLUS_P (y) && CONSTANT_P (XEXP (y, 1)))
     return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
 
   /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
@@ -5400,7 +5389,7 @@ subst_indexed_address (rtx addr)
   rtx tem;
   int regno;
 
-  if (GET_CODE (addr) == PLUS)
+  if (PLUS_P (addr))
     {
       /* Try to find a register to replace.  */
       op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
@@ -5409,24 +5398,20 @@ subst_indexed_address (rtx addr)
          && reg_renumber[regno] < 0
          && reg_equiv_constant (regno) != 0)
        op0 = reg_equiv_constant (regno);
-      else if (REG_P (op1)
-              && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
-              && reg_renumber[regno] < 0
-              && reg_equiv_constant (regno) != 0)
+      else if (REG_P (op1) && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
+              && reg_renumber[regno] < 0 && reg_equiv_constant (regno) != 0)
        op1 = reg_equiv_constant (regno);
-      else if (GET_CODE (op0) == PLUS
-              && (tem = subst_indexed_address (op0)) != op0)
+      else if (PLUS_P (op0) && (tem = subst_indexed_address (op0)) != op0)
        op0 = tem;
-      else if (GET_CODE (op1) == PLUS
-              && (tem = subst_indexed_address (op1)) != op1)
+      else if (PLUS_P (op1) && (tem = subst_indexed_address (op1)) != op1)
        op1 = tem;
       else
        return addr;
 
       /* Pick out up to three things to add.  */
-      if (GET_CODE (op1) == PLUS)
+      if (PLUS_P (op1))
        op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
-      else if (GET_CODE (op0) == PLUS)
+      else if (PLUS_P (op0))
        op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
 
       /* Compute the sum.  */
@@ -5693,7 +5678,7 @@ find_reloads_address_1 (machine_mode mode, addr_space_t as,
           conservative and class it as RELOAD_OTHER.  */
        if ((REG_P (XEXP (op1, 1))
             && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
-           || GET_CODE (XEXP (op1, 1)) == PLUS)
+           || PLUS_P (XEXP (op1, 1)))
          find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
                                  &XEXP (op1, 1), opnum, RELOAD_OTHER,
                                  ind_levels, insn);
@@ -6092,15 +6077,14 @@ find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
          || targetm.preferred_reload_class (x, rclass) == NO_REGS))
     {
       x = force_const_mem (mode, x);
-      find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
-                           opnum, type, ind_levels, 0);
+      find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0), opnum, type,
+                           ind_levels, 0);
     }
 
-  else if (GET_CODE (x) == PLUS
-          && CONSTANT_P (XEXP (x, 1))
+  else if (PLUS_P (x) && CONSTANT_P (XEXP (x, 1))
           && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
               || targetm.preferred_reload_class (XEXP (x, 1), rclass)
-                  == NO_REGS))
+                   == NO_REGS))
     {
       rtx tem;
 
@@ -6395,8 +6379,7 @@ find_replacement (rtx *loc)
 
   /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
      what's inside and make a new rtl if so.  */
-  if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
-      || GET_CODE (*loc) == MULT)
+  if (PLUS_P (*loc) || MINUS_P (*loc) || MULT_P (*loc))
     {
       rtx x = find_replacement (&XEXP (*loc, 0));
       rtx y = find_replacement (&XEXP (*loc, 1));
@@ -6590,7 +6573,7 @@ reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
     return reg_mentioned_p (x, in);
   else
     {
-      gcc_assert (GET_CODE (x) == PLUS);
+      gcc_assert (PLUS_P (x));
 
       /* We actually want to know if X is mentioned somewhere inside IN.
         We must not say that (plus (sp) (const_int 124)) is in
@@ -6601,7 +6584,7 @@ reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
        in = XEXP (in, 0);
       if (REG_P (in))
        return 0;
-      else if (GET_CODE (in) == PLUS)
+      else if (PLUS_P (in))
        return (rtx_equal_p (x, in)
                || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
                || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
@@ -6710,12 +6693,10 @@ find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
     }
   else if (CONSTANT_P (goal))
     goal_const = 1;
-  else if (GET_CODE (goal) == PLUS
-          && XEXP (goal, 0) == stack_pointer_rtx
+  else if (PLUS_P (goal) && XEXP (goal, 0) == stack_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
     goal_const = need_stable_sp = 1;
-  else if (GET_CODE (goal) == PLUS
-          && XEXP (goal, 0) == frame_pointer_rtx
+  else if (PLUS_P (goal) && XEXP (goal, 0) == frame_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
     goal_const = 1;
   else
@@ -7114,9 +7095,8 @@ find_inc_amount (rtx x, rtx inced)
           || GET_CODE (addr) == POST_INC)
          && XEXP (addr, 0) == inced)
        return GET_MODE_SIZE (GET_MODE (x));
-      else if ((GET_CODE (addr) == PRE_MODIFY
-               || GET_CODE (addr) == POST_MODIFY)
-              && GET_CODE (XEXP (addr, 1)) == PLUS
+      else if ((GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
+              && PLUS_P (XEXP (addr, 1))
               && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
               && XEXP (addr, 0) == inced
               && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
diff --git a/gcc/reload1.cc b/gcc/reload1.cc
index 728dc2a1a5c..09feb77b215 100644
--- a/gcc/reload1.cc
+++ b/gcc/reload1.cc
@@ -929,8 +929,7 @@ reload (rtx_insn *first, int global)
            else if (CONSTANT_P (XEXP (x, 0))
                     || (REG_P (XEXP (x, 0))
                         && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
-                    || (GET_CODE (XEXP (x, 0)) == PLUS
-                        && REG_P (XEXP (XEXP (x, 0), 0))
+                    || (PLUS_P (XEXP (x, 0)) && REG_P (XEXP (XEXP (x, 0), 0))
                         && (REGNO (XEXP (XEXP (x, 0), 0))
                             < FIRST_PSEUDO_REGISTER)
                         && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
@@ -2643,13 +2642,12 @@ eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
               didn't get a hard register but has a reg_equiv_constant,
               we must replace the constant here since it may no longer
               be in the position of any operand.  */
-           if (GET_CODE (new0) == PLUS && REG_P (new1)
+           if (PLUS_P (new0) && REG_P (new1)
                && REGNO (new1) >= FIRST_PSEUDO_REGISTER
-               && reg_renumber[REGNO (new1)] < 0
-               && reg_equivs
+               && reg_renumber[REGNO (new1)] < 0 && reg_equivs
                && reg_equiv_constant (REGNO (new1)) != 0)
              new1 = reg_equiv_constant (REGNO (new1));
-           else if (GET_CODE (new1) == PLUS && REG_P (new0)
+           else if (PLUS_P (new1) && REG_P (new0)
                     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
                     && reg_renumber[REGNO (new0)] < 0
                     && reg_equiv_constant (REGNO (new0)) != 0)
@@ -2774,11 +2772,10 @@ eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
         elimination_effects has already make sure that this does not
         happen.  The only remaining case we need to consider here is
         that the increment value may be an eliminable register.  */
-      if (GET_CODE (XEXP (x, 1)) == PLUS
-         && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
+      if (PLUS_P (XEXP (x, 1)) && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
        {
-         rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
-                                         insn, true, for_costs);
+         rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode, insn,
+                                         true, for_costs);
 
          if (new_rtx != XEXP (XEXP (x, 1), 1))
            return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
@@ -3021,8 +3018,7 @@ elimination_effects (rtx x, machine_mode mem_mode)
              ep->offset -= size;
            else if (code == PRE_MODIFY || code == POST_MODIFY)
              {
-               if (GET_CODE (XEXP (x, 1)) == PLUS
-                   && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
+               if (PLUS_P (XEXP (x, 1)) && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
                    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
                  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
                else
@@ -3109,8 +3105,7 @@ elimination_effects (rtx x, machine_mode mem_mode)
                   this elimination can't be done.  */
                rtx src = SET_SRC (x);
 
-               if (GET_CODE (src) == PLUS
-                   && XEXP (src, 0) == SET_DEST (x)
+               if (PLUS_P (src) && XEXP (src, 0) == SET_DEST (x)
                    && CONST_INT_P (XEXP (src, 1)))
                  ep->offset -= INTVAL (XEXP (src, 1));
                else
@@ -3229,11 +3224,10 @@ eliminate_regs_in_insn (rtx_insn *insn, int replace)
   plus_src = plus_cst_src = 0;
   if (old_set && REG_P (SET_DEST (old_set)))
     {
-      if (GET_CODE (SET_SRC (old_set)) == PLUS)
+      if (PLUS_P (SET_SRC (old_set)))
        plus_src = SET_SRC (old_set);
       /* First see if the source is of the form (plus (...) CST).  */
-      if (plus_src
-         && CONST_INT_P (XEXP (plus_src, 1)))
+      if (plus_src && CONST_INT_P (XEXP (plus_src, 1)))
        plus_cst_src = plus_src;
       else if (REG_P (SET_SRC (old_set))
               || plus_src)
@@ -3245,7 +3239,7 @@ eliminate_regs_in_insn (rtx_insn *insn, int replace)
            {
              if ((REG_NOTE_KIND (links) == REG_EQUAL
                   || REG_NOTE_KIND (links) == REG_EQUIV)
-                 && GET_CODE (XEXP (links, 0)) == PLUS
+                 && PLUS_P (XEXP (links, 0))
                  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
                {
                  plus_cst_src = XEXP (links, 0);
@@ -3435,11 +3429,9 @@ eliminate_regs_in_insn (rtx_insn *insn, int replace)
         hurt in this rare case.  */
       /* ??? Why this huge if statement - why don't we just rerecognize the
         thing always?  */
-      if (! insn_is_asm
-         && old_set != 0
+      if (!insn_is_asm && old_set != 0
          && ((REG_P (SET_SRC (old_set))
-              && (GET_CODE (new_body) != SET
-                  || !REG_P (SET_SRC (new_body))))
+              && (GET_CODE (new_body) != SET || !REG_P (SET_SRC (new_body))))
              /* If this was a load from or store to memory, compare
                 the MEM in recog_data.operand to the one in the insn.
                 If they are not equal, then rerecognize the insn.  */
@@ -3449,7 +3441,7 @@ eliminate_regs_in_insn (rtx_insn *insn, int replace)
                      || (MEM_P (SET_DEST (old_set))
                          && SET_DEST (old_set) != recog_data.operand[0])))
              /* If this was an add insn before, rerecognize.  */
-             || GET_CODE (SET_SRC (old_set)) == PLUS))
+             || PLUS_P (SET_SRC (old_set))))
        {
          int new_icode = recog (PATTERN (insn), insn, 0);
          if (new_icode >= 0)
@@ -3557,11 +3549,10 @@ elimination_costs_in_insn (rtx_insn *insn)
   if (old_set && REG_P (SET_DEST (old_set)))
     {
       sets_reg_p = true;
-      if (GET_CODE (SET_SRC (old_set)) == PLUS)
+      if (PLUS_P (SET_SRC (old_set)))
        plus_src = SET_SRC (old_set);
       /* First see if the source is of the form (plus (...) CST).  */
-      if (plus_src
-         && CONST_INT_P (XEXP (plus_src, 1)))
+      if (plus_src && CONST_INT_P (XEXP (plus_src, 1)))
        plus_cst_src = plus_src;
       else if (REG_P (SET_SRC (old_set))
               || plus_src)
@@ -3573,7 +3564,7 @@ elimination_costs_in_insn (rtx_insn *insn)
            {
              if ((REG_NOTE_KIND (links) == REG_EQUAL
                   || REG_NOTE_KIND (links) == REG_EQUIV)
-                 && GET_CODE (XEXP (links, 0)) == PLUS
+                 && PLUS_P (XEXP (links, 0))
                  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
                {
                  plus_cst_src = XEXP (links, 0);
@@ -4077,7 +4068,7 @@ init_eliminable_invariants (rtx_insn *first, bool do_subregs)
                  machine_mode mode;
 
                  mode = GET_MODE (SET_DEST (set));
-                 if (GET_CODE (x) == PLUS)
+                 if (PLUS_P (x))
                    {
                      /* This is PLUS of frame pointer and a constant,
                         and might be shared.  Unshare it.  */
@@ -5558,14 +5549,11 @@ gen_reload_chain_without_interm_reg_p (int r1, int r2)
      opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
   strip_paradoxical_subreg (&in, &out);
 
-  if (GET_CODE (in) == PLUS
-      && (REG_P (XEXP (in, 0))
-         || GET_CODE (XEXP (in, 0)) == SUBREG
+  if (PLUS_P (in)
+      && (REG_P (XEXP (in, 0)) || GET_CODE (XEXP (in, 0)) == SUBREG
          || MEM_P (XEXP (in, 0)))
-      && (REG_P (XEXP (in, 1))
-         || GET_CODE (XEXP (in, 1)) == SUBREG
-         || CONSTANT_P (XEXP (in, 1))
-         || MEM_P (XEXP (in, 1))))
+      && (REG_P (XEXP (in, 1)) || GET_CODE (XEXP (in, 1)) == SUBREG
+         || CONSTANT_P (XEXP (in, 1)) || MEM_P (XEXP (in, 1))))
     {
       insn = emit_insn (gen_rtx_SET (out, in));
       code = recog_memoized (insn);
@@ -5963,7 +5951,7 @@ function_invariant_p (const_rtx x)
     return 1;
   if (x == frame_pointer_rtx || x == arg_pointer_rtx)
     return 1;
-  if (GET_CODE (x) == PLUS
+  if (PLUS_P (x)
       && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
       && GET_CODE (XEXP (x, 1)) == CONST_INT)
     return 1;
@@ -6605,16 +6593,12 @@ choose_reload_regs (class insn_chain *chain)
            }
 
          /* Here's another way to see if the value is already lying around.  */
-         if (inheritance
-             && rld[r].in != 0
-             && ! reload_inherited[r]
+         if (inheritance && rld[r].in != 0 && !reload_inherited[r]
              && rld[r].out == 0
-             && (CONSTANT_P (rld[r].in)
-                 || GET_CODE (rld[r].in) == PLUS
-                 || REG_P (rld[r].in)
-                 || MEM_P (rld[r].in))
+             && (CONSTANT_P (rld[r].in) || PLUS_P (rld[r].in)
+                 || REG_P (rld[r].in) || MEM_P (rld[r].in))
              && (rld[r].nregs == max_group_size
-                 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
+                 || !reg_classes_intersect_p (rld[r].rclass, group_class)))
            search_equiv = rld[r].in;
 
          if (search_equiv)
@@ -8453,14 +8437,11 @@ gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
 
      ??? At some point, this whole thing needs to be rethought.  */
 
-  if (GET_CODE (in) == PLUS
-      && (REG_P (XEXP (in, 0))
-         || GET_CODE (XEXP (in, 0)) == SUBREG
+  if (PLUS_P (in)
+      && (REG_P (XEXP (in, 0)) || GET_CODE (XEXP (in, 0)) == SUBREG
          || MEM_P (XEXP (in, 0)))
-      && (REG_P (XEXP (in, 1))
-         || GET_CODE (XEXP (in, 1)) == SUBREG
-         || CONSTANT_P (XEXP (in, 1))
-         || MEM_P (XEXP (in, 1))))
+      && (REG_P (XEXP (in, 1)) || GET_CODE (XEXP (in, 1)) == SUBREG
+         || CONSTANT_P (XEXP (in, 1)) || MEM_P (XEXP (in, 1))))
     {
       /* We need to compute the sum of a register or a MEM and another
         register, constant, or MEM, and put it into the reload
@@ -8991,7 +8972,7 @@ inc_for_reload (rtx reloadreg, rtx in, rtx value, poly_int64 inc_amount)
 
   if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
     {
-      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
+      gcc_assert (PLUS_P (XEXP (value, 1)));
       inc = find_replacement (&XEXP (XEXP (value, 1), 1));
     }
   else
diff --git a/gcc/reorg.cc b/gcc/reorg.cc
index 07b2045986c..9a596e042c7 100644
--- a/gcc/reorg.cc
+++ b/gcc/reorg.cc
@@ -2663,12 +2663,10 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
        return;
 
       dest = SET_DEST (pat), src = SET_SRC (pat);
-      if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
-         && rtx_equal_p (XEXP (src, 0), dest)
-         && (!FLOAT_MODE_P (GET_MODE (src))
-             || flag_unsafe_math_optimizations)
-         && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
-         && ! side_effects_p (pat))
+      if ((PLUS_P (src) || MINUS_P (src)) && rtx_equal_p (XEXP (src, 0), dest)
+         && (!FLOAT_MODE_P (GET_MODE (src)) || flag_unsafe_math_optimizations)
+         && !reg_overlap_mentioned_p (dest, XEXP (src, 1))
+         && !side_effects_p (pat))
        {
          rtx other = XEXP (src, 1);
          rtx new_arith;
@@ -2681,7 +2679,7 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
            new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
                                        negate_rtx (GET_MODE (src), other));
          else
-           new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
+           new_arith = gen_rtx_fmt_ee (PLUS_P (src) ? MINUS : PLUS,
                                        GET_MODE (src), dest, other);
 
          ninsn = emit_insn_after (gen_rtx_SET (dest, new_arith), insn);
diff --git a/gcc/rtl.cc b/gcc/rtl.cc
index 86c322b19ca..460b8ed05d5 100644
--- a/gcc/rtl.cc
+++ b/gcc/rtl.cc
@@ -269,12 +269,11 @@ shared_const_p (const_rtx orig)
   /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
      a LABEL_REF, it isn't sharable.  */
   poly_int64 offset;
-  return (GET_CODE (XEXP (orig, 0)) == PLUS
+  return (PLUS_P (XEXP (orig, 0))
          && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
          && poly_int_rtx_p (XEXP (XEXP (orig, 0), 1), &offset));
 }
 
-
 /* Create a new copy of an rtx.
    Recursively copies the operands of the rtx,
    except for those few rtx codes that are sharable.  */
diff --git a/gcc/rtl.h b/gcc/rtl.h
index 7a8c4709257..bf032eb57a5 100644
--- a/gcc/rtl.h
+++ b/gcc/rtl.h
@@ -882,6 +882,16 @@ struct GTY(()) rtvec_def {
 /* Predicate yielding true iff RTX is a symbol ref.  */
 #define SYMBOL_REF_P(RTX) (GET_CODE (RTX) == SYMBOL_REF)
 
+/* RTL predicates for arithmetic binary operations.  */
+/* Predicate yielding nonzero iff RTX is an addition operation.  */
+#define PLUS_P(RTX) (GET_CODE (RTX) == PLUS)
+
+/* Predicate yielding nonzero iff RTX is a subtraction operation.  */
+#define MINUS_P(RTX) (GET_CODE (RTX) == MINUS)
+
+/* Predicate yielding nonzero iff RTX is a multiplication operation.  */
+#define MULT_P(RTX) (GET_CODE (RTX) == MULT)
+
 template <>
 template <>
 inline bool
diff --git a/gcc/rtlanal.cc b/gcc/rtlanal.cc
index 56da7435a28..5266fbbdd9d 100644
--- a/gcc/rtlanal.cc
+++ b/gcc/rtlanal.cc
@@ -859,11 +859,9 @@ get_integer_term (const_rtx x)
   if (GET_CODE (x) == CONST)
     x = XEXP (x, 0);
 
-  if (GET_CODE (x) == MINUS
-      && CONST_INT_P (XEXP (x, 1)))
-    return - INTVAL (XEXP (x, 1));
-  if (GET_CODE (x) == PLUS
-      && CONST_INT_P (XEXP (x, 1)))
+  if (MINUS_P (x) && CONST_INT_P (XEXP (x, 1)))
+    return -INTVAL (XEXP (x, 1));
+  if (PLUS_P (x) && CONST_INT_P (XEXP (x, 1)))
     return INTVAL (XEXP (x, 1));
   return 0;
 }
@@ -878,11 +876,9 @@ get_related_value (const_rtx x)
   if (GET_CODE (x) != CONST)
     return 0;
   x = XEXP (x, 0);
-  if (GET_CODE (x) == PLUS
-      && CONST_INT_P (XEXP (x, 1)))
+  if (PLUS_P (x) && CONST_INT_P (XEXP (x, 1)))
     return XEXP (x, 0);
-  else if (GET_CODE (x) == MINUS
-          && CONST_INT_P (XEXP (x, 1)))
+  else if (MINUS_P (x) && CONST_INT_P (XEXP (x, 1)))
     return XEXP (x, 0);
   return 0;
 }
@@ -931,7 +927,7 @@ split_const (rtx x, rtx *base_out, rtx *offset_out)
   if (GET_CODE (x) == CONST)
     {
       x = XEXP (x, 0);
-      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
+      if (PLUS_P (x) && CONST_INT_P (XEXP (x, 1)))
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
@@ -953,7 +949,7 @@ strip_offset (rtx x, poly_int64_pod *offset_out)
   rtx test = x;
   if (GET_CODE (test) == CONST)
     test = XEXP (test, 0);
-  if (GET_CODE (test) == PLUS)
+  if (PLUS_P (test))
     {
       base = XEXP (test, 0);
       test = XEXP (test, 1);
@@ -6597,7 +6593,7 @@ decompose_automod_address (struct address_info *info)
   gcc_checking_assert (info->base == info->base_term);
 
   rtx plus = XEXP (*info->inner, 1);
-  gcc_assert (GET_CODE (plus) == PLUS);
+  gcc_assert (PLUS_P (plus));
 
   info->base_term2 = &XEXP (plus, 0);
   gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
@@ -6617,7 +6613,7 @@ static rtx **
 extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
 {
   rtx x = *loc;
-  if (GET_CODE (x) == PLUS)
+  if (PLUS_P (x))
     {
       ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
       ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
@@ -6808,8 +6804,7 @@ HOST_WIDE_INT
 get_index_scale (const struct address_info *info)
 {
   rtx index = *info->index;
-  if (GET_CODE (index) == MULT
-      && CONST_INT_P (XEXP (index, 1))
+  if (MULT_P (index) && CONST_INT_P (XEXP (index, 1))
       && info->index_term == &XEXP (index, 0))
     return INTVAL (XEXP (index, 1));
 
diff --git a/gcc/sched-deps.cc b/gcc/sched-deps.cc
index 948aa0c3b60..3dd05cc7b6a 100644
--- a/gcc/sched-deps.cc
+++ b/gcc/sched-deps.cc
@@ -3439,9 +3439,7 @@ sched_analyze_insn (class deps_desc *deps, rtx x, rtx_insn *insn)
       tmp = SET_SRC (set);
       if (GET_CODE (tmp) == SUBREG)
        tmp = SUBREG_REG (tmp);
-      if ((GET_CODE (tmp) == PLUS
-          || GET_CODE (tmp) == MINUS)
-         && REG_P (XEXP (tmp, 0))
+      if ((PLUS_P (tmp) || MINUS_P (tmp)) && REG_P (XEXP (tmp, 0))
          && REGNO (XEXP (tmp, 0)) == STACK_POINTER_REGNUM
          && dest_regno == STACK_POINTER_REGNUM)
        src_regno = STACK_POINTER_REGNUM;
@@ -4867,12 +4865,12 @@ find_mem (struct mem_inc_info *mii, rtx *address_of_x)
       mii->mem_loc = address_of_x;
       mii->mem_index = NULL_RTX;
       mii->mem_constant = 0;
-      if (GET_CODE (reg0) == PLUS && CONST_INT_P (XEXP (reg0, 1)))
+      if (PLUS_P (reg0) && CONST_INT_P (XEXP (reg0, 1)))
        {
          mii->mem_constant = INTVAL (XEXP (reg0, 1));
          reg0 = XEXP (reg0, 0);
        }
-      if (GET_CODE (reg0) == PLUS)
+      if (PLUS_P (reg0))
        {
          mii->mem_index = XEXP (reg0, 1);
          reg0 = XEXP (reg0, 0);
diff --git a/gcc/simplify-rtx.cc b/gcc/simplify-rtx.cc
index fc0d6c3ca2a..dc7383a38e3 100644
--- a/gcc/simplify-rtx.cc
+++ b/gcc/simplify-rtx.cc
@@ -643,11 +643,8 @@ simplify_context::simplify_truncation (machine_mode mode, rtx op,
   /* If the machine can perform operations in the truncated mode, distribute
      the truncation, i.e. simplify (truncate:QI (op:SI (x:SI) (y:SI))) into
      (op:QI (truncate:QI (x:SI)) (truncate:QI (y:SI))).  */
-  if (1
-      && (!WORD_REGISTER_OPERATIONS || precision >= BITS_PER_WORD)
-      && (GET_CODE (op) == PLUS
-         || GET_CODE (op) == MINUS
-         || GET_CODE (op) == MULT))
+  if (1 && (!WORD_REGISTER_OPERATIONS || precision >= BITS_PER_WORD)
+      && (PLUS_P (op) || MINUS_P (op) || MULT_P (op)))
     {
       rtx op0 = simplify_gen_unary (TRUNCATE, mode, XEXP (op, 0), op_mode);
       if (op0)
@@ -947,8 +944,7 @@ simplify_context::simplify_unary_operation_1 (rtx_code code, machine_mode mode,
                                        XEXP (op, 0), XEXP (op, 1));
 
       /* (not (plus X -1)) can become (neg X).  */
-      if (GET_CODE (op) == PLUS
-         && XEXP (op, 1) == constm1_rtx)
+      if (PLUS_P (op) && XEXP (op, 1) == constm1_rtx)
        return simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
 
       /* Similarly, (not (neg X)) is (plus X -1).  Only do this for
@@ -966,14 +962,12 @@ simplify_context::simplify_unary_operation_1 (rtx_code code, machine_mode mode,
        return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);
 
       /* (not (plus X C)) for signbit C is (xor X D) with D = ~C.  */
-      if (GET_CODE (op) == PLUS
-         && CONST_INT_P (XEXP (op, 1))
+      if (PLUS_P (op) && CONST_INT_P (XEXP (op, 1))
          && mode_signbit_p (mode, XEXP (op, 1))
-         && (temp = simplify_unary_operation (NOT, mode,
-                                              XEXP (op, 1), mode)) != 0)
+         && (temp = simplify_unary_operation (NOT, mode, XEXP (op, 1), mode))
+              != 0)
        return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);
 
-
       /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for
         operands other than 1, but that is not valid.  We could do a
         similar simplification for (not (lshiftrt C X)) where C is
@@ -1079,8 +1073,7 @@ simplify_context::simplify_unary_operation_1 (rtx_code code, machine_mode mode,
        }
 
       /* (neg (plus X 1)) can become (not X).  */
-      if (GET_CODE (op) == PLUS
-         && XEXP (op, 1) == const1_rtx)
+      if (PLUS_P (op) && XEXP (op, 1) == const1_rtx)
        return simplify_gen_unary (NOT, mode, XEXP (op, 0), mode);
 
       /* Similarly, (neg (not X)) is (plus X 1).  */
@@ -1093,13 +1086,11 @@ simplify_context::simplify_unary_operation_1 (rtx_code code, machine_mode mode,
         both +0, (minus Y X) is the same as (minus X Y).  If the
         rounding mode is towards +infinity (or -infinity) then the two
         expressions will be rounded differently.  */
-      if (GET_CODE (op) == MINUS
-         && !HONOR_SIGNED_ZEROS (mode)
+      if (MINUS_P (op) && !HONOR_SIGNED_ZEROS (mode)
          && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
        return simplify_gen_binary (MINUS, mode, XEXP (op, 1), XEXP (op, 0));
 
-      if (GET_CODE (op) == PLUS
-         && !HONOR_SIGNED_ZEROS (mode)
+      if (PLUS_P (op) && !HONOR_SIGNED_ZEROS (mode)
          && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
        {
          /* (neg (plus A C)) is simplified to (minus -C A).  */
@@ -1118,8 +1109,7 @@ simplify_context::simplify_unary_operation_1 (rtx_code code, machine_mode mode,
 
       /* (neg (mult A B)) becomes (mult A (neg B)).
         This works even for floating-point values.  */
-      if (GET_CODE (op) == MULT
-         && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
+      if (MULT_P (op) && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
        {
          temp = simplify_gen_unary (NEG, mode, XEXP (op, 1), mode);
          return simplify_gen_binary (MULT, mode, XEXP (op, 0), temp);
@@ -1209,8 +1199,7 @@ simplify_context::simplify_unary_operation_1 (rtx_code code, machine_mode mode,
     case TRUNCATE:
       /* Don't optimize (lshiftrt (mult ...)) as it would interfere
         with the umulXi3_highpart patterns.  */
-      if (GET_CODE (op) == LSHIFTRT
-         && GET_CODE (XEXP (op, 0)) == MULT)
+      if (GET_CODE (op) == LSHIFTRT && MULT_P (XEXP (op, 0)))
        break;
 
       if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
@@ -1482,16 +1471,15 @@ simplify_context::simplify_unary_operation_1 (rtx_code code, machine_mode mode,
         becomes just the MINUS if its mode is MODE.  This allows
         folding switch statements on machines using casesi (such as
         the VAX).  */
-      if (GET_CODE (op) == TRUNCATE
-         && GET_MODE (XEXP (op, 0)) == mode
-         && GET_CODE (XEXP (op, 0)) == MINUS
+      if (GET_CODE (op) == TRUNCATE && GET_MODE (XEXP (op, 0)) == mode
+         && MINUS_P (XEXP (op, 0))
          && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
          && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
        return XEXP (op, 0);
 
       /* Extending a widening multiplication should be canonicalized to
         a wider widening multiplication.  */
-      if (GET_CODE (op) == MULT)
+      if (MULT_P (op))
        {
          rtx lhs = XEXP (op, 0);
          rtx rhs = XEXP (op, 1);
@@ -1717,7 +1705,7 @@ simplify_context::simplify_unary_operation_1 (rtx_code code, machine_mode mode,
 
       /* Extending a widening multiplication should be canonicalized to
         a wider widening multiplication.  */
-      if (GET_CODE (op) == MULT)
+      if (MULT_P (op))
        {
          rtx lhs = XEXP (op, 0);
          rtx rhs = XEXP (op, 1);
@@ -2803,8 +2791,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
              coeff0 = wi::minus_one (GET_MODE_PRECISION (int_mode));
              lhs = XEXP (lhs, 0);
            }
-         else if (GET_CODE (lhs) == MULT
-                  && CONST_SCALAR_INT_P (XEXP (lhs, 1)))
+         else if (MULT_P (lhs) && CONST_SCALAR_INT_P (XEXP (lhs, 1)))
            {
              coeff0 = rtx_mode_t (XEXP (lhs, 1), int_mode);
              lhs = XEXP (lhs, 0);
@@ -2824,8 +2811,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
              coeff1 = wi::minus_one (GET_MODE_PRECISION (int_mode));
              rhs = XEXP (rhs, 0);
            }
-         else if (GET_CODE (rhs) == MULT
-                  && CONST_INT_P (XEXP (rhs, 1)))
+         else if (MULT_P (rhs) && CONST_INT_P (XEXP (rhs, 1)))
            {
              coeff1 = rtx_mode_t (XEXP (rhs, 1), int_mode);
              rhs = XEXP (rhs, 0);
@@ -2856,32 +2842,32 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
          /* Optimize (X - 1) * Y + Y to X * Y.  */
          lhs = op0;
          rhs = op1;
-         if (GET_CODE (op0) == MULT)
+         if (MULT_P (op0))
            {
-             if (((GET_CODE (XEXP (op0, 0)) == PLUS
+             if (((PLUS_P (XEXP (op0, 0))
                    && XEXP (XEXP (op0, 0), 1) == constm1_rtx)
-                  || (GET_CODE (XEXP (op0, 0)) == MINUS
+                  || (MINUS_P (XEXP (op0, 0))
                       && XEXP (XEXP (op0, 0), 1) == const1_rtx))
                  && rtx_equal_p (XEXP (op0, 1), op1))
                lhs = XEXP (XEXP (op0, 0), 0);
-             else if (((GET_CODE (XEXP (op0, 1)) == PLUS
+             else if (((PLUS_P (XEXP (op0, 1))
                         && XEXP (XEXP (op0, 1), 1) == constm1_rtx)
-                       || (GET_CODE (XEXP (op0, 1)) == MINUS
+                       || (MINUS_P (XEXP (op0, 1))
                            && XEXP (XEXP (op0, 1), 1) == const1_rtx))
                       && rtx_equal_p (XEXP (op0, 0), op1))
                lhs = XEXP (XEXP (op0, 1), 0);
            }
-         else if (GET_CODE (op1) == MULT)
+         else if (MULT_P (op1))
            {
-             if (((GET_CODE (XEXP (op1, 0)) == PLUS
+             if (((PLUS_P (XEXP (op1, 0))
                    && XEXP (XEXP (op1, 0), 1) == constm1_rtx)
-                  || (GET_CODE (XEXP (op1, 0)) == MINUS
+                  || (MINUS_P (XEXP (op1, 0))
                       && XEXP (XEXP (op1, 0), 1) == const1_rtx))
                  && rtx_equal_p (XEXP (op1, 1), op0))
                rhs = XEXP (XEXP (op1, 0), 0);
-             else if (((GET_CODE (XEXP (op1, 1)) == PLUS
+             else if (((PLUS_P (XEXP (op1, 1))
                         && XEXP (XEXP (op1, 1), 1) == constm1_rtx)
-                       || (GET_CODE (XEXP (op1, 1)) == MINUS
+                       || (MINUS_P (XEXP (op1, 1))
                            && XEXP (XEXP (op1, 1), 1) == const1_rtx))
                       && rtx_equal_p (XEXP (op1, 0), op0))
                rhs = XEXP (XEXP (op1, 1), 0);
@@ -2900,8 +2886,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
                                                         XEXP (op0, 1)));
 
       /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).  */
-      if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
-         && GET_CODE (op0) == MULT
+      if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode) && MULT_P (op0)
          && GET_CODE (XEXP (op0, 0)) == NEG)
        {
          rtx in1, in2;
@@ -3022,8 +3007,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
              coeff0 = wi::minus_one (GET_MODE_PRECISION (int_mode));
              lhs = XEXP (lhs, 0);
            }
-         else if (GET_CODE (lhs) == MULT
-                  && CONST_SCALAR_INT_P (XEXP (lhs, 1)))
+         else if (MULT_P (lhs) && CONST_SCALAR_INT_P (XEXP (lhs, 1)))
            {
              coeff0 = rtx_mode_t (XEXP (lhs, 1), int_mode);
              lhs = XEXP (lhs, 0);
@@ -3043,8 +3027,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
              negcoeff1 = wi::one (GET_MODE_PRECISION (int_mode));
              rhs = XEXP (rhs, 0);
            }
-         else if (GET_CODE (rhs) == MULT
-                  && CONST_INT_P (XEXP (rhs, 1)))
+         else if (MULT_P (rhs) && CONST_INT_P (XEXP (rhs, 1)))
            {
              negcoeff1 = wi::neg (rtx_mode_t (XEXP (rhs, 1), int_mode));
              rhs = XEXP (rhs, 0);
@@ -3075,17 +3058,17 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
 
          /* Optimize (X + 1) * Y - Y to X * Y.  */
          lhs = op0;
-         if (GET_CODE (op0) == MULT)
+         if (MULT_P (op0))
            {
-             if (((GET_CODE (XEXP (op0, 0)) == PLUS
+             if (((PLUS_P (XEXP (op0, 0))
                    && XEXP (XEXP (op0, 0), 1) == const1_rtx)
-                  || (GET_CODE (XEXP (op0, 0)) == MINUS
+                  || (MINUS_P (XEXP (op0, 0))
                       && XEXP (XEXP (op0, 0), 1) == constm1_rtx))
                  && rtx_equal_p (XEXP (op0, 1), op1))
                lhs = XEXP (XEXP (op0, 0), 0);
-             else if (((GET_CODE (XEXP (op0, 1)) == PLUS
+             else if (((PLUS_P (XEXP (op0, 1))
                         && XEXP (XEXP (op0, 1), 1) == const1_rtx)
-                       || (GET_CODE (XEXP (op0, 1)) == MINUS
+                       || (MINUS_P (XEXP (op0, 1))
                            && XEXP (XEXP (op0, 1), 1) == constm1_rtx))
                       && rtx_equal_p (XEXP (op0, 0), op1))
                lhs = XEXP (XEXP (op0, 1), 0);
@@ -3145,8 +3128,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
        return reversed;
 
       /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).  */
-      if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
-         && GET_CODE (op1) == MULT
+      if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode) && MULT_P (op1)
          && GET_CODE (XEXP (op1, 0)) == NEG)
        {
          rtx in1, in2;
@@ -3161,8 +3143,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
 
       /* Canonicalize (minus (neg A) (mult B C)) to
         (minus (mult (neg B) C) A).  */
-      if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
-         && GET_CODE (op1) == MULT
+      if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode) && MULT_P (op1)
          && GET_CODE (op0) == NEG)
        {
          rtx in1, in2;
@@ -3208,9 +3189,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
             just moved the NEG to the second operand, simplify_gen_binary
             below could through simplify_associative_operation move
             the NEG around again and recurse endlessly.  */
-         if (temp
-             && GET_CODE (op1) == MULT
-             && GET_CODE (temp) == MULT
+         if (temp && MULT_P (op1) && MULT_P (temp)
              && XEXP (op1, 0) == XEXP (temp, 0)
              && GET_CODE (XEXP (temp, 1)) == NEG
              && XEXP (op1, 1) == XEXP (XEXP (temp, 1), 0))
@@ -3225,9 +3204,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
             just moved the NEG to the second operand, simplify_gen_binary
             below could through simplify_associative_operation move
             the NEG around again and recurse endlessly.  */
-         if (temp
-             && GET_CODE (op0) == MULT
-             && GET_CODE (temp) == MULT
+         if (temp && MULT_P (op0) && MULT_P (temp)
              && XEXP (op0, 0) == XEXP (temp, 0)
              && GET_CODE (XEXP (temp, 1)) == NEG
              && XEXP (op0, 1) == XEXP (XEXP (temp, 1), 0))
@@ -3405,11 +3382,10 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
         the IOR as a PLUS and we can associate.  This is valid if OP1
          can be safely shifted left C bits.  */
       if (CONST_INT_P (trueop1) && GET_CODE (op0) == ASHIFTRT
-          && GET_CODE (XEXP (op0, 0)) == PLUS
-          && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
-          && CONST_INT_P (XEXP (op0, 1))
-          && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
-        {
+         && PLUS_P (XEXP (op0, 0)) && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
+         && CONST_INT_P (XEXP (op0, 1))
+         && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
+       {
          int count = INTVAL (XEXP (op0, 1));
          HOST_WIDE_INT mask = UINTVAL (trueop1) << count;
 
@@ -3420,7 +3396,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
                                        plus_constant (mode, XEXP (op0, 0),
                                                       mask),
                                        XEXP (op0, 1));
-        }
+       }
 
       /* The following happens with bitfield merging.
          (X & C) | ((X | Y) & ~C) -> X | (Y & ~C) */
@@ -3497,8 +3473,7 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
          && mode_signbit_p (mode, op1))
        return simplify_gen_binary (PLUS, mode, op0, op1);
       /* (xor (plus X C1) C2) is (xor X (C1^C2)) if C1 is signbit.  */
-      if (CONST_SCALAR_INT_P (op1)
-         && GET_CODE (op0) == PLUS
+      if (CONST_SCALAR_INT_P (op1) && PLUS_P (op0)
          && CONST_SCALAR_INT_P (XEXP (op0, 1))
          && mode_signbit_p (mode, XEXP (op0, 1)))
        return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
@@ -3849,11 +3824,10 @@ simplify_context::simplify_binary_operation_1 (rtx_code code,
         and for - instead of + and/or ^ instead of |.
          Also, if (N & M) == 0, then
         (A +- N) & M -> A & M.  */
-      if (CONST_INT_P (trueop1)
-         && HWI_COMPUTABLE_MODE_P (mode)
+      if (CONST_INT_P (trueop1) && HWI_COMPUTABLE_MODE_P (mode)
          && ~UINTVAL (trueop1)
          && (UINTVAL (trueop1) & (UINTVAL (trueop1) + 1)) == 0
-         && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS))
+         && (PLUS_P (op0) || MINUS_P (op0)))
        {
          rtx pmop[2];
          int which;
@@ -5413,8 +5387,7 @@ simplify_context::simplify_plus_minus (rtx_code code, machine_mode mode,
              break;
 
            case CONST:
-             if (n_ops != ARRAY_SIZE (ops)
-                 && GET_CODE (XEXP (this_op, 0)) == PLUS
+             if (n_ops != ARRAY_SIZE (ops) && PLUS_P (XEXP (this_op, 0))
                  && CONSTANT_P (XEXP (XEXP (this_op, 0), 0))
                  && CONSTANT_P (XEXP (XEXP (this_op, 0), 1)))
                {
@@ -5672,10 +5645,8 @@ simplify_context::simplify_plus_minus (rtx_code code, machine_mode mode,
 static bool
 plus_minus_operand_p (const_rtx x)
 {
-  return GET_CODE (x) == PLUS
-         || GET_CODE (x) == MINUS
-        || (GET_CODE (x) == CONST
-            && GET_CODE (XEXP (x, 0)) == PLUS
+  return PLUS_P (x) || MINUS_P (x)
+        || (GET_CODE (x) == CONST && PLUS_P (XEXP (x, 0))
             && CONSTANT_P (XEXP (XEXP (x, 0), 0))
             && CONSTANT_P (XEXP (XEXP (x, 0), 1)));
 }
@@ -5761,11 +5732,9 @@ simplify_context::simplify_relational_operation_1 (rtx_code code,
 
   /* (LTU/GEU (PLUS a C) C), where C is constant, can be simplified to
      (GEU/LTU a -C).  Likewise for (LTU/GEU (PLUS a C) a).  */
-  if ((code == LTU || code == GEU)
-      && GET_CODE (op0) == PLUS
+  if ((code == LTU || code == GEU) && PLUS_P (op0)
       && CONST_INT_P (XEXP (op0, 1))
-      && (rtx_equal_p (op1, XEXP (op0, 0))
-         || rtx_equal_p (op1, XEXP (op0, 1)))
+      && (rtx_equal_p (op1, XEXP (op0, 0)) || rtx_equal_p (op1, XEXP (op0, 1)))
       /* (LTU/GEU (PLUS a 0) 0) is not the same as (GEU/LTU a 0). */
       && XEXP (op0, 1) != const0_rtx)
     {
@@ -5777,20 +5746,18 @@ simplify_context::simplify_relational_operation_1 (rtx_code code,
 
   /* (GTU (PLUS a C) (C - 1)) where C is a non-zero constant can be
      transformed into (LTU a -C).  */
-  if (code == GTU && GET_CODE (op0) == PLUS && CONST_INT_P (op1)
+  if (code == GTU && PLUS_P (op0) && CONST_INT_P (op1)
       && CONST_INT_P (XEXP (op0, 1))
       && (UINTVAL (op1) == UINTVAL (XEXP (op0, 1)) - 1)
       && XEXP (op0, 1) != const0_rtx)
     {
-      rtx new_cmp
-       = simplify_gen_unary (NEG, cmp_mode, XEXP (op0, 1), cmp_mode);
+      rtx new_cmp = simplify_gen_unary (NEG, cmp_mode, XEXP (op0, 1), cmp_mode);
       return simplify_gen_relational (LTU, mode, cmp_mode,
                                       XEXP (op0, 0), new_cmp);
     }
 
   /* Canonicalize (LTU/GEU (PLUS a b) b) as (LTU/GEU (PLUS a b) a).  */
-  if ((code == LTU || code == GEU)
-      && GET_CODE (op0) == PLUS
+  if ((code == LTU || code == GEU) && PLUS_P (op0)
       && rtx_equal_p (op1, XEXP (op0, 1))
       /* Don't recurse "infinitely" for (LTU/GEU (PLUS b b) b).  */
       && !rtx_equal_p (op1, XEXP (op0, 0)))
diff --git a/gcc/var-tracking.cc b/gcc/var-tracking.cc
index 9c40ec4fb8b..37bd7f2e017 100644
--- a/gcc/var-tracking.cc
+++ b/gcc/var-tracking.cc
@@ -739,10 +739,9 @@ stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
     case PRE_MODIFY:
     case POST_MODIFY:
       /* We handle only adjustments by constant amount.  */
-      gcc_assert (GET_CODE (src) == PLUS
-                 && CONST_INT_P (XEXP (src, 1))
+      gcc_assert (PLUS_P (src) && CONST_INT_P (XEXP (src, 1))
                  && XEXP (src, 0) == stack_pointer_rtx);
-      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
+      ((HOST_WIDE_INT *) arg)[GET_CODE (op) == POST_MODIFY]
        -= INTVAL (XEXP (src, 1));
       return 0;
     default:
@@ -1149,15 +1148,13 @@ adjust_mems (rtx loc, const_rtx old_rtx, void *data)
       if (tem == NULL_RTX)
        tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
     finish_subreg:
-      if (MAY_HAVE_DEBUG_BIND_INSNS
-         && GET_CODE (tem) == SUBREG
-         && (GET_CODE (SUBREG_REG (tem)) == PLUS
-             || GET_CODE (SUBREG_REG (tem)) == MINUS
-             || GET_CODE (SUBREG_REG (tem)) == MULT
+      if (MAY_HAVE_DEBUG_BIND_INSNS && GET_CODE (tem) == SUBREG
+         && (PLUS_P (SUBREG_REG (tem)) || MINUS_P (SUBREG_REG (tem))
+             || MULT_P (SUBREG_REG (tem))
              || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
-         && is_a <scalar_int_mode> (GET_MODE (tem), &tem_mode)
-         && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (tem)),
-                                    &tem_subreg_mode)
+         && is_a<scalar_int_mode> (GET_MODE (tem), &tem_mode)
+         && is_a<scalar_int_mode> (GET_MODE (SUBREG_REG (tem)),
+                                   &tem_subreg_mode)
          && (GET_MODE_PRECISION (tem_mode)
              < GET_MODE_PRECISION (tem_subreg_mode))
          && subreg_lowpart_p (tem)
@@ -2049,8 +2046,7 @@ negative_power_of_two_p (HOST_WIDE_INT i)
 static rtx
 vt_get_canonicalize_base (rtx loc)
 {
-  while ((GET_CODE (loc) == PLUS
-         || GET_CODE (loc) == AND)
+  while ((PLUS_P (loc) || GET_CODE (loc) == AND)
         && GET_CODE (XEXP (loc, 1)) == CONST_INT
         && (GET_CODE (loc) != AND
             || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
@@ -2192,8 +2188,7 @@ vt_canonicalize_addr (dataflow_set *set, rtx oloc)
 
   while (retry)
     {
-      while (GET_CODE (loc) == PLUS
-            && poly_int_rtx_p (XEXP (loc, 1), &term))
+      while (PLUS_P (loc) && poly_int_rtx_p (XEXP (loc, 1), &term))
        {
          ofst += term;
          loc = XEXP (loc, 0);
@@ -2220,8 +2215,7 @@ vt_canonicalize_addr (dataflow_set *set, rtx oloc)
            loc = get_addr_from_global_cache (loc);
 
          /* Consolidate plus_constants.  */
-         while (maybe_ne (ofst, 0)
-                && GET_CODE (loc) == PLUS
+         while (maybe_ne (ofst, 0) && PLUS_P (loc)
                 && poly_int_rtx_p (XEXP (loc, 1), &term))
            {
              ofst += term;
@@ -8841,8 +8835,7 @@ emit_note_insn_var_location (variable **varp, emit_note_data *data)
                    REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
                }
            }
-         else if (MEM_P (loc[n_var_parts])
-                  && GET_CODE (XEXP (loc2, 0)) == PLUS
+         else if (MEM_P (loc[n_var_parts]) && PLUS_P (XEXP (loc2, 0))
                   && REG_P (XEXP (XEXP (loc2, 0), 0))
                   && poly_int_rtx_p (XEXP (XEXP (loc2, 0), 1), &offset2))
            {
@@ -10153,7 +10146,7 @@ vt_initialize (void)
       elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
       if (elim != reg)
        {
-         if (GET_CODE (elim) == PLUS)
+         if (PLUS_P (elim))
            elim = XEXP (elim, 0);
          if (elim == stack_pointer_rtx)
            vt_init_cfa_base ();
@@ -10173,7 +10166,7 @@ vt_initialize (void)
       elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
       if (elim != reg)
        {
-         if (GET_CODE (elim) == PLUS)
+         if (PLUS_P (elim))
            {
              fp_cfa_offset -= rtx_to_poly_int64 (XEXP (elim, 1));
              elim = XEXP (elim, 0);
@@ -10205,7 +10198,7 @@ vt_initialize (void)
       elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
       if (elim != reg)
        {
-         if (GET_CODE (elim) == PLUS)
+         if (PLUS_P (elim))
            elim = XEXP (elim, 0);
          if (elim == hard_frame_pointer_rtx)
            vt_init_cfa_base ();
-- 
2.38.1
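
As a usage sketch only (not part of the patch itself): the new macros are drop-in
replacements for the open-coded GET_CODE comparisons. The helper and its argument
below are invented for illustration; only PLUS_P, REG_P, CONST_INT_P and XEXP come
from rtl.h.

/* Hypothetical example: true iff ADDR has the form (plus (reg) (const_int)).
   Before the patch the first test would read GET_CODE (addr) == PLUS.  */
static bool
reg_plus_const_p (rtx addr)
{
  return (PLUS_P (addr)
          && REG_P (XEXP (addr, 0))
          && CONST_INT_P (XEXP (addr, 1)));
}

The rewrites elsewhere in the patch are meant to be purely mechanical in the same
way, with no change in behavior.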

