This moves the rest of the patterns in match-bitwise.pd from the match-and-simplify branch (those that make sense — I'll merge the remaining ones into the tail of match.pd on the branch).
Bootstrap & regtest running on x86_64-unknown-linux-gnu.

Richard.

2015-06-30  Richard Biener  <rguent...@suse.de>

	* fold-const.c (fold_binary_loc): Move ~X ^ X -> -1 and
	(X & Y) ^ Y -> ~X & Y transforms to ...
	* match.pd: ... here.

Index: gcc/fold-const.c
===================================================================
--- gcc/fold-const.c	(revision 225184)
+++ gcc/fold-const.c	(working copy)
@@ -10979,24 +10925,6 @@ fold_binary_loc (location_t loc,
       goto bit_rotate;

     case BIT_XOR_EXPR:
-      /* ~X ^ X is -1.  */
-      if (TREE_CODE (arg0) == BIT_NOT_EXPR
-	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
-	{
-	  t1 = build_zero_cst (type);
-	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
-	  return omit_one_operand_loc (loc, type, t1, arg1);
-	}
-
-      /* X ^ ~X is -1.  */
-      if (TREE_CODE (arg1) == BIT_NOT_EXPR
-	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
-	{
-	  t1 = build_zero_cst (type);
-	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
-	  return omit_one_operand_loc (loc, type, t1, arg0);
-	}
-
       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
       if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && INTEGRAL_TYPE_P (type)
@@ -11005,45 +10933,6 @@ fold_binary_loc (location_t loc,
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));

-      /* Fold (X & Y) ^ Y as ~X & Y.  */
-      if (TREE_CODE (arg0) == BIT_AND_EXPR
-	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
-	{
-	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
-	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
-				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
-				  fold_convert_loc (loc, type, arg1));
-	}
-
-      /* Fold (X & Y) ^ X as ~Y & X.  */
-      if (TREE_CODE (arg0) == BIT_AND_EXPR
-	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
-	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
-	{
-	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
-	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
-				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
-				  fold_convert_loc (loc, type, arg1));
-	}
-
-      /* Fold X ^ (X & Y) as X & ~Y.  */
-      if (TREE_CODE (arg1) == BIT_AND_EXPR
-	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
-	{
-	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
-	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
-				  fold_convert_loc (loc, type, arg0),
-				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
-	}
-
-      /* Fold X ^ (Y & X) as ~Y & X.  */
-      if (TREE_CODE (arg1) == BIT_AND_EXPR
-	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
-	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
-	{
-	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
-	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
-				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
-				  fold_convert_loc (loc, type, arg0));
-	}
-
       /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
       goto bit_rotate;
Index: gcc/match.pd
===================================================================
--- gcc/match.pd	(revision 225184)
+++ gcc/match.pd	(working copy)
@@ -299,6 +299,11 @@ (define_operator_list swapped_tcc_compar
  (bit_xor @0 integer_all_onesp@1)
  (bit_not @0))

+/* ~X ^ X is -1.  */
+(simplify
+ (bit_xor:c (bit_not @0) @0)
+ { build_all_ones_cst (type); })
+
 /* x & ~0 -> x  */
 (simplify
  (bit_and @0 integer_all_onesp)
@@ -429,6 +434,11 @@ (define_operator_list swapped_tcc_compar
  (bit_xor (convert? (bit_not @0)) INTEGER_CST@1)
  (bit_xor (convert @0) (bit_not @1)))

+/* Fold (X & Y) ^ Y as ~X & Y.  */
+(simplify
+ (bit_xor:c (bit_and:c @0 @1) @1)
+ (bit_and (bit_not @0) @1))
+
 (simplify
  (abs (abs@1 @0))