https://gcc.gnu.org/bugzilla/show_bug.cgi?id=88784

--- Comment #20 from Qi Feng <ffengqi at gcc dot gnu.org> ---
I have tried to merge signed and unsigned together:

/* x >  y   &&   x != ( 0 or XXX_MIN )   -->   x > y */
/* Rationale: x > y implies x is strictly greater than the minimum value
   of its type (0 for unsigned, XXX_MIN for signed), hence x != MIN is
   redundant and the conjunction reduces to the comparison itself,
   captured as @3.  Covers both the boolean and the bitwise form of the
   conjunction.  */
(for and (truth_and bit_and)
 (simplify
  (and:c (gt:c@3 @0 @1) (ne @0 INTEGER_CST@2))
  /* Only integral operand types are handled.  */
  (if (INTEGRAL_TYPE_P (TREE_TYPE(@0)) && INTEGRAL_TYPE_P (TREE_TYPE(@1)))
   /* Unsigned case: the constant must be 0.  Signed case: it must be the
      most negative value of @2's precision.  Mixed signedness of the two
      comparison operands is deliberately left alone.  */
   (if ((TYPE_UNSIGNED (TREE_TYPE(@0)) && TYPE_UNSIGNED (TREE_TYPE(@1))
            && integer_zerop (@2))
        || (!TYPE_UNSIGNED (TREE_TYPE(@0)) && !TYPE_UNSIGNED (TREE_TYPE(@1))
            && wi::eq_p (wi::to_wide (@2),
                         wi::min_value (TYPE_PRECISION (TREE_TYPE (@2)),
SIGNED))))
    @3))))

/* x >  y  ||  x != ( 0 or XXX_MIN )   -->  x != ( 0 or XXX_MIN ) */
/* Rationale: x > y implies x != MIN (see the && pattern above), so the
   disjunction is subsumed by the inequality alone — here @3 captures the
   NE expression, not the comparison.  Covers both the boolean and the
   bitwise form of the disjunction.  */
(for or (truth_or bit_ior)
 (simplify
  (or:c (gt:c @0 @1) (ne@3 @0 INTEGER_CST@2))
  /* Only integral operand types are handled.  */
  (if (INTEGRAL_TYPE_P (TREE_TYPE(@0)) && INTEGRAL_TYPE_P (TREE_TYPE(@1)))
   /* Unsigned case: constant 0.  Signed case: the most negative value of
      @2's precision.  Mixed signedness is not handled.  */
   (if ((TYPE_UNSIGNED (TREE_TYPE(@0)) && TYPE_UNSIGNED (TREE_TYPE(@1))
            && integer_zerop (@2))
        || (!TYPE_UNSIGNED (TREE_TYPE(@0)) && !TYPE_UNSIGNED (TREE_TYPE(@1))
            && wi::eq_p (wi::to_wide (@2),
                         wi::min_value (TYPE_PRECISION (TREE_TYPE (@2)),
SIGNED))))
    @3))))

/* x <  y  &&  x != ( UXXX_MAX or XXX_MAX )  -->  x < y */
/* Rationale: the dual of the first pattern — x < y implies x is strictly
   below the maximum value of its type, hence x != MAX is redundant and
   the conjunction reduces to the comparison, captured as @3.  */
(for and (truth_and bit_and)
 (simplify
  (and:c (lt:c@3 @0 @1) (ne @0 INTEGER_CST@2))
  /* Only integral operand types are handled.  */
  (if (INTEGRAL_TYPE_P (TREE_TYPE(@0)) && INTEGRAL_TYPE_P (TREE_TYPE(@1)))
   /* Here both arms compare @2 against a maximum: the all-ones value for
      unsigned, the most positive value for signed, each at @2's
      precision.  Mixed signedness is not handled.  */
   (if ((TYPE_UNSIGNED (TREE_TYPE(@0)) && TYPE_UNSIGNED (TREE_TYPE(@1))
            && wi::eq_p (wi::to_wide (@2),
                         wi::max_value (TYPE_PRECISION (TREE_TYPE (@2)),
UNSIGNED)))
        || (!TYPE_UNSIGNED (TREE_TYPE(@0)) && !TYPE_UNSIGNED (TREE_TYPE(@1))
            && wi::eq_p (wi::to_wide (@2),
                         wi::max_value (TYPE_PRECISION (TREE_TYPE (@2)),
SIGNED))))
    @3))))

(These are not all of the needed patterns; listing every variant would make
this comment too long, and I don't think it's necessary.)

I also tried it on an x86 laptop, and it seems to work. However, I ran into a
bootstrap issue; I don't know whether it was caused by my patch or by the
version of GCC I used for the build.

Another problem is that I can't craft any C/C++ code that exercises truth_and.
Maybe it's produced by other language front ends? Is it necessary to handle
truth_and alongside bit_and?

I also need to make this work on a ppc64le machine; could you give me some
hints on where to look?

BTW, the following tests may be useful if you want to test it on your machine:

#include <limits.h>
/* x >  y   &&   x != ( 0 or INT_MIN )   -->   x > y */

/* Base form, unsigned: should fold to just x > y (x > y implies x != 0).  */
_Bool f0 (unsigned x, unsigned y)
{
  return x >  y  &&  x != 0;
}

/* Commuted comparison (y < x instead of x > y); exercises gt:c.  */
_Bool f1 (unsigned x, unsigned y)
{
  return y <  x  &&  x != 0;
}

/* Swapped conjunction operands; exercises and:c.  */
_Bool f2 (unsigned x, unsigned y)
{
  return x != 0  &&  x >  y;
}

/* Both the conjunction and the comparison commuted.  */
_Bool f3 (unsigned x, unsigned y)
{
  return x != 0  &&  y <  x;
}

/* Signed base form: should fold to just x > y (x > y implies x != INT_MIN).  */
_Bool f4 (int x, int y)
{
  return x >  y  &&  x != INT_MIN;
}

/* Signed, commuted comparison; exercises gt:c.  */
_Bool f5 (int x, int y)
{
  return y <  x  &&  x != INT_MIN;
}

/* Signed, swapped conjunction operands; exercises and:c.  */
_Bool f6 (int x, int y)
{
  return x != INT_MIN  &&  x >  y;
}

/* Signed, both the conjunction and the comparison commuted.  */
_Bool f7 (int x, int y)
{
  return x != INT_MIN  &&  y <  x;
}

/* Narrow unsigned type; note the operands promote to int in the
   comparison — checks the pattern still fires after promotion.  */
_Bool f8 (unsigned char x, unsigned char y)
{
  return x >  y && x != 0;
}

/* x >  y  ||  x != ( 0 or XXX_MIN )   -->  x != ( 0 or XXX_MIN ) */

/* Disjunction form, unsigned: should fold to just x != 0.  */
_Bool f10 (unsigned x, unsigned y)
{
  return x >  y  ||  x != 0;
}

/* Disjunction form, signed: should fold to just x != INT_MIN.  */
_Bool f11 (int x, int y)
{
  return x >  y  ||  x != INT_MIN;
}

/* Disjunction form, narrow unsigned type (promotes to int).  */
_Bool f12 (unsigned char x, unsigned char y)
{
  return x >  y  ||  x != 0;
}

/* Disjunction form, narrow signed type (promotes to int).  */
_Bool f13 (signed char x, signed char y)
{
  return x >  y  ||  x != SCHAR_MIN;
}

/* x <  y  &&  x != ( UXXX_MAX or XXX_MAX )  -->  x < y */

/* Dual pattern, unsigned: should fold to just x < y (x < y implies
   x != UINT_MAX).  */
_Bool f20 (unsigned x, unsigned y)
{
  return x <  y  &&  x != UINT_MAX;
}

/* Dual pattern, signed: should fold to just x < y (x < y implies
   x != INT_MAX).  */
_Bool f21 (int x, int y)
{
  return x <  y  &&  x != INT_MAX;
}

/* Dual pattern, narrow unsigned type (promotes to int).  */
_Bool f22 (unsigned char x, unsigned char y)
{
  return x <  y  &&  x != UCHAR_MAX;
}

/* Dual pattern, narrow signed type: should fold to just x < y.
   The constant must be SCHAR_MAX, not SCHAR_MIN — the rule being tested
   is "x < y && x != XXX_MAX --> x < y" (parallel to f21/f22); with
   SCHAR_MIN the simplification cannot fire and the test is dead.  The
   original SCHAR_MIN looks like a copy-paste slip from f13.  */
_Bool f23 (signed char x, signed char y)
{
  return x <  y  &&  x != SCHAR_MAX;
}

Reply via email to