fold-const.c (fold_truthop): Build a type for both the lhs and rhs and use it appropriately.

* fold-const.c (fold_truthop): Build a type for both the lhs and
        rhs and use it appropriately.

From-SVN: r26005
Charles M. Hannum 1999-03-26 15:23:02 -07:00 committed by Jeff Law
parent 97ea717682
commit bd910dcf7a
2 changed files with 52 additions and 25 deletions
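For context (not stated in the commit itself): fold_truthop is the fold-const.c routine that tries to merge two comparisons joined by a truth operator into a single masked comparison when both operands are bitfield references. A minimal C sketch of the kind of source it targets follows; the struct, field, and function names are hypothetical, and the folded form described in the comment is only approximate:

/* Both comparisons read bitfields that live in the same storage unit,
   so fold_truthop can combine them into one bit_field_ref, one mask,
   and one compare, roughly (word & (ll_mask | rl_mask)) ==
   (l_const | r_const).  */
struct flags
{
  unsigned a : 4;
  unsigned b : 4;
};

int
both_match (struct flags *p)
{
  return p->a == 1 && p->b == 2;
}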

gcc/ChangeLog

@@ -65,6 +65,9 @@ Fri Mar 26 10:43:47 1999  Nick Clifton  <nickc@cygnus.com>
 Fri Mar 26 01:59:15 1999  "Charles M. Hannum" <root@ihack.net>
+	* fold-const.c (fold_truthop): Build a type for both the lhs and
+	rhs and use it appropriately.
 	* fold-const.c (fold_truthop): Mask the lhs and rhs after merging
 	adjacent bitfield references.

gcc/fold-const.c

@@ -3710,7 +3710,7 @@ fold_truthop (code, truth_type, lhs, rhs)
   tree ll_mask, lr_mask, rl_mask, rr_mask;
   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
   tree l_const, r_const;
-  tree type, result;
+  tree lntype, rntype, result;
   int first_bit, end_bit;
   int volatilep;
@@ -3848,7 +3848,7 @@ fold_truthop (code, truth_type, lhs, rhs)
   lnbitsize = GET_MODE_BITSIZE (lnmode);
   lnbitpos = first_bit & ~ (lnbitsize - 1);
-  type = type_for_size (lnbitsize, 1);
+  lntype = type_for_size (lnbitsize, 1);
   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
   if (BYTES_BIG_ENDIAN)
@@ -3857,19 +3857,19 @@ fold_truthop (code, truth_type, lhs, rhs)
       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
     }
-  ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
+  ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
                          size_int (xll_bitpos), 0);
-  rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
+  rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
                          size_int (xrl_bitpos), 0);
   if (l_const)
     {
-      l_const = convert (type, l_const);
+      l_const = convert (lntype, l_const);
       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                         fold (build1 (BIT_NOT_EXPR,
-                                                      type, ll_mask)),
+                                                      lntype, ll_mask)),
                                         0)))
        {
          warning ("comparison is always %d", wanted_code == NE_EXPR);
@@ -3881,12 +3881,12 @@ fold_truthop (code, truth_type, lhs, rhs)
        }
   if (r_const)
     {
-      r_const = convert (type, r_const);
+      r_const = convert (lntype, r_const);
       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                         fold (build1 (BIT_NOT_EXPR,
-                                                      type, rl_mask)),
+                                                      lntype, rl_mask)),
                                         0)))
        {
          warning ("comparison is always %d", wanted_code == NE_EXPR);
@@ -3919,6 +3919,7 @@ fold_truthop (code, truth_type, lhs, rhs)
       rnbitsize = GET_MODE_BITSIZE (rnmode);
       rnbitpos = first_bit & ~ (rnbitsize - 1);
+      rntype = type_for_size (rnbitsize, 1);
       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
       if (BYTES_BIG_ENDIAN)
@@ -3927,29 +3928,30 @@ fold_truthop (code, truth_type, lhs, rhs)
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }
-      lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
+      lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
                             size_int (xlr_bitpos), 0);
-      rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
+      rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
                             size_int (xrr_bitpos), 0);
       /* Make a mask that corresponds to both fields being compared.
          Do this for both items being compared.  If the masks agree,
-         and the bits being compared are in the same position, then
-         we can do this by masking both and comparing the masked
-         results.  */
+         and the bits being compared are in the same position, and the
+         types agree, then we can do this by masking both and comparing
+         the masked results.  */
       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
       if (operand_equal_p (ll_mask, lr_mask, 0)
-          && lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
+          && lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos
+          && lntype == rntype)
        {
-          lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
+          lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                                     ll_unsignedp || rl_unsignedp);
-          rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
+          rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
                                     lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            {
-              lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
-              rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
+              lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
+              rhs = build (BIT_AND_EXPR, rntype, rhs, ll_mask);
            }
          return build (wanted_code, truth_type, lhs, rhs);
        }
@@ -3966,17 +3968,39 @@ fold_truthop (code, truth_type, lhs, rhs)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
-          lhs = make_bit_field_ref (ll_inner, type, ll_bitsize + rl_bitsize,
+          tree type;
+          lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
                                     MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
+          rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
+                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
+          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
+                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
+          /* Convert to the smaller type before masking out unwanted bits.  */
+          type = lntype;
+          if (lntype != rntype)
+            {
+              if (lnbitsize > rnbitsize)
+                {
+                  lhs = convert (rntype, lhs);
+                  ll_mask = convert (rntype, ll_mask);
+                  type = rntype;
+                }
+              else if (lnbitsize < rnbitsize)
+                {
+                  rhs = convert (lntype, rhs);
+                  lr_mask = convert (lntype, lr_mask);
+                  type = lntype;
+                }
+            }
          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
-          rhs = make_bit_field_ref (lr_inner, type, lr_bitsize + rr_bitsize,
-                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
-          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
-                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
@@ -4011,12 +4035,12 @@ fold_truthop (code, truth_type, lhs, rhs)
      reference we will make.  Unless the mask is all ones the width of
      that field, perform the mask operation.  Then compare with the
      merged constant.  */
-  result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
+  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                                ll_unsignedp || rl_unsignedp);
   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
   if (! all_ones_mask_p (ll_mask, lnbitsize))
-    result = build (BIT_AND_EXPR, type, result, ll_mask);
+    result = build (BIT_AND_EXPR, lntype, result, ll_mask);
   return build (wanted_code, truth_type, result,
                 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
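The case motivating the lntype/rntype split can be sketched as follows (a hypothetical example, not taken from the commit): when the two groups of fields are accessed through differently sized modes, each side of the merged comparison needs its own type, and the adjacent-bitfield path above converts to the smaller of the two before applying the BIT_AND_EXPR masks. The struct, field, and function names are illustrative only, and whether the rhs group really gets a narrower mode depends on the target's get_best_mode choice.

/* The lhs fields are likely read as one 32-bit word (lntype), while the
   rhs fields may be read through a narrower mode (rntype).  */
struct mixed
{
  unsigned int  a : 3, b : 3;
  unsigned char c : 3, d : 3;
};

int
pairwise_equal (struct mixed *p)
{
  /* May be folded into two wide bit_field_refs, masked and compared.  */
  return p->a == p->c && p->b == p->d;
}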