From 0f808b6f0a60d2d487cc361a355d5d42c731607b Mon Sep 17 00:00:00 2001 From: Jan Hubicka Date: Mon, 3 Jan 2000 16:49:51 +0100 Subject: [PATCH] combine.c (expand_compound_operation): Do not convert ZERO_EXTEND to SIGN_EXTEND, convert SIGN_EXTEND to ZERO_EXTEND. * combine.c (expand_compound_operation): Do not convert ZERO_EXTEND to SIGN_EXTEND, convert SIGN_EXTEND to ZERO_EXTEND. (make_compound_operation): Choose cheaper alternative between ZERO_EXTEND and SIGN_EXTEND when sign bit is not set. From-SVN: r31180 --- gcc/ChangeLog | 5 +++ gcc/combine.c | 92 +++++++++++++++++++++++++++++++++++---------------- 2 files changed, 69 insertions(+), 28 deletions(-) diff --git a/gcc/ChangeLog b/gcc/ChangeLog index 3bdb03a06ea..61ce55b04c0 100644 --- a/gcc/ChangeLog +++ b/gcc/ChangeLog @@ -1,5 +1,10 @@ Mon Jan 3 15:33:37 MET 2000 Jan Hubicka + * combine.c (expand_compound_operation): Do not convert ZERO_EXTEND to + SIGN_EXTEND, convert SIGN_EXTEND to ZERO_EXTEND. + (make_compound_operation): Choose cheaper alternative between + ZERO_EXTEND and SIGN_EXTEND when sign bit is not set. + * regclass.c (op_costs): Remove global variable. (record_reg_classes): New parameter "op_costs" and "reg_pref". (record_operand_costs): Break out from ... diff --git a/gcc/combine.c b/gcc/combine.c index 3599168769f..2cb0c4561ab 100644 --- a/gcc/combine.c +++ b/gcc/combine.c @@ -5486,6 +5486,20 @@ expand_compound_operation (x) default: return x; } + /* Convert sign extension to zero extension, if we know that the high + bit is not set, as this is easier to optimize. It will be converted + back to cheaper alternative in make_extraction. 
*/ + if (GET_CODE (x) == SIGN_EXTEND + && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT + && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0))) + & ~ (((unsigned HOST_WIDE_INT) + GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) + >> 1)) + == 0))) + { + rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0)); + return expand_compound_operation (temp); + } /* We can optimize some special cases of ZERO_EXTEND. */ if (GET_CODE (x) == ZERO_EXTEND) @@ -5533,30 +5547,6 @@ expand_compound_operation (x) & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) return SUBREG_REG (XEXP (x, 0)); - /* If sign extension is cheaper than zero extension, then use it - if we know that no extraneous bits are set, and that the high - bit is not set. */ - if (flag_expensive_optimizations - && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT - && ((nonzero_bits (XEXP (x, 0), GET_MODE (x)) - & ~ (((unsigned HOST_WIDE_INT) - GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) - >> 1)) - == 0)) - || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<' - && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - <= HOST_BITS_PER_WIDE_INT) - && (((HOST_WIDE_INT) STORE_FLAG_VALUE - & ~ (((unsigned HOST_WIDE_INT) - GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) - >> 1)) - == 0)))) - { - rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0)); - - if (rtx_cost (temp, SET) < rtx_cost (x, SET)) - return expand_compound_operation (temp); - } } /* If we reach here, we want to return a pair of shifts. The inner @@ -5894,12 +5884,35 @@ make_extraction (mode, inner, pos, pos_rtx, len, ? gen_rtx_CLOBBER (tmode, const0_rtx) : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new))); + if (mode == tmode) + return new; + + /* If we know that no extraneous bits are set, and that the high + bit is not set, convert the extraction to the cheaper of + sign and zero extension, that are equivalent in these cases. 
*/ + if (flag_expensive_optimizations + && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT + && ((nonzero_bits (new, tmode) + & ~ (((unsigned HOST_WIDE_INT) + GET_MODE_MASK (tmode)) + >> 1)) + == 0))) + { + rtx temp = gen_rtx_ZERO_EXTEND (mode, new); + rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new); + + /* Prefer ZERO_EXTENSION, since it gives more information to + backends. */ + if (rtx_cost (temp, SET) < rtx_cost (temp1, SET)) + return temp; + return temp1; + } + /* Otherwise, sign- or zero-extend unless we already are in the proper mode. */ - return (mode == tmode ? new - : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, - mode, new)); + return (gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, + mode, new)); } /* Unless this is a COMPARE or we have a funny memory reference, @@ -6088,7 +6101,30 @@ make_extraction (mode, inner, pos, pos_rtx, len, have to zero extend. Otherwise, we can just use a SUBREG. */ if (pos_rtx != 0 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx))) - pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx); + { + rtx temp = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx); + + /* If we know that no extraneous bits are set, and that the high + bit is not set, convert extraction to cheaper one - either + SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these + cases. */ + if (flag_expensive_optimizations + && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT + && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx)) + & ~ (((unsigned HOST_WIDE_INT) + GET_MODE_MASK (GET_MODE (pos_rtx))) + >> 1)) + == 0))) + { + rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx); + + /* Prefer ZERO_EXTENSION, since it gives more information to + backends. */ + if (rtx_cost (temp1, SET) < rtx_cost (temp, SET)) + temp = temp1; + } + pos_rtx = temp; + } else if (pos_rtx != 0 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx))) pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);