mirror of
git://gcc.gnu.org/git/gcc.git
synced 2025-04-02 06:40:25 +08:00
expr.c (store_field): Do a block copy from BLKmode to BLKmode-like.
* expr.c (store_field): Do a block copy from BLKmode to BLKmode-like. (get_inner_reference): Use BLKmode for byte-aligned BLKmode bitfields. From-SVN: r136699
This commit is contained in:
parent
b5a6bf2e7e
commit
bd323ce58c
@@ -1,3 +1,8 @@
|
||||
2008-06-12 Eric Botcazou <ebotcazou@adacore.com>
|
||||
|
||||
* expr.c (store_field): Do a block copy from BLKmode to BLKmode-like.
|
||||
(get_inner_reference): Use BLKmode for byte-aligned BLKmode bitfields.
|
||||
|
||||
2008-06-12 Jakub Jelinek <jakub@redhat.com>
|
||||
|
||||
PR middle-end/36506
|
||||
|
54
gcc/expr.c
54
gcc/expr.c
@@ -5795,13 +5795,19 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
|
||||
&& mode != TYPE_MODE (TREE_TYPE (exp)))
|
||||
temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
|
||||
|
||||
/* If the modes of TARGET and TEMP are both BLKmode, both
|
||||
/* If the modes of TEMP and TARGET are both BLKmode, both
|
||||
must be in memory and BITPOS must be aligned on a byte
|
||||
boundary. If so, we simply do a block copy. */
|
||||
if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
|
||||
boundary. If so, we simply do a block copy. Likewise
|
||||
for a BLKmode-like TARGET. */
|
||||
if (GET_MODE (temp) == BLKmode
|
||||
&& (GET_MODE (target) == BLKmode
|
||||
|| (MEM_P (target)
|
||||
&& GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
|
||||
&& (bitpos % BITS_PER_UNIT) == 0
|
||||
&& (bitsize % BITS_PER_UNIT) == 0)))
|
||||
{
|
||||
gcc_assert (MEM_P (target) && MEM_P (temp)
|
||||
&& !(bitpos % BITS_PER_UNIT));
|
||||
&& (bitpos % BITS_PER_UNIT) == 0);
|
||||
|
||||
target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
|
||||
emit_block_move (target, temp,
|
||||
@@ -5847,12 +5853,11 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
|
||||
If any of the extraction expressions is volatile,
|
||||
we store 1 in *PVOLATILEP. Otherwise we don't change that.
|
||||
|
||||
If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
|
||||
is a mode that can be used to access the field. In that case, *PBITSIZE
|
||||
is redundant.
|
||||
If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
|
||||
Otherwise, it is a mode that can be used to access the field.
|
||||
|
||||
If the field describes a variable-sized object, *PMODE is set to
|
||||
VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
|
||||
BLKmode and *PBITSIZE is set to -1. An access cannot be made in
|
||||
this case, but the address of the object can be found.
|
||||
|
||||
If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
|
||||
@@ -5877,6 +5882,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
|
||||
{
|
||||
tree size_tree = 0;
|
||||
enum machine_mode mode = VOIDmode;
|
||||
bool blkmode_bitfield = false;
|
||||
tree offset = size_zero_node;
|
||||
tree bit_offset = bitsize_zero_node;
|
||||
|
||||
@@ -5884,11 +5890,14 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
|
||||
outermost expression. */
|
||||
if (TREE_CODE (exp) == COMPONENT_REF)
|
||||
{
|
||||
size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
|
||||
if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
|
||||
mode = DECL_MODE (TREE_OPERAND (exp, 1));
|
||||
tree field = TREE_OPERAND (exp, 1);
|
||||
size_tree = DECL_SIZE (field);
|
||||
if (!DECL_BIT_FIELD (field))
|
||||
mode = DECL_MODE (field);
|
||||
else if (DECL_MODE (field) == BLKmode)
|
||||
blkmode_bitfield = true;
|
||||
|
||||
*punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
|
||||
*punsignedp = DECL_UNSIGNED (field);
|
||||
}
|
||||
else if (TREE_CODE (exp) == BIT_FIELD_REF)
|
||||
{
|
||||
@@ -5922,8 +5931,6 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
|
||||
*pbitsize = tree_low_cst (size_tree, 1);
|
||||
}
|
||||
|
||||
*pmode = mode;
|
||||
|
||||
/* Compute cumulative bit-offset for nested component-refs and array-refs,
|
||||
and find the ultimate containing object. */
|
||||
while (1)
|
||||
@@ -6018,14 +6025,25 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
|
||||
if (double_int_fits_in_shwi_p (tem))
|
||||
{
|
||||
*pbitpos = double_int_to_shwi (tem);
|
||||
*poffset = NULL_TREE;
|
||||
return exp;
|
||||
*poffset = offset = NULL_TREE;
|
||||
}
|
||||
}
|
||||
|
||||
/* Otherwise, split it up. */
|
||||
*pbitpos = tree_low_cst (bit_offset, 0);
|
||||
*poffset = offset;
|
||||
if (offset)
|
||||
{
|
||||
*pbitpos = tree_low_cst (bit_offset, 0);
|
||||
*poffset = offset;
|
||||
}
|
||||
|
||||
/* We can use BLKmode for a byte-aligned BLKmode bitfield. */
|
||||
if (mode == VOIDmode
|
||||
&& blkmode_bitfield
|
||||
&& (*pbitpos % BITS_PER_UNIT) == 0
|
||||
&& (*pbitsize % BITS_PER_UNIT) == 0)
|
||||
*pmode = BLKmode;
|
||||
else
|
||||
*pmode = mode;
|
||||
|
||||
return exp;
|
||||
}
|
||||
|
Loading…
x
Reference in New Issue
Block a user