       /* Storing any naturally aligned field can be done with a simple
          store.  For targets that support fast unaligned memory, any
          naturally sized, unit aligned field can be done directly.  */
-      if (simple_mem_bitfield_p (str_rtx, bitsize, bitnum, fieldmode))
+      if (bitsize == GET_MODE_BITSIZE (fieldmode))
         {
           str_rtx = adjust_bitfield_address (str_rtx, fieldmode,
                                              bitnum / BITS_PER_UNIT);
           emit_move_insn (str_rtx, value);
         }
       else
         {
+          rtx temp;
+
           str_rtx = narrow_bit_field_mem (str_rtx, fieldmode, bitsize, bitnum,
                                           &bitnum);
-          /* Explicitly override the C/C++ memory model; ignore the
-             bit range so that we can do the access in the mode mandated
-             by -fstrict-volatile-bitfields instead.  */
-          store_fixed_bit_field_1 (str_rtx, bitsize, bitnum, value);
+          temp = copy_to_reg (str_rtx);
+          if (!store_bit_field_1 (temp, bitsize, bitnum, 0, 0,
+                                  fieldmode, value, true))
+            gcc_unreachable ();
+
+          emit_move_insn (str_rtx, temp);
         }
       return;
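
As a concrete illustration of the store path above, here is a minimal sketch, not taken from the patch: the struct and function names are made up, and it assumes a target where unsigned int is 32-bit SImode. Under -fstrict-volatile-bitfields a partial-width volatile bitfield store is expected to go through the else branch: the containing word is loaded, the field bits are inserted in a register by store_bit_field_1, and the whole word is written back with one emit_move_insn.

/* Illustrative sketch only, not part of the patch.  Assumes unsigned
   int is SImode (32 bits); names are hypothetical.  */
struct hw_reg
{
  volatile unsigned int mode   : 4;    /* partial-width field */
  volatile unsigned int status : 28;
};

void
set_mode (struct hw_reg *r)
{
  /* bitsize (4) != GET_MODE_BITSIZE (SImode), so the else branch runs:
     one SImode load, bit insertion in a register, one SImode store.  */
  r->mode = 5;
}
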
   if (strict_volatile_bitfield_p (str_rtx, bitsize, bitnum, mode1, 0, 0))
     {
-      rtx result;
-
       /* Extraction of a full MODE1 value can be done with a load as long as
          the field is on a byte boundary and is sufficiently aligned.  */
-      if (simple_mem_bitfield_p (str_rtx, bitsize, bitnum, mode1))
-        result = adjust_bitfield_address (str_rtx, mode1,
-                                          bitnum / BITS_PER_UNIT);
-      else
+      if (bitsize == GET_MODE_BITSIZE (mode1))
         {
-          str_rtx = narrow_bit_field_mem (str_rtx, mode1, bitsize, bitnum,
-                                          &bitnum);
-          result = extract_fixed_bit_field_1 (mode, str_rtx, bitsize, bitnum,
-                                              target, unsignedp);
+          rtx result = adjust_bitfield_address (str_rtx, mode1,
+                                                bitnum / BITS_PER_UNIT);
+          return convert_extracted_bit_field (result, mode, tmode, unsignedp);
         }
-      return convert_extracted_bit_field (result, mode, tmode, unsignedp);
+      str_rtx = narrow_bit_field_mem (str_rtx, mode1, bitsize, bitnum,
+                                      &bitnum);
+      str_rtx = copy_to_reg (str_rtx);
     }
-
+
   return extract_bit_field_1 (str_rtx, bitsize, bitnum, unsignedp,
                               target, mode, tmode, true);
 }
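
For the extract side, a similar hypothetical sketch (same assumptions as the store example above): a read of a full-width volatile bitfield is expected to take the simple-load branch, while a partial-width read loads the containing word once through narrow_bit_field_mem and copy_to_reg and then extracts the bits from the register copy via extract_bit_field_1.

/* Illustrative sketch only, not part of the patch.  Assumes unsigned
   int is SImode (32 bits); names are hypothetical.  */
struct hw_reg
{
  volatile unsigned int ctrl : 32;     /* full SImode width */
  volatile unsigned int mode : 4;
  volatile unsigned int pad  : 28;
};

unsigned int
get_ctrl (struct hw_reg *r)
{
  /* bitsize == GET_MODE_BITSIZE (SImode): a single SImode load.  */
  return r->ctrl;
}

unsigned int
get_mode (struct hw_reg *r)
{
  /* Partial width: the containing SImode word is loaded once and the
     4-bit field is extracted from the register copy.  */
  return r->mode;
}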