From 9e5f281f99de583a0ca9d50d984bd3a33910f430 Mon Sep 17 00:00:00 2001
From: Olivier Hainque <hainque@act-europe.fr>
Date: Mon, 5 May 2003 19:56:35 +0200
Subject: [PATCH] expr.c (expand_expr, [...]): Refine the test forcing usage of
 bitfield instructions for mode1 != BLKmode...

	* expr.c (expand_expr, case BIT_FIELD_REF): Refine the test forcing
	usage of bitfield instructions for mode1 != BLKmode, only ignoring
	SLOW_UNALIGNED_ACCESS if the field is not byte aligned.
	(store_field): Likewise.

From-SVN: r66492
---
 gcc/ChangeLog |  7 +++++++
 gcc/expr.c    | 12 +++++++-----
 2 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 84bd4452014..0f8cad843e3 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,10 @@
+2003-05-05  Olivier Hainque  <hainque@act-europe.fr>
+
+	* expr.c (expand_expr, case BIT_FIELD_REF): Refine the test forcing
+	usage of bitfield instructions for mode1 != BLKmode, only ignoring
+	SLOW_UNALIGNED_ACCESS if the field is not byte aligned.
+	(store_field): Likewise.
+
 2003-05-05  Aldy Hernandez  <aldyh@redhat.com>
 
 	* config/rs6000/rs6000.c (rs6000_expand_binop_builtin): Add
diff --git a/gcc/expr.c b/gcc/expr.c
index ba7db159d97..6659d61f714 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -5626,9 +5626,10 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
       /* If the field isn't aligned enough to store as an ordinary memref,
 	 store it as a bit field.  */
       || (mode != BLKmode
-	  && ((SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
-	       && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)))
-	      || bitpos % GET_MODE_ALIGNMENT (mode)))
+	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
+		|| bitpos % GET_MODE_ALIGNMENT (mode))
+	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
+	      || (bitpos % BITS_PER_UNIT != 0)))
       /* If the RHS and field are a constant size and the size of the
 	 RHS isn't the same size as the bitfield, we must use bitfield
 	 operations.  */
@@ -7571,9 +7572,10 @@ expand_expr (exp, target, tmode, modifier)
 	     /* If the field isn't aligned enough to fetch as a memref,
 		fetch it as a bit field.  */
 	     || (mode1 != BLKmode
-		 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
+		 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
+		       || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
		      && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
-		     || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
+		     || (bitpos % BITS_PER_UNIT != 0)))
	      /* If the type and the field are a constant size and the
		 size of the type isn't the same size as the bitfield, we
		 must use bitfield operations.  */
-- 
2.30.2