about summary refs log tree commit diff
path: root/gcc/expr.c
diff options
context:
space:
mode:
authorOlivier Hainque <hainque@act-europe.fr>2003-04-16 23:30:35 +0200
committerRichard Kenner <kenner@gcc.gnu.org>2003-04-16 17:30:35 -0400
commit15b19a7dbf433b1936eecd042ce7dedbb23220f9 (patch)
tree3af28f6f829e935d079f27a0ee941ec6ec92edf8 /gcc/expr.c
parentf2d80e7e8132037e56bdd430ffaf50a9dfa71593 (diff)
downloadgcc-15b19a7dbf433b1936eecd042ce7dedbb23220f9.zip
gcc-15b19a7dbf433b1936eecd042ce7dedbb23220f9.tar.gz
gcc-15b19a7dbf433b1936eecd042ce7dedbb23220f9.tar.bz2
expr.c (store_field): Force usage of bitfield instructions when the field position requires it...
* expr.c (store_field): Force usage of bitfield instructions when the field position requires it, whatever SLOW_UNALIGNED_ACCESS. (expand_expr, case BIT_FIELD_REF): Likewise. From-SVN: r65701
Diffstat (limited to 'gcc/expr.c')
-rw-r--r--gcc/expr.c10
1 file changed, 5 insertions, 5 deletions
diff --git a/gcc/expr.c b/gcc/expr.c
index d203424..abf0875 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -5564,8 +5564,9 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
|| GET_CODE (target) == SUBREG
/* If the field isn't aligned enough to store as an ordinary memref,
store it as a bit field. */
- || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
- && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
+ || (mode != BLKmode
+ && ((SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
+ && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)))
|| bitpos % GET_MODE_ALIGNMENT (mode)))
/* If the RHS and field are a constant size and the size of the
RHS isn't the same size as the bitfield, we must use bitfield
@@ -7504,9 +7505,8 @@ expand_expr (exp, target, tmode, modifier)
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
|| (mode1 != BLKmode
- && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
- && ((TYPE_ALIGN (TREE_TYPE (tem))
- < GET_MODE_ALIGNMENT (mode))
+ && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
+ && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
|| (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
/* If the type and the field are a constant size and the
size of the type isn't the same size as the bitfield,