author     Richard Kenner <kenner@vlsi1.ultra.nyu.edu>  2003-04-29 22:48:55 +0000
committer  Richard Kenner <kenner@gcc.gnu.org>          2003-04-29 18:48:55 -0400
commit     0384674ecf6f70a9aab02476619e02972ea6a661 (patch)
tree       0604df65f510f92fd4a32992a4f35775b6f99afe
parent     7f78b6caa0a03fd3af6f122cac7d0143673ae11e (diff)
stor-layout.c (mode_for_size_tree): Use tree_low_cst.
	* stor-layout.c (mode_for_size_tree): Use tree_low_cst.
	(layout_decl, place_field): Likewise.
	Also make minor type and whitespace changes.

From-SVN: r66268
-rw-r--r--  gcc/ChangeLog       4
-rw-r--r--  gcc/stor-layout.c  72
2 files changed, 42 insertions(+), 34 deletions(-)
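Note: the substance of the patch is the switch from TREE_INT_CST_LOW to tree_low_cst when pulling host integers out of tree constants. The sketch below restates the pattern from the mode_for_size_tree hunk with explanatory comments; it assumes GCC's internal tree API of this era (gcc/tree.h) and is a fragment, not a standalone program.

    /* Before: TREE_INT_CST_LOW takes the low HOST_WIDE_INT word of an
       INTEGER_CST with no check that the value actually fits there.  */
    return mode_for_size (TREE_INT_CST_LOW (size), class, limit);

    /* After: tree_low_cst (t, pos) verifies via host_integerp that the
       constant fits in a HOST_WIDE_INT (pos == 1 means treat it as
       unsigned) and aborts if it does not, so an oversized constant is
       caught instead of being silently truncated.  */
    return mode_for_size (tree_low_cst (size, 1), class, limit);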
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 46b4555..70b69cc 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -10,6 +10,10 @@
2003-04-29 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
+ * stor-layout.c (mode_for_size_tree): Use tree_low_cst.
+ (layout_decl, place_field): Likewise.
+ Also make minor type and whitespace changes.
+
* tree.c (save_expr): Don't fold a COMPONENT_REF.
2003-04-29 Olivier Hainque <hainque@act-europe.fr>
diff --git a/gcc/stor-layout.c b/gcc/stor-layout.c
index dad514e..0df34bd 100644
--- a/gcc/stor-layout.c
+++ b/gcc/stor-layout.c
@@ -249,7 +249,7 @@ mode_for_size_tree (size, class, limit)
|| compare_tree_int (size, 1000) > 0)
return BLKmode;
else
- return mode_for_size (TREE_INT_CST_LOW (size), class, limit);
+ return mode_for_size (tree_low_cst (size, 1), class, limit);
}
/* Similar, but never return BLKmode; return the narrowest mode that
@@ -533,7 +533,7 @@ layout_decl (decl, known_align)
if (size != 0 && TREE_CODE (size) == INTEGER_CST
&& compare_tree_int (size, larger_than_size) > 0)
{
- unsigned int size_as_int = TREE_INT_CST_LOW (size);
+ int size_as_int = TREE_INT_CST_LOW (size);
if (compare_tree_int (size, size_as_int) == 0)
warning_with_decl (decl, "size of `%s' is %d bytes", size_as_int);
@@ -1074,25 +1074,28 @@ place_field (rli, field)
if (DECL_BIT_FIELD_TYPE (field)
&& !integer_zerop (DECL_SIZE (field))
&& !integer_zerop (DECL_SIZE (rli->prev_field))
+ && host_integerp (DECL_SIZE (rli->prev_field), 0)
+ && host_integerp (TYPE_SIZE (type), 0)
&& simple_cst_equal (TYPE_SIZE (type),
- TYPE_SIZE (TREE_TYPE (rli->prev_field))) )
+ TYPE_SIZE (TREE_TYPE (rli->prev_field))))
{
/* We're in the middle of a run of equal type size fields; make
sure we realign if we run out of bits. (Not decl size,
type size!) */
- int bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
- tree type_size = TYPE_SIZE(TREE_TYPE(rli->prev_field));
+ HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 0);
if (rli->remaining_in_alignment < bitsize)
{
/* out of bits; bump up to next 'word'. */
rli->offset = DECL_FIELD_OFFSET (rli->prev_field);
- rli->bitpos = size_binop (PLUS_EXPR,
- type_size,
- DECL_FIELD_BIT_OFFSET(rli->prev_field));
+ rli->bitpos
+ = size_binop (PLUS_EXPR, TYPE_SIZE (type),
+ DECL_FIELD_BIT_OFFSET (rli->prev_field));
rli->prev_field = field;
- rli->remaining_in_alignment = TREE_INT_CST_LOW (type_size);
+ rli->remaining_in_alignment
+ = tree_low_cst (TYPE_SIZE (type), 0);
}
+
rli->remaining_in_alignment -= bitsize;
}
else
@@ -1108,26 +1111,24 @@ place_field (rli, field)
if (!integer_zerop (DECL_SIZE (rli->prev_field)))
{
- tree type_size = TYPE_SIZE(TREE_TYPE(rli->prev_field));
- rli->bitpos = size_binop (PLUS_EXPR,
- type_size,
- DECL_FIELD_BIT_OFFSET(rli->prev_field));
+ tree type_size = TYPE_SIZE (TREE_TYPE (rli->prev_field));
+
+ rli->bitpos
+ = size_binop (PLUS_EXPR, type_size,
+ DECL_FIELD_BIT_OFFSET (rli->prev_field));
}
else
- {
- /* We "use up" size zero fields; the code below should behave
- as if the prior field was not a bitfield. */
- prev_saved = NULL;
- }
+ /* We "use up" size zero fields; the code below should behave
+ as if the prior field was not a bitfield. */
+ prev_saved = NULL;
/* Cause a new bitfield to be captured, either this time (if
currently a bitfield) or next time we see one. */
if (!DECL_BIT_FIELD_TYPE(field)
|| integer_zerop (DECL_SIZE (field)))
- {
- rli->prev_field = NULL;
- }
+ rli->prev_field = NULL;
}
+
normalize_rli (rli);
}
@@ -1146,24 +1147,26 @@ place_field (rli, field)
if (!DECL_BIT_FIELD_TYPE (field)
|| ( prev_saved != NULL
? !simple_cst_equal (TYPE_SIZE (type),
- TYPE_SIZE (TREE_TYPE (prev_saved)))
- : !integer_zerop (DECL_SIZE (field)) ))
+ TYPE_SIZE (TREE_TYPE (prev_saved)))
+ : !integer_zerop (DECL_SIZE (field)) ))
{
- unsigned int type_align = 8; /* Never below 8 for compatibility */
+ /* Never smaller than a byte for compatibility. */
+ unsigned int type_align = BITS_PER_UNIT;
/* (When not a bitfield), we could be seeing a flex array (with
no DECL_SIZE). Since we won't be using remaining_in_alignment
until we see a bitfield (and come by here again) we just skip
calculating it. */
-
- if (DECL_SIZE (field) != NULL)
- rli->remaining_in_alignment
- = TREE_INT_CST_LOW (TYPE_SIZE(TREE_TYPE(field)))
- - TREE_INT_CST_LOW (DECL_SIZE (field));
+ if (DECL_SIZE (field) != NULL
+ && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
+ && host_integerp (DECL_SIZE (field), 0))
+ rli->remaining_in_alignment
+ = tree_low_cst (TYPE_SIZE (TREE_TYPE(field)), 0)
+ - tree_low_cst (DECL_SIZE (field), 0);
/* Now align (conventionally) for the new type. */
if (!DECL_PACKED(field))
- type_align = MAX(TYPE_ALIGN (type), type_align);
+ type_align = MAX(TYPE_ALIGN (type), type_align);
if (prev_saved
&& DECL_BIT_FIELD_TYPE (prev_saved)
@@ -1178,6 +1181,7 @@ place_field (rli, field)
type_align = MIN (type_align, maximum_field_alignment);
rli->bitpos = round_up (rli->bitpos, type_align);
+
/* If we really aligned, don't allow subsequent bitfields
to undo that. */
rli->prev_field = NULL;
@@ -1860,10 +1864,10 @@ layout_type (type)
#endif
unsigned int alignment
= set_alignment ? set_alignment : SET_WORD_SIZE;
- int size_in_bits
- = (TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
- - TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1);
- int rounded_size
+ HOST_WIDE_INT size_in_bits
+ = (tree_low_cst (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 0)
+ - tree_low_cst (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0) + 1);
+ HOST_WIDE_INT rounded_size
= ((size_in_bits + alignment - 1) / alignment) * alignment;
if (rounded_size > (int) alignment)
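For reference, the arithmetic in that last hunk computes the bit count of a Pascal-style set type from its domain bounds and rounds it up to a whole number of alignment units. A minimal standalone sketch of the same computation, using a hypothetical domain of 0..68 and an assumed 32-bit SET_WORD_SIZE (neither value comes from the patch):

    #include <stdio.h>

    int main (void)
    {
      /* Stand-ins for what layout_type reads from TYPE_DOMAIN and
         SET_WORD_SIZE; the numbers here are hypothetical.  */
      long min_value = 0, max_value = 68;
      unsigned int alignment = 32;

      /* One bit per element in the domain.  */
      long size_in_bits = max_value - min_value + 1;                /* 69 */

      /* Round up to a multiple of the alignment, as in the hunk.  */
      long rounded_size
        = ((size_in_bits + alignment - 1) / alignment) * alignment; /* 96 */

      printf ("%ld bits -> %ld bits (%ld words)\n",
              size_in_bits, rounded_size, rounded_size / alignment);
      return 0;
    }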