author    Richard Kenner <kenner@vlsi1.ultra.nyu.edu>  2000-02-10 22:05:40 +0000
committer Richard Kenner <kenner@gcc.gnu.org>  2000-02-10 17:05:40 -0500
commit    729a212576ebc14125d4c8e33a5b13ba43eeadec (patch)
tree      6986eb3ff22851f6ba40b08e9a2247fb26755e79 /gcc
parent    5e4900c7f4945f48d9660d2682bbbcce40ec9603 (diff)
download  gcc-729a212576ebc14125d4c8e33a5b13ba43eeadec.zip
          gcc-729a212576ebc14125d4c8e33a5b13ba43eeadec.tar.gz
          gcc-729a212576ebc14125d4c8e33a5b13ba43eeadec.tar.bz2
combine.c (make_extraction, [...]): Avoid warning on mixed-signedness conditionals.
	* combine.c (make_extraction, force_to_mode): Avoid warning on
	mixed-signedness conditionals.
	(make_field_assignment, nonzero_bits): Likewise.
	* expmed.c (store_fixed_bit_field): ALIGN arg now unsigned.
	(store_split_bit_field, extract_split_bit_field): Likewise.
	(extract_fixed_bit_field, store_bit_field,
	* expr.c: Change alignment to be unsigned everywhere.
	(move_by_pieces, store_constructor_field, store_constructor):
	Alignment parm is unsigned.
	(emit_block_move, emit_group_load, emit_group_store): Likewise.
	(clear_storage, emit_push_insn, compare_from_rtx): Likewise.
	(do_compare_rtx_and_jump): Likewise.
	(move_by_pieces_ninsns, clear_by_pieces): Likewise.
	Compare align with GET_MODE_ALIGNMENT.
	(expand_expr_unaligned): Pointer to alignment is pointer to unsigned.
	(get_inner_reference): Likewise.
	(copy_blkmode_from_reg, emit_push_insn): Remove unneeded casts.
	(expand_assignment): Local vars for alignment now unsigned.
	(store_constructor, store_field, expand_expr, do_jump): Likewise.
	(do_compare_and_jump): Likewise.
	(store_field): Call new function expr_align.
	* expr.h (emit_block_move, emit_group_load, emit_group_store):
	Alignment arg now unsigned.
	(clear_storage, emit_push_insn, compare_from_rtx): Likewise.
	(do_compare_rtx_and_jump, store_bit_field): Likewise.
	(extract_bit_field): Likewise.
	* fold-const.c (add_double): Add cast to eliminate signedness warning.
	* machmode.h (GET_MODE_ALIGNMENT): Result is unsigned.
	(get_best_mode): Alignment arg is unsigned.
	* rtl.h (move_by_pieces): Likewise.
	* store-layout.c (maximum_field_alignment, set_alignment):
	Now unsigned.
	(layout_decl): Alignment arg is now unsigned.
	Remove unneeded casts.
	(layout_record, layout_union, layout_type): Remove unneeded casts.
	Local alignment variables now unsigned.
	(get_best_mode): Alignment arg now unsigned.
	* tree.c (expr_align): New function.
	* tree.h (expr_align): Likewise.
	(maximum_field_alignment, set_alignment): Now unsigned.
	(get_inner_reference): Alignment argument is now pointer to unsigned.
	* varasm.c (assemble_variable): Add cast to eliminate warning.

From-SVN: r31904
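
Nearly every hunk in this patch serves one of two goals: carry alignment values as unsigned everywhere, and stop conditional expressions from mixing a signed shift result with an unsigned mode mask. A minimal, self-contained sketch of the warning-prone pattern and its fix (the names are illustrative stand-ins, not GCC's actual code; HOST_WIDE_INT is modeled as a macro because that is what makes "unsigned HOST_WIDE_INT" a valid spelling):

    /* HOST_WIDE_INT stands in for GCC's host-wide integer type.  */
    #define HOST_WIDE_INT long

    unsigned HOST_WIDE_INT
    mask_for_len (int len, int width, unsigned HOST_WIDE_INT mode_mask)
    {
      /* Before the patch the second arm was signed:
           len >= width ? mode_mask : ((HOST_WIDE_INT) 1 << len) - 1
         so the ?: mixed signed and unsigned operands and the compiler
         warned.  Casting the 1 to unsigned gives both arms one type.  */
      return len >= width ? mode_mask : ((unsigned HOST_WIDE_INT) 1 << len) - 1;
    }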
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog      45
-rw-r--r--  gcc/combine.c      16
-rw-r--r--  gcc/expmed.c       29
-rw-r--r--  gcc/expr.c        112
-rw-r--r--  gcc/expr.h         32
-rw-r--r--  gcc/fold-const.c    2
-rw-r--r--  gcc/machmode.h      7
-rw-r--r--  gcc/rtl.h           2
-rw-r--r--  gcc/stor-layout.c  82
-rw-r--r--  gcc/tree.c         44
-rw-r--r--  gcc/tree.h          7
-rw-r--r--  gcc/varasm.c        2
12 files changed, 243 insertions, 137 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index ccda7cd..705f84c 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,48 @@
+Thu Feb 10 16:26:49 2000 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
+
+ * combine.c (make_extraction, force_to_mode): Avoid warning on
+ mixed-signedness conditionals.
+ (make_field_assignment, nonzero_bits): Likewise.
+ * expmed.c (store_fixed_bit_field): ALIGN arg now unsigned.
+ (store_split_bit_field, extract_split_bit_field): Likewise.
+ (extract_fixed_bit_field, store_bit_field,
+ * expr.c: Change alignment to be unsigned everywhere.
+ (move_by_pieces, store_constructor_field, store_constructor):
+ Alignment parm is unsigned.
+ (emit_block_move, emit_group_load, emit_group_store): Likewise.
+ (clear_storage, emit_push_insn, compare_from_rtx): Likewise.
+ (do_compare_rtx_and_jump): Likewise.
+ (move_by_pieces_ninsns, clear_by_pieces): Likewise.
+ Compare align with GET_MODE_ALIGNMENT.
+ (expand_expr_unaligned): Pointer to alignment is pointer to unsigned.
+ (get_inner_reference): Likewise.
+ (copy_blkmode_from_reg, emit_push_insn): Remove unneeded casts.
+ (expand_assignment): Local vars for alignment now unsigned.
+ (store_constructor, store_field, expand_expr, do_jump): Likewise.
+ (do_compare_and_jump): Likewise.
+ (store_field): Call new function expr_align.
+ * expr.h (emit_block_move, emit_group_load, emit_group_store):
+ Alignment arg now unsigned.
+ (clear_storage, emit_push_insn, compare_from_rtx): Likewise.
+ (do_compare_rtx_and_jump, store_bit_field): Likewise.
+ (extract_bit_field): Likewise.
+ * fold-const.c (add_double): Add cast to eliminate signedness warning.
+ * machmode.h (GET_MODE_ALIGNMENT): Result is unsigned.
+ (get_best_mode): Alignment arg is unsigned.
+ * rtl.h (move_by_pieces): Likewise.
+ * store-layout.c (maximum_field_alignment, set_alignment):
+ Now unsigned.
+ (layout_decl): Alignment arg is now unsigned.
+ Remove unneeded casts.
+ (layout_record, layout_union, layout_type): Remove unneeded casts.
+ Local alignment variables now unsigned.
+ (get_best_mode): Alignment arg now unsigned.
+ * tree.c (expr_align): New function.
+ * tree.h (expr_align): Likewise.
+ (maximum_field_alignment, set_alignment): Now unsigned.
+ (get_inner_reference): Alignment argument is now pointer to unsigned.
+ * varasm.c (assemble_variable): Add cast to eliminate warning.
+
Thu Feb 10 12:56:47 2000 Jim Wilson <wilson@cygnus.com>
* expmed.c (store_bit_field): If op0 and fieldmode are the same size,
diff --git a/gcc/combine.c b/gcc/combine.c
index d40b6b1..f6b8867 100644
--- a/gcc/combine.c
+++ b/gcc/combine.c
@@ -5891,7 +5891,7 @@ make_extraction (mode, inner, pos, pos_rtx, len,
new = force_to_mode (inner, tmode,
len >= HOST_BITS_PER_WIDE_INT
? GET_MODE_MASK (tmode)
- : ((HOST_WIDE_INT) 1 << len) - 1,
+ : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
NULL_RTX, 0);
/* If this extraction is going into the destination of a SET,
@@ -6112,7 +6112,8 @@ make_extraction (mode, inner, pos, pos_rtx, len,
pos_rtx
|| len + orig_pos >= HOST_BITS_PER_WIDE_INT
? GET_MODE_MASK (wanted_inner_mode)
- : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
+ : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
+ << orig_pos),
NULL_RTX, 0);
}
@@ -6583,7 +6584,8 @@ force_to_mode (x, mode, mask, reg, just_select)
if (op_mode)
fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
? GET_MODE_MASK (op_mode)
- : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
+ : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
+ - 1));
else
fuller_mask = ~ (HOST_WIDE_INT) 0;
@@ -7525,7 +7527,7 @@ make_field_assignment (x)
mode,
GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
? GET_MODE_MASK (mode)
- : ((HOST_WIDE_INT) 1 << len) - 1,
+ : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
dest, 0);
return gen_rtx_combine (SET, VOIDmode, assign, src);
@@ -8110,8 +8112,10 @@ nonzero_bits (x, mode)
/* If this is a typical RISC machine, we only have to worry
about the way loads are extended. */
if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
- ? (nonzero
- & (1L << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))
+ ? (((nonzero
+ & (((unsigned HOST_WIDE_INT) 1
+ << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
+ != 0))
: LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
#endif
{
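
The nonzero_bits hunk above applies the same treatment to a sign-bit test: the old 1L literal could be narrower than HOST_WIDE_INT, and the bare mask left the two arms of the surrounding conditional with different signedness. A hedged sketch of the resulting idiom (sign_bit_set is an illustrative helper, not a function in combine.c):

    #define HOST_WIDE_INT long

    /* Nonzero if the topmost of BITSIZE bits is set in NONZERO.  Shifting
       an unsigned 1 keeps the mask well defined for the top bit, and the
       explicit != 0 yields a plain int truth value.  */
    static int
    sign_bit_set (unsigned HOST_WIDE_INT nonzero, int bitsize)
    {
      return (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitsize - 1))) != 0;
    }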
diff --git a/gcc/expmed.c b/gcc/expmed.c
index 6f31b8d..f938375 100644
--- a/gcc/expmed.c
+++ b/gcc/expmed.c
@@ -34,15 +34,19 @@ Boston, MA 02111-1307, USA. */
#include "real.h"
#include "recog.h"
-static void store_fixed_bit_field PARAMS ((rtx, int, int, int, rtx, int));
-static void store_split_bit_field PARAMS ((rtx, int, int, rtx, int));
+static void store_fixed_bit_field PARAMS ((rtx, int, int, int, rtx,
+ unsigned int));
+static void store_split_bit_field PARAMS ((rtx, int, int, rtx,
+ unsigned int));
static rtx extract_fixed_bit_field PARAMS ((enum machine_mode, rtx, int,
- int, int, rtx, int, int));
+ int, int, rtx, int,
+ unsigned int));
static rtx mask_rtx PARAMS ((enum machine_mode, int,
int, int));
static rtx lshift_value PARAMS ((enum machine_mode, rtx,
int, int));
-static rtx extract_split_bit_field PARAMS ((rtx, int, int, int, int));
+static rtx extract_split_bit_field PARAMS ((rtx, int, int, int,
+ unsigned int));
static void do_cmp_and_jump PARAMS ((rtx, rtx, enum rtx_code,
enum machine_mode, rtx));
@@ -224,7 +228,7 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
int bitnum;
enum machine_mode fieldmode;
rtx value;
- int align;
+ unsigned int align;
int total_size;
{
int unit = (GET_CODE (str_rtx) == MEM) ? BITS_PER_UNIT : BITS_PER_WORD;
@@ -508,7 +512,7 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
if (bestmode == VOIDmode
|| (SLOW_UNALIGNED_ACCESS (bestmode, align)
- && GET_MODE_SIZE (bestmode) > align))
+ && GET_MODE_SIZE (bestmode) > (int) align))
goto insv_loses;
/* Adjust address to point to the containing unit of that mode. */
@@ -623,7 +627,7 @@ store_fixed_bit_field (op0, offset, bitsize, bitpos, value, struct_align)
register rtx op0;
register int offset, bitsize, bitpos;
register rtx value;
- int struct_align;
+ unsigned int struct_align;
{
register enum machine_mode mode;
int total_bits = BITS_PER_WORD;
@@ -794,7 +798,7 @@ store_split_bit_field (op0, bitsize, bitpos, value, align)
rtx op0;
int bitsize, bitpos;
rtx value;
- int align;
+ unsigned int align;
{
int unit;
int bitsdone = 0;
@@ -951,7 +955,7 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
int unsignedp;
rtx target;
enum machine_mode mode, tmode;
- int align;
+ unsigned int align;
int total_size;
{
int unit = (GET_CODE (str_rtx) == MEM) ? BITS_PER_UNIT : BITS_PER_WORD;
@@ -1391,7 +1395,7 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
if (bestmode == VOIDmode
|| (SLOW_UNALIGNED_ACCESS (bestmode, align)
- && GET_MODE_SIZE (bestmode) > align))
+ && GET_MODE_SIZE (bestmode) > (int) align))
goto extv_loses;
/* Compute offset as multiple of this unit,
@@ -1530,7 +1534,7 @@ extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos,
register rtx op0, target;
register int offset, bitsize, bitpos;
int unsignedp;
- int align;
+ unsigned int align;
{
int total_bits = BITS_PER_WORD;
enum machine_mode mode;
@@ -1748,7 +1752,8 @@ lshift_value (mode, value, bitpos, bitsize)
static rtx
extract_split_bit_field (op0, bitsize, bitpos, unsignedp, align)
rtx op0;
- int bitsize, bitpos, unsignedp, align;
+ int bitsize, bitpos, unsignedp;
+ unsigned int align;
{
int unit;
int bitsdone = 0;
diff --git a/gcc/expr.c b/gcc/expr.c
index 087aaa3..26e23f06 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -132,21 +132,21 @@ extern struct obstack permanent_obstack;
static rtx get_push_address PARAMS ((int));
static rtx enqueue_insn PARAMS ((rtx, rtx));
-static int move_by_pieces_ninsns PARAMS ((unsigned int, int));
+static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
struct move_by_pieces *));
-static void clear_by_pieces PARAMS ((rtx, int, int));
+static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
enum machine_mode,
struct clear_by_pieces *));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode,
- tree, tree, int, int));
-static void store_constructor PARAMS ((tree, rtx, int, int, int));
+ tree, tree, unsigned int, int));
+static void store_constructor PARAMS ((tree, rtx, unsigned int, int, int));
static rtx store_field PARAMS ((rtx, int, int, enum machine_mode,
- tree, enum machine_mode, int, int,
- int, int));
+ tree, enum machine_mode, int,
+ unsigned int, int, int));
static enum memory_use_mode
get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
@@ -155,7 +155,7 @@ static int safe_from_p PARAMS ((rtx, tree, int));
static int fixed_type_p PARAMS ((tree));
static rtx var_rtx PARAMS ((tree));
static int readonly_fields_p PARAMS ((tree));
-static rtx expand_expr_unaligned PARAMS ((tree, int *));
+static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment PARAMS ((tree, int, int));
static void preexpand_calls PARAMS ((tree));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
@@ -1360,7 +1360,8 @@ convert_modes (mode, oldmode, x, unsignedp)
void
move_by_pieces (to, from, len, align)
rtx to, from;
- int len, align;
+ int len;
+ unsigned int align;
{
struct move_by_pieces data;
rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
@@ -1455,7 +1456,7 @@ move_by_pieces (to, from, len, align)
icode = mov_optab->handlers[(int) mode].insn_code;
if (icode != CODE_FOR_nothing
&& align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
- GET_MODE_SIZE (mode)))
+ (unsigned int) GET_MODE_SIZE (mode)))
move_by_pieces_1 (GEN_FCN (icode), mode, &data);
max_size = GET_MODE_SIZE (mode);
@@ -1472,7 +1473,7 @@ move_by_pieces (to, from, len, align)
static int
move_by_pieces_ninsns (l, align)
unsigned int l;
- int align;
+ unsigned int align;
{
register int n_insns = 0;
int max_size = MOVE_MAX + 1;
@@ -1496,8 +1497,7 @@ move_by_pieces_ninsns (l, align)
icode = mov_optab->handlers[(int) mode].insn_code;
if (icode != CODE_FOR_nothing
- && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
- GET_MODE_SIZE (mode)))
+ && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
max_size = GET_MODE_SIZE (mode);
@@ -1574,7 +1574,7 @@ rtx
emit_block_move (x, y, size, align)
rtx x, y;
rtx size;
- int align;
+ unsigned int align;
{
rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
@@ -1883,7 +1883,8 @@ move_block_from_reg (regno, x, nregs, size)
void
emit_group_load (dst, orig_src, ssize, align)
rtx dst, orig_src;
- int align, ssize;
+ unsigned int align;
+ int ssize;
{
rtx *tmps, src;
int start, i;
@@ -1926,13 +1927,13 @@ emit_group_load (dst, orig_src, ssize, align)
shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
bytelen = ssize - bytepos;
if (bytelen <= 0)
- abort();
+ abort ();
}
/* Optimize the access just a bit. */
if (GET_CODE (src) == MEM
- && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
- && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
+ && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
+ && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
&& bytelen == GET_MODE_SIZE (mode))
{
tmps[i] = gen_reg_rtx (mode);
@@ -1979,7 +1980,8 @@ emit_group_load (dst, orig_src, ssize, align)
void
emit_group_store (orig_dst, src, ssize, align)
rtx orig_dst, src;
- int ssize, align;
+ int ssize;
+ unsigned int align;
{
rtx *tmps, dst;
int start, i;
@@ -2063,21 +2065,18 @@ emit_group_store (orig_dst, src, ssize, align)
/* Optimize the access just a bit. */
if (GET_CODE (dst) == MEM
- && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
- && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
+ && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
+ && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
&& bytelen == GET_MODE_SIZE (mode))
- {
- emit_move_insn (change_address (dst, mode,
- plus_constant (XEXP (dst, 0),
- bytepos)),
- tmps[i]);
- }
+ emit_move_insn (change_address (dst, mode,
+ plus_constant (XEXP (dst, 0),
+ bytepos)),
+ tmps[i]);
else
- {
- store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
+ store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
mode, tmps[i], align, ssize);
- }
}
+
emit_queue();
/* Copy from the pseudo into the (probable) hard reg. */
@@ -2092,18 +2091,17 @@ emit_group_store (orig_dst, src, ssize, align)
The primary purpose of this routine is to handle functions
that return BLKmode structures in registers. Some machines
(the PA for example) want to return all small structures
- in registers regardless of the structure's alignment.
- */
+ in registers regardless of the structure's alignment. */
rtx
-copy_blkmode_from_reg(tgtblk,srcreg,type)
+copy_blkmode_from_reg (tgtblk,srcreg,type)
rtx tgtblk;
rtx srcreg;
tree type;
{
int bytes = int_size_in_bytes (type);
rtx src = NULL, dst = NULL;
- int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
+ int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
int bitpos, xbitpos, big_endian_correction = 0;
if (tgtblk == 0)
@@ -2232,7 +2230,8 @@ use_group_regs (call_fusage, regs)
static void
clear_by_pieces (to, len, align)
rtx to;
- int len, align;
+ int len;
+ unsigned int align;
{
struct clear_by_pieces data;
rtx to_addr = XEXP (to, 0);
@@ -2302,8 +2301,7 @@ clear_by_pieces (to, len, align)
icode = mov_optab->handlers[(int) mode].insn_code;
if (icode != CODE_FOR_nothing
- && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
- GET_MODE_SIZE (mode)))
+ && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
max_size = GET_MODE_SIZE (mode);
@@ -2361,7 +2359,7 @@ rtx
clear_storage (object, size, align)
rtx object;
rtx size;
- int align;
+ unsigned int align;
{
#ifdef TARGET_MEM_FUNCTIONS
static tree fn;
@@ -2905,7 +2903,7 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra,
enum machine_mode mode;
tree type;
rtx size;
- int align;
+ unsigned int align;
int partial;
rtx reg;
int extra;
@@ -3066,7 +3064,7 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra,
/* TEMP is the address of the block. Copy the data there. */
if (GET_CODE (size) == CONST_INT
- && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
+ && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
{
move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
INTVAL (size), align);
@@ -3328,7 +3326,7 @@ expand_assignment (to, from, want_value, suggest_reg)
int unsignedp;
int volatilep = 0;
tree tem;
- int alignment;
+ unsigned int alignment;
push_temp_slots ();
tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
@@ -4053,7 +4051,7 @@ store_constructor_field (target, bitsize, bitpos,
int bitsize, bitpos;
enum machine_mode mode;
tree exp, type;
- int align;
+ unsigned int align;
int cleared;
{
if (TREE_CODE (exp) == CONSTRUCTOR
@@ -4092,7 +4090,7 @@ static void
store_constructor (exp, target, align, cleared, size)
tree exp;
rtx target;
- int align;
+ unsigned int align;
int cleared;
int size;
{
@@ -4361,7 +4359,7 @@ store_constructor (exp, target, align, cleared, size)
int bitpos;
int unsignedp;
tree value = TREE_VALUE (elt);
- int align = TYPE_ALIGN (TREE_TYPE (value));
+ unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
tree index = TREE_PURPOSE (elt);
rtx xtarget = target;
@@ -4711,7 +4709,7 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
tree exp;
enum machine_mode value_mode;
int unsignedp;
- int align;
+ unsigned int align;
int total_size;
int alias_set;
{
@@ -4809,6 +4807,8 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
boundary. If so, we simply do a block copy. */
if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
{
+ unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
+
if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
|| bitpos % BITS_PER_UNIT != 0)
abort ();
@@ -4817,12 +4817,8 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
plus_constant (XEXP (target, 0),
bitpos / BITS_PER_UNIT));
- /* Make sure that ALIGN is no stricter than the alignment of
- EXP. */
- if (TREE_CODE (exp) == VAR_DECL)
- align = MIN (DECL_ALIGN (exp) / BITS_PER_UNIT, align);
- else
- align = MIN (TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT, align);
+ /* Make sure that ALIGN is no stricter than the alignment of EXP. */
+ align = MIN (exp_align, align);
/* Find an alignment that is consistent with the bit position. */
while ((bitpos % (align * BITS_PER_UNIT)) != 0)
@@ -4928,7 +4924,7 @@ get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
enum machine_mode *pmode;
int *punsignedp;
int *pvolatilep;
- int *palignment;
+ unsigned int *palignment;
{
tree orig_exp = exp;
tree size_tree = 0;
@@ -6610,7 +6606,7 @@ expand_expr (exp, target, tmode, modifier)
int bitpos;
tree offset;
int volatilep = 0;
- int alignment;
+ unsigned int alignment;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode1, &unsignedp, &volatilep,
&alignment);
@@ -8477,7 +8473,7 @@ expand_expr (exp, target, tmode, modifier)
static rtx
expand_expr_unaligned (exp, palign)
register tree exp;
- int *palign;
+ unsigned int *palign;
{
register rtx op0;
tree type = TREE_TYPE (exp);
@@ -8607,7 +8603,7 @@ expand_expr_unaligned (exp, palign)
int bitpos;
tree offset;
int volatilep = 0;
- int alignment;
+ unsigned int alignment;
int unsignedp;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode1, &unsignedp, &volatilep,
@@ -9340,7 +9336,7 @@ do_jump (exp, if_false_label, if_true_label)
tree type;
tree offset;
int volatilep = 0;
- int alignment;
+ unsigned int alignment;
/* Get description of this reference. We don't actually care
about the underlying object here. */
@@ -9804,7 +9800,7 @@ compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
int unsignedp;
enum machine_mode mode;
rtx size;
- int align;
+ unsigned int align;
{
rtx tem;
@@ -9876,7 +9872,7 @@ do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
int unsignedp;
enum machine_mode mode;
rtx size;
- int align;
+ unsigned int align;
rtx if_false_label, if_true_label;
{
rtx tem;
@@ -9981,7 +9977,7 @@ do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
enum rtx_code signed_code, unsigned_code;
rtx if_false_label, if_true_label;
{
- int align0, align1;
+ unsigned int align0, align1;
register rtx op0, op1;
register tree type;
register enum machine_mode mode;
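
Besides the type changes, move_by_pieces_ninsns and clear_by_pieces now compare the known alignment directly against GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT when deciding whether a mode-sized move is safe. A rough, self-contained sketch of that style of test, using sizeof (long) as a stand-in for the word mode's size and alignment (an assumption, not GCC's macros):

    #include <stddef.h>

    /* Can LEN bytes at an address known to be ALIGN-byte aligned be copied
       with word-sized moves?  Compare the known alignment against the
       alignment the access mode requires, both in bytes.  */
    static int
    word_moves_ok (size_t len, unsigned int align)
    {
      return len % sizeof (long) == 0 && align >= sizeof (long);
    }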
diff --git a/gcc/expr.h b/gcc/expr.h
index 95c6942..ef8622a 100644
--- a/gcc/expr.h
+++ b/gcc/expr.h
@@ -942,10 +942,11 @@ extern void convert_move PARAMS ((rtx, rtx, int));
extern rtx convert_to_mode PARAMS ((enum machine_mode, rtx, int));
/* Convert an rtx to MODE from OLDMODE and return the result. */
-extern rtx convert_modes PARAMS ((enum machine_mode, enum machine_mode, rtx, int));
+extern rtx convert_modes PARAMS ((enum machine_mode, enum machine_mode,
+ rtx, int));
/* Emit code to move a block Y to a block X. */
-extern rtx emit_block_move PARAMS ((rtx, rtx, rtx, int));
+extern rtx emit_block_move PARAMS ((rtx, rtx, rtx, unsigned int));
/* Copy all or part of a value X into registers starting at REGNO.
The number of registers to be filled is NREGS. */
@@ -957,10 +958,11 @@ extern void move_block_from_reg PARAMS ((int, rtx, int, int));
/* Load a BLKmode value into non-consecutive registers represented by a
PARALLEL. */
-extern void emit_group_load PARAMS ((rtx, rtx, int, int));
+extern void emit_group_load PARAMS ((rtx, rtx, int, unsigned int));
+
/* Store a BLKmode value from non-consecutive registers represented by a
PARALLEL. */
-extern void emit_group_store PARAMS ((rtx, rtx, int, int));
+extern void emit_group_store PARAMS ((rtx, rtx, int, unsigned int));
#ifdef TREE_CODE
/* Copy BLKmode object from a set of registers. */
@@ -969,16 +971,18 @@ extern rtx copy_blkmode_from_reg PARAMS ((rtx,rtx,tree));
/* Mark REG as holding a parameter for the next CALL_INSN. */
extern void use_reg PARAMS ((rtx *, rtx));
+
/* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
for the next CALL_INSN. */
extern void use_regs PARAMS ((rtx *, int, int));
+
/* Mark a PARALLEL as holding a parameter for the next CALL_INSN. */
extern void use_group_regs PARAMS ((rtx *, rtx));
/* Write zeros through the storage of OBJECT.
If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is its
alignment. */
-extern rtx clear_storage PARAMS ((rtx, rtx, int));
+extern rtx clear_storage PARAMS ((rtx, rtx, unsigned int));
/* Emit insns to set X from Y. */
extern rtx emit_move_insn PARAMS ((rtx, rtx));
@@ -995,8 +999,9 @@ extern rtx gen_push_operand PARAMS ((void));
#ifdef TREE_CODE
/* Generate code to push something onto the stack, given its mode and type. */
-extern void emit_push_insn PARAMS ((rtx, enum machine_mode, tree, rtx, int,
- int, rtx, int, rtx, rtx, int, rtx));
+extern void emit_push_insn PARAMS ((rtx, enum machine_mode, tree, rtx,
+ unsigned int, int, rtx, int, rtx, rtx,
+ int, rtx));
/* Emit library call. */
extern void emit_library_call PARAMS ((rtx orgfun, int no_queue,
@@ -1056,10 +1061,10 @@ extern void do_jump PARAMS ((tree, rtx, rtx));
/* Generate rtl to compare two rtx's, will call emit_cmp_insn. */
extern rtx compare_from_rtx PARAMS ((rtx, rtx, enum rtx_code, int,
- enum machine_mode, rtx, int));
+ enum machine_mode, rtx, unsigned int));
extern void do_compare_rtx_and_jump PARAMS ((rtx, rtx, enum rtx_code, int,
- enum machine_mode, rtx, int,
- rtx, rtx));
+ enum machine_mode, rtx,
+ unsigned int, rtx, rtx));
/* Generate a tablejump instruction (used for switch statements). */
extern void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
@@ -1193,8 +1198,11 @@ extern rtx hard_libcall_value PARAMS ((enum machine_mode));
of STACK_BOUNDARY / BITS_PER_UNIT. */
extern rtx round_push PARAMS ((rtx));
-extern rtx store_bit_field PARAMS ((rtx, int, int, enum machine_mode, rtx, int, int));
-extern rtx extract_bit_field PARAMS ((rtx, int, int, int, rtx, enum machine_mode, enum machine_mode, int, int));
+extern rtx store_bit_field PARAMS ((rtx, int, int, enum machine_mode, rtx,
+ unsigned int, int));
+extern rtx extract_bit_field PARAMS ((rtx, int, int, int, rtx,
+ enum machine_mode, enum machine_mode,
+ unsigned int, int));
extern rtx expand_mult PARAMS ((enum machine_mode, rtx, rtx, rtx, int));
extern rtx expand_mult_add PARAMS ((rtx, rtx, rtx, rtx,enum machine_mode, int));
extern rtx expand_mult_highpart_adjust PARAMS ((enum machine_mode, rtx, rtx, rtx, rtx, int));
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 84c452d..e98a7f7 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -251,7 +251,7 @@ add_double (l1, h1, l2, h2, lv, hv)
HOST_WIDE_INT l, h;
l = l1 + l2;
- h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
+ h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < (unsigned HOST_WIDE_INT) l1);
*lv = l;
*hv = h;
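
The add_double change is only a cast, but the idiom it protects is the classic carry test for double-word addition: after the low words are added with unsigned wraparound, a carry occurred exactly when the result is smaller than either operand. A standalone sketch with plain unsigned long long in place of GCC's HOST_WIDE_INT machinery:

    void
    add_double_sketch (unsigned long long l1, long long h1,
                       unsigned long long l2, long long h2,
                       unsigned long long *lv, long long *hv)
    {
      unsigned long long l = l1 + l2;

      /* Wraparound means a carry out of the low word; comparing two
         unsigned values keeps the test itself warning-free.  */
      *lv = l;
      *hv = h1 + h2 + (l < l1);
    }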
diff --git a/gcc/machmode.h b/gcc/machmode.h
index 6521faf..1f2fb2b 100644
--- a/gcc/machmode.h
+++ b/gcc/machmode.h
@@ -120,13 +120,14 @@ extern enum machine_mode int_mode_for_mode PARAMS ((enum machine_mode));
/* Find the best mode to use to access a bit field. */
-extern enum machine_mode get_best_mode PARAMS ((int, int, int, enum machine_mode, int));
+extern enum machine_mode get_best_mode PARAMS ((int, int, unsigned int,
+ enum machine_mode, int));
/* Determine alignment, 1<=result<=BIGGEST_ALIGNMENT. */
#define GET_MODE_ALIGNMENT(MODE) \
- MIN (BIGGEST_ALIGNMENT, \
- MAX (1, (GET_MODE_UNIT_SIZE (MODE) * BITS_PER_UNIT)))
+ (unsigned int) MIN (BIGGEST_ALIGNMENT, \
+ MAX (1, (GET_MODE_UNIT_SIZE (MODE) * BITS_PER_UNIT)))
/* For each class, get the narrowest mode in that class. */
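
GET_MODE_ALIGNMENT still computes with the target's ordinary (signed) constants; the new cast only fixes the type of the result so it agrees with the now-unsigned alignment variables that consume it. A sketch of the same shape with stand-in constants (the _SK names are hypothetical, not GCC macros):

    #define BIGGEST_ALIGNMENT_SK 64
    #define BITS_PER_UNIT_SK 8
    #define MIN_SK(a, b) ((a) < (b) ? (a) : (b))
    #define MAX_SK(a, b) ((a) > (b) ? (a) : (b))

    /* Clamp a unit size (in bytes) to the largest supported alignment,
       then cast so the result is unsigned like its consumers.  */
    #define MODE_ALIGNMENT_SK(unit_bytes) \
      ((unsigned int) MIN_SK (BIGGEST_ALIGNMENT_SK, \
                              MAX_SK (1, (unit_bytes) * BITS_PER_UNIT_SK)))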
diff --git a/gcc/rtl.h b/gcc/rtl.h
index 1518512..f95b04b 100644
--- a/gcc/rtl.h
+++ b/gcc/rtl.h
@@ -1524,7 +1524,7 @@ extern int preserve_subexpressions_p PARAMS ((void));
/* In expr.c */
extern void init_expr_once PARAMS ((void));
-extern void move_by_pieces PARAMS ((rtx, rtx, int, int));
+extern void move_by_pieces PARAMS ((rtx, rtx, int, unsigned int));
/* In stupid.c */
diff --git a/gcc/stor-layout.c b/gcc/stor-layout.c
index 8ffacb7..707d0a0 100644
--- a/gcc/stor-layout.c
+++ b/gcc/stor-layout.c
@@ -37,11 +37,11 @@ struct sizetype_tab sizetype_tab;
/* If nonzero, this is an upper limit on alignment of structure fields.
The value is measured in bits. */
-int maximum_field_alignment;
+unsigned int maximum_field_alignment;
/* If non-zero, the alignment of a bitstring or (power-)set value, in bits.
May be overridden by front-ends. */
-int set_alignment = 0;
+unsigned int set_alignment = 0;
static tree layout_record PARAMS ((tree));
static void layout_union PARAMS ((tree));
@@ -241,7 +241,7 @@ round_up (value, divisor)
void
layout_decl (decl, known_align)
tree decl;
- unsigned known_align;
+ unsigned int known_align;
{
register tree type = TREE_TYPE (decl);
register enum tree_code code = TREE_CODE (decl);
@@ -289,8 +289,7 @@ layout_decl (decl, known_align)
{
DECL_BIT_FIELD_TYPE (decl) = DECL_BIT_FIELD (decl) ? type : 0;
if (maximum_field_alignment != 0)
- DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl),
- (unsigned)maximum_field_alignment);
+ DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
else if (DECL_PACKED (decl))
DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
}
@@ -306,7 +305,7 @@ layout_decl (decl, known_align)
if (xmode != BLKmode
&& known_align % GET_MODE_ALIGNMENT (xmode) == 0)
{
- DECL_ALIGN (decl) = MAX ((unsigned) GET_MODE_ALIGNMENT (xmode),
+ DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
DECL_ALIGN (decl));
DECL_MODE (decl) = xmode;
DECL_SIZE (decl) = bitsize_int (GET_MODE_BITSIZE (xmode), 0);
@@ -346,8 +345,8 @@ layout_record (rec)
tree rec;
{
register tree field;
- unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
- unsigned unpacked_align = record_align;
+ unsigned int record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
+ unsigned int unpacked_align = record_align;
/* These must be laid out *after* the record is. */
tree pending_statics = NULL_TREE;
/* Record size so far is CONST_SIZE + VAR_SIZE bits,
@@ -355,11 +354,11 @@ layout_record (rec)
and VAR_SIZE is a tree expression.
If VAR_SIZE is null, the size is just CONST_SIZE.
Naturally we try to avoid using VAR_SIZE. */
- register HOST_WIDE_INT const_size = 0;
- register tree var_size = 0;
+ HOST_WIDE_INT const_size = 0;
+ tree var_size = 0;
/* Once we start using VAR_SIZE, this is the maximum alignment
that we know VAR_SIZE has. */
- register int var_align = BITS_PER_UNIT;
+ unsigned int var_align = BITS_PER_UNIT;
int packed_maybe_necessary = 0;
#ifdef STRUCTURE_SIZE_BOUNDARY
@@ -370,8 +369,8 @@ layout_record (rec)
for (field = TYPE_FIELDS (rec); field; field = TREE_CHAIN (field))
{
- register int known_align = var_size ? var_align : const_size;
- register int desired_align = 0;
+ unsigned int known_align = var_size ? var_align : const_size;
+ unsigned int desired_align = 0;
tree type = TREE_TYPE (field);
/* If FIELD is static, then treat it like a separate variable,
@@ -422,20 +421,21 @@ layout_record (rec)
It does, however, affect the alignment of the next field
within the structure. */
if (! integer_zerop (DECL_SIZE (field)))
- record_align = MAX ((int)record_align, desired_align);
+ record_align = MAX (record_align, desired_align);
else if (! DECL_PACKED (field))
desired_align = TYPE_ALIGN (type);
/* A named bit field of declared type `int'
forces the entire structure to have `int' alignment. */
if (DECL_NAME (field) != 0)
{
- int type_align = TYPE_ALIGN (type);
+ unsigned int type_align = TYPE_ALIGN (type);
+
if (maximum_field_alignment != 0)
type_align = MIN (type_align, maximum_field_alignment);
else if (DECL_PACKED (field))
type_align = MIN (type_align, BITS_PER_UNIT);
- record_align = MAX ((int) record_align, type_align);
+ record_align = MAX (record_align, type_align);
if (warn_packed)
unpacked_align = MAX (unpacked_align, TYPE_ALIGN (type));
}
@@ -443,7 +443,7 @@ layout_record (rec)
else
#endif
{
- record_align = MAX ((int) record_align, desired_align);
+ record_align = MAX (record_align, desired_align);
if (warn_packed)
unpacked_align = MAX (unpacked_align, TYPE_ALIGN (type));
}
@@ -451,8 +451,7 @@ layout_record (rec)
if (warn_packed && DECL_PACKED (field))
{
if (const_size % TYPE_ALIGN (type) == 0
- || (var_align % TYPE_ALIGN (type) == 0
- && var_size != NULL_TREE))
+ || (var_align % TYPE_ALIGN (type) == 0 && var_size != NULL_TREE))
{
if (TYPE_ALIGN (type) > desired_align)
{
@@ -471,8 +470,7 @@ layout_record (rec)
own alignment? */
if (const_size % desired_align != 0
- || (var_align % desired_align != 0
- && var_size != NULL_TREE))
+ || (var_align % desired_align != 0 && var_size != NULL_TREE))
{
/* No, we need to skip space before this field.
Bump the cumulative size to multiple of field alignment. */
@@ -480,8 +478,7 @@ layout_record (rec)
if (warn_padded)
warning_with_decl (field, "padding struct to align `%s'");
- if (var_size == NULL_TREE
- || var_align % desired_align == 0)
+ if (var_size == NULL_TREE || var_align % desired_align == 0)
const_size
= CEIL (const_size, desired_align) * desired_align;
else
@@ -504,7 +501,7 @@ layout_record (rec)
&& maximum_field_alignment == 0
&& !integer_zerop (DECL_SIZE (field)))
{
- int type_align = TYPE_ALIGN (type);
+ unsigned int type_align = TYPE_ALIGN (type);
register tree dsize = DECL_SIZE (field);
int field_size = TREE_INT_CST_LOW (dsize);
@@ -527,7 +524,7 @@ layout_record (rec)
&& !DECL_PACKED (field)
&& !integer_zerop (DECL_SIZE (field)))
{
- int type_align = TYPE_ALIGN (type);
+ unsigned int type_align = TYPE_ALIGN (type);
register tree dsize = DECL_SIZE (field);
int field_size = TREE_INT_CST_LOW (dsize);
@@ -598,9 +595,7 @@ layout_record (rec)
Round it up to a multiple of the record's alignment. */
if (var_size == NULL_TREE)
- {
- TYPE_SIZE (rec) = bitsize_int (const_size, 0L);
- }
+ TYPE_SIZE (rec) = bitsize_int (const_size, 0L);
else
{
if (const_size)
@@ -623,12 +618,14 @@ layout_record (rec)
{
tree unpadded_size = TYPE_SIZE (rec);
+
#ifdef ROUND_TYPE_SIZE
TYPE_SIZE (rec) = ROUND_TYPE_SIZE (rec, TYPE_SIZE (rec), TYPE_ALIGN (rec));
#else
/* Round the size up to be a multiple of the required alignment */
TYPE_SIZE (rec) = round_up (TYPE_SIZE (rec), TYPE_ALIGN (rec));
#endif
+
if (warn_padded && var_size == NULL_TREE
&& simple_cst_equal (unpadded_size, TYPE_SIZE (rec)) == 0)
warning ("padding struct size to alignment boundary");
@@ -638,6 +635,7 @@ layout_record (rec)
&& var_size == NULL_TREE)
{
tree unpacked_size;
+
TYPE_PACKED (rec) = 0;
#ifdef ROUND_TYPE_ALIGN
unpacked_align
@@ -650,11 +648,13 @@ layout_record (rec)
#else
unpacked_size = round_up (TYPE_SIZE (rec), unpacked_align);
#endif
+
if (simple_cst_equal (unpacked_size, TYPE_SIZE (rec)))
{
if (TYPE_NAME (rec))
{
char *name;
+
if (TREE_CODE (TYPE_NAME (rec)) == IDENTIFIER_NODE)
name = IDENTIFIER_POINTER (TYPE_NAME (rec));
else
@@ -689,7 +689,7 @@ layout_union (rec)
tree rec;
{
register tree field;
- unsigned union_align = BITS_PER_UNIT;
+ unsigned int union_align = BITS_PER_UNIT;
/* The size of the union, based on the fields scanned so far,
is max (CONST_SIZE, VAR_SIZE).
@@ -1111,9 +1111,11 @@ layout_type (type)
then stick with BLKmode. */
&& (! STRICT_ALIGNMENT
|| TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
- || (int)TYPE_ALIGN (type) >= TREE_INT_CST_LOW (TYPE_SIZE (type))))
+ || ((int) TYPE_ALIGN (type)
+ >= TREE_INT_CST_LOW (TYPE_SIZE (type)))))
{
tree field;
+
/* A union which has any BLKmode members must itself be BLKmode;
it can't go in a register.
Unless the member is BLKmode only because it isn't aligned. */
@@ -1144,16 +1146,19 @@ layout_type (type)
#ifndef SET_WORD_SIZE
#define SET_WORD_SIZE BITS_PER_WORD
#endif
- int alignment = set_alignment ? set_alignment : SET_WORD_SIZE;
+ unsigned int alignment
+ = set_alignment ? set_alignment : SET_WORD_SIZE;
int size_in_bits
= (TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
- TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1);
int rounded_size
= ((size_in_bits + alignment - 1) / alignment) * alignment;
- if (rounded_size > alignment)
+
+ if (rounded_size > (int) alignment)
TYPE_MODE (type) = BLKmode;
else
TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);
+
TYPE_SIZE (type) = bitsize_int (rounded_size, 0L);
TYPE_SIZE_UNIT (type) = size_int (rounded_size / BITS_PER_UNIT);
TYPE_ALIGN (type) = alignment;
@@ -1170,7 +1175,7 @@ layout_type (type)
default:
abort ();
- } /* end switch */
+ }
/* Normally, use the alignment corresponding to the mode chosen.
However, where strict alignment is not required, avoid
@@ -1224,7 +1229,7 @@ layout_type (type)
/* Record layout info of this variant. */
tree size = TYPE_SIZE (type);
tree size_unit = TYPE_SIZE_UNIT (type);
- int align = TYPE_ALIGN (type);
+ unsigned int align = TYPE_ALIGN (type);
enum machine_mode mode = TYPE_MODE (type);
/* Copy it into all variants. */
@@ -1281,7 +1286,6 @@ make_signed_type (precision)
set_sizetype (type);
/* Lay out the type: set its alignment, size, etc. */
-
layout_type (type);
return type;
}
@@ -1386,7 +1390,6 @@ fixup_signed_type (type)
TREE_TYPE (TYPE_MAX_VALUE (type)) = type;
/* Lay out the type: set its alignment, size, etc. */
-
layout_type (type);
}
@@ -1413,7 +1416,6 @@ fixup_unsigned_type (type)
TREE_TYPE (TYPE_MAX_VALUE (type)) = type;
/* Lay out the type: set its alignment, size, etc. */
-
layout_type (type);
}
@@ -1435,7 +1437,7 @@ fixup_unsigned_type (type)
enum machine_mode
get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
int bitsize, bitpos;
- int align;
+ unsigned int align;
enum machine_mode largest_mode;
int volatilep;
{
@@ -1459,7 +1461,7 @@ get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
if the extra 4th byte is past the end of memory.
(Though at least one Unix compiler ignores this problem:
that on the Sequent 386 machine. */
- || MIN (unit, BIGGEST_ALIGNMENT) > align
+ || MIN (unit, BIGGEST_ALIGNMENT) > (int) align
|| (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
return VOIDmode;
@@ -1473,7 +1475,7 @@ get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
unit = GET_MODE_BITSIZE (tmode);
if (bitpos / unit == (bitpos + bitsize - 1) / unit
&& unit <= BITS_PER_WORD
- && unit <= MIN (align, BIGGEST_ALIGNMENT)
+ && unit <= (int) MIN (align, BIGGEST_ALIGNMENT)
&& (largest_mode == VOIDmode
|| unit <= GET_MODE_BITSIZE (largest_mode)))
wide_mode = tmode;
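
The SET_TYPE hunk keeps the usual round-up-to-a-multiple arithmetic, now with an unsigned alignment and a cast in the final comparison. As a worked check: a 29-bit set with 32-bit word alignment gives ((29 + 32 - 1) / 32) * 32 = 32, which is not larger than the alignment, so the set still gets a single-word mode. The formula in isolation (an illustrative helper, not a function in stor-layout.c):

    /* Round SIZE_IN_BITS up to the next multiple of ALIGNMENT (both in bits).  */
    static unsigned int
    round_up_bits (unsigned int size_in_bits, unsigned int alignment)
    {
      return ((size_in_bits + alignment - 1) / alignment) * alignment;
    }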
diff --git a/gcc/tree.c b/gcc/tree.c
index 5dcc32b..d0313c4 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -2251,6 +2251,50 @@ int_size_in_bytes (type)
return TREE_INT_CST_LOW (t);
}
+
+/* Return the strictest alignment, in bits, that T is known to have. */
+
+unsigned int
+expr_align (t)
+ tree t;
+{
+ unsigned int align0, align1;
+
+ switch (TREE_CODE (t))
+ {
+ case NOP_EXPR: case CONVERT_EXPR: case NON_LVALUE_EXPR:
+ /* If we have conversions, we know that the alignment of the
+ object must meet each of the alignments of the types. */
+ align0 = expr_align (TREE_OPERAND (t, 0));
+ align1 = TYPE_ALIGN (TREE_TYPE (t));
+ return MAX (align0, align1);
+
+ case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
+ case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
+ case WITH_RECORD_EXPR: case CLEANUP_POINT_EXPR: case UNSAVE_EXPR:
+ /* These don't change the alignment of an object. */
+ return expr_align (TREE_OPERAND (t, 0));
+
+ case COND_EXPR:
+ /* The best we can do is say that the alignment is the least aligned
+ of the two arms. */
+ align0 = expr_align (TREE_OPERAND (t, 1));
+ align1 = expr_align (TREE_OPERAND (t, 2));
+ return MIN (align0, align1);
+
+ case FUNCTION_DECL: case LABEL_DECL: case CONST_DECL:
+ case VAR_DECL: case PARM_DECL: case RESULT_DECL:
+ if (DECL_ALIGN (t) != 0)
+ return DECL_ALIGN (t);
+ break;
+
+ default:
+ break;
+ }
+
+ /* Otherwise take the alignment from that of the type. */
+ return TYPE_ALIGN (TREE_TYPE (t));
+}
/* Return, as a tree node, the number of elements for TYPE (which is an
ARRAY_TYPE) minus one. This counts only elements of the top array. */
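
The new expr_align gives callers a single answer to "how aligned is this expression known to be", replacing the ad-hoc VAR_DECL/TYPE_ALIGN test store_field used before. A hedged sketch of the caller-side pattern, in bytes (clamp_align is an illustrative helper; 8 stands in for BITS_PER_UNIT):

    /* Clamp a working byte alignment to what the expression provides;
       EXPR_ALIGN_BITS models the value returned by expr_align.  */
    static unsigned int
    clamp_align (unsigned int align_bytes, unsigned int expr_align_bits)
    {
      unsigned int exp_align = expr_align_bits / 8;
      return exp_align < align_bytes ? exp_align : align_bytes;
    }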
diff --git a/gcc/tree.h b/gcc/tree.h
index 99f4d2c..b27f0e0 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -1766,6 +1766,7 @@ extern tree non_lvalue PARAMS ((tree));
extern tree pedantic_non_lvalue PARAMS ((tree));
extern tree convert PARAMS ((tree, tree));
+extern unsigned int expr_align PARAMS ((tree));
extern tree size_in_bytes PARAMS ((tree));
extern HOST_WIDE_INT int_size_in_bytes PARAMS ((tree));
extern tree size_binop PARAMS ((enum tree_code, tree, tree));
@@ -1806,10 +1807,10 @@ extern struct sizetype_tab sizetype_tab;
#define ubitsizetype sizetype_tab.xubitsizetype
/* If nonzero, an upper limit on alignment of structure fields, in bits. */
-extern int maximum_field_alignment;
+extern unsigned int maximum_field_alignment;
/* If non-zero, the alignment of a bitstring or (power-)set value, in bits. */
-extern int set_alignment;
+extern unsigned int set_alignment;
/* Concatenate two lists (chains of TREE_LIST nodes) X and Y
by making the last node in X point to Y.
@@ -1995,7 +1996,7 @@ extern tree maybe_build_cleanup PARAMS ((tree));
extern tree get_inner_reference PARAMS ((tree, int *, int *, tree *,
enum machine_mode *, int *,
- int *, int *));
+ int *, unsigned int *));
/* Given a DECL or TYPE, return the scope in which it was declared, or
NUL_TREE if there is no containing scope. */
diff --git a/gcc/varasm.c b/gcc/varasm.c
index ff3b45e..76d91f71 100644
--- a/gcc/varasm.c
+++ b/gcc/varasm.c
@@ -1511,7 +1511,7 @@ assemble_variable (decl, top_level, at_end, dont_output_data)
* (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
#if !defined(ASM_OUTPUT_ALIGNED_COMMON) && !defined(ASM_OUTPUT_ALIGNED_BSS)
- if ( (DECL_ALIGN (decl) / BITS_PER_UNIT) > rounded)
+ if ((DECL_ALIGN (decl) / BITS_PER_UNIT) > (unsigned int) rounded)
warning_with_decl
(decl, "requested alignment for %s is greater than implemented alignment of %d.",rounded);
#endif