From 770ae6cc710a7a0f7db4ef7f09941bbe19d0ee78 Mon Sep 17 00:00:00 2001 From: Richard Kenner Date: Sat, 25 Mar 2000 18:34:13 +0000 Subject: * Rework fields used to describe positions of bitfields and modify sizes to be unsigned and use HOST_WIDE_INT. * alias.c (reg_known_value_size): Now unsigned. * c-typeck.c (build_unary_op, case ADDR_EXPR): Use byte_position. (really_start_incremental_init): Use bitsize_zero_node. (push_init_level, pop_init_level, output_init_element): Likewise. Use bitsize_unit_node and bitsize_one_node. (output_pending_init_elements, process_init_element): Likewise. * combine.c (combine_max_regno, reg_sign_bit_copies): Now unsigned. (make_extraction): Position and length HOST_WIDE_INT and unsigned HOST_WIDE_INT, respectively. (get_pos_from_mask): Passed in value is unsigned HOST_WIDE_INT. (num_sign_bit_copies): Returns unsigned. BITWIDTH now unsigned; rework arithmetic. Remove recursive call from arg to MAX. (combine_instructions, init_reg_last_arrays): NREGS now unsigned. (setup_incoming_promotions, can_combine_p, try_combine, simplify_set): REGNO now unsigned. (set_nonzero_bit_and_sign_copies): NUM now unsigned. (find_split_point, expand_compound_operation, make_extraction): LEN now unsigned HOST_WIDE_INT, POS now HOST_WIDE_INT. (make_field_assignment): Likewise. (combine_simplify_rtx): Add cast. (expand_compound_operation): MODEWIDTH now unsigned; rework arithmetic. (force_to_mode): WIDTH now unsigned; add cast. (if_then_else_cond): SIZE now unsigned. (nonzero_bits): MODE_WIDTH, RESULT_WIDTH, and WIDTH now unsigned. (extended_count): Now returns unsigned. (simplify_shift_const): COUNT unsigned; arg is now INPUT_COUNT. Add SIGNED_COUNT variable; MODE_WORDS and FIRST_COUNT now unsigned. (simplify_comparison): MODE_WIDTH now unsigned. (update_table_tick): REGNO and ENDREGNO now unsigned; new var R. (mark_used_regs_combine): Likewise; rework arithmetic. (record_value_for_reg): REGNO, ENDREGNO, and I now unsigned. 
(record_dead_and_set_regs, reg_dead_at_p, distribute_notes): Likewise. (record_promoted_value): REGNO now unsigned. (get_last_value_validate): REGNO, ENDREGNO, and J now unsigned. (get_last_value): REGNO now unsigned. (use_crosses_set_p): REGNO and ENDREGNO now unsigned. (reg_dead_regno, reg_dead_endregno): Now unsigned. (remove_death): Arg REGNO now unsigned. (move_deaths): REGNO, DEADREGNO, DEADEND, OUREND, and I now unsigned. (reg_bitfield_target_p): REGNO, TREGNO, ENDREGNO, and ENDTREGNO now unsigned. * convert.c (convert_to_integer): INPREC and OUTPREC now unsigned. * cse.c (struct qty_table_elem): FIRST_REG and LAST_REG now unsigned. (struct cse_reg_info): REGNO now unsigned. (cached_regno): Now unsigned. (REGNO_QTY_VALID_P): Add cast. (make_new_qty, make_regs_eqv, delete_reg_equiv): Regno args unsigned. (remove_invalid_refs): Likewise. (remove_invalid_subreg_refs): Likewise; arg WORD also unsigned as are variables END and I. (get_cse_reg_info, insert): Likewise. (mention_regs, invalidate_for_call): REGNO, ENDREGNO, and I unsigned. (canon_hash): Likewise. (insert_regs, lookup_for_remove): REGNO now unsigned. (invalidate): REGNO, ENDREGNO, TREGNO, and TENDREGNO now unsigned. New variable RN. * dbxout.c (dbxout_parms, dbxout_reg_parms): Don't check for REGNO < 0. * dwarf2out.c (dwarf2out_frame_debug_expr): Remove cast. * emit-rtl.c (subreg_realpart_p): Add cast. (operand_subword): Arg I is now unsigned as is var PARTWORDS. (operand_subword_force): Arg I is now unsigned. * except.c (eh_regs): Variable I is now unsigned. * explow.c (hard_function_value): BYTES is unsigned HOST_WIDE_INT. * expmed.c (store_fixed_bit_field): Position is HOST_WIDE_INT; length is unsigned HOST_WIDE_INT; likewise for internal variables. (store_split_bit_field, extract_fixed_bit_field): Likewise. (extract_split_bit_field, store_bit_field, extract_bit_field): Likewise. 
* expr.c (store_constructor_fields, store_constructor, store_field): Positions are HOST_WIDE_INT and lengths are unsigned HOST_WIDE_INT. (expand_assignment, expand_expr, expand_expr_unaligned): Likewise. (do_jump): Likewise. (move_by_pieces, move_by_pieces_ninsns, clear_by_pieces): MAX_SIZE is now unsigned. (emit_group_load): BYTEPOS is HOST_WIDE_INT; BYTELEN is unsigned. (emit_group_store): Likewise. (emit_move_insn): I now unsigned. (store_constructor): Use host_integerp, tree_low_cst, and bitsize_unit_node. (get_inner_reference): Return bitpos and bitsize as HOST_WIDE_INT. Rework all calculations to use trees and new fields. * expr.h (promoted_input_arg): Regno now unsigned. (store_bit_field, extract_bit_field): Adjust types of pos and size. (mark_seen_cases): Arg is HOST_WIDE_INT. * flow.c (verify_wide_reg_1): REGNO now unsigned. * fold-const.c (decode_field_reference): Size and pos HOST_WIDE_INT; precisions and alignments are unsigned. (optimize_bit_field_compare, fold_truthop): Likewise. (int_const_binop): Adjust threshold for size_int_type_wide call. (fold_convert): Likewise. (size_int_type_wide): Make table larger and fix thinko that only had half of table used. (all_ones_mask_p, fold): Precisions are unsigned. * function.c (put_reg_info_stack): REGNO is unsigned. (instantiate_decl): Size is HOST_WIDE_INT. (instantiate_virtual_regs): I is unsigned. (assign_parms): REGNO, REGNOI, and REGNOR are unsigned. (promoted_input_arg): REGNO is unsigned. * function.h (struct function): x_max_parm_reg is now unsigned. * gcse.c (max_gcse_regno): Now unsigned. (struct null_pointer_info): min_reg and max_reg now unsigned. (lookup_set, next_set): REGNO arg now unsigned. (compute_hash_table): REGNO and I now unsigned. (handle_avail_expr): regnum_for_replacing now unsigned. (cprop_insn): REGNO now unsigned. (delete_null_pointer_checks_1): BLOCK_REG now pointer to unsigned. * ggc-common.c (ggc_mark_tree_children, case FIELD_DECL): New case. 
* global.c (set_preference): SRC_REGNO, DEST_REGNO, and I now unsigned. * hard-reg-set.h (reg_class_size): Now unsigned. * integrate.c (mark_stores): LAST_REG and I now unsigned; new UREGNO. * jump.c (mark_modified_reg): I now unsigned; add cast. (rtx_equal_for_thread_p): Add cast. * loop.c (max_reg_before_loop): Now unsigned. (struct_movable): REGNO now unsigned. (try_copy_prop): REGNO arg unsigned. (regs_match_p): XN and YN now unsigned. (consec_sets_invariant_p, maybe_eliminate_biv): REGNO now unsigned. (strength_reduce): Likewise; NREGS also unsigned. (first_increment_giv, last_increment_giv unsigned): Now unsigned. * loop.h (struct iv_class): REGNO now unsigned. (max_reg_before_loop, first_increment_giv, last_increment_giv): Now unsigned. * machmode.h (mode_size, mode_unit_size): Now unsigned. (mode_for_size, smallest_mode_for_size): Pass size as unsigned. * optabs.c (expand_binop): I and NWORDS now unsigned. (expand_unop): I now unsigned. * print-tree.c (print_node): Don't print DECL_FIELD_BITPOS, but do print DECL_FIELD_OFFSET and DECL_FIELD_BIT_OFFSET. * real.c (significand_size): Now returns unsigned. * real.h (significand_size): Likewise. * regclass.c (reg_class_size): Now unsigned. (choose_hard_reg_mode): Both operands now unsigned. (record_reg_classes): REGNO and NR now unsigned. (reg_scan): NREGS now unsigned. (reg_scan_update): old_max_regno now unsigned. (reg_scan_mark_refs): Arg MIN_REGNO and var REGNO now unsigned. * reload.c (find_valid_class): BEST_SIZE now unsigned. (find_dummy_reload): REGNO, NWORDS, and I now unsigned. (hard_reg_set_here_p): Args BEG_REGNO and END_REGNO now unsigned. Likewise for variable R. (refers_to_regno_for_reload_p): Args REGNO and END_REGNO now unsigned, as are variables INNER_REGNO and INNER_ENDREGNO; add new variable R. (find_equiv_reg): Add casts. (regno_clobbered_p): Arg REGNO now unsigned. * reload.h (struct reload): NREGS now unsigned. (refers_to_regno_for_reload_p): Regno args are unsigned. 
(regno_clobbered_p): Likewise. * reload1.c (reg_max_ref_width, spill_stack_slot_width): Now unsigned. (compute_use_by_pseudos): REGNO now unsigned. (find_reg): I and J now unsigned, new variable K, and change loop variables accordingly; THIS_NREGS now unsigned. (alter_reg): INHERENT_SIZE and TOTAL_SIZE now unsigned. (spill_hard_reg): REGNO arg now unsigned; add casts. (forget_old_reloads_1): REGNO, NR, and I now unsigned. (mark_reload_reg_in_use): Arg REGNO and vars NREGS and I now unsigned. (clear_reload_reg_in_use): Arg REGNO and vars NREGS, START_REGNO, END_REGNO, CONFLICT_START, and CONFLICT_END now unsigned. (reload_reg_free_p, reload_reg_reaches_end_p): Arg REGNO now unsigned. (choose_reload_regs): MAX_GROUP_SIZE now unsigned. (emit_reload_insns): REGNO now unsigned. (reload_cse_move2add): Add cast. (move2add_note_store): REGNO and I now unsigned; new variable ENDREGNO and rework loop. * resource.c (mark_referenced_resources, mark_set_resources): New variable R; REGNO and LAST_REGNO now unsigned. (mark_target_live_regs): J and REGNO now unsigned. * rtl.c (mode_size, mode_unit_size): Now unsigned. * rtl.h (union rtunion_def): New field rtuint. (XCUINT): New macro. (ADDRESSOF_REGNO, REGNO, SUBREG_WORD): New XCUINT. (operand_subword, operand_subword_force): Word number is unsigned. (choose_hard_reg_mode): Operands are unsigned. (refers_to_regno_p, dead_or_set_regno_p): Regno arg is unsigned. (find_regno_note, find_regno_fusage, replace_regs): Likewise. (regno_use_in, combine_instructions, remove_death): Likewise. (reg_scan, reg_scan_update): Likewise. (extended_count): Return is unsigned. * rtlanal.c (refers_to_regno_p): Args REGNO and ENDREGNO and vars I, INNER_REGNO, and INNER_ENDREGNO now unsigned; new variable X_REGNO. (reg_overlap_mentioned_p): REGNO and ENDREGNO now unsigned. (reg_set_last_first_regno, reg_set_last_last_regno): Now unsigned. (reg_set_last_1): FIRST and LAST now unsigned. (dead_or_set_p): REGNO, LAST_REGNO, and I now unsigned. 
(dead_or_set_regno_p): Arg TEST_REGNO and vars REGNO and ENDREGNO now unsigned. (find_regno_note, regno_use_in): Arg REGNO now unsigned. (find_regno_fusage): Likewise; also var REGNOTE now unsigned. (find_reg_fusage): Variables REGNO, END_REGNO, and I now unsigned. (replace_regs): Arg NREGS now unsigned. * sdbout.c (sdbout_parms, sdbout_reg_parms): Don't check REGNO < 0. * simplify-rtx.c (simplify_unary_operation): WIDTH now unsigned. (simplify_binary_operation): Likewise. (cselib_invalidate_regno): Arg REGNO and variables ENDREGNO, I, and THIS_LAST now unsigned. (cselib_record_set): Add cast. * ssa.c (ssa_max_reg_num): Now unsigned. (rename_block): REGNO now unsigned. * stmt.c (expand_return): Bit positions unsigned HOST_WIDE_INT; sizes now unsigned. (all_cases_count): Just return -1 not -2. COUNT, MINVAL, and LASTVAL now HOST_WIDE_INT. Rework tests to use trees whenever possible. Use host_integerp and tree_low_cst. (mark_seen_cases): COUNT arg now HOST_WIDE_INT; Likewise variable NEXT_NODE_OFFSET; XLO now unsigned. (check_for_full_enumeration_handling): BYTES_NEEDED, I to HOST_WIDE_INT. * stor-layout.c (mode_for_size): SIZE arg now unsigned. (smallest_mode_for_size): Likewise. (layout_decl): Simplify handling of a specified DECL_SIZE_UNIT. KNOWN_ALIGN is now an alignment, so simplify code. Don't turn off DECL_BIT_FIELD if field is BLKmode, but not type. (start_record_layout): Renamed from new_record_layout_info. Update to new fields. (debug_rli, normalize_rli, rli_size_unit_so_far, rli_size_so_far): New functions. (place_union_field): Renamed from layout_union_field. Update to use new fields in rli. (place_field): Renamed from layout_field. Major rewrite to use new fields in rli; pass alignment to layout_decl. (finalize_record_size): Rework to use new fields in rli and handle union. (compute_record_mode): Rework to simplify and to use new DECL fields. (finalize_type_size): Make rounding more consistent. (finish_union_layout): Deleted. 
(layout_type, case VOID_TYPE): Don't set TYPE_SIZE_UNIT either. (layout_type, case RECORD_TYPE): Call new function names. (initialize_sizetypes): Set TYPE_IS_SIZETYPE. (set_sizetype): Set TYPE_IS_SIZETYPE earlier. (get_best_mode): UNIT is now unsigned; remove casts. * tree.c (bit_position): Compute from new fields. (byte_position, int_byte_position): New functions. (print_type_hash_statistics): Cast to remove warning. (build_range_type): Use host_integerp and tree_low_cst to try to hash. (build_index_type): Likewise; make subtype of sizetype. (build_index_2_type): Pass sizetype to build_range_type. (build_common_tree_nodes): Use size_int and bitsize_int to initialize nodes; add bitsize_{zero,one,unit}_node. * tree.h (DECL_FIELD_CONTEXT): Use FIELD_DECL_CHECK. (DECL_BIT_FIELD_TYPE, DECL_QUALIFIER, DECL_FCONTEXT): Likewise. (DECL_PACKED, DECL_BIT_FIELD): Likewise. (DECL_FIELD_BITPOS): Deleted. (DECL_FIELD_OFFSET, DECL_FIELD_BIT_OFFSET): New fields. (DECL_RESULT, DECL_SAVED_INSNS): Use FUNCTION_DECL_CHECK. (DECL_FRAME_SIZE, DECL_FUNCTION_CODE, DECL_NO_STATIC_CHAIN): Likewise. (DECL_INLINE, DECL_BUILT_IN_NONANSI, DECL_IS_MALLOC): Likewise. (DECL_BUILT_IN_CLASS, DECL_STATIC_CONSTRUCTOR): Likewise. (DECL_STATIC_DESTRUCTOR, DECL_NO_CHECK_MEMORY_USAGE): Likewise. (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT, DECL_NO_LIMIT_STACK): Likewise. (DECL_ORIGINAL_TYPE, TYPE_DECL_SUPPRESS_DEBUG): Use TYPE_DECL_CHECK. (DECL_ARG_TYPE_AS_WRITTEN, DECL_ARG_TYPE): Use PARM_DECL_CHECK. (DECL_INCOMING_RTL, DECL_TRANSPARENT_UNION): Likewise. (DECL_ALIGN): Adjust to new field in union. (DECL_OFFSET_ALIGN): New field. (DECL_ERROR_ISSUED, DECL_TOO_LATE): Use LABEL_DECL_CHECK. (DECL_IN_TEXT_SECTION): Use VAR_DECL_CHECK. (union tree_decl): Add struct for both aligns. (enum tree_index): Add TI_BITSIZE_{ZERO,ONE,UNIT}. (bitsize_zero_node, bitsize_one_node, bitsize_unit_node): Added. (struct record_layout_info): Rework fields to have offset alignment and byte and bit position. 
(start_record_layout, place_field): Renamed from old names. (rli_size_so_far, rli_size_unit_so_far, normalize_rli): New decls. (byte_position, int_byte_position): Likewise. (get_inner_reference): Change types of position and length. * unroll.c (unroll_loop): New variable R; use for some loops. MAX_LOCAL_REGNUM and MAXREGNUM now unsigned. (calculate_giv_inc): Arg REGNO now unsigned. (copy_loop_body): REGNO and SRC_REGNO now unsigned. * varasm.c (assemble_variable): Clean up handling of size using host_integerp and tree_low_cst. (decode_addr_const): Use byte, not bit, position. (output_constructor): bitpos and offsets are HOST_WIDE_INT; use tree_low_cst and int_bit_position. * objc/objc-act.c (build_ivar_list_initializer): Use byte_position. * ch/actions.c (check_missing_cases): BYTES_NEEDED is HOST_WIDE_INT. * ch/typeck.c (expand_constant_to_buffer): Use int_byte_position. (extract_constant_from_buffer): Likewise. * cp/class.c (build_vbase_pointer_fields): layout_field now place_field. (get_vfield_offset): Use byte_position. (set_rtti_entry): Set OFFSET to ssizetype zero. (get_binfo_offset_as_int): Deleted. (dfs_record_base_offsets): Use tree_low_cst. (dfs_search_base_offsets): Likewise. (layout_nonempty_base_or_field): Reflect changes in RLI format and call byte_position. (layout_empty_base): Convert offset to ssizetype. (build_base_field): use rli_size_unit_so_far. (dfs_propagate_binfo_offsets): Do computation in proper type. (layout_virtual_bases): Pass ssizetype to propagate_binfo_offsets. (layout_class_type): Reflect changes in RLI names and fields. (finish_struct_1): Set DECL_FIELD_OFFSET. * cp/dump.c (dequeue_and_dump): Call bit_position. * cp/expr.c (cplus_expand_constant): Use byte_position. * cp/rtti.c (expand_class_desc): Use bitsize_one_node. * cp/typeck.c (build_component_addr): Use byte_position and don't special case for zero offset. * f/com.c (ffecom_tree_canonize_ptr_): Use bitsize_zero_node. (ffecom_tree_canonize_ref_): Likewise. 
* java/class.c (make_field_value): Use byte_position. * java/expr.c (JAVA_ARRAY_LENGTH_OFFSET): Use byte_position. (java_array_data_offset): Likewise. * java/java-tree.h (MAYBE_CREATE_TYPE_TYPE_LANG_SPECIFIC): Add case to bzero call. From-SVN: r32742 --- gcc/expr.c | 240 +++++++++++++++++++++++++------------------------------------ 1 file changed, 99 insertions(+), 141 deletions(-) (limited to 'gcc/expr.c') diff --git a/gcc/expr.c b/gcc/expr.c index 09d4063..4d7007e 100644 --- a/gcc/expr.c +++ b/gcc/expr.c @@ -143,12 +143,15 @@ static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), struct clear_by_pieces *)); static int is_zeros_p PARAMS ((tree)); static int mostly_zeros_p PARAMS ((tree)); -static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode, +static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT, + HOST_WIDE_INT, enum machine_mode, tree, tree, unsigned int, int)); -static void store_constructor PARAMS ((tree, rtx, unsigned int, int, int)); -static rtx store_field PARAMS ((rtx, int, int, enum machine_mode, +static void store_constructor PARAMS ((tree, rtx, unsigned int, int, + unsigned HOST_WIDE_INT)); +static rtx store_field PARAMS ((rtx, HOST_WIDE_INT, + HOST_WIDE_INT, enum machine_mode, tree, enum machine_mode, int, - unsigned int, int, int)); + unsigned int, HOST_WIDE_INT, int)); static enum memory_use_mode get_memory_usage_from_modifier PARAMS ((enum expand_modifier)); static tree save_noncopied_parts PARAMS ((tree, tree)); @@ -162,7 +165,8 @@ static rtx expand_increment PARAMS ((tree, int, int)); static void preexpand_calls PARAMS ((tree)); static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx)); static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx)); -static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, rtx, rtx)); +static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, + rtx, rtx)); static rtx do_store_flag PARAMS ((tree, rtx, enum 
machine_mode, int)); /* Record for each mode whether we can move a register directly to or @@ -1368,7 +1372,7 @@ move_by_pieces (to, from, len, align) { struct move_by_pieces data; rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0); - int max_size = MOVE_MAX_PIECES + 1; + unsigned int max_size = MOVE_MAX_PIECES + 1; enum machine_mode mode = VOIDmode, tmode; enum insn_code icode; @@ -1479,7 +1483,7 @@ move_by_pieces_ninsns (l, align) unsigned int align; { register int n_insns = 0; - int max_size = MOVE_MAX + 1; + unsigned int max_size = MOVE_MAX + 1; if (! SLOW_UNALIGNED_ACCESS (word_mode, align) || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT) @@ -1920,8 +1924,8 @@ emit_group_load (dst, orig_src, ssize, align) for (i = start; i < XVECLEN (dst, 0); i++) { enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0)); - int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1)); - int bytelen = GET_MODE_SIZE (mode); + HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1)); + unsigned int bytelen = GET_MODE_SIZE (mode); int shift = 0; /* Handle trailing fragments that run over the size of the struct. */ @@ -2050,9 +2054,9 @@ emit_group_store (orig_dst, src, ssize, align) /* Process the pieces. */ for (i = start; i < XVECLEN (src, 0); i++) { - int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); + HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); enum machine_mode mode = GET_MODE (tmps[i]); - int bytelen = GET_MODE_SIZE (mode); + unsigned int bytelen = GET_MODE_SIZE (mode); /* Handle trailing fragments that run over the size of the struct. 
*/ if (ssize >= 0 && bytepos + bytelen > ssize) @@ -2238,7 +2242,7 @@ clear_by_pieces (to, len, align) { struct clear_by_pieces data; rtx to_addr = XEXP (to, 0); - int max_size = MOVE_MAX_PIECES + 1; + unsigned int max_size = MOVE_MAX_PIECES + 1; enum machine_mode mode = VOIDmode, tmode; enum insn_code icode; @@ -2587,7 +2591,7 @@ emit_move_insn_1 (x, y) enum machine_mode mode = GET_MODE (x); enum machine_mode submode; enum mode_class class = GET_MODE_CLASS (mode); - int i; + unsigned int i; if (mode >= MAX_MACHINE_MODE) abort (); @@ -3323,8 +3327,7 @@ expand_assignment (to, from, want_value, suggest_reg) || TREE_CODE (to) == ARRAY_REF) { enum machine_mode mode1; - int bitsize; - int bitpos; + HOST_WIDE_INT bitsize, bitpos; tree offset; int unsignedp; int volatilep = 0; @@ -4051,7 +4054,8 @@ static void store_constructor_field (target, bitsize, bitpos, mode, exp, type, align, cleared) rtx target; - int bitsize, bitpos; + unsigned HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos; enum machine_mode mode; tree exp, type; unsigned int align; @@ -4095,7 +4099,7 @@ store_constructor (exp, target, align, cleared, size) rtx target; unsigned int align; int cleared; - int size; + unsigned HOST_WIDE_INT size; { tree type = TREE_TYPE (exp); #ifdef WORD_REGISTER_OPERATIONS @@ -4175,10 +4179,10 @@ store_constructor (exp, target, align, cleared, size) tree value = TREE_VALUE (elt); #endif register enum machine_mode mode; - int bitsize; - int bitpos = 0; + HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos = 0; int unsignedp; - tree pos, constant = 0, offset = 0; + tree offset; rtx to_rtx = target; /* Just ignore missing fields. 
@@ -4190,8 +4194,8 @@ store_constructor (exp, target, align, cleared, size) if (cleared && is_zeros_p (TREE_VALUE (elt))) continue; - if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST) - bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)); + if (host_integerp (DECL_SIZE (field), 1)) + bitsize = tree_low_cst (DECL_SIZE (field), 1); else bitsize = -1; @@ -4200,18 +4204,16 @@ store_constructor (exp, target, align, cleared, size) if (DECL_BIT_FIELD (field)) mode = VOIDmode; - pos = DECL_FIELD_BITPOS (field); - if (TREE_CODE (pos) == INTEGER_CST) - constant = pos; - else if (TREE_CODE (pos) == PLUS_EXPR - && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST) - constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0); + offset = DECL_FIELD_OFFSET (field); + if (host_integerp (offset, 0) + && host_integerp (bit_position (field), 0)) + { + bitpos = int_bit_position (field); + offset = 0; + } else - offset = pos; - - if (constant) - bitpos = TREE_INT_CST_LOW (constant); - + bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0); + if (offset) { rtx offset_rtx; @@ -4220,8 +4222,7 @@ store_constructor (exp, target, align, cleared, size) offset = build (WITH_RECORD_EXPR, bitsizetype, offset, make_tree (TREE_TYPE (exp), target)); - offset = size_binop (EXACT_DIV_EXPR, offset, - bitsize_int (BITS_PER_UNIT)); + offset = size_binop (EXACT_DIV_EXPR, offset, bitsize_unit_node); offset = convert (sizetype, offset); offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); @@ -4257,8 +4258,7 @@ store_constructor (exp, target, align, cleared, size) start of a word, try to widen it to a full word. This special case allows us to output C++ member function initializations in a form that the optimizers can understand. 
*/ - if (constant - && GET_CODE (target) == REG + if (GET_CODE (target) == REG && bitsize < BITS_PER_WORD && bitpos % BITS_PER_WORD == 0 && GET_MODE_CLASS (mode) == MODE_INT @@ -4707,13 +4707,14 @@ static rtx store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, align, total_size, alias_set) rtx target; - int bitsize, bitpos; + HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos; enum machine_mode mode; tree exp; enum machine_mode value_mode; int unsignedp; unsigned int align; - int total_size; + HOST_WIDE_INT total_size; int alias_set; { HOST_WIDE_INT width_mask = 0; @@ -4929,25 +4930,29 @@ tree get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep, palignment) tree exp; - int *pbitsize; - int *pbitpos; + HOST_WIDE_INT *pbitsize; + HOST_WIDE_INT *pbitpos; tree *poffset; enum machine_mode *pmode; int *punsignedp; int *pvolatilep; unsigned int *palignment; { - tree orig_exp = exp; tree size_tree = 0; enum machine_mode mode = VOIDmode; tree offset = size_zero_node; + tree bit_offset = bitsize_zero_node; unsigned int alignment = BIGGEST_ALIGNMENT; + tree tem; + /* First get the mode, signedness, and size. We do this from just the + outermost expression. */ if (TREE_CODE (exp) == COMPONENT_REF) { size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1))) mode = DECL_MODE (TREE_OPERAND (exp, 1)); + *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1)); } else if (TREE_CODE (exp) == BIT_FIELD_REF) @@ -4958,122 +4963,71 @@ get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, else { mode = TYPE_MODE (TREE_TYPE (exp)); + *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp)); + if (mode == BLKmode) size_tree = TYPE_SIZE (TREE_TYPE (exp)); - - *pbitsize = GET_MODE_BITSIZE (mode); - *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp)); + else + *pbitsize = GET_MODE_BITSIZE (mode); } - if (size_tree) + if (size_tree != 0) { - if (TREE_CODE (size_tree) != INTEGER_CST) + if (! 
host_integerp (size_tree, 1)) mode = BLKmode, *pbitsize = -1; else - *pbitsize = TREE_INT_CST_LOW (size_tree); + *pbitsize = tree_low_cst (size_tree, 1); } /* Compute cumulative bit-offset for nested component-refs and array-refs, and find the ultimate containing object. */ - - *pbitpos = 0; - while (1) { - if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF) + if (TREE_CODE (exp) == BIT_FIELD_REF) + bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2)); + else if (TREE_CODE (exp) == COMPONENT_REF) { - tree pos = (TREE_CODE (exp) == COMPONENT_REF - ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1)) - : TREE_OPERAND (exp, 2)); - tree constant = bitsize_int (0), var = pos; + tree field = TREE_OPERAND (exp, 1); + tree this_offset = DECL_FIELD_OFFSET (field); /* If this field hasn't been filled in yet, don't go past it. This should only happen when folding expressions made during type construction. */ - if (pos == 0) + if (this_offset == 0) break; + else if (! TREE_CONSTANT (this_offset) + && contains_placeholder_p (this_offset)) + this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp); - /* Assume here that the offset is a multiple of a unit. - If not, there should be an explicitly added constant. */ - if (TREE_CODE (pos) == PLUS_EXPR - && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST) - constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0); - else if (TREE_CODE (pos) == INTEGER_CST) - constant = pos, var = bitsize_int (0); + offset = size_binop (PLUS_EXPR, offset, DECL_FIELD_OFFSET (field)); + bit_offset = size_binop (PLUS_EXPR, bit_offset, + DECL_FIELD_BIT_OFFSET (field)); - *pbitpos += TREE_INT_CST_LOW (constant); - offset - = size_binop (PLUS_EXPR, offset, - convert (sizetype, - size_binop (EXACT_DIV_EXPR, var, - bitsize_int (BITS_PER_UNIT)))); + if (! 
host_integerp (offset, 0)) + alignment = MIN (alignment, DECL_OFFSET_ALIGN (field)); } - else if (TREE_CODE (exp) == ARRAY_REF) { - /* This code is based on the code in case ARRAY_REF in expand_expr - below. We assume here that the size of an array element is - always an integral multiple of BITS_PER_UNIT. */ - tree index = TREE_OPERAND (exp, 1); tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); - tree low_bound - = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node; - tree index_type = TREE_TYPE (index); - tree xindex; - - if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype)) - { - index = convert (type_for_size (TYPE_PRECISION (sizetype), 0), - index); - index_type = TREE_TYPE (index); - } + tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0); - /* Optimize the special-case of a zero lower bound. - - We convert the low_bound to sizetype to avoid some problems - with constant folding. (E.g. suppose the lower bound is 1, - and its mode is QI. Without the conversion, (ARRAY - +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) - +INDEX), which becomes (ARRAY+255+INDEX). Oops!) - - But sizetype isn't quite right either (especially if - the lowbound is negative). FIXME */ - - if (! integer_zerop (low_bound)) - index = fold (build (MINUS_EXPR, index_type, index, - convert (sizetype, low_bound))); - - if (TREE_CODE (index) == INTEGER_CST) - { - index = convert (sbitsizetype, index); - index_type = TREE_TYPE (index); - } + /* We assume all arrays have sizes that are a multiple of a byte. + First subtract the lower bound, if any, in the type of the + index, then convert to sizetype and multiply by the size of the + array element. */ + if (low_bound != 0 && ! integer_zerop (low_bound)) + index = fold (build (MINUS_EXPR, TREE_TYPE (index), + index, low_bound)); - xindex = fold (build (MULT_EXPR, sbitsizetype, index, - convert (sbitsizetype, - TYPE_SIZE (TREE_TYPE (exp))))); + if (! 
TREE_CONSTANT (index) + && contains_placeholder_p (index)) + index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp); - if (TREE_CODE (xindex) == INTEGER_CST - && TREE_INT_CST_HIGH (xindex) == 0) - *pbitpos += TREE_INT_CST_LOW (xindex); - else - { - /* Either the bit offset calculated above is not constant, or - it overflowed. In either case, redo the multiplication - against the size in units. This is especially important - in the non-constant case to avoid a division at runtime. */ - xindex - = fold (build (MULT_EXPR, ssizetype, index, - convert (ssizetype, - TYPE_SIZE_UNIT (TREE_TYPE (exp))))); - - if (contains_placeholder_p (xindex)) - xindex = build (WITH_RECORD_EXPR, ssizetype, xindex, exp); - - offset - = size_binop (PLUS_EXPR, offset, convert (sizetype, xindex)); - } + offset = size_binop (PLUS_EXPR, offset, + size_binop (MULT_EXPR, + convert (sizetype, index), + TYPE_SIZE_UNIT (TREE_TYPE (exp)))); } else if (TREE_CODE (exp) != NON_LVALUE_EXPR && ! ((TREE_CODE (exp) == NOP_EXPR @@ -5088,7 +5042,7 @@ get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, /* If the offset is non-constant already, then we can't assume any alignment more than the alignment here. */ - if (! integer_zerop (offset)) + if (! TREE_CONSTANT (offset)) alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp))); exp = TREE_OPERAND (exp, 0); @@ -5099,19 +5053,24 @@ get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, else if (TREE_TYPE (exp) != 0) alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp))); - if (integer_zerop (offset)) - offset = 0; - - if (offset != 0 && contains_placeholder_p (offset)) - offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp); + /* If OFFSET is constant, see if we can return the whole thing as a + constant bit position. Otherwise, split it up. 
*/ + if (host_integerp (offset, 0) + && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset), + bitsize_unit_node)) + && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset)) + && host_integerp (tem, 0)) + *pbitpos = tree_low_cst (tem, 0), *poffset = 0; + else + *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset; *pmode = mode; - *poffset = offset; *palignment = alignment / BITS_PER_UNIT; return exp; } /* Subroutine of expand_exp: compute memory_usage from modifier. */ + static enum memory_use_mode get_memory_usage_from_modifier (modifier) enum expand_modifier modifier; @@ -6615,8 +6574,7 @@ expand_expr (exp, target, tmode, modifier) { enum machine_mode mode1; - int bitsize; - int bitpos; + HOST_WIDE_INT bitsize, bitpos; tree offset; int volatilep = 0; unsigned int alignment; @@ -8616,8 +8574,7 @@ expand_expr_unaligned (exp, palign) { enum machine_mode mode1; - int bitsize; - int bitpos; + HOST_WIDE_INT bitsize, bitpos; tree offset; int volatilep = 0; unsigned int alignment; @@ -9350,7 +9307,8 @@ do_jump (exp, if_false_label, if_true_label) case BIT_FIELD_REF: case ARRAY_REF: { - int bitsize, bitpos, unsignedp; + HOST_WIDE_INT bitsize, bitpos; + int unsignedp; enum machine_mode mode; tree type; tree offset; -- cgit v1.1