author     Jerry Quinn <jlquinn@optonline.net>       2004-07-01 12:52:53 +0000
committer  Jerry Quinn <jlquinn@gcc.gnu.org>         2004-07-01 12:52:53 +0000
commit     3c0cb5de6a4e067a36096c2141384bf9e3f520b6 (patch)
tree       f48d60264473680791f8223b21f0c82f2666fcba /gcc/combine.c
parent     8436e65ac6e638dd045098516f37d4f2c685a3e1 (diff)
alias.c (get_alias_set, [...]): Use MEM_P.
2004-07-01 Jerry Quinn <jlquinn@optonline.net>
* alias.c (get_alias_set, canon_rtx, get_addr,
nonoverlapping_memrefs_p, nonlocal_referenced_p_1, memory_modified_1):
Use MEM_P.
* builtins.c (expand_builtin_prefetch, expand_builtin_profile_func,
expand_builtin): Likewise.
* calls.c (expand_call, emit_library_call_value_1, store_one_arg):
Likewise.
* combine.c (can_combine_p, combinable_i3pat, try_combine,
find_split_point, combine_simplify_rtx, simplify_set, make_extraction,
rtx_equal_for_field_assignment_p, gen_lowpart_for_combine,
record_dead_and_set_regs_1, get_last_value_validate,
mark_used_regs_combine, move_deaths, unmentioned_reg_p_1): Likewise.
* cse.c (check_dependence, canon_hash, equiv_constant,
gen_lowpart_if_possible, cse_insn, invalidate_from_clobbers,
cse_around_loop, cse_check_loop_start, cse_set_around_loop,
count_reg_usage): Likewise.
* cselib.c (rtx_equal_for_cselib_p, add_mem_for_addr, cselib_lookup,
cselib_invalidate_mem, cselib_invalidate_rtx, cselib_record_set,
cselib_record_sets): Likewise.
* dbxout.c (PARM_PASSED_IN_MEMORY, dbxout_symbol,
dbxout_symbol_location, dbxout_parms, dbxout_reg_parms): Likewise.
* ddg.c (mark_mem_use, mark_mem_store, rtx_mem_access_p): Likewise.
* df.c (df_uses_record): Likewise.
	* dojump.c (do_jump): Likewise.
* dwarf2out.c (stack_adjust_offset, mem_loc_descriptor,
loc_descriptor_from_tree, rtl_for_decl_location, add_bound_info,
decl_start_label): Likewise.
* emit-rtl.c (gen_complex_constant_part, gen_highpart,
operand_subword, change_address_1, make_safe_from): Likewise.
* explow.c (break_out_memory_refs, copy_all_regs, validize_mem,
stabilize, force_not_mem): Likewise.
* expmed.c (store_bit_field, store_split_bit_field, extract_bit_field,
expand_mult_const, expand_divmod, emit_store_flag): Likewise.
* expr.c (convert_move, convert_modes, emit_block_move,
emit_group_load, emit_group_store, clear_storage, emit_move_insn,
emit_move_insn_1, expand_assignment, store_expr,
store_constructor_field, store_constructor, store_field,
force_operand, safe_from_p, expand_expr_real_1, expand_increment):
Likewise.
* final.c (cleanup_subreg_operands, alter_subreg,
get_mem_expr_from_op): Likewise.
* flow.c (notice_stack_pointer_modification_1,
init_propagate_block_info, insn_dead_p, mark_set_1, mark_used_regs):
Likewise.
* function.c (mark_temp_addr_taken, preserve_temp_slots,
preserve_rtl_expr_result, put_var_into_stack, fixup_var_refs_1,
optimize_bit_field, flush_addressof, purge_addressof_1,
instantiate_decl, instantiate_virtual_regs_1, assign_parms,
setjmp_protect, setjmp_protect_args, fix_lexical_addr,
keep_stack_depressed): Likewise.
* ifcvt.c (noce_try_cmove_arith, noce_try_abs, noce_operand_ok,
noce_process_if_block, find_memory): Likewise.
* integrate.c (subst_constants, allocate_initial_values): Likewise.
* local-alloc.c (validate_equiv_mem_from_store, memref_referenced_p,
update_equiv_regs): Likewise.
* loop.c (scan_loop, prescan_loop, note_addr_stored, check_store,
maybe_eliminate_biv_1, find_mem_in_note_1): Likewise.
* optabs.c (expand_abs, emit_unop_insn): Likewise.
* passes.c (rest_of_handle_final): Likewise.
* postreload.c (reload_cse_simplify_set, reload_cse_simplify_operands,
move2add_note_store): Likewise.
* ra-build.c (detect_remat_webs): Likewise.
* ra-debug.c (dump_static_insn_cost): Likewise.
* ra-rewrite.c (slots_overlap_p, insert_stores): Likewise.
* recog.c (validate_change, apply_change_group, cancel_changes,
validate_replace_rtx_1, general_operand, register_operand,
nonmemory_operand, push_operand, pop_operand, memory_operand,
indirect_operand, asm_operand_ok, offsettable_memref_p,
offsettable_nonstrict_memref_p, constrain_operands,
store_data_bypass_p): Likewise.
* reg-stack.c (subst_stack_regs_pat): Likewise.
* regclass.c (record_operand_costs, scan_one_insn, record_reg_classes,
copy_cost, reg_scan_mark_refs): Likewise.
* regmove.c (optimize_reg_copy_3, stack_memref_p,
combine_stack_adjustments_for_block): Likewise.
* regrename.c (copyprop_hardreg_forward_1): Likewise.
* reload.c (can_reload_into, push_reload, decompose, immune_p,
find_reloads, find_reloads_address, find_reloads_address_1,
reg_overlap_mentioned_for_reload_p, refers_to_mem_for_reload_p,
find_equiv_reg): Likewise.
* reload1.c (reload, eliminate_regs, eliminate_regs_in_insn,
reload_as_needed, choose_reload_regs, emit_input_reload_insns,
do_input_reload, emit_reload_insns, gen_reload, delete_output_reload,
delete_address_reloads): Likewise.
* resource.c (mark_referenced_resources): Likewise.
* rtlanal.c (get_jump_table_offset, count_occurrences,
reg_referenced_p, reg_set_p, set_of_1, set_noop_p,
reg_overlap_mentioned_p, note_uses, replace_regs, nonzero_bits1,
num_sign_bit_copies1): Likewise.
* rtlhooks.c (gen_lowpart_general): Likewise.
* sched-deps.c (sched_analyze_1, sched_analyze_2): Likewise.
* sdbout.c (PARM_PASSED_IN_MEMORY, sdbout_symbol,
sdbout_toplevel_data, sdbout_parms, sdbout_reg_parms,
sdbout_global_decl): Likewise.
* simplify-rtx.c (simplify_subreg): Likewise.
* stmt.c (expand_asm_operands, expand_expr_stmt_value, expand_decl,
expand_anon_union_decl, expand_end_case_type): Likewise.
* unroll.c (calculate_giv_inc): Likewise.
* var-tracking.c (stack_adjust_offset_pre_post,
bb_stack_adjust_offset, track_expr_p, count_uses, add_uses,
add_stores, compute_bb_dataflow, vt_get_decl_and_offset,
vt_add_function_parameters): Likewise.
* varasm.c (make_var_volatile, notice_global_symbol,
assemble_external, decode_addr_const, mark_weak,
default_encode_section_info): Likewise.
From-SVN: r83980
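
MEM_P is one of the rtx predicate macros declared in GCC's rtl.h, and the whole patch is a mechanical substitution of that macro for the open-coded GET_CODE (x) == MEM comparison. The following is a minimal, self-contained sketch of the pattern; the enum, struct and check_operand function are stand-ins invented purely for illustration and are not code from this patch, while the MEM_P definition mirrors the one in rtl.h.

    /* Sketch only: MEM_P mirrors the predicate macro in GCC's rtl.h.
       The enum, struct and check_operand below are illustrative stand-ins,
       not code from this patch.  */
    #include <stdio.h>

    enum rtx_code { REG, MEM, SUBREG };      /* tiny stand-in for GCC's rtx codes */
    struct rtx_def { enum rtx_code code; };  /* tiny stand-in for GCC's rtx */
    typedef struct rtx_def *rtx;

    #define GET_CODE(X) ((X)->code)
    #define MEM_P(X) (GET_CODE (X) == MEM)   /* the predicate this patch adopts */

    /* Before the patch a memory test was spelled out by hand:
           if (GET_CODE (x) == MEM) ...
       after it, the same test reads:
           if (MEM_P (x)) ...  */
    static int
    check_operand (rtx x)
    {
      return MEM_P (x);
    }

    int
    main (void)
    {
      struct rtx_def m = { MEM };
      struct rtx_def r = { REG };
      printf ("%d %d\n", check_operand (&m), check_operand (&r));  /* prints 1 0 */
      return 0;
    }

Beyond being shorter to read, funneling every such test through a single predicate macro leaves one place to adjust if the check ever needs to change, which is presumably the motivation for doing the conversion tree-wide.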
Diffstat (limited to 'gcc/combine.c')
-rw-r--r--  gcc/combine.c  60
1 file changed, 30 insertions, 30 deletions
diff --git a/gcc/combine.c b/gcc/combine.c
index 0762eda..533e4a8 100644
--- a/gcc/combine.c
+++ b/gcc/combine.c
@@ -1234,7 +1234,7 @@ can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
 	 are intervening stores.  Also, don't move a volatile asm or
 	 UNSPEC_VOLATILE across any other insns.  */
       || (! all_adjacent
-	  && (((GET_CODE (src) != MEM
+	  && (((!MEM_P (src)
 		|| ! find_reg_note (insn, REG_EQUIV, src))
 	       && use_crosses_set_p (src, INSN_CUID (insn)))
 	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
@@ -1433,7 +1433,7 @@ combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
 	 into the address of a MEM, so only prevent the combination
 	 if i1 or i2 set the same MEM.  */
       if ((inner_dest != dest &&
-	   (GET_CODE (inner_dest) != MEM
+	   (!MEM_P (inner_dest)
 	    || rtx_equal_p (i2dest, inner_dest)
 	    || (i1dest && rtx_equal_p (i1dest, inner_dest)))
 	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
@@ -1914,7 +1914,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
 #if 0
       if (!(GET_CODE (PATTERN (i3)) == SET
 	    && REG_P (SET_SRC (PATTERN (i3)))
-	    && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
+	    && MEM_P (SET_DEST (PATTERN (i3)))
 	    && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
 		|| GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
 	/* It's not the exception.  */
@@ -2414,7 +2414,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
 #ifdef INSN_SCHEDULING
 	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
 	     be written as a ZERO_EXTEND.  */
-	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
+	  if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
 	    {
 #ifdef LOAD_EXTEND_OP
 	      /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
@@ -3046,7 +3046,7 @@ find_split_point (rtx *loc, rtx insn)
 #ifdef INSN_SCHEDULING
       /* If we are making a paradoxical SUBREG invalid, it becomes a
	 split point.  */
-      if (GET_CODE (SUBREG_REG (x)) == MEM)
+      if (MEM_P (SUBREG_REG (x)))
 	return loc;
 #endif
       return find_split_point (&SUBREG_REG (x), insn);
@@ -3995,7 +3995,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
       /* Don't change the mode of the MEM if that would change the meaning
 	 of the address.  */
-      if (GET_CODE (SUBREG_REG (x)) == MEM
+      if (MEM_P (SUBREG_REG (x))
 	  && (MEM_VOLATILE_P (SUBREG_REG (x))
 	      || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
 	return gen_rtx_CLOBBER (mode, const0_rtx);
@@ -5358,7 +5358,7 @@ simplify_set (rtx x)
       && SUBREG_BYTE (src) == 0
       && (GET_MODE_SIZE (GET_MODE (src))
 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
-      && GET_CODE (SUBREG_REG (src)) == MEM)
+      && MEM_P (SUBREG_REG (src)))
     {
       SUBST (SET_SRC (x),
 	     gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
@@ -6139,7 +6139,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
 	 The subreg adds or removes high bits; its mode is
 	 irrelevant to the meaning of this extraction,
 	 since POS and LEN count from the lsb.  */
-      if (GET_CODE (SUBREG_REG (inner)) == MEM)
+      if (MEM_P (SUBREG_REG (inner)))
 	is_mode = GET_MODE (SUBREG_REG (inner));
       inner = SUBREG_REG (inner);
     }
@@ -6180,11 +6180,11 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
   if (tmode != BLKmode
       && ! (spans_byte && inner_mode != tmode)
       && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
-	   && GET_CODE (inner) != MEM
+	   && !MEM_P (inner)
 	   && (! in_dest
 	       || (REG_P (inner)
 		   && have_insn_for (STRICT_LOW_PART, tmode))))
-	  || (GET_CODE (inner) == MEM && pos_rtx == 0
+	  || (MEM_P (inner) && pos_rtx == 0
 	      && (pos % (STRICT_ALIGNMENT ?
 			 GET_MODE_ALIGNMENT (tmode) : BITS_PER_UNIT)) == 0
@@ -6202,7 +6202,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
      If INNER is not a MEM, get a piece consisting of just the field
      of interest (in this case POS % BITS_PER_WORD must be 0).  */

-  if (GET_CODE (inner) == MEM)
+  if (MEM_P (inner))
     {
       HOST_WIDE_INT offset;

@@ -6261,7 +6261,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
 	 make a STRICT_LOW_PART unless we made a MEM.  */

       if (in_dest)
-	return (GET_CODE (new) == MEM ? new
+	return (MEM_P (new) ? new
 		: (GET_CODE (new) != SUBREG
 		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
 		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
@@ -6312,7 +6312,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
      length is not 1.  In all other cases, we would only be going outside
      our object in cases when an original shift would have been
      undefined.  */
-  if (! spans_byte && GET_CODE (inner) == MEM
+  if (! spans_byte && MEM_P (inner)
       && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
 	  || (pos_rtx != 0 && len != 1)))
     return 0;
@@ -6355,7 +6355,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
   /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
      if we have to change the mode of memory and cannot, the desired mode
     is EXTRACTION_MODE.  */
-  if (GET_CODE (inner) != MEM)
+  if (!MEM_P (inner))
     wanted_inner_mode = wanted_inner_reg_mode;
   else if (inner_mode != wanted_inner_mode
 	   && (mode_dependent_address_p (XEXP (inner, 0))
@@ -6373,7 +6373,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
 	 If it's a MEM we need to recompute POS relative to that.
 	 However, if we're extracting from (or inserting into) a register,
 	 we want to recompute POS relative to wanted_inner_mode.  */
-      int width = (GET_CODE (inner) == MEM
+      int width = (MEM_P (inner)
 		   ? GET_MODE_BITSIZE (is_mode)
 		   : GET_MODE_BITSIZE (wanted_inner_mode));

@@ -6383,7 +6383,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
 	pos_rtx
 	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
       /* POS may be less than 0 now, but we check for that below.
-	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
+	 Note that it can only be less than 0 if !MEM_P (inner).  */
     }

   /* If INNER has a wider mode, make it smaller.  If this is a constant
@@ -6391,7 +6391,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
      the value.  */
   if (wanted_inner_mode != VOIDmode
       && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
-      && ((GET_CODE (inner) == MEM
+      && ((MEM_P (inner)
 	   && (inner_mode == wanted_inner_mode
 	       || (! mode_dependent_address_p (XEXP (inner, 0))
 		   && ! MEM_VOLATILE_P (inner))))))
@@ -6429,7 +6429,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
   /* If INNER is not memory, we can always get it into the proper mode.  If we
      are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
-  else if (GET_CODE (inner) != MEM)
+  else if (!MEM_P (inner))
     {
       if (GET_MODE (inner) != wanted_inner_mode
 	  && (pos_rtx != 0
@@ -7771,14 +7771,14 @@ rtx_equal_for_field_assignment_p (rtx x, rtx y)
   /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
      Note that all SUBREGs of MEM are paradoxical; otherwise they
     would have been rewritten.  */
-  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
-      && GET_CODE (SUBREG_REG (y)) == MEM
+  if (MEM_P (x) && GET_CODE (y) == SUBREG
+      && MEM_P (SUBREG_REG (y))
       && rtx_equal_p (SUBREG_REG (y),
 		      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
     return 1;

-  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
-      && GET_CODE (SUBREG_REG (x)) == MEM
+  if (MEM_P (y) && GET_CODE (x) == SUBREG
+      && MEM_P (SUBREG_REG (x))
       && rtx_equal_p (SUBREG_REG (x),
 		      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
     return 1;
@@ -9348,7 +9348,7 @@ gen_lowpart_for_combine (enum machine_mode mode, rtx x)
   /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
      won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
-  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
+  if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
     {
       x = SUBREG_REG (x);
       if (GET_MODE (x) == mode)
@@ -9369,7 +9369,7 @@ gen_lowpart_for_combine (enum machine_mode mode, rtx x)
   if (result)
     return result;

-  if (GET_CODE (x) == MEM)
+  if (MEM_P (x))
     {
       int offset = 0;

@@ -10899,7 +10899,7 @@ record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data)
       else
 	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
     }
-  else if (GET_CODE (dest) == MEM
+  else if (MEM_P (dest)
 	   /* Ignore pushes, they clobber nothing.  */
 	   && ! push_operand (dest, GET_MODE (dest)))
     mem_last_set = INSN_CUID (record_dead_insn);
@@ -11088,7 +11088,7 @@ get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
   /* If this is a memory reference, make sure that there were no stores after
      it that might have clobbered the value.  We don't have alias info, so we
     assume any store invalidates it.  */
-  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
+  else if (MEM_P (x) && ! RTX_UNCHANGING_P (x)
 	   && INSN_CUID (insn) <= mem_last_set)
     {
       if (replace)
@@ -11384,7 +11384,7 @@ mark_used_regs_combine (rtx x)
     case CLOBBER:
       /* If we are clobbering a MEM, mark any hard registers inside the
 	 address as used.  */
-      if (GET_CODE (XEXP (x, 0)) == MEM)
+      if (MEM_P (XEXP (x, 0)))
 	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
       return;

@@ -11425,7 +11425,7 @@ mark_used_regs_combine (rtx x)
 	       || GET_CODE (testreg) == STRICT_LOW_PART)
 	  testreg = XEXP (testreg, 0);

-	if (GET_CODE (testreg) == MEM)
+	if (MEM_P (testreg))
 	  mark_used_regs_combine (XEXP (testreg, 0));

 	mark_used_regs_combine (SET_SRC (x));
@@ -11624,7 +11624,7 @@ move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn,
 	 For a REG (the only other possibility), the entire value is
 	 being replaced so the old value is not used in this insn.  */

-      if (GET_CODE (dest) == MEM)
+      if (MEM_P (dest))
 	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
 		     to_insn, pnotes);
       return;
@@ -12356,7 +12356,7 @@ unmentioned_reg_p_1 (rtx *loc, void *expr)
   rtx x = *loc;

   if (x != NULL_RTX
-      && (REG_P (x) || GET_CODE (x) == MEM)
+      && (REG_P (x) || MEM_P (x))
       && ! reg_mentioned_p (x, (rtx) expr))
     return 1;

   return 0;