aboutsummaryrefslogtreecommitdiff
path: root/gcc/tree-ssa-sccvn.c
diff options
context:
space:
mode:
authorRichard Guenther <rguenther@suse.de>2010-07-01 08:49:19 +0000
committerRichard Biener <rguenth@gcc.gnu.org>2010-07-01 08:49:19 +0000
commit70f348148c09468b05aa09fcfa91b61611003c27 (patch)
tree4cc8d9c35ed3127dbf885a1f08a83776819bed41 /gcc/tree-ssa-sccvn.c
parent952b984e86f884d08d2e1ae5675ce518381692c5 (diff)
downloadgcc-70f348148c09468b05aa09fcfa91b61611003c27.zip
gcc-70f348148c09468b05aa09fcfa91b61611003c27.tar.gz
gcc-70f348148c09468b05aa09fcfa91b61611003c27.tar.bz2
re PR middle-end/42834 (memcpy folding overeager)
2010-07-01 Richard Guenther <rguenther@suse.de> PR middle-end/42834 PR middle-end/44468 * doc/gimple.texi (is_gimple_mem_ref_addr): Document. * doc/generic.texi (References to storage): Document MEM_REF. * tree-pretty-print.c (dump_generic_node): Handle MEM_REF. (print_call_name): Likewise. * tree.c (recompute_tree_invariant_for_addr_expr): Handle MEM_REF. (build_simple_mem_ref_loc): New function. (mem_ref_offset): Likewise. * tree.h (build_simple_mem_ref_loc): Declare. (build_simple_mem_ref): Define. (mem_ref_offset): Declare. * fold-const.c: Include tree-flow.h. (operand_equal_p): Handle MEM_REF. (build_fold_addr_expr_with_type_loc): Likewise. (fold_comparison): Likewise. (fold_unary_loc): Fold VIEW_CONVERT_EXPR <T1, MEM_REF <T2, ...>> to MEM_REF <T1, ...>. (fold_binary_loc): Fold MEM[&MEM[p, CST1], CST2] to MEM[p, CST1 + CST2], fold MEM[&a.b, CST2] to MEM[&a, offsetof (a, b) + CST2]. * tree-ssa-alias.c (ptr_deref_may_alias_decl_p): Handle MEM_REF. (ptr_deref_may_alias_ref_p_1): Likewise. (ao_ref_base_alias_set): Properly differentiate base object for offset and TBAA. (ao_ref_init_from_ptr_and_size): Use MEM_REF. (indirect_ref_may_alias_decl_p): Handle MEM_REFs properly. (indirect_refs_may_alias_p): Likewise. (refs_may_alias_p_1): Likewise. Remove pointer SSA name def chasing code. (ref_maybe_used_by_call_p_1): Handle MEM_REF. (call_may_clobber_ref_p_1): Likewise. * dwarf2out.c (loc_list_from_tree): Handle MEM_REF. * expr.c (expand_assignment): Handle MEM_REF. (store_expr): Handle MEM_REFs from STRING_CSTs. (store_field): If expanding a MEM_REF of a non-addressable decl use bitfield operations. (get_inner_reference): Handle MEM_REF. (expand_expr_addr_expr_1): Likewise. (expand_expr_real_1): Likewise. * tree-eh.c (tree_could_trap_p): Handle MEM_REF. * alias.c (ao_ref_from_mem): Handle MEM_REF. (get_alias_set): Likewise. Properly handle VIEW_CONVERT_EXPRs. * tree-data-ref.c (dr_analyze_innermost): Handle MEM_REF. (dr_analyze_indices): Likewise. 
(dr_analyze_alias): Likewise. (object_address_invariant_in_loop_p): Likewise. * gimplify.c (mark_addressable): Handle MEM_REF. (gimplify_cond_expr): Build MEM_REFs. (gimplify_modify_expr_to_memcpy): Likewise. (gimplify_init_ctor_preeval_1): Handle MEM_REF. (gimple_fold_indirect_ref): Adjust. (gimplify_expr): Handle MEM_REF. Gimplify INDIRECT_REF to MEM_REF. * tree.def (MEM_REF): New tree code. * tree-dfa.c: Include toplev.h. (get_ref_base_and_extent): Handle MEM_REF. (get_addr_base_and_unit_offset): New function. * emit-rtl.c (set_mem_attributes_minus_bitpos): Handle MEM_REF. * gimple-fold.c (may_propagate_address_into_dereference): Handle MEM_REF. (maybe_fold_offset_to_array_ref): Allow possibly out-of bounds accesses if the array has just one dimension. Remove always true parameter. Do not require type compatibility here. (maybe_fold_offset_to_component_ref): Remove. (maybe_fold_stmt_indirect): Remove. (maybe_fold_reference): Remove INDIRECT_REF handling. Fold back to non-MEM_REF. (maybe_fold_offset_to_address): Simplify. Deal with type mismatches here. (maybe_fold_reference): Likewise. (maybe_fold_stmt_addition): Likewise. Also handle &ARRAY + I in addition to &ARRAY[0] + I. (fold_gimple_assign): Handle ADDR_EXPR of MEM_REFs. (gimple_get_relevant_ref_binfo): Handle MEM_REF. * cfgexpand.c (expand_debug_expr): Handle MEM_REF. * tree-ssa.c (useless_type_conversion_p): Make most pointer conversions useless. (warn_uninitialized_var): Handle MEM_REF. (maybe_rewrite_mem_ref_base): New function. (execute_update_addresses_taken): Implement re-writing of MEM_REFs to SSA form. * tree-inline.c (remap_gimple_op_r): Handle MEM_REF, remove INDIRECT_REF handling. (copy_tree_body_r): Handle MEM_REF. * gimple.c (is_gimple_addressable): Adjust. (is_gimple_address): Likewise. (is_gimple_invariant_address): ADDR_EXPRs of MEM_REFs with invariant base are invariant. (is_gimple_min_lval): Adjust. (is_gimple_mem_ref_addr): New function. (get_base_address): Handle MEM_REF. 
(count_ptr_derefs): Likewise. (get_base_loadstore): Likewise. * gimple.h (is_gimple_mem_ref_addr): Declare. (gimple_call_fndecl): Handle invariant MEM_REF addresses. * tree-cfg.c (verify_address): New function, split out from ... (verify_expr): ... here. Use for verifying ADDR_EXPRs and the address operand of MEM_REFs. Verify MEM_REFs. Reject INDIRECT_REFs. (verify_types_in_gimple_min_lval): Handle MEM_REF. Disallow INDIRECT_REF. Allow conversions. (verify_types_in_gimple_reference): Verify VIEW_CONVERT_EXPR of a register does not change its size. (verify_types_in_gimple_reference): Verify MEM_REF. (verify_gimple_assign_single): Disallow INDIRECT_REF. Handle MEM_REF. * tree-ssa-operands.c (opf_non_addressable, opf_not_non_addressable): New. (mark_address_taken): Handle MEM_REF. (get_indirect_ref_operands): Pass through opf_not_non_addressable. (get_asm_expr_operands): Pass opf_not_non_addressable. (get_expr_operands): Handle opf_[not_]non_addressable. Handle MEM_REF. Remove INDIRECT_REF handling. * tree-vrp.c: (check_array_ref): Handle MEM_REF. (search_for_addr_array): Likewise. (check_array_bounds): Likewise. (vrp_stmt_computes_nonzero): Adjust for MEM_REF. * tree-ssa-loop-im.c (for_each_index): Handle MEM_REF. (ref_always_accessed_p): Likewise. (gen_lsm_tmp_name): Likewise. Handle ADDR_EXPR. * tree-complex.c (extract_component): Do not handle INDIRECT_REF. Handle MEM_REF. * cgraphbuild.c (mark_load): Properly check for NULL result from get_base_address. (mark_store): Likewise. * tree-ssa-loop-niter.c (array_at_struct_end_p): Handle MEM_REF. * tree-loop-distribution.c (generate_builtin): Exchange INDIRECT_REF handling for MEM_REF. * tree-scalar-evolution.c (follow_ssa_edge_expr): Handle &MEM[ptr + CST] similar to POINTER_PLUS_EXPR. * builtins.c (stabilize_va_list_loc): Use the function ABI valist type if we couldn't canonicalize the argument type. Always dereference with the canonical va-list type. (maybe_emit_free_warning): Handle MEM_REF. 
(fold_builtin_memory_op): Simplify and handle MEM_REFs in folding memmove to memcpy. * builtins.c (fold_builtin_memory_op): Use ref-all types for all memcpy foldings. * omp-low.c (build_receiver_ref): Adjust for MEM_REF. (build_outer_var_ref): Likewise. (scan_omp_1_op): Likewise. (lower_rec_input_clauses): Likewise. (lower_lastprivate_clauses): Likewise. (lower_reduction_clauses): Likewise. (lower_copyprivate_clauses): Likewise. (expand_omp_atomic_pipeline): Likewise. (expand_omp_atomic_mutex): Likewise. (create_task_copyfn): Likewise. * tree-ssa-sccvn.c (copy_reference_ops_from_ref): Handle MEM_REF. Remove old union trick. Initialize constant offsets. (ao_ref_init_from_vn_reference): Likewise. Do not handle INDIRECT_REF. Init base_alias_set properly. (vn_reference_lookup_3): Replace INDIRECT_REF handling with MEM_REF. (vn_reference_fold_indirect): Adjust for MEM_REFs. (valueize_refs): Fold MEM_REFs. Re-evaluate constant offset for ARRAY_REFs. (may_insert): Remove. (visit_reference_op_load): Do not test may_insert. (run_scc_vn): Remove parameter, do not fiddle with may_insert. * tree-ssa-sccvn.h (struct vn_reference_op_struct): Add a field to store the constant offset this op applies. (run_scc_vn): Adjust prototype. * cgraphunit.c (thunk_adjust): Adjust for MEM_REF. * tree-ssa-ccp.c (ccp_fold): Replace INDIRECT_REF folding with MEM_REF. Propagate &foo + CST as &MEM[&foo, CST]. Do not bother about volatile qualifiers on pointers. (fold_const_aggregate_ref): Handle MEM_REF, do not handle INDIRECT_REF. * tree-ssa-loop-ivopts.c * tree-ssa-loop-ivopts.c (determine_base_object): Adjust for MEM_REF. (strip_offset_1): Likewise. (find_interesting_uses_address): Replace INDIRECT_REF handling with MEM_REF handling. (get_computation_cost_at): Likewise. * ipa-pure-const.c (check_op): Handle MEM_REF. * tree-stdarg.c (check_all_va_list_escapes): Adjust for MEM_REF. * tree-ssa-sink.c (is_hidden_global_store): Handle MEM_REF and constants. 
* ipa-inline.c (likely_eliminated_by_inlining_p): Handle MEM_REF. * tree-parloops.c (take_address_of): Adjust for MEM_REF. (eliminate_local_variables_1): Likewise. (create_call_for_reduction_1): Likewise. (create_loads_for_reductions): Likewise. (create_loads_and_stores_for_name): Likewise. * matrix-reorg.c (may_flatten_matrices_1): Sanitize. (ssa_accessed_in_tree): Handle MEM_REF. (ssa_accessed_in_assign_rhs): Likewise. (update_type_size): Likewise. (analyze_accesses_for_call_stmt): Likewise. (analyze_accesses_for_assign_stmt): Likewise. (transform_access_sites): Likewise. (transform_allocation_sites): Likewise. * tree-affine.c (tree_to_aff_combination): Handle MEM_REF. * tree-vect-data-refs.c (vect_create_addr_base_for_vector_ref): Do not handle INDIRECT_REF. * tree-ssa-phiopt.c (add_or_mark_expr): Handle MEM_REF. (cond_store_replacement): Likewise. * tree-ssa-pre.c (create_component_ref_by_pieces_1): Handle MEM_REF, do not handle INDIRECT_REFs. (insert_into_preds_of_block): Properly initialize avail. (phi_translate_1): Fold MEM_REFs. Re-evaluate constant offset for ARRAY_REFs. Properly handle reference lookups that require a bit re-interpretation. (can_PRE_operation): Do not handle INDIRECT_REF. Handle MEM_REF. * tree-sra.c (build_access_from_expr_1): Handle MEM_REF. (build_ref_for_offset_1): Remove. (build_ref_for_offset): Build MEM_REFs. (gate_intra_sra): Disable for now. (sra_ipa_modify_expr): Handle MEM_REF. (ipa_early_sra_gate): Disable for now. * tree-sra.c (create_access): Swap INDIRECT_REF handling for MEM_REF handling. (disqualify_base_of_expr): Likewise. (ptr_parm_has_direct_uses): Swap INDIRECT_REF handling for MEM_REF handling. (sra_ipa_modify_expr): Remove INDIRECT_REF handling. Use mem_ref_offset. Remove bogus folding. (build_access_from_expr_1): Properly handle MEM_REF for non IPA-SRA. (make_fancy_name_1): Add support for MEM_REF. * tree-predcom.c (ref_at_iteration): Handle MEM_REFs. 
* tree-mudflap.c (mf_xform_derefs_1): Adjust for MEM_REF. * ipa-prop.c (compute_complex_assign_jump_func): Handle MEM_REF. (compute_complex_ancestor_jump_func): Likewise. (ipa_analyze_virtual_call_uses): Likewise. * tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Replace INDIRECT_REF folding with more generalized MEM_REF folding. (tree_ssa_forward_propagate_single_use_vars): Adjust accordingly. (forward_propagate_addr_into_variable_array_index): Also handle &ARRAY + I in addition to &ARRAY[0] + I. * tree-ssa-dce.c (ref_may_be_aliased): Handle MEM_REF. * tree-ssa-ter.c (find_replaceable_in_bb): Avoid TER if that creates assignments with overlap. * tree-nested.c (get_static_chain): Adjust for MEM_REF. (get_frame_field): Likewise. (get_nonlocal_debug_decl): Likewise. (convert_nonlocal_reference_op): Likewise. (struct nesting_info): Add mem_refs pointer-set. (create_nesting_tree): Allocate it. (convert_local_reference_op): Insert to be folded mem-refs. (fold_mem_refs): New function. (finalize_nesting_tree_1): Perform deferred folding of mem-refs. (free_nesting_tree): Free the pointer-set. * tree-vect-stmts.c (vectorizable_store): Adjust for MEM_REF. (vectorizable_load): Likewise. * tree-ssa-phiprop.c (phiprop_insert_phi): Adjust for MEM_REF. (propagate_with_phi): Likewise. * tree-object-size.c (addr_object_size): Handle MEM_REFs instead of INDIRECT_REFs. (compute_object_offset): Handle MEM_REF. (plus_stmt_object_size): Handle MEM_REF. (collect_object_sizes_for): Dispatch to plus_stmt_object_size for &MEM_REF. * tree-flow.h (get_addr_base_and_unit_offset): Declare. (symbol_marked_for_renaming): Likewise. * Makefile.in (tree-dfa.o): Add $(TOPLEV_H). (fold-const.o): Add $(TREE_FLOW_H). * tree-ssa-structalias.c (get_constraint_for_1): Handle MEM_REF. (find_func_clobbers): Likewise. * ipa-struct-reorg.c (decompose_indirect_ref_acc): Handle MEM_REF. (decompose_access): Likewise. (replace_field_acc): Likewise. (replace_field_access_stmt): Likewise. 
(insert_new_var_in_stmt): Likewise. (get_stmt_accesses): Likewise. (reorg_structs_drive): Disable. * config/i386/i386.c (ix86_va_start): Adjust for MEM_REF. (ix86_canonical_va_list_type): Likewise. cp/ * cp-gimplify.c (cp_gimplify_expr): Open-code the rhs predicate we are looking for, allow non-gimplified INDIRECT_REFs. testsuite/ * gcc.c-torture/execute/20100316-1.c: New testcase. * gcc.c-torture/execute/pr44468.c: Likewise. * gcc.c-torture/compile/20100609-1.c: Likewise. * gcc.dg/volatile2.c: Adjust. * gcc.dg/plugin/selfassign.c: Likewise. * gcc.dg/pr36902.c: Likewise. * gcc.dg/tree-ssa/foldaddr-2.c: Remove. * gcc.dg/tree-ssa/foldaddr-3.c: Likewise. * gcc.dg/tree-ssa/forwprop-8.c: Adjust. * gcc.dg/tree-ssa/pr17141-1.c: Likewise. * gcc.dg/tree-ssa/ssa-fre-13.c: Likewise. * gcc.dg/tree-ssa/ssa-fre-14.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-21.c: Likewise. * gcc.dg/tree-ssa/pta-ptrarith-1.c: Likewise. * gcc.dg/tree-ssa/20030807-7.c: Likewise. * gcc.dg/tree-ssa/forwprop-10.c: Likewise. * gcc.dg/tree-ssa/ssa-fre-1.c: Likewise. * gcc.dg/tree-ssa/pta-ptrarith-2.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-23.c: Likewise. * gcc.dg/tree-ssa/forwprop-1.c: Likewise. * gcc.dg/tree-ssa/forwprop-2.c: Likewise. * gcc.dg/tree-ssa/struct-aliasing-1.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-25.c: Likewise. * gcc.dg/tree-ssa/ssa-pre-26.c: Likewise. * gcc.dg/tree-ssa/struct-aliasing-2.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-26.c: Likewise. * gcc.dg/tree-ssa/ssa-sccvn-4.c: Likewise. * gcc.dg/tree-ssa/ssa-pre-7.c: Likewise. * gcc.dg/tree-ssa/forwprop-5.c: Likewise. * gcc.dg/struct/w_prof_two_strs.c: XFAIL. * gcc.dg/struct/wo_prof_escape_arg_to_local.c: Likewise. * gcc.dg/struct/wo_prof_global_var.c: Likewise. * gcc.dg/struct/wo_prof_malloc_size_var.c: Likewise. * gcc.dg/struct/w_prof_local_array.c: Likewise. * gcc.dg/struct/w_prof_single_str_global.c: Likewise. * gcc.dg/struct/wo_prof_escape_str_init.c: Likewise. * gcc.dg/struct/wo_prof_array_through_pointer.c: Likewise. 
* gcc.dg/struct/w_prof_global_array.c: Likewise. * gcc.dg/struct/wo_prof_array_field.c: Likewise. * gcc.dg/struct/wo_prof_single_str_local.c: Likewise. * gcc.dg/struct/w_prof_local_var.c: Likewise. * gcc.dg/struct/wo_prof_two_strs.c: Likewise. * gcc.dg/struct/wo_prof_empty_str.c: Likewise. * gcc.dg/struct/wo_prof_local_array.c: Likewise. * gcc.dg/struct/w_prof_global_var.c: Likewise. * gcc.dg/struct/wo_prof_single_str_global.c: Likewise. * gcc.dg/struct/wo_prof_escape_substr_value.c: Likewise. * gcc.dg/struct/wo_prof_global_array.c: Likewise. * gcc.dg/struct/wo_prof_escape_return.c: Likewise. * gcc.dg/struct/wo_prof_escape_substr_array.c: Likewise. * gcc.dg/struct/wo_prof_double_malloc.c: Likewise. * gcc.dg/struct/w_ratio_cold_str.c: Likewise. * gcc.dg/struct/wo_prof_escape_substr_pointer.c: Likewise. * gcc.dg/struct/wo_prof_local_var.c: Likewise. * gcc.dg/tree-prof/stringop-1.c: Adjust. * g++.dg/tree-ssa/pr31146.C: Likewise. * g++.dg/tree-ssa/copyprop-1.C: Likewise. * g++.dg/tree-ssa/pr33604.C: Likewise. * g++.dg/plugin/selfassign.c: Likewise. * gfortran.dg/array_memcpy_3.f90: Likewise. * gfortran.dg/array_memcpy_4.f90: Likewise. * c-c++-common/torture/pr42834.c: New testcase. From-SVN: r161655
Diffstat (limited to 'gcc/tree-ssa-sccvn.c')
-rw-r--r--gcc/tree-ssa-sccvn.c328
1 file changed, 216 insertions, 112 deletions
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index b27fe0c..cc66720 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -156,8 +156,6 @@ static unsigned int next_value_id;
static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;
-static bool may_insert;
-
DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
@@ -431,9 +429,41 @@ vn_reference_compute_hash (const vn_reference_t vr1)
hashval_t result = 0;
int i;
vn_reference_op_t vro;
+ HOST_WIDE_INT off = -1;
+ bool deref = false;
for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
- result = vn_reference_op_compute_hash (vro, result);
+ {
+ if (vro->opcode == MEM_REF)
+ deref = true;
+ else if (vro->opcode != ADDR_EXPR)
+ deref = false;
+ if (vro->off != -1)
+ {
+ if (off == -1)
+ off = 0;
+ off += vro->off;
+ }
+ else
+ {
+ if (off != -1
+ && off != 0)
+ result = iterative_hash_hashval_t (off, result);
+ off = -1;
+ if (deref
+ && vro->opcode == ADDR_EXPR)
+ {
+ if (vro->op0)
+ {
+ tree op = TREE_OPERAND (vro->op0, 0);
+ result = iterative_hash_hashval_t (TREE_CODE (op), result);
+ result = iterative_hash_expr (op, result);
+ }
+ }
+ else
+ result = vn_reference_op_compute_hash (vro, result);
+ }
+ }
if (vr1->vuse)
result += SSA_NAME_VERSION (vr1->vuse);
@@ -446,8 +476,7 @@ vn_reference_compute_hash (const vn_reference_t vr1)
int
vn_reference_eq (const void *p1, const void *p2)
{
- int i;
- vn_reference_op_t vro;
+ unsigned i, j;
const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
@@ -466,17 +495,58 @@ vn_reference_eq (const void *p1, const void *p2)
if (vr1->operands == vr2->operands)
return true;
- /* We require that address operands be canonicalized in a way that
- two memory references will have the same operands if they are
- equivalent. */
- if (VEC_length (vn_reference_op_s, vr1->operands)
- != VEC_length (vn_reference_op_s, vr2->operands))
+ if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
return false;
- for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
- if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
- vro))
- return false;
+ i = 0;
+ j = 0;
+ do
+ {
+ HOST_WIDE_INT off1 = 0, off2 = 0;
+ vn_reference_op_t vro1, vro2;
+ vn_reference_op_s tem1, tem2;
+ bool deref1 = false, deref2 = false;
+ for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
+ {
+ if (vro1->opcode == MEM_REF)
+ deref1 = true;
+ if (vro1->off == -1)
+ break;
+ off1 += vro1->off;
+ }
+ for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
+ {
+ if (vro2->opcode == MEM_REF)
+ deref2 = true;
+ if (vro2->off == -1)
+ break;
+ off2 += vro2->off;
+ }
+ if (off1 != off2)
+ return false;
+ if (deref1 && vro1->opcode == ADDR_EXPR)
+ {
+ memset (&tem1, 0, sizeof (tem1));
+ tem1.op0 = TREE_OPERAND (vro1->op0, 0);
+ tem1.type = TREE_TYPE (tem1.op0);
+ tem1.opcode = TREE_CODE (tem1.op0);
+ vro1 = &tem1;
+ }
+ if (deref2 && vro2->opcode == ADDR_EXPR)
+ {
+ memset (&tem2, 0, sizeof (tem2));
+ tem2.op0 = TREE_OPERAND (vro2->op0, 0);
+ tem2.type = TREE_TYPE (tem2.op0);
+ tem2.opcode = TREE_CODE (tem2.op0);
+ vro2 = &tem2;
+ }
+ if (!vn_reference_op_eq (vro1, vro2))
+ return false;
+ ++j;
+ ++i;
+ }
+ while (VEC_length (vn_reference_op_s, vr1->operands) != i
+ || VEC_length (vn_reference_op_s, vr2->operands) != j);
return true;
}
@@ -503,6 +573,7 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
temp.op0 = TMR_INDEX (ref);
temp.op1 = TMR_STEP (ref);
temp.op2 = TMR_OFFSET (ref);
+ temp.off = -1;
VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
memset (&temp, 0, sizeof (temp));
@@ -510,6 +581,7 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
temp.opcode = TREE_CODE (base);
temp.op0 = base;
temp.op1 = TMR_ORIGINAL (ref);
+ temp.off = -1;
VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
return;
}
@@ -524,17 +596,23 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
/* We do not care for spurious type qualifications. */
temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
temp.opcode = TREE_CODE (ref);
+ temp.off = -1;
switch (temp.opcode)
{
case ALIGN_INDIRECT_REF:
- case INDIRECT_REF:
/* The only operand is the address, which gets its own
vn_reference_op_s structure. */
break;
case MISALIGNED_INDIRECT_REF:
temp.op0 = TREE_OPERAND (ref, 1);
break;
+ case MEM_REF:
+ /* The base address gets its own vn_reference_op_s structure. */
+ temp.op0 = TREE_OPERAND (ref, 1);
+ if (host_integerp (TREE_OPERAND (ref, 1), 0))
+ temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
+ break;
case BIT_FIELD_REF:
/* Record bits and position. */
temp.op0 = TREE_OPERAND (ref, 1);
@@ -547,17 +625,25 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
temp.type = NULL_TREE;
temp.op0 = TREE_OPERAND (ref, 1);
temp.op1 = TREE_OPERAND (ref, 2);
- /* If this is a reference to a union member, record the union
- member size as operand. Do so only if we are doing
- expression insertion (during FRE), as PRE currently gets
- confused with this. */
- if (may_insert
- && temp.op1 == NULL_TREE
- && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
- && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
- && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
- && host_integerp (DECL_SIZE (temp.op0), 0))
- temp.op0 = DECL_SIZE (temp.op0);
+ {
+ tree this_offset = component_ref_field_offset (ref);
+ if (this_offset
+ && TREE_CODE (this_offset) == INTEGER_CST)
+ {
+ tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
+ if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
+ {
+ double_int off
+ = double_int_add (tree_to_double_int (this_offset),
+ double_int_sdiv
+ (tree_to_double_int (bit_offset),
+ uhwi_to_double_int (BITS_PER_UNIT),
+ TRUNC_DIV_EXPR));
+ if (double_int_fits_in_shwi_p (off))
+ temp.off = off.low;
+ }
+ }
+ }
break;
case ARRAY_RANGE_REF:
case ARRAY_REF:
@@ -566,6 +652,18 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
/* Always record lower bounds and element size. */
temp.op1 = array_ref_low_bound (ref);
temp.op2 = array_ref_element_size (ref);
+ if (TREE_CODE (temp.op0) == INTEGER_CST
+ && TREE_CODE (temp.op1) == INTEGER_CST
+ && TREE_CODE (temp.op2) == INTEGER_CST)
+ {
+ double_int off = tree_to_double_int (temp.op0);
+ off = double_int_add (off,
+ double_int_neg
+ (tree_to_double_int (temp.op1)));
+ off = double_int_mul (off, tree_to_double_int (temp.op2));
+ if (double_int_fits_in_shwi_p (off))
+ temp.off = off.low;
+ }
break;
case STRING_CST:
case INTEGER_CST:
@@ -592,9 +690,13 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
ref in the chain of references (IE they require an
operand), so we don't have to put anything
for op* as it will be handled by the iteration */
- case IMAGPART_EXPR:
case REALPART_EXPR:
case VIEW_CONVERT_EXPR:
+ temp.off = 0;
+ break;
+ case IMAGPART_EXPR:
+ /* This is only interesting for its constant offset. */
+ temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
break;
default:
gcc_unreachable ();
@@ -627,16 +729,12 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
HOST_WIDE_INT max_size;
HOST_WIDE_INT size = -1;
tree size_tree = NULL_TREE;
+ alias_set_type base_alias_set = -1;
/* First get the final access size from just the outermost expression. */
op = VEC_index (vn_reference_op_s, ops, 0);
if (op->opcode == COMPONENT_REF)
- {
- if (TREE_CODE (op->op0) == INTEGER_CST)
- size_tree = op->op0;
- else
- size_tree = DECL_SIZE (op->op0);
- }
+ size_tree = DECL_SIZE (op->op0);
else if (op->opcode == BIT_FIELD_REF)
size_tree = op->op0;
else
@@ -667,13 +765,31 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
{
/* These may be in the reference ops, but we cannot do anything
sensible with them here. */
- case CALL_EXPR:
case ADDR_EXPR:
+ /* Apart from ADDR_EXPR arguments to MEM_REF. */
+ if (base != NULL_TREE
+ && TREE_CODE (base) == MEM_REF
+ && op->op0
+ && DECL_P (TREE_OPERAND (op->op0, 0)))
+ {
+ vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
+ base = TREE_OPERAND (op->op0, 0);
+ if (pop->off == -1)
+ {
+ max_size = -1;
+ offset = 0;
+ }
+ else
+ offset += pop->off * BITS_PER_UNIT;
+ op0_p = NULL;
+ break;
+ }
+ /* Fallthru. */
+ case CALL_EXPR:
return false;
/* Record the base objects. */
case ALIGN_INDIRECT_REF:
- case INDIRECT_REF:
*op0_p = build1 (op->opcode, op->type, NULL_TREE);
op0_p = &TREE_OPERAND (*op0_p, 0);
break;
@@ -684,11 +800,19 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
op0_p = &TREE_OPERAND (*op0_p, 0);
break;
+ case MEM_REF:
+ base_alias_set = get_deref_alias_set (op->op0);
+ *op0_p = build2 (MEM_REF, op->type,
+ NULL_TREE, op->op0);
+ op0_p = &TREE_OPERAND (*op0_p, 0);
+ break;
+
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
case SSA_NAME:
*op0_p = op->op0;
+ op0_p = NULL;
break;
/* And now the usual component-reference style ops. */
@@ -703,11 +827,8 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
cannot use component_ref_field_offset. Do the interesting
parts manually. */
- /* Our union trick, done for offset zero only. */
- if (TREE_CODE (field) == INTEGER_CST)
- ;
- else if (op->op1
- || !host_integerp (DECL_FIELD_OFFSET (field), 1))
+ if (op->op1
+ || !host_integerp (DECL_FIELD_OFFSET (field), 1))
max_size = -1;
else
{
@@ -768,7 +889,10 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
ref->size = size;
ref->max_size = max_size;
ref->ref_alias_set = set;
- ref->base_alias_set = -1;
+ if (base_alias_set != -1)
+ ref->base_alias_set = base_alias_set;
+ else
+ ref->base_alias_set = get_alias_set (base);
return true;
}
@@ -789,6 +913,7 @@ copy_reference_ops_from_call (gimple call,
temp.opcode = CALL_EXPR;
temp.op0 = gimple_call_fn (call);
temp.op1 = gimple_call_chain (call);
+ temp.off = -1;
VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Copy the call arguments. As they can be references as well,
@@ -830,62 +955,30 @@ void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
unsigned int *i_p)
{
- VEC(vn_reference_op_s, heap) *mem = NULL;
- vn_reference_op_t op;
unsigned int i = *i_p;
- unsigned int j;
-
- /* Get ops for the addressed object. */
- op = VEC_index (vn_reference_op_s, *ops, i);
- /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
- around it to avoid later ICEs. */
- if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
- && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
- {
- vn_reference_op_s aref;
- tree dom;
- aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
- aref.opcode = ARRAY_REF;
- aref.op0 = integer_zero_node;
- if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
- && TYPE_MIN_VALUE (dom))
- aref.op0 = TYPE_MIN_VALUE (dom);
- aref.op1 = aref.op0;
- aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
- VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
- }
- copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
-
- /* Do the replacement - we should have at least one op in mem now. */
- if (VEC_length (vn_reference_op_s, mem) == 1)
- {
- VEC_replace (vn_reference_op_s, *ops, i - 1,
- VEC_index (vn_reference_op_s, mem, 0));
- VEC_ordered_remove (vn_reference_op_s, *ops, i);
- i--;
- }
- else if (VEC_length (vn_reference_op_s, mem) == 2)
- {
- VEC_replace (vn_reference_op_s, *ops, i - 1,
- VEC_index (vn_reference_op_s, mem, 0));
- VEC_replace (vn_reference_op_s, *ops, i,
- VEC_index (vn_reference_op_s, mem, 1));
- }
- else if (VEC_length (vn_reference_op_s, mem) > 2)
- {
- VEC_replace (vn_reference_op_s, *ops, i - 1,
- VEC_index (vn_reference_op_s, mem, 0));
- VEC_replace (vn_reference_op_s, *ops, i,
- VEC_index (vn_reference_op_s, mem, 1));
- /* ??? There is no VEC_splice. */
- for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
- VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
+ vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
+ vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
+ tree addr_base;
+ HOST_WIDE_INT addr_offset;
+
+ /* The only thing we have to do is from &OBJ.foo.bar add the offset
+ from .foo.bar to the preceeding MEM_REF offset and replace the
+ address with &OBJ. */
+ addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
+ &addr_offset);
+ gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
+ if (addr_base != op->op0)
+ {
+ double_int off = tree_to_double_int (mem_op->op0);
+ off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
+ off = double_int_add (off, shwi_to_double_int (addr_offset));
+ mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
+ op->op0 = build_fold_addr_expr (addr_base);
+ if (host_integerp (mem_op->op0, 0))
+ mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
+ else
+ mem_op->off = -1;
}
- else
- gcc_unreachable ();
-
- VEC_free (vn_reference_op_s, heap, mem);
- *i_p = i;
}
/* Optimize the reference REF to a constant if possible or return
@@ -978,20 +1071,35 @@ valueize_refs (VEC (vn_reference_op_s, heap) *orig)
the opcode. */
if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
vro->opcode = TREE_CODE (vro->op0);
- /* If it transforms from an SSA_NAME to an address, fold with
- a preceding indirect reference. */
- if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
- && VEC_index (vn_reference_op_s,
- orig, i - 1)->opcode == INDIRECT_REF)
- {
- vn_reference_fold_indirect (&orig, &i);
- continue;
- }
}
if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
vro->op1 = SSA_VAL (vro->op1);
if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
vro->op2 = SSA_VAL (vro->op2);
+ /* If it transforms from an SSA_NAME to an address, fold with
+ a preceding indirect reference. */
+ if (i > 0
+ && vro->op0
+ && TREE_CODE (vro->op0) == ADDR_EXPR
+ && VEC_index (vn_reference_op_s,
+ orig, i - 1)->opcode == MEM_REF)
+ vn_reference_fold_indirect (&orig, &i);
+ /* If it transforms a non-constant ARRAY_REF into a constant
+ one, adjust the constant offset. */
+ else if (vro->opcode == ARRAY_REF
+ && vro->off == -1
+ && TREE_CODE (vro->op0) == INTEGER_CST
+ && TREE_CODE (vro->op1) == INTEGER_CST
+ && TREE_CODE (vro->op2) == INTEGER_CST)
+ {
+ double_int off = tree_to_double_int (vro->op0);
+ off = double_int_add (off,
+ double_int_neg
+ (tree_to_double_int (vro->op1)));
+ off = double_int_mul (off, tree_to_double_int (vro->op2));
+ if (double_int_fits_in_shwi_p (off))
+ vro->off = off.low;
+ }
}
return orig;
@@ -1172,7 +1280,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
the copy kills ref. */
else if (gimple_assign_single_p (def_stmt)
&& (DECL_P (gimple_assign_rhs1 (def_stmt))
- || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
+ || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
|| handled_component_p (gimple_assign_rhs1 (def_stmt))))
{
tree base2;
@@ -2092,9 +2200,9 @@ visit_reference_op_load (tree lhs, tree op, gimple stmt)
result = vn_nary_op_lookup (val, NULL);
/* If the expression is not yet available, value-number lhs to
a new SSA_NAME we create. */
- if (!result && may_insert)
+ if (!result)
{
- result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
+ result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
/* Initialize value-number information properly. */
VN_INFO_GET (result)->valnum = result;
VN_INFO (result)->value_id = get_next_value_id ();
@@ -3266,14 +3374,12 @@ set_hashtable_value_ids (void)
due to resource constraints. */
bool
-run_scc_vn (bool may_insert_arg)
+run_scc_vn (void)
{
size_t i;
tree param;
bool changed = true;
- may_insert = may_insert_arg;
-
init_scc_vn ();
current_info = valid_info;
@@ -3297,7 +3403,6 @@ run_scc_vn (bool may_insert_arg)
if (!DFS (name))
{
free_scc_vn ();
- may_insert = false;
return false;
}
}
@@ -3359,7 +3464,6 @@ run_scc_vn (bool may_insert_arg)
}
}
- may_insert = false;
return true;
}