aboutsummaryrefslogtreecommitdiff
path: root/gcc/gimple-fold.c
diff options
context:
space:
mode:
authorRichard Guenther <rguenther@suse.de>2010-07-01 08:49:19 +0000
committerRichard Biener <rguenth@gcc.gnu.org>2010-07-01 08:49:19 +0000
commit70f348148c09468b05aa09fcfa91b61611003c27 (patch)
tree4cc8d9c35ed3127dbf885a1f08a83776819bed41 /gcc/gimple-fold.c
parent952b984e86f884d08d2e1ae5675ce518381692c5 (diff)
downloadgcc-70f348148c09468b05aa09fcfa91b61611003c27.zip
gcc-70f348148c09468b05aa09fcfa91b61611003c27.tar.gz
gcc-70f348148c09468b05aa09fcfa91b61611003c27.tar.bz2
re PR middle-end/42834 (memcpy folding overeager)
2010-07-01 Richard Guenther <rguenther@suse.de> PR middle-end/42834 PR middle-end/44468 * doc/gimple.texi (is_gimple_mem_ref_addr): Document. * doc/generic.texi (References to storage): Document MEM_REF. * tree-pretty-print.c (dump_generic_node): Handle MEM_REF. (print_call_name): Likewise. * tree.c (recompute_tree_invariant_for_addr_expr): Handle MEM_REF. (build_simple_mem_ref_loc): New function. (mem_ref_offset): Likewise. * tree.h (build_simple_mem_ref_loc): Declare. (build_simple_mem_ref): Define. (mem_ref_offset): Declare. * fold-const.c: Include tree-flow.h. (operand_equal_p): Handle MEM_REF. (build_fold_addr_expr_with_type_loc): Likewise. (fold_comparison): Likewise. (fold_unary_loc): Fold VIEW_CONVERT_EXPR <T1, MEM_REF <T2, ...>> to MEM_REF <T1, ...>. (fold_binary_loc): Fold MEM[&MEM[p, CST1], CST2] to MEM[p, CST1 + CST2], fold MEM[&a.b, CST2] to MEM[&a, offsetof (a, b) + CST2]. * tree-ssa-alias.c (ptr_deref_may_alias_decl_p): Handle MEM_REF. (ptr_deref_may_alias_ref_p_1): Likewise. (ao_ref_base_alias_set): Properly differentiate base object for offset and TBAA. (ao_ref_init_from_ptr_and_size): Use MEM_REF. (indirect_ref_may_alias_decl_p): Handle MEM_REFs properly. (indirect_refs_may_alias_p): Likewise. (refs_may_alias_p_1): Likewise. Remove pointer SSA name def chasing code. (ref_maybe_used_by_call_p_1): Handle MEM_REF. (call_may_clobber_ref_p_1): Likewise. * dwarf2out.c (loc_list_from_tree): Handle MEM_REF. * expr.c (expand_assignment): Handle MEM_REF. (store_expr): Handle MEM_REFs from STRING_CSTs. (store_field): If expanding a MEM_REF of a non-addressable decl use bitfield operations. (get_inner_reference): Handle MEM_REF. (expand_expr_addr_expr_1): Likewise. (expand_expr_real_1): Likewise. * tree-eh.c (tree_could_trap_p): Handle MEM_REF. * alias.c (ao_ref_from_mem): Handle MEM_REF. (get_alias_set): Likewise. Properly handle VIEW_CONVERT_EXPRs. * tree-data-ref.c (dr_analyze_innermost): Handle MEM_REF. (dr_analyze_indices): Likewise. 
(dr_analyze_alias): Likewise. (object_address_invariant_in_loop_p): Likewise. * gimplify.c (mark_addressable): Handle MEM_REF. (gimplify_cond_expr): Build MEM_REFs. (gimplify_modify_expr_to_memcpy): Likewise. (gimplify_init_ctor_preeval_1): Handle MEM_REF. (gimple_fold_indirect_ref): Adjust. (gimplify_expr): Handle MEM_REF. Gimplify INDIRECT_REF to MEM_REF. * tree.def (MEM_REF): New tree code. * tree-dfa.c: Include toplev.h. (get_ref_base_and_extent): Handle MEM_REF. (get_addr_base_and_unit_offset): New function. * emit-rtl.c (set_mem_attributes_minus_bitpos): Handle MEM_REF. * gimple-fold.c (may_propagate_address_into_dereference): Handle MEM_REF. (maybe_fold_offset_to_array_ref): Allow possibly out-of bounds accesses if the array has just one dimension. Remove always true parameter. Do not require type compatibility here. (maybe_fold_offset_to_component_ref): Remove. (maybe_fold_stmt_indirect): Remove. (maybe_fold_reference): Remove INDIRECT_REF handling. Fold back to non-MEM_REF. (maybe_fold_offset_to_address): Simplify. Deal with type mismatches here. (maybe_fold_reference): Likewise. (maybe_fold_stmt_addition): Likewise. Also handle &ARRAY + I in addition to &ARRAY[0] + I. (fold_gimple_assign): Handle ADDR_EXPR of MEM_REFs. (gimple_get_relevant_ref_binfo): Handle MEM_REF. * cfgexpand.c (expand_debug_expr): Handle MEM_REF. * tree-ssa.c (useless_type_conversion_p): Make most pointer conversions useless. (warn_uninitialized_var): Handle MEM_REF. (maybe_rewrite_mem_ref_base): New function. (execute_update_addresses_taken): Implement re-writing of MEM_REFs to SSA form. * tree-inline.c (remap_gimple_op_r): Handle MEM_REF, remove INDIRECT_REF handling. (copy_tree_body_r): Handle MEM_REF. * gimple.c (is_gimple_addressable): Adjust. (is_gimple_address): Likewise. (is_gimple_invariant_address): ADDR_EXPRs of MEM_REFs with invariant base are invariant. (is_gimple_min_lval): Adjust. (is_gimple_mem_ref_addr): New function. (get_base_address): Handle MEM_REF. 
(count_ptr_derefs): Likewise. (get_base_loadstore): Likewise. * gimple.h (is_gimple_mem_ref_addr): Declare. (gimple_call_fndecl): Handle invariant MEM_REF addresses. * tree-cfg.c (verify_address): New function, split out from ... (verify_expr): ... here. Use for verifying ADDR_EXPRs and the address operand of MEM_REFs. Verify MEM_REFs. Reject INDIRECT_REFs. (verify_types_in_gimple_min_lval): Handle MEM_REF. Disallow INDIRECT_REF. Allow conversions. (verify_types_in_gimple_reference): Verify VIEW_CONVERT_EXPR of a register does not change its size. (verify_types_in_gimple_reference): Verify MEM_REF. (verify_gimple_assign_single): Disallow INDIRECT_REF. Handle MEM_REF. * tree-ssa-operands.c (opf_non_addressable, opf_not_non_addressable): New. (mark_address_taken): Handle MEM_REF. (get_indirect_ref_operands): Pass through opf_not_non_addressable. (get_asm_expr_operands): Pass opf_not_non_addressable. (get_expr_operands): Handle opf_[not_]non_addressable. Handle MEM_REF. Remove INDIRECT_REF handling. * tree-vrp.c: (check_array_ref): Handle MEM_REF. (search_for_addr_array): Likewise. (check_array_bounds): Likewise. (vrp_stmt_computes_nonzero): Adjust for MEM_REF. * tree-ssa-loop-im.c (for_each_index): Handle MEM_REF. (ref_always_accessed_p): Likewise. (gen_lsm_tmp_name): Likewise. Handle ADDR_EXPR. * tree-complex.c (extract_component): Do not handle INDIRECT_REF. Handle MEM_REF. * cgraphbuild.c (mark_load): Properly check for NULL result from get_base_address. (mark_store): Likewise. * tree-ssa-loop-niter.c (array_at_struct_end_p): Handle MEM_REF. * tree-loop-distribution.c (generate_builtin): Exchange INDIRECT_REF handling for MEM_REF. * tree-scalar-evolution.c (follow_ssa_edge_expr): Handle &MEM[ptr + CST] similar to POINTER_PLUS_EXPR. * builtins.c (stabilize_va_list_loc): Use the function ABI valist type if we couldn't canonicalize the argument type. Always dereference with the canonical va-list type. (maybe_emit_free_warning): Handle MEM_REF. 
(fold_builtin_memory_op): Simplify and handle MEM_REFs in folding memmove to memcpy. * builtins.c (fold_builtin_memory_op): Use ref-all types for all memcpy foldings. * omp-low.c (build_receiver_ref): Adjust for MEM_REF. (build_outer_var_ref): Likewise. (scan_omp_1_op): Likewise. (lower_rec_input_clauses): Likewise. (lower_lastprivate_clauses): Likewise. (lower_reduction_clauses): Likewise. (lower_copyprivate_clauses): Likewise. (expand_omp_atomic_pipeline): Likewise. (expand_omp_atomic_mutex): Likewise. (create_task_copyfn): Likewise. * tree-ssa-sccvn.c (copy_reference_ops_from_ref): Handle MEM_REF. Remove old union trick. Initialize constant offsets. (ao_ref_init_from_vn_reference): Likewise. Do not handle INDIRECT_REF. Init base_alias_set properly. (vn_reference_lookup_3): Replace INDIRECT_REF handling with MEM_REF. (vn_reference_fold_indirect): Adjust for MEM_REFs. (valueize_refs): Fold MEM_REFs. Re-evaluate constant offset for ARRAY_REFs. (may_insert): Remove. (visit_reference_op_load): Do not test may_insert. (run_scc_vn): Remove parameter, do not fiddle with may_insert. * tree-ssa-sccvn.h (struct vn_reference_op_struct): Add a field to store the constant offset this op applies. (run_scc_vn): Adjust prototype. * cgraphunit.c (thunk_adjust): Adjust for MEM_REF. * tree-ssa-ccp.c (ccp_fold): Replace INDIRECT_REF folding with MEM_REF. Propagate &foo + CST as &MEM[&foo, CST]. Do not bother about volatile qualifiers on pointers. (fold_const_aggregate_ref): Handle MEM_REF, do not handle INDIRECT_REF. * tree-ssa-loop-ivopts.c * tree-ssa-loop-ivopts.c (determine_base_object): Adjust for MEM_REF. (strip_offset_1): Likewise. (find_interesting_uses_address): Replace INDIRECT_REF handling with MEM_REF handling. (get_computation_cost_at): Likewise. * ipa-pure-const.c (check_op): Handle MEM_REF. * tree-stdarg.c (check_all_va_list_escapes): Adjust for MEM_REF. * tree-ssa-sink.c (is_hidden_global_store): Handle MEM_REF and constants. 
* ipa-inline.c (likely_eliminated_by_inlining_p): Handle MEM_REF. * tree-parloops.c (take_address_of): Adjust for MEM_REF. (eliminate_local_variables_1): Likewise. (create_call_for_reduction_1): Likewise. (create_loads_for_reductions): Likewise. (create_loads_and_stores_for_name): Likewise. * matrix-reorg.c (may_flatten_matrices_1): Sanitize. (ssa_accessed_in_tree): Handle MEM_REF. (ssa_accessed_in_assign_rhs): Likewise. (update_type_size): Likewise. (analyze_accesses_for_call_stmt): Likewise. (analyze_accesses_for_assign_stmt): Likewise. (transform_access_sites): Likewise. (transform_allocation_sites): Likewise. * tree-affine.c (tree_to_aff_combination): Handle MEM_REF. * tree-vect-data-refs.c (vect_create_addr_base_for_vector_ref): Do not handle INDIRECT_REF. * tree-ssa-phiopt.c (add_or_mark_expr): Handle MEM_REF. (cond_store_replacement): Likewise. * tree-ssa-pre.c (create_component_ref_by_pieces_1): Handle MEM_REF, do not handle INDIRECT_REFs. (insert_into_preds_of_block): Properly initialize avail. (phi_translate_1): Fold MEM_REFs. Re-evaluate constant offset for ARRAY_REFs. Properly handle reference lookups that require a bit re-interpretation. (can_PRE_operation): Do not handle INDIRECT_REF. Handle MEM_REF. * tree-sra.c (build_access_from_expr_1): Handle MEM_REF. (build_ref_for_offset_1): Remove. (build_ref_for_offset): Build MEM_REFs. (gate_intra_sra): Disable for now. (sra_ipa_modify_expr): Handle MEM_REF. (ipa_early_sra_gate): Disable for now. * tree-sra.c (create_access): Swap INDIRECT_REF handling for MEM_REF handling. (disqualify_base_of_expr): Likewise. (ptr_parm_has_direct_uses): Swap INDIRECT_REF handling for MEM_REF handling. (sra_ipa_modify_expr): Remove INDIRECT_REF handling. Use mem_ref_offset. Remove bogus folding. (build_access_from_expr_1): Properly handle MEM_REF for non IPA-SRA. (make_fancy_name_1): Add support for MEM_REF. * tree-predcom.c (ref_at_iteration): Handle MEM_REFs. 
* tree-mudflap.c (mf_xform_derefs_1): Adjust for MEM_REF. * ipa-prop.c (compute_complex_assign_jump_func): Handle MEM_REF. (compute_complex_ancestor_jump_func): Likewise. (ipa_analyze_virtual_call_uses): Likewise. * tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Replace INDIRECT_REF folding with more generalized MEM_REF folding. (tree_ssa_forward_propagate_single_use_vars): Adjust accordingly. (forward_propagate_addr_into_variable_array_index): Also handle &ARRAY + I in addition to &ARRAY[0] + I. * tree-ssa-dce.c (ref_may_be_aliased): Handle MEM_REF. * tree-ssa-ter.c (find_replaceable_in_bb): Avoid TER if that creates assignments with overlap. * tree-nested.c (get_static_chain): Adjust for MEM_REF. (get_frame_field): Likewise. (get_nonlocal_debug_decl): Likewise. (convert_nonlocal_reference_op): Likewise. (struct nesting_info): Add mem_refs pointer-set. (create_nesting_tree): Allocate it. (convert_local_reference_op): Insert to be folded mem-refs. (fold_mem_refs): New function. (finalize_nesting_tree_1): Perform deferred folding of mem-refs. (free_nesting_tree): Free the pointer-set. * tree-vect-stmts.c (vectorizable_store): Adjust for MEM_REF. (vectorizable_load): Likewise. * tree-ssa-phiprop.c (phiprop_insert_phi): Adjust for MEM_REF. (propagate_with_phi): Likewise. * tree-object-size.c (addr_object_size): Handle MEM_REFs instead of INDIRECT_REFs. (compute_object_offset): Handle MEM_REF. (plus_stmt_object_size): Handle MEM_REF. (collect_object_sizes_for): Dispatch to plus_stmt_object_size for &MEM_REF. * tree-flow.h (get_addr_base_and_unit_offset): Declare. (symbol_marked_for_renaming): Likewise. * Makefile.in (tree-dfa.o): Add $(TOPLEV_H). (fold-const.o): Add $(TREE_FLOW_H). * tree-ssa-structalias.c (get_constraint_for_1): Handle MEM_REF. (find_func_clobbers): Likewise. * ipa-struct-reorg.c (decompose_indirect_ref_acc): Handle MEM_REF. (decompose_access): Likewise. (replace_field_acc): Likewise. (replace_field_access_stmt): Likewise. 
(insert_new_var_in_stmt): Likewise. (get_stmt_accesses): Likewise. (reorg_structs_drive): Disable. * config/i386/i386.c (ix86_va_start): Adjust for MEM_REF. (ix86_canonical_va_list_type): Likewise. cp/ * cp-gimplify.c (cp_gimplify_expr): Open-code the rhs predicate we are looking for, allow non-gimplified INDIRECT_REFs. testsuite/ * gcc.c-torture/execute/20100316-1.c: New testcase. * gcc.c-torture/execute/pr44468.c: Likewise. * gcc.c-torture/compile/20100609-1.c: Likewise. * gcc.dg/volatile2.c: Adjust. * gcc.dg/plugin/selfassign.c: Likewise. * gcc.dg/pr36902.c: Likewise. * gcc.dg/tree-ssa/foldaddr-2.c: Remove. * gcc.dg/tree-ssa/foldaddr-3.c: Likewise. * gcc.dg/tree-ssa/forwprop-8.c: Adjust. * gcc.dg/tree-ssa/pr17141-1.c: Likewise. * gcc.dg/tree-ssa/ssa-fre-13.c: Likewise. * gcc.dg/tree-ssa/ssa-fre-14.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-21.c: Likewise. * gcc.dg/tree-ssa/pta-ptrarith-1.c: Likewise. * gcc.dg/tree-ssa/20030807-7.c: Likewise. * gcc.dg/tree-ssa/forwprop-10.c: Likewise. * gcc.dg/tree-ssa/ssa-fre-1.c: Likewise. * gcc.dg/tree-ssa/pta-ptrarith-2.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-23.c: Likewise. * gcc.dg/tree-ssa/forwprop-1.c: Likewise. * gcc.dg/tree-ssa/forwprop-2.c: Likewise. * gcc.dg/tree-ssa/struct-aliasing-1.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-25.c: Likewise. * gcc.dg/tree-ssa/ssa-pre-26.c: Likewise. * gcc.dg/tree-ssa/struct-aliasing-2.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-26.c: Likewise. * gcc.dg/tree-ssa/ssa-sccvn-4.c: Likewise. * gcc.dg/tree-ssa/ssa-pre-7.c: Likewise. * gcc.dg/tree-ssa/forwprop-5.c: Likewise. * gcc.dg/struct/w_prof_two_strs.c: XFAIL. * gcc.dg/struct/wo_prof_escape_arg_to_local.c: Likewise. * gcc.dg/struct/wo_prof_global_var.c: Likewise. * gcc.dg/struct/wo_prof_malloc_size_var.c: Likewise. * gcc.dg/struct/w_prof_local_array.c: Likewise. * gcc.dg/struct/w_prof_single_str_global.c: Likewise. * gcc.dg/struct/wo_prof_escape_str_init.c: Likewise. * gcc.dg/struct/wo_prof_array_through_pointer.c: Likewise. 
* gcc.dg/struct/w_prof_global_array.c: Likewise. * gcc.dg/struct/wo_prof_array_field.c: Likewise. * gcc.dg/struct/wo_prof_single_str_local.c: Likewise. * gcc.dg/struct/w_prof_local_var.c: Likewise. * gcc.dg/struct/wo_prof_two_strs.c: Likewise. * gcc.dg/struct/wo_prof_empty_str.c: Likewise. * gcc.dg/struct/wo_prof_local_array.c: Likewise. * gcc.dg/struct/w_prof_global_var.c: Likewise. * gcc.dg/struct/wo_prof_single_str_global.c: Likewise. * gcc.dg/struct/wo_prof_escape_substr_value.c: Likewise. * gcc.dg/struct/wo_prof_global_array.c: Likewise. * gcc.dg/struct/wo_prof_escape_return.c: Likewise. * gcc.dg/struct/wo_prof_escape_substr_array.c: Likewise. * gcc.dg/struct/wo_prof_double_malloc.c: Likewise. * gcc.dg/struct/w_ratio_cold_str.c: Likewise. * gcc.dg/struct/wo_prof_escape_substr_pointer.c: Likewise. * gcc.dg/struct/wo_prof_local_var.c: Likewise. * gcc.dg/tree-prof/stringop-1.c: Adjust. * g++.dg/tree-ssa/pr31146.C: Likewise. * g++.dg/tree-ssa/copyprop-1.C: Likewise. * g++.dg/tree-ssa/pr33604.C: Likewise. * g++.dg/plugin/selfassign.c: Likewise. * gfortran.dg/array_memcpy_3.f90: Likewise. * gfortran.dg/array_memcpy_4.f90: Likewise. * c-c++-common/torture/pr42834.c: New testcase. From-SVN: r161655
Diffstat (limited to 'gcc/gimple-fold.c')
-rw-r--r--gcc/gimple-fold.c493
1 file changed, 124 insertions, 369 deletions
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index a1fc020..6af6511 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -82,7 +82,7 @@ get_symbol_constant_value (tree sym)
bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
- gcc_assert (INDIRECT_REF_P (deref)
+ gcc_assert (TREE_CODE (deref) == MEM_REF
&& TREE_CODE (addr) == ADDR_EXPR);
/* Don't propagate if ADDR's operand has incomplete type. */
@@ -108,15 +108,12 @@ may_propagate_address_into_dereference (tree addr, tree deref)
/* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
- BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
- is the desired result type.
+ BASE is an array type. OFFSET is a byte displacement.
LOC is the location of the original expression. */
static tree
-maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
- tree orig_type,
- bool allow_negative_idx)
+maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
tree array_type, elt_type, elt_size;
@@ -145,8 +142,6 @@ maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
if (TREE_CODE (array_type) != ARRAY_TYPE)
return NULL_TREE;
elt_type = TREE_TYPE (array_type);
- if (!useless_type_conversion_p (orig_type, elt_type))
- return NULL_TREE;
/* Use signed size type for intermediate computation on the index. */
idx_type = ssizetype;
@@ -219,34 +214,22 @@ maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
char *(c[4]);
c[3][2];
should not be simplified into (*c)[14] or tree-vrp will
- give false warnings. The same is true for
- struct A { long x; char d[0]; } *a;
- (char *)a - 4;
- which should be not folded to &a->d[-8]. */
- if (domain_type
- && TYPE_MAX_VALUE (domain_type)
- && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
+ give false warnings.
+ This is only an issue for multi-dimensional arrays. */
+ if (TREE_CODE (elt_type) == ARRAY_TYPE
+ && domain_type)
{
- tree up_bound = TYPE_MAX_VALUE (domain_type);
-
- if (tree_int_cst_lt (up_bound, idx)
- /* Accesses after the end of arrays of size 0 (gcc
- extension) and 1 are likely intentional ("struct
- hack"). */
- && compare_tree_int (up_bound, 1) > 0)
+ if (TYPE_MAX_VALUE (domain_type)
+ && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
+ && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
return NULL_TREE;
- }
- if (domain_type
- && TYPE_MIN_VALUE (domain_type))
- {
- if (!allow_negative_idx
- && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
- && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
+ else if (TYPE_MIN_VALUE (domain_type)
+ && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
+ && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
+ return NULL_TREE;
+ else if (compare_tree_int (idx, 0) < 0)
return NULL_TREE;
}
- else if (!allow_negative_idx
- && compare_tree_int (idx, 0) < 0)
- return NULL_TREE;
{
tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
@@ -256,340 +239,55 @@ maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
}
-/* Attempt to fold *(S+O) to S.X.
- BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
- is the desired result type.
-
- LOC is the location of the original expression. */
-
-static tree
-maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
- tree base, tree offset, tree orig_type)
-{
- tree f, t, field_type, tail_array_field, field_offset;
- tree ret;
- tree new_base;
-
- if (TREE_CODE (record_type) != RECORD_TYPE
- && TREE_CODE (record_type) != UNION_TYPE
- && TREE_CODE (record_type) != QUAL_UNION_TYPE)
- return NULL_TREE;
-
- /* Short-circuit silly cases. */
- if (useless_type_conversion_p (record_type, orig_type))
- return NULL_TREE;
-
- tail_array_field = NULL_TREE;
- for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
- {
- int cmp;
-
- if (TREE_CODE (f) != FIELD_DECL)
- continue;
- if (DECL_BIT_FIELD (f))
- continue;
-
- if (!DECL_FIELD_OFFSET (f))
- continue;
- field_offset = byte_position (f);
- if (TREE_CODE (field_offset) != INTEGER_CST)
- continue;
-
- /* ??? Java creates "interesting" fields for representing base classes.
- They have no name, and have no context. With no context, we get into
- trouble with nonoverlapping_component_refs_p. Skip them. */
- if (!DECL_FIELD_CONTEXT (f))
- continue;
-
- /* The previous array field isn't at the end. */
- tail_array_field = NULL_TREE;
-
- /* Check to see if this offset overlaps with the field. */
- cmp = tree_int_cst_compare (field_offset, offset);
- if (cmp > 0)
- continue;
-
- field_type = TREE_TYPE (f);
-
- /* Here we exactly match the offset being checked. If the types match,
- then we can return that field. */
- if (cmp == 0
- && useless_type_conversion_p (orig_type, field_type))
- {
- t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
- return t;
- }
-
- /* Don't care about offsets into the middle of scalars. */
- if (!AGGREGATE_TYPE_P (field_type))
- continue;
-
- /* Check for array at the end of the struct. This is often
- used as for flexible array members. We should be able to
- turn this into an array access anyway. */
- if (TREE_CODE (field_type) == ARRAY_TYPE)
- tail_array_field = f;
-
- /* Check the end of the field against the offset. */
- if (!DECL_SIZE_UNIT (f)
- || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
- continue;
- t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
- if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
- continue;
-
- /* If we matched, then set offset to the displacement into
- this field. */
- new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
- SET_EXPR_LOCATION (new_base, loc);
-
- /* Recurse to possibly find the match. */
- ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
- f == TYPE_FIELDS (record_type));
- if (ret)
- return ret;
- ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
- orig_type);
- if (ret)
- return ret;
- }
-
- if (!tail_array_field)
- return NULL_TREE;
-
- f = tail_array_field;
- field_type = TREE_TYPE (f);
- offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
-
- /* If we get here, we've got an aggregate field, and a possibly
- nonzero offset into them. Recurse and hope for a valid match. */
- base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
- SET_EXPR_LOCATION (base, loc);
-
- t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
- f == TYPE_FIELDS (record_type));
- if (t)
- return t;
- return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
- orig_type);
-}
-
-/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
- or BASE[index] or by combination of those.
-
+/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
LOC is the location of original expression.
- Before attempting the conversion strip off existing ADDR_EXPRs and
- handled component refs. */
+ Before attempting the conversion strip off existing ADDR_EXPRs. */
tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
tree orig_type)
{
tree ret;
- tree type;
STRIP_NOPS (base);
if (TREE_CODE (base) != ADDR_EXPR)
return NULL_TREE;
base = TREE_OPERAND (base, 0);
-
- /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
- so it needs to be removed and new COMPONENT_REF constructed.
- The wrong COMPONENT_REF are often constructed by folding the
- (type *)&object within the expression (type *)&object+offset */
- if (handled_component_p (base))
- {
- HOST_WIDE_INT sub_offset, size, maxsize;
- tree newbase;
- newbase = get_ref_base_and_extent (base, &sub_offset,
- &size, &maxsize);
- gcc_assert (newbase);
- if (size == maxsize
- && size != -1
- && !(sub_offset & (BITS_PER_UNIT - 1)))
- {
- base = newbase;
- if (sub_offset)
- offset = int_const_binop (PLUS_EXPR, offset,
- build_int_cst (TREE_TYPE (offset),
- sub_offset / BITS_PER_UNIT), 1);
- }
- }
- if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
+ if (types_compatible_p (orig_type, TREE_TYPE (base))
&& integer_zerop (offset))
return base;
- type = TREE_TYPE (base);
- ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
- if (!ret)
- ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);
-
- return ret;
+ ret = maybe_fold_offset_to_array_ref (loc, base, offset);
+ if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
+ return ret;
+ return NULL_TREE;
}
-/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
- or &BASE[index] or by combination of those.
-
- LOC is the location of the original expression.
-
- Before attempting the conversion strip off existing component refs. */
+/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
+ LOC is the location of the original expression. */
tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
tree orig_type)
{
- tree t;
+ tree base, ret;
- gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
- && POINTER_TYPE_P (orig_type));
-
- t = maybe_fold_offset_to_reference (loc, addr, offset,
- TREE_TYPE (orig_type));
- if (t != NULL_TREE)
- {
- tree orig = addr;
- tree ptr_type;
-
- /* For __builtin_object_size to function correctly we need to
- make sure not to fold address arithmetic so that we change
- reference from one array to another. This would happen for
- example for
-
- struct X { char s1[10]; char s2[10] } s;
- char *foo (void) { return &s.s2[-4]; }
-
- where we need to avoid generating &s.s1[6]. As the C and
- C++ frontends create different initial trees
- (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
- sophisticated comparisons here. Note that checking for the
- condition after the fact is easier than trying to avoid doing
- the folding. */
- STRIP_NOPS (orig);
- if (TREE_CODE (orig) == ADDR_EXPR)
- orig = TREE_OPERAND (orig, 0);
- if ((TREE_CODE (orig) == ARRAY_REF
- || (TREE_CODE (orig) == COMPONENT_REF
- && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
- && (TREE_CODE (t) == ARRAY_REF
- || TREE_CODE (t) == COMPONENT_REF)
- && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
- ? TREE_OPERAND (orig, 0) : orig,
- TREE_CODE (t) == ARRAY_REF
- ? TREE_OPERAND (t, 0) : t, 0))
- return NULL_TREE;
-
- ptr_type = build_pointer_type (TREE_TYPE (t));
- if (!useless_type_conversion_p (orig_type, ptr_type))
- return NULL_TREE;
- return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
- }
-
- return NULL_TREE;
-}
-
-/* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET).
- Return the simplified expression, or NULL if nothing could be done. */
-
-static tree
-maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
-{
- tree t;
- bool volatile_p = TREE_THIS_VOLATILE (expr);
- location_t loc = EXPR_LOCATION (expr);
-
- /* We may well have constructed a double-nested PLUS_EXPR via multiple
- substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
- are sometimes added. */
- base = fold (base);
- STRIP_TYPE_NOPS (base);
- TREE_OPERAND (expr, 0) = base;
-
- /* One possibility is that the address reduces to a string constant. */
- t = fold_read_from_constant_string (expr);
- if (t)
- return t;
-
- /* Add in any offset from a POINTER_PLUS_EXPR. */
- if (TREE_CODE (base) == POINTER_PLUS_EXPR)
+ STRIP_NOPS (addr);
+ if (TREE_CODE (addr) != ADDR_EXPR)
+ return NULL_TREE;
+ base = TREE_OPERAND (addr, 0);
+ ret = maybe_fold_offset_to_array_ref (loc, base, offset);
+ if (ret)
{
- tree offset2;
-
- offset2 = TREE_OPERAND (base, 1);
- if (TREE_CODE (offset2) != INTEGER_CST)
+ ret = build_fold_addr_expr (ret);
+ if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
return NULL_TREE;
- base = TREE_OPERAND (base, 0);
-
- offset = fold_convert (sizetype,
- int_const_binop (PLUS_EXPR, offset, offset2, 1));
+ SET_EXPR_LOCATION (ret, loc);
}
- if (TREE_CODE (base) == ADDR_EXPR)
- {
- tree base_addr = base;
-
- /* Strip the ADDR_EXPR. */
- base = TREE_OPERAND (base, 0);
-
- /* Fold away CONST_DECL to its value, if the type is scalar. */
- if (TREE_CODE (base) == CONST_DECL
- && is_gimple_min_invariant (DECL_INITIAL (base)))
- return DECL_INITIAL (base);
-
- /* If there is no offset involved simply return the folded base. */
- if (integer_zerop (offset))
- return base;
-
- /* Try folding *(&B+O) to B.X. */
- t = maybe_fold_offset_to_reference (loc, base_addr, offset,
- TREE_TYPE (expr));
- if (t)
- {
- /* Preserve volatileness of the original expression.
- We can end up with a plain decl here which is shared
- and we shouldn't mess with its flags. */
- if (!SSA_VAR_P (t))
- TREE_THIS_VOLATILE (t) = volatile_p;
- return t;
- }
- }
- else
- {
- /* We can get here for out-of-range string constant accesses,
- such as "_"[3]. Bail out of the entire substitution search
- and arrange for the entire statement to be replaced by a
- call to __builtin_trap. In all likelihood this will all be
- constant-folded away, but in the meantime we can't leave with
- something that get_expr_operands can't understand. */
-
- t = base;
- STRIP_NOPS (t);
- if (TREE_CODE (t) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
- {
- /* FIXME: Except that this causes problems elsewhere with dead
- code not being deleted, and we die in the rtl expanders
- because we failed to remove some ssa_name. In the meantime,
- just return zero. */
- /* FIXME2: This condition should be signaled by
- fold_read_from_constant_string directly, rather than
- re-checking for it here. */
- return integer_zero_node;
- }
-
- /* Try folding *(B+O) to B->X. Still an improvement. */
- if (POINTER_TYPE_P (TREE_TYPE (base)))
- {
- t = maybe_fold_offset_to_reference (loc, base, offset,
- TREE_TYPE (expr));
- if (t)
- return t;
- }
- }
-
- /* Otherwise we had an offset that we could not simplify. */
- return NULL_TREE;
+ return ret;
}
@@ -622,18 +320,17 @@ maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
/* Or op0 should now be A[0] and the non-constant offset defined
via a multiplication by the array element size. */
if (TREE_CODE (op0) == ARRAY_REF
- && integer_zerop (TREE_OPERAND (op0, 1))
- && TREE_CODE (op1) == SSA_NAME
- && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
- {
- gimple offset_def = SSA_NAME_DEF_STMT (op1);
- if (!is_gimple_assign (offset_def))
- return NULL_TREE;
-
/* As we will end up creating a variable index array access
in the outermost array dimension make sure there isn't
a more inner array that the index could overflow to. */
- if (TREE_CODE (TREE_OPERAND (op0, 0)) == ARRAY_REF)
+ && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
+ && integer_zerop (TREE_OPERAND (op0, 1))
+ && TREE_CODE (op1) == SSA_NAME)
+ {
+ gimple offset_def = SSA_NAME_DEF_STMT (op1);
+ tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
+ if (!host_integerp (elsz, 1)
+ || !is_gimple_assign (offset_def))
return NULL_TREE;
/* Do not build array references of something that we can't
@@ -644,15 +341,14 @@ maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
&& TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
- && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
- TYPE_SIZE_UNIT (TREE_TYPE (op0))))
+ && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
return build_fold_addr_expr
(build4 (ARRAY_REF, TREE_TYPE (op0),
TREE_OPERAND (op0, 0),
gimple_assign_rhs1 (offset_def),
TREE_OPERAND (op0, 2),
TREE_OPERAND (op0, 3)));
- else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
+ else if (integer_onep (elsz)
&& gimple_assign_rhs_code (offset_def) != MULT_EXPR)
return build_fold_addr_expr
(build4 (ARRAY_REF, TREE_TYPE (op0),
@@ -661,6 +357,38 @@ maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
TREE_OPERAND (op0, 2),
TREE_OPERAND (op0, 3)));
}
+ else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
+ /* Dto. */
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
+ && TREE_CODE (op1) == SSA_NAME)
+ {
+ gimple offset_def = SSA_NAME_DEF_STMT (op1);
+ tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
+ if (!host_integerp (elsz, 1)
+ || !is_gimple_assign (offset_def))
+ return NULL_TREE;
+
+ /* Do not build array references of something that we can't
+ see the true number of array dimensions for. */
+ if (!DECL_P (op0)
+ && !handled_component_p (op0))
+ return NULL_TREE;
+
+ if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
+ && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
+ return build_fold_addr_expr
+ (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
+ op0, gimple_assign_rhs1 (offset_def),
+ integer_zero_node, NULL_TREE));
+ else if (integer_onep (elsz)
+ && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
+ return build_fold_addr_expr
+ (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
+ op0, op1,
+ integer_zero_node, NULL_TREE));
+ }
+
return NULL_TREE;
}
@@ -715,13 +443,12 @@ maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
ptd_type = TREE_TYPE (TREE_TYPE (op0));
/* At which point we can try some of the same things as for indirects. */
- t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
- if (!t)
- t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
- ptd_type);
+ t = maybe_fold_offset_to_array_ref (loc, op0, op1);
if (t)
{
- t = build1 (ADDR_EXPR, res_type, t);
+ t = build_fold_addr_expr (t);
+ if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
+ return NULL_TREE;
SET_EXPR_LOCATION (t, loc);
}
@@ -759,19 +486,39 @@ maybe_fold_reference (tree expr, bool is_lhs)
while (handled_component_p (*t))
t = &TREE_OPERAND (*t, 0);
- if (TREE_CODE (*t) == INDIRECT_REF)
+ /* Fold back MEM_REFs to reference trees. */
+ if (TREE_CODE (*t) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
+ && integer_zerop (TREE_OPERAND (*t, 1))
+ && (TREE_THIS_VOLATILE (*t)
+ == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
+ && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
+ && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
+ == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
+ /* We have to look out here to not drop a required conversion
+ from the rhs to the lhs if is_lhs, but we don't have the
+ rhs here to verify that. Thus require strict type
+ compatibility. */
+ && types_compatible_p (TREE_TYPE (*t),
+ TREE_TYPE (TREE_OPERAND
+ (TREE_OPERAND (*t, 0), 0))))
{
- tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
- integer_zero_node);
- /* Avoid folding *"abc" = 5 into 'a' = 5. */
- if (is_lhs && tem && CONSTANT_CLASS_P (tem))
- tem = NULL_TREE;
- if (!tem
- && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
- /* If we had a good reason for propagating the address here,
- make sure we end up with valid gimple. See PR34989. */
- tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
-
+ tree tem;
+ *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
+ tem = maybe_fold_reference (expr, is_lhs);
+ if (tem)
+ return tem;
+ return expr;
+ }
+ /* Canonicalize MEM_REFs invariant address operand. */
+ else if (TREE_CODE (*t) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
+ && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
+ && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
+ {
+ tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
+ TREE_OPERAND (*t, 0),
+ TREE_OPERAND (*t, 1));
if (tem)
{
*t = tem;
@@ -863,10 +610,18 @@ fold_gimple_assign (gimple_stmt_iterator *si)
else if (TREE_CODE (rhs) == ADDR_EXPR)
{
- tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
- if (tem)
+ tree ref = TREE_OPERAND (rhs, 0);
+ tree tem = maybe_fold_reference (ref, true);
+ if (tem
+ && TREE_CODE (tem) == MEM_REF
+ && integer_zerop (TREE_OPERAND (tem, 1)))
+ result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
+ else if (tem)
result = fold_convert (TREE_TYPE (rhs),
build_fold_addr_expr_loc (loc, tem));
+ else if (TREE_CODE (ref) == MEM_REF
+ && integer_zerop (TREE_OPERAND (ref, 1)))
+ result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
}
else if (TREE_CODE (rhs) == CONSTRUCTOR
@@ -1580,7 +1335,7 @@ gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
return TYPE_BINFO (TREE_TYPE (ref));
else if (known_binfo
&& (TREE_CODE (ref) == SSA_NAME
- || TREE_CODE (ref) == INDIRECT_REF))
+ || TREE_CODE (ref) == MEM_REF))
return known_binfo;
else
return NULL_TREE;