author     Daniel Berlin <dberlin@dberlin.org>  2006-01-19 01:42:48 +0000
committer  Daniel Berlin <dberlin@gcc.gnu.org>  2006-01-19 01:42:48 +0000
commit     d16a5e36692853e74ed97c362b41fdca8d69752e (patch)
tree       1bea89da9c401e1b8e235d514d1d027b81c0eece /gcc/tree-ssa-operands.c
parent     c8db7d5c17414a130f023cf25b3fe12f6842599b (diff)
tree-ssa-operands.h (ssa_call_clobbered_cache_valid): Remove.
2006-01-16 Daniel Berlin <dberlin@dberlin.org>
* tree-ssa-operands.h (ssa_call_clobbered_cache_valid): Remove.
(ssa_ro_call_cache_valid): Ditto.
* tree-ssa-alias.c (sort_tags_by_id): New function.
(init_transitive_clobber_worklist): Ditto.
(add_to_worklist): Ditto.
(mark_aliases_call_clobbered): Ditto.
(compute_tag_properties): Ditto.
(set_initial_properties): Ditto.
(compute_call_clobbered): Ditto.
	(compute_may_aliases): Call compute_call_clobbered and do the
	grouping here.
(compute_flow_sensitive_aliasing): Remove clobbering related code.
(compute_flow_insensitive_aliasing): Grouping now happens in our
caller.
(setup_pointers_and_addressables): Remove clobbering related code.
(add_may_alias): Ditto.
(replace_may_alias): Ditto.
(get_nmt_for): Ditto.
	(create_global_var): Ditto.
(is_escape_site): Return an escape_type enumeration.
* tree-flow-inline.h (is_call_clobbered): Global var does not
imply call clobbered.
(mark_call_clobbered): Take a reason for marking this. Remove
marking of globalness, and cache invalidation.
(clear_call_clobbered): Remove cache invalidation code.
	* tree-dfa.c (dump_variable): If the details flag is on, dump the
	reason for escaping.
* tree-outof-ssa.c (create_temp): Copy escape mask from original
variable.
* tree-flow.h (struct ptr_info_def): Add escape mask member.
(struct var_ann_d): Ditto.
(enum escape_type): New.
(mark_call_clobbered): Adjust prototype.
* tree-ssa-structalias.c (update_alias_info): Unmodifiable vars
are never call clobbered.
Record reasons for escaping.
* tree-ssa-structalias.h (is_escape_site): Update prototype.
* tree-ssa-operands.c (ssa_call_clobbered_cache_valid): Remove.
(ssa_ro_call_cache_valid): Ditto.
(clobbered_v_may_defs): Ditto.
(clobbered_vuses): Ditto.
(ro_call_vuses): Ditto.
(clobber_stats): New.
(init_ssa_operands): Zero out clobber stats.
(fini_ssa_operands): Print out clobber stats.
	(get_call_expr_operands): Pass callee fndecl to
	add_call_read_ops.
(add_call_clobber_ops): Remove use of cache.
Add use of PURE_CONST information.
(add_call_read_ops): Remove use of cache.
Add use of static not_read information.
From-SVN: r109938
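
In outline: rather than keeping one globally cached vector of call-clobber
operands (invalidated whenever any variable's clobber state changed), each
variable now carries an escape mask recording why it escapes, and every call
site filters the call-clobbered set through that mask plus ipa-reference's
static not_read/not_written bitmaps. A minimal C sketch of the escape-mask
pattern follows; the struct, the helper, and every enum value except
ESCAPE_TO_PURE_CONST are hypothetical stand-ins, not the definitions the
patch adds to tree-flow.h.

    /* Illustrative sketch only, not GCC code.  An escape mask records WHY a
       variable escapes, as OR-ed enum bits, so a call site can ask "does this
       variable escape anywhere this call could clobber?" instead of treating
       every escaping variable as clobbered by every call.  */

    #include <stdio.h>

    /* Hypothetical reason bits; the patch itself only shows
       ESCAPE_TO_PURE_CONST.  */
    enum escape_type
    {
      NO_ESCAPE               = 0,
      ESCAPE_STORED_IN_GLOBAL = 1 << 0,
      ESCAPE_TO_CALL          = 1 << 1,
      ESCAPE_TO_PURE_CONST    = 1 << 2,
      ESCAPE_TO_RETURN        = 1 << 3
    };

    struct var_info
    {
      const char *name;
      unsigned int escape_mask;  /* OR of escape_type reasons.  */
    };

    /* The patch's trivial case: a variable whose only escape is into
       pure/const calls cannot be clobbered by an ordinary call.  */
    static int
    maybe_clobbered_by_call (const struct var_info *v)
    {
      return (v->escape_mask & ~ESCAPE_TO_PURE_CONST) != 0;
    }

    int
    main (void)
    {
      struct var_info a = { "a", ESCAPE_TO_PURE_CONST };
      struct var_info b = { "b", ESCAPE_STORED_IN_GLOBAL | ESCAPE_TO_CALL };

      printf ("%s: %s\n", a.name, maybe_clobbered_by_call (&a) ? "clobber" : "skip");
      printf ("%s: %s\n", b.name, maybe_clobbered_by_call (&b) ? "clobber" : "skip");
      return 0;
    }

The mask-of-reasons encoding means a single bitwise test replaces a cache
that had to be invalidated wholesale whenever anything changed.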
Diffstat (limited to 'gcc/tree-ssa-operands.c')
-rw-r--r--  gcc/tree-ssa-operands.c  231
1 file changed, 109 insertions, 122 deletions
diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index 87a1fc6..57cfedc 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -119,14 +119,8 @@ static VEC(tree,heap) *build_vuses;
 /* Array for building all the v_must_def operands.  */
 static VEC(tree,heap) *build_v_must_defs;
 
-/* True if the operands for call clobbered vars are cached and valid.  */
-bool ssa_call_clobbered_cache_valid;
-bool ssa_ro_call_cache_valid;
 /* These arrays are the cached operand vectors for call clobbered calls.  */
-static VEC(tree,heap) *clobbered_v_may_defs;
-static VEC(tree,heap) *clobbered_vuses;
-static VEC(tree,heap) *ro_call_vuses;
 
 static bool ops_active = false;
 
 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
@@ -142,7 +136,7 @@ static inline void append_use (tree *);
 static void append_v_may_def (tree);
 static void append_v_must_def (tree);
 static void add_call_clobber_ops (tree, tree);
-static void add_call_read_ops (tree);
+static void add_call_read_ops (tree, tree);
 static void add_stmt_operand (tree *, stmt_ann_t, int);
 static void build_ssa_operands (tree stmt);
@@ -220,7 +214,34 @@ ssa_operands_active (void)
   return ops_active;
 }
 
+/* Structure storing statistics on how many call clobbers we have, and
+   how many where avoided.  */
+static struct
+{
+  /* Number of call-clobbered ops we attempt to add to calls in
+     add_call_clobber_ops.  */
+  unsigned int clobbered_vars;
+
+  /* Number of write-clobbers (v_may_defs) avoided by using
+     not_written information.  */
+  unsigned int static_write_clobbers_avoided;
+
+  /* Number of reads (vuses) avoided by using not_read
+     information.  */
+  unsigned int static_read_clobbers_avoided;
+
+  /* Number of write-clobbers avoided because the variable can't escape to
+     this call.  */
+  unsigned int unescapable_clobbers_avoided;
+
+  /* Number of readonly uses we attempt to add to calls in
+     add_call_read_ops.  */
+  unsigned int readonly_clobbers;
+
+  /* Number of readonly uses we avoid using not_read information.  */
+  unsigned int static_readonly_clobbers_avoided;
+} clobber_stats;
+
 /* Initialize the operand cache routines.  */
 
 void
@@ -235,6 +256,8 @@ init_ssa_operands (void)
   gcc_assert (operand_memory == NULL);
   operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
   ops_active = true;
+  memset (&clobber_stats, 0, sizeof (clobber_stats));
+
 }
@@ -260,10 +283,17 @@ fini_ssa_operands (void)
       ggc_free (ptr);
     }
 
-  VEC_free (tree, heap, clobbered_v_may_defs);
-  VEC_free (tree, heap, clobbered_vuses);
-  VEC_free (tree, heap, ro_call_vuses);
   ops_active = false;
+
+  if (dump_file && (dump_flags & TDF_STATS))
+    {
+      fprintf (dump_file, "Original clobbered vars:%d\n", clobber_stats.clobbered_vars);
+      fprintf (dump_file, "Static write clobbers avoided:%d\n", clobber_stats.static_write_clobbers_avoided);
+      fprintf (dump_file, "Static read clobbers avoided:%d\n", clobber_stats.static_read_clobbers_avoided);
+      fprintf (dump_file, "Unescapable clobbers avoided:%d\n", clobber_stats.unescapable_clobbers_avoided);
+      fprintf (dump_file, "Original readonly clobbers:%d\n", clobber_stats.readonly_clobbers);
+      fprintf (dump_file, "Static readonly clobbers avoided:%d\n", clobber_stats.static_readonly_clobbers_avoided);
+    }
 }
@@ -1528,7 +1558,7 @@ get_call_expr_operands (tree stmt, tree expr)
           && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
         add_call_clobber_ops (stmt, get_callee_fndecl (expr));
       else if (!(call_flags & ECF_CONST))
-        add_call_read_ops (stmt);
+        add_call_read_ops (stmt, get_callee_fndecl (expr));
     }
 
   /* Find uses in the called function.  */
@@ -1715,7 +1745,6 @@ add_to_addressable_set (tree ref, bitmap *addresses_taken)
     }
 }
 
-
 /* Add clobbering definitions for .GLOBAL_VAR or for each of the
    call clobbered variables in the function.  */
@@ -1723,12 +1752,10 @@ static void
 add_call_clobber_ops (tree stmt, tree callee)
 {
   unsigned u;
-  tree t;
   bitmap_iterator bi;
   stmt_ann_t s_ann = stmt_ann (stmt);
-  struct stmt_ann_d empty_ann;
   bitmap not_read_b, not_written_b;
-
+
   /* Functions that are not const, pure or never return may clobber
      call-clobbered variables.  */
   if (s_ann)
@@ -1742,100 +1769,67 @@ add_call_clobber_ops (tree stmt, tree callee)
       return;
     }
 
-  /* FIXME - if we have better information from the static vars
-     analysis, we need to make the cache call site specific.  This way
-     we can have the performance benefits even if we are doing good
-     optimization.  */
-
   /* Get info for local and module level statics.  There is a bit
      set for each static if the call being processed does not read
     or write that variable.  */
   not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
   not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
-
-  /* If cache is valid, copy the elements into the build vectors.  */
-  if (ssa_call_clobbered_cache_valid
-      && (!not_read_b || bitmap_empty_p (not_read_b))
-      && (!not_written_b || bitmap_empty_p (not_written_b)))
-    {
-      for (u = 0 ; u < VEC_length (tree, clobbered_vuses); u++)
-        {
-          t = VEC_index (tree, clobbered_vuses, u);
-          gcc_assert (TREE_CODE (t) != SSA_NAME);
-          var_ann (t)->in_vuse_list = 1;
-          VEC_safe_push (tree, heap, build_vuses, (tree)t);
-        }
-      for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
-        {
-          t = VEC_index (tree, clobbered_v_may_defs, u);
-          gcc_assert (TREE_CODE (t) != SSA_NAME);
-          var_ann (t)->in_v_may_def_list = 1;
-          VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
-        }
-      return;
-    }
-
-  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
-
   /* Add a V_MAY_DEF operand for every call clobbered variable.  */
   EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
     {
-      tree var = referenced_var (u);
-      unsigned int uid = u;
+      tree var = referenced_var_lookup (u);
+      unsigned int escape_mask = var_ann (var)->escape_mask;
+      tree real_var = var;
+      bool not_read;
+      bool not_written;
+
+      /* Not read and not written are computed on regular vars, not
+         subvars, so look at the parent var if this is an SFT.  */
-      if (unmodifiable_var_p (var))
-        add_stmt_operand (&var, &empty_ann, opf_none);
-      else
+      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+        real_var = SFT_PARENT_VAR (var);
+
+      not_read = not_read_b ? bitmap_bit_p (not_read_b,
+                                            DECL_UID (real_var)) : false;
+      not_written = not_written_b ? bitmap_bit_p (not_written_b,
+                                                  DECL_UID (real_var)) : false;
+      gcc_assert (!unmodifiable_var_p (var));
+
+      clobber_stats.clobbered_vars++;
+
+      /* See if this variable is really clobbered by this function.  */
+
+      /* Trivial case: Things escaping only to pure/const are not
+         clobbered by non-pure-const, and only read by pure/const.  */
+      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
        {
-          bool not_read;
-          bool not_written;
-
-          /* Not read and not written are computed on regular vars, not
-             subvars, so look at the parent var if this is an SFT.  */
-
-          if (TREE_CODE (var) == STRUCT_FIELD_TAG)
-            uid = DECL_UID (SFT_PARENT_VAR (var));
-
-          not_read =
-            not_read_b ? bitmap_bit_p (not_read_b, uid) : false;
-          not_written =
-            not_written_b ? bitmap_bit_p (not_written_b, uid) : false;
-
-          if (not_written)
+          tree call = get_call_expr_in (stmt);
+          if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
            {
-              if (!not_read)
-                add_stmt_operand (&var, &empty_ann, opf_none);
+              add_stmt_operand (&var, s_ann, opf_none);
+              clobber_stats.unescapable_clobbers_avoided++;
+              continue;
            }
          else
-            add_stmt_operand (&var, &empty_ann, opf_is_def);
+            {
+              clobber_stats.unescapable_clobbers_avoided++;
+              continue;
+            }
        }
+
+      if (not_written)
+        {
+          clobber_stats.static_write_clobbers_avoided++;
+          if (!not_read)
+            add_stmt_operand (&var, s_ann, opf_none);
+          else
+            clobber_stats.static_read_clobbers_avoided++;
+        }
+      else
+        add_stmt_operand (&var, s_ann, opf_is_def);
     }
-
-  if ((!not_read_b || bitmap_empty_p (not_read_b))
-      && (!not_written_b || bitmap_empty_p (not_written_b)))
-    {
-      /* Prepare empty cache vectors.  */
-      VEC_truncate (tree, clobbered_vuses, 0);
-      VEC_truncate (tree, clobbered_v_may_defs, 0);
-
-      /* Now fill the clobbered cache with the values that have been found.  */
-      for (u = 0; u < VEC_length (tree, build_vuses); u++)
-        VEC_safe_push (tree, heap, clobbered_vuses,
-                       VEC_index (tree, build_vuses, u));
-
-      gcc_assert (VEC_length (tree, build_vuses)
-                  == VEC_length (tree, clobbered_vuses));
-
-      for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
-        VEC_safe_push (tree, heap, clobbered_v_may_defs,
-                       VEC_index (tree, build_v_may_defs, u));
-
-      gcc_assert (VEC_length (tree, build_v_may_defs)
-                  == VEC_length (tree, clobbered_v_may_defs));
-
-      ssa_call_clobbered_cache_valid = true;
-    }
+
 }
@@ -1843,13 +1837,12 @@ add_call_clobber_ops (tree stmt, tree callee)
    function.  */
 
 static void
-add_call_read_ops (tree stmt)
+add_call_read_ops (tree stmt, tree callee)
 {
   unsigned u;
-  tree t;
   bitmap_iterator bi;
   stmt_ann_t s_ann = stmt_ann (stmt);
-  struct stmt_ann_d empty_ann;
+  bitmap not_read_b;
 
   /* if the function is not pure, it may reference memory.  Add
      a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
@@ -1860,40 +1853,34 @@ add_call_read_ops (tree stmt)
       return;
     }
 
-  /* If cache is valid, copy the elements into the build vector.  */
-  if (ssa_ro_call_cache_valid)
-    {
-      for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
-        {
-          t = VEC_index (tree, ro_call_vuses, u);
-          gcc_assert (TREE_CODE (t) != SSA_NAME);
-          var_ann (t)->in_vuse_list = 1;
-          VEC_safe_push (tree, heap, build_vuses, (tree)t);
-        }
-      return;
-    }
-
-  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
 
   /* Add a VUSE for each call-clobbered variable.  */
   EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
     {
       tree var = referenced_var (u);
-      add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
-    }
-
-  /* Prepare empty cache vectors.  */
-  VEC_truncate (tree, ro_call_vuses, 0);
+      tree real_var = var;
+      bool not_read;
+
+      clobber_stats.readonly_clobbers++;
 
-  /* Now fill the clobbered cache with the values that have been found.  */
-  for (u = 0; u < VEC_length (tree, build_vuses); u++)
-    VEC_safe_push (tree, heap, ro_call_vuses,
-                   VEC_index (tree, build_vuses, u));
+      /* Not read and not written are computed on regular vars, not
+         subvars, so look at the parent var if this is an SFT.  */
 
-  gcc_assert (VEC_length (tree, build_vuses)
-              == VEC_length (tree, ro_call_vuses));
+      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+        real_var = SFT_PARENT_VAR (var);
 
-  ssa_ro_call_cache_valid = true;
+      not_read = not_read_b ? bitmap_bit_p (not_read_b,
+                                            DECL_UID (real_var)) : false;
+
+      if (not_read)
+        {
+          clobber_stats.static_readonly_clobbers_avoided++;
+          continue;
+        }
+
+      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
+    }
 }
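
To summarize the rewritten add_call_clobber_ops above: for each
call-clobbered variable it now chooses, per call site, between no operand, a
VUSE, or a V_MAY_DEF. A simplified sketch with stand-in types, assuming the
patch's order of checks (the real code iterates call_clobbered_vars with
EXECUTE_IF_SET_IN_BITMAP, maps struct-field tags to their parent variable
before consulting the ipa-reference bitmaps, and emits operands via
add_stmt_operand):

    #include <stdbool.h>
    #include <stdio.h>

    /* Stand-in types for the sketch; not GCC's.  */
    struct variable { unsigned int escape_mask; };
    enum op_kind { OP_NONE, OP_VUSE, OP_V_MAY_DEF };

    #define ESCAPE_TO_PURE_CONST (1 << 2)  /* placeholder bit value */

    /* Decide which virtual operand a call site needs for VAR, given the
       callee's static not_read/not_written bits and whether the callee is
       pure or const.  Mirrors the order of checks in the patched
       add_call_clobber_ops.  */
    static enum op_kind
    call_clobber_op (const struct variable *var, bool callee_pure_const,
                     bool not_read, bool not_written)
    {
      /* Trivial case: VAR escapes only into pure/const calls.  A pure or
         const callee may still read it; any other callee cannot touch it.  */
      if ((var->escape_mask & ~ESCAPE_TO_PURE_CONST) == 0)
        return callee_pure_const ? OP_VUSE : OP_NONE;

      /* ipa-reference says the callee never writes VAR: at most a read.  */
      if (not_written)
        return not_read ? OP_NONE : OP_VUSE;

      /* Otherwise the call may clobber VAR.  */
      return OP_V_MAY_DEF;
    }

    int
    main (void)
    {
      /* A variable escaping only to pure/const calls, seen at a call to an
         ordinary function: no operand at all, counted by the patch as an
         "unescapable clobber avoided".  */
      struct variable v = { ESCAPE_TO_PURE_CONST };
      printf ("%d\n", call_clobber_op (&v, false, false, false));  /* 0 */
      return 0;
    }

Every avoided operand is counted in clobber_stats, and fini_ssa_operands
prints the totals when the dump file was opened with TDF_STATS, so the effect
of the static information can be measured per run.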