author | Zdenek Dvorak <rakdver@atrey.karlin.mff.cuni.cz> | 2004-09-16 23:29:43 +0200 |
---|---|---|
committer | Zdenek Dvorak <rakdver@gcc.gnu.org> | 2004-09-16 21:29:43 +0000 |
commit | 42759f1ea05f7893f3bee4adbab74becf1a9f764 (patch) | |
tree | 5d0332daba22aa87d85d5e5174a5bd80e35b53fc /gcc/tree-cfg.c | |
parent | 2731cf24d22fbc55453b74a5498f72ce5c41a52b (diff) | |
download | gcc-42759f1ea05f7893f3bee4adbab74becf1a9f764.zip gcc-42759f1ea05f7893f3bee4adbab74becf1a9f764.tar.gz gcc-42759f1ea05f7893f3bee4adbab74becf1a9f764.tar.bz2 |
Makefile.in (tree-cfg.o): Add CFGLAYOUT_H dependency.
* Makefile.in (tree-cfg.o): Add CFGLAYOUT_H dependency.
* basic-block.h (get_dominated_by_region): Declare.
* dominance.c (get_dominated_by_region): New function.
* tree-cfg.c: Include cfglayout.h.
(tree_duplicate_bb): Duplicate also phi nodes.
(struct ssa_name_map_entry): New type.
(add_phi_args_after_copy_bb, add_phi_args_after_copy,
ssa_name_map_entry_hash, ssa_name_map_entry_eq,
allocate_ssa_names, rewrite_to_new_ssa_names_def,
rewrite_to_new_ssa_names_use, rewrite_to_new_ssa_names_bb,
rewrite_to_new_ssa_names, tree_duplicate_sese_region): New functions.
* tree-flow.h (tree_duplicate_sese_region, add_phi_args_after_copy_bb,
add_phi_args_after_copy, rewrite_to_new_ssa_names_bb,
rewrite_to_new_ssa_names, allocate_ssa_names,
rewrite_into_loop_closed_ssa, verify_loop_closed_ssa): Declare.
* tree-ssa-loop-ch.c (duplicate_blocks): Removed.
(copy_loop_headers): Use tree_duplicate_sese_region.
* gcc.dg/tree-ssa/copy-headers.c: Update outcome.
From-SVN: r87614
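
The ChangeLog above notes that copy_loop_headers in tree-ssa-loop-ch.c now delegates the copying to the new tree_duplicate_sese_region entry point declared in tree-flow.h. As a rough illustration of that call pattern only (this sketch is not part of the commit, would need the usual GCC includes such as config.h, system.h, coretypes.h and tree-flow.h, and get_preheader_edge/get_into_body_edge are hypothetical placeholders for however the caller picks the two edges):

```c
/* Illustrative sketch only -- not from the commit.  Duplicate a
   single-block loop header so the copy ends up on the preheader path.
   get_preheader_edge and get_into_body_edge are hypothetical helpers
   standing in for the caller's own edge selection.  */

static bool
copy_one_header (struct loop *loop)
{
  basic_block region[1];
  edge entry, exit;

  region[0] = loop->header;
  entry = get_preheader_edge (loop);    /* edge entering the header */
  exit = get_into_body_edge (loop);     /* edge from header into the body */

  /* tree_duplicate_sese_region copies the region, redirects ENTRY to the
     copy, and updates SSA form, dominators and loop structures.  */
  return tree_duplicate_sese_region (entry, exit, region, 1, NULL);
}
```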
Diffstat (limited to 'gcc/tree-cfg.c')
-rw-r--r-- | gcc/tree-cfg.c | 395 |
1 file changed, 394 insertions, 1 deletion
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index b8b712b..f7c5155 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -43,6 +43,7 @@ Boston, MA 02111-1307, USA.  */
 #include "toplev.h"
 #include "except.h"
 #include "cfgloop.h"
+#include "cfglayout.h"
 
 /* This file contains functions for building the Control Flow Graph (CFG)
    for a function tree.  */
 
@@ -4237,7 +4238,6 @@ tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
   return true;
 }
 
-
 /* Create a duplicate of the basic block BB.  NOTE: This does not
    preserve SSA form.  */
 
@@ -4251,10 +4251,15 @@ tree_duplicate_bb (basic_block bb)
 
   new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
 
+  /* First copy the phi nodes.  We do not copy phi node arguments here,
+     since the edges are not ready yet.  Keep the chain of phi nodes in
+     the same order, so that we can add them later.  */
   for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
     {
       mark_for_rewrite (PHI_RESULT (phi));
+      create_phi_node (PHI_RESULT (phi), new_bb);
     }
+  set_phi_nodes (new_bb, nreverse (phi_nodes (new_bb)));
 
   bsi_tgt = bsi_start (new_bb);
   for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
@@ -4283,6 +4288,394 @@ tree_duplicate_bb (basic_block bb)
   return new_bb;
 }
 
+/* Basic block BB_COPY was created by code duplication.  Add phi node
+   arguments for edges going out of BB_COPY.  The blocks that were
+   duplicated have rbi->duplicated set to one.  */
+
+void
+add_phi_args_after_copy_bb (basic_block bb_copy)
+{
+  basic_block bb, dest;
+  edge e, e_copy;
+  tree phi, phi_copy, phi_next, def;
+
+  bb = bb_copy->rbi->original;
+
+  for (e_copy = bb_copy->succ; e_copy; e_copy = e_copy->succ_next)
+    {
+      if (!phi_nodes (e_copy->dest))
+        continue;
+
+      if (e_copy->dest->rbi->duplicated)
+        dest = e_copy->dest->rbi->original;
+      else
+        dest = e_copy->dest;
+
+      e = find_edge (bb, dest);
+      if (!e)
+        {
+          /* During loop unrolling the target of the latch edge is copied.
+             In this case we are not looking for edge to dest, but to
+             duplicated block whose original was dest.  */
+          for (e = bb->succ; e; e = e->succ_next)
+            if (e->dest->rbi->duplicated
+                && e->dest->rbi->original == dest)
+              break;
+
+          gcc_assert (e != NULL);
+        }
+
+      for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
+           phi;
+           phi = phi_next, phi_copy = TREE_CHAIN (phi_copy))
+        {
+          phi_next = TREE_CHAIN (phi);
+
+          gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
+          def = PHI_ARG_DEF_FROM_EDGE (phi, e);
+          add_phi_arg (&phi_copy, def, e_copy);
+        }
+    }
+}
+
+/* Blocks in REGION_COPY array of length N_REGION were created by
+   duplication of basic blocks.  Add phi node arguments for edges
+   going from these blocks.  */
+
+void
+add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
+{
+  unsigned i;
+
+  for (i = 0; i < n_region; i++)
+    region_copy[i]->rbi->duplicated = 1;
+
+  for (i = 0; i < n_region; i++)
+    add_phi_args_after_copy_bb (region_copy[i]);
+
+  for (i = 0; i < n_region; i++)
+    region_copy[i]->rbi->duplicated = 0;
+}
+
+/* Maps the old ssa name FROM_NAME to TO_NAME.  */
+
+struct ssa_name_map_entry
+{
+  tree from_name;
+  tree to_name;
+};
+
+/* Hash function for ssa_name_map_entry.  */
+
+static hashval_t
+ssa_name_map_entry_hash (const void *entry)
+{
+  const struct ssa_name_map_entry *en = entry;
+  return SSA_NAME_VERSION (en->from_name);
+}
+
+/* Equality function for ssa_name_map_entry.  */
+
+static int
+ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
+{
+  const struct ssa_name_map_entry *en = in_table;
+
+  return en->from_name == ssa_name;
+}
+
+/* Allocate duplicates of ssa names in list DEFINITIONS and store the mapping
+   to MAP.  */
+
+void
+allocate_ssa_names (bitmap definitions, htab_t *map)
+{
+  tree name;
+  struct ssa_name_map_entry *entry;
+  PTR *slot;
+  unsigned ver;
+
+  if (!*map)
+    *map = htab_create (10, ssa_name_map_entry_hash,
+                        ssa_name_map_entry_eq, free);
+  EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver,
+    {
+      name = ssa_name (ver);
+      slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
+                                       INSERT);
+      if (*slot)
+        entry = *slot;
+      else
+        {
+          entry = xmalloc (sizeof (struct ssa_name_map_entry));
+          entry->from_name = name;
+          *slot = entry;
+        }
+      entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
+    });
+}
+
+/* Rewrite the definition DEF in statement STMT to new ssa name as specified
+   by the mapping MAP.  */
+
+static void
+rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
+{
+  tree name = DEF_FROM_PTR (def);
+  struct ssa_name_map_entry *entry;
+
+  gcc_assert (TREE_CODE (name) == SSA_NAME);
+
+  entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
+  if (!entry)
+    return;
+
+  SET_DEF (def, entry->to_name);
+  SSA_NAME_DEF_STMT (entry->to_name) = stmt;
+}
+
+/* Rewrite the USE to new ssa name as specified by the mapping MAP.  */
+
+static void
+rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
+{
+  tree name = USE_FROM_PTR (use);
+  struct ssa_name_map_entry *entry;
+
+  if (TREE_CODE (name) != SSA_NAME)
+    return;
+
+  entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
+  if (!entry)
+    return;
+
+  SET_USE (use, entry->to_name);
+}
+
+/* Rewrite the ssa names in basic block BB to new ones as specified by the
+   mapping MAP.  */
+
+void
+rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
+{
+  unsigned i;
+  edge e;
+  tree phi, stmt;
+  block_stmt_iterator bsi;
+  use_optype uses;
+  vuse_optype vuses;
+  def_optype defs;
+  v_may_def_optype v_may_defs;
+  v_must_def_optype v_must_defs;
+  stmt_ann_t ann;
+
+  for (e = bb->pred; e; e = e->pred_next)
+    if (e->flags & EDGE_ABNORMAL)
+      break;
+
+  for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
+    {
+      rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
+      if (e)
+        SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
+    }
+
+  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+    {
+      stmt = bsi_stmt (bsi);
+      get_stmt_operands (stmt);
+      ann = stmt_ann (stmt);
+
+      uses = USE_OPS (ann);
+      for (i = 0; i < NUM_USES (uses); i++)
+        rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
+
+      defs = DEF_OPS (ann);
+      for (i = 0; i < NUM_DEFS (defs); i++)
+        rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
+
+      vuses = VUSE_OPS (ann);
+      for (i = 0; i < NUM_VUSES (vuses); i++)
+        rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
+
+      v_may_defs = V_MAY_DEF_OPS (ann);
+      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
+        {
+          rewrite_to_new_ssa_names_use
+                  (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
+          rewrite_to_new_ssa_names_def
+                  (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
+        }
+
+      v_must_defs = V_MUST_DEF_OPS (ann);
+      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
+        rewrite_to_new_ssa_names_def
+                (V_MUST_DEF_OP_PTR (v_must_defs, i), stmt, map);
+    }
+
+  for (e = bb->succ; e; e = e->succ_next)
+    for (phi = phi_nodes (e->dest); phi; phi = TREE_CHAIN (phi))
+      {
+        rewrite_to_new_ssa_names_use
+                (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
+
+        if (e->flags & EDGE_ABNORMAL)
+          {
+            tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
+            SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
+          }
+      }
+}
+
+/* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
+   by the mapping MAP.  */
+
+void
+rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
+{
+  unsigned r;
+
+  for (r = 0; r < n_region; r++)
+    rewrite_to_new_ssa_names_bb (region[r], map);
+}
+
+/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
+   important exit edge EXIT.  By important we mean that no SSA name defined
+   inside region is live over the other exit edges of the region.  All entry
+   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
+   to the duplicate of the region.  SSA form, dominance and loop information
+   is updated.  The new basic blocks are stored to REGION_COPY in the same
+   order as they had in REGION, provided that REGION_COPY is not NULL.
+   The function returns false if it is unable to copy the region,
+   true otherwise.  */
+
+bool
+tree_duplicate_sese_region (edge entry, edge exit,
+                            basic_block *region, unsigned n_region,
+                            basic_block *region_copy)
+{
+  unsigned i, n_doms, ver;
+  bool free_region_copy = false, copying_header = false;
+  struct loop *loop = entry->dest->loop_father;
+  edge exit_copy;
+  bitmap definitions;
+  tree phi, var;
+  basic_block *doms;
+  htab_t ssa_name_map = NULL;
+  edge redirected;
+
+  if (!can_copy_bbs_p (region, n_region))
+    return false;
+
+  /* Some sanity checking.  Note that we do not check for all possible
+     missuses of the functions.  I.e. if you ask to copy something weird,
+     it will work, but the state of structures probably will not be
+     correct.  */
+
+  for (i = 0; i < n_region; i++)
+    {
+      /* We do not handle subloops, i.e. all the blocks must belong to the
+         same loop.  */
+      if (region[i]->loop_father != loop)
+        return false;
+
+      if (region[i] != entry->dest
+          && region[i] == loop->header)
+        return false;
+    }
+
+  loop->copy = loop;
+
+  /* In case the function is used for loop header copying (which is the primary
+     use), ensure that EXIT and its copy will be new latch and entry edges.  */
+  if (loop->header == entry->dest)
+    {
+      copying_header = true;
+      loop->copy = loop->outer;
+
+      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
+        return false;
+
+      for (i = 0; i < n_region; i++)
+        if (region[i] != exit->src
+            && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
+          return false;
+    }
+
+  if (!region_copy)
+    {
+      region_copy = xmalloc (sizeof (basic_block) * n_region);
+      free_region_copy = true;
+    }
+
+  gcc_assert (!any_marked_for_rewrite_p ());
+
+  /* Record blocks outside the region that are duplicated by something
+     inside.  */
+  doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
+  n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
+
+  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
+  definitions = marked_ssa_names ();
+
+  if (copying_header)
+    {
+      loop->header = exit->dest;
+      loop->latch = exit->src;
+    }
+
+  /* Redirect the entry and add the phi node arguments.  */
+  redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
+  gcc_assert (redirected != NULL);
+  for (phi = phi_nodes (entry->dest), var = PENDING_STMT (entry);
+       phi;
+       phi = TREE_CHAIN (phi), var = TREE_CHAIN (var))
+    add_phi_arg (&phi, TREE_VALUE (var), entry);
+  PENDING_STMT (entry) = NULL;
+
+  /* Concerning updating of dominators: We must recount dominators
+     for entry block and its copy.  Anything that is outside of the region, but
+     was dominated by something inside needs recounting as well.  */
+  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
+  doms[n_doms++] = entry->dest->rbi->original;
+  iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
+  free (doms);
+
+  /* Add the other phi node arguments.  */
+  add_phi_args_after_copy (region_copy, n_region);
+
+  /* Add phi nodes for definitions at exit.  TODO -- once we have immediate
+     uses, it should be possible to emit phi nodes just for definitions that
+     are used outside region.  */
+  EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver,
+    {
+      tree name = ssa_name (ver);
+
+      phi = create_phi_node (name, exit->dest);
+      add_phi_arg (&phi, name, exit);
+      add_phi_arg (&phi, name, exit_copy);
+
+      SSA_NAME_DEF_STMT (name) = phi;
+    });
+
+  /* And create new definitions inside region and its copy.  TODO -- once we
+     have immediate uses, it might be better to leave definitions in region
+     unchanged, create new ssa names for phi nodes on exit, and rewrite
+     the uses, to avoid changing the copied region.  */
+  allocate_ssa_names (definitions, &ssa_name_map);
+  rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
+  allocate_ssa_names (definitions, &ssa_name_map);
+  rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
+  htab_delete (ssa_name_map);
+
+  if (free_region_copy)
+    free (region_copy);
+
+  unmark_all_for_rewrite ();
+  BITMAP_XFREE (definitions);
+
+  return true;
+}
 
 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h)  */
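
All of the SSA-rewriting helpers added above hinge on one mapping: each SSA name marked for rewrite is paired with a freshly allocated duplicate, keyed by its version number, and defs/uses are then rewritten through that table (allocate_ssa_names, rewrite_to_new_ssa_names_def/_use). The standalone sketch below mirrors that insert-or-reuse-then-overwrite pattern in plain C instead of GCC's libiberty hash tables; struct name, map_name and the fixed-size probing table are invented purely for the illustration.

```c
/* Standalone illustration (not GCC code) of the old-name -> new-name mapping
   pattern behind ssa_name_map_entry: entries are keyed by an integer
   "version", created on first sight, overwritten on later calls, and
   consulted when rewriting uses.  The toy table is large enough for the
   demo and never grows.  */

#include <stdio.h>
#include <stdlib.h>

struct name { unsigned version; const char *text; };

/* Analogue of ssa_name_map_entry.  */
struct map_entry { struct name *from; struct name *to; };

#define TABLE_SIZE 64
static struct map_entry *table[TABLE_SIZE];

/* Analogue of ssa_name_map_entry_hash/eq plus htab_find_slot_with_hash:
   linear probing keyed on the version number.  */
static struct map_entry **
find_slot (struct name *from)
{
  unsigned i = from->version % TABLE_SIZE;
  while (table[i] && table[i]->from != from)
    i = (i + 1) % TABLE_SIZE;
  return &table[i];
}

/* Analogue of allocate_ssa_names for one definition: reuse the entry if it
   already exists, otherwise create it, then (re)assign the duplicate.  */
static void
map_name (struct name *from, struct name *to)
{
  struct map_entry **slot = find_slot (from);
  if (!*slot)
    {
      *slot = malloc (sizeof (struct map_entry));
      (*slot)->from = from;
    }
  (*slot)->to = to;
}

/* Analogue of rewrite_to_new_ssa_names_use: unmapped names stay as-is.  */
static struct name *
rewrite_use (struct name *use)
{
  struct map_entry **slot = find_slot (use);
  return *slot ? (*slot)->to : use;
}

int
main (void)
{
  struct name a = { 3, "a_3" }, a_copy = { 7, "a_7" }, b = { 4, "b_4" };

  map_name (&a, &a_copy);                                 /* a_3 -> a_7 */
  printf ("%s -> %s\n", a.text, rewrite_use (&a)->text);  /* prints a_3 -> a_7 */
  printf ("%s -> %s\n", b.text, rewrite_use (&b)->text);  /* prints b_4 -> b_4 */
  return 0;
}
```

In the commit itself the same idea is expressed with htab_find_slot_with_hash keyed on SSA_NAME_VERSION, which is why ssa_name_map_entry_hash simply returns the version number of the original name.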