/* Define control flow data structures for the CFG.
Copyright (C) 1987-2024 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
#ifndef GCC_BASIC_BLOCK_H
#define GCC_BASIC_BLOCK_H
#include <profile-count.h>
/* Control flow edge information. */
class GTY((user)) edge_def {
public:
/* The two blocks at the ends of the edge. */
basic_block src;
basic_block dest;
/* Instructions queued on the edge. */
union edge_def_insns {
gimple_seq g;
rtx_insn *r;
} insns;
/* Auxiliary info specific to a pass. */
void *aux;
/* Location of any goto implicit in the edge. */
location_t goto_locus;
/* The index number corresponding to this edge in the edge vector
dest->preds. */
unsigned int dest_idx;
int flags; /* see cfg-flags.def */
profile_probability probability;
/* Return count of edge E. */
inline profile_count count () const;
};
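/* For example, a pass holding an edge E (say, taken from bb->succs) can
   read its endpoints and weight directly.  Illustrative sketch only;
   dump_edge_endpoints is not an existing helper:

     static void
     dump_edge_endpoints (edge e)
     {
       // The blocks at either end of the edge.
       fprintf (stderr, "edge %d -> %d\n", e->src->index, e->dest->index);
       // Expected execution count, derived from the source block's count
       // and the edge probability (see edge_def::count at the end of
       // this file).
       e->count ().dump (stderr);
     }
*/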
/* Masks for edge.flags. */
#define DEF_EDGE_FLAG(NAME,IDX) EDGE_##NAME = 1 << IDX ,
enum cfg_edge_flags {
#include "cfg-flags.def"
LAST_CFG_EDGE_FLAG /* this is only used for EDGE_ALL_FLAGS */
};
#undef DEF_EDGE_FLAG
/* Bit mask for all edge flags. */
#define EDGE_ALL_FLAGS ((LAST_CFG_EDGE_FLAG - 1) * 2 - 1)
/* The following four flags all indicate something special about an edge.
Test the edge flags on EDGE_COMPLEX to detect all forms of "strange"
control flow transfers. */
#define EDGE_COMPLEX \
(EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_EH | EDGE_PRESERVE)
struct GTY(()) rtl_bb_info {
/* The first insn of the block is embedded into bb->il.x. */
/* The last insn of the block. */
rtx_insn *end_;
/* In CFGlayout mode points to insn notes/jumptables to be placed just before
and after the block. */
rtx_insn *header_;
rtx_insn *footer_;
};
struct GTY(()) gimple_bb_info {
/* Sequence of statements in this block. */
gimple_seq seq;
/* PHI nodes for this block. */
gimple_seq phi_nodes;
};
/* A basic block is a sequence of instructions with only one entry and
only one exit. If any one of the instructions is executed, they
will all be executed, and in sequence from first to last.
There may be COND_EXEC instructions in the basic block. The
COND_EXEC *instructions* will be executed -- but if the condition
is false the conditionally executed *expressions* will of course
not be executed. We don't consider the conditionally executed
expression (which might have side-effects) to be in a separate
basic block because the program counter will always be at the same
location after the COND_EXEC instruction, regardless of whether the
condition is true or not.
Basic blocks need not start with a label nor end with a jump insn.
For example, a previous basic block may just "conditionally fall"
into the succeeding basic block, and the last basic block need not
end with a jump insn. Block 0 is a descendant of the entry block.
A basic block beginning with two labels cannot have notes between
the labels.
Data for jump tables are stored in jump_insns that occur in no
basic block even though these insns can follow or precede insns in
basic blocks. */
/* Basic block information indexed by block number. */
struct GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb"))) basic_block_def {
/* The edges into and out of the block. */
vec<edge, va_gc> *preds;
vec<edge, va_gc> *succs;
/* Auxiliary info specific to a pass. */
void *GTY ((skip (""))) aux;
/* Innermost loop containing the block. */
class loop *loop_father;
/* The dominance and postdominance information node. */
struct et_node * GTY ((skip (""))) dom[2];
/* Previous and next blocks in the chain. */
basic_block prev_bb;
basic_block next_bb;
union basic_block_il_dependent {
struct gimple_bb_info GTY ((tag ("0"))) gimple;
struct {
rtx_insn *head_;
struct rtl_bb_info * rtl;
} GTY ((tag ("1"))) x;
} GTY ((desc ("((%1.flags & BB_RTL) != 0)"))) il;
/* Various flags. See cfg-flags.def. */
int flags;
/* The index of this block. */
int index;
/* Expected number of executions: calculated in profile.cc. */
profile_count count;
};
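/* A pass usually reaches a block through the prev_bb/next_bb chain or
   through an edge, and then reads these fields directly.  Illustrative
   sketch (block_is_simple_join_p is not an existing helper):

     static bool
     block_is_simple_join_p (const_basic_block bb)
     {
       // More than one incoming edge, exactly one outgoing edge.
       return vec_safe_length (bb->preds) > 1
              && vec_safe_length (bb->succs) == 1;
     }
*/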
/* This ensures that struct gimple_bb_info is smaller than
struct rtl_bb_info, so that inlining the former into basic_block_def
is the better choice. */
STATIC_ASSERT (sizeof (rtl_bb_info) >= sizeof (gimple_bb_info));
#define BB_FREQ_MAX 10000
/* Masks for basic_block.flags. */
#define DEF_BASIC_BLOCK_FLAG(NAME,IDX) BB_##NAME = 1 << IDX ,
enum cfg_bb_flags
{
#include "cfg-flags.def"
LAST_CFG_BB_FLAG /* this is only used for BB_ALL_FLAGS */
};
#undef DEF_BASIC_BLOCK_FLAG
/* Bit mask for all basic block flags. */
#define BB_ALL_FLAGS ((LAST_CFG_BB_FLAG - 1) * 2 - 1)
/* Bit mask for all basic block flags that must be preserved. These are
the bit masks that are *not* cleared by clear_bb_flags. */
#define BB_FLAGS_TO_PRESERVE \
(BB_DISABLE_SCHEDULE | BB_RTL | BB_NON_LOCAL_GOTO_TARGET \
| BB_HOT_PARTITION | BB_COLD_PARTITION)
/* Dummy bitmask for convenience in the hot/cold partitioning code. */
#define BB_UNPARTITIONED 0
/* Partitions, to be used when partitioning hot and cold basic blocks into
separate sections. */
#define BB_PARTITION(bb) ((bb)->flags & (BB_HOT_PARTITION|BB_COLD_PARTITION))
#define BB_SET_PARTITION(bb, part) do { \
basic_block bb_ = (bb); \
bb_->flags = ((bb_->flags & ~(BB_HOT_PARTITION|BB_COLD_PARTITION)) \
| (part)); \
} while (0)
#define BB_COPY_PARTITION(dstbb, srcbb) \
BB_SET_PARTITION (dstbb, BB_PARTITION (srcbb))
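/* For instance, a block NEW_BB created by duplicating OLD_BB normally
   inherits the partition of the original (sketch; old_bb and new_bb are
   hypothetical locals of a pass):

     // Place the copy in the same (hot, cold, or no) partition.
     BB_COPY_PARTITION (new_bb, old_bb);
     // Or force a block into the cold section explicitly:
     BB_SET_PARTITION (new_bb, BB_COLD_PARTITION);
*/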
/* Defines for accessing the fields of the CFG structure for function FN. */
#define ENTRY_BLOCK_PTR_FOR_FN(FN) ((FN)->cfg->x_entry_block_ptr)
#define EXIT_BLOCK_PTR_FOR_FN(FN) ((FN)->cfg->x_exit_block_ptr)
#define basic_block_info_for_fn(FN) ((FN)->cfg->x_basic_block_info)
#define n_basic_blocks_for_fn(FN) ((FN)->cfg->x_n_basic_blocks)
#define n_edges_for_fn(FN) ((FN)->cfg->x_n_edges)
#define last_basic_block_for_fn(FN) ((FN)->cfg->x_last_basic_block)
#define label_to_block_map_for_fn(FN) ((FN)->cfg->x_label_to_block_map)
#define profile_status_for_fn(FN) ((FN)->cfg->x_profile_status)
#define BASIC_BLOCK_FOR_FN(FN,N) \
((*basic_block_info_for_fn (FN))[(N)])
#define SET_BASIC_BLOCK_FOR_FN(FN,N,BB) \
((*basic_block_info_for_fn (FN))[(N)] = (BB))
/* For iterating over basic blocks. */
#define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
for (BB = FROM; BB != TO; BB = BB->DIR)
#define FOR_EACH_BB_FN(BB, FN) \
FOR_BB_BETWEEN (BB, (FN)->cfg->x_entry_block_ptr->next_bb, (FN)->cfg->x_exit_block_ptr, next_bb)
#define FOR_EACH_BB_REVERSE_FN(BB, FN) \
FOR_BB_BETWEEN (BB, (FN)->cfg->x_exit_block_ptr->prev_bb, (FN)->cfg->x_entry_block_ptr, prev_bb)
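/* Typical use, visiting every real block of the current function in
   chain order (sketch; assumes cfun points to the function being
   compiled and dump_file may be NULL):

     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       if (dump_file)
         fprintf (dump_file, "block %d\n", bb->index);
*/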
/* For iterating over insns in basic block. */
#define FOR_BB_INSNS(BB, INSN) \
for ((INSN) = BB_HEAD (BB); \
(INSN) && (INSN) != NEXT_INSN (BB_END (BB)); \
(INSN) = NEXT_INSN (INSN))
/* For iterating over insns in basic block when we might remove the
current insn. */
#define FOR_BB_INSNS_SAFE(BB, INSN, CURR) \
for ((INSN) = BB_HEAD (BB), (CURR) = (INSN) ? NEXT_INSN ((INSN)): NULL; \
(INSN) && (INSN) != NEXT_INSN (BB_END (BB)); \
(INSN) = (CURR), (CURR) = (INSN) ? NEXT_INSN ((INSN)) : NULL)
#define FOR_BB_INSNS_REVERSE(BB, INSN) \
for ((INSN) = BB_END (BB); \
(INSN) && (INSN) != PREV_INSN (BB_HEAD (BB)); \
(INSN) = PREV_INSN (INSN))
#define FOR_BB_INSNS_REVERSE_SAFE(BB, INSN, CURR) \
for ((INSN) = BB_END (BB),(CURR) = (INSN) ? PREV_INSN ((INSN)) : NULL; \
(INSN) && (INSN) != PREV_INSN (BB_HEAD (BB)); \
(INSN) = (CURR), (CURR) = (INSN) ? PREV_INSN ((INSN)) : NULL)
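/* The _SAFE variants cache the next (or previous) insn before the body
   runs, so the current insn may be deleted inside the loop.  Sketch;
   assumes an RTL-form block and delete_insn () from cfgrtl:

     rtx_insn *insn, *next;
     FOR_BB_INSNS_SAFE (bb, insn, next)
       if (DEBUG_INSN_P (insn))
         delete_insn (insn);
*/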
/* Cycles through _all_ basic blocks, even the fake ones (entry and
exit block). */
#define FOR_ALL_BB_FN(BB, FN) \
for (BB = ENTRY_BLOCK_PTR_FOR_FN (FN); BB; BB = BB->next_bb)
/* Stuff for recording basic block info. */
/* These accessors are plain macros; the underlying head_/end_/header_/
footer_ fields are already rtx_insn *, so no checked casts are needed
here. */
#define BB_HEAD(B) (B)->il.x.head_
#define BB_END(B) (B)->il.x.rtl->end_
#define BB_HEADER(B) (B)->il.x.rtl->header_
#define BB_FOOTER(B) (B)->il.x.rtl->footer_
/* Special block numbers [markers] for entry and exit.
Neither of them is supposed to hold actual statements. */
#define ENTRY_BLOCK (0)
#define EXIT_BLOCK (1)
/* The two blocks that are always in the cfg. */
#define NUM_FIXED_BLOCKS (2)
/* This is the value which indicates no edge is present. */
#define EDGE_INDEX_NO_EDGE -1
/* EDGE_INDEX returns an integer index for an edge, or EDGE_INDEX_NO_EDGE
if there is no edge between the 2 basic blocks. */
#define EDGE_INDEX(el, pred, succ) (find_edge_index ((el), (pred), (succ)))
/* INDEX_EDGE_PRED_BB and INDEX_EDGE_SUCC_BB return a pointer to the basic
block which is either the pred or succ end of the indexed edge. */
#define INDEX_EDGE_PRED_BB(el, index) ((el)->index_to_edge[(index)]->src)
#define INDEX_EDGE_SUCC_BB(el, index) ((el)->index_to_edge[(index)]->dest)
/* INDEX_EDGE returns a pointer to the edge. */
#define INDEX_EDGE(el, index) ((el)->index_to_edge[(index)])
/* Number of edges in the compressed edge list. */
#define NUM_EDGES(el) ((el)->num_edges)
/* BB is assumed to contain a conditional jump. Return the fallthru edge. */
#define FALLTHRU_EDGE(bb) (EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
? EDGE_SUCC ((bb), 0) : EDGE_SUCC ((bb), 1))
/* BB is assumed to contain a conditional jump. Return the branch edge. */
#define BRANCH_EDGE(bb) (EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
? EDGE_SUCC ((bb), 1) : EDGE_SUCC ((bb), 0))
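/* For a block ending in a two-way conditional jump the two macros above
   pick the successor edges apart (sketch; only meaningful when BB really
   has exactly a fallthru edge and a branch edge):

     edge fall = FALLTHRU_EDGE (bb);
     edge taken = BRANCH_EDGE (bb);
     gcc_checking_assert (fall != taken
                          && (fall->flags & EDGE_FALLTHRU));
*/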
/* Return expected execution frequency of the edge E. */
#define EDGE_FREQUENCY(e) e->count ().to_frequency (cfun)
/* Compute a scale factor (or probability) suitable for scaling of
gcov_type values via apply_probability() and apply_scale(). */
#define GCOV_COMPUTE_SCALE(num,den) \
((den) ? RDIV ((num) * REG_BR_PROB_BASE, (den)) : REG_BR_PROB_BASE)
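/* E.g. with REG_BR_PROB_BASE == 10000, GCOV_COMPUTE_SCALE (3, 4)
   evaluates to RDIV (3 * 10000, 4) == 7500, i.e. a 75% scale factor,
   while a zero denominator conservatively yields REG_BR_PROB_BASE. */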
/* Return nonzero if edge is critical. */
#define EDGE_CRITICAL_P(e) (EDGE_COUNT ((e)->src->succs) >= 2 \
&& EDGE_COUNT ((e)->dest->preds) >= 2)
#define EDGE_COUNT(ev) vec_safe_length (ev)
#define EDGE_I(ev,i) (*ev)[(i)]
#define EDGE_PRED(bb,i) (*(bb)->preds)[(i)]
#define EDGE_SUCC(bb,i) (*(bb)->succs)[(i)]
/* Returns true if BB has precisely one successor. */
inline bool
single_succ_p (const_basic_block bb)
{
return EDGE_COUNT (bb->succs) == 1;
}
/* Returns true if BB has precisely one predecessor. */
inline bool
single_pred_p (const_basic_block bb)
{
return EDGE_COUNT (bb->preds) == 1;
}
/* Returns the single successor edge of basic block BB. Aborts if
BB does not have exactly one successor. */
inline edge
single_succ_edge (const_basic_block bb)
{
gcc_checking_assert (single_succ_p (bb));
return EDGE_SUCC (bb, 0);
}
/* Returns the single predecessor edge of basic block BB. Aborts
if BB does not have exactly one predecessor. */
inline edge
single_pred_edge (const_basic_block bb)
{
gcc_checking_assert (single_pred_p (bb));
return EDGE_PRED (bb, 0);
}
/* Returns the single successor block of basic block BB. Aborts
if BB does not have exactly one successor. */
inline basic_block
single_succ (const_basic_block bb)
{
return single_succ_edge (bb)->dest;
}
/* Returns the single predecessor block of basic block BB. Aborts
if BB does not have exactly one predecessor. */
inline basic_block
single_pred (const_basic_block bb)
{
return single_pred_edge (bb)->src;
}
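/* These helpers make straight-line chains easy to walk.  Sketch,
   following single-successor blocks downstream from a hypothetical
   starting block start_bb:

     basic_block bb = start_bb;
     while (single_succ_p (bb)
            && single_succ (bb) != bb
            && single_pred_p (single_succ (bb)))
       bb = single_succ (bb);
*/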
/* Iterator object for edges. */
struct edge_iterator {
unsigned index;
vec<edge, va_gc> **container;
};
inline vec<edge, va_gc> *
ei_container (edge_iterator i)
{
gcc_checking_assert (i.container);
return *i.container;
}
#define ei_start(iter) ei_start_1 (&(iter))
#define ei_last(iter) ei_last_1 (&(iter))
/* Return an iterator pointing to the start of an edge vector. */
inline edge_iterator
ei_start_1 (vec<edge, va_gc> **ev)
{
edge_iterator i;
i.index = 0;
i.container = ev;
return i;
}
/* Return an iterator pointing to the last element of an edge
vector. */
inline edge_iterator
ei_last_1 (vec<edge, va_gc> **ev)
{
edge_iterator i;
i.index = EDGE_COUNT (*ev) - 1;
i.container = ev;
return i;
}
/* Is the iterator `i' at the end of the sequence? */
inline bool
ei_end_p (edge_iterator i)
{
return (i.index == EDGE_COUNT (ei_container (i)));
}
/* Is the iterator `i' at one position before the end of the
sequence? */
inline bool
ei_one_before_end_p (edge_iterator i)
{
return (i.index + 1 == EDGE_COUNT (ei_container (i)));
}
/* Advance the iterator to the next element. */
inline void
ei_next (edge_iterator *i)
{
gcc_checking_assert (i->index < EDGE_COUNT (ei_container (*i)));
i->index++;
}
/* Move the iterator to the previous element. */
inline void
ei_prev (edge_iterator *i)
{
gcc_checking_assert (i->index > 0);
i->index--;
}
/* Return the edge pointed to by the iterator `i'. */
inline edge
ei_edge (edge_iterator i)
{
return EDGE_I (ei_container (i), i.index);
}
/* Return an edge pointed to by the iterator. Do it safely so that
NULL is returned when the iterator is pointing at the end of the
sequence. */
inline edge
ei_safe_edge (edge_iterator i)
{
return !ei_end_p (i) ? ei_edge (i) : NULL;
}
/* Return true if we should continue to iterate.  Return false otherwise.
*P is set to the next edge if we are to continue to iterate
and to NULL otherwise. */
inline bool
ei_cond (edge_iterator ei, edge *p)
{
if (!ei_end_p (ei))
{
*p = ei_edge (ei);
return true;
}
else
{
*p = NULL;
return false;
}
}
/* This macro serves as a convenient way to iterate each edge in a
vector of predecessor or successor edges.  It must not be used when
an element might be removed during the traversal, otherwise
elements will be missed.  Instead, use a loop like the following,
which only advances the iterator when the current edge is kept:

     edge e;
     edge_iterator ei;
     for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
       {
         if (e != taken_edge)
           remove_edge (e);
         else
           ei_next (&ei);
       }
*/
#define FOR_EACH_EDGE(EDGE,ITER,EDGE_VEC) \
for ((ITER) = ei_start ((EDGE_VEC)); \
ei_cond ((ITER), &(EDGE)); \
ei_next (&(ITER)))
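/* Plain traversal when no edge is removed inside the body, e.g. summing
   the outgoing probabilities of BB (sketch; profile_probability comes
   from profile-count.h, included above):

     edge e;
     edge_iterator ei;
     profile_probability sum = profile_probability::never ();
     FOR_EACH_EDGE (e, ei, bb->succs)
       sum += e->probability;
*/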
#define CLEANUP_EXPENSIVE 1 /* Do relatively expensive optimizations
except for edge forwarding */
#define CLEANUP_CROSSJUMP 2 /* Do crossjumping. */
#define CLEANUP_POST_REGSTACK 4 /* We run after reg-stack and need
to take care of REG_DEAD notes. */
#define CLEANUP_THREADING 8 /* Do jump threading. */
#define CLEANUP_NO_INSN_DEL 16 /* Do not try to delete trivially dead
insns. */
#define CLEANUP_CFGLAYOUT 32 /* Do cleanup in cfglayout mode. */
#define CLEANUP_CFG_CHANGED 64 /* The caller changed the CFG. */
#define CLEANUP_NO_PARTITIONING 128 /* Do not try to fix partitions. */
#define CLEANUP_FORCE_FAST_DCE 0x100 /* Force run_fast_dce to be called
at least once. */
/* Return true if BB is in a transaction. */
inline bool
bb_in_transaction (basic_block bb)
{
return bb->flags & BB_IN_TRANSACTION;
}
/* Return true when one of the predecessor edges of BB is marked with EDGE_EH. */
inline bool
bb_has_eh_pred (basic_block bb)
{
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (e->flags & EDGE_EH)
return true;
}
return false;
}
/* Return true when one of the predecessor edges of BB is marked with EDGE_ABNORMAL. */
inline bool
bb_has_abnormal_pred (basic_block bb)
{
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (e->flags & EDGE_ABNORMAL)
return true;
}
return false;
}
/* Return the fallthru edge in EDGES if it exists, NULL otherwise. */
inline edge
find_fallthru_edge (vec<edge, va_gc> *edges)
{
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, edges)
if (e->flags & EDGE_FALLTHRU)
break;
return e;
}
/* Check that the probability is sane. */
inline void
check_probability (int prob)
{
gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
}
/* Given PROB1 and PROB2, return PROB1*PROB2/REG_BR_PROB_BASE.
Used to combine BB probabilities. */
inline int
combine_probabilities (int prob1, int prob2)
{
check_probability (prob1);
check_probability (prob2);
return RDIV (prob1 * prob2, REG_BR_PROB_BASE);
}
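/* E.g. two independent 50% probabilities (5000 each when
   REG_BR_PROB_BASE == 10000) combine to 25%:
   combine_probabilities (5000, 5000) == 2500. */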
/* Apply scale factor SCALE on frequency or count FREQ. Use this
interface when potentially scaling up, so that SCALE is not
constrained to be < REG_BR_PROB_BASE. */
inline gcov_type
apply_scale (gcov_type freq, gcov_type scale)
{
return RDIV (freq * scale, REG_BR_PROB_BASE);
}
/* Apply probability PROB on frequency or count FREQ. */
inline gcov_type
apply_probability (gcov_type freq, int prob)
{
check_probability (prob);
return apply_scale (freq, prob);
}
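/* Worked example: a count of 2000 scaled by a 30% probability
   (prob == 3000 with REG_BR_PROB_BASE == 10000) gives
   apply_probability (2000, 3000) == RDIV (2000 * 3000, 10000) == 600. */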
/* Return inverse probability for PROB. */
inline int
inverse_probability (int prob1)
{
check_probability (prob1);
return REG_BR_PROB_BASE - prob1;
}
/* Return true if BB has at least one abnormal or EH outgoing edge. */
inline bool
has_abnormal_or_eh_outgoing_edge_p (basic_block bb)
{
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
return true;
return false;
}
/* Return true when one of the predecessor edges of BB is marked with
EDGE_ABNORMAL_CALL or EDGE_EH. */
inline bool
has_abnormal_call_or_eh_pred_edge_p (basic_block bb)
{
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
return true;
return false;
}
/* Return count of edge E. */
inline profile_count edge_def::count () const
{
return src->count.apply_probability (probability);
}
#endif /* GCC_BASIC_BLOCK_H */