about summary refs log tree commit diff
path: root/gcc/cfgexpand.c
diff options
context:
space:
mode:
author	Jakub Jelinek <jakub@redhat.com>	2012-11-12 16:52:26 +0100
committer	Dodji Seketeli <dodji@gcc.gnu.org>	2012-11-12 16:52:26 +0100
commitf3ddd6929ad26e1c6202265460730241ad6c28d0 (patch)
tree199bfa081ada2d767c47988ded788ff71b17e44d /gcc/cfgexpand.c
parentdfb9e332a5ceefd6770c028d9a937ab869116cba (diff)
downloadgcc-f3ddd6929ad26e1c6202265460730241ad6c28d0.zip
gcc-f3ddd6929ad26e1c6202265460730241ad6c28d0.tar.gz
gcc-f3ddd6929ad26e1c6202265460730241ad6c28d0.tar.bz2
Implement protection of stack variables
This patch implements the protection of stack variables.  It lays out
stack variables as well as the different red zones, emits some prologue
code to populate the shadow memory so as to poison (mark as
non-accessible) the regions of the red zones and mark the regions of
stack variables as accessible, and emits some epilogue code to
un-poison (mark as accessible) the regions of red zones right before
the function exits.

	* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
	(cfgexpand.o): Depend on asan.h.
	* asan.c: Include expr.h and optabs.h.
	(asan_shadow_set): New variable.
	(asan_shadow_cst, asan_emit_stack_protection): New functions.
	(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
	* cfgexpand.c: Include asan.h.  Define HOST_WIDE_INT heap vector.
	(partition_stack_vars): If i is large alignment and j small
	alignment or vice versa, break out of the loop instead of continue,
	and put the test earlier.  If flag_asan, break out of the loop if
	for small alignment size is different.
	(struct stack_vars_data): New type.
	(expand_stack_vars): Add DATA argument.  Change PRED type to
	function taking size_t argument instead of tree.  Adjust pred
	calls.  Fill DATA in and add needed padding in between variables
	if -faddress-sanitizer.
	(defer_stack_allocation): Defer everything for flag_asan.
	(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
	size_t index into stack_vars array instead of the decl directly.
	(asan_decl_phase_3): New function.
	(expand_used_vars): Return var destruction sequence.  Adjust
	expand_stack_vars calls, add another one for flag_asan.  Call
	asan_emit_stack_protection if expand_stack_vars added anything
	to the vectors.
	(expand_gimple_basic_block): Add disable_tail_calls argument.
	(gimple_expand_cfg): Pass true to it if expand_used_vars returned
	non-NULL.  Emit the sequence returned by expand_used_vars after
	return_label.
	* asan.h (asan_emit_stack_protection): New prototype.
	(asan_shadow_set): New decl.
	(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT,
	ASAN_STACK_MAGIC_MIDDLE, ASAN_STACK_MAGIC_RIGHT,
	ASAN_STACK_FRAME_MAGIC): Define.
	(asan_protect_stack_decl): New inline.
	* toplev.c (process_options): Also disable -faddress-sanitizer on
	!FRAME_GROWS_DOWNWARDS targets.

From-SVN: r193436
Diffstat (limited to 'gcc/cfgexpand.c')
-rw-r--r--	gcc/cfgexpand.c	165
1 files changed, 144 insertions, 21 deletions
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index 0bd9d1d..2c17ec1 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -47,6 +47,7 @@ along with GCC; see the file COPYING3. If not see
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber. */
#include "insn-attr.h" /* For INSN_SCHEDULING. */
+#include "asan.h"
/* This variable holds information helping the rewriting of SSA trees
into RTL. */
@@ -736,6 +737,7 @@ partition_stack_vars (void)
{
size_t i = stack_vars_sorted[si];
unsigned int ialign = stack_vars[i].alignb;
+ HOST_WIDE_INT isize = stack_vars[i].size;
/* Ignore objects that aren't partition representatives. If we
see a var that is not a partition representative, it must
@@ -747,19 +749,28 @@ partition_stack_vars (void)
{
size_t j = stack_vars_sorted[sj];
unsigned int jalign = stack_vars[j].alignb;
+ HOST_WIDE_INT jsize = stack_vars[j].size;
/* Ignore objects that aren't partition representatives. */
if (stack_vars[j].representative != j)
continue;
- /* Ignore conflicting objects. */
- if (stack_var_conflict_p (i, j))
- continue;
-
/* Do not mix objects of "small" (supported) alignment
and "large" (unsupported) alignment. */
if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
!= (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
+ break;
+
+ /* For Address Sanitizer do not mix objects with different
+ sizes, as the shorter vars wouldn't be adequately protected.
+ Don't do that for "large" (unsupported) alignment objects,
+ those aren't protected anyway. */
+ if (flag_asan && isize != jsize
+ && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
+ break;
+
+ /* Ignore conflicting objects. */
+ if (stack_var_conflict_p (i, j))
continue;
/* UNION the objects, placing J at OFFSET. */
@@ -837,12 +848,26 @@ expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
set_rtl (decl, x);
}
+DEF_VEC_I(HOST_WIDE_INT);
+DEF_VEC_ALLOC_I(HOST_WIDE_INT,heap);
+
+struct stack_vars_data
+{
+ /* Vector of offset pairs, always end of some padding followed
+ by start of the padding that needs Address Sanitizer protection.
+ The vector is in reversed, highest offset pairs come first. */
+ VEC(HOST_WIDE_INT, heap) *asan_vec;
+
+ /* Vector of partition representative decls in between the paddings. */
+ VEC(tree, heap) *asan_decl_vec;
+};
+
/* A subroutine of expand_used_vars. Give each partition representative
a unique location within the stack frame. Update each partition member
with that location. */
static void
-expand_stack_vars (bool (*pred) (tree))
+expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
size_t si, i, j, n = stack_vars_num;
HOST_WIDE_INT large_size = 0, large_alloc = 0;
@@ -913,13 +938,45 @@ expand_stack_vars (bool (*pred) (tree))
/* Check the predicate to see whether this variable should be
allocated in this pass. */
- if (pred && !pred (decl))
+ if (pred && !pred (i))
continue;
alignb = stack_vars[i].alignb;
if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
{
- offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
+ if (flag_asan && pred)
+ {
+ HOST_WIDE_INT prev_offset = frame_offset;
+ tree repr_decl = NULL_TREE;
+
+ offset
+ = alloc_stack_frame_space (stack_vars[i].size
+ + ASAN_RED_ZONE_SIZE,
+ MAX (alignb, ASAN_RED_ZONE_SIZE));
+ VEC_safe_push (HOST_WIDE_INT, heap, data->asan_vec,
+ prev_offset);
+ VEC_safe_push (HOST_WIDE_INT, heap, data->asan_vec,
+ offset + stack_vars[i].size);
+ /* Find best representative of the partition.
+ Prefer those with DECL_NAME, even better
+ satisfying asan_protect_stack_decl predicate. */
+ for (j = i; j != EOC; j = stack_vars[j].next)
+ if (asan_protect_stack_decl (stack_vars[j].decl)
+ && DECL_NAME (stack_vars[j].decl))
+ {
+ repr_decl = stack_vars[j].decl;
+ break;
+ }
+ else if (repr_decl == NULL_TREE
+ && DECL_P (stack_vars[j].decl)
+ && DECL_NAME (stack_vars[j].decl))
+ repr_decl = stack_vars[j].decl;
+ if (repr_decl == NULL_TREE)
+ repr_decl = stack_vars[i].decl;
+ VEC_safe_push (tree, heap, data->asan_decl_vec, repr_decl);
+ }
+ else
+ offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
base = virtual_stack_vars_rtx;
base_align = crtl->max_used_stack_slot_alignment;
}
@@ -1057,8 +1114,9 @@ static bool
defer_stack_allocation (tree var, bool toplevel)
{
/* If stack protection is enabled, *all* stack variables must be deferred,
- so that we can re-order the strings to the top of the frame. */
- if (flag_stack_protect)
+ so that we can re-order the strings to the top of the frame.
+ Similarly for Address Sanitizer. */
+ if (flag_stack_protect || flag_asan)
return true;
/* We handle "large" alignment via dynamic allocation. We want to handle
@@ -1329,15 +1387,31 @@ stack_protect_decl_phase (tree decl)
as callbacks for expand_stack_vars. */
static bool
-stack_protect_decl_phase_1 (tree decl)
+stack_protect_decl_phase_1 (size_t i)
+{
+ return stack_protect_decl_phase (stack_vars[i].decl) == 1;
+}
+
+static bool
+stack_protect_decl_phase_2 (size_t i)
{
- return stack_protect_decl_phase (decl) == 1;
+ return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
+/* And helper function that checks for asan phase (with stack protector
+ it is phase 3). This is used as callback for expand_stack_vars.
+ Returns true if any of the vars in the partition need to be protected. */
+
static bool
-stack_protect_decl_phase_2 (tree decl)
+asan_decl_phase_3 (size_t i)
{
- return stack_protect_decl_phase (decl) == 2;
+ while (i != EOC)
+ {
+ if (asan_protect_stack_decl (stack_vars[i].decl))
+ return true;
+ i = stack_vars[i].next;
+ }
+ return false;
}
/* Ensure that variables in different stack protection phases conflict
@@ -1448,11 +1522,12 @@ estimated_stack_frame_size (struct cgraph_node *node)
/* Expand all variables used in the function. */
-static void
+static rtx
expand_used_vars (void)
{
tree var, outer_block = DECL_INITIAL (current_function_decl);
VEC(tree,heap) *maybe_local_decls = NULL;
+ rtx var_end_seq = NULL_RTX;
struct pointer_map_t *ssa_name_decls;
unsigned i;
unsigned len;
@@ -1603,6 +1678,11 @@ expand_used_vars (void)
/* Assign rtl to each variable based on these partitions. */
if (stack_vars_num > 0)
{
+ struct stack_vars_data data;
+
+ data.asan_vec = NULL;
+ data.asan_decl_vec = NULL;
+
/* Reorder decls to be protected by iterating over the variables
array multiple times, and allocating out of each phase in turn. */
/* ??? We could probably integrate this into the qsort we did
@@ -1611,14 +1691,41 @@ expand_used_vars (void)
if (has_protected_decls)
{
/* Phase 1 contains only character arrays. */
- expand_stack_vars (stack_protect_decl_phase_1);
+ expand_stack_vars (stack_protect_decl_phase_1, &data);
/* Phase 2 contains other kinds of arrays. */
if (flag_stack_protect == 2)
- expand_stack_vars (stack_protect_decl_phase_2);
+ expand_stack_vars (stack_protect_decl_phase_2, &data);
}
- expand_stack_vars (NULL);
+ if (flag_asan)
+ /* Phase 3, any partitions that need asan protection
+ in addition to phase 1 and 2. */
+ expand_stack_vars (asan_decl_phase_3, &data);
+
+ if (!VEC_empty (HOST_WIDE_INT, data.asan_vec))
+ {
+ HOST_WIDE_INT prev_offset = frame_offset;
+ HOST_WIDE_INT offset
+ = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
+ ASAN_RED_ZONE_SIZE);
+ VEC_safe_push (HOST_WIDE_INT, heap, data.asan_vec, prev_offset);
+ VEC_safe_push (HOST_WIDE_INT, heap, data.asan_vec, offset);
+
+ var_end_seq
+ = asan_emit_stack_protection (virtual_stack_vars_rtx,
+ VEC_address (HOST_WIDE_INT,
+ data.asan_vec),
+ VEC_address (tree,
+ data.asan_decl_vec),
+ VEC_length (HOST_WIDE_INT,
+ data.asan_vec));
+ }
+
+ expand_stack_vars (NULL, &data);
+
+ VEC_free (HOST_WIDE_INT, heap, data.asan_vec);
+ VEC_free (tree, heap, data.asan_decl_vec);
}
fini_vars_expansion ();
@@ -1645,6 +1752,8 @@ expand_used_vars (void)
frame_offset += align - 1;
frame_offset &= -align;
}
+
+ return var_end_seq;
}
@@ -3661,7 +3770,7 @@ expand_debug_locations (void)
/* Expand basic block BB from GIMPLE trees to RTL. */
static basic_block
-expand_gimple_basic_block (basic_block bb)
+expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
gimple_stmt_iterator gsi;
gimple_seq stmts;
@@ -3949,6 +4058,11 @@ expand_gimple_basic_block (basic_block bb)
}
else
{
+ if (is_gimple_call (stmt)
+ && gimple_call_tail_p (stmt)
+ && disable_tail_calls)
+ gimple_call_set_tail (stmt, false);
+
if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
{
bool can_fallthru;
@@ -4308,7 +4422,7 @@ gimple_expand_cfg (void)
sbitmap blocks;
edge_iterator ei;
edge e;
- rtx var_seq;
+ rtx var_seq, var_ret_seq;
unsigned i;
timevar_push (TV_OUT_OF_SSA);
@@ -4368,7 +4482,7 @@ gimple_expand_cfg (void)
timevar_push (TV_VAR_EXPAND);
start_sequence ();
- expand_used_vars ();
+ var_ret_seq = expand_used_vars ();
var_seq = get_insns ();
end_sequence ();
@@ -4494,7 +4608,7 @@ gimple_expand_cfg (void)
lab_rtx_for_bb = pointer_map_create ();
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
- bb = expand_gimple_basic_block (bb);
+ bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
if (MAY_HAVE_DEBUG_INSNS)
expand_debug_locations ();
@@ -4522,6 +4636,15 @@ gimple_expand_cfg (void)
construct_exit_block ();
insn_locations_finalize ();
+ if (var_ret_seq)
+ {
+ rtx after = return_label;
+ rtx next = NEXT_INSN (after);
+ if (next && NOTE_INSN_BASIC_BLOCK_P (next))
+ after = next;
+ emit_insn_after (var_ret_seq, after);
+ }
+
/* Zap the tree EH table. */
set_eh_throw_stmt_table (cfun, NULL);