aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--gcc/ChangeLog38
-rw-r--r--gcc/Makefile.in4
-rw-r--r--gcc/asan.c306
-rw-r--r--gcc/asan.h31
-rw-r--r--gcc/cfgexpand.c165
-rw-r--r--gcc/toplev.c4
6 files changed, 514 insertions, 34 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 5f6e41c..0d8e5cc 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,43 @@
2012-11-12 Jakub Jelinek <jakub@redhat.com>
+ * Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
+ (cfgexpand.o): Depend on asan.h.
+ * asan.c: Include expr.h and optabs.h.
+ (asan_shadow_set): New variable.
+ (asan_shadow_cst, asan_emit_stack_protection): New functions.
+ (asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
+ * cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
+ (partition_stack_vars): If i is large alignment and j small
+ alignment or vice versa, break out of the loop instead of continue,
+ and put the test earlier. If flag_asan, break out of the loop
+ if for small alignment size is different.
+ (struct stack_vars_data): New type.
+ (expand_stack_vars): Add DATA argument. Change PRED type to
+ function taking size_t argument instead of tree. Adjust pred
+ calls. Fill DATA in and add needed padding in between variables
+ if -faddress-sanitizer.
+ (defer_stack_allocation): Defer everything for flag_asan.
+ (stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
+ size_t index into stack_vars array instead of the decl directly.
+ (asan_decl_phase_3): New function.
+ (expand_used_vars): Return var destruction sequence. Adjust
+ expand_stack_vars calls, add another one for flag_asan. Call
+ asan_emit_stack_protection if expand_stack_vars added anything
+ to the vectors.
+ (expand_gimple_basic_block): Add disable_tail_calls argument.
+ (gimple_expand_cfg): Pass true to it if expand_used_vars returned
+ non-NULL. Emit the sequence returned by expand_used_vars after
+ return_label.
+ * asan.h (asan_emit_stack_protection): New prototype.
+ (asan_shadow_set): New decl.
+ (ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
+ ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
+ (asan_protect_stack_decl): New inline.
+ * toplev.c (process_options): Also disable -faddress-sanitizer on
+ !FRAME_GROWS_DOWNWARDS targets.
+
+2012-11-12 Jakub Jelinek <jakub@redhat.com>
+
* asan.c (build_check_stmt): Rename join_bb variable to else_bb.
(gate_asan_O0): New function.
(pass_asan_O0): New variable.
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
index b9a1e74..7a0b074 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
@@ -2211,7 +2211,7 @@ stor-layout.o : stor-layout.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
asan.o : asan.c asan.h $(CONFIG_H) $(SYSTEM_H) $(GIMPLE_H) \
output.h coretypes.h $(GIMPLE_PRETTY_PRINT_H) \
tree-iterator.h $(TREE_FLOW_H) $(TREE_PASS_H) \
- $(TARGET_H)
+ $(TARGET_H) $(EXPR_H) $(OPTABS_H)
tree-ssa-tail-merge.o: tree-ssa-tail-merge.c \
$(SYSTEM_H) $(CONFIG_H) coretypes.h $(TM_H) $(BITMAP_H) \
$(FLAGS_H) $(TM_P_H) $(BASIC_BLOCK_H) \
@@ -3082,7 +3082,7 @@ cfgexpand.o : cfgexpand.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(DIAGNOSTIC_H) toplev.h $(DIAGNOSTIC_CORE_H) $(BASIC_BLOCK_H) $(FLAGS_H) debug.h $(PARAMS_H) \
value-prof.h $(TREE_INLINE_H) $(TARGET_H) $(SSAEXPAND_H) $(REGS_H) \
$(GIMPLE_PRETTY_PRINT_H) $(BITMAP_H) sbitmap.h \
- $(INSN_ATTR_H) $(CFGLOOP_H)
+ $(INSN_ATTR_H) $(CFGLOOP_H) asan.h
cfgrtl.o : cfgrtl.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_ERROR_H) \
$(FLAGS_H) insn-config.h $(BASIC_BLOCK_H) $(REGS_H) hard-reg-set.h \
$(FUNCTION_H) $(EXCEPT_H) $(TM_P_H) $(INSN_ATTR_H) \
diff --git a/gcc/asan.c b/gcc/asan.c
index 398b837..b9f2744 100644
--- a/gcc/asan.c
+++ b/gcc/asan.c
@@ -29,6 +29,8 @@ along with GCC; see the file COPYING3. If not see
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
+#include "expr.h"
+#include "optabs.h"
/*
AddressSanitizer finds out-of-bounds and use-after-free bugs
@@ -58,17 +60,303 @@ along with GCC; see the file COPYING3. If not see
Read more:
http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
- Future work:
- The current implementation supports only detection of out-of-bounds and
- use-after-free bugs in heap.
- In order to support out-of-bounds for stack and globals we will need
- to create redzones for stack and global object and poison them.
-*/
+ The current implementation supports detection of out-of-bounds and
+ use-after-free in the heap, on the stack and for global variables.
+
+ [Protection of stack variables]
+
+ To understand how detection of out-of-bounds and use-after-free works
+ for stack variables, let's look at this example on x86_64 where the
+ stack grows downward:
+
+ int
+ foo ()
+ {
+ char a[23] = {0};
+ int b[2] = {0};
+
+ a[5] = 1;
+ b[1] = 2;
+
+ return a[5] + b[1];
+ }
+
+ For this function, the stack protected by asan will be organized as
+ follows, from the top of the stack to the bottom:
+
+ Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
+
+ Slot 2/ [8 bytes of red zone that add up to the space of 'a' to make
+ the next slot be 32-byte aligned; this one is called Partial
+ Redzone; this 32-byte alignment is an asan constraint]
+
+ Slot 3/ [24 bytes for variable 'a']
+
+ Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
+
+ Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
+
+ Slot 6/ [8 bytes for variable 'b']
+
+ Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called 'LEFT
+ RedZone']
+
+ The 32 bytes of LEFT red zone at the bottom of the stack can be
+ decomposed as follows:
+
+ 1/ The first 8 bytes contain a magical asan number that is always
+ 0x41B58AB3.
+
+ 2/ The following 8 bytes contain a pointer to a string (to be
+ parsed at runtime by the runtime asan library), whose format is
+ the following:
+
+ "<function-name> <space> <num-of-variables-on-the-stack>
+ (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
+ <length-of-var-in-bytes> ){n} "
+
+ where '(...){n}' means the content inside the parentheses occurs 'n'
+ times, with 'n' being the number of variables on the stack.
+
+ 3/ The following 16 bytes of the red zone have no particular
+ format.
+
+ The shadow memory for that stack layout is going to look like this:
+
+ - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
+ The F1 byte pattern is a magic number called
+ ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
+ the memory for that shadow byte is part of the LEFT red zone
+ intended to sit at the bottom of the variables on the stack.
+
+ - content of shadow memory 8 bytes for slots 6 and 5:
+ 0xF4F4F400. The F4 byte pattern is a magic number
+ called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
+ memory region for this shadow byte is a PARTIAL red zone
+ intended to pad a variable A, so that the slot following
+ {A,padding} is 32 bytes aligned.
+
+ Note that the fact that the least significant byte of this
+ shadow memory content is 00 means that 8 bytes of its
+ corresponding memory (which corresponds to the memory of
+ variable 'b') are addressable.
+
+ - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
+ The F2 byte pattern is a magic number called
+ ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
+ region for this shadow byte is a MIDDLE red zone intended to
+ sit between two 32-byte aligned slots of {variable,padding}.
+
+ - content of shadow memory 8 bytes for slot 3 and 2:
+ 0xFFFFFFFFF4000000. This represents the concatenation of
+ variable 'a' and the partial red zone following it, like what we
+ had for variable 'b'. The least significant 3 bytes being 00
+ means that 24 bytes of variable 'a' are addressable.
+
+ - content of shadow memory 8 bytes for slot 1: 0xFFFFFFFFF3F3F3F3.
+ The F3 byte pattern is a magic number called
+ ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
+ region for this shadow byte is a RIGHT red zone intended to sit
+ at the top of the variables of the stack.
+
+ Note that the real variable layout is done in expand_used_vars in
+ cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
+ stack variables as well as the different red zones, emits some
+ prologue code to populate the shadow memory as to poison (mark as
+ non-accessible) the regions of the red zones and mark the regions of
+ stack variables as accessible, and emits some epilogue code to
+ un-poison (mark as accessible) the regions of red zones right before
+ the function exits. */
+
+alias_set_type asan_shadow_set = -1;
/* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
alias set is used for all shadow memory accesses. */
static GTY(()) tree shadow_ptr_types[2];
+/* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
+
+static rtx
+asan_shadow_cst (unsigned char shadow_bytes[4])
+{
+ int i;
+ unsigned HOST_WIDE_INT val = 0;
+ gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
+ for (i = 0; i < 4; i++)
+ val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
+ << (BITS_PER_UNIT * i);
+ return GEN_INT (trunc_int_for_mode (val, SImode));
+}
+
+/* Insert code to protect stack vars. The prologue sequence should be emitted
+ directly, epilogue sequence returned. BASE is the register holding the
+ stack base, against which OFFSETS array offsets are relative to, OFFSETS
+ array contains pairs of offsets in reverse order, always the end offset
+ of some gap that needs protection followed by starting offset,
+ and DECLS is an array of representative decls for each var partition.
+ LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
+ elements long (OFFSETS include gap before the first variable as well
+ as gaps after each stack variable). */
+
+rtx
+asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
+ int length)
+{
+ rtx shadow_base, shadow_mem, ret, mem;
+ unsigned char shadow_bytes[4];
+ HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
+ HOST_WIDE_INT last_offset, last_size;
+ int l;
+ unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
+ static pretty_printer pp;
+ static bool pp_initialized;
+ const char *buf;
+ size_t len;
+ tree str_cst;
+
+ /* First of all, prepare the description string. */
+ if (!pp_initialized)
+ {
+ pp_construct (&pp, /* prefix */NULL, /* line-width */0);
+ pp_initialized = true;
+ }
+ pp_clear_output_area (&pp);
+ if (DECL_NAME (current_function_decl))
+ pp_base_tree_identifier (&pp, DECL_NAME (current_function_decl));
+ else
+ pp_string (&pp, "<unknown>");
+ pp_space (&pp);
+ pp_decimal_int (&pp, length / 2 - 1);
+ pp_space (&pp);
+ for (l = length - 2; l; l -= 2)
+ {
+ tree decl = decls[l / 2 - 1];
+ pp_wide_integer (&pp, offsets[l] - base_offset);
+ pp_space (&pp);
+ pp_wide_integer (&pp, offsets[l - 1] - offsets[l]);
+ pp_space (&pp);
+ if (DECL_P (decl) && DECL_NAME (decl))
+ {
+ pp_decimal_int (&pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
+ pp_space (&pp);
+ pp_base_tree_identifier (&pp, DECL_NAME (decl));
+ }
+ else
+ pp_string (&pp, "9 <unknown>");
+ pp_space (&pp);
+ }
+ buf = pp_base_formatted_text (&pp);
+ len = strlen (buf);
+ str_cst = build_string (len + 1, buf);
+ TREE_TYPE (str_cst)
+ = build_array_type (char_type_node, build_index_type (size_int (len)));
+ TREE_READONLY (str_cst) = 1;
+ TREE_STATIC (str_cst) = 1;
+ str_cst = build1 (ADDR_EXPR, build_pointer_type (char_type_node), str_cst);
+
+ /* Emit the prologue sequence. */
+ base = expand_binop (Pmode, add_optab, base, GEN_INT (base_offset),
+ NULL_RTX, 1, OPTAB_DIRECT);
+ mem = gen_rtx_MEM (ptr_mode, base);
+ emit_move_insn (mem, GEN_INT (ASAN_STACK_FRAME_MAGIC));
+ mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
+ emit_move_insn (mem, expand_normal (str_cst));
+ shadow_base = expand_binop (Pmode, lshr_optab, base,
+ GEN_INT (ASAN_SHADOW_SHIFT),
+ NULL_RTX, 1, OPTAB_DIRECT);
+ shadow_base = expand_binop (Pmode, add_optab, shadow_base,
+ GEN_INT (targetm.asan_shadow_offset ()),
+ NULL_RTX, 1, OPTAB_DIRECT);
+ gcc_assert (asan_shadow_set != -1
+ && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
+ shadow_mem = gen_rtx_MEM (SImode, shadow_base);
+ set_mem_alias_set (shadow_mem, asan_shadow_set);
+ prev_offset = base_offset;
+ for (l = length; l; l -= 2)
+ {
+ if (l == 2)
+ cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
+ offset = offsets[l - 1];
+ if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
+ {
+ int i;
+ HOST_WIDE_INT aoff
+ = base_offset + ((offset - base_offset)
+ & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
+ shadow_mem = adjust_address (shadow_mem, VOIDmode,
+ (aoff - prev_offset)
+ >> ASAN_SHADOW_SHIFT);
+ prev_offset = aoff;
+ for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
+ if (aoff < offset)
+ {
+ if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
+ shadow_bytes[i] = 0;
+ else
+ shadow_bytes[i] = offset - aoff;
+ }
+ else
+ shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
+ emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
+ offset = aoff;
+ }
+ while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
+ {
+ shadow_mem = adjust_address (shadow_mem, VOIDmode,
+ (offset - prev_offset)
+ >> ASAN_SHADOW_SHIFT);
+ prev_offset = offset;
+ memset (shadow_bytes, cur_shadow_byte, 4);
+ emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
+ offset += ASAN_RED_ZONE_SIZE;
+ }
+ cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
+ }
+ do_pending_stack_adjust ();
+
+ /* Construct epilogue sequence. */
+ start_sequence ();
+
+ shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
+ set_mem_alias_set (shadow_mem, asan_shadow_set);
+ prev_offset = base_offset;
+ last_offset = base_offset;
+ last_size = 0;
+ for (l = length; l; l -= 2)
+ {
+ offset = base_offset + ((offsets[l - 1] - base_offset)
+ & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
+ if (last_offset + last_size != offset)
+ {
+ shadow_mem = adjust_address (shadow_mem, VOIDmode,
+ (last_offset - prev_offset)
+ >> ASAN_SHADOW_SHIFT);
+ prev_offset = last_offset;
+ clear_storage (shadow_mem, GEN_INT (last_size >> ASAN_SHADOW_SHIFT),
+ BLOCK_OP_NORMAL);
+ last_offset = offset;
+ last_size = 0;
+ }
+ last_size += base_offset + ((offsets[l - 2] - base_offset)
+ & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
+ - offset;
+ }
+ if (last_size)
+ {
+ shadow_mem = adjust_address (shadow_mem, VOIDmode,
+ (last_offset - prev_offset)
+ >> ASAN_SHADOW_SHIFT);
+ clear_storage (shadow_mem, GEN_INT (last_size >> ASAN_SHADOW_SHIFT),
+ BLOCK_OP_NORMAL);
+ }
+
+ do_pending_stack_adjust ();
+
+ ret = get_insns ();
+ end_sequence ();
+ return ret;
+}
+
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
IS_STORE is either 1 (for a store) or 0 (for a load).
SIZE_IN_BYTES is one of 1, 2, 4, 8, 16. */
@@ -389,12 +677,12 @@ asan_finish_file (void)
static void
asan_init_shadow_ptr_types (void)
{
- alias_set_type set = new_alias_set ();
+ asan_shadow_set = new_alias_set ();
shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
- TYPE_ALIAS_SET (shadow_ptr_types[0]) = set;
+ TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
- TYPE_ALIAS_SET (shadow_ptr_types[1]) = set;
+ TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
}
diff --git a/gcc/asan.h b/gcc/asan.h
index 0d9ab8b..6f0edbf 100644
--- a/gcc/asan.h
+++ b/gcc/asan.h
@@ -21,10 +21,39 @@ along with GCC; see the file COPYING3. If not see
#ifndef TREE_ASAN
#define TREE_ASAN
-extern void asan_finish_file(void);
+extern void asan_finish_file (void);
+extern rtx asan_emit_stack_protection (rtx, HOST_WIDE_INT *, tree *, int);
+
+/* Alias set for accessing the shadow memory. */
+extern alias_set_type asan_shadow_set;
/* Shadow memory is found at
(address >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
#define ASAN_SHADOW_SHIFT 3
+/* Red zone size, stack and global variables are padded by ASAN_RED_ZONE_SIZE
+ up to 2 * ASAN_RED_ZONE_SIZE - 1 bytes. */
+#define ASAN_RED_ZONE_SIZE 32
+
+/* Shadow memory values for stack protection. Left is below protected vars,
+ the first pointer in stack corresponding to that offset contains
+ ASAN_STACK_FRAME_MAGIC word, the second pointer to a string describing
+ the frame. Middle is for padding in between variables, right is
+ above the last protected variable and partial immediately after variables
+ up to ASAN_RED_ZONE_SIZE alignment. */
+#define ASAN_STACK_MAGIC_LEFT 0xf1
+#define ASAN_STACK_MAGIC_MIDDLE 0xf2
+#define ASAN_STACK_MAGIC_RIGHT 0xf3
+#define ASAN_STACK_MAGIC_PARTIAL 0xf4
+
+#define ASAN_STACK_FRAME_MAGIC 0x41b58ab3
+
+/* Return true if DECL should be guarded on the stack. */
+
+static inline bool
+asan_protect_stack_decl (tree decl)
+{
+ return DECL_P (decl) && !DECL_ARTIFICIAL (decl);
+}
+
#endif /* TREE_ASAN */
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index 0bd9d1d..2c17ec1 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -47,6 +47,7 @@ along with GCC; see the file COPYING3. If not see
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber. */
#include "insn-attr.h" /* For INSN_SCHEDULING. */
+#include "asan.h"
/* This variable holds information helping the rewriting of SSA trees
into RTL. */
@@ -736,6 +737,7 @@ partition_stack_vars (void)
{
size_t i = stack_vars_sorted[si];
unsigned int ialign = stack_vars[i].alignb;
+ HOST_WIDE_INT isize = stack_vars[i].size;
/* Ignore objects that aren't partition representatives. If we
see a var that is not a partition representative, it must
@@ -747,19 +749,28 @@ partition_stack_vars (void)
{
size_t j = stack_vars_sorted[sj];
unsigned int jalign = stack_vars[j].alignb;
+ HOST_WIDE_INT jsize = stack_vars[j].size;
/* Ignore objects that aren't partition representatives. */
if (stack_vars[j].representative != j)
continue;
- /* Ignore conflicting objects. */
- if (stack_var_conflict_p (i, j))
- continue;
-
/* Do not mix objects of "small" (supported) alignment
and "large" (unsupported) alignment. */
if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
!= (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
+ break;
+
+ /* For Address Sanitizer do not mix objects with different
+ sizes, as the shorter vars wouldn't be adequately protected.
+ Don't do that for "large" (unsupported) alignment objects,
+ those aren't protected anyway. */
+ if (flag_asan && isize != jsize
+ && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
+ break;
+
+ /* Ignore conflicting objects. */
+ if (stack_var_conflict_p (i, j))
continue;
/* UNION the objects, placing J at OFFSET. */
@@ -837,12 +848,26 @@ expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
set_rtl (decl, x);
}
+DEF_VEC_I(HOST_WIDE_INT);
+DEF_VEC_ALLOC_I(HOST_WIDE_INT,heap);
+
+struct stack_vars_data
+{
+ /* Vector of offset pairs, always end of some padding followed
+ by start of the padding that needs Address Sanitizer protection.
+ The vector is in reversed, highest offset pairs come first. */
+ VEC(HOST_WIDE_INT, heap) *asan_vec;
+
+ /* Vector of partition representative decls in between the paddings. */
+ VEC(tree, heap) *asan_decl_vec;
+};
+
/* A subroutine of expand_used_vars. Give each partition representative
a unique location within the stack frame. Update each partition member
with that location. */
static void
-expand_stack_vars (bool (*pred) (tree))
+expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
size_t si, i, j, n = stack_vars_num;
HOST_WIDE_INT large_size = 0, large_alloc = 0;
@@ -913,13 +938,45 @@ expand_stack_vars (bool (*pred) (tree))
/* Check the predicate to see whether this variable should be
allocated in this pass. */
- if (pred && !pred (decl))
+ if (pred && !pred (i))
continue;
alignb = stack_vars[i].alignb;
if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
{
- offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
+ if (flag_asan && pred)
+ {
+ HOST_WIDE_INT prev_offset = frame_offset;
+ tree repr_decl = NULL_TREE;
+
+ offset
+ = alloc_stack_frame_space (stack_vars[i].size
+ + ASAN_RED_ZONE_SIZE,
+ MAX (alignb, ASAN_RED_ZONE_SIZE));
+ VEC_safe_push (HOST_WIDE_INT, heap, data->asan_vec,
+ prev_offset);
+ VEC_safe_push (HOST_WIDE_INT, heap, data->asan_vec,
+ offset + stack_vars[i].size);
+ /* Find best representative of the partition.
+ Prefer those with DECL_NAME, even better
+ satisfying asan_protect_stack_decl predicate. */
+ for (j = i; j != EOC; j = stack_vars[j].next)
+ if (asan_protect_stack_decl (stack_vars[j].decl)
+ && DECL_NAME (stack_vars[j].decl))
+ {
+ repr_decl = stack_vars[j].decl;
+ break;
+ }
+ else if (repr_decl == NULL_TREE
+ && DECL_P (stack_vars[j].decl)
+ && DECL_NAME (stack_vars[j].decl))
+ repr_decl = stack_vars[j].decl;
+ if (repr_decl == NULL_TREE)
+ repr_decl = stack_vars[i].decl;
+ VEC_safe_push (tree, heap, data->asan_decl_vec, repr_decl);
+ }
+ else
+ offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
base = virtual_stack_vars_rtx;
base_align = crtl->max_used_stack_slot_alignment;
}
@@ -1057,8 +1114,9 @@ static bool
defer_stack_allocation (tree var, bool toplevel)
{
/* If stack protection is enabled, *all* stack variables must be deferred,
- so that we can re-order the strings to the top of the frame. */
- if (flag_stack_protect)
+ so that we can re-order the strings to the top of the frame.
+ Similarly for Address Sanitizer. */
+ if (flag_stack_protect || flag_asan)
return true;
/* We handle "large" alignment via dynamic allocation. We want to handle
@@ -1329,15 +1387,31 @@ stack_protect_decl_phase (tree decl)
as callbacks for expand_stack_vars. */
static bool
-stack_protect_decl_phase_1 (tree decl)
+stack_protect_decl_phase_1 (size_t i)
+{
+ return stack_protect_decl_phase (stack_vars[i].decl) == 1;
+}
+
+static bool
+stack_protect_decl_phase_2 (size_t i)
{
- return stack_protect_decl_phase (decl) == 1;
+ return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
+/* And helper function that checks for asan phase (with stack protector
+ it is phase 3). This is used as callback for expand_stack_vars.
+ Returns true if any of the vars in the partition need to be protected. */
+
static bool
-stack_protect_decl_phase_2 (tree decl)
+asan_decl_phase_3 (size_t i)
{
- return stack_protect_decl_phase (decl) == 2;
+ while (i != EOC)
+ {
+ if (asan_protect_stack_decl (stack_vars[i].decl))
+ return true;
+ i = stack_vars[i].next;
+ }
+ return false;
}
/* Ensure that variables in different stack protection phases conflict
@@ -1448,11 +1522,12 @@ estimated_stack_frame_size (struct cgraph_node *node)
/* Expand all variables used in the function. */
-static void
+static rtx
expand_used_vars (void)
{
tree var, outer_block = DECL_INITIAL (current_function_decl);
VEC(tree,heap) *maybe_local_decls = NULL;
+ rtx var_end_seq = NULL_RTX;
struct pointer_map_t *ssa_name_decls;
unsigned i;
unsigned len;
@@ -1603,6 +1678,11 @@ expand_used_vars (void)
/* Assign rtl to each variable based on these partitions. */
if (stack_vars_num > 0)
{
+ struct stack_vars_data data;
+
+ data.asan_vec = NULL;
+ data.asan_decl_vec = NULL;
+
/* Reorder decls to be protected by iterating over the variables
array multiple times, and allocating out of each phase in turn. */
/* ??? We could probably integrate this into the qsort we did
@@ -1611,14 +1691,41 @@ expand_used_vars (void)
if (has_protected_decls)
{
/* Phase 1 contains only character arrays. */
- expand_stack_vars (stack_protect_decl_phase_1);
+ expand_stack_vars (stack_protect_decl_phase_1, &data);
/* Phase 2 contains other kinds of arrays. */
if (flag_stack_protect == 2)
- expand_stack_vars (stack_protect_decl_phase_2);
+ expand_stack_vars (stack_protect_decl_phase_2, &data);
}
- expand_stack_vars (NULL);
+ if (flag_asan)
+ /* Phase 3, any partitions that need asan protection
+ in addition to phase 1 and 2. */
+ expand_stack_vars (asan_decl_phase_3, &data);
+
+ if (!VEC_empty (HOST_WIDE_INT, data.asan_vec))
+ {
+ HOST_WIDE_INT prev_offset = frame_offset;
+ HOST_WIDE_INT offset
+ = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
+ ASAN_RED_ZONE_SIZE);
+ VEC_safe_push (HOST_WIDE_INT, heap, data.asan_vec, prev_offset);
+ VEC_safe_push (HOST_WIDE_INT, heap, data.asan_vec, offset);
+
+ var_end_seq
+ = asan_emit_stack_protection (virtual_stack_vars_rtx,
+ VEC_address (HOST_WIDE_INT,
+ data.asan_vec),
+ VEC_address (tree,
+ data.asan_decl_vec),
+ VEC_length (HOST_WIDE_INT,
+ data.asan_vec));
+ }
+
+ expand_stack_vars (NULL, &data);
+
+ VEC_free (HOST_WIDE_INT, heap, data.asan_vec);
+ VEC_free (tree, heap, data.asan_decl_vec);
}
fini_vars_expansion ();
@@ -1645,6 +1752,8 @@ expand_used_vars (void)
frame_offset += align - 1;
frame_offset &= -align;
}
+
+ return var_end_seq;
}
@@ -3661,7 +3770,7 @@ expand_debug_locations (void)
/* Expand basic block BB from GIMPLE trees to RTL. */
static basic_block
-expand_gimple_basic_block (basic_block bb)
+expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
gimple_stmt_iterator gsi;
gimple_seq stmts;
@@ -3949,6 +4058,11 @@ expand_gimple_basic_block (basic_block bb)
}
else
{
+ if (is_gimple_call (stmt)
+ && gimple_call_tail_p (stmt)
+ && disable_tail_calls)
+ gimple_call_set_tail (stmt, false);
+
if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
{
bool can_fallthru;
@@ -4308,7 +4422,7 @@ gimple_expand_cfg (void)
sbitmap blocks;
edge_iterator ei;
edge e;
- rtx var_seq;
+ rtx var_seq, var_ret_seq;
unsigned i;
timevar_push (TV_OUT_OF_SSA);
@@ -4368,7 +4482,7 @@ gimple_expand_cfg (void)
timevar_push (TV_VAR_EXPAND);
start_sequence ();
- expand_used_vars ();
+ var_ret_seq = expand_used_vars ();
var_seq = get_insns ();
end_sequence ();
@@ -4494,7 +4608,7 @@ gimple_expand_cfg (void)
lab_rtx_for_bb = pointer_map_create ();
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
- bb = expand_gimple_basic_block (bb);
+ bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
if (MAY_HAVE_DEBUG_INSNS)
expand_debug_locations ();
@@ -4522,6 +4636,15 @@ gimple_expand_cfg (void)
construct_exit_block ();
insn_locations_finalize ();
+ if (var_ret_seq)
+ {
+ rtx after = return_label;
+ rtx next = NEXT_INSN (after);
+ if (next && NOTE_INSN_BASIC_BLOCK_P (next))
+ after = next;
+ emit_insn_after (var_ret_seq, after);
+ }
+
/* Zap the tree EH table. */
set_eh_throw_stmt_table (cfun, NULL);
diff --git a/gcc/toplev.c b/gcc/toplev.c
index d9dfb2a..8911ca3 100644
--- a/gcc/toplev.c
+++ b/gcc/toplev.c
@@ -1542,7 +1542,9 @@ process_options (void)
}
/* Address Sanitizer needs porting to each target architecture. */
- if (flag_asan && targetm.asan_shadow_offset == NULL)
+ if (flag_asan
+ && (targetm.asan_shadow_offset == NULL
+ || !FRAME_GROWS_DOWNWARD))
{
warning (0, "-faddress-sanitizer not supported for this target");
flag_asan = 0;