aboutsummaryrefslogtreecommitdiff
path: root/gcc/cfgexpand.c
diff options
context:
space:
mode:
authorMatthew Malcomson <matthew.malcomson@arm.com>2020-11-25 16:31:45 +0000
committerMatthew Malcomson <matthew.malcomson@arm.com>2020-11-25 16:38:06 +0000
commit0854b584bdc2862b49f029095e58beca797cf449 (patch)
treee94eeff162ddb12990760dd50c50754a56d1b957 /gcc/cfgexpand.c
parent3bd8783207760cc805d8a4e64e5ac92ca508a711 (diff)
downloadgcc-0854b584bdc2862b49f029095e58beca797cf449.zip
gcc-0854b584bdc2862b49f029095e58beca797cf449.tar.gz
gcc-0854b584bdc2862b49f029095e58beca797cf449.tar.bz2
libsanitizer: mid-end: Introduce stack variable handling for HWASAN
Handling stack variables has three features. 1) Ensure HWASAN required alignment for stack variables When tagging shadow memory, we need to ensure that each tag granule is only used by one variable at a time. This is done by ensuring that each tagged variable is aligned to the tag granule representation size and also ensure that the end of each object is aligned to ensure the start of any other data stored on the stack is in a different granule. This patch ensures the above by forcing the stack pointer to be aligned before and after allocating any stack objects. Since we are forcing alignment we also use `align_local_variable` to ensure this new alignment is advertised properly through SET_DECL_ALIGN. 2) Put tags into each stack variable pointer Make sure that every pointer to a stack variable includes a tag of some sort on it. The way tagging works is: 1) For every new stack frame, a random tag is generated. 2) A base register is formed from the stack pointer value and this random tag. 3) References to stack variables are now formed with RTL describing an offset from this base in both tag and value. The random tag generation is handled by a backend hook. This hook decides whether to introduce a random tag or use the stack background based on the parameter hwasan-random-frame-tag. Using the stack background is necessary for testing and bootstrap. It is necessary during bootstrap to avoid breaking the `configure` test program for determining stack direction. Using the stack background means that every stack frame has the initial tag of zero and variables are tagged with incrementing tags from 1, which also makes debugging a bit easier. Backend hooks define the size of a tag, the layout of the HWASAN shadow memory, and handle emitting the code that inserts and extracts tags from a pointer. 3) For each stack variable, tag and untag the shadow stack on function prologue and epilogue. 
On entry to each function we tag the relevant shadow stack region for each stack variable. This stack region is tagged to match the tag added to each pointer to that variable. This is the first patch where we use the HWASAN shadow space, so we need to add in the libhwasan initialisation code that creates this shadow memory region into the binary we produce. This instrumentation is done in `compile_file`. When exiting a function we need to ensure the shadow stack for this function has no remaining tags. Without clearing the shadow stack area for this stack frame, later function calls could get false positives when those later function calls check untagged areas (such as parameters passed on the stack) against a shadow stack area with left-over tag. Hence we ensure that the entire stack frame is cleared on function exit. config/ChangeLog: * bootstrap-hwasan.mk: Disable random frame tags for stack-tagging during bootstrap. gcc/ChangeLog: * asan.c (struct hwasan_stack_var): New. (hwasan_sanitize_p): New. (hwasan_sanitize_stack_p): New. (hwasan_sanitize_allocas_p): New. (initialize_sanitizer_builtins): Define new builtins. (ATTR_NOTHROW_LIST): New macro. (hwasan_current_frame_tag): New. (hwasan_frame_base): New. (stack_vars_base_reg_p): New. (hwasan_maybe_emit_frame_base_init): New. (hwasan_record_stack_var): New. (hwasan_get_frame_extent): New. (hwasan_increment_frame_tag): New. (hwasan_record_frame_init): New. (hwasan_emit_prologue): New. (hwasan_emit_untag_frame): New. (hwasan_finish_file): New. (hwasan_truncate_to_tag_size): New. * asan.h (hwasan_record_frame_init): New declaration. (hwasan_record_stack_var): New declaration. (hwasan_emit_prologue): New declaration. (hwasan_emit_untag_frame): New declaration. (hwasan_get_frame_extent): New declaration. (hwasan_maybe_emit_frame_base_init): New declaration. (hwasan_frame_base): New declaration. (stack_vars_base_reg_p): New declaration. (hwasan_current_frame_tag): New declaration. 
(hwasan_increment_frame_tag): New declaration. (hwasan_truncate_to_tag_size): New declaration. (hwasan_finish_file): New declaration. (hwasan_sanitize_p): New declaration. (hwasan_sanitize_stack_p): New declaration. (hwasan_sanitize_allocas_p): New declaration. (HWASAN_TAG_SIZE): New macro. (HWASAN_TAG_GRANULE_SIZE): New macro. (HWASAN_STACK_BACKGROUND): New macro. * builtin-types.def (BT_FN_VOID_PTR_UINT8_PTRMODE): New. * builtins.def (DEF_SANITIZER_BUILTIN): Enable for HWASAN. * cfgexpand.c (align_local_variable): When using hwasan ensure alignment to tag granule. (align_frame_offset): New. (expand_one_stack_var_at): For hwasan use tag offset. (expand_stack_vars): Record stack objects for hwasan. (expand_one_stack_var_1): Record stack objects for hwasan. (init_vars_expansion): Initialise hwasan state. (expand_used_vars): Emit hwasan prologue and generate hwasan epilogue. (pass_expand::execute): Emit hwasan base initialization if needed. * doc/tm.texi (TARGET_MEMTAG_TAG_SIZE,TARGET_MEMTAG_GRANULE_SIZE, TARGET_MEMTAG_INSERT_RANDOM_TAG,TARGET_MEMTAG_ADD_TAG, TARGET_MEMTAG_SET_TAG,TARGET_MEMTAG_EXTRACT_TAG, TARGET_MEMTAG_UNTAGGED_POINTER): Document new hooks. * doc/tm.texi.in (TARGET_MEMTAG_TAG_SIZE,TARGET_MEMTAG_GRANULE_SIZE, TARGET_MEMTAG_INSERT_RANDOM_TAG,TARGET_MEMTAG_ADD_TAG, TARGET_MEMTAG_SET_TAG,TARGET_MEMTAG_EXTRACT_TAG, TARGET_MEMTAG_UNTAGGED_POINTER): Document new hooks. * explow.c (get_dynamic_stack_base): Take new `base` argument. * explow.h (get_dynamic_stack_base): Take new `base` argument. * sanitizer.def (BUILT_IN_HWASAN_INIT): New. (BUILT_IN_HWASAN_TAG_MEM): New. * target.def (target_memtag_tag_size,target_memtag_granule_size, target_memtag_insert_random_tag,target_memtag_add_tag, target_memtag_set_tag,target_memtag_extract_tag, target_memtag_untagged_pointer): New hooks. * targhooks.c (HWASAN_SHIFT): New. (HWASAN_SHIFT_RTX): New. (default_memtag_tag_size): New default hook. (default_memtag_granule_size): New default hook. 
(default_memtag_insert_random_tag): New default hook. (default_memtag_add_tag): New default hook. (default_memtag_set_tag): New default hook. (default_memtag_extract_tag): New default hook. (default_memtag_untagged_pointer): New default hook. * targhooks.h (default_memtag_tag_size): New default hook. (default_memtag_granule_size): New default hook. (default_memtag_insert_random_tag): New default hook. (default_memtag_add_tag): New default hook. (default_memtag_set_tag): New default hook. (default_memtag_extract_tag): New default hook. (default_memtag_untagged_pointer): New default hook. * toplev.c (compile_file): Call hwasan_finish_file when finished.
Diffstat (limited to 'gcc/cfgexpand.c')
-rw-r--r--gcc/cfgexpand.c167
1 files changed, 146 insertions, 21 deletions
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index 1df6f4b..7e0bdd5 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -376,15 +376,18 @@ align_local_variable (tree decl, bool really_expand)
align = GET_MODE_ALIGNMENT (mode);
}
else
- {
- align = LOCAL_DECL_ALIGNMENT (decl);
- /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
- That is done before IPA and could bump alignment based on host
- backend even for offloaded code which wants different
- LOCAL_DECL_ALIGNMENT. */
- if (really_expand)
- SET_DECL_ALIGN (decl, align);
- }
+ align = LOCAL_DECL_ALIGNMENT (decl);
+
+ if (hwasan_sanitize_stack_p ())
+ align = MAX (align, (unsigned) HWASAN_TAG_GRANULE_SIZE * BITS_PER_UNIT);
+
+ if (TREE_CODE (decl) != SSA_NAME && really_expand)
+ /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
+ That is done before IPA and could bump alignment based on host
+ backend even for offloaded code which wants different
+ LOCAL_DECL_ALIGNMENT. */
+ SET_DECL_ALIGN (decl, align);
+
return align / BITS_PER_UNIT;
}
@@ -428,6 +431,14 @@ alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
return offset;
}
+/* Ensure that the stack is aligned to ALIGN bytes.
+ Return the new frame offset. */
+static poly_int64
+align_frame_offset (unsigned HOST_WIDE_INT align)
+{
+ return alloc_stack_frame_space (0, align);
+}
+
/* Accumulate DECL into STACK_VARS. */
static void
@@ -1004,7 +1015,12 @@ expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
/* If this fails, we've overflowed the stack frame. Error nicely? */
gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
- x = plus_constant (Pmode, base, offset);
+ if (hwasan_sanitize_stack_p ())
+ x = targetm.memtag.add_tag (base, offset,
+ hwasan_current_frame_tag ());
+ else
+ x = plus_constant (Pmode, base, offset);
+
x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
? TYPE_MODE (TREE_TYPE (decl))
: DECL_MODE (decl), x);
@@ -1013,7 +1029,7 @@ expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
If it is we generate stack slots only accidentally so it isn't as
important, we'll simply set the alignment directly on the MEM. */
- if (base == virtual_stack_vars_rtx)
+ if (stack_vars_base_reg_p (base))
offset -= frame_phase;
align = known_alignment (offset);
align *= BITS_PER_UNIT;
@@ -1056,13 +1072,13 @@ public:
/* A subroutine of expand_used_vars. Give each partition representative
a unique location within the stack frame. Update each partition member
with that location. */
-
static void
expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
{
size_t si, i, j, n = stack_vars_num;
poly_uint64 large_size = 0, large_alloc = 0;
rtx large_base = NULL;
+ rtx large_untagged_base = NULL;
unsigned large_align = 0;
bool large_allocation_done = false;
tree decl;
@@ -1113,7 +1129,7 @@ expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
{
rtx base;
unsigned base_align, alignb;
- poly_int64 offset;
+ poly_int64 offset = 0;
i = stack_vars_sorted[si];
@@ -1134,10 +1150,33 @@ expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
if (pred && !pred (i))
continue;
+ base = (hwasan_sanitize_stack_p ()
+ ? hwasan_frame_base ()
+ : virtual_stack_vars_rtx);
alignb = stack_vars[i].alignb;
if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
{
- base = virtual_stack_vars_rtx;
+ poly_int64 hwasan_orig_offset;
+ if (hwasan_sanitize_stack_p ())
+ {
+ /* There must be no tag granule "shared" between different
+ objects. This means that no HWASAN_TAG_GRANULE_SIZE byte
+ chunk can have more than one object in it.
+
+ We ensure this by forcing the end of the last bit of data to
+ be aligned to HWASAN_TAG_GRANULE_SIZE bytes here, and setting
+ the start of each variable to be aligned to
+ HWASAN_TAG_GRANULE_SIZE bytes in `align_local_variable`.
+
+ We can't align just one of the start or end, since there are
+ untagged things stored on the stack which we do not align to
+ HWASAN_TAG_GRANULE_SIZE bytes. If we only aligned the start
+ or the end of tagged objects then untagged objects could end
+ up sharing the first granule of a tagged object or sharing the
+ last granule of a tagged object respectively. */
+ hwasan_orig_offset = align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
+ gcc_assert (stack_vars[i].alignb >= HWASAN_TAG_GRANULE_SIZE);
+ }
/* ASAN description strings don't yet have a syntax for expressing
polynomial offsets. */
HOST_WIDE_INT prev_offset;
@@ -1148,7 +1187,7 @@ expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
{
if (data->asan_vec.is_empty ())
{
- alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
+ align_frame_offset (ASAN_RED_ZONE_SIZE);
prev_offset = frame_offset.to_constant ();
}
prev_offset = align_base (prev_offset,
@@ -1216,6 +1255,24 @@ expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
{
offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
base_align = crtl->max_used_stack_slot_alignment;
+
+ if (hwasan_sanitize_stack_p ())
+ {
+ /* Align again since the point of this alignment is to handle
+ the "end" of the object (i.e. smallest address after the
+ stack object). For FRAME_GROWS_DOWNWARD that requires
+ aligning the stack before allocating, but for a frame that
+ grows upwards that requires aligning the stack after
+ allocation.
+
+ Use `frame_offset` to record the offset value rather than
+ `offset` since the `frame_offset` describes the extent
+ allocated for this particular variable while `offset`
+ describes the address that this variable starts at. */
+ align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
+ hwasan_record_stack_var (virtual_stack_vars_rtx, base,
+ hwasan_orig_offset, frame_offset);
+ }
}
}
else
@@ -1236,14 +1293,33 @@ expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
loffset = alloc_stack_frame_space
(rtx_to_poly_int64 (large_allocsize),
PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
- large_base = get_dynamic_stack_base (loffset, large_align);
+ large_base = get_dynamic_stack_base (loffset, large_align, base);
large_allocation_done = true;
}
- gcc_assert (large_base != NULL);
+ gcc_assert (large_base != NULL);
large_alloc = aligned_upper_bound (large_alloc, alignb);
offset = large_alloc;
large_alloc += stack_vars[i].size;
+ if (hwasan_sanitize_stack_p ())
+ {
+ /* An object with a large alignment requirement means that the
+ alignment requirement is greater than the required alignment
+ for tags. */
+ if (!large_untagged_base)
+ large_untagged_base
+ = targetm.memtag.untagged_pointer (large_base, NULL_RTX);
+ /* Ensure the end of the variable is also aligned correctly. */
+ poly_int64 align_again
+ = aligned_upper_bound (large_alloc, HWASAN_TAG_GRANULE_SIZE);
+ /* For large allocations we always allocate a chunk of space
+ (which is addressed by large_untagged_base/large_base) and
+ then use positive offsets from that. Hence the farthest
+ offset is `align_again` and the nearest offset from the base
+ is `offset`. */
+ hwasan_record_stack_var (large_untagged_base, large_base,
+ offset, align_again);
+ }
base = large_base;
base_align = large_align;
@@ -1254,9 +1330,10 @@ expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
for (j = i; j != EOC; j = stack_vars[j].next)
{
expand_one_stack_var_at (stack_vars[j].decl,
- base, base_align,
- offset);
+ base, base_align, offset);
}
+ if (hwasan_sanitize_stack_p ())
+ hwasan_increment_frame_tag ();
}
gcc_assert (known_eq (large_alloc, large_size));
@@ -1347,10 +1424,37 @@ expand_one_stack_var_1 (tree var)
/* We handle highly aligned variables in expand_stack_vars. */
gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
- offset = alloc_stack_frame_space (size, byte_align);
+ rtx base;
+ if (hwasan_sanitize_stack_p ())
+ {
+ /* Allocate zero bytes to align the stack. */
+ poly_int64 hwasan_orig_offset
+ = align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
+ offset = alloc_stack_frame_space (size, byte_align);
+ align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
+ base = hwasan_frame_base ();
+ /* Use `frame_offset` to automatically account for machines where the
+ frame grows upwards.
+
+ `offset` will always point to the "start" of the stack object, which
+ will be the smallest address, for ! FRAME_GROWS_DOWNWARD this is *not*
+ the "furthest" offset from the base delimiting the current stack
+ object. `frame_offset` will always delimit the extent of the frame.
+ */
+ hwasan_record_stack_var (virtual_stack_vars_rtx, base,
+ hwasan_orig_offset, frame_offset);
+ }
+ else
+ {
+ offset = alloc_stack_frame_space (size, byte_align);
+ base = virtual_stack_vars_rtx;
+ }
- expand_one_stack_var_at (var, virtual_stack_vars_rtx,
+ expand_one_stack_var_at (var, base,
crtl->max_used_stack_slot_alignment, offset);
+
+ if (hwasan_sanitize_stack_p ())
+ hwasan_increment_frame_tag ();
}
/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
@@ -1950,6 +2054,8 @@ init_vars_expansion (void)
/* Initialize local stack smashing state. */
has_protected_decls = false;
has_short_buffer = false;
+ if (hwasan_sanitize_stack_p ())
+ hwasan_record_frame_init ();
}
/* Free up stack variable graph data. */
@@ -2277,10 +2383,26 @@ expand_used_vars (void)
expand_stack_vars (NULL, &data);
}
+ if (hwasan_sanitize_stack_p ())
+ hwasan_emit_prologue ();
if (asan_sanitize_allocas_p () && cfun->calls_alloca)
var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
virtual_stack_vars_rtx,
var_end_seq);
+ else if (hwasan_sanitize_allocas_p () && cfun->calls_alloca)
+ /* When using out-of-line instrumentation we only want to emit one function
+ call for clearing the tags in a region of shadow stack. When there are
+ alloca calls in this frame we want to emit a call using the
+ virtual_stack_dynamic_rtx, but when not we use the hwasan_frame_extent
+ rtx we created in expand_stack_vars. */
+ var_end_seq = hwasan_emit_untag_frame (virtual_stack_dynamic_rtx,
+ virtual_stack_vars_rtx);
+ else if (hwasan_sanitize_stack_p ())
+ /* If no variables were stored on the stack, `hwasan_get_frame_extent`
+ will return NULL_RTX and hence `hwasan_emit_untag_frame` will return
+ NULL (i.e. an empty sequence). */
+ var_end_seq = hwasan_emit_untag_frame (hwasan_get_frame_extent (),
+ virtual_stack_vars_rtx);
fini_vars_expansion ();
@@ -6641,6 +6763,9 @@ pass_expand::execute (function *fun)
emit_insn_after (var_ret_seq, after);
}
+ if (hwasan_sanitize_stack_p ())
+ hwasan_maybe_emit_frame_base_init ();
+
/* Zap the tree EH table. */
set_eh_throw_stmt_table (fun, NULL);