diff options
author | Maxim Ostapenko <m.ostapenko@samsung.com> | 2017-07-06 16:02:06 +0000 |
---|---|---|
committer | Maxim Ostapenko <chefmax@gcc.gnu.org> | 2017-07-06 19:02:06 +0300 |
commit | e3174bdf35c172f69daf08350401aa177f8f1498 (patch) | |
tree | e7632bf6f8c4e541f5dbf209ad90d807a7b79318 /gcc/asan.c | |
parent | b6f4312871d39547c2e286b6eadac4408ab494ae (diff) | |
download | gcc-e3174bdf35c172f69daf08350401aa177f8f1498.zip gcc-e3174bdf35c172f69daf08350401aa177f8f1498.tar.gz gcc-e3174bdf35c172f69daf08350401aa177f8f1498.tar.bz2 |
ASAN: Implement dynamic allocas/VLAs sanitization.
gcc/
* asan.c: Include gimple-fold.h.
(get_last_alloca_addr): New function.
(handle_builtin_stackrestore): Likewise.
(handle_builtin_alloca): Likewise.
(asan_emit_allocas_unpoison): Likewise.
(get_mem_refs_of_builtin_call): Add new parameter, remove const
qualifier from first parameter. Handle BUILT_IN_ALLOCA,
BUILT_IN_ALLOCA_WITH_ALIGN and BUILT_IN_STACK_RESTORE builtins.
(instrument_builtin_call): Pass gimple iterator to
get_mem_refs_of_builtin_call.
(last_alloca_addr): New global.
* asan.h (asan_emit_allocas_unpoison): Declare.
* builtins.c (expand_asan_emit_allocas_unpoison): New function.
(expand_builtin): Handle BUILT_IN_ASAN_ALLOCAS_UNPOISON.
* cfgexpand.c (expand_used_vars): Call asan_emit_allocas_unpoison
if function calls alloca.
* gimple-fold.c (replace_call_with_value): Remove static keyword.
* gimple-fold.h (replace_call_with_value): Declare.
* internal-fn.c: Include asan.h.
* sanitizer.def (BUILT_IN_ASAN_ALLOCA_POISON,
BUILT_IN_ASAN_ALLOCAS_UNPOISON): New builtins.
gcc/testsuite/
* c-c++-common/asan/alloca_big_alignment.c: New test.
* c-c++-common/asan/alloca_detect_custom_size.c: Likewise.
* c-c++-common/asan/alloca_instruments_all_paddings.c: Likewise.
* c-c++-common/asan/alloca_loop_unpoisoning.c: Likewise.
* c-c++-common/asan/alloca_overflow_partial.c: Likewise.
* c-c++-common/asan/alloca_overflow_right.c: Likewise.
* c-c++-common/asan/alloca_safe_access.c: Likewise.
* c-c++-common/asan/alloca_underflow_left.c: Likewise.
From-SVN: r250031
Diffstat (limited to 'gcc/asan.c')
-rw-r--r-- | gcc/asan.c | 215 |
1 files changed, 212 insertions, 3 deletions
@@ -55,6 +55,7 @@ along with GCC; see the file COPYING3. If not see #include "langhooks.h" #include "cfgloop.h" #include "gimple-builder.h" +#include "gimple-fold.h" #include "ubsan.h" #include "params.h" #include "builtins.h" @@ -245,6 +246,7 @@ along with GCC; see the file COPYING3. If not see static unsigned HOST_WIDE_INT asan_shadow_offset_value; static bool asan_shadow_offset_computed; static vec<char *> sanitized_sections; +static tree last_alloca_addr; /* Set of variable declarations that are going to be guarded by use-after-scope sanitizer. */ @@ -529,11 +531,186 @@ get_mem_ref_of_assignment (const gassign *assignment, return true; } +/* Return address of last allocated dynamic alloca. */ + +static tree +get_last_alloca_addr () +{ + if (last_alloca_addr) + return last_alloca_addr; + + last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr"); + gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node); + edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)); + gsi_insert_on_edge_immediate (e, g); + return last_alloca_addr; +} + +/* Insert __asan_allocas_unpoison (top, bottom) call after + __builtin_stack_restore (new_sp) call. + The pseudocode of this routine should look like this: + __builtin_stack_restore (new_sp); + top = last_alloca_addr; + bot = new_sp; + __asan_allocas_unpoison (top, bot); + last_alloca_addr = new_sp; + In general, we can't use new_sp as bot parameter because on some + architectures SP has non zero offset from dynamic stack area. Moreover, on + some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for each + particular function only after all callees were expanded to rtl. + The most noticeable example is PowerPC{,64}, see + http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK. 
+ To overcome the issue we use following trick: pass new_sp as a second + parameter to __asan_allocas_unpoison and rewrite it during expansion with + virtual_dynamic_stack_rtx later in expand_asan_emit_allocas_unpoison + function. +*/ + +static void +handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter) +{ + if (!iter) + return; + + tree last_alloca = get_last_alloca_addr (); + tree restored_stack = gimple_call_arg (call, 0); + tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON); + gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack); + gsi_insert_after (iter, g, GSI_NEW_STMT); + g = gimple_build_assign (last_alloca, restored_stack); + gsi_insert_after (iter, g, GSI_NEW_STMT); +} + +/* Deploy and poison redzones around __builtin_alloca call. To do this, we + should replace this call with another one with changed parameters and + replace all its uses with new address, so + addr = __builtin_alloca (old_size, align); + is replaced by + left_redzone_size = max (align, ASAN_RED_ZONE_SIZE); + Following two statements are optimized out if we know that + old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. alloca doesn't need partial + redzone. + misalign = old_size & (ASAN_RED_ZONE_SIZE - 1); + partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign; + right_redzone_size = ASAN_RED_ZONE_SIZE; + additional_size = left_redzone_size + partial_redzone_size + + right_redzone_size; + new_size = old_size + additional_size; + new_alloca = __builtin_alloca (new_size, max (align, 32)) + __asan_alloca_poison (new_alloca, old_size) + addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE); + last_alloca_addr = new_alloca; + ADDITIONAL_SIZE is added to make new memory allocation contain not only + requested memory, but also left, partial and right redzones as well as some + additional space, required by alignment. 
*/ + +static void +handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter) +{ + if (!iter) + return; + + gassign *g; + gcall *gg; + const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1; + + tree last_alloca = get_last_alloca_addr (); + tree callee = gimple_call_fndecl (call); + tree old_size = gimple_call_arg (call, 0); + tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call)) + : ptr_type_node; + tree partial_size = NULL_TREE; + bool alloca_with_align + = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN; + unsigned int align + = alloca_with_align ? tree_to_uhwi (gimple_call_arg (call, 1)) : 0; + + /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN + bytes of allocated space. Otherwise, align alloca to ASAN_RED_ZONE_SIZE + manually. */ + align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT); + + tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask); + tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE); + + /* Extract lower bits from old_size. */ + wide_int size_nonzero_bits = get_nonzero_bits (old_size); + wide_int rz_mask + = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits)); + wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask); + + /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial + redzone. Otherwise, compute its size here. */ + if (wi::ne_p (old_size_lower_bits, 0)) + { + /* misalign = size & (ASAN_RED_ZONE_SIZE - 1) + partial_size = ASAN_RED_ZONE_SIZE - misalign. */ + g = gimple_build_assign (make_ssa_name (size_type_node, NULL), + BIT_AND_EXPR, old_size, alloca_rz_mask); + gsi_insert_before (iter, g, GSI_SAME_STMT); + tree misalign = gimple_assign_lhs (g); + g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR, + redzone_size, misalign); + gsi_insert_before (iter, g, GSI_SAME_STMT); + partial_size = gimple_assign_lhs (g); + } + + /* additional_size = align + ASAN_RED_ZONE_SIZE. 
*/ + tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT + + ASAN_RED_ZONE_SIZE); + /* If alloca has partial redzone, include it to additional_size too. */ + if (partial_size) + { + /* additional_size += partial_size. */ + g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, + partial_size, additional_size); + gsi_insert_before (iter, g, GSI_SAME_STMT); + additional_size = gimple_assign_lhs (g); + } + + /* new_size = old_size + additional_size. */ + g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size, + additional_size); + gsi_insert_before (iter, g, GSI_SAME_STMT); + tree new_size = gimple_assign_lhs (g); + + /* Build new __builtin_alloca call: + new_alloca_with_rz = __builtin_alloca (new_size, align). */ + tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN); + gg = gimple_build_call (fn, 2, new_size, + build_int_cst (size_type_node, align)); + tree new_alloca_with_rz = make_ssa_name (ptr_type, gg); + gimple_call_set_lhs (gg, new_alloca_with_rz); + gsi_insert_before (iter, gg, GSI_SAME_STMT); + + /* new_alloca = new_alloca_with_rz + align. */ + g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR, + new_alloca_with_rz, + build_int_cst (size_type_node, + align / BITS_PER_UNIT)); + gsi_insert_before (iter, g, GSI_SAME_STMT); + tree new_alloca = gimple_assign_lhs (g); + + /* Poison newly created alloca redzones: + __asan_alloca_poison (new_alloca, old_size). */ + fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON); + gg = gimple_build_call (fn, 2, new_alloca, old_size); + gsi_insert_before (iter, gg, GSI_SAME_STMT); + + /* Save new_alloca_with_rz value into last_alloca to use it during + allocas unpoisoning. */ + g = gimple_build_assign (last_alloca, new_alloca_with_rz); + gsi_insert_before (iter, g, GSI_SAME_STMT); + + /* Finally, replace old alloca ptr with NEW_ALLOCA. 
*/ + replace_call_with_value (iter, new_alloca); +} + /* Return the memory references contained in a gimple statement representing a builtin call that has to do with memory access. */ static bool -get_mem_refs_of_builtin_call (const gcall *call, +get_mem_refs_of_builtin_call (gcall *call, asan_mem_ref *src0, tree *src0_len, bool *src0_is_store, @@ -544,7 +721,8 @@ get_mem_refs_of_builtin_call (const gcall *call, tree *dst_len, bool *dst_is_store, bool *dest_is_deref, - bool *intercepted_p) + bool *intercepted_p, + gimple_stmt_iterator *iter = NULL) { gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL)); @@ -603,6 +781,14 @@ get_mem_refs_of_builtin_call (const gcall *call, len = gimple_call_lhs (call); break; + case BUILT_IN_STACK_RESTORE: + handle_builtin_stack_restore (call, iter); + break; + + case BUILT_IN_ALLOCA_WITH_ALIGN: + case BUILT_IN_ALLOCA: + handle_builtin_alloca (call, iter); + break; /* And now the __atomic* and __sync builtins. These are handled differently from the classical memory memory access builtins above. */ @@ -1363,6 +1549,28 @@ asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb, return insns; } +/* Emit __asan_allocas_unpoison (top, bot) call. The BASE parameter corresponds + to BOT argument, for TOP virtual_stack_dynamic_rtx is used. NEW_SEQUENCE + indicates whether we're emitting new instructions sequence or not. */ + +rtx_insn * +asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before) +{ + if (before) + push_to_sequence (before); + else + start_sequence (); + rtx ret = init_one_libfunc ("__asan_allocas_unpoison"); + ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2, top, + TYPE_MODE (pointer_sized_int_node), bot, + TYPE_MODE (pointer_sized_int_node)); + + do_pending_stack_adjust (); + rtx_insn *insns = get_insns (); + end_sequence (); + return insns; +} + /* Return true if DECL, a global var, might be overridden and needs therefore a local alias. 
*/ @@ -2002,7 +2210,7 @@ instrument_builtin_call (gimple_stmt_iterator *iter) &src0, &src0_len, &src0_is_store, &src1, &src1_len, &src1_is_store, &dest, &dest_len, &dest_is_store, - &dest_is_deref, &intercepted_p)) + &dest_is_deref, &intercepted_p, iter)) { if (dest_is_deref) { @@ -3192,6 +3400,7 @@ asan_instrument (void) if (shadow_ptr_types[0] == NULL_TREE) asan_init_shadow_ptr_types (); transform_statements (); + last_alloca_addr = NULL_TREE; return 0; } |