diff options
author | Jakub Jelinek <jakub@redhat.com> | 2017-07-28 12:37:51 +0200 |
---|---|---|
committer | Jakub Jelinek <jakub@gcc.gnu.org> | 2017-07-28 12:37:51 +0200 |
commit | c9b39a4955f56fe609ef54784f7bf48c4cba6b1a (patch) | |
tree | 6ddac4284a4bae1e7241b22e28dcaaeb311277f9 /gcc/ubsan.c | |
parent | 70affe6aff39d347a0e2b7f12a27e1cad4cae405 (diff) | |
download | gcc-c9b39a4955f56fe609ef54784f7bf48c4cba6b1a.zip gcc-c9b39a4955f56fe609ef54784f7bf48c4cba6b1a.tar.gz gcc-c9b39a4955f56fe609ef54784f7bf48c4cba6b1a.tar.bz2 |
re PR sanitizer/80998 (Implement -fsanitize=pointer-overflow)
PR sanitizer/80998
* sanopt.c (pass_sanopt::execute): Handle IFN_UBSAN_PTR.
* tree-ssa-alias.c (call_may_clobber_ref_p_1): Likewise.
* flag-types.h (enum sanitize_code): Add SANITIZER_POINTER_OVERFLOW.
Or it into SANITIZER_UNDEFINED.
* ubsan.c: Include gimple-fold.h and varasm.h.
(ubsan_expand_ptr_ifn): New function.
(instrument_pointer_overflow): New function.
(maybe_instrument_pointer_overflow): New function.
(instrument_object_size): Formatting fix.
(pass_ubsan::execute): Call instrument_pointer_overflow
and maybe_instrument_pointer_overflow.
* internal-fn.c (expand_UBSAN_PTR): New function.
* ubsan.h (ubsan_expand_ptr_ifn): Declare.
* sanitizer.def (__ubsan_handle_pointer_overflow,
__ubsan_handle_pointer_overflow_abort): New builtins.
* tree-ssa-tail-merge.c (merge_stmts_p): Handle IFN_UBSAN_PTR.
* internal-fn.def (UBSAN_PTR): New internal function.
* opts.c (sanitizer_opts): Add pointer-overflow.
* lto-streamer-in.c (input_function): Handle IFN_UBSAN_PTR.
* fold-const.c (build_range_check): Compute pointer range check in
integral type if pointer arithmetics would be needed. Formatting
fixes.
gcc/testsuite/
* c-c++-common/ubsan/ptr-overflow-1.c: New test.
* c-c++-common/ubsan/ptr-overflow-2.c: New test.
libsanitizer/
* ubsan/ubsan_handlers.cc: Cherry-pick upstream r304461.
* ubsan/ubsan_checks.inc: Likewise.
* ubsan/ubsan_handlers.h: Likewise.
From-SVN: r250656
Diffstat (limited to 'gcc/ubsan.c')
-rw-r--r-- | gcc/ubsan.c | 302 |
1 file changed, 300 insertions(+), 2 deletions(-)
diff --git a/gcc/ubsan.c b/gcc/ubsan.c index 8ea352a..cca3c2d 100644 --- a/gcc/ubsan.c +++ b/gcc/ubsan.c @@ -45,6 +45,8 @@ along with GCC; see the file COPYING3. If not see #include "builtins.h" #include "tree-object-size.h" #include "tree-cfg.h" +#include "gimple-fold.h" +#include "varasm.h" /* Map from a tree to a VAR_DECL tree. */ @@ -1029,6 +1031,170 @@ ubsan_expand_objsize_ifn (gimple_stmt_iterator *gsi) return true; } +/* Expand UBSAN_PTR internal call. */ + +bool +ubsan_expand_ptr_ifn (gimple_stmt_iterator *gsip) +{ + gimple_stmt_iterator gsi = *gsip; + gimple *stmt = gsi_stmt (gsi); + location_t loc = gimple_location (stmt); + gcc_assert (gimple_call_num_args (stmt) == 2); + tree ptr = gimple_call_arg (stmt, 0); + tree off = gimple_call_arg (stmt, 1); + + if (integer_zerop (off)) + { + gsi_remove (gsip, true); + unlink_stmt_vdef (stmt); + return true; + } + + basic_block cur_bb = gsi_bb (gsi); + tree ptrplusoff = make_ssa_name (pointer_sized_int_node); + tree ptri = make_ssa_name (pointer_sized_int_node); + int pos_neg = get_range_pos_neg (off); + + /* Split the original block holding the pointer dereference. */ + edge e = split_block (cur_bb, stmt); + + /* Get a hold on the 'condition block', the 'then block' and the + 'else block'. */ + basic_block cond_bb = e->src; + basic_block fallthru_bb = e->dest; + basic_block then_bb = create_empty_bb (cond_bb); + basic_block cond_pos_bb = NULL, cond_neg_bb = NULL; + add_bb_to_loop (then_bb, cond_bb->loop_father); + loops_state_set (LOOPS_NEED_FIXUP); + + /* Set up the fallthrough basic block. */ + e->flags = EDGE_FALSE_VALUE; + if (pos_neg != 3) + { + e->count = cond_bb->count; + e->probability = profile_probability::very_likely (); + + /* Connect 'then block' with the 'else block'. This is needed + as the ubsan routines we call in the 'then block' are not noreturn. + The 'then block' only has one outcoming edge. 
*/ + make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU); + + /* Make an edge coming from the 'cond block' into the 'then block'; + this edge is unlikely taken, so set up the probability + accordingly. */ + e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE); + e->probability = profile_probability::very_unlikely (); + } + else + { + profile_count count = cond_bb->count.apply_probability (PROB_EVEN); + e->count = count; + e->probability = profile_probability::even (); + + e = split_block (fallthru_bb, (gimple *) NULL); + cond_neg_bb = e->src; + fallthru_bb = e->dest; + e->count = count; + e->probability = profile_probability::very_likely (); + e->flags = EDGE_FALSE_VALUE; + + e = make_edge (cond_neg_bb, then_bb, EDGE_TRUE_VALUE); + e->probability = profile_probability::very_unlikely (); + + cond_pos_bb = create_empty_bb (cond_bb); + add_bb_to_loop (cond_pos_bb, cond_bb->loop_father); + + e = make_edge (cond_bb, cond_pos_bb, EDGE_TRUE_VALUE); + e->count = count; + e->probability = profile_probability::even (); + + e = make_edge (cond_pos_bb, then_bb, EDGE_TRUE_VALUE); + e->probability = profile_probability::very_unlikely (); + + e = make_edge (cond_pos_bb, fallthru_bb, EDGE_FALSE_VALUE); + e->count = count; + e->probability = profile_probability::very_likely (); + + make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU); + } + + gimple *g = gimple_build_assign (ptri, NOP_EXPR, ptr); + gimple_set_location (g, loc); + gsi_insert_before (&gsi, g, GSI_SAME_STMT); + g = gimple_build_assign (ptrplusoff, PLUS_EXPR, ptri, off); + gimple_set_location (g, loc); + gsi_insert_before (&gsi, g, GSI_SAME_STMT); + + /* Update dominance info for the newly created then_bb; note that + fallthru_bb's dominance info has already been updated by + split_block. 
*/ + if (dom_info_available_p (CDI_DOMINATORS)) + { + set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb); + if (pos_neg == 3) + { + set_immediate_dominator (CDI_DOMINATORS, cond_pos_bb, cond_bb); + set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond_bb); + } + } + + /* Put the ubsan builtin call into the newly created BB. */ + if (flag_sanitize_undefined_trap_on_error) + g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0); + else + { + enum built_in_function bcode + = (flag_sanitize_recover & SANITIZE_POINTER_OVERFLOW) + ? BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW + : BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW_ABORT; + tree fn = builtin_decl_implicit (bcode); + tree data + = ubsan_create_data ("__ubsan_ptrovf_data", 1, &loc, + NULL_TREE, NULL_TREE); + data = build_fold_addr_expr_loc (loc, data); + g = gimple_build_call (fn, 3, data, ptr, ptrplusoff); + } + gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb); + gimple_set_location (g, loc); + gsi_insert_after (&gsi2, g, GSI_NEW_STMT); + + /* Unlink the UBSAN_PTRs vops before replacing it. */ + unlink_stmt_vdef (stmt); + + if (TREE_CODE (off) == INTEGER_CST) + g = gimple_build_cond (wi::neg_p (off) ? LT_EXPR : GE_EXPR, ptri, + fold_build1 (NEGATE_EXPR, sizetype, off), + NULL_TREE, NULL_TREE); + else if (pos_neg != 3) + g = gimple_build_cond (pos_neg == 1 ? 
LT_EXPR : GT_EXPR, + ptrplusoff, ptri, NULL_TREE, NULL_TREE); + else + { + gsi2 = gsi_start_bb (cond_pos_bb); + g = gimple_build_cond (LT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE); + gimple_set_location (g, loc); + gsi_insert_after (&gsi2, g, GSI_NEW_STMT); + + gsi2 = gsi_start_bb (cond_neg_bb); + g = gimple_build_cond (GT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE); + gimple_set_location (g, loc); + gsi_insert_after (&gsi2, g, GSI_NEW_STMT); + + gimple_seq seq = NULL; + tree t = gimple_build (&seq, loc, NOP_EXPR, ssizetype, off); + t = gimple_build (&seq, loc, GE_EXPR, boolean_type_node, + t, ssize_int (0)); + gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT); + g = gimple_build_cond (NE_EXPR, t, boolean_false_node, + NULL_TREE, NULL_TREE); + } + gimple_set_location (g, loc); + /* Replace the UBSAN_PTR with a GIMPLE_COND stmt. */ + gsi_replace (&gsi, g, false); + return false; +} + + /* Cached __ubsan_vptr_type_cache decl. */ static GTY(()) tree ubsan_vptr_type_cache_decl; @@ -1234,6 +1400,111 @@ instrument_null (gimple_stmt_iterator gsi, tree t, bool is_lhs) instrument_mem_ref (t, base, &gsi, is_lhs); } +/* Instrument pointer arithmetics PTR p+ OFF. */ + +static void +instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree ptr, tree off) +{ + if (TYPE_PRECISION (sizetype) != POINTER_SIZE) + return; + gcall *g = gimple_build_call_internal (IFN_UBSAN_PTR, 2, ptr, off); + gimple_set_location (g, gimple_location (gsi_stmt (*gsi))); + gsi_insert_before (gsi, g, GSI_SAME_STMT); +} + +/* Instrument pointer arithmetics if any. */ + +static void +maybe_instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree t) +{ + if (TYPE_PRECISION (sizetype) != POINTER_SIZE) + return; + + /* Handle also e.g. &s->i. 
*/ + if (TREE_CODE (t) == ADDR_EXPR) + t = TREE_OPERAND (t, 0); + + if (!handled_component_p (t) && TREE_CODE (t) != MEM_REF) + return; + + HOST_WIDE_INT bitsize, bitpos, bytepos; + tree offset; + machine_mode mode; + int volatilep = 0, reversep, unsignedp = 0; + tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode, + &unsignedp, &reversep, &volatilep); + tree moff = NULL_TREE; + + bool decl_p = DECL_P (inner); + tree base; + if (decl_p) + { + if (DECL_REGISTER (inner)) + return; + base = inner; + /* If BASE is a fixed size automatic variable or + global variable defined in the current TU and bitpos + fits, don't instrument anything. */ + if (offset == NULL_TREE + && bitpos > 0 + && (VAR_P (base) + || TREE_CODE (base) == PARM_DECL + || TREE_CODE (base) == RESULT_DECL) + && DECL_SIZE (base) + && TREE_CODE (DECL_SIZE (base)) == INTEGER_CST + && compare_tree_int (DECL_SIZE (base), bitpos) >= 0 + && (!is_global_var (base) || decl_binds_to_current_def_p (base))) + return; + } + else if (TREE_CODE (inner) == MEM_REF) + { + base = TREE_OPERAND (inner, 0); + if (TREE_CODE (base) == ADDR_EXPR + && DECL_P (TREE_OPERAND (base, 0)) + && !TREE_ADDRESSABLE (TREE_OPERAND (base, 0)) + && !is_global_var (TREE_OPERAND (base, 0))) + return; + moff = TREE_OPERAND (inner, 1); + if (integer_zerop (moff)) + moff = NULL_TREE; + } + else + return; + + if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base)) + return; + bytepos = bitpos / BITS_PER_UNIT; + if (offset == NULL_TREE && bytepos == 0 && moff == NULL_TREE) + return; + + tree base_addr = base; + if (decl_p) + base_addr = build1 (ADDR_EXPR, + build_pointer_type (TREE_TYPE (base)), base); + t = offset; + if (bytepos) + { + if (t) + t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t, + build_int_cst (TREE_TYPE (t), bytepos)); + else + t = size_int (bytepos); + } + if (moff) + { + if (t) + t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t, + fold_convert (TREE_TYPE (t), moff)); + else + t = fold_convert (sizetype, moff); + } + 
t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, true, + GSI_SAME_STMT); + base_addr = force_gimple_operand_gsi (gsi, base_addr, true, NULL_TREE, true, + GSI_SAME_STMT); + instrument_pointer_overflow (gsi, base_addr, t); +} + /* Build an ubsan builtin call for the signed-integer-overflow sanitization. CODE says what kind of builtin are we building, LOC is a location, LHSTYPE is the type of LHS, OP0 and OP1 @@ -1849,7 +2120,7 @@ instrument_object_size (gimple_stmt_iterator *gsi, tree t, bool is_lhs) { tree rhs1 = gimple_assign_rhs1 (def_stmt); if (TREE_CODE (rhs1) == SSA_NAME - && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)) + && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)) break; else base = rhs1; @@ -1973,7 +2244,8 @@ public: | SANITIZE_ALIGNMENT | SANITIZE_NONNULL_ATTRIBUTE | SANITIZE_RETURNS_NONNULL_ATTRIBUTE - | SANITIZE_OBJECT_SIZE)); + | SANITIZE_OBJECT_SIZE + | SANITIZE_POINTER_OVERFLOW)); } virtual unsigned int execute (function *); @@ -2065,6 +2337,32 @@ pass_ubsan::execute (function *fun) } } + if (sanitize_flags_p (SANITIZE_POINTER_OVERFLOW, fun->decl)) + { + if (is_gimple_assign (stmt) + && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR) + instrument_pointer_overflow (&gsi, + gimple_assign_rhs1 (stmt), + gimple_assign_rhs2 (stmt)); + if (gimple_store_p (stmt)) + maybe_instrument_pointer_overflow (&gsi, + gimple_get_lhs (stmt)); + if (gimple_assign_single_p (stmt)) + maybe_instrument_pointer_overflow (&gsi, + gimple_assign_rhs1 (stmt)); + if (is_gimple_call (stmt)) + { + unsigned args_num = gimple_call_num_args (stmt); + for (unsigned i = 0; i < args_num; ++i) + { + tree arg = gimple_call_arg (stmt, i); + if (is_gimple_reg (arg)) + continue; + maybe_instrument_pointer_overflow (&gsi, arg); + } + } + } + gsi_next (&gsi); } if (gimple_purge_dead_eh_edges (bb)) |