path: root/gcc/internal-fn.c
author     Jakub Jelinek <jakub@redhat.com>   2016-11-16 09:28:50 +0100
committer  Jakub Jelinek <jakub@gcc.gnu.org>  2016-11-16 09:28:50 +0100
commit     1705cebd79f24bc85dab766a9a26390827f26fa0 (patch)
tree       9fffaa05cb502bdcbfd169cbadc603c4b6d31f2a /gcc/internal-fn.c
parent     00178b98eb609827f8e799931f55288ab2f58ee1 (diff)
re PR sanitizer/77823 (ICE: in ubsan_encode_value, at ubsan.c:137 with -fsanitize=undefined and vector types)
PR sanitizer/77823
* ubsan.c (ubsan_build_overflow_builtin): Add DATAP argument, if it
points to non-NULL tree, use it instead of ubsan_create_data.
(instrument_si_overflow): Handle vector signed integer overflow
checking.
* ubsan.h (ubsan_build_overflow_builtin): Add DATAP argument.
* tree-vrp.c (simplify_internal_call_using_ranges): Punt for vector
IFN_UBSAN_CHECK_*.
* internal-fn.c (expand_addsub_overflow): Add DATAP argument, pass it
through to ubsan_build_overflow_builtin.
(expand_neg_overflow, expand_mul_overflow): Likewise.
(expand_vector_ubsan_overflow): New function.
(expand_UBSAN_CHECK_ADD, expand_UBSAN_CHECK_SUB, expand_UBSAN_CHECK_MUL):
Use it for vector arithmetics.
(expand_arith_overflow): Adjust expand_*_overflow callers.

* c-c++-common/ubsan/overflow-vec-1.c: New test.
* c-c++-common/ubsan/overflow-vec-2.c: New test.

From-SVN: r242469
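For context, PR sanitizer/77823 is about -fsanitize=undefined ICEing in ubsan_encode_value when the instrumented arithmetic has vector type; with this patch the checks are expanded per element instead. The snippet below is only an illustrative reduction in the spirit of the new overflow-vec-* tests, not their committed contents; the typedef name and the initializer values are assumptions.

/* Illustrative sketch, not the committed c-c++-common/ubsan/overflow-vec-1.c.
   Compile with -fsanitize=signed-integer-overflow (part of -fsanitize=undefined).
   The vector addition below is now instrumented lane by lane instead of
   triggering the ICE described in the PR.  */
typedef int v4si __attribute__ ((vector_size (4 * sizeof (int))));

int
main (void)
{
  v4si a = { __INT_MAX__, 1, 2, 3 };
  v4si b = { 1, 1, 1, 1 };
  v4si c = a + b;	/* lane 0 computes INT_MAX + 1; ubsan should report it */
  return c[1];		/* 2; keeps the result live */
}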
Diffstat (limited to 'gcc/internal-fn.c')
-rw-r--r--  gcc/internal-fn.c  |  189
1 file changed, 170 insertions(+), 19 deletions(-)
diff --git a/gcc/internal-fn.c b/gcc/internal-fn.c
index 0875559..ca347c5 100644
--- a/gcc/internal-fn.c
+++ b/gcc/internal-fn.c
@@ -42,6 +42,7 @@ along with GCC; see the file COPYING3. If not see
#include "ubsan.h"
#include "recog.h"
#include "builtins.h"
+#include "optabs-tree.h"
/* The names of each internal function, indexed by function number. */
const char *const internal_fn_name_array[] = {
@@ -513,7 +514,7 @@ expand_ubsan_result_store (rtx target, rtx res)
static void
expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
tree arg0, tree arg1, bool unsr_p, bool uns0_p,
- bool uns1_p, bool is_ubsan)
+ bool uns1_p, bool is_ubsan, tree *datap)
{
rtx res, target = NULL_RTX;
tree fn;
@@ -929,7 +930,7 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
/* Expand the ubsan builtin call. */
push_temp_slots ();
fn = ubsan_build_overflow_builtin (code, loc, TREE_TYPE (arg0),
- arg0, arg1);
+ arg0, arg1, datap);
expand_normal (fn);
pop_temp_slots ();
do_pending_stack_adjust ();
@@ -958,7 +959,8 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
/* Add negate overflow checking to the statement STMT. */
static void
-expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan)
+expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan,
+ tree *datap)
{
rtx res, op1;
tree fn;
@@ -1024,7 +1026,7 @@ expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan)
/* Expand the ubsan builtin call. */
push_temp_slots ();
fn = ubsan_build_overflow_builtin (NEGATE_EXPR, loc, TREE_TYPE (arg1),
- arg1, NULL_TREE);
+ arg1, NULL_TREE, datap);
expand_normal (fn);
pop_temp_slots ();
do_pending_stack_adjust ();
@@ -1048,7 +1050,8 @@ expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan)
static void
expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
- bool unsr_p, bool uns0_p, bool uns1_p, bool is_ubsan)
+ bool unsr_p, bool uns0_p, bool uns1_p, bool is_ubsan,
+ tree *datap)
{
rtx res, op0, op1;
tree fn, type;
@@ -1685,7 +1688,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
/* Expand the ubsan builtin call. */
push_temp_slots ();
fn = ubsan_build_overflow_builtin (MULT_EXPR, loc, TREE_TYPE (arg0),
- arg0, arg1);
+ arg0, arg1, datap);
expand_normal (fn);
pop_temp_slots ();
do_pending_stack_adjust ();
@@ -1734,6 +1737,145 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
}
}
+/* Expand UBSAN_CHECK_* internal function if it has vector operands. */
+
+static void
+expand_vector_ubsan_overflow (location_t loc, enum tree_code code, tree lhs,
+ tree arg0, tree arg1)
+{
+ int cnt = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
+ rtx_code_label *loop_lab = NULL;
+ rtx cntvar = NULL_RTX;
+ tree cntv = NULL_TREE;
+ tree eltype = TREE_TYPE (TREE_TYPE (arg0));
+ tree sz = TYPE_SIZE (eltype);
+ tree data = NULL_TREE;
+ tree resv = NULL_TREE;
+ rtx lhsr = NULL_RTX;
+ rtx resvr = NULL_RTX;
+
+ if (lhs)
+ {
+ optab op;
+ lhsr = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
+ if (GET_MODE (lhsr) == BLKmode
+ || (op = optab_for_tree_code (code, TREE_TYPE (arg0),
+ optab_default)) == unknown_optab
+ || (optab_handler (op, TYPE_MODE (TREE_TYPE (arg0)))
+ == CODE_FOR_nothing))
+ {
+ if (MEM_P (lhsr))
+ resv = make_tree (TREE_TYPE (lhs), lhsr);
+ else
+ {
+ resvr = assign_temp (TREE_TYPE (lhs), 1, 1);
+ resv = make_tree (TREE_TYPE (lhs), resvr);
+ }
+ }
+ }
+ if (cnt > 4)
+ {
+ do_pending_stack_adjust ();
+ loop_lab = gen_label_rtx ();
+ cntvar = gen_reg_rtx (TYPE_MODE (sizetype));
+ cntv = make_tree (sizetype, cntvar);
+ emit_move_insn (cntvar, const0_rtx);
+ emit_label (loop_lab);
+ }
+ if (TREE_CODE (arg0) != VECTOR_CST)
+ {
+ rtx arg0r = expand_normal (arg0);
+ arg0 = make_tree (TREE_TYPE (arg0), arg0r);
+ }
+ if (TREE_CODE (arg1) != VECTOR_CST)
+ {
+ rtx arg1r = expand_normal (arg1);
+ arg1 = make_tree (TREE_TYPE (arg1), arg1r);
+ }
+ for (int i = 0; i < (cnt > 4 ? 1 : cnt); i++)
+ {
+ tree op0, op1, res = NULL_TREE;
+ if (cnt > 4)
+ {
+ tree atype = build_array_type_nelts (eltype, cnt);
+ op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, arg0);
+ op0 = build4_loc (loc, ARRAY_REF, eltype, op0, cntv,
+ NULL_TREE, NULL_TREE);
+ op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, arg1);
+ op1 = build4_loc (loc, ARRAY_REF, eltype, op1, cntv,
+ NULL_TREE, NULL_TREE);
+ if (resv)
+ {
+ res = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, resv);
+ res = build4_loc (loc, ARRAY_REF, eltype, res, cntv,
+ NULL_TREE, NULL_TREE);
+ }
+ }
+ else
+ {
+ tree bitpos = bitsize_int (tree_to_uhwi (sz) * i);
+ op0 = fold_build3_loc (loc, BIT_FIELD_REF, eltype, arg0, sz, bitpos);
+ op1 = fold_build3_loc (loc, BIT_FIELD_REF, eltype, arg1, sz, bitpos);
+ if (resv)
+ res = fold_build3_loc (loc, BIT_FIELD_REF, eltype, resv, sz,
+ bitpos);
+ }
+ switch (code)
+ {
+ case PLUS_EXPR:
+ expand_addsub_overflow (loc, PLUS_EXPR, res, op0, op1,
+ false, false, false, true, &data);
+ break;
+ case MINUS_EXPR:
+ if (cnt > 4 ? integer_zerop (arg0) : integer_zerop (op0))
+ expand_neg_overflow (loc, res, op1, true, &data);
+ else
+ expand_addsub_overflow (loc, MINUS_EXPR, res, op0, op1,
+ false, false, false, true, &data);
+ break;
+ case MULT_EXPR:
+ expand_mul_overflow (loc, res, op0, op1, false, false, false,
+ true, &data);
+ break;
+ default:
+ gcc_unreachable ();
+ }
+ }
+ if (cnt > 4)
+ {
+ struct separate_ops ops;
+ ops.code = PLUS_EXPR;
+ ops.type = TREE_TYPE (cntv);
+ ops.op0 = cntv;
+ ops.op1 = build_int_cst (TREE_TYPE (cntv), 1);
+ ops.op2 = NULL_TREE;
+ ops.location = loc;
+ rtx ret = expand_expr_real_2 (&ops, cntvar, TYPE_MODE (sizetype),
+ EXPAND_NORMAL);
+ if (ret != cntvar)
+ emit_move_insn (cntvar, ret);
+ do_compare_rtx_and_jump (cntvar, GEN_INT (cnt), NE, false,
+ TYPE_MODE (sizetype), NULL_RTX, NULL, loop_lab,
+ PROB_VERY_LIKELY);
+ }
+ if (lhs && resv == NULL_TREE)
+ {
+ struct separate_ops ops;
+ ops.code = code;
+ ops.type = TREE_TYPE (arg0);
+ ops.op0 = arg0;
+ ops.op1 = arg1;
+ ops.op2 = NULL_TREE;
+ ops.location = loc;
+ rtx ret = expand_expr_real_2 (&ops, lhsr, TYPE_MODE (TREE_TYPE (arg0)),
+ EXPAND_NORMAL);
+ if (ret != lhsr)
+ emit_move_insn (lhsr, ret);
+ }
+ else if (resvr)
+ emit_move_insn (lhsr, resvr);
+}
+
/* Expand UBSAN_CHECK_ADD call STMT. */
static void
@@ -1743,8 +1885,11 @@ expand_UBSAN_CHECK_ADD (internal_fn, gcall *stmt)
tree lhs = gimple_call_lhs (stmt);
tree arg0 = gimple_call_arg (stmt, 0);
tree arg1 = gimple_call_arg (stmt, 1);
- expand_addsub_overflow (loc, PLUS_EXPR, lhs, arg0, arg1,
- false, false, false, true);
+ if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
+ expand_vector_ubsan_overflow (loc, PLUS_EXPR, lhs, arg0, arg1);
+ else
+ expand_addsub_overflow (loc, PLUS_EXPR, lhs, arg0, arg1,
+ false, false, false, true, NULL);
}
/* Expand UBSAN_CHECK_SUB call STMT. */
@@ -1756,11 +1901,13 @@ expand_UBSAN_CHECK_SUB (internal_fn, gcall *stmt)
tree lhs = gimple_call_lhs (stmt);
tree arg0 = gimple_call_arg (stmt, 0);
tree arg1 = gimple_call_arg (stmt, 1);
- if (integer_zerop (arg0))
- expand_neg_overflow (loc, lhs, arg1, true);
+ if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
+ expand_vector_ubsan_overflow (loc, MINUS_EXPR, lhs, arg0, arg1);
+ else if (integer_zerop (arg0))
+ expand_neg_overflow (loc, lhs, arg1, true, NULL);
else
expand_addsub_overflow (loc, MINUS_EXPR, lhs, arg0, arg1,
- false, false, false, true);
+ false, false, false, true, NULL);
}
/* Expand UBSAN_CHECK_MUL call STMT. */
@@ -1772,7 +1919,11 @@ expand_UBSAN_CHECK_MUL (internal_fn, gcall *stmt)
tree lhs = gimple_call_lhs (stmt);
tree arg0 = gimple_call_arg (stmt, 0);
tree arg1 = gimple_call_arg (stmt, 1);
- expand_mul_overflow (loc, lhs, arg0, arg1, false, false, false, true);
+ if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
+ expand_vector_ubsan_overflow (loc, MULT_EXPR, lhs, arg0, arg1);
+ else
+ expand_mul_overflow (loc, lhs, arg0, arg1, false, false, false, true,
+ NULL);
}
/* Helper function for {ADD,SUB,MUL}_OVERFLOW call stmt expansion. */
@@ -1864,17 +2015,17 @@ expand_arith_overflow (enum tree_code code, gimple *stmt)
case MINUS_EXPR:
if (integer_zerop (arg0) && !unsr_p)
{
- expand_neg_overflow (loc, lhs, arg1, false);
+ expand_neg_overflow (loc, lhs, arg1, false, NULL);
return;
}
/* FALLTHRU */
case PLUS_EXPR:
- expand_addsub_overflow (loc, code, lhs, arg0, arg1,
- unsr_p, unsr_p, unsr_p, false);
+ expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
+ unsr_p, unsr_p, false, NULL);
return;
case MULT_EXPR:
- expand_mul_overflow (loc, lhs, arg0, arg1,
- unsr_p, unsr_p, unsr_p, false);
+ expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
+ unsr_p, unsr_p, false, NULL);
return;
default:
gcc_unreachable ();
@@ -1916,10 +2067,10 @@ expand_arith_overflow (enum tree_code code, gimple *stmt)
arg1 = fold_convert_loc (loc, types[uns1_p], arg1);
if (code != MULT_EXPR)
expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
- uns0_p, uns1_p, false);
+ uns0_p, uns1_p, false, NULL);
else
expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
- uns0_p, uns1_p, false);
+ uns0_p, uns1_p, false, NULL);
return;
}
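
Conceptually, the new expand_vector_ubsan_overflow scalarizes the check: vectors of up to four elements are fully unrolled using BIT_FIELD_REFs on the operands, larger vectors are walked by a runtime counter (cntvar) through ARRAY_REFs on a VIEW_CONVERT_EXPR of the vector, and all lanes share one ubsan data descriptor via the new DATAP argument. The sketch below is a rough C-level analogy of that strategy, not the code GCC generates; checked_add_lane and report_lane_overflow are hypothetical stand-ins for the scalar expand_addsub_overflow path and the ubsan runtime call.

/* Rough C-level analogy only; the real implementation builds trees/RTL in
   expand_vector_ubsan_overflow.  The helpers below are hypothetical, not
   GCC functions.  */
#include <stdio.h>

static void
report_lane_overflow (int lane)		/* stand-in for the ubsan runtime call */
{
  fprintf (stderr, "signed integer overflow in vector lane %d\n", lane);
}

static int
checked_add_lane (int a, int b, int lane)	/* scalar check for one lane */
{
  int r;
  if (__builtin_add_overflow (a, b, &r))
    report_lane_overflow (lane);
  return r;
}

/* Vectors with more than four lanes use a runtime loop over a counter
   (cntvar in the patch); four lanes or fewer are fully unrolled.  */
static void
checked_vector_add (int *res, const int *a, const int *b, int nelts)
{
  for (int lane = 0; lane < nelts; lane++)
    res[lane] = checked_add_lane (a[lane], b[lane], lane);
}

int
main (void)
{
  int a[4] = { 2147483647, 1, 2, 3 }, b[4] = { 1, 1, 1, 1 }, r[4];
  checked_vector_add (r, a, b, 4);	/* lane 0 overflows and is reported */
  return r[1];
}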