Diffstat (limited to 'gcc/cfgexpand.c')
-rw-r--r--  gcc/cfgexpand.c | 873
1 file changed, 870 insertions(+), 3 deletions(-)
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index 07d6dd3..8bd58b0 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -70,7 +70,13 @@ gimple_assign_rhs_to_tree (gimple stmt)
TREE_TYPE (gimple_assign_lhs (stmt)),
gimple_assign_rhs1 (stmt));
else if (grhs_class == GIMPLE_SINGLE_RHS)
- t = gimple_assign_rhs1 (stmt);
+ {
+ t = gimple_assign_rhs1 (stmt);
+ /* Avoid modifying this tree in place below. */
+ if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
+ && gimple_location (stmt) != EXPR_LOCATION (t))
+ t = copy_node (t);
+ }
else
gcc_unreachable ();
@@ -1834,7 +1840,8 @@ maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "\n;; ");
- print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
+ print_gimple_stmt (dump_file, stmt, 0,
+ TDF_SLIM | (dump_flags & TDF_LINENO));
fprintf (dump_file, "\n");
print_rtl (dump_file, since ? NEXT_INSN (since) : since);
@@ -2147,6 +2154,808 @@ expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
return bb;
}
+/* Return the difference between the floor and the truncated result of
+ a signed division by OP1 with remainder MOD. */
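+/* For example, -7 / 2 truncates to -3 with MOD = -1, whereas the
+ floor is -4; MOD != 0 and OP1 / MOD = 2 / -1 < 0, so the adjustment
+ below is -1. */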
+static rtx
+floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+{
+ /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
+ return gen_rtx_IF_THEN_ELSE
+ (mode, gen_rtx_NE (BImode, mod, const0_rtx),
+ gen_rtx_IF_THEN_ELSE
+ (mode, gen_rtx_LT (BImode,
+ gen_rtx_DIV (mode, op1, mod),
+ const0_rtx),
+ constm1_rtx, const0_rtx),
+ const0_rtx);
+}
+
+/* Return the difference between the ceil and the truncated result of
+ a signed division by OP1 with remainder MOD. */
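+/* For example, 7 / 2 truncates to 3 with MOD = 1, whereas the ceiling
+ is 4; MOD != 0 and OP1 / MOD = 2 / 1 > 0, so the adjustment below
+ is 1. */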
+static rtx
+ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+{
+ /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
+ return gen_rtx_IF_THEN_ELSE
+ (mode, gen_rtx_NE (BImode, mod, const0_rtx),
+ gen_rtx_IF_THEN_ELSE
+ (mode, gen_rtx_GT (BImode,
+ gen_rtx_DIV (mode, op1, mod),
+ const0_rtx),
+ const1_rtx, const0_rtx),
+ const0_rtx);
+}
+
+/* Return the difference between the ceil and the truncated result of
+ an unsigned division by OP1 with remainder MOD. */
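+/* For example, 7u / 2u truncates to 3 with MOD = 1, whereas the
+ ceiling is 4; any nonzero remainder leaves the truncated quotient
+ one short of the ceiling, so the adjustment below is 1. */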
+static rtx
+ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
+{
+ /* (mod != 0 ? 1 : 0) */
+ return gen_rtx_IF_THEN_ELSE
+ (mode, gen_rtx_NE (BImode, mod, const0_rtx),
+ const1_rtx, const0_rtx);
+}
+
+/* Return the difference between the rounded and the truncated result
+ of a signed division by OP1 with remainder MOD. Halfway cases are
+ rounded away from zero, rather than to the nearest even number. */
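+/* For example, 7 / 2 truncates to 3 with MOD = 1; here
+ abs (MOD) >= abs (OP1) - abs (MOD) holds (1 >= 1) and
+ OP1 / MOD = 2 / 1 > 0, so the halfway case 3.5 is adjusted by 1 to
+ the rounded result 4. */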
+static rtx
+round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+{
+ /* (abs (mod) >= abs (op1) - abs (mod)
+ ? (op1 / mod > 0 ? 1 : -1)
+ : 0) */
+ return gen_rtx_IF_THEN_ELSE
+ (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
+ gen_rtx_MINUS (mode,
+ gen_rtx_ABS (mode, op1),
+ gen_rtx_ABS (mode, mod))),
+ gen_rtx_IF_THEN_ELSE
+ (mode, gen_rtx_GT (BImode,
+ gen_rtx_DIV (mode, op1, mod),
+ const0_rtx),
+ const1_rtx, constm1_rtx),
+ const0_rtx);
+}
+
+/* Return the difference between the rounded and the truncated result
+ of an unsigned division by OP1 with remainder MOD. Halfway cases
+ are rounded away from zero, rather than to the nearest even
+ number. */
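+/* For example, 7u / 2u truncates to 3 with MOD = 1; MOD >= OP1 - MOD
+ holds (1 >= 1), so the halfway case 3.5 is adjusted by 1 to the
+ rounded result 4. */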
+static rtx
+round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+{
+ /* (mod >= op1 - mod ? 1 : 0) */
+ return gen_rtx_IF_THEN_ELSE
+ (mode, gen_rtx_GE (BImode, mod,
+ gen_rtx_MINUS (mode, op1, mod)),
+ const1_rtx, const0_rtx);
+}
+
+/* Wrap modeless constants in CONST:MODE. */
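+/* For example, a (const_int 2) to be interpreted in SImode comes back
+ as (const:SI (const_int 2)), so the mode is not lost when the
+ constant appears in a debug expression. */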
+rtx
+wrap_constant (enum machine_mode mode, rtx x)
+{
+ if (GET_MODE (x) != VOIDmode)
+ return x;
+
+ if (CONST_INT_P (x)
+ || GET_CODE (x) == CONST_FIXED
+ || GET_CODE (x) == CONST_DOUBLE
+ || GET_CODE (x) == LABEL_REF)
+ {
+ gcc_assert (mode != VOIDmode);
+
+ x = gen_rtx_CONST (mode, x);
+ }
+
+ return x;
+}
+
+/* Remove CONST wrapper added by wrap_constant(). */
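+/* For example, (const:SI (const_int 2)) becomes (const_int 2)
+ again. */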
+rtx
+unwrap_constant (rtx x)
+{
+ rtx ret = x;
+
+ if (GET_CODE (x) != CONST)
+ return x;
+
+ x = XEXP (x, 0);
+
+ if (CONST_INT_P (x)
+ || GET_CODE (x) == CONST_FIXED
+ || GET_CODE (x) == CONST_DOUBLE
+ || GET_CODE (x) == LABEL_REF)
+ ret = x;
+
+ return ret;
+}
+
+/* Return an RTX equivalent to the value of the tree expression EXP,
+ or NULL if the value cannot be represented. */
+
+static rtx
+expand_debug_expr (tree exp)
+{
+ rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+ int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
+
+ switch (TREE_CODE_CLASS (TREE_CODE (exp)))
+ {
+ case tcc_expression:
+ switch (TREE_CODE (exp))
+ {
+ case COND_EXPR:
+ goto ternary;
+
+ case TRUTH_ANDIF_EXPR:
+ case TRUTH_ORIF_EXPR:
+ case TRUTH_AND_EXPR:
+ case TRUTH_OR_EXPR:
+ case TRUTH_XOR_EXPR:
+ goto binary;
+
+ case TRUTH_NOT_EXPR:
+ goto unary;
+
+ default:
+ break;
+ }
+ break;
+
+ ternary:
+ op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
+ if (!op2)
+ return NULL_RTX;
+ /* Fall through. */
+
+ binary:
+ case tcc_binary:
+ case tcc_comparison:
+ op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
+ if (!op1)
+ return NULL_RTX;
+ /* Fall through. */
+
+ unary:
+ case tcc_unary:
+ op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
+ if (!op0)
+ return NULL_RTX;
+ break;
+
+ case tcc_type:
+ case tcc_statement:
+ gcc_unreachable ();
+
+ case tcc_constant:
+ case tcc_exceptional:
+ case tcc_declaration:
+ case tcc_reference:
+ case tcc_vl_exp:
+ break;
+ }
+
+ switch (TREE_CODE (exp))
+ {
+ case STRING_CST:
+ if (!lookup_constant_def (exp))
+ {
+ op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
+ op0 = gen_rtx_MEM (BLKmode, op0);
+ set_mem_attributes (op0, exp, 0);
+ return op0;
+ }
+ /* Fall through... */
+
+ case INTEGER_CST:
+ case REAL_CST:
+ case FIXED_CST:
+ op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
+ return op0;
+
+ case COMPLEX_CST:
+ gcc_assert (COMPLEX_MODE_P (mode));
+ op0 = expand_debug_expr (TREE_REALPART (exp));
+ op0 = wrap_constant (GET_MODE_INNER (mode), op0);
+ op1 = expand_debug_expr (TREE_IMAGPART (exp));
+ op1 = wrap_constant (GET_MODE_INNER (mode), op1);
+ return gen_rtx_CONCAT (mode, op0, op1);
+
+ case VAR_DECL:
+ case PARM_DECL:
+ case FUNCTION_DECL:
+ case LABEL_DECL:
+ case CONST_DECL:
+ case RESULT_DECL:
+ op0 = DECL_RTL_IF_SET (exp);
+
+ /* This decl was probably optimized away. */
+ if (!op0)
+ return NULL;
+
+ op0 = copy_rtx (op0);
+
+ if (GET_MODE (op0) == BLKmode)
+ {
+ gcc_assert (MEM_P (op0));
+ op0 = adjust_address_nv (op0, mode, 0);
+ return op0;
+ }
+
+ /* Fall through. */
+
+ adjust_mode:
+ case PAREN_EXPR:
+ case NOP_EXPR:
+ case CONVERT_EXPR:
+ {
+ enum machine_mode inner_mode = GET_MODE (op0);
+
+ if (mode == inner_mode)
+ return op0;
+
+ if (inner_mode == VOIDmode)
+ {
+ inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ if (mode == inner_mode)
+ return op0;
+ }
+
+ if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
+ {
+ if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
+ op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
+ else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
+ op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
+ else
+ op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
+ }
+ else if (FLOAT_MODE_P (mode))
+ {
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
+ else
+ op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
+ }
+ else if (FLOAT_MODE_P (inner_mode))
+ {
+ if (unsignedp)
+ op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
+ else
+ op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
+ }
+ else if (CONSTANT_P (op0)
+ || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
+ op0 = simplify_gen_subreg (mode, op0, inner_mode,
+ subreg_lowpart_offset (mode,
+ inner_mode));
+ else if (unsignedp)
+ op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+ else
+ op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+
+ return op0;
+ }
+
+ case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
+ op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
+ if (!op0)
+ return NULL;
+
+ gcc_assert (GET_MODE (op0) == Pmode
+ || GET_CODE (op0) == CONST_INT
+ || GET_CODE (op0) == CONST_DOUBLE);
+
+ if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
+ {
+ int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
+ op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
+ }
+
+ op0 = gen_rtx_MEM (mode, op0);
+
+ set_mem_attributes (op0, exp, 0);
+
+ return op0;
+
+ case TARGET_MEM_REF:
+ if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
+ return NULL;
+
+ op0 = expand_debug_expr
+ (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)),
+ exp));
+ if (!op0)
+ return NULL;
+
+ gcc_assert (GET_MODE (op0) == Pmode
+ || GET_CODE (op0) == CONST_INT
+ || GET_CODE (op0) == CONST_DOUBLE);
+
+ op0 = gen_rtx_MEM (mode, op0);
+
+ set_mem_attributes (op0, exp, 0);
+
+ return op0;
+
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ case COMPONENT_REF:
+ case BIT_FIELD_REF:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
+ case VIEW_CONVERT_EXPR:
+ {
+ enum machine_mode mode1;
+ HOST_WIDE_INT bitsize, bitpos;
+ tree offset;
+ int volatilep = 0;
+ tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
+ &mode1, &unsignedp, &volatilep, false);
+ rtx orig_op0;
+
+ orig_op0 = op0 = expand_debug_expr (tem);
+
+ if (!op0)
+ return NULL;
+
+ if (offset)
+ {
+ gcc_assert (MEM_P (op0));
+
+ op1 = expand_debug_expr (offset);
+ if (!op1)
+ return NULL;
+
+ op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, XEXP (op0, 0), op1));
+ }
+
+ if (MEM_P (op0))
+ {
+ if (bitpos >= BITS_PER_UNIT)
+ {
+ op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
+ bitpos %= BITS_PER_UNIT;
+ }
+ else if (bitpos < 0)
+ {
+ int units = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
+ op0 = adjust_address_nv (op0, mode1, units);
+ bitpos += units * BITS_PER_UNIT;
+ }
+ else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
+ op0 = adjust_address_nv (op0, mode, 0);
+ else if (GET_MODE (op0) != mode1)
+ op0 = adjust_address_nv (op0, mode1, 0);
+ else
+ op0 = copy_rtx (op0);
+ if (op0 == orig_op0)
+ op0 = shallow_copy_rtx (op0);
+ set_mem_attributes (op0, exp, 0);
+ }
+
+ if (bitpos == 0 && mode == GET_MODE (op0))
+ return op0;
+
+ if ((bitpos % BITS_PER_UNIT) == 0
+ && bitsize == GET_MODE_BITSIZE (mode1))
+ {
+ enum machine_mode opmode = GET_MODE (op0);
+
+ gcc_assert (opmode != BLKmode);
+
+ if (opmode == VOIDmode)
+ opmode = mode1;
+
+ /* This condition may hold if we're expanding the address
+ right past the end of an array that turned out not to
+ be addressable (i.e., the address was only computed in
+ debug stmts). The gen_subreg below would rightfully
+ crash, and the address doesn't really exist, so just
+ drop it. */
+ if (bitpos >= GET_MODE_BITSIZE (opmode))
+ return NULL;
+
+ return simplify_gen_subreg (mode, op0, opmode,
+ bitpos / BITS_PER_UNIT);
+ }
+
+ return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
+ && TYPE_UNSIGNED (TREE_TYPE (exp))
+ ? SIGN_EXTRACT
+ : ZERO_EXTRACT, mode,
+ GET_MODE (op0) != VOIDmode
+ ? GET_MODE (op0) : mode1,
+ op0, GEN_INT (bitsize), GEN_INT (bitpos));
+ }
+
+ case EXC_PTR_EXPR:
+ /* ??? We shouldn't really call get_exception_pointer() here; we
+ don't want to generate it if it hasn't been created yet. */
+ return get_exception_pointer ();
+
+ case FILTER_EXPR:
+ /* Likewise get_exception_filter(). */
+ return get_exception_filter ();
+
+ case ABS_EXPR:
+ return gen_rtx_ABS (mode, op0);
+
+ case NEGATE_EXPR:
+ return gen_rtx_NEG (mode, op0);
+
+ case BIT_NOT_EXPR:
+ return gen_rtx_NOT (mode, op0);
+
+ case FLOAT_EXPR:
+ if (unsignedp)
+ return gen_rtx_UNSIGNED_FLOAT (mode, op0);
+ else
+ return gen_rtx_FLOAT (mode, op0);
+
+ case FIX_TRUNC_EXPR:
+ if (unsignedp)
+ return gen_rtx_UNSIGNED_FIX (mode, op0);
+ else
+ return gen_rtx_FIX (mode, op0);
+
+ case POINTER_PLUS_EXPR:
+ case PLUS_EXPR:
+ return gen_rtx_PLUS (mode, op0, op1);
+
+ case MINUS_EXPR:
+ return gen_rtx_MINUS (mode, op0, op1);
+
+ case MULT_EXPR:
+ return gen_rtx_MULT (mode, op0, op1);
+
+ case RDIV_EXPR:
+ case TRUNC_DIV_EXPR:
+ case EXACT_DIV_EXPR:
+ if (unsignedp)
+ return gen_rtx_UDIV (mode, op0, op1);
+ else
+ return gen_rtx_DIV (mode, op0, op1);
+
+ case TRUNC_MOD_EXPR:
+ if (unsignedp)
+ return gen_rtx_UMOD (mode, op0, op1);
+ else
+ return gen_rtx_MOD (mode, op0, op1);
+
+ case FLOOR_DIV_EXPR:
+ if (unsignedp)
+ return gen_rtx_UDIV (mode, op0, op1);
+ else
+ {
+ rtx div = gen_rtx_DIV (mode, op0, op1);
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = floor_sdiv_adjust (mode, mod, op1);
+ return gen_rtx_PLUS (mode, div, adj);
+ }
+
+ case FLOOR_MOD_EXPR:
+ if (unsignedp)
+ return gen_rtx_UMOD (mode, op0, op1);
+ else
+ {
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = floor_sdiv_adjust (mode, mod, op1);
+ adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+ return gen_rtx_PLUS (mode, mod, adj);
+ }
+
+ case CEIL_DIV_EXPR:
+ if (unsignedp)
+ {
+ rtx div = gen_rtx_UDIV (mode, op0, op1);
+ rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx adj = ceil_udiv_adjust (mode, mod, op1);
+ return gen_rtx_PLUS (mode, div, adj);
+ }
+ else
+ {
+ rtx div = gen_rtx_DIV (mode, op0, op1);
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = ceil_sdiv_adjust (mode, mod, op1);
+ return gen_rtx_PLUS (mode, div, adj);
+ }
+
+ case CEIL_MOD_EXPR:
+ if (unsignedp)
+ {
+ rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx adj = ceil_udiv_adjust (mode, mod, op1);
+ adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+ return gen_rtx_PLUS (mode, mod, adj);
+ }
+ else
+ {
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = ceil_sdiv_adjust (mode, mod, op1);
+ adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+ return gen_rtx_PLUS (mode, mod, adj);
+ }
+
+ case ROUND_DIV_EXPR:
+ if (unsignedp)
+ {
+ rtx div = gen_rtx_UDIV (mode, op0, op1);
+ rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx adj = round_udiv_adjust (mode, mod, op1);
+ return gen_rtx_PLUS (mode, div, adj);
+ }
+ else
+ {
+ rtx div = gen_rtx_DIV (mode, op0, op1);
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = round_sdiv_adjust (mode, mod, op1);
+ return gen_rtx_PLUS (mode, div, adj);
+ }
+
+ case ROUND_MOD_EXPR:
+ if (unsignedp)
+ {
+ rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx adj = round_udiv_adjust (mode, mod, op1);
+ adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+ return gen_rtx_PLUS (mode, mod, adj);
+ }
+ else
+ {
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = round_sdiv_adjust (mode, mod, op1);
+ adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+ return gen_rtx_PLUS (mode, mod, adj);
+ }
+
+ case LSHIFT_EXPR:
+ return gen_rtx_ASHIFT (mode, op0, op1);
+
+ case RSHIFT_EXPR:
+ if (unsignedp)
+ return gen_rtx_LSHIFTRT (mode, op0, op1);
+ else
+ return gen_rtx_ASHIFTRT (mode, op0, op1);
+
+ case LROTATE_EXPR:
+ return gen_rtx_ROTATE (mode, op0, op1);
+
+ case RROTATE_EXPR:
+ return gen_rtx_ROTATERT (mode, op0, op1);
+
+ case MIN_EXPR:
+ if (unsignedp)
+ return gen_rtx_UMIN (mode, op0, op1);
+ else
+ return gen_rtx_SMIN (mode, op0, op1);
+
+ case MAX_EXPR:
+ if (unsignedp)
+ return gen_rtx_UMAX (mode, op0, op1);
+ else
+ return gen_rtx_SMAX (mode, op0, op1);
+
+ case BIT_AND_EXPR:
+ case TRUTH_AND_EXPR:
+ return gen_rtx_AND (mode, op0, op1);
+
+ case BIT_IOR_EXPR:
+ case TRUTH_OR_EXPR:
+ return gen_rtx_IOR (mode, op0, op1);
+
+ case BIT_XOR_EXPR:
+ case TRUTH_XOR_EXPR:
+ return gen_rtx_XOR (mode, op0, op1);
+
+ case TRUTH_ANDIF_EXPR:
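+ /* A && B is expanded as A ? B : 0. */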
+ return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
+
+ case TRUTH_ORIF_EXPR:
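+ /* A || B is expanded as A ? 1 : B. */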
+ return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
+
+ case TRUTH_NOT_EXPR:
+ return gen_rtx_EQ (mode, op0, const0_rtx);
+
+ case LT_EXPR:
+ if (unsignedp)
+ return gen_rtx_LTU (mode, op0, op1);
+ else
+ return gen_rtx_LT (mode, op0, op1);
+
+ case LE_EXPR:
+ if (unsignedp)
+ return gen_rtx_LEU (mode, op0, op1);
+ else
+ return gen_rtx_LE (mode, op0, op1);
+
+ case GT_EXPR:
+ if (unsignedp)
+ return gen_rtx_GTU (mode, op0, op1);
+ else
+ return gen_rtx_GT (mode, op0, op1);
+
+ case GE_EXPR:
+ if (unsignedp)
+ return gen_rtx_GEU (mode, op0, op1);
+ else
+ return gen_rtx_GE (mode, op0, op1);
+
+ case EQ_EXPR:
+ return gen_rtx_EQ (mode, op0, op1);
+
+ case NE_EXPR:
+ return gen_rtx_NE (mode, op0, op1);
+
+ case UNORDERED_EXPR:
+ return gen_rtx_UNORDERED (mode, op0, op1);
+
+ case ORDERED_EXPR:
+ return gen_rtx_ORDERED (mode, op0, op1);
+
+ case UNLT_EXPR:
+ return gen_rtx_UNLT (mode, op0, op1);
+
+ case UNLE_EXPR:
+ return gen_rtx_UNLE (mode, op0, op1);
+
+ case UNGT_EXPR:
+ return gen_rtx_UNGT (mode, op0, op1);
+
+ case UNGE_EXPR:
+ return gen_rtx_UNGE (mode, op0, op1);
+
+ case UNEQ_EXPR:
+ return gen_rtx_UNEQ (mode, op0, op1);
+
+ case LTGT_EXPR:
+ return gen_rtx_LTGT (mode, op0, op1);
+
+ case COND_EXPR:
+ return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
+
+ case COMPLEX_EXPR:
+ gcc_assert (COMPLEX_MODE_P (mode));
+ if (GET_MODE (op0) == VOIDmode)
+ op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
+ if (GET_MODE (op1) == VOIDmode)
+ op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
+ return gen_rtx_CONCAT (mode, op0, op1);
+
+ case ADDR_EXPR:
+ op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
+ if (!op0 || !MEM_P (op0))
+ return NULL;
+
+ return XEXP (op0, 0);
+
+ case VECTOR_CST:
+ exp = build_constructor_from_list (TREE_TYPE (exp),
+ TREE_VECTOR_CST_ELTS (exp));
+ /* Fall through. */
+
+ case CONSTRUCTOR:
+ if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
+ {
+ unsigned i;
+ tree val;
+
+ op0 = gen_rtx_CONCATN
+ (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
+
+ FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
+ {
+ op1 = expand_debug_expr (val);
+ if (!op1)
+ return NULL;
+ XVECEXP (op0, 0, i) = op1;
+ }
+
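+ /* Pad any remaining elements with the zero of the element type. */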
+ if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
+ {
+ op1 = expand_debug_expr
+ (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
+
+ if (!op1)
+ return NULL;
+
+ for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
+ XVECEXP (op0, 0, i) = op1;
+ }
+
+ return op0;
+ }
+ else
+ goto flag_unsupported;
+
+ case CALL_EXPR:
+ /* ??? Maybe handle some builtins? */
+ return NULL;
+
+ case SSA_NAME:
+ {
+ int part = var_to_partition (SA.map, exp);
+
+ if (part == NO_PARTITION)
+ return NULL;
+
+ gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
+
+ op0 = SA.partition_to_pseudo[part];
+ goto adjust_mode;
+ }
+
+ case ERROR_MARK:
+ return NULL;
+
+ default:
+ flag_unsupported:
+#ifdef ENABLE_CHECKING
+ debug_tree (exp);
+ gcc_unreachable ();
+#else
+ return NULL;
+#endif
+ }
+}
+
+/* Expand the _LOCs in debug insns. We run this after expanding all
+ regular insns, so that any variables referenced in the function
+ will have their DECL_RTLs set. */
+
+static void
+expand_debug_locations (void)
+{
+ rtx insn;
+ rtx last = get_last_insn ();
+ int save_strict_alias = flag_strict_aliasing;
+
+ /* New alias sets created while setting up memory attributes cause
+ -fcompare-debug failures, even though they don't bring about any
+ codegen changes. */
+ flag_strict_aliasing = 0;
+
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ if (DEBUG_INSN_P (insn))
+ {
+ tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
+ rtx val;
+ enum machine_mode mode;
+
+ if (value == NULL_TREE)
+ val = NULL_RTX;
+ else
+ {
+ val = expand_debug_expr (value);
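+ /* Expanding a debug expression must only build rtl, never emit
+ new insns; check that nothing was emitted. */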
+ gcc_assert (last == get_last_insn ());
+ }
+
+ if (!val)
+ val = gen_rtx_UNKNOWN_VAR_LOC ();
+ else
+ {
+ mode = GET_MODE (INSN_VAR_LOCATION (insn));
+
+ gcc_assert (mode == GET_MODE (val)
+ || (GET_MODE (val) == VOIDmode
+ && (CONST_INT_P (val)
+ || GET_CODE (val) == CONST_FIXED
+ || GET_CODE (val) == CONST_DOUBLE
+ || GET_CODE (val) == LABEL_REF)));
+ }
+
+ INSN_VAR_LOCATION_LOC (insn) = val;
+ }
+
+ flag_strict_aliasing = save_strict_alias;
+}
+
/* Expand basic block BB from GIMPLE trees to RTL. */
static basic_block
@@ -2234,9 +3043,10 @@ expand_gimple_basic_block (basic_block bb)
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
basic_block new_bb;
+ stmt = gsi_stmt (gsi);
+
/* Expand this statement, then evaluate the resulting RTL and
fixup the CFG accordingly. */
if (gimple_code (stmt) == GIMPLE_COND)
@@ -2245,6 +3055,60 @@ expand_gimple_basic_block (basic_block bb)
if (new_bb)
return new_bb;
}
+ else if (gimple_debug_bind_p (stmt))
+ {
+ location_t sloc = get_curr_insn_source_location ();
+ tree sblock = get_curr_insn_block ();
+ gimple_stmt_iterator nsi = gsi;
+
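+ /* Emit a debug insn for this and any immediately following debug
+ bind stmts, so the saved source location and block are restored
+ only once for the whole run. */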
+ for (;;)
+ {
+ tree var = gimple_debug_bind_get_var (stmt);
+ tree value;
+ rtx val;
+ enum machine_mode mode;
+
+ if (gimple_debug_bind_has_value_p (stmt))
+ value = gimple_debug_bind_get_value (stmt);
+ else
+ value = NULL_TREE;
+
+ last = get_last_insn ();
+
+ set_curr_insn_source_location (gimple_location (stmt));
+ set_curr_insn_block (gimple_block (stmt));
+
+ if (DECL_P (var))
+ mode = DECL_MODE (var);
+ else
+ mode = TYPE_MODE (TREE_TYPE (var));
+
+ val = gen_rtx_VAR_LOCATION
+ (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
+
+ val = emit_debug_insn (val);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ /* We can't dump the insn with a TREE where an RTX
+ is expected. */
+ INSN_VAR_LOCATION_LOC (val) = const0_rtx;
+ maybe_dump_rtl_for_gimple_stmt (stmt, last);
+ INSN_VAR_LOCATION_LOC (val) = (rtx)value;
+ }
+
+ gsi = nsi;
+ gsi_next (&nsi);
+ if (gsi_end_p (nsi))
+ break;
+ stmt = gsi_stmt (nsi);
+ if (!gimple_debug_bind_p (stmt))
+ break;
+ }
+
+ set_curr_insn_source_location (sloc);
+ set_curr_insn_block (sblock);
+ }
else
{
if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
@@ -2718,6 +3582,9 @@ gimple_expand_cfg (void)
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
bb = expand_gimple_basic_block (bb);
+ if (MAY_HAVE_DEBUG_INSNS)
+ expand_debug_locations ();
+
execute_free_datastructures ();
finish_out_of_ssa (&SA);