path: root/gcc/expr.c
author     Richard Sandiford <richard.sandiford@linaro.org>  2011-03-23 09:30:58 +0000
committer  Richard Sandiford <rsandifo@gcc.gnu.org>  2011-03-23 09:30:58 +0000
commit     a5c7d693b920b650fb863e4b9a41b01c199f698f (patch)
tree       149ffb595908dd2c0744e01184f253f737706ef4 /gcc/expr.c
parent     78fadbabe3c1efabb7ebc58819ed75131958dfc4 (diff)
optabs.h (emit_unop_insn, [...]): Change insn code parameter from "int" to "enum insn_code".
gcc/
	* optabs.h (emit_unop_insn, maybe_emit_unop_insn): Change insn code
	parameter from "int" to "enum insn_code".
	(expand_operand_type): New enum.
	(expand_operand): New structure.
	(create_expand_operand): New function.
	(create_fixed_operand, create_output_operand): Likewise.
	(create_input_operand, create_convert_operand_to): Likewise.
	(create_convert_operand_from, create_address_operand): Likewise.
	(create_integer_operand): Likewise.
	(create_convert_operand_from_type, maybe_legitimize_operands): Declare.
	(maybe_gen_insn, maybe_expand_insn, maybe_expand_jump_insn): Likewise.
	(expand_insn, expand_jump_insn): Likewise.
	* builtins.c (expand_builtin_prefetch): Use the new interfaces.
	(expand_builtin_interclass_mathfn, expand_builtin_strlen): Likewise.
	(expand_movstr, expand_builtin___clear_cache): Likewise.
	(expand_builtin_lock_release): Likewise.
	* explow.c (allocate_dynamic_stack_space): Likewise.
	(probe_stack_range): Likewise.  Allow check_stack to FAIL,
	and use the default handling in that case.
	* expmed.c (check_predicate_volatile_ok): Delete.
	(store_bit_field_1, extract_bit_field_1): Use the new interfaces.
	(emit_cstore): Likewise.
	* expr.c (emit_block_move_via_movmem): Likewise.
	(set_storage_via_setmem, expand_assignment): Likewise.
	(emit_storent_insn, try_casesi): Likewise.
	(emit_single_push_insn): Likewise.  Allow the expansion to fail.
	* optabs.c (expand_widen_pattern_expr, expand_ternary_op): Likewise.
	(expand_vec_shift_expr, expand_binop_directly): Likewise.
	(expand_twoval_unop, expand_twoval_binop): Likewise.
	(expand_unop_direct, emit_indirect_jump): Likewise.
	(emit_conditional_move, vector_compare_rtx): Likewise.
	(expand_vec_cond_expr, expand_val_compare_and_swap_1): Likewise.
	(expand_sync_operation, expand_sync_fetch_operation): Likewise.
	(expand_sync_lock_test_and_set): Likewise.
	(maybe_emit_unop_insn): Likewise.  Change icode to an insn_code.
	(emit_unop_insn): Likewise.
	(expand_copysign_absneg): Change icode to an insn_code.
	(create_convert_operand_from_type): New function.
	(maybe_legitimize_operand, maybe_legitimize_operands): Likewise.
	(maybe_gen_insn, maybe_expand_insn, maybe_expand_jump_insn): Likewise.
	(expand_insn, expand_jump_insn): Likewise.
	* config/i386/i386.md (setmem<mode>): Use nonmemory_operand rather
	than const_int_operand for operand 2.

From-SVN: r171341
Diffstat (limited to 'gcc/expr.c')
-rw-r--r--  gcc/expr.c  192
1 file changed, 61 insertions, 131 deletions
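
The idiom introduced by this patch, visible in each hunk below, is to describe every operand of a pattern with a create_*_operand call and then hand the array to maybe_expand_insn, which legitimizes the operands and emits the pattern, or emits nothing and returns false (expand_insn and expand_jump_insn are the variants used when the expansion is not allowed to fail). A minimal sketch of that idiom follows; emit_widget_insn is a made-up name for illustration, not a function from the patch, and only the create_*_operand and maybe_expand_insn calls shown in the diff are assumed:

/* Sketch only: emit pattern ICODE with three operands, or return false
   without emitting anything if the operands cannot be legitimized.  */
static bool
emit_widget_insn (enum insn_code icode, rtx dest, rtx src, unsigned int align)
{
  struct expand_operand ops[3];

  /* Operand 0 is used exactly as given.  */
  create_fixed_operand (&ops[0], dest);
  /* Operand 1 is an input that may be copied to a register or otherwise
     reworked to satisfy the pattern's predicate.  */
  create_input_operand (&ops[1], src, GET_MODE (dest));
  /* Operand 2 is an integer immediate.  */
  create_integer_operand (&ops[2], align / BITS_PER_UNIT);

  /* This replaces the old open-coded predicate checks, the GEN_FCN call
     and the delete_insns_since fallback.  */
  return maybe_expand_insn (icode, 3, ops);
}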
diff --git a/gcc/expr.c b/gcc/expr.c
index 2c0378c..572f0f4 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -1258,7 +1258,6 @@ static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
unsigned int expected_align, HOST_WIDE_INT expected_size)
{
- rtx opalign = GEN_INT (align / BITS_PER_UNIT);
int save_volatile_ok = volatile_ok;
enum machine_mode mode;
@@ -1276,7 +1275,6 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
mode = GET_MODE_WIDER_MODE (mode))
{
enum insn_code code = direct_optab_handler (movmem_optab, mode);
- insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing
/* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
@@ -1286,43 +1284,32 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
&& ((CONST_INT_P (size)
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1)))
- || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
- && ((pred = insn_data[(int) code].operand[0].predicate) == 0
- || (*pred) (x, BLKmode))
- && ((pred = insn_data[(int) code].operand[1].predicate) == 0
- || (*pred) (y, BLKmode))
- && ((pred = insn_data[(int) code].operand[3].predicate) == 0
- || (*pred) (opalign, VOIDmode)))
- {
- rtx op2;
- rtx last = get_last_insn ();
- rtx pat;
-
- op2 = convert_to_mode (mode, size, 1);
- pred = insn_data[(int) code].operand[2].predicate;
- if (pred != 0 && ! (*pred) (op2, mode))
- op2 = copy_to_mode_reg (mode, op2);
+ || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
+ {
+ struct expand_operand ops[6];
+ unsigned int nops;
/* ??? When called via emit_block_move_for_call, it'd be
nice if there were some way to inform the backend, so
that it doesn't fail the expansion because it thinks
emitting the libcall would be more efficient. */
-
- if (insn_data[(int) code].n_operands == 4)
- pat = GEN_FCN ((int) code) (x, y, op2, opalign);
- else
- pat = GEN_FCN ((int) code) (x, y, op2, opalign,
- GEN_INT (expected_align
- / BITS_PER_UNIT),
- GEN_INT (expected_size));
- if (pat)
+ nops = insn_data[(int) code].n_operands;
+ create_fixed_operand (&ops[0], x);
+ create_fixed_operand (&ops[1], y);
+ /* The check above guarantees that this size conversion is valid. */
+ create_convert_operand_to (&ops[2], size, mode, true);
+ create_integer_operand (&ops[3], align / BITS_PER_UNIT);
+ if (nops != 4)
+ {
+ create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
+ create_integer_operand (&ops[5], expected_size);
+ nops = 6;
+ }
+ if (maybe_expand_insn (code, nops, ops))
{
- emit_insn (pat);
volatile_ok = save_volatile_ok;
return true;
}
- else
- delete_insns_since (last);
}
}
@@ -2705,7 +2692,6 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
including more than one in the machine description unless
the more limited one has some advantage. */
- rtx opalign = GEN_INT (align / BITS_PER_UNIT);
enum machine_mode mode;
if (expected_align < align)
@@ -2715,7 +2701,6 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
mode = GET_MODE_WIDER_MODE (mode))
{
enum insn_code code = direct_optab_handler (setmem_optab, mode);
- insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing
/* We don't need MODE to be narrower than
@@ -2725,46 +2710,25 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
&& ((CONST_INT_P (size)
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1)))
- || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
- && ((pred = insn_data[(int) code].operand[0].predicate) == 0
- || (*pred) (object, BLKmode))
- && ((pred = insn_data[(int) code].operand[3].predicate) == 0
- || (*pred) (opalign, VOIDmode)))
- {
- rtx opsize, opchar;
- enum machine_mode char_mode;
- rtx last = get_last_insn ();
- rtx pat;
-
- opsize = convert_to_mode (mode, size, 1);
- pred = insn_data[(int) code].operand[1].predicate;
- if (pred != 0 && ! (*pred) (opsize, mode))
- opsize = copy_to_mode_reg (mode, opsize);
-
- opchar = val;
- char_mode = insn_data[(int) code].operand[2].mode;
- if (char_mode != VOIDmode)
+ || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
+ {
+ struct expand_operand ops[6];
+ unsigned int nops;
+
+ nops = insn_data[(int) code].n_operands;
+ create_fixed_operand (&ops[0], object);
+ /* The check above guarantees that this size conversion is valid. */
+ create_convert_operand_to (&ops[1], size, mode, true);
+ create_convert_operand_from (&ops[2], val, byte_mode, true);
+ create_integer_operand (&ops[3], align / BITS_PER_UNIT);
+ if (nops != 4)
{
- opchar = convert_to_mode (char_mode, opchar, 1);
- pred = insn_data[(int) code].operand[2].predicate;
- if (pred != 0 && ! (*pred) (opchar, char_mode))
- opchar = copy_to_mode_reg (char_mode, opchar);
+ create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
+ create_integer_operand (&ops[5], expected_size);
+ nops = 6;
}
-
- if (insn_data[(int) code].n_operands == 4)
- pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
- else
- pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
- GEN_INT (expected_align
- / BITS_PER_UNIT),
- GEN_INT (expected_size));
- if (pat)
- {
- emit_insn (pat);
- return true;
- }
- else
- delete_insns_since (last);
+ if (maybe_expand_insn (code, nops, ops))
+ return true;
}
}
@@ -3547,7 +3511,6 @@ emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
rtx dest;
enum insn_code icode;
- insn_operand_predicate_fn pred;
stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* If there is push pattern, use it. Otherwise try old way of throwing
@@ -3555,11 +3518,11 @@ emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
icode = optab_handler (push_optab, mode);
if (icode != CODE_FOR_nothing)
{
- if (((pred = insn_data[(int) icode].operand[0].predicate)
- && !((*pred) (x, mode))))
- x = force_reg (mode, x);
- emit_insn (GEN_FCN (icode) (x));
- return;
+ struct expand_operand ops[1];
+
+ create_input_operand (&ops[0], x, mode);
+ if (maybe_expand_insn (icode, 1, ops))
+ return;
}
if (GET_MODE_SIZE (mode) == rounded_size)
dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
@@ -4147,7 +4110,8 @@ expand_assignment (tree to, tree from, bool nontemporal)
rtx to_rtx = 0;
rtx result;
enum machine_mode mode;
- int align, icode;
+ int align;
+ enum insn_code icode;
/* Don't crash if the lhs of the assignment was erroneous. */
if (TREE_CODE (to) == ERROR_MARK)
@@ -4170,8 +4134,9 @@ expand_assignment (tree to, tree from, bool nontemporal)
&& ((icode = optab_handler (movmisalign_optab, mode))
!= CODE_FOR_nothing))
{
- enum machine_mode address_mode, op_mode1;
- rtx insn, reg, op0, mem;
+ struct expand_operand ops[2];
+ enum machine_mode address_mode;
+ rtx reg, op0, mem;
reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
reg = force_not_mem (reg);
@@ -4212,16 +4177,11 @@ expand_assignment (tree to, tree from, bool nontemporal)
if (TREE_THIS_VOLATILE (to))
MEM_VOLATILE_P (mem) = 1;
- op_mode1 = insn_data[icode].operand[1].mode;
- if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
- && op_mode1 != VOIDmode)
- reg = copy_to_mode_reg (op_mode1, reg);
-
- insn = GEN_FCN (icode) (mem, reg);
+ create_fixed_operand (&ops[0], mem);
+ create_input_operand (&ops[1], reg, mode);
/* The movmisalign<mode> pattern cannot fail, else the assignment would
silently be omitted. */
- gcc_assert (insn != NULL_RTX);
- emit_insn (insn);
+ expand_insn (icode, 2, ops);
return;
}
@@ -4483,31 +4443,16 @@ expand_assignment (tree to, tree from, bool nontemporal)
bool
emit_storent_insn (rtx to, rtx from)
{
- enum machine_mode mode = GET_MODE (to), imode;
+ struct expand_operand ops[2];
+ enum machine_mode mode = GET_MODE (to);
enum insn_code code = optab_handler (storent_optab, mode);
- rtx pattern;
if (code == CODE_FOR_nothing)
return false;
- imode = insn_data[code].operand[0].mode;
- if (!insn_data[code].operand[0].predicate (to, imode))
- return false;
-
- imode = insn_data[code].operand[1].mode;
- if (!insn_data[code].operand[1].predicate (from, imode))
- {
- from = copy_to_mode_reg (imode, from);
- if (!insn_data[code].operand[1].predicate (from, imode))
- return false;
- }
-
- pattern = GEN_FCN (code) (to, from);
- if (pattern == NULL_RTX)
- return false;
-
- emit_insn (pattern);
- return true;
+ create_fixed_operand (&ops[0], to);
+ create_input_operand (&ops[1], from, mode);
+ return maybe_expand_insn (code, 2, ops);
}
/* Generate code for computing expression EXP,
@@ -10120,10 +10065,10 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range,
rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
rtx fallback_label ATTRIBUTE_UNUSED)
{
+ struct expand_operand ops[5];
enum machine_mode index_mode = SImode;
int index_bits = GET_MODE_BITSIZE (index_mode);
rtx op1, op2, index;
- enum machine_mode op_mode;
if (! HAVE_casesi)
return 0;
@@ -10158,32 +10103,17 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range,
do_pending_stack_adjust ();
- op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
- if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
- (index, op_mode))
- index = copy_to_mode_reg (op_mode, index);
-
op1 = expand_normal (minval);
-
- op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
- op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
- op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
- if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
- (op1, op_mode))
- op1 = copy_to_mode_reg (op_mode, op1);
-
op2 = expand_normal (range);
- op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
- op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
- op2, TYPE_UNSIGNED (TREE_TYPE (range)));
- if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
- (op2, op_mode))
- op2 = copy_to_mode_reg (op_mode, op2);
-
- emit_jump_insn (gen_casesi (index, op1, op2,
- table_label, !default_label
- ? fallback_label : default_label));
+ create_input_operand (&ops[0], index, index_mode);
+ create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
+ create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
+ create_fixed_operand (&ops[3], table_label);
+ create_fixed_operand (&ops[4], (default_label
+ ? default_label
+ : fallback_label));
+ expand_jump_insn (CODE_FOR_casesi, 5, ops);
return 1;
}