Diffstat (limited to 'gcc/expr.c')
-rw-r--r--  gcc/expr.c  402
1 file changed, 312 insertions(+), 90 deletions(-)
diff --git a/gcc/expr.c b/gcc/expr.c
index dfbeae7..dd2200d 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -696,12 +696,12 @@ convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
return x;
if (CONST_SCALAR_INT_P (x)
- && is_int_mode (mode, &int_mode))
+ && is_a <scalar_int_mode> (mode, &int_mode))
{
/* If the caller did not tell us the old mode, then there is not
much to do with respect to canonicalization. We have to
assume that all the bits are significant. */
- if (GET_MODE_CLASS (oldmode) != MODE_INT)
+ if (!is_a <scalar_int_mode> (oldmode))
oldmode = MAX_MODE_INT;
wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
GET_MODE_PRECISION (int_mode),
@@ -1637,6 +1637,12 @@ emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
x = adjust_address (x, BLKmode, 0);
y = adjust_address (y, BLKmode, 0);
+ /* If source and destination are the same, no need to copy anything. */
+ if (rtx_equal_p (x, y)
+ && !MEM_VOLATILE_P (x)
+ && !MEM_VOLATILE_P (y))
+ return 0;
+
/* Set MEM_SIZE as appropriate for this block copy. The main place this
can be incorrect is coming from __builtin_memcpy. */
poly_int64 const_size;
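For context, the effect of the new rtx_equal_p early-out can be seen on a
self-assignment of an aggregate (a minimal sketch, not a testcase from the
patch):

/* Before the change this expanded to a full 64-byte block copy; with
   the early-out it now expands to no code, provided the MEM is not
   volatile.  */
struct big { char buf[64]; };

void
self_copy (struct big *p)
{
  *p = *p;  /* source and destination are rtx_equal_p MEMs */
}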
@@ -3814,6 +3820,80 @@ emit_move_insn (rtx x, rtx y)
gcc_assert (mode != BLKmode
&& (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
+ /* If we have a copy that looks like one of the following patterns:
+ (set (subreg:M1 (reg:M2 ...)) (subreg:M1 (reg:M2 ...)))
+ (set (subreg:M1 (reg:M2 ...)) (mem:M1 ADDR))
+ (set (mem:M1 ADDR) (subreg:M1 (reg:M2 ...)))
+ (set (subreg:M1 (reg:M2 ...)) (constant C))
+ where mode M1 is equal in size to M2, try to detect whether the
+ mode change involves an implicit round trip through memory.
+ If so, see if we can avoid that by removing the subregs and
+ doing the move in mode M2 instead. */
+
+ rtx x_inner = NULL_RTX;
+ rtx y_inner = NULL_RTX;
+
+ auto candidate_subreg_p = [&](rtx subreg) {
+ return (REG_P (SUBREG_REG (subreg))
+ && known_eq (GET_MODE_SIZE (GET_MODE (SUBREG_REG (subreg))),
+ GET_MODE_SIZE (GET_MODE (subreg)))
+ && optab_handler (mov_optab, GET_MODE (SUBREG_REG (subreg)))
+ != CODE_FOR_nothing);
+ };
+
+ auto candidate_mem_p = [&](machine_mode innermode, rtx mem) {
+ return (!targetm.can_change_mode_class (innermode, GET_MODE (mem), ALL_REGS)
+ && !push_operand (mem, GET_MODE (mem))
+ /* Not a candidate if innermode requires too much alignment. */
+ && (MEM_ALIGN (mem) >= GET_MODE_ALIGNMENT (innermode)
+ || targetm.slow_unaligned_access (GET_MODE (mem),
+ MEM_ALIGN (mem))
+ || !targetm.slow_unaligned_access (innermode,
+ MEM_ALIGN (mem))));
+ };
+
+ if (SUBREG_P (x) && candidate_subreg_p (x))
+ x_inner = SUBREG_REG (x);
+
+ if (SUBREG_P (y) && candidate_subreg_p (y))
+ y_inner = SUBREG_REG (y);
+
+ if (x_inner != NULL_RTX
+ && y_inner != NULL_RTX
+ && GET_MODE (x_inner) == GET_MODE (y_inner)
+ && !targetm.can_change_mode_class (GET_MODE (x_inner), mode, ALL_REGS))
+ {
+ x = x_inner;
+ y = y_inner;
+ mode = GET_MODE (x_inner);
+ }
+ else if (x_inner != NULL_RTX
+ && MEM_P (y)
+ && candidate_mem_p (GET_MODE (x_inner), y))
+ {
+ x = x_inner;
+ y = adjust_address (y, GET_MODE (x_inner), 0);
+ mode = GET_MODE (x_inner);
+ }
+ else if (y_inner != NULL_RTX
+ && MEM_P (x)
+ && candidate_mem_p (GET_MODE (y_inner), x))
+ {
+ x = adjust_address (x, GET_MODE (y_inner), 0);
+ y = y_inner;
+ mode = GET_MODE (y_inner);
+ }
+ else if (x_inner != NULL_RTX
+ && CONSTANT_P (y)
+ && !targetm.can_change_mode_class (GET_MODE (x_inner),
+ mode, ALL_REGS)
+ && (y_inner = simplify_subreg (GET_MODE (x_inner), y, mode, 0)))
+ {
+ x = x_inner;
+ y = y_inner;
+ mode = GET_MODE (x_inner);
+ }
+
if (CONSTANT_P (y))
{
if (optimize
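To illustrate the kind of source that reaches these patterns (an assumed
example; the patch itself only describes the RTL shapes): a bit-cast style
copy between equal-sized types can expand as a move through a subreg of a
different mode class, which some targets can only perform via a stack slot.
Retrying the move in the inner mode M2 sidesteps that round trip:

/* Sketch: on a target with 128-bit vectors this memcpy can expand to
   (set (reg:TI r) (subreg:TI (reg:V4SI x) 0)); if TImode <-> V4SImode
   moves would spill, emit_move_insn now does the copy in V4SImode.  */
typedef int v4si __attribute__ ((vector_size (16)));

unsigned __int128
bitcast_to_int128 (v4si x)
{
  unsigned __int128 r;
  __builtin_memcpy (&r, &x, sizeof r);
  return r;
}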
@@ -5583,6 +5663,7 @@ store_expr (tree exp, rtx target, int call_param_p,
rtx temp;
rtx alt_rtl = NULL_RTX;
location_t loc = curr_insn_location ();
+ bool shortened_string_cst = false;
if (VOID_TYPE_P (TREE_TYPE (exp)))
{
@@ -5749,10 +5830,40 @@ store_expr (tree exp, rtx target, int call_param_p,
/* If we want to use a nontemporal or a reverse order store, force the
value into a register first. */
tmp_target = nontemporal || reverse ? NULL_RTX : target;
- temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
+ tree rexp = exp;
+ if (TREE_CODE (exp) == STRING_CST
+ && tmp_target == target
+ && GET_MODE (target) == BLKmode
+ && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
+ {
+ rtx size = expr_size (exp);
+ if (CONST_INT_P (size)
+ && size != const0_rtx
+ && (UINTVAL (size)
+ > ((unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (exp) + 32)))
+ {
+ /* If the STRING_CST has much larger array type than
+ TREE_STRING_LENGTH, only emit the TREE_STRING_LENGTH part of
+ it into the rodata section as the code later on will use
+ memset zero for the remainder anyway. See PR95052. */
+ tmp_target = NULL_RTX;
+ rexp = copy_node (exp);
+ tree index
+ = build_index_type (size_int (TREE_STRING_LENGTH (exp) - 1));
+ TREE_TYPE (rexp) = build_array_type (TREE_TYPE (TREE_TYPE (exp)),
+ index);
+ shortened_string_cst = true;
+ }
+ }
+ temp = expand_expr_real (rexp, tmp_target, GET_MODE (target),
(call_param_p
? EXPAND_STACK_PARM : EXPAND_NORMAL),
&alt_rtl, false);
+ if (shortened_string_cst)
+ {
+ gcc_assert (MEM_P (temp));
+ temp = change_address (temp, BLKmode, NULL_RTX);
+ }
}
/* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
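The situation PR95052 describes looks roughly like this (a sketch; the PR's
exact testcase may differ):

/* The initializer is a 4-byte STRING_CST ("abc" plus the NUL) carrying
   a 64 KiB array type.  Previously all 65536 bytes were emitted into
   .rodata only to be copied once; now just the TREE_STRING_LENGTH bytes
   are emitted, and the tail is cleared by the memset the expansion
   already performs for the remainder.  */
void use (char *);

void
f (void)
{
  char buf[65536] = "abc";
  use (buf);
}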
@@ -5763,6 +5874,7 @@ store_expr (tree exp, rtx target, int call_param_p,
&& TREE_CODE (exp) != ERROR_MARK
&& GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
{
+ gcc_assert (!shortened_string_cst);
if (GET_MODE_CLASS (GET_MODE (target))
!= GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
&& known_eq (GET_MODE_BITSIZE (GET_MODE (target)),
@@ -5815,6 +5927,7 @@ store_expr (tree exp, rtx target, int call_param_p,
{
if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
{
+ gcc_assert (!shortened_string_cst);
if (GET_MODE (target) == BLKmode)
{
/* Handle calls that return BLKmode values in registers. */
@@ -5900,6 +6013,8 @@ store_expr (tree exp, rtx target, int call_param_p,
emit_label (label);
}
}
+ else if (shortened_string_cst)
+ gcc_unreachable ();
/* Handle calls that return values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */
else if (GET_CODE (target) == PARALLEL)
@@ -5929,6 +6044,8 @@ store_expr (tree exp, rtx target, int call_param_p,
emit_move_insn (target, temp);
}
}
+ else
+ gcc_assert (!shortened_string_cst);
return NULL_RTX;
}
@@ -8268,7 +8385,12 @@ expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
/* Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
if (target == 0 || ! safe_from_p (target, exp, 1)
- || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
+ || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM
+ /* Also make a temporary if the store is to volatile memory, to
+ avoid individual accesses to aggregate members. */
+ || (GET_CODE (target) == MEM
+ && MEM_VOLATILE_P (target)
+ && !TREE_ADDRESSABLE (TREE_TYPE (exp))))
{
if (avoid_temp_mem)
return NULL_RTX;
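A sketch of the case the new volatile condition targets (example assumed,
not from the patch):

/* With a volatile destination the CONSTRUCTOR is now built in a
   temporary and stored as one object, instead of as a sequence of
   individual volatile member stores.  */
struct pair { int a, b; };
volatile struct pair dev_reg;

void
init_reg (void)
{
  dev_reg = (struct pair) { 1, 2 };
}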
@@ -8553,7 +8675,9 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
reduce_bit_field = (INTEGRAL_TYPE_P (type)
&& !type_has_mode_precision_p (type));
- if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
+ if (reduce_bit_field
+ && (modifier == EXPAND_STACK_PARM
+ || (target && GET_MODE (target) != mode)))
target = 0;
/* Use subtarget as the target for operand 0 of a binary operation. */
@@ -9205,17 +9329,8 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
if (temp != 0)
return temp;
- /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
- and similarly for MAX <x, y>. */
if (VECTOR_TYPE_P (type))
- {
- tree t0 = make_tree (type, op0);
- tree t1 = make_tree (type, op1);
- tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
- type, t0, t1);
- return expand_vec_cond_expr (type, comparison, t0, t1,
- original_target);
- }
+ gcc_unreachable ();
/* At this point, a MEM target is no longer useful; we will get better
code without it. */
@@ -9804,10 +9919,6 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
return temp;
}
- case VEC_COND_EXPR:
- target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
- return target;
-
case VEC_DUPLICATE_EXPR:
op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
target = expand_vector_broadcast (mode, op0);
@@ -11427,26 +11538,26 @@ expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
+ scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
HOST_WIDE_INT prec = TYPE_PRECISION (type);
- if (target && GET_MODE (target) != GET_MODE (exp))
- target = 0;
- /* For constant values, reduce using build_int_cst_type. */
- poly_int64 const_exp;
- if (poly_int_rtx_p (exp, &const_exp))
+ gcc_assert ((GET_MODE (exp) == VOIDmode || GET_MODE (exp) == mode)
+ && (!target || GET_MODE (target) == mode));
+
+ /* For constant values, reduce using wide_int_to_tree. */
+ if (poly_int_rtx_p (exp))
{
- tree t = build_int_cst_type (type, const_exp);
+ auto value = wi::to_poly_wide (exp, mode);
+ tree t = wide_int_to_tree (type, value);
return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
}
else if (TYPE_UNSIGNED (type))
{
- scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
rtx mask = immed_wide_int_const
(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
return expand_and (mode, exp, mask, target);
}
else
{
- scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
int count = GET_MODE_PRECISION (mode) - prec;
exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
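Concretely, for a value of a 3-bit integral type held in a 32-bit scalar
mode (prec == 3, mode precision 32), the two reductions correspond to the
following C-level operations (a worked illustration, not code from the
patch):

/* Unsigned: AND with a mask of the low PREC bits.  */
unsigned
reduce_u3 (unsigned x)
{
  return x & 0x7u;
}

/* Signed: the LSHIFT_EXPR/RSHIFT_EXPR pair, shifting by 32 - 3 = 29
   so the arithmetic right shift sign-extends from bit 2.  */
int
reduce_s3 (int x)
{
  return (int) ((unsigned) x << 29) >> 29;
}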
@@ -11489,15 +11600,112 @@ is_aligning_offset (const_tree offset, const_tree exp)
/* This must now be the address of EXP. */
return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
-
-/* Return the tree node if an ARG corresponds to a string constant or zero
- if it doesn't. If we return nonzero, set *PTR_OFFSET to the (possibly
- non-constant) offset in bytes within the string that ARG is accessing.
- If MEM_SIZE is non-zero the storage size of the memory is returned.
- If DECL is non-zero the constant declaration is returned if available. */
-tree
-string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
+/* If EXPR is a constant initializer (either an expression or CONSTRUCTOR),
+ attempt to append its native representation to the byte vector BYTES.
+ Return true on success and false on failure (the latter without modifying
+ BYTES). */
+
+static bool
+convert_to_bytes (tree type, tree expr, vec<unsigned char> *bytes)
+{
+ if (TREE_CODE (expr) == CONSTRUCTOR)
+ {
+ /* Set to the size of the CONSTRUCTOR elements. */
+ unsigned HOST_WIDE_INT ctor_size = bytes->length ();
+
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ tree val, idx;
+ tree eltype = TREE_TYPE (type);
+ unsigned HOST_WIDE_INT elsize =
+ tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
+
+ /* Jump through hoops to determine the lower bound for languages
+ like Ada that can set it to an (almost) arbitrary value. */
+ tree dom = TYPE_DOMAIN (type);
+ if (!dom)
+ return false;
+ tree min = TYPE_MIN_VALUE (dom);
+ if (!min || !tree_fits_uhwi_p (min))
+ return false;
+ unsigned HOST_WIDE_INT i, last_idx = tree_to_uhwi (min) - 1;
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, idx, val)
+ {
+ /* Append zeros for elements with no initializers. */
+ if (!tree_fits_uhwi_p (idx))
+ return false;
+ unsigned HOST_WIDE_INT cur_idx = tree_to_uhwi (idx);
+ if (unsigned HOST_WIDE_INT size = cur_idx - (last_idx + 1))
+ {
+ size = size * elsize + bytes->length ();
+ bytes->safe_grow_cleared (size);
+ }
+
+ if (!convert_to_bytes (eltype, val, bytes))
+ return false;
+
+ last_idx = cur_idx;
+ }
+ }
+ else if (TREE_CODE (type) == RECORD_TYPE)
+ {
+ tree val, fld;
+ unsigned HOST_WIDE_INT i;
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, fld, val)
+ {
+ /* Append zeros for members with no initializers and
+ any padding. */
+ unsigned HOST_WIDE_INT cur_off = int_byte_position (fld);
+ if (bytes->length () < cur_off)
+ bytes->safe_grow_cleared (cur_off);
+
+ if (!convert_to_bytes (TREE_TYPE (val), val, bytes))
+ return false;
+ }
+ }
+ else
+ return false;
+
+ /* Compute the size of the CONSTRUCTOR elements. */
+ ctor_size = bytes->length () - ctor_size;
+
+ /* Append zeros to the byte vector to the full size of the type.
+ The type size can be less than the size of the CONSTRUCTOR
+ if the latter contains initializers for a flexible array
+ member. */
+ tree size = TYPE_SIZE_UNIT (type);
+ unsigned HOST_WIDE_INT type_size = tree_to_uhwi (size);
+ if (ctor_size < type_size)
+ if (unsigned HOST_WIDE_INT size_grow = type_size - ctor_size)
+ bytes->safe_grow_cleared (bytes->length () + size_grow);
+
+ return true;
+ }
+
+ unsigned char charbuf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
+ int len = native_encode_expr (expr, charbuf, sizeof charbuf, 0);
+ if (len <= 0)
+ return false;
+
+ unsigned n = bytes->length ();
+ bytes->safe_grow (n + len);
+ unsigned char *p = bytes->address ();
+ memcpy (p + n, charbuf, len);
+ return true;
+}
+
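To make the "value representation" concrete (an illustration assuming a
little-endian target with 4-byte int and 4-byte struct alignment):

struct S { char c; int i; };
const struct S s = { 1, 2 };
/* Byte vector built by convert_to_bytes for s:
     01 00 00 00 02 00 00 00
   The three bytes after 'c' are padding, grown in as zeros by
   safe_grow_cleared because int_byte_position of 'i' is 4, before
   the native encoding of 'i' is appended.  */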
+/* Return a STRING_CST corresponding to ARG's constant initializer if
+ it's a string constant or, when VALREP is set, any other constant;
+ return null otherwise.
+ On success, set *PTR_OFFSET to the (possibly non-constant) byte offset
+ within the byte string that ARG references. If nonnull, set *MEM_SIZE
+ to the size of the byte string. If nonnull, set *DECL to the constant
+ declaration ARG refers to. */
+
+static tree
+constant_byte_string (tree arg, tree *ptr_offset, tree *mem_size, tree *decl,
+ bool valrep = false)
{
tree dummy = NULL_TREE;
if (!mem_size)
@@ -11644,18 +11852,43 @@ string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
return array;
}
- if (!VAR_P (array) && TREE_CODE (array) != CONST_DECL)
- return NULL_TREE;
-
tree init = ctor_for_folding (array);
-
- /* Handle variables initialized with string literals. */
if (!init || init == error_mark_node)
return NULL_TREE;
+
+ if (valrep)
+ {
+ HOST_WIDE_INT cstoff;
+ if (!base_off.is_constant (&cstoff))
+ return NULL_TREE;
+
+ /* If value representation was requested convert the initializer
+ for the whole array or object into a string of bytes forming
+ its value representation and return it. */
+ auto_vec<unsigned char> bytes;
+ if (!convert_to_bytes (TREE_TYPE (init), init, &bytes))
+ return NULL_TREE;
+
+ unsigned n = bytes.length ();
+ const char *p = reinterpret_cast<const char *>(bytes.address ());
+ init = build_string_literal (n, p, char_type_node);
+ init = TREE_OPERAND (init, 0);
+ init = TREE_OPERAND (init, 0);
+
+ *mem_size = size_int (TREE_STRING_LENGTH (init));
+ *ptr_offset = wide_int_to_tree (ssizetype, base_off);
+
+ if (decl)
+ *decl = array;
+
+ return init;
+ }
+
if (TREE_CODE (init) == CONSTRUCTOR)
{
/* Convert the 64-bit constant offset to a wider type to avoid
- overflow. */
+ overflow and use it to obtain the initializer for the subobject
+ it points into. */
offset_int wioff;
if (!base_off.is_constant (&wioff))
return NULL_TREE;
@@ -11668,6 +11901,9 @@ string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
unsigned HOST_WIDE_INT fieldoff = 0;
init = fold_ctor_reference (TREE_TYPE (arg), init, base_off, 0, array,
&fieldoff);
+ if (!init || init == error_mark_node)
+ return NULL_TREE;
+
HOST_WIDE_INT cstoff;
if (!base_off.is_constant (&cstoff))
return NULL_TREE;
@@ -11680,9 +11916,6 @@ string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
offset = off;
}
- if (!init)
- return NULL_TREE;
-
*ptr_offset = offset;
tree inittype = TREE_TYPE (init);
@@ -11723,17 +11956,22 @@ string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
chartype = TREE_TYPE (chartype);
while (TREE_CODE (chartype) == ARRAY_TYPE)
chartype = TREE_TYPE (chartype);
- /* Convert a char array to an empty STRING_CST having an array
- of the expected type. */
- if (!initsize)
- initsize = integer_zero_node;
- unsigned HOST_WIDE_INT size = tree_to_uhwi (initsize);
- init = build_string_literal (size ? 1 : 0, "", chartype, size);
- init = TREE_OPERAND (init, 0);
- init = TREE_OPERAND (init, 0);
+ if (INTEGRAL_TYPE_P (chartype)
+ && TYPE_PRECISION (chartype) == TYPE_PRECISION (char_type_node))
+ {
+ /* Convert a char array to an empty STRING_CST having an array
+ of the expected type and size. */
+ if (!initsize)
+ initsize = integer_zero_node;
- *ptr_offset = integer_zero_node;
+ unsigned HOST_WIDE_INT size = tree_to_uhwi (initsize);
+ init = build_string_literal (size, NULL, chartype, size);
+ init = TREE_OPERAND (init, 0);
+ init = TREE_OPERAND (init, 0);
+
+ *ptr_offset = integer_zero_node;
+ }
}
if (decl)
@@ -11748,37 +11986,27 @@ string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
return init;
}
-
-/* Compute the modular multiplicative inverse of A modulo M
- using extended Euclid's algorithm. Assumes A and M are coprime. */
-static wide_int
-mod_inv (const wide_int &a, const wide_int &b)
-{
- /* Verify the assumption. */
- gcc_checking_assert (wi::eq_p (wi::gcd (a, b), 1));
- unsigned int p = a.get_precision () + 1;
- gcc_checking_assert (b.get_precision () + 1 == p);
- wide_int c = wide_int::from (a, p, UNSIGNED);
- wide_int d = wide_int::from (b, p, UNSIGNED);
- wide_int x0 = wide_int::from (0, p, UNSIGNED);
- wide_int x1 = wide_int::from (1, p, UNSIGNED);
+/* Return STRING_CST if ARG corresponds to a string constant, or zero
+ if it doesn't. If we return nonzero, set *PTR_OFFSET to the (possibly
+ non-constant) offset in bytes within the string that ARG is accessing.
+ If MEM_SIZE is non-zero the storage size of the memory is returned.
+ If DECL is non-zero the constant declaration is returned if available. */
- if (wi::eq_p (b, 1))
- return wide_int::from (1, p, UNSIGNED);
+tree
+string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
+{
+ return constant_byte_string (arg, ptr_offset, mem_size, decl, false);
+}
- while (wi::gt_p (c, 1, UNSIGNED))
- {
- wide_int t = d;
- wide_int q = wi::divmod_trunc (c, d, UNSIGNED, &d);
- c = t;
- wide_int s = x0;
- x0 = wi::sub (x1, wi::mul (q, x0));
- x1 = s;
- }
- if (wi::lt_p (x1, 0, SIGNED))
- x1 += d;
- return x1;
+/* Similar to string_constant, return a STRING_CST corresponding
+ to the value representation of the first argument if it's
+ a constant. */
+
+tree
+byte_representation (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
+{
+ return constant_byte_string (arg, ptr_offset, mem_size, decl, true);
}
/* Optimize x % C1 == C2 for signed modulo if C1 is a power of two and C2
@@ -11991,7 +12219,7 @@ maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
w = wi::lrshift (w, shift);
wide_int a = wide_int::from (w, prec + 1, UNSIGNED);
wide_int b = wi::shifted_mask (prec, 1, false, prec + 1);
- wide_int m = wide_int::from (mod_inv (a, b), prec, UNSIGNED);
+ wide_int m = wide_int::from (wi::mod_inv (a, b), prec, UNSIGNED);
tree c3 = wide_int_to_tree (type, m);
tree c5 = NULL_TREE;
wide_int d, e;
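As a worked instance of what the inverse feeds (unsigned 32-bit, C1 == 5,
C2 == 0; the function name is made up for illustration):

/* 5 * 0xCCCCCCCD == 1 (mod 2^32), so 0xCCCCCCCD is the inverse c3.
   Multiples of 5 map bijectively onto [0, UINT_MAX / 5], turning the
   modulo test into one multiply and one unsigned comparison.  */
unsigned
is_multiple_of_5 (unsigned x)
{
  return x * 0xCCCCCCCDu <= 0x33333333u;  /* 0x33333333 == UINT_MAX / 5 */
}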
@@ -12138,8 +12366,7 @@ do_store_flag (sepops ops, rtx target, machine_mode mode)
STRIP_NOPS (arg1);
/* For vector typed comparisons emit code to generate the desired
- all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
- expander for this. */
+ all-ones or all-zeros mask. */
if (TREE_CODE (ops->type) == VECTOR_TYPE)
{
tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
@@ -12147,12 +12374,7 @@ do_store_flag (sepops ops, rtx target, machine_mode mode)
&& expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
return expand_vec_cmp_expr (ops->type, ifexp, target);
else
- {
- tree if_true = constant_boolean_node (true, ops->type);
- tree if_false = constant_boolean_node (false, ops->type);
- return expand_vec_cond_expr (ops->type, ifexp, if_true,
- if_false, target);
- }
+ gcc_unreachable ();
}
/* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial