author    Roger Sayle <roger@eyesopen.com>    2003-07-18 00:26:51 +0000
committer Roger Sayle <sayle@gcc.gnu.org>    2003-07-18 00:26:51 +0000
commit    3e4093b64f2e2623bba32e733e7e8f08efc700ac
tree      3875a0446ee77a593d30fb10e6a14b62fcc0f4ec
parent    35c77862c921a82a9d2bc4175dc041bd1e99e2bd
fold-const.c (const_binop): Avoid performing the FP operation at compile-time...
	* fold-const.c (const_binop): Avoid performing the FP operation at
	compile-time, if either operand is NaN and we honor signaling NaNs,
	or if we're dividing by zero and either flag_trapping_math is set
	or the desired mode doesn't support infinities.
	(fold_initializer): New function to fold an expression ignoring any
	potential run-time exceptions or traps.
	* tree.h (fold_initializer): Prototype here.
	* c-typeck.c (build_binary_op): Move to the end of the file so
	that initializer_stack is in scope. If constructing an initializer,
	i.e. when initializer_stack is not NULL, use fold_initializer to
	fold expressions.
	* simplify-rtx.c (simplify_binary_operation): Likewise, avoid
	performing FP operations at compile-time, if they would raise an
	exception at run-time.

From-SVN: r69533
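To make the intent concrete, here is an illustrative C test case (ours, not part of the patch), assuming -ftrapping-math and a floating-point mode that supports infinities:

    /* The static initializer must still be evaluated at compile time;
       fold_initializer ignores the trap, so this folds to +Inf.  */
    double global_inf = 1.0 / 0.0;

    double
    runtime_div (void)
    {
      /* After this patch, const_binop and simplify_binary_operation
         refuse to fold this, so the division happens at run time,
         where it can raise FE_DIVBYZERO.  */
      return 1.0 / 0.0;
    }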
-rw-r--r--  gcc/ChangeLog      |   17
-rw-r--r--  gcc/c-typeck.c     | 1463
-rw-r--r--  gcc/fold-const.c   |   48
-rw-r--r--  gcc/simplify-rtx.c |    8
-rw-r--r--  gcc/tree.h         |    1
5 files changed, 801 insertions, 736 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index fea6e3b..dfff20c 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,20 @@
+2003-07-17 Roger Sayle <roger@eyesopen.com>
+
+ * fold-const.c (const_binop): Avoid performing the FP operation at
+ compile-time, if either operand is NaN and we honor signaling NaNs,
+ or if we're dividing by zero and either flag_trapping_math is set
+ or the desired mode doesn't support infinities.
+ (fold_initializer): New function to fold an expression ignoring any
+ potential run-time exceptions or traps.
+ * tree.h (fold_initializer): Prototype here.
+ * c-typeck.c (build_binary_op): Move to the end of the file so
+ that initializer_stack is in scope. If constructing an initializer,
+ i.e. when initializer_stack is not NULL, use fold_initializer to
+ fold expressions.
+ * simplify-rtx.c (simplify_binary_operation): Likewise, avoid
+ performing FP operations at compile-time, if they would raise an
+ exception at run-time.
+
2003-07-17 Geoffrey Keating <geoffk@apple.com>
PR 11498
diff --git a/gcc/c-typeck.c b/gcc/c-typeck.c
index 43b5797..c4154de 100644
--- a/gcc/c-typeck.c
+++ b/gcc/c-typeck.c
@@ -2008,736 +2008,6 @@ parser_build_binary_op (enum tree_code code, tree arg1, tree arg2)
return result;
}
-
-/* Build a binary-operation expression without default conversions.
- CODE is the kind of expression to build.
- This function differs from `build' in several ways:
- the data type of the result is computed and recorded in it,
- warnings are generated if arg data types are invalid,
- special handling for addition and subtraction of pointers is known,
- and some optimization is done (operations on narrow ints
- are done in the narrower type when that gives the same result).
- Constant folding is also done before the result is returned.
-
- Note that the operands will never have enumeral types, or function
- or array types, because either they will have the default conversions
- performed or they have both just been converted to some other type in which
- the arithmetic is to be done. */
-
-tree
-build_binary_op (enum tree_code code, tree orig_op0, tree orig_op1,
- int convert_p)
-{
- tree type0, type1;
- enum tree_code code0, code1;
- tree op0, op1;
-
- /* Expression code to give to the expression when it is built.
- Normally this is CODE, which is what the caller asked for,
- but in some special cases we change it. */
- enum tree_code resultcode = code;
-
- /* Data type in which the computation is to be performed.
- In the simplest cases this is the common type of the arguments. */
- tree result_type = NULL;
-
- /* Nonzero means operands have already been type-converted
- in whatever way is necessary.
- Zero means they need to be converted to RESULT_TYPE. */
- int converted = 0;
-
- /* Nonzero means create the expression with this type, rather than
- RESULT_TYPE. */
- tree build_type = 0;
-
- /* Nonzero means after finally constructing the expression
- convert it to this type. */
- tree final_type = 0;
-
- /* Nonzero if this is an operation like MIN or MAX which can
- safely be computed in short if both args are promoted shorts.
- Also implies COMMON.
- -1 indicates a bitwise operation; this makes a difference
- in the exact conditions for when it is safe to do the operation
- in a narrower mode. */
- int shorten = 0;
-
- /* Nonzero if this is a comparison operation;
- if both args are promoted shorts, compare the original shorts.
- Also implies COMMON. */
- int short_compare = 0;
-
- /* Nonzero if this is a right-shift operation, which can be computed on the
- original short and then promoted if the operand is a promoted short. */
- int short_shift = 0;
-
- /* Nonzero means set RESULT_TYPE to the common type of the args. */
- int common = 0;
-
- if (convert_p)
- {
- op0 = default_conversion (orig_op0);
- op1 = default_conversion (orig_op1);
- }
- else
- {
- op0 = orig_op0;
- op1 = orig_op1;
- }
-
- type0 = TREE_TYPE (op0);
- type1 = TREE_TYPE (op1);
-
- /* The expression codes of the data types of the arguments tell us
- whether the arguments are integers, floating, pointers, etc. */
- code0 = TREE_CODE (type0);
- code1 = TREE_CODE (type1);
-
- /* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue. */
- STRIP_TYPE_NOPS (op0);
- STRIP_TYPE_NOPS (op1);
-
- /* If an error was already reported for one of the arguments,
- avoid reporting another error. */
-
- if (code0 == ERROR_MARK || code1 == ERROR_MARK)
- return error_mark_node;
-
- switch (code)
- {
- case PLUS_EXPR:
- /* Handle the pointer + int case. */
- if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
- return pointer_int_sum (PLUS_EXPR, op0, op1);
- else if (code1 == POINTER_TYPE && code0 == INTEGER_TYPE)
- return pointer_int_sum (PLUS_EXPR, op1, op0);
- else
- common = 1;
- break;
-
- case MINUS_EXPR:
- /* Subtraction of two similar pointers.
- We must subtract them as integers, then divide by object size. */
- if (code0 == POINTER_TYPE && code1 == POINTER_TYPE
- && comp_target_types (type0, type1, 1))
- return pointer_diff (op0, op1);
- /* Handle pointer minus int. Just like pointer plus int. */
- else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
- return pointer_int_sum (MINUS_EXPR, op0, op1);
- else
- common = 1;
- break;
-
- case MULT_EXPR:
- common = 1;
- break;
-
- case TRUNC_DIV_EXPR:
- case CEIL_DIV_EXPR:
- case FLOOR_DIV_EXPR:
- case ROUND_DIV_EXPR:
- case EXACT_DIV_EXPR:
- /* Floating point division by zero is a legitimate way to obtain
- infinities and NaNs. */
- if (warn_div_by_zero && skip_evaluation == 0 && integer_zerop (op1))
- warning ("division by zero");
-
- if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE
- || code0 == COMPLEX_TYPE || code0 == VECTOR_TYPE)
- && (code1 == INTEGER_TYPE || code1 == REAL_TYPE
- || code1 == COMPLEX_TYPE || code1 == VECTOR_TYPE))
- {
- if (!(code0 == INTEGER_TYPE && code1 == INTEGER_TYPE))
- resultcode = RDIV_EXPR;
- else
- /* Although it would be tempting to shorten always here, that
- loses on some targets, since the modulo instruction is
- undefined if the quotient can't be represented in the
- computation mode. We shorten only if unsigned or if
- dividing by something we know != -1. */
- shorten = (TREE_UNSIGNED (TREE_TYPE (orig_op0))
- || (TREE_CODE (op1) == INTEGER_CST
- && ! integer_all_onesp (op1)));
- common = 1;
- }
- break;
-
- case BIT_AND_EXPR:
- case BIT_ANDTC_EXPR:
- case BIT_IOR_EXPR:
- case BIT_XOR_EXPR:
- if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
- shorten = -1;
- else if (code0 == VECTOR_TYPE && code1 == VECTOR_TYPE)
- common = 1;
- break;
-
- case TRUNC_MOD_EXPR:
- case FLOOR_MOD_EXPR:
- if (warn_div_by_zero && skip_evaluation == 0 && integer_zerop (op1))
- warning ("division by zero");
-
- if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
- {
- /* Although it would be tempting to shorten always here, that loses
- on some targets, since the modulo instruction is undefined if the
- quotient can't be represented in the computation mode. We shorten
- only if unsigned or if dividing by something we know != -1. */
- shorten = (TREE_UNSIGNED (TREE_TYPE (orig_op0))
- || (TREE_CODE (op1) == INTEGER_CST
- && ! integer_all_onesp (op1)));
- common = 1;
- }
- break;
-
- case TRUTH_ANDIF_EXPR:
- case TRUTH_ORIF_EXPR:
- case TRUTH_AND_EXPR:
- case TRUTH_OR_EXPR:
- case TRUTH_XOR_EXPR:
- if ((code0 == INTEGER_TYPE || code0 == POINTER_TYPE
- || code0 == REAL_TYPE || code0 == COMPLEX_TYPE)
- && (code1 == INTEGER_TYPE || code1 == POINTER_TYPE
- || code1 == REAL_TYPE || code1 == COMPLEX_TYPE))
- {
- /* Result of these operations is always an int,
- but that does not mean the operands should be
- converted to ints! */
- result_type = integer_type_node;
- op0 = c_common_truthvalue_conversion (op0);
- op1 = c_common_truthvalue_conversion (op1);
- converted = 1;
- }
- break;
-
- /* Shift operations: result has same type as first operand;
- always convert second operand to int.
- Also set SHORT_SHIFT if shifting rightward. */
-
- case RSHIFT_EXPR:
- if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
- {
- if (TREE_CODE (op1) == INTEGER_CST && skip_evaluation == 0)
- {
- if (tree_int_cst_sgn (op1) < 0)
- warning ("right shift count is negative");
- else
- {
- if (! integer_zerop (op1))
- short_shift = 1;
-
- if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0)
- warning ("right shift count >= width of type");
- }
- }
-
- /* Use the type of the value to be shifted. */
- result_type = type0;
- /* Convert the shift-count to an integer, regardless of size
- of value being shifted. */
- if (TYPE_MAIN_VARIANT (TREE_TYPE (op1)) != integer_type_node)
- op1 = convert (integer_type_node, op1);
- /* Avoid converting op1 to result_type later. */
- converted = 1;
- }
- break;
-
- case LSHIFT_EXPR:
- if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
- {
- if (TREE_CODE (op1) == INTEGER_CST && skip_evaluation == 0)
- {
- if (tree_int_cst_sgn (op1) < 0)
- warning ("left shift count is negative");
-
- else if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0)
- warning ("left shift count >= width of type");
- }
-
- /* Use the type of the value to be shifted. */
- result_type = type0;
- /* Convert the shift-count to an integer, regardless of size
- of value being shifted. */
- if (TYPE_MAIN_VARIANT (TREE_TYPE (op1)) != integer_type_node)
- op1 = convert (integer_type_node, op1);
- /* Avoid converting op1 to result_type later. */
- converted = 1;
- }
- break;
-
- case RROTATE_EXPR:
- case LROTATE_EXPR:
- if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
- {
- if (TREE_CODE (op1) == INTEGER_CST && skip_evaluation == 0)
- {
- if (tree_int_cst_sgn (op1) < 0)
- warning ("shift count is negative");
- else if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0)
- warning ("shift count >= width of type");
- }
-
- /* Use the type of the value to be shifted. */
- result_type = type0;
- /* Convert the shift-count to an integer, regardless of size
- of value being shifted. */
- if (TYPE_MAIN_VARIANT (TREE_TYPE (op1)) != integer_type_node)
- op1 = convert (integer_type_node, op1);
- /* Avoid converting op1 to result_type later. */
- converted = 1;
- }
- break;
-
- case EQ_EXPR:
- case NE_EXPR:
- if (warn_float_equal && (code0 == REAL_TYPE || code1 == REAL_TYPE))
- warning ("comparing floating point with == or != is unsafe");
- /* Result of comparison is always int,
- but don't convert the args to int! */
- build_type = integer_type_node;
- if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE
- || code0 == COMPLEX_TYPE
- || code0 == VECTOR_TYPE)
- && (code1 == INTEGER_TYPE || code1 == REAL_TYPE
- || code1 == COMPLEX_TYPE
- || code1 == VECTOR_TYPE))
- short_compare = 1;
- else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE)
- {
- tree tt0 = TREE_TYPE (type0);
- tree tt1 = TREE_TYPE (type1);
- /* Anything compares with void *. void * compares with anything.
- Otherwise, the targets must be compatible
- and both must be object or both incomplete. */
- if (comp_target_types (type0, type1, 1))
- result_type = common_type (type0, type1);
- else if (VOID_TYPE_P (tt0))
- {
- /* op0 != orig_op0 detects the case of something
- whose value is 0 but which isn't a valid null ptr const. */
- if (pedantic && (!integer_zerop (op0) || op0 != orig_op0)
- && TREE_CODE (tt1) == FUNCTION_TYPE)
- pedwarn ("ISO C forbids comparison of `void *' with function pointer");
- }
- else if (VOID_TYPE_P (tt1))
- {
- if (pedantic && (!integer_zerop (op1) || op1 != orig_op1)
- && TREE_CODE (tt0) == FUNCTION_TYPE)
- pedwarn ("ISO C forbids comparison of `void *' with function pointer");
- }
- else
- pedwarn ("comparison of distinct pointer types lacks a cast");
-
- if (result_type == NULL_TREE)
- result_type = ptr_type_node;
- }
- else if (code0 == POINTER_TYPE && TREE_CODE (op1) == INTEGER_CST
- && integer_zerop (op1))
- result_type = type0;
- else if (code1 == POINTER_TYPE && TREE_CODE (op0) == INTEGER_CST
- && integer_zerop (op0))
- result_type = type1;
- else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
- {
- result_type = type0;
- pedwarn ("comparison between pointer and integer");
- }
- else if (code0 == INTEGER_TYPE && code1 == POINTER_TYPE)
- {
- result_type = type1;
- pedwarn ("comparison between pointer and integer");
- }
- break;
-
- case MAX_EXPR:
- case MIN_EXPR:
- if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE)
- && (code1 == INTEGER_TYPE || code1 == REAL_TYPE))
- shorten = 1;
- else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE)
- {
- if (comp_target_types (type0, type1, 1))
- {
- result_type = common_type (type0, type1);
- if (pedantic
- && TREE_CODE (TREE_TYPE (type0)) == FUNCTION_TYPE)
- pedwarn ("ISO C forbids ordered comparisons of pointers to functions");
- }
- else
- {
- result_type = ptr_type_node;
- pedwarn ("comparison of distinct pointer types lacks a cast");
- }
- }
- break;
-
- case LE_EXPR:
- case GE_EXPR:
- case LT_EXPR:
- case GT_EXPR:
- build_type = integer_type_node;
- if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE)
- && (code1 == INTEGER_TYPE || code1 == REAL_TYPE))
- short_compare = 1;
- else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE)
- {
- if (comp_target_types (type0, type1, 1))
- {
- result_type = common_type (type0, type1);
- if (!COMPLETE_TYPE_P (TREE_TYPE (type0))
- != !COMPLETE_TYPE_P (TREE_TYPE (type1)))
- pedwarn ("comparison of complete and incomplete pointers");
- else if (pedantic
- && TREE_CODE (TREE_TYPE (type0)) == FUNCTION_TYPE)
- pedwarn ("ISO C forbids ordered comparisons of pointers to functions");
- }
- else
- {
- result_type = ptr_type_node;
- pedwarn ("comparison of distinct pointer types lacks a cast");
- }
- }
- else if (code0 == POINTER_TYPE && TREE_CODE (op1) == INTEGER_CST
- && integer_zerop (op1))
- {
- result_type = type0;
- if (pedantic || extra_warnings)
- pedwarn ("ordered comparison of pointer with integer zero");
- }
- else if (code1 == POINTER_TYPE && TREE_CODE (op0) == INTEGER_CST
- && integer_zerop (op0))
- {
- result_type = type1;
- if (pedantic)
- pedwarn ("ordered comparison of pointer with integer zero");
- }
- else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
- {
- result_type = type0;
- pedwarn ("comparison between pointer and integer");
- }
- else if (code0 == INTEGER_TYPE && code1 == POINTER_TYPE)
- {
- result_type = type1;
- pedwarn ("comparison between pointer and integer");
- }
- break;
-
- case UNORDERED_EXPR:
- case ORDERED_EXPR:
- case UNLT_EXPR:
- case UNLE_EXPR:
- case UNGT_EXPR:
- case UNGE_EXPR:
- case UNEQ_EXPR:
- build_type = integer_type_node;
- if (code0 != REAL_TYPE || code1 != REAL_TYPE)
- {
- error ("unordered comparison on non-floating point argument");
- return error_mark_node;
- }
- common = 1;
- break;
-
- default:
- break;
- }
-
- if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE || code0 == COMPLEX_TYPE
- || code0 == VECTOR_TYPE)
- &&
- (code1 == INTEGER_TYPE || code1 == REAL_TYPE || code1 == COMPLEX_TYPE
- || code1 == VECTOR_TYPE))
- {
- int none_complex = (code0 != COMPLEX_TYPE && code1 != COMPLEX_TYPE);
-
- if (shorten || common || short_compare)
- result_type = common_type (type0, type1);
-
- /* For certain operations (which identify themselves by shorten != 0)
- if both args were extended from the same smaller type,
- do the arithmetic in that type and then extend.
-
- shorten !=0 and !=1 indicates a bitwise operation.
- For them, this optimization is safe only if
- both args are zero-extended or both are sign-extended.
- Otherwise, we might change the result.
- Eg, (short)-1 | (unsigned short)-1 is (int)-1
- but calculated in (unsigned short) it would be (unsigned short)-1. */
-
- if (shorten && none_complex)
- {
- int unsigned0, unsigned1;
- tree arg0 = get_narrower (op0, &unsigned0);
- tree arg1 = get_narrower (op1, &unsigned1);
- /* UNS is 1 if the operation to be done is an unsigned one. */
- int uns = TREE_UNSIGNED (result_type);
- tree type;
-
- final_type = result_type;
-
- /* Handle the case that OP0 (or OP1) does not *contain* a conversion
- but it *requires* conversion to FINAL_TYPE. */
-
- if ((TYPE_PRECISION (TREE_TYPE (op0))
- == TYPE_PRECISION (TREE_TYPE (arg0)))
- && TREE_TYPE (op0) != final_type)
- unsigned0 = TREE_UNSIGNED (TREE_TYPE (op0));
- if ((TYPE_PRECISION (TREE_TYPE (op1))
- == TYPE_PRECISION (TREE_TYPE (arg1)))
- && TREE_TYPE (op1) != final_type)
- unsigned1 = TREE_UNSIGNED (TREE_TYPE (op1));
-
- /* Now UNSIGNED0 is 1 if ARG0 zero-extends to FINAL_TYPE. */
-
- /* For bitwise operations, signedness of nominal type
- does not matter. Consider only how operands were extended. */
- if (shorten == -1)
- uns = unsigned0;
-
- /* Note that in all three cases below we refrain from optimizing
- an unsigned operation on sign-extended args.
- That would not be valid. */
-
- /* Both args variable: if both extended in same way
- from same width, do it in that width.
- Do it unsigned if args were zero-extended. */
- if ((TYPE_PRECISION (TREE_TYPE (arg0))
- < TYPE_PRECISION (result_type))
- && (TYPE_PRECISION (TREE_TYPE (arg1))
- == TYPE_PRECISION (TREE_TYPE (arg0)))
- && unsigned0 == unsigned1
- && (unsigned0 || !uns))
- result_type
- = c_common_signed_or_unsigned_type
- (unsigned0, common_type (TREE_TYPE (arg0), TREE_TYPE (arg1)));
- else if (TREE_CODE (arg0) == INTEGER_CST
- && (unsigned1 || !uns)
- && (TYPE_PRECISION (TREE_TYPE (arg1))
- < TYPE_PRECISION (result_type))
- && (type
- = c_common_signed_or_unsigned_type (unsigned1,
- TREE_TYPE (arg1)),
- int_fits_type_p (arg0, type)))
- result_type = type;
- else if (TREE_CODE (arg1) == INTEGER_CST
- && (unsigned0 || !uns)
- && (TYPE_PRECISION (TREE_TYPE (arg0))
- < TYPE_PRECISION (result_type))
- && (type
- = c_common_signed_or_unsigned_type (unsigned0,
- TREE_TYPE (arg0)),
- int_fits_type_p (arg1, type)))
- result_type = type;
- }
-
- /* Shifts can be shortened if shifting right. */
-
- if (short_shift)
- {
- int unsigned_arg;
- tree arg0 = get_narrower (op0, &unsigned_arg);
-
- final_type = result_type;
-
- if (arg0 == op0 && final_type == TREE_TYPE (op0))
- unsigned_arg = TREE_UNSIGNED (TREE_TYPE (op0));
-
- if (TYPE_PRECISION (TREE_TYPE (arg0)) < TYPE_PRECISION (result_type)
- /* We can shorten only if the shift count is less than the
- number of bits in the smaller type size. */
- && compare_tree_int (op1, TYPE_PRECISION (TREE_TYPE (arg0))) < 0
- /* We cannot drop an unsigned shift after sign-extension. */
- && (!TREE_UNSIGNED (final_type) || unsigned_arg))
- {
- /* Do an unsigned shift if the operand was zero-extended. */
- result_type
- = c_common_signed_or_unsigned_type (unsigned_arg,
- TREE_TYPE (arg0));
- /* Convert value-to-be-shifted to that type. */
- if (TREE_TYPE (op0) != result_type)
- op0 = convert (result_type, op0);
- converted = 1;
- }
- }
-
- /* Comparison operations are shortened too but differently.
- They identify themselves by setting short_compare = 1. */
-
- if (short_compare)
- {
- /* Don't write &op0, etc., because that would prevent op0
- from being kept in a register.
- Instead, make copies of our local variables and
- pass the copies by reference, then copy them back afterward. */
- tree xop0 = op0, xop1 = op1, xresult_type = result_type;
- enum tree_code xresultcode = resultcode;
- tree val
- = shorten_compare (&xop0, &xop1, &xresult_type, &xresultcode);
-
- if (val != 0)
- return val;
-
- op0 = xop0, op1 = xop1;
- converted = 1;
- resultcode = xresultcode;
-
- if (warn_sign_compare && skip_evaluation == 0)
- {
- int op0_signed = ! TREE_UNSIGNED (TREE_TYPE (orig_op0));
- int op1_signed = ! TREE_UNSIGNED (TREE_TYPE (orig_op1));
- int unsignedp0, unsignedp1;
- tree primop0 = get_narrower (op0, &unsignedp0);
- tree primop1 = get_narrower (op1, &unsignedp1);
-
- xop0 = orig_op0;
- xop1 = orig_op1;
- STRIP_TYPE_NOPS (xop0);
- STRIP_TYPE_NOPS (xop1);
-
- /* Give warnings for comparisons between signed and unsigned
- quantities that may fail.
-
- Do the checking based on the original operand trees, so that
- casts will be considered, but default promotions won't be.
-
- Do not warn if the comparison is being done in a signed type,
- since the signed type will only be chosen if it can represent
- all the values of the unsigned type. */
- if (! TREE_UNSIGNED (result_type))
- /* OK */;
- /* Do not warn if both operands are the same signedness. */
- else if (op0_signed == op1_signed)
- /* OK */;
- else
- {
- tree sop, uop;
-
- if (op0_signed)
- sop = xop0, uop = xop1;
- else
- sop = xop1, uop = xop0;
-
- /* Do not warn if the signed quantity is an
- unsuffixed integer literal (or some static
- constant expression involving such literals or a
- conditional expression involving such literals)
- and it is non-negative. */
- if (c_tree_expr_nonnegative_p (sop))
- /* OK */;
- /* Do not warn if the comparison is an equality operation,
- the unsigned quantity is an integral constant, and it
- would fit in the result if the result were signed. */
- else if (TREE_CODE (uop) == INTEGER_CST
- && (resultcode == EQ_EXPR || resultcode == NE_EXPR)
- && int_fits_type_p
- (uop, c_common_signed_type (result_type)))
- /* OK */;
- /* Do not warn if the unsigned quantity is an enumeration
- constant and its maximum value would fit in the result
- if the result were signed. */
- else if (TREE_CODE (uop) == INTEGER_CST
- && TREE_CODE (TREE_TYPE (uop)) == ENUMERAL_TYPE
- && int_fits_type_p
- (TYPE_MAX_VALUE (TREE_TYPE(uop)),
- c_common_signed_type (result_type)))
- /* OK */;
- else
- warning ("comparison between signed and unsigned");
- }
-
- /* Warn if two unsigned values are being compared in a size
- larger than their original size, and one (and only one) is the
- result of a `~' operator. This comparison will always fail.
-
- Also warn if one operand is a constant, and the constant
- does not have all bits set that are set in the ~ operand
- when it is extended. */
-
- if ((TREE_CODE (primop0) == BIT_NOT_EXPR)
- != (TREE_CODE (primop1) == BIT_NOT_EXPR))
- {
- if (TREE_CODE (primop0) == BIT_NOT_EXPR)
- primop0 = get_narrower (TREE_OPERAND (primop0, 0),
- &unsignedp0);
- else
- primop1 = get_narrower (TREE_OPERAND (primop1, 0),
- &unsignedp1);
-
- if (host_integerp (primop0, 0) || host_integerp (primop1, 0))
- {
- tree primop;
- HOST_WIDE_INT constant, mask;
- int unsignedp, bits;
-
- if (host_integerp (primop0, 0))
- {
- primop = primop1;
- unsignedp = unsignedp1;
- constant = tree_low_cst (primop0, 0);
- }
- else
- {
- primop = primop0;
- unsignedp = unsignedp0;
- constant = tree_low_cst (primop1, 0);
- }
-
- bits = TYPE_PRECISION (TREE_TYPE (primop));
- if (bits < TYPE_PRECISION (result_type)
- && bits < HOST_BITS_PER_WIDE_INT && unsignedp)
- {
- mask = (~ (HOST_WIDE_INT) 0) << bits;
- if ((mask & constant) != mask)
- warning ("comparison of promoted ~unsigned with constant");
- }
- }
- else if (unsignedp0 && unsignedp1
- && (TYPE_PRECISION (TREE_TYPE (primop0))
- < TYPE_PRECISION (result_type))
- && (TYPE_PRECISION (TREE_TYPE (primop1))
- < TYPE_PRECISION (result_type)))
- warning ("comparison of promoted ~unsigned with unsigned");
- }
- }
- }
- }
-
- /* At this point, RESULT_TYPE must be nonzero to avoid an error message.
- If CONVERTED is zero, both args will be converted to type RESULT_TYPE.
- Then the expression will be built.
- It will be given type FINAL_TYPE if that is nonzero;
- otherwise, it will be given type RESULT_TYPE. */
-
- if (!result_type)
- {
- binary_op_error (code);
- return error_mark_node;
- }
-
- if (! converted)
- {
- if (TREE_TYPE (op0) != result_type)
- op0 = convert (result_type, op0);
- if (TREE_TYPE (op1) != result_type)
- op1 = convert (result_type, op1);
- }
-
- if (build_type == NULL_TREE)
- build_type = result_type;
-
- {
- tree result = build (resultcode, build_type, op0, op1);
- tree folded;
-
- folded = fold (result);
- if (folded == result)
- TREE_CONSTANT (folded) = TREE_CONSTANT (op0) & TREE_CONSTANT (op1);
- if (final_type != 0)
- return convert (final_type, folded);
- return folded;
- }
-}
/* Return true if `t' is known to be non-negative. */
@@ -7201,3 +6471,736 @@ c_finish_case (void)
splay_tree_delete (cs->cases);
free (cs);
}
+
+/* Build a binary-operation expression without default conversions.
+ CODE is the kind of expression to build.
+ This function differs from `build' in several ways:
+ the data type of the result is computed and recorded in it,
+ warnings are generated if arg data types are invalid,
+ special handling for addition and subtraction of pointers is known,
+ and some optimization is done (operations on narrow ints
+ are done in the narrower type when that gives the same result).
+ Constant folding is also done before the result is returned.
+
+ Note that the operands will never have enumeral types, or function
+ or array types, because either they will have the default conversions
+ performed or they have both just been converted to some other type in which
+ the arithmetic is to be done. */
+
+tree
+build_binary_op (enum tree_code code, tree orig_op0, tree orig_op1,
+ int convert_p)
+{
+ tree type0, type1;
+ enum tree_code code0, code1;
+ tree op0, op1;
+
+ /* Expression code to give to the expression when it is built.
+ Normally this is CODE, which is what the caller asked for,
+ but in some special cases we change it. */
+ enum tree_code resultcode = code;
+
+ /* Data type in which the computation is to be performed.
+ In the simplest cases this is the common type of the arguments. */
+ tree result_type = NULL;
+
+ /* Nonzero means operands have already been type-converted
+ in whatever way is necessary.
+ Zero means they need to be converted to RESULT_TYPE. */
+ int converted = 0;
+
+ /* Nonzero means create the expression with this type, rather than
+ RESULT_TYPE. */
+ tree build_type = 0;
+
+ /* Nonzero means after finally constructing the expression
+ convert it to this type. */
+ tree final_type = 0;
+
+ /* Nonzero if this is an operation like MIN or MAX which can
+ safely be computed in short if both args are promoted shorts.
+ Also implies COMMON.
+ -1 indicates a bitwise operation; this makes a difference
+ in the exact conditions for when it is safe to do the operation
+ in a narrower mode. */
+ int shorten = 0;
+
+ /* Nonzero if this is a comparison operation;
+ if both args are promoted shorts, compare the original shorts.
+ Also implies COMMON. */
+ int short_compare = 0;
+
+ /* Nonzero if this is a right-shift operation, which can be computed on the
+ original short and then promoted if the operand is a promoted short. */
+ int short_shift = 0;
+
+ /* Nonzero means set RESULT_TYPE to the common type of the args. */
+ int common = 0;
+
+ if (convert_p)
+ {
+ op0 = default_conversion (orig_op0);
+ op1 = default_conversion (orig_op1);
+ }
+ else
+ {
+ op0 = orig_op0;
+ op1 = orig_op1;
+ }
+
+ type0 = TREE_TYPE (op0);
+ type1 = TREE_TYPE (op1);
+
+ /* The expression codes of the data types of the arguments tell us
+ whether the arguments are integers, floating, pointers, etc. */
+ code0 = TREE_CODE (type0);
+ code1 = TREE_CODE (type1);
+
+ /* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue. */
+ STRIP_TYPE_NOPS (op0);
+ STRIP_TYPE_NOPS (op1);
+
+ /* If an error was already reported for one of the arguments,
+ avoid reporting another error. */
+
+ if (code0 == ERROR_MARK || code1 == ERROR_MARK)
+ return error_mark_node;
+
+ switch (code)
+ {
+ case PLUS_EXPR:
+ /* Handle the pointer + int case. */
+ if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
+ return pointer_int_sum (PLUS_EXPR, op0, op1);
+ else if (code1 == POINTER_TYPE && code0 == INTEGER_TYPE)
+ return pointer_int_sum (PLUS_EXPR, op1, op0);
+ else
+ common = 1;
+ break;
+
+ case MINUS_EXPR:
+ /* Subtraction of two similar pointers.
+ We must subtract them as integers, then divide by object size. */
+ if (code0 == POINTER_TYPE && code1 == POINTER_TYPE
+ && comp_target_types (type0, type1, 1))
+ return pointer_diff (op0, op1);
+ /* Handle pointer minus int. Just like pointer plus int. */
+ else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
+ return pointer_int_sum (MINUS_EXPR, op0, op1);
+ else
+ common = 1;
+ break;
+
+ case MULT_EXPR:
+ common = 1;
+ break;
+
+ case TRUNC_DIV_EXPR:
+ case CEIL_DIV_EXPR:
+ case FLOOR_DIV_EXPR:
+ case ROUND_DIV_EXPR:
+ case EXACT_DIV_EXPR:
+ /* Floating point division by zero is a legitimate way to obtain
+ infinities and NaNs. */
+ if (warn_div_by_zero && skip_evaluation == 0 && integer_zerop (op1))
+ warning ("division by zero");
+
+ if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE
+ || code0 == COMPLEX_TYPE || code0 == VECTOR_TYPE)
+ && (code1 == INTEGER_TYPE || code1 == REAL_TYPE
+ || code1 == COMPLEX_TYPE || code1 == VECTOR_TYPE))
+ {
+ if (!(code0 == INTEGER_TYPE && code1 == INTEGER_TYPE))
+ resultcode = RDIV_EXPR;
+ else
+ /* Although it would be tempting to shorten always here, that
+ loses on some targets, since the modulo instruction is
+ undefined if the quotient can't be represented in the
+ computation mode. We shorten only if unsigned or if
+ dividing by something we know != -1. */
+ shorten = (TREE_UNSIGNED (TREE_TYPE (orig_op0))
+ || (TREE_CODE (op1) == INTEGER_CST
+ && ! integer_all_onesp (op1)));
+ common = 1;
+ }
+ break;
+
+ case BIT_AND_EXPR:
+ case BIT_ANDTC_EXPR:
+ case BIT_IOR_EXPR:
+ case BIT_XOR_EXPR:
+ if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
+ shorten = -1;
+ else if (code0 == VECTOR_TYPE && code1 == VECTOR_TYPE)
+ common = 1;
+ break;
+
+ case TRUNC_MOD_EXPR:
+ case FLOOR_MOD_EXPR:
+ if (warn_div_by_zero && skip_evaluation == 0 && integer_zerop (op1))
+ warning ("division by zero");
+
+ if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
+ {
+ /* Although it would be tempting to shorten always here, that loses
+ on some targets, since the modulo instruction is undefined if the
+ quotient can't be represented in the computation mode. We shorten
+ only if unsigned or if dividing by something we know != -1. */
+ shorten = (TREE_UNSIGNED (TREE_TYPE (orig_op0))
+ || (TREE_CODE (op1) == INTEGER_CST
+ && ! integer_all_onesp (op1)));
+ common = 1;
+ }
+ break;
+
+ case TRUTH_ANDIF_EXPR:
+ case TRUTH_ORIF_EXPR:
+ case TRUTH_AND_EXPR:
+ case TRUTH_OR_EXPR:
+ case TRUTH_XOR_EXPR:
+ if ((code0 == INTEGER_TYPE || code0 == POINTER_TYPE
+ || code0 == REAL_TYPE || code0 == COMPLEX_TYPE)
+ && (code1 == INTEGER_TYPE || code1 == POINTER_TYPE
+ || code1 == REAL_TYPE || code1 == COMPLEX_TYPE))
+ {
+ /* Result of these operations is always an int,
+ but that does not mean the operands should be
+ converted to ints! */
+ result_type = integer_type_node;
+ op0 = c_common_truthvalue_conversion (op0);
+ op1 = c_common_truthvalue_conversion (op1);
+ converted = 1;
+ }
+ break;
+
+ /* Shift operations: result has same type as first operand;
+ always convert second operand to int.
+ Also set SHORT_SHIFT if shifting rightward. */
+
+ case RSHIFT_EXPR:
+ if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
+ {
+ if (TREE_CODE (op1) == INTEGER_CST && skip_evaluation == 0)
+ {
+ if (tree_int_cst_sgn (op1) < 0)
+ warning ("right shift count is negative");
+ else
+ {
+ if (! integer_zerop (op1))
+ short_shift = 1;
+
+ if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0)
+ warning ("right shift count >= width of type");
+ }
+ }
+
+ /* Use the type of the value to be shifted. */
+ result_type = type0;
+ /* Convert the shift-count to an integer, regardless of size
+ of value being shifted. */
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (op1)) != integer_type_node)
+ op1 = convert (integer_type_node, op1);
+ /* Avoid converting op1 to result_type later. */
+ converted = 1;
+ }
+ break;
+
+ case LSHIFT_EXPR:
+ if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
+ {
+ if (TREE_CODE (op1) == INTEGER_CST && skip_evaluation == 0)
+ {
+ if (tree_int_cst_sgn (op1) < 0)
+ warning ("left shift count is negative");
+
+ else if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0)
+ warning ("left shift count >= width of type");
+ }
+
+ /* Use the type of the value to be shifted. */
+ result_type = type0;
+ /* Convert the shift-count to an integer, regardless of size
+ of value being shifted. */
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (op1)) != integer_type_node)
+ op1 = convert (integer_type_node, op1);
+ /* Avoid converting op1 to result_type later. */
+ converted = 1;
+ }
+ break;
+
+ case RROTATE_EXPR:
+ case LROTATE_EXPR:
+ if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
+ {
+ if (TREE_CODE (op1) == INTEGER_CST && skip_evaluation == 0)
+ {
+ if (tree_int_cst_sgn (op1) < 0)
+ warning ("shift count is negative");
+ else if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0)
+ warning ("shift count >= width of type");
+ }
+
+ /* Use the type of the value to be shifted. */
+ result_type = type0;
+ /* Convert the shift-count to an integer, regardless of size
+ of value being shifted. */
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (op1)) != integer_type_node)
+ op1 = convert (integer_type_node, op1);
+ /* Avoid converting op1 to result_type later. */
+ converted = 1;
+ }
+ break;
+
+ case EQ_EXPR:
+ case NE_EXPR:
+ if (warn_float_equal && (code0 == REAL_TYPE || code1 == REAL_TYPE))
+ warning ("comparing floating point with == or != is unsafe");
+ /* Result of comparison is always int,
+ but don't convert the args to int! */
+ build_type = integer_type_node;
+ if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE
+ || code0 == COMPLEX_TYPE
+ || code0 == VECTOR_TYPE)
+ && (code1 == INTEGER_TYPE || code1 == REAL_TYPE
+ || code1 == COMPLEX_TYPE
+ || code1 == VECTOR_TYPE))
+ short_compare = 1;
+ else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE)
+ {
+ tree tt0 = TREE_TYPE (type0);
+ tree tt1 = TREE_TYPE (type1);
+ /* Anything compares with void *. void * compares with anything.
+ Otherwise, the targets must be compatible
+ and both must be object or both incomplete. */
+ if (comp_target_types (type0, type1, 1))
+ result_type = common_type (type0, type1);
+ else if (VOID_TYPE_P (tt0))
+ {
+ /* op0 != orig_op0 detects the case of something
+ whose value is 0 but which isn't a valid null ptr const. */
+ if (pedantic && (!integer_zerop (op0) || op0 != orig_op0)
+ && TREE_CODE (tt1) == FUNCTION_TYPE)
+ pedwarn ("ISO C forbids comparison of `void *' with function pointer");
+ }
+ else if (VOID_TYPE_P (tt1))
+ {
+ if (pedantic && (!integer_zerop (op1) || op1 != orig_op1)
+ && TREE_CODE (tt0) == FUNCTION_TYPE)
+ pedwarn ("ISO C forbids comparison of `void *' with function pointer");
+ }
+ else
+ pedwarn ("comparison of distinct pointer types lacks a cast");
+
+ if (result_type == NULL_TREE)
+ result_type = ptr_type_node;
+ }
+ else if (code0 == POINTER_TYPE && TREE_CODE (op1) == INTEGER_CST
+ && integer_zerop (op1))
+ result_type = type0;
+ else if (code1 == POINTER_TYPE && TREE_CODE (op0) == INTEGER_CST
+ && integer_zerop (op0))
+ result_type = type1;
+ else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
+ {
+ result_type = type0;
+ pedwarn ("comparison between pointer and integer");
+ }
+ else if (code0 == INTEGER_TYPE && code1 == POINTER_TYPE)
+ {
+ result_type = type1;
+ pedwarn ("comparison between pointer and integer");
+ }
+ break;
+
+ case MAX_EXPR:
+ case MIN_EXPR:
+ if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE)
+ && (code1 == INTEGER_TYPE || code1 == REAL_TYPE))
+ shorten = 1;
+ else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE)
+ {
+ if (comp_target_types (type0, type1, 1))
+ {
+ result_type = common_type (type0, type1);
+ if (pedantic
+ && TREE_CODE (TREE_TYPE (type0)) == FUNCTION_TYPE)
+ pedwarn ("ISO C forbids ordered comparisons of pointers to functions");
+ }
+ else
+ {
+ result_type = ptr_type_node;
+ pedwarn ("comparison of distinct pointer types lacks a cast");
+ }
+ }
+ break;
+
+ case LE_EXPR:
+ case GE_EXPR:
+ case LT_EXPR:
+ case GT_EXPR:
+ build_type = integer_type_node;
+ if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE)
+ && (code1 == INTEGER_TYPE || code1 == REAL_TYPE))
+ short_compare = 1;
+ else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE)
+ {
+ if (comp_target_types (type0, type1, 1))
+ {
+ result_type = common_type (type0, type1);
+ if (!COMPLETE_TYPE_P (TREE_TYPE (type0))
+ != !COMPLETE_TYPE_P (TREE_TYPE (type1)))
+ pedwarn ("comparison of complete and incomplete pointers");
+ else if (pedantic
+ && TREE_CODE (TREE_TYPE (type0)) == FUNCTION_TYPE)
+ pedwarn ("ISO C forbids ordered comparisons of pointers to functions");
+ }
+ else
+ {
+ result_type = ptr_type_node;
+ pedwarn ("comparison of distinct pointer types lacks a cast");
+ }
+ }
+ else if (code0 == POINTER_TYPE && TREE_CODE (op1) == INTEGER_CST
+ && integer_zerop (op1))
+ {
+ result_type = type0;
+ if (pedantic || extra_warnings)
+ pedwarn ("ordered comparison of pointer with integer zero");
+ }
+ else if (code1 == POINTER_TYPE && TREE_CODE (op0) == INTEGER_CST
+ && integer_zerop (op0))
+ {
+ result_type = type1;
+ if (pedantic)
+ pedwarn ("ordered comparison of pointer with integer zero");
+ }
+ else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
+ {
+ result_type = type0;
+ pedwarn ("comparison between pointer and integer");
+ }
+ else if (code0 == INTEGER_TYPE && code1 == POINTER_TYPE)
+ {
+ result_type = type1;
+ pedwarn ("comparison between pointer and integer");
+ }
+ break;
+
+ case UNORDERED_EXPR:
+ case ORDERED_EXPR:
+ case UNLT_EXPR:
+ case UNLE_EXPR:
+ case UNGT_EXPR:
+ case UNGE_EXPR:
+ case UNEQ_EXPR:
+ build_type = integer_type_node;
+ if (code0 != REAL_TYPE || code1 != REAL_TYPE)
+ {
+ error ("unordered comparison on non-floating point argument");
+ return error_mark_node;
+ }
+ common = 1;
+ break;
+
+ default:
+ break;
+ }
+
+ if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE || code0 == COMPLEX_TYPE
+ || code0 == VECTOR_TYPE)
+ &&
+ (code1 == INTEGER_TYPE || code1 == REAL_TYPE || code1 == COMPLEX_TYPE
+ || code1 == VECTOR_TYPE))
+ {
+ int none_complex = (code0 != COMPLEX_TYPE && code1 != COMPLEX_TYPE);
+
+ if (shorten || common || short_compare)
+ result_type = common_type (type0, type1);
+
+ /* For certain operations (which identify themselves by shorten != 0)
+ if both args were extended from the same smaller type,
+ do the arithmetic in that type and then extend.
+
+ shorten !=0 and !=1 indicates a bitwise operation.
+ For them, this optimization is safe only if
+ both args are zero-extended or both are sign-extended.
+ Otherwise, we might change the result.
+ Eg, (short)-1 | (unsigned short)-1 is (int)-1
+ but calculated in (unsigned short) it would be (unsigned short)-1. */
+
+ if (shorten && none_complex)
+ {
+ int unsigned0, unsigned1;
+ tree arg0 = get_narrower (op0, &unsigned0);
+ tree arg1 = get_narrower (op1, &unsigned1);
+ /* UNS is 1 if the operation to be done is an unsigned one. */
+ int uns = TREE_UNSIGNED (result_type);
+ tree type;
+
+ final_type = result_type;
+
+ /* Handle the case that OP0 (or OP1) does not *contain* a conversion
+ but it *requires* conversion to FINAL_TYPE. */
+
+ if ((TYPE_PRECISION (TREE_TYPE (op0))
+ == TYPE_PRECISION (TREE_TYPE (arg0)))
+ && TREE_TYPE (op0) != final_type)
+ unsigned0 = TREE_UNSIGNED (TREE_TYPE (op0));
+ if ((TYPE_PRECISION (TREE_TYPE (op1))
+ == TYPE_PRECISION (TREE_TYPE (arg1)))
+ && TREE_TYPE (op1) != final_type)
+ unsigned1 = TREE_UNSIGNED (TREE_TYPE (op1));
+
+ /* Now UNSIGNED0 is 1 if ARG0 zero-extends to FINAL_TYPE. */
+
+ /* For bitwise operations, signedness of nominal type
+ does not matter. Consider only how operands were extended. */
+ if (shorten == -1)
+ uns = unsigned0;
+
+ /* Note that in all three cases below we refrain from optimizing
+ an unsigned operation on sign-extended args.
+ That would not be valid. */
+
+ /* Both args variable: if both extended in same way
+ from same width, do it in that width.
+ Do it unsigned if args were zero-extended. */
+ if ((TYPE_PRECISION (TREE_TYPE (arg0))
+ < TYPE_PRECISION (result_type))
+ && (TYPE_PRECISION (TREE_TYPE (arg1))
+ == TYPE_PRECISION (TREE_TYPE (arg0)))
+ && unsigned0 == unsigned1
+ && (unsigned0 || !uns))
+ result_type
+ = c_common_signed_or_unsigned_type
+ (unsigned0, common_type (TREE_TYPE (arg0), TREE_TYPE (arg1)));
+ else if (TREE_CODE (arg0) == INTEGER_CST
+ && (unsigned1 || !uns)
+ && (TYPE_PRECISION (TREE_TYPE (arg1))
+ < TYPE_PRECISION (result_type))
+ && (type
+ = c_common_signed_or_unsigned_type (unsigned1,
+ TREE_TYPE (arg1)),
+ int_fits_type_p (arg0, type)))
+ result_type = type;
+ else if (TREE_CODE (arg1) == INTEGER_CST
+ && (unsigned0 || !uns)
+ && (TYPE_PRECISION (TREE_TYPE (arg0))
+ < TYPE_PRECISION (result_type))
+ && (type
+ = c_common_signed_or_unsigned_type (unsigned0,
+ TREE_TYPE (arg0)),
+ int_fits_type_p (arg1, type)))
+ result_type = type;
+ }
+
+ /* Shifts can be shortened if shifting right. */
+
+ if (short_shift)
+ {
+ int unsigned_arg;
+ tree arg0 = get_narrower (op0, &unsigned_arg);
+
+ final_type = result_type;
+
+ if (arg0 == op0 && final_type == TREE_TYPE (op0))
+ unsigned_arg = TREE_UNSIGNED (TREE_TYPE (op0));
+
+ if (TYPE_PRECISION (TREE_TYPE (arg0)) < TYPE_PRECISION (result_type)
+ /* We can shorten only if the shift count is less than the
+ number of bits in the smaller type size. */
+ && compare_tree_int (op1, TYPE_PRECISION (TREE_TYPE (arg0))) < 0
+ /* We cannot drop an unsigned shift after sign-extension. */
+ && (!TREE_UNSIGNED (final_type) || unsigned_arg))
+ {
+ /* Do an unsigned shift if the operand was zero-extended. */
+ result_type
+ = c_common_signed_or_unsigned_type (unsigned_arg,
+ TREE_TYPE (arg0));
+ /* Convert value-to-be-shifted to that type. */
+ if (TREE_TYPE (op0) != result_type)
+ op0 = convert (result_type, op0);
+ converted = 1;
+ }
+ }
+
+ /* Comparison operations are shortened too but differently.
+ They identify themselves by setting short_compare = 1. */
+
+ if (short_compare)
+ {
+ /* Don't write &op0, etc., because that would prevent op0
+ from being kept in a register.
+ Instead, make copies of our local variables and
+ pass the copies by reference, then copy them back afterward. */
+ tree xop0 = op0, xop1 = op1, xresult_type = result_type;
+ enum tree_code xresultcode = resultcode;
+ tree val
+ = shorten_compare (&xop0, &xop1, &xresult_type, &xresultcode);
+
+ if (val != 0)
+ return val;
+
+ op0 = xop0, op1 = xop1;
+ converted = 1;
+ resultcode = xresultcode;
+
+ if (warn_sign_compare && skip_evaluation == 0)
+ {
+ int op0_signed = ! TREE_UNSIGNED (TREE_TYPE (orig_op0));
+ int op1_signed = ! TREE_UNSIGNED (TREE_TYPE (orig_op1));
+ int unsignedp0, unsignedp1;
+ tree primop0 = get_narrower (op0, &unsignedp0);
+ tree primop1 = get_narrower (op1, &unsignedp1);
+
+ xop0 = orig_op0;
+ xop1 = orig_op1;
+ STRIP_TYPE_NOPS (xop0);
+ STRIP_TYPE_NOPS (xop1);
+
+ /* Give warnings for comparisons between signed and unsigned
+ quantities that may fail.
+
+ Do the checking based on the original operand trees, so that
+ casts will be considered, but default promotions won't be.
+
+ Do not warn if the comparison is being done in a signed type,
+ since the signed type will only be chosen if it can represent
+ all the values of the unsigned type. */
+ if (! TREE_UNSIGNED (result_type))
+ /* OK */;
+ /* Do not warn if both operands are the same signedness. */
+ else if (op0_signed == op1_signed)
+ /* OK */;
+ else
+ {
+ tree sop, uop;
+
+ if (op0_signed)
+ sop = xop0, uop = xop1;
+ else
+ sop = xop1, uop = xop0;
+
+ /* Do not warn if the signed quantity is an
+ unsuffixed integer literal (or some static
+ constant expression involving such literals or a
+ conditional expression involving such literals)
+ and it is non-negative. */
+ if (c_tree_expr_nonnegative_p (sop))
+ /* OK */;
+ /* Do not warn if the comparison is an equality operation,
+ the unsigned quantity is an integral constant, and it
+ would fit in the result if the result were signed. */
+ else if (TREE_CODE (uop) == INTEGER_CST
+ && (resultcode == EQ_EXPR || resultcode == NE_EXPR)
+ && int_fits_type_p
+ (uop, c_common_signed_type (result_type)))
+ /* OK */;
+ /* Do not warn if the unsigned quantity is an enumeration
+ constant and its maximum value would fit in the result
+ if the result were signed. */
+ else if (TREE_CODE (uop) == INTEGER_CST
+ && TREE_CODE (TREE_TYPE (uop)) == ENUMERAL_TYPE
+ && int_fits_type_p
+ (TYPE_MAX_VALUE (TREE_TYPE(uop)),
+ c_common_signed_type (result_type)))
+ /* OK */;
+ else
+ warning ("comparison between signed and unsigned");
+ }
+
+ /* Warn if two unsigned values are being compared in a size
+ larger than their original size, and one (and only one) is the
+ result of a `~' operator. This comparison will always fail.
+
+ Also warn if one operand is a constant, and the constant
+ does not have all bits set that are set in the ~ operand
+ when it is extended. */
+
+ if ((TREE_CODE (primop0) == BIT_NOT_EXPR)
+ != (TREE_CODE (primop1) == BIT_NOT_EXPR))
+ {
+ if (TREE_CODE (primop0) == BIT_NOT_EXPR)
+ primop0 = get_narrower (TREE_OPERAND (primop0, 0),
+ &unsignedp0);
+ else
+ primop1 = get_narrower (TREE_OPERAND (primop1, 0),
+ &unsignedp1);
+
+ if (host_integerp (primop0, 0) || host_integerp (primop1, 0))
+ {
+ tree primop;
+ HOST_WIDE_INT constant, mask;
+ int unsignedp, bits;
+
+ if (host_integerp (primop0, 0))
+ {
+ primop = primop1;
+ unsignedp = unsignedp1;
+ constant = tree_low_cst (primop0, 0);
+ }
+ else
+ {
+ primop = primop0;
+ unsignedp = unsignedp0;
+ constant = tree_low_cst (primop1, 0);
+ }
+
+ bits = TYPE_PRECISION (TREE_TYPE (primop));
+ if (bits < TYPE_PRECISION (result_type)
+ && bits < HOST_BITS_PER_WIDE_INT && unsignedp)
+ {
+ mask = (~ (HOST_WIDE_INT) 0) << bits;
+ if ((mask & constant) != mask)
+ warning ("comparison of promoted ~unsigned with constant");
+ }
+ }
+ else if (unsignedp0 && unsignedp1
+ && (TYPE_PRECISION (TREE_TYPE (primop0))
+ < TYPE_PRECISION (result_type))
+ && (TYPE_PRECISION (TREE_TYPE (primop1))
+ < TYPE_PRECISION (result_type)))
+ warning ("comparison of promoted ~unsigned with unsigned");
+ }
+ }
+ }
+ }
+
+ /* At this point, RESULT_TYPE must be nonzero to avoid an error message.
+ If CONVERTED is zero, both args will be converted to type RESULT_TYPE.
+ Then the expression will be built.
+ It will be given type FINAL_TYPE if that is nonzero;
+ otherwise, it will be given type RESULT_TYPE. */
+
+ if (!result_type)
+ {
+ binary_op_error (code);
+ return error_mark_node;
+ }
+
+ if (! converted)
+ {
+ if (TREE_TYPE (op0) != result_type)
+ op0 = convert (result_type, op0);
+ if (TREE_TYPE (op1) != result_type)
+ op1 = convert (result_type, op1);
+ }
+
+ if (build_type == NULL_TREE)
+ build_type = result_type;
+
+ {
+ tree result = build (resultcode, build_type, op0, op1);
+ tree folded;
+
+ /* Treat expressions in initializers specially as they can't trap. */
+ folded = initializer_stack ? fold_initializer (result)
+ : fold (result);
+ if (folded == result)
+ TREE_CONSTANT (folded) = TREE_CONSTANT (op0) & TREE_CONSTANT (op1);
+ if (final_type != 0)
+ return convert (final_type, folded);
+ return folded;
+ }
+}
+
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 95a68bc..32c145e 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -1228,14 +1228,31 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
if (TREE_CODE (arg1) == REAL_CST)
{
+ enum machine_mode mode;
REAL_VALUE_TYPE d1;
REAL_VALUE_TYPE d2;
REAL_VALUE_TYPE value;
- tree t;
+ tree t, type;
d1 = TREE_REAL_CST (arg1);
d2 = TREE_REAL_CST (arg2);
+ type = TREE_TYPE (arg1);
+ mode = TYPE_MODE (type);
+
+ /* Don't perform operation if we honor signaling NaNs and
+ either operand is a NaN. */
+ if (HONOR_SNANS (mode)
+ && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
+ return NULL_TREE;
+
+ /* Don't perform operation if it would raise a division
+ by zero exception. */
+ if (code == RDIV_EXPR
+ && REAL_VALUES_EQUAL (d2, dconst0)
+ && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
+ return NULL_TREE;
+
/* If either operand is a NaN, just return it. Otherwise, set up
for floating-point trap; we return an overflow. */
if (REAL_VALUE_ISNAN (d1))
@@ -1245,9 +1262,7 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
REAL_ARITHMETIC (value, code, d1, d2);
- t = build_real (TREE_TYPE (arg1),
- real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
- value));
+ t = build_real (type, real_value_truncate (mode, value));
TREE_OVERFLOW (t)
= (force_fit_type (t, 0)
@@ -7886,6 +7901,31 @@ fold (tree expr)
} /* switch (code) */
}
+/* Perform constant folding and related simplification of initializer
+ expression EXPR. This behaves identically to "fold" but ignores
+ potential run-time traps and exceptions that fold must preserve. */
+
+tree
+fold_initializer (tree expr)
+{
+ int saved_signaling_nans = flag_signaling_nans;
+ int saved_trapping_math = flag_trapping_math;
+ int saved_trapv = flag_trapv;
+ tree result;
+
+ flag_signaling_nans = 0;
+ flag_trapping_math = 0;
+ flag_trapv = 0;
+
+ result = fold (expr);
+
+ flag_signaling_nans = saved_signaling_nans;
+ flag_trapping_math = saved_trapping_math;
+ flag_trapv = saved_trapv;
+
+ return result;
+}
+
/* Determine if first argument is a multiple of second argument. Return 0 if
it is not, or we cannot easily determine it to be.
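As a hedged usage sketch (the caller below is hypothetical; in this patch the only caller is build_binary_op in c-typeck.c), a front end folding a constant-expression initializer calls fold_initializer where it would otherwise call fold:

    /* Hypothetical front-end fragment; in_initializer_p, lhs and rhs
       are assumed locals.  */
    tree expr = build (RDIV_EXPR, double_type_node, lhs, rhs);
    tree folded = in_initializer_p ? fold_initializer (expr)
                                   : fold (expr);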
diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
index 3b85767..8568529 100644
--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -929,9 +929,13 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode,
f0 = real_value_truncate (mode, f0);
f1 = real_value_truncate (mode, f1);
+ if (HONOR_SNANS (mode)
+ && (REAL_VALUE_ISNAN (f0) || REAL_VALUE_ISNAN (f1)))
+ return 0;
+
if (code == DIV
- && !MODE_HAS_INFINITIES (mode)
- && REAL_VALUES_EQUAL (f1, dconst0))
+ && REAL_VALUES_EQUAL (f1, dconst0)
+ && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
return 0;
REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
diff --git a/gcc/tree.h b/gcc/tree.h
index 04ad1d0..b627a98 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -2715,6 +2715,7 @@ extern void using_eh_for_cleanups (void);
subexpressions are not changed. */
extern tree fold (tree);
+extern tree fold_initializer (tree);
extern tree fold_single_bit_test (enum tree_code, tree, tree, tree);
extern int force_fit_type (tree, int);