author     Roger Sayle <roger@eyesopen.com>    2003-08-14 20:53:26 +0000
committer  Roger Sayle <sayle@gcc.gnu.org>     2003-08-14 20:53:26 +0000
commit     8ab49fef1f21ff68d39a28b1ddc98d4822603516 (patch)
tree       9a1c805b0d9f0abdd6f5f935e7f53f0fb1091c08 /gcc/fold-const.c
parent     74bed51bf60b6c4a51a05b6fa54cde5be2194b40 (diff)
fold-const.c (negate_expr_p): MULT_EXPRs and RDIV_EXPRs are easy to negate if either operand is easy to negate...
* fold-const.c (negate_expr_p): MULT_EXPRs and RDIV_EXPRs are easy
to negate if either operand is easy to negate, if we don't care
about sign-dependent rounding.
(negate_expr): Make the logic to negate a REAL_CST explicit.
Attempt to negate a MULT_EXPR or RDIV_EXPR by negating an operand
that's easy to negate, if we don't honor sign-dependent rounding.
(fold <MULT_EXPR>): Optimize -A * B as A * -B if B is easy to
negate, and the symmetric A * -B as -A * B if A is easy to negate.
(fold <RDIV_EXPR>): Likewise, optimize -A/B and C/-D as A/-B and
-C/D if B and C are cheap to negate.  Add an explicit rule to
optimize X/-1.0 as -X when we don't care about signaling NaNs.

From-SVN: r70455
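As a hypothetical illustration (not part of the patch; the function names and
constants below are invented), these are the kinds of source expressions the
new rules let fold rewrite, subject to the HONOR_SIGN_DEPENDENT_ROUNDING and
HONOR_SNANS checks visible in the diff:

/* Illustrative sketch only -- not from this commit.  */

double mul_both_negated (double a, double b)
{
  return -a * -b;    /* (-A) * (-B) is rewritten as A * B.  */
}

double mul_negated_by_const (double a)
{
  return -a * 2.0;   /* -A * B becomes A * -B when B is easy to negate,
                        here a * -2.0.  */
}

double div_both_negated (double a, double b)
{
  return -a / -b;    /* (-A) / (-B) is rewritten as A / B.  */
}

double div_by_minus_one (double x)
{
  return x / -1.0;   /* X / -1.0 becomes -X when signaling NaNs need not
                        be honored (the default).  */
}

Which of these fire on a given target depends on the sign-dependent-rounding
and signaling-NaN checks guarding the new cases below.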
Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r--  gcc/fold-const.c  67
1 file changed, 62 insertions, 5 deletions
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 9db9f2e..11d0f9d 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -847,6 +847,18 @@ negate_expr_p (tree t)
/* We can't turn -(A-B) into B-A when we honor signed zeros. */
return ! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations;
+ case MULT_EXPR:
+ if (TREE_UNSIGNED (TREE_TYPE (t)))
+ break;
+
+ /* Fall through. */
+
+ case RDIV_EXPR:
+ if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
+ return negate_expr_p (TREE_OPERAND (t, 1))
+ || negate_expr_p (TREE_OPERAND (t, 0));
+ break;
+
default:
break;
}
@@ -871,13 +883,19 @@ negate_expr (tree t)
switch (TREE_CODE (t))
{
case INTEGER_CST:
- case REAL_CST:
if (! TREE_UNSIGNED (type)
&& 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
&& ! TREE_OVERFLOW (tem))
return tem;
break;
+ case REAL_CST:
+ tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
+ /* Two's complement FP formats, such as c4x, may overflow. */
+ if (! TREE_OVERFLOW (tem))
+ return convert (type, tem);
+ break;
+
case NEGATE_EXPR:
return convert (type, TREE_OPERAND (t, 0));
@@ -890,6 +908,30 @@ negate_expr (tree t)
TREE_OPERAND (t, 0))));
break;
+ case MULT_EXPR:
+ if (TREE_UNSIGNED (TREE_TYPE (t)))
+ break;
+
+ /* Fall through. */
+
+ case RDIV_EXPR:
+ if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
+ {
+ tem = TREE_OPERAND (t, 1);
+ if (negate_expr_p (tem))
+ return convert (type,
+ fold (build (TREE_CODE (t), TREE_TYPE (t),
+ TREE_OPERAND (t, 0),
+ negate_expr (tem))));
+ tem = TREE_OPERAND (t, 0);
+ if (negate_expr_p (tem))
+ return convert (type,
+ fold (build (TREE_CODE (t), TREE_TYPE (t),
+ negate_expr (tem),
+ TREE_OPERAND (t, 1))));
+ }
+ break;
+
default:
break;
}
@@ -5965,8 +6007,13 @@ fold (tree expr)
case MULT_EXPR:
/* (-A) * (-B) -> A * B */
- if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
- return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
+ if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
+ return fold (build (MULT_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ negate_expr (arg1)));
+ if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
+ return fold (build (MULT_EXPR, type,
+ negate_expr (arg0),
TREE_OPERAND (arg1, 0)));
if (! FLOAT_TYPE_P (type))
@@ -6315,8 +6362,13 @@ fold (tree expr)
return t;
/* (-A) / (-B) -> A / B */
- if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
- return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
+ if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
+ return fold (build (RDIV_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ negate_expr (arg1)));
+ if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
+ return fold (build (RDIV_EXPR, type,
+ negate_expr (arg0),
TREE_OPERAND (arg1, 0)));
/* In IEEE floating point, x/1 is not equivalent to x for snans. */
@@ -6324,6 +6376,11 @@ fold (tree expr)
&& real_onep (arg1))
return non_lvalue (convert (type, arg0));
+ /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
+ if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
+ && real_minus_onep (arg1))
+ return non_lvalue (convert (type, negate_expr (arg0)));
+
/* If ARG1 is a constant, we can convert this to a multiply by the
reciprocal. This does not have the same rounding properties,
so only do this if -funsafe-math-optimizations. We can actually