author     Richard Guenther <rguenther@suse.de>   2006-12-14 13:12:11 +0000
committer  Richard Biener <rguenth@gcc.gnu.org>   2006-12-14 13:12:11 +0000
commit     9f5396713d9e188bad160c5816f3bfd21b6f32ca (patch)
tree       7529cb235a0fc6106fa9a66695f0d7ae23d8c60f /gcc/fold-const.c
parent     85aef79f75e6be8e8a8d7a1111f6110b98a609ca (diff)
re PR middle-end/30172 (Operations with partly constant complex values not folded)
2006-12-14  Richard Guenther  <rguenther@suse.de>

        PR middle-end/30172
        * fold-const.c (fold_binary): Fold __complex__ ( x, 0 ) +
        __complex__ ( 0, y ) to __complex__ ( x, y ).  Fold
        __complex__ (x, y) * +-I to __complex__ (-+y, +-x).

        * gcc.dg/pr30172-1.c: New testcase.

From-SVN: r119859
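For illustration, here is a minimal C sketch of the kind of source these two folds target. It is written in the spirit of the new testcase but is not a copy of gcc.dg/pr30172-1.c, and the function names are made up. Both folds only fire when SNaNs/NaNs and signed zeros need not be honored (e.g. with -O -fno-signed-zeros -ffinite-math-only), since dropping the zero components is otherwise not value-safe:

/* Illustrative sketch only, not the actual gcc.dg/pr30172-1.c.  */

_Complex double
plus_fold (double x, double y)
{
  /* __complex__ (x, 0) + __complex__ (0, y) should fold to
     __complex__ (x, y), leaving no real arithmetic behind.  */
  return (x + 0.0i) + (y * 1.0i);
}

_Complex double
mult_fold (_Complex double z)
{
  /* z * I should fold to __complex__ (-__imag z, __real z).  */
  return z * 1.0i;
}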
Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r--   gcc/fold-const.c   57
1 file changed, 57 insertions, 0 deletions
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 751ef2e..1874609 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -8755,6 +8755,41 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
                                    fold_convert (type, tem));
             }
+          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
+             to __complex__ ( x, y ).  This is not the same for SNaNs or
+             if signed zeros are involved.  */
+          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
+              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
+              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
+            {
+              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
+              tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
+              tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
+              bool arg0rz = false, arg0iz = false;
+              if ((arg0r && (arg0rz = real_zerop (arg0r)))
+                  || (arg0i && (arg0iz = real_zerop (arg0i))))
+                {
+                  tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
+                  tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
+                  if (arg0rz && arg1i && real_zerop (arg1i))
+                    {
+                      tree rp = arg1r ? arg1r
+                                : build1 (REALPART_EXPR, rtype, arg1);
+                      tree ip = arg0i ? arg0i
+                                : build1 (IMAGPART_EXPR, rtype, arg0);
+                      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+                    }
+                  else if (arg0iz && arg1r && real_zerop (arg1r))
+                    {
+                      tree rp = arg0r ? arg0r
+                                : build1 (REALPART_EXPR, rtype, arg0);
+                      tree ip = arg1i ? arg1i
+                                : build1 (IMAGPART_EXPR, rtype, arg1);
+                      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+                    }
+                }
+            }
+
           if (flag_unsafe_math_optimizations
               && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
               && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
@@ -9207,6 +9242,28 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
                 }
             }
+          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
+             This is not the same for NaNs or if signed zeros are
+             involved.  */
+          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
+              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
+              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
+              && TREE_CODE (arg1) == COMPLEX_CST
+              && real_zerop (TREE_REALPART (arg1)))
+            {
+              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
+              if (real_onep (TREE_IMAGPART (arg1)))
+                return fold_build2 (COMPLEX_EXPR, type,
+                                    negate_expr (fold_build1 (IMAGPART_EXPR,
+                                                              rtype, arg0)),
+                                    fold_build1 (REALPART_EXPR, rtype, arg0));
+              else if (real_minus_onep (TREE_IMAGPART (arg1)))
+                return fold_build2 (COMPLEX_EXPR, type,
+                                    fold_build1 (IMAGPART_EXPR, rtype, arg0),
+                                    negate_expr (fold_build1 (REALPART_EXPR,
+                                                              rtype, arg0)));
+            }
+
           /* Optimize z * conj(z) for floating point complex numbers.
              Guarded by flag_unsafe_math_optimizations as non-finite
              imaginary components don't produce scalar results.  */
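As a quick sanity check on the identity behind the second hunk: for z = a + b*i, z * i = -b + a*i and z * -i = b - a*i, so the multiplication reduces to a swap of the parts plus one negation. The small standalone check below is illustrative only; it is not part of the commit, and the options mentioned in the comment are just one way to satisfy the fold's !HONOR_NANS / !HONOR_SIGNED_ZEROS preconditions (e.g. -ffast-math):

/* Illustrative check of the identity used by the MULT_EXPR fold:
   (a + b*i) * i == -b + a*i and (a + b*i) * -i == b - a*i.  */
#include <assert.h>
#include <complex.h>

int
main (void)
{
  double _Complex z = 3.0 + 4.0 * I;

  double _Complex zi  = z * I;    /* expected: -4 + 3i */
  double _Complex zmi = z * -I;   /* expected:  4 - 3i */

  assert (creal (zi) == -cimag (z) && cimag (zi) == creal (z));
  assert (creal (zmi) == cimag (z) && cimag (zmi) == -creal (z));
  return 0;
}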