diff options
author | Richard Biener <rguenther@suse.de> | 2016-05-11 10:24:11 +0000 |
---|---|---|
committer | Richard Biener <rguenth@gcc.gnu.org> | 2016-05-11 10:24:11 +0000 |
commit | ebc1b29edb3428dd18e76cba8626625bfb184f1d (patch) | |
tree | c56af27e777a33142ec5005ae7600ac16297fad4 /gcc/fold-const.c | |
parent | 98ccd1d7ab1d9a0c99cce0d1c05ddcda3659dd26 (diff) | |
download | gcc-ebc1b29edb3428dd18e76cba8626625bfb184f1d.zip gcc-ebc1b29edb3428dd18e76cba8626625bfb184f1d.tar.gz gcc-ebc1b29edb3428dd18e76cba8626625bfb184f1d.tar.bz2 |
re PR middle-end/71002 (-fstrict-aliasing breaks Boost's short string optimization implementation)
2016-05-11 Richard Biener <rguenther@suse.de>
PR middle-end/71002
* alias.c (reference_alias_ptr_type): Preserve alias-set zero
if the langhook insists on it.
* fold-const.c (make_bit_field_ref): Add arg for the original
reference and preserve its alias-set.
(decode_field_reference): Take exp by reference and adjust it
to the original memory reference.
(optimize_bit_field_compare): Adjust callers.
(fold_truth_andor_1): Likewise.
* gimplify.c (gimplify_expr): Adjust in-SSA form test.
* g++.dg/torture/pr71002.C: New testcase.
From-SVN: r236117
Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r-- | gcc/fold-const.c | 52 |
1 file changed, 30 insertions, 22 deletions
diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 0ef48dd..c4346de 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -117,14 +117,8 @@ static enum tree_code compcode_to_comparison (enum comparison_code); static int operand_equal_for_comparison_p (tree, tree, tree); static int twoval_comparison_p (tree, tree *, tree *, int *); static tree eval_subst (location_t, tree, tree, tree, tree, tree); -static tree make_bit_field_ref (location_t, tree, tree, - HOST_WIDE_INT, HOST_WIDE_INT, int, int); static tree optimize_bit_field_compare (location_t, enum tree_code, tree, tree, tree); -static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *, - HOST_WIDE_INT *, - machine_mode *, int *, int *, int *, - tree *, tree *); static int simple_operand_p (const_tree); static bool simple_operand_p_2 (tree); static tree range_binop (enum tree_code, tree, tree, int, tree, int); @@ -3803,15 +3797,23 @@ distribute_real_division (location_t loc, enum tree_code code, tree type, /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero - and uses reverse storage order if REVERSEP is nonzero. */ + and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER + is the original memory reference used to preserve the alias set of + the access. */ static tree -make_bit_field_ref (location_t loc, tree inner, tree type, +make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp, int reversep) { tree result, bftype; + if (get_alias_set (inner) != get_alias_set (orig_inner)) + inner = fold_build2 (MEM_REF, TREE_TYPE (inner), + build_fold_addr_expr (inner), + build_int_cst + (reference_alias_ptr_type (orig_inner), 0)); + if (bitpos == 0 && !reversep) { tree size = TYPE_SIZE (TREE_TYPE (inner)); @@ -3937,13 +3939,13 @@ optimize_bit_field_compare (location_t loc, enum tree_code code, and return. 
*/ return fold_build2_loc (loc, code, compare_type, fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, - make_bit_field_ref (loc, linner, + make_bit_field_ref (loc, linner, lhs, unsigned_type, nbitsize, nbitpos, 1, lreversep), mask), fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, - make_bit_field_ref (loc, rinner, + make_bit_field_ref (loc, rinner, rhs, unsigned_type, nbitsize, nbitpos, 1, rreversep), @@ -3988,8 +3990,8 @@ optimize_bit_field_compare (location_t loc, enum tree_code code, /* Make a new bitfield reference, shift the constant over the appropriate number of bits and mask it with the computed mask (in case this was a signed field). If we changed it, make a new one. */ - lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1, - lreversep); + lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type, + nbitsize, nbitpos, 1, lreversep); rhs = const_binop (BIT_AND_EXPR, const_binop (LSHIFT_EXPR, @@ -4028,11 +4030,12 @@ optimize_bit_field_compare (location_t loc, enum tree_code code, do anything with. */ static tree -decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize, +decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize, HOST_WIDE_INT *pbitpos, machine_mode *pmode, int *punsignedp, int *preversep, int *pvolatilep, tree *pmask, tree *pand_mask) { + tree exp = *exp_; tree outer_type = 0; tree and_mask = 0; tree mask, inner, offset; @@ -4069,6 +4072,8 @@ decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize, || TREE_CODE (inner) == PLACEHOLDER_EXPR) return 0; + *exp_ = exp; + /* If the number of bits in the reference is the same as the bitsize of the outer type, then the outer type gives the signedness. Otherwise (in case of a small bitfield) the signedness is unchanged. 
*/ @@ -5677,19 +5682,19 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type, ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0; volatilep = 0; - ll_inner = decode_field_reference (loc, ll_arg, + ll_inner = decode_field_reference (loc, &ll_arg, &ll_bitsize, &ll_bitpos, &ll_mode, &ll_unsignedp, &ll_reversep, &volatilep, &ll_mask, &ll_and_mask); - lr_inner = decode_field_reference (loc, lr_arg, + lr_inner = decode_field_reference (loc, &lr_arg, &lr_bitsize, &lr_bitpos, &lr_mode, &lr_unsignedp, &lr_reversep, &volatilep, &lr_mask, &lr_and_mask); - rl_inner = decode_field_reference (loc, rl_arg, + rl_inner = decode_field_reference (loc, &rl_arg, &rl_bitsize, &rl_bitpos, &rl_mode, &rl_unsignedp, &rl_reversep, &volatilep, &rl_mask, &rl_and_mask); - rr_inner = decode_field_reference (loc, rr_arg, + rr_inner = decode_field_reference (loc, &rr_arg, &rr_bitsize, &rr_bitpos, &rr_mode, &rr_unsignedp, &rr_reversep, &volatilep, &rr_mask, &rr_and_mask); @@ -5851,12 +5856,14 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type, lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask); if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos) { - lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, + lhs = make_bit_field_ref (loc, ll_inner, ll_arg, + lntype, lnbitsize, lnbitpos, ll_unsignedp || rl_unsignedp, ll_reversep); if (! all_ones_mask_p (ll_mask, lnbitsize)) lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask); - rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos, + rhs = make_bit_field_ref (loc, lr_inner, lr_arg, + rntype, rnbitsize, rnbitpos, lr_unsignedp || rr_unsignedp, lr_reversep); if (! 
all_ones_mask_p (lr_mask, rnbitsize)) rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask); @@ -5878,11 +5885,11 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type, { tree type; - lhs = make_bit_field_ref (loc, ll_inner, lntype, + lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype, ll_bitsize + rl_bitsize, MIN (ll_bitpos, rl_bitpos), ll_unsignedp, ll_reversep); - rhs = make_bit_field_ref (loc, lr_inner, rntype, + rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype, lr_bitsize + rr_bitsize, MIN (lr_bitpos, rr_bitpos), lr_unsignedp, lr_reversep); @@ -5947,7 +5954,8 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type, reference we will make. Unless the mask is all ones the width of that field, perform the mask operation. Then compare with the merged constant. */ - result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, + result = make_bit_field_ref (loc, ll_inner, ll_arg, + lntype, lnbitsize, lnbitpos, ll_unsignedp || rl_unsignedp, ll_reversep); ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); |