diff options
author | Richard Sandiford <richard.sandiford@arm.com> | 2016-05-02 09:39:38 +0000 |
---|---|---|
committer | Richard Sandiford <rsandifo@gcc.gnu.org> | 2016-05-02 09:39:38 +0000 |
commit | 8de73453a42758af02bb23ed58f4b1e78ad11bc7 (patch) | |
tree | ea2f8d60c73117cebcd97dfbc56bb80d7c4ee632 /gcc/tree-ssa-alias.c | |
parent | 032c80e999eac4288ff1b0f752e15c8e7c5cdf51 (diff) | |
download | gcc-8de73453a42758af02bb23ed58f4b1e78ad11bc7.zip gcc-8de73453a42758af02bb23ed58f4b1e78ad11bc7.tar.gz gcc-8de73453a42758af02bb23ed58f4b1e78ad11bc7.tar.bz2 |
Support << and >> for offset_int and widest_int
Following on from the comparison patch, I think it makes sense to
support << and >> for offset_int (int128_t) and widest_int (intNNN_t),
with >> being arithmetic shift. It doesn't make sense to use
logical right shift on a potentially negative offset_int, since
the precision of 128 bits has no meaning on the target.
Tested on x86_64-linux-gnu and aarch64-linux-gnu.
gcc/
* wide-int.h: Update offset_int and widest_int documentation.
(WI_SIGNED_SHIFT_RESULT): New macro.
(wi::binary_shift): Define signed_shift_result_type for
shifts on offset_int- and widest_int-like types.
(generic_wide_int): Support <<= and >>= if << and >> are supported.
* tree.h (int_bit_position): Use shift operators instead of wi::
shifts.
* alias.c (adjust_offset_for_component_ref): Likewise.
* expr.c (get_inner_reference): Likewise.
* fold-const.c (fold_comparison): Likewise.
* gimple-fold.c (fold_nonarray_ctor_reference): Likewise.
* gimple-ssa-strength-reduction.c (restructure_reference): Likewise.
* tree-dfa.c (get_ref_base_and_extent): Likewise.
* tree-ssa-alias.c (indirect_ref_may_alias_decl_p): Likewise.
(stmt_kills_ref_p): Likewise.
* tree-ssa-ccp.c (bit_value_binop_1): Likewise.
* tree-ssa-math-opts.c (find_bswap_or_nop_load): Likewise.
* tree-ssa-sccvn.c (copy_reference_ops_from_ref): Likewise.
(ao_ref_init_from_vn_reference): Likewise.
gcc/cp/
* init.c (build_new_1): Use shift operators instead of wi:: shifts.
From-SVN: r235720
Diffstat (limited to 'gcc/tree-ssa-alias.c')
-rw-r--r-- | gcc/tree-ssa-alias.c | 21 |
1 file changed, 9 insertions, 12 deletions
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c index 4ffefaf..58920e0 100644 --- a/gcc/tree-ssa-alias.c +++ b/gcc/tree-ssa-alias.c @@ -1101,7 +1101,7 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1, /* The offset embedded in MEM_REFs can be negative. Bias them so that the resulting offset adjustment is positive. */ offset_int moff = mem_ref_offset (base1); - moff = wi::lshift (moff, LOG2_BITS_PER_UNIT); + moff <<= LOG2_BITS_PER_UNIT; if (wi::neg_p (moff)) offset2p += (-moff).to_short_addr (); else @@ -1173,7 +1173,7 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1, || TREE_CODE (dbase2) == TARGET_MEM_REF) { offset_int moff = mem_ref_offset (dbase2); - moff = wi::lshift (moff, LOG2_BITS_PER_UNIT); + moff <<= LOG2_BITS_PER_UNIT; if (wi::neg_p (moff)) doffset1 -= (-moff).to_short_addr (); else @@ -1271,13 +1271,13 @@ indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1, /* The offset embedded in MEM_REFs can be negative. Bias them so that the resulting offset adjustment is positive. 
*/ moff = mem_ref_offset (base1); - moff = wi::lshift (moff, LOG2_BITS_PER_UNIT); + moff <<= LOG2_BITS_PER_UNIT; if (wi::neg_p (moff)) offset2 += (-moff).to_short_addr (); else offset1 += moff.to_shwi (); moff = mem_ref_offset (base2); - moff = wi::lshift (moff, LOG2_BITS_PER_UNIT); + moff <<= LOG2_BITS_PER_UNIT; if (wi::neg_p (moff)) offset1 += (-moff).to_short_addr (); else @@ -2358,10 +2358,10 @@ stmt_kills_ref_p (gimple *stmt, ao_ref *ref) TREE_OPERAND (ref->base, 1))) { offset_int off1 = mem_ref_offset (base); - off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT); + off1 <<= LOG2_BITS_PER_UNIT; off1 += offset; offset_int off2 = mem_ref_offset (ref->base); - off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT); + off2 <<= LOG2_BITS_PER_UNIT; off2 += ref_offset; if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2)) { @@ -2432,18 +2432,15 @@ stmt_kills_ref_p (gimple *stmt, ao_ref *ref) if (TREE_CODE (rbase) != MEM_REF) return false; // Compare pointers. - offset += wi::lshift (mem_ref_offset (base), - LOG2_BITS_PER_UNIT); - roffset += wi::lshift (mem_ref_offset (rbase), - LOG2_BITS_PER_UNIT); + offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT; + roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT; base = TREE_OPERAND (base, 0); rbase = TREE_OPERAND (rbase, 0); } if (base == rbase && offset <= roffset && (roffset + ref->max_size - <= offset + wi::lshift (wi::to_offset (len), - LOG2_BITS_PER_UNIT))) + <= offset + (wi::to_offset (len) << LOG2_BITS_PER_UNIT))) return true; break; } |