author     Jakub Jelinek <jakub@redhat.com>        2011-05-31 21:14:21 +0200
committer  Jakub Jelinek <jakub@gcc.gnu.org>       2011-05-31 21:14:21 +0200
commit     509f4495ee4dedc2a27f9f8f749a0507ef3f1938
tree       2f0b491691c28732618def0868a08c868f96b580
parent     7351d8da1bdd94d04d276ce75e11897bbc8e3832
cselib.c (promote_debug_loc): Allow l->next non-NULL for cselib_preserve_constants.
* cselib.c (promote_debug_loc): Allow l->next non-NULL for
cselib_preserve_constants.
(cselib_lookup_1): If cselib_preserve_constants,
a new VALUE is being created for REG and there is a VALUE for the
same register in wider mode, add another loc with lowpart SUBREG of
the wider VALUE.
(cselib_subst_to_values): Handle ENTRY_VALUE.
* var-tracking.c (replace_expr_with_values): Return NULL for
ENTRY_VALUE too.
* dwarf2out.c (convert_descriptor_to_signed): New function.
(mem_loc_descriptor) <case ZERO_EXTEND>: Optimize using DW_OP_and
instead of two shifts.
(mem_loc_descriptor) <do_shift>: ZERO_EXTEND second argument to
the right mode if needed.
(mem_loc_descriptor) <case MOD>: For typed ops just use DW_OP_mod.
(mem_loc_descriptor) <case UNSIGNED_FIX>: Use
convert_descriptor_to_signed.
(mem_loc_descriptor) <case UDIV, CLZ, CTZ, FFS, POPCOUNT, PARITY,
BSWAP, ROTATE, ROTATERT>: Handle these rtls.
* gcc.dg/guality/bswaptest.c: New test.
* gcc.dg/guality/clztest.c: New test.
* gcc.dg/guality/ctztest.c: New test.
* gcc.dg/guality/rotatetest.c: New test.
From-SVN: r174508
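
For readers who do not think in DWARF opcodes, here is a minimal C sketch of what three of the new mem_loc_descriptor expansions compute. It is illustrative only, is not part of the patch, and the helper names (zero_extend_si, clz_di, rotate_si) are made up; the comments note which DWARF operations each step corresponds to, assuming a 64-bit DWARF2_ADDR_SIZE target such as x86_64.

#include <stdint.h>

/* ZERO_EXTEND of an SImode value: the patch now emits a single
   "constMASK DW_OP_and" instead of a shift-left/shift-right pair.  */
static uint64_t
zero_extend_si (uint64_t x)
{
  return x & 0xffffffffULL;   /* GET_MODE_MASK (SImode); DW_OP_and */
}

/* CLZ on a DImode value: the new DWARF loop shifts left and counts
   until the most significant bit is set, with a fixed result for 0.  */
static uint64_t
clz_di (uint64_t x)
{
  uint64_t count = 0;                        /* const0 */
  if (x == 0)                                /* DW_OP_dup DW_OP_bra */
    return 64;                               /* CLZ_DEFINED_VALUE_AT_ZERO
                                                fallback: GET_MODE_BITSIZE */
  while ((x & 0x8000000000000000ULL) == 0)   /* constMSB DW_OP_and DW_OP_bra */
    {
      x <<= 1;                               /* const1 DW_OP_shl */
      count += 1;                            /* DW_OP_plus_uconst <1> */
    }
  return count;
}

/* ROTATE of an SImode value by n bits, evaluated in 64-bit stack slots:
   both intermediate results are masked back to 32 bits, matching the
   optional constMASK steps in the new ROTATE descriptor.  */
static uint64_t
rotate_si (uint64_t x, uint64_t n)
{
  const uint64_t mask = 0xffffffffULL;   /* GET_MODE_MASK (SImode) */
  uint64_t hi = (x << n) & mask;         /* DW_OP_shl; constMASK DW_OP_and */
  uint64_t lo = (x & mask) >> (32 - n);  /* DW_OP_neg; DW_OP_plus_uconst <32>;
                                            DW_OP_shr */
  return hi | lo;                        /* DW_OP_or */
}

int
main (void)
{
  return (zero_extend_si (0x123456789abcdef0ULL) == 0x9abcdef0ULL
          && clz_di (0x123456ULL) == 43
          && rotate_si (0x12345678ULL, 12) == 0x45678123ULL) ? 0 : 1;
}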
-rw-r--r--  gcc/ChangeLog                              |  20
-rw-r--r--  gcc/cselib.c                               |  45
-rw-r--r--  gcc/dwarf2out.c                            | 470
-rw-r--r--  gcc/testsuite/ChangeLog                    |   5
-rw-r--r--  gcc/testsuite/gcc.dg/guality/bswaptest.c   |  32
-rw-r--r--  gcc/testsuite/gcc.dg/guality/clztest.c     |  33
-rw-r--r--  gcc/testsuite/gcc.dg/guality/ctztest.c     |  33
-rw-r--r--  gcc/testsuite/gcc.dg/guality/rotatetest.c  |  76
-rw-r--r--  gcc/var-tracking.c                         |   2
9 files changed, 686 insertions, 30 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog index 0c64540..ed0fb81 100644 --- a/gcc/ChangeLog +++ b/gcc/ChangeLog @@ -1,5 +1,25 @@ 2011-05-31 Jakub Jelinek <jakub@redhat.com> + * cselib.c (promote_debug_loc): Allow l->next non-NULL for + cselib_preserve_constants. + (cselib_lookup_1): If cselib_preserve_constants, + a new VALUE is being created for REG and there is a VALUE for the + same register in wider mode, add another loc with lowpart SUBREG of + the wider VALUE. + (cselib_subst_to_values): Handle ENTRY_VALUE. + * var-tracking.c (replace_expr_with_values): Return NULL for + ENTRY_VALUE too. + * dwarf2out.c (convert_descriptor_to_signed): New function. + (mem_loc_descriptor) <case ZERO_EXTEND>: Optimize using DW_OP_and + instead of two shifts. + (mem_loc_descriptor) <do_shift>: ZERO_EXTEND second argument to + the right mode if needed. + (mem_loc_descriptor) <case MOD>: For typed ops just use DW_OP_mod. + (mem_loc_descriptor) <case UNSIGNED_FIX>: Use + convert_descriptor_to_signed. + (mem_loc_descriptor) <case UDIV, CLZ, CTZ, FFS, POPCOUNT, PARITY, + BSWAP, ROTATE, ROTATERT>: Handle these rtls. + PR target/48688 * config/i386/i386.md (*lea_general_4): New define_insn_and_split. diff --git a/gcc/cselib.c b/gcc/cselib.c index 56e6d178..e0697ec 100644 --- a/gcc/cselib.c +++ b/gcc/cselib.c @@ -257,7 +257,7 @@ promote_debug_loc (struct elt_loc_list *l) { n_debug_values--; l->setting_insn = cselib_current_insn; - gcc_assert (!l->next); + gcc_assert (!l->next || cselib_preserve_constants); } } @@ -1719,6 +1719,12 @@ cselib_subst_to_values (rtx x, enum machine_mode memmode) } return e->val_rtx; + case ENTRY_VALUE: + e = cselib_lookup (x, GET_MODE (x), 0, memmode); + if (! e) + break; + return e->val_rtx; + case CONST_DOUBLE: case CONST_VECTOR: case CONST_INT: @@ -1843,6 +1849,43 @@ cselib_lookup_1 (rtx x, enum machine_mode mode, used_regs[n_used_regs++] = i; REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL); } + else if (cselib_preserve_constants + && GET_MODE_CLASS (mode) == MODE_INT) + { + /* During var-tracking, try harder to find equivalences + for SUBREGs. If a setter sets say a DImode register + and user uses that register only in SImode, add a lowpart + subreg location. 
*/ + struct elt_list *lwider = NULL; + l = REG_VALUES (i); + if (l && l->elt == NULL) + l = l->next; + for (; l; l = l->next) + if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT + && GET_MODE_SIZE (GET_MODE (l->elt->val_rtx)) + > GET_MODE_SIZE (mode) + && (lwider == NULL + || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx)) + < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx)))) + { + struct elt_loc_list *el; + if (i < FIRST_PSEUDO_REGISTER + && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1) + continue; + for (el = l->elt->locs; el; el = el->next) + if (!REG_P (el->loc)) + break; + if (el) + lwider = l; + } + if (lwider) + { + rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx, + GET_MODE (lwider->elt->val_rtx)); + if (sub) + e->locs->next = new_elt_loc_list (e->locs->next, sub); + } + } REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e); slot = cselib_find_slot (x, e->hash, INSERT, memmode); *slot = e; diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c index 9a415e9..58a622c 100644 --- a/gcc/dwarf2out.c +++ b/gcc/dwarf2out.c @@ -13824,6 +13824,37 @@ base_type_for_mode (enum machine_mode mode, bool unsignedp) return type_die; } +/* For OP descriptor assumed to be in unsigned MODE, convert it to a signed + type matching MODE, or, if MODE is narrower than DWARF2_ADDR_SIZE, signed + type matching DWARF2_ADDR_SIZE. Return NULL if the conversion is not + possible. */ + +static dw_loc_descr_ref +convert_descriptor_to_signed (enum machine_mode mode, dw_loc_descr_ref op) +{ + enum machine_mode outer_mode = mode; + dw_die_ref type_die; + dw_loc_descr_ref cvt; + + if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE) + { + outer_mode = mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, + MODE_INT, 0); + if (outer_mode == BLKmode + || GET_MODE_SIZE (outer_mode) != DWARF2_ADDR_SIZE) + return NULL; + } + type_die = base_type_for_mode (outer_mode, 0); + if (type_die == NULL) + return NULL; + cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0); + cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; + cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; + cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; + add_loc_descr (&op, cvt); + return op; +} + /* The following routine converts the RTL for a variable or parameter (resident in memory) into an equivalent Dwarf representation of a mechanism for getting the address of that same variable onto the top of a @@ -13986,6 +14017,21 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode, mem_mode, VAR_INIT_STATUS_INITIALIZED); if (op0 == 0) break; + else if (GET_CODE (rtl) == ZERO_EXTEND + && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE + && GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0))) + < HOST_BITS_PER_WIDE_INT + /* If DW_OP_const{1,2,4}u won't be used, it is shorter + to expand zero extend as two shifts instead of + masking. */ + && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= 4) + { + enum machine_mode imode = GET_MODE (XEXP (rtl, 0)); + mem_loc_result = op0; + add_loc_descr (&mem_loc_result, + int_loc_descriptor (GET_MODE_MASK (imode))); + add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0)); + } else if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE) { int shift = DWARF2_ADDR_SIZE @@ -14239,10 +14285,15 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode, do_shift: op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, VAR_INIT_STATUS_INITIALIZED); - op1 = mem_loc_descriptor (XEXP (rtl, 1), - GET_MODE (XEXP (rtl, 1)) == VOIDmode - ? 
mode : GET_MODE (XEXP (rtl, 1)), mem_mode, - VAR_INIT_STATUS_INITIALIZED); + { + rtx rtlop1 = XEXP (rtl, 1); + if (GET_MODE (rtlop1) != VOIDmode + && GET_MODE_BITSIZE (GET_MODE (rtlop1)) + < GET_MODE_BITSIZE (mode)) + rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1); + op1 = mem_loc_descriptor (rtlop1, mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + } if (op0 == 0 || op1 == 0) break; @@ -14279,6 +14330,16 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode, break; case MOD: + if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE && !dwarf_strict) + { + /* If MODE is wider than DWARF2_ADDR_SIZE, mem_loc_descriptor + should return signed typed values and therefore DW_OP_mod + won't be unsigned as it defaults for untyped stack values, + but signed. */ + op = DW_OP_mod; + goto do_binop; + } + op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, VAR_INIT_STATUS_INITIALIZED); op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, @@ -14296,6 +14357,38 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode, add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0)); break; + case UDIV: + if (!dwarf_strict && GET_MODE_CLASS (mode) == MODE_INT) + { + dw_die_ref type_die; + dw_loc_descr_ref cvt; + + type_die = base_type_for_mode (mode, 1); + if (type_die == NULL) + break; + op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op0 == 0 || op1 == 0) + break; + cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0); + cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; + cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; + cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; + add_loc_descr (&op0, cvt); + cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0); + cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; + cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; + cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; + add_loc_descr (&op1, cvt); + mem_loc_result = op0; + add_loc_descr (&mem_loc_result, op1); + add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0)); + mem_loc_result = convert_descriptor_to_signed (mode, mem_loc_result); + } + break; + case NOT: op = DW_OP_not; goto do_unop; @@ -14812,31 +14905,359 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode, && (GET_CODE (rtl) == UNSIGNED_FIX || GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)) { - enum machine_mode outer_mode = mode; - if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE) - { - outer_mode = mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, - MODE_INT, 0); - if (outer_mode == BLKmode - || GET_MODE_SIZE (outer_mode) != DWARF2_ADDR_SIZE) - break; - } - type_die = base_type_for_mode (outer_mode, 0); - if (type_die == NULL) + op0 = convert_descriptor_to_signed (mode, op0); + if (op0 == NULL) break; - cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0); - cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; - cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; - cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; - add_loc_descr (&op0, cvt); } mem_loc_result = op0; } break; - case COMPARE: + case CLZ: + case CTZ: + case FFS: + /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value, + const0 is DW_OP_lit0 or corresponding typed constant, + const1 is DW_OP_lit1 or corresponding typed constant + and constMSB is constant with just the MSB bit set + for the mode): + DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4> + L1: const0 DW_OP_swap + L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl + DW_OP_swap 
DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> + L3: DW_OP_drop + L4: DW_OP_nop + + CTZ is similar: + DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4> + L1: const0 DW_OP_swap + L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr + DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> + L3: DW_OP_drop + L4: DW_OP_nop + + FFS is similar: + DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4> + L1: const1 DW_OP_swap + L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr + DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> + L3: DW_OP_drop + L4: DW_OP_nop */ + if (GET_MODE_CLASS (mode) == MODE_INT + && GET_MODE (XEXP (rtl, 0)) == mode + && (GET_CODE (rtl) != CLZ + || GET_MODE_BITSIZE (mode) <= 2 * HOST_BITS_PER_WIDE_INT)) + { + HOST_WIDE_INT valv; + dw_loc_descr_ref l1jump, l1label; + dw_loc_descr_ref l2jump, l2label; + dw_loc_descr_ref l3jump, l3label; + dw_loc_descr_ref l4jump, l4label; + rtx msb; + + op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op0 == NULL) + break; + if (GET_CODE (rtl) == CLZ) + { + if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv)) + valv = GET_MODE_BITSIZE (mode); + } + else if (GET_CODE (rtl) == FFS) + valv = 0; + else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv)) + valv = GET_MODE_BITSIZE (mode); + add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0)); + l1jump = new_loc_descr (DW_OP_bra, 0, 0); + add_loc_descr (&op0, l1jump); + add_loc_descr (&op0, new_loc_descr (DW_OP_drop, 0, 0)); + op1 = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op1 == NULL) + break; + add_loc_descr (&op0, op1); + l4jump = new_loc_descr (DW_OP_skip, 0, 0); + add_loc_descr (&op0, l4jump); + l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS + ? const1_rtx : const0_rtx, + mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (l1label == NULL) + break; + add_loc_descr (&op0, l1label); + add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0)); + l2label = new_loc_descr (DW_OP_dup, 0, 0); + add_loc_descr (&op0, l2label); + if (GET_CODE (rtl) != CLZ) + msb = const1_rtx; + else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) + msb = GEN_INT ((unsigned HOST_WIDE_INT) 1 + << (GET_MODE_BITSIZE (mode) - 1)); + else + msb = immed_double_const (0, (unsigned HOST_WIDE_INT) 1 + << (GET_MODE_BITSIZE (mode) + - HOST_BITS_PER_WIDE_INT - 1), + mode); + if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0) + op1 = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32 + ? DW_OP_const4u + : HOST_BITS_PER_WIDE_INT == 64 + ? DW_OP_const8u : DW_OP_constu, + INTVAL (msb), 0); + else + op1 = mem_loc_descriptor (msb, mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op1 == NULL) + break; + add_loc_descr (&op0, op1); + add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0)); + l3jump = new_loc_descr (DW_OP_bra, 0, 0); + add_loc_descr (&op0, l3jump); + op1 = mem_loc_descriptor (const1_rtx, mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op1 == NULL) + break; + add_loc_descr (&op0, op1); + add_loc_descr (&op0, new_loc_descr (GET_CODE (rtl) == CLZ + ? 
DW_OP_shl : DW_OP_shr, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, 1, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0)); + l2jump = new_loc_descr (DW_OP_skip, 0, 0); + add_loc_descr (&op0, l2jump); + l3label = new_loc_descr (DW_OP_drop, 0, 0); + add_loc_descr (&op0, l3label); + l4label = new_loc_descr (DW_OP_nop, 0, 0); + add_loc_descr (&op0, l4label); + l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc; + l1jump->dw_loc_oprnd1.v.val_loc = l1label; + l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc; + l2jump->dw_loc_oprnd1.v.val_loc = l2label; + l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc; + l3jump->dw_loc_oprnd1.v.val_loc = l3label; + l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc; + l4jump->dw_loc_oprnd1.v.val_loc = l4label; + mem_loc_result = op0; + } + break; + + case POPCOUNT: + case PARITY: + /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant, + const1 is DW_OP_lit1 or corresponding typed constant): + const0 DW_OP_swap + L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and + DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1> + L2: DW_OP_drop + + PARITY is similar: + L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and + DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1> + L2: DW_OP_drop */ + if (GET_MODE_CLASS (mode) == MODE_INT + && GET_MODE (XEXP (rtl, 0)) == mode) + { + dw_loc_descr_ref l1jump, l1label; + dw_loc_descr_ref l2jump, l2label; + + op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op0 == NULL) + break; + op1 = mem_loc_descriptor (const0_rtx, mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op1 == NULL) + break; + add_loc_descr (&op0, op1); + add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0)); + l1label = new_loc_descr (DW_OP_dup, 0, 0); + add_loc_descr (&op0, l1label); + l2jump = new_loc_descr (DW_OP_bra, 0, 0); + add_loc_descr (&op0, l2jump); + add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_rot, 0, 0)); + op1 = mem_loc_descriptor (const1_rtx, mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op1 == NULL) + break; + add_loc_descr (&op0, op1); + add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0)); + add_loc_descr (&op0, new_loc_descr (GET_CODE (rtl) == POPCOUNT + ? 
DW_OP_plus : DW_OP_xor, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0)); + op1 = mem_loc_descriptor (const1_rtx, mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + add_loc_descr (&op0, op1); + add_loc_descr (&op0, new_loc_descr (DW_OP_shr, 0, 0)); + l1jump = new_loc_descr (DW_OP_skip, 0, 0); + add_loc_descr (&op0, l1jump); + l2label = new_loc_descr (DW_OP_drop, 0, 0); + add_loc_descr (&op0, l2label); + l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc; + l1jump->dw_loc_oprnd1.v.val_loc = l1label; + l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc; + l2jump->dw_loc_oprnd1.v.val_loc = l2label; + mem_loc_result = op0; + } + break; + + case BSWAP: + /* BSWAP (constS is initial shift count, either 56 or 24): + constS const0 + L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr + const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or + DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8 + DW_OP_minus DW_OP_swap DW_OP_skip <L1> + L2: DW_OP_drop DW_OP_swap DW_OP_drop */ + if (GET_MODE_CLASS (mode) == MODE_INT + && BITS_PER_UNIT == 8 + && (GET_MODE_BITSIZE (mode) == 32 + || GET_MODE_BITSIZE (mode) == 64)) + { + dw_loc_descr_ref l1jump, l1label; + dw_loc_descr_ref l2jump, l2label; + + op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op0 == NULL) + break; + + op1 = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8), + mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op1 == NULL) + break; + add_loc_descr (&op0, op1); + op1 = mem_loc_descriptor (const0_rtx, mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op1 == NULL) + break; + add_loc_descr (&op0, op1); + l1label = new_loc_descr (DW_OP_pick, 2, 0); + add_loc_descr (&op0, l1label); + op1 = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8), + mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + add_loc_descr (&op0, op1); + add_loc_descr (&op0, new_loc_descr (DW_OP_pick, 3, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_minus, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_shr, 0, 0)); + op1 = mem_loc_descriptor (GEN_INT (255), mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op1 == NULL) + break; + add_loc_descr (&op0, op1); + add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_pick, 2, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_or, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0)); + op1 = mem_loc_descriptor (const0_rtx, mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + add_loc_descr (&op0, op1); + add_loc_descr (&op0, new_loc_descr (DW_OP_eq, 0, 0)); + l2jump = new_loc_descr (DW_OP_bra, 0, 0); + add_loc_descr (&op0, l2jump); + op1 = mem_loc_descriptor (GEN_INT (8), mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + add_loc_descr (&op0, op1); + add_loc_descr (&op0, new_loc_descr (DW_OP_minus, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0)); + l1jump = new_loc_descr (DW_OP_skip, 0, 0); + add_loc_descr (&op0, l1jump); + l2label = new_loc_descr (DW_OP_drop, 0, 0); + add_loc_descr (&op0, l2label); + add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_drop, 0, 0)); + l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc; + l1jump->dw_loc_oprnd1.v.val_loc = l1label; + l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc; + l2jump->dw_loc_oprnd1.v.val_loc = l2label; + mem_loc_result = op0; + } + break; + case 
ROTATE: case ROTATERT: + /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode): + DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot + [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg + DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or + + ROTATERT is similar: + DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE> + DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot + [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or + */ + if (GET_MODE_CLASS (mode) == MODE_INT) + { + rtx rtlop1 = XEXP (rtl, 1); + dw_loc_descr_ref mask[2] = { NULL, NULL }; + int i; + + if (GET_MODE (rtlop1) != VOIDmode + && GET_MODE_BITSIZE (GET_MODE (rtlop1)) + < GET_MODE_BITSIZE (mode)) + rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1); + op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + op1 = mem_loc_descriptor (rtlop1, mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + if (op0 == NULL || op1 == NULL) + break; + if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE) + for (i = 0; i < 2; i++) + { + if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT) + mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)), + mode, mem_mode, + VAR_INIT_STATUS_INITIALIZED); + else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT) + mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32 + ? DW_OP_const4u + : HOST_BITS_PER_WIDE_INT == 64 + ? DW_OP_const8u : DW_OP_constu, + GET_MODE_MASK (mode), 0); + else + mask[i] = NULL; + if (mask[i] == NULL) + return NULL; + add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0)); + } + add_loc_descr (&op0, op1); + add_loc_descr (&op0, new_loc_descr (DW_OP_over, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_over, 0, 0)); + if (GET_CODE (rtl) == ROTATERT) + { + add_loc_descr (&op0, new_loc_descr (DW_OP_neg, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, + GET_MODE_BITSIZE (mode), 0)); + } + add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0)); + if (mask[0] != NULL) + add_loc_descr (&op0, mask[0]); + add_loc_descr (&op0, new_loc_descr (DW_OP_rot, 0, 0)); + if (mask[1] != NULL) + { + add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0)); + add_loc_descr (&op0, mask[1]); + add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0)); + } + if (GET_CODE (rtl) == ROTATE) + { + add_loc_descr (&op0, new_loc_descr (DW_OP_neg, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, + GET_MODE_BITSIZE (mode), 0)); + } + add_loc_descr (&op0, new_loc_descr (DW_OP_shr, 0, 0)); + add_loc_descr (&op0, new_loc_descr (DW_OP_or, 0, 0)); + mem_loc_result = op0; + } + break; + + case COMPARE: case TRUNCATE: /* In theory, we could implement the above. */ /* DWARF cannot represent the unsigned compare operations @@ -14856,7 +15277,6 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode, case US_ASHIFT: case SS_TRUNCATE: case US_TRUNCATE: - case UDIV: case UNORDERED: case ORDERED: case UNEQ: @@ -14870,12 +15290,6 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode, case SAT_FRACT: case UNSIGNED_SAT_FRACT: case SQRT: - case BSWAP: - case FFS: - case CLZ: - case CTZ: - case POPCOUNT: - case PARITY: case ASM_OPERANDS: case VEC_MERGE: case VEC_SELECT: diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog index dea7162..85c4a37 100644 --- a/gcc/testsuite/ChangeLog +++ b/gcc/testsuite/ChangeLog @@ -1,5 +1,10 @@ 2011-05-31 Jakub Jelinek <jakub@redhat.com> + * gcc.dg/guality/bswaptest.c: New test. + * gcc.dg/guality/clztest.c: New test. + * gcc.dg/guality/ctztest.c: New test. 
+ * gcc.dg/guality/rotatetest.c: New test. + PR target/48688 * gcc.target/i386/pr48688.c: New test. diff --git a/gcc/testsuite/gcc.dg/guality/bswaptest.c b/gcc/testsuite/gcc.dg/guality/bswaptest.c new file mode 100644 index 0000000..38b9d98 --- /dev/null +++ b/gcc/testsuite/gcc.dg/guality/bswaptest.c @@ -0,0 +1,32 @@ +/* { dg-do run { target { x86_64-*-* && lp64 } } } */ +/* { dg-options "-g" } */ + +volatile int vv; + +__attribute__((noclone, noinline)) long +foo (long x) +{ + long f = __builtin_bswap64 (x); + long g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 12 "g" "f" } } */ + return f; +} + +__attribute__((noclone, noinline)) int +bar (int x) +{ + int f = __builtin_bswap32 (x); + int g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 22 "g" "f" } } */ + return f; +} + +int +main () +{ + foo (0x123456789abcde0fUL); + bar (0x12345678); + return 0; +} diff --git a/gcc/testsuite/gcc.dg/guality/clztest.c b/gcc/testsuite/gcc.dg/guality/clztest.c new file mode 100644 index 0000000..9428be3 --- /dev/null +++ b/gcc/testsuite/gcc.dg/guality/clztest.c @@ -0,0 +1,33 @@ +/* { dg-do run { target { x86_64-*-* && lp64 } } } */ +/* { dg-options "-g" } */ + +volatile int vv; + +__attribute__((noinline, noclone)) long +foo (long x) +{ + long f = __builtin_clzl (x); + long g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 12 "g" "f" } } */ + return f; +} + +__attribute__((noinline, noclone)) long +bar (long x) +{ + long f = __builtin_clzl (x); + long g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 22 "g" "f" } } */ + return f; +} + +int +main () +{ + long x = vv; + foo (x + 0x123456UL); + bar (x + 0x7fffffffUL); + return 0; +} diff --git a/gcc/testsuite/gcc.dg/guality/ctztest.c b/gcc/testsuite/gcc.dg/guality/ctztest.c new file mode 100644 index 0000000..d243845 --- /dev/null +++ b/gcc/testsuite/gcc.dg/guality/ctztest.c @@ -0,0 +1,33 @@ +/* { dg-do run { target { x86_64-*-* && lp64 } } } */ +/* { dg-options "-g" } */ + +volatile int vv; + +__attribute__((noinline, noclone)) long +foo (long x) +{ + long f = __builtin_ctzl (x); + long g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 12 "g" "f" } } */ + return f; +} + +__attribute__((noinline, noclone)) long +bar (long x) +{ + long f = __builtin_ctzl (x); + long g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 22 "g" "f" } } */ + return f; +} + +int +main () +{ + long x = vv; + foo (x + 0x1234560UL); + bar (x + 0x7fff8000UL); + return 0; +} diff --git a/gcc/testsuite/gcc.dg/guality/rotatetest.c b/gcc/testsuite/gcc.dg/guality/rotatetest.c new file mode 100644 index 0000000..dff987c --- /dev/null +++ b/gcc/testsuite/gcc.dg/guality/rotatetest.c @@ -0,0 +1,76 @@ +/* { dg-do run { target { x86_64-*-* && lp64 } } } */ +/* { dg-options "-g" } */ + +volatile int vv; + +__attribute__((noclone, noinline)) long +f1 (unsigned long x) +{ + long f = (x << 12) | (x >> (64 - 12)); + long g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 12 "g" "f" } } */ + return f; +} + +__attribute__((noclone, noinline)) long +f2 (unsigned long x, int y) +{ + long f = (x << y) | (x >> (64 - y)); + long g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 22 "g" "f" } } */ + return f; +} + +__attribute__((noclone, noinline)) long +f3 (unsigned long x, int y) +{ + long f = (x >> y) | (x << (64 - y)); + long g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 32 "g" "f" } } */ + return f; +} + 
+__attribute__((noclone, noinline)) unsigned int +f4 (unsigned int x) +{ + unsigned int f = (x << 12) | (x >> (32 - 12)); + unsigned int g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 42 "g" "f" } } */ + return f; +} + +__attribute__((noclone, noinline)) unsigned int +f5 (unsigned int x, int y) +{ + unsigned int f = (x << y) | (x >> (64 - y)); + unsigned int g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 52 "g" "f" } } */ + return f; +} + +__attribute__((noclone, noinline)) unsigned int +f6 (unsigned int x, int y) +{ + unsigned int f = (x >> y) | (x << (64 - y)); + unsigned int g = f; + asm volatile ("" : "+r" (f)); + vv++; /* { dg-final { gdb-test 62 "g" "f" } } */ + return f; +} + +int +main () +{ + f1 (0x123456789abcde0fUL); + f2 (0x123456789abcde0fUL, 18); + f3 (0x123456789abcde0fUL, 17); + f4 (0x12345678); + f5 (0x12345678, 18); + f6 (0x12345678, 17); + return 0; +} diff --git a/gcc/var-tracking.c b/gcc/var-tracking.c index 488635d..f761234 100644 --- a/gcc/var-tracking.c +++ b/gcc/var-tracking.c @@ -4837,7 +4837,7 @@ get_address_mode (rtx mem) static rtx replace_expr_with_values (rtx loc) { - if (REG_P (loc)) + if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE) return NULL; else if (MEM_P (loc)) { |