author     Jakub Jelinek <jakub@redhat.com>      2011-05-31 21:14:21 +0200
committer  Jakub Jelinek <jakub@gcc.gnu.org>     2011-05-31 21:14:21 +0200
commit     509f4495ee4dedc2a27f9f8f749a0507ef3f1938 (patch)
tree       2f0b491691c28732618def0868a08c868f96b580 /gcc/dwarf2out.c
parent     7351d8da1bdd94d04d276ce75e11897bbc8e3832 (diff)
cselib.c (promote_debug_loc): Allow l->next non-NULL for cselib_preserve_constants.
* cselib.c (promote_debug_loc): Allow l->next non-NULL for
cselib_preserve_constants.
(cselib_lookup_1): If cselib_preserve_constants,
a new VALUE is being created for REG and there is a VALUE for the
same register in a wider mode, add another loc with a lowpart SUBREG of
the wider VALUE.
(cselib_subst_to_values): Handle ENTRY_VALUE.
* var-tracking.c (replace_expr_with_values): Return NULL for
ENTRY_VALUE too.
* dwarf2out.c (convert_descriptor_to_signed): New function.
(mem_loc_descriptor) <case ZERO_EXTEND>: Optimize using DW_OP_and
instead of two shifts.
(mem_loc_descriptor) <do_shift>: ZERO_EXTEND second argument to
the right mode if needed.
(mem_loc_descriptor) <case MOD>: For typed ops just use DW_OP_mod.
(mem_loc_descriptor) <case UNSIGNED_FIX>: Use
convert_descriptor_to_signed.
(mem_loc_descriptor) <case UDIV, CLZ, CTZ, FFS, POPCOUNT, PARITY,
BSWAP, ROTATE, ROTATERT>: Handle these rtls.
* gcc.dg/guality/bswaptest.c: New test.
* gcc.dg/guality/clztest.c: New test.
* gcc.dg/guality/ctztest.c: New test.
* gcc.dg/guality/rotatetest.c: New test.
From-SVN: r174508
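
The four gcc.dg/guality tests listed above exercise the new location expressions under a debugger. As a rough orientation only, a guality test for the bswap case has the shape sketched below; the target guard, variable names, input value, and gdb-test line number are illustrative assumptions, not the contents of the committed bswaptest.c.

/* Sketch of a guality-style test; not the committed bswaptest.c.  */
/* { dg-do run { target { i?86-*-* x86_64-*-* } } } */
/* { dg-options "-g" } */

volatile int vv;

__attribute__((noinline, noclone)) unsigned int
foo (unsigned int x)
{
  unsigned int f = __builtin_bswap32 (x);
  vv++;		/* { dg-final { gdb-test 11 "f" "2018915346" } } */
  return f;	/* 2018915346 == 0x78563412, bswap32 of the input below.  */
}

int
main (void)
{
  foo (0x12345678U);
  return 0;
}

The point of such a test is that, at the vv++ line, the debugger can still recover the value of f from the DWARF location expression even when the optimizer has removed the variable from memory and registers.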
Diffstat (limited to 'gcc/dwarf2out.c')
-rw-r--r--   gcc/dwarf2out.c   470
1 file changed, 442 insertions(+), 28 deletions(-)
diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c
index 9a415e9..58a622c 100644
--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c
@@ -13824,6 +13824,37 @@ base_type_for_mode (enum machine_mode mode, bool unsignedp)
   return type_die;
 }
 
+/* For OP descriptor assumed to be in unsigned MODE, convert it to a signed
+   type matching MODE, or, if MODE is narrower than DWARF2_ADDR_SIZE, signed
+   type matching DWARF2_ADDR_SIZE.  Return NULL if the conversion is not
+   possible.  */
+
+static dw_loc_descr_ref
+convert_descriptor_to_signed (enum machine_mode mode, dw_loc_descr_ref op)
+{
+  enum machine_mode outer_mode = mode;
+  dw_die_ref type_die;
+  dw_loc_descr_ref cvt;
+
+  if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
+    {
+      outer_mode = mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT,
+                                  MODE_INT, 0);
+      if (outer_mode == BLKmode
+          || GET_MODE_SIZE (outer_mode) != DWARF2_ADDR_SIZE)
+        return NULL;
+    }
+  type_die = base_type_for_mode (outer_mode, 0);
+  if (type_die == NULL)
+    return NULL;
+  cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
+  cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
+  cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
+  cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
+  add_loc_descr (&op, cvt);
+  return op;
+}
+
 /* The following routine converts the RTL for a variable or parameter
    (resident in memory) into an equivalent Dwarf representation of a
    mechanism for getting the address of that same variable onto the top of a
@@ -13986,6 +14017,21 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode,
                                 mem_mode, VAR_INIT_STATUS_INITIALIZED);
       if (op0 == 0)
         break;
+      else if (GET_CODE (rtl) == ZERO_EXTEND
+               && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
+               && GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
+                  < HOST_BITS_PER_WIDE_INT
+               /* If DW_OP_const{1,2,4}u won't be used, it is shorter
+                  to expand zero extend as two shifts instead of
+                  masking.  */
+               && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= 4)
+        {
+          enum machine_mode imode = GET_MODE (XEXP (rtl, 0));
+          mem_loc_result = op0;
+          add_loc_descr (&mem_loc_result,
+                         int_loc_descriptor (GET_MODE_MASK (imode)));
+          add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
+        }
       else if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
         {
           int shift = DWARF2_ADDR_SIZE
@@ -14239,10 +14285,15 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode,
     do_shift:
       op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
                                 VAR_INIT_STATUS_INITIALIZED);
-      op1 = mem_loc_descriptor (XEXP (rtl, 1),
-                                GET_MODE (XEXP (rtl, 1)) == VOIDmode
-                                ? mode : GET_MODE (XEXP (rtl, 1)), mem_mode,
-                                VAR_INIT_STATUS_INITIALIZED);
+      {
+        rtx rtlop1 = XEXP (rtl, 1);
+        if (GET_MODE (rtlop1) != VOIDmode
+            && GET_MODE_BITSIZE (GET_MODE (rtlop1))
+               < GET_MODE_BITSIZE (mode))
+          rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
+        op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
+                                  VAR_INIT_STATUS_INITIALIZED);
+      }
 
       if (op0 == 0 || op1 == 0)
         break;
@@ -14279,6 +14330,16 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode,
       break;
 
     case MOD:
+      if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE && !dwarf_strict)
+        {
+          /* If MODE is wider than DWARF2_ADDR_SIZE, mem_loc_descriptor
+             should return signed typed values and therefore DW_OP_mod
+             won't be unsigned as it defaults for untyped stack values,
+             but signed.  */
+          op = DW_OP_mod;
+          goto do_binop;
+        }
+
       op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
                                 VAR_INIT_STATUS_INITIALIZED);
       op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
@@ -14296,6 +14357,38 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode,
       add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
       break;
 
+    case UDIV:
+      if (!dwarf_strict && GET_MODE_CLASS (mode) == MODE_INT)
+        {
+          dw_die_ref type_die;
+          dw_loc_descr_ref cvt;
+
+          type_die = base_type_for_mode (mode, 1);
+          if (type_die == NULL)
+            break;
+          op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op0 == 0 || op1 == 0)
+            break;
+          cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
+          cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
+          cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
+          cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
+          add_loc_descr (&op0, cvt);
+          cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
+          cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
+          cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
+          cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
+          add_loc_descr (&op1, cvt);
+          mem_loc_result = op0;
+          add_loc_descr (&mem_loc_result, op1);
+          add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
+          mem_loc_result = convert_descriptor_to_signed (mode, mem_loc_result);
+        }
+      break;
+
     case NOT:
       op = DW_OP_not;
       goto do_unop;
@@ -14812,31 +14905,359 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode,
               && (GET_CODE (rtl) == UNSIGNED_FIX
                   || GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE))
             {
-              enum machine_mode outer_mode = mode;
-              if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
-                {
-                  outer_mode = mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT,
-                                              MODE_INT, 0);
-                  if (outer_mode == BLKmode
-                      || GET_MODE_SIZE (outer_mode) != DWARF2_ADDR_SIZE)
-                    break;
-                }
-              type_die = base_type_for_mode (outer_mode, 0);
-              if (type_die == NULL)
+              op0 = convert_descriptor_to_signed (mode, op0);
+              if (op0 == NULL)
                 break;
-              cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
-              cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
-              cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
-              cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
-              add_loc_descr (&op0, cvt);
             }
           mem_loc_result = op0;
         }
       break;
 
-    case COMPARE:
+    case CLZ:
+    case CTZ:
+    case FFS:
+      /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
+              const0 is DW_OP_lit0 or corresponding typed constant,
+              const1 is DW_OP_lit1 or corresponding typed constant
+              and constMSB is constant with just the MSB bit set
+              for the mode):
+          DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
+      L1: const0 DW_OP_swap
+      L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
+          DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
+      L3: DW_OP_drop
+      L4: DW_OP_nop
+
+         CTZ is similar:
+          DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
+      L1: const0 DW_OP_swap
+      L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
+          DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
+      L3: DW_OP_drop
+      L4: DW_OP_nop
+
+         FFS is similar:
+          DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
+      L1: const1 DW_OP_swap
+      L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
+          DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
+      L3: DW_OP_drop
+      L4: DW_OP_nop */
+      if (GET_MODE_CLASS (mode) == MODE_INT
+          && GET_MODE (XEXP (rtl, 0)) == mode
+          && (GET_CODE (rtl) != CLZ
+              || GET_MODE_BITSIZE (mode) <= 2 * HOST_BITS_PER_WIDE_INT))
+        {
+          HOST_WIDE_INT valv;
+          dw_loc_descr_ref l1jump, l1label;
+          dw_loc_descr_ref l2jump, l2label;
+          dw_loc_descr_ref l3jump, l3label;
+          dw_loc_descr_ref l4jump, l4label;
+          rtx msb;
+
+          op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op0 == NULL)
+            break;
+          if (GET_CODE (rtl) == CLZ)
+            {
+              if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
+                valv = GET_MODE_BITSIZE (mode);
+            }
+          else if (GET_CODE (rtl) == FFS)
+            valv = 0;
+          else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
+            valv = GET_MODE_BITSIZE (mode);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
+          l1jump = new_loc_descr (DW_OP_bra, 0, 0);
+          add_loc_descr (&op0, l1jump);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_drop, 0, 0));
+          op1 = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op1 == NULL)
+            break;
+          add_loc_descr (&op0, op1);
+          l4jump = new_loc_descr (DW_OP_skip, 0, 0);
+          add_loc_descr (&op0, l4jump);
+          l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
+                                        ? const1_rtx : const0_rtx,
+                                        mode, mem_mode,
+                                        VAR_INIT_STATUS_INITIALIZED);
+          if (l1label == NULL)
+            break;
+          add_loc_descr (&op0, l1label);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0));
+          l2label = new_loc_descr (DW_OP_dup, 0, 0);
+          add_loc_descr (&op0, l2label);
+          if (GET_CODE (rtl) != CLZ)
+            msb = const1_rtx;
+          else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+            msb = GEN_INT ((unsigned HOST_WIDE_INT) 1
+                           << (GET_MODE_BITSIZE (mode) - 1));
+          else
+            msb = immed_double_const (0, (unsigned HOST_WIDE_INT) 1
+                                         << (GET_MODE_BITSIZE (mode)
+                                             - HOST_BITS_PER_WIDE_INT - 1),
+                                      mode);
+          if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
+            op1 = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
+                                 ? DW_OP_const4u
+                                 : HOST_BITS_PER_WIDE_INT == 64
+                                 ? DW_OP_const8u : DW_OP_constu,
+                                 INTVAL (msb), 0);
+          else
+            op1 = mem_loc_descriptor (msb, mode, mem_mode,
+                                      VAR_INIT_STATUS_INITIALIZED);
+          if (op1 == NULL)
+            break;
+          add_loc_descr (&op0, op1);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
+          l3jump = new_loc_descr (DW_OP_bra, 0, 0);
+          add_loc_descr (&op0, l3jump);
+          op1 = mem_loc_descriptor (const1_rtx, mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op1 == NULL)
+            break;
+          add_loc_descr (&op0, op1);
+          add_loc_descr (&op0, new_loc_descr (GET_CODE (rtl) == CLZ
+                                              ? DW_OP_shl : DW_OP_shr, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, 1, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0));
+          l2jump = new_loc_descr (DW_OP_skip, 0, 0);
+          add_loc_descr (&op0, l2jump);
+          l3label = new_loc_descr (DW_OP_drop, 0, 0);
+          add_loc_descr (&op0, l3label);
+          l4label = new_loc_descr (DW_OP_nop, 0, 0);
+          add_loc_descr (&op0, l4label);
+          l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
+          l1jump->dw_loc_oprnd1.v.val_loc = l1label;
+          l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
+          l2jump->dw_loc_oprnd1.v.val_loc = l2label;
+          l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
+          l3jump->dw_loc_oprnd1.v.val_loc = l3label;
+          l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
+          l4jump->dw_loc_oprnd1.v.val_loc = l4label;
+          mem_loc_result = op0;
+        }
+      break;
+
+    case POPCOUNT:
+    case PARITY:
+      /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
+                   const1 is DW_OP_lit1 or corresponding typed constant):
+             const0 DW_OP_swap
+         L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
+             DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
+         L2: DW_OP_drop
+
+         PARITY is similar:
+         L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
+             DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
+         L2: DW_OP_drop */
+      if (GET_MODE_CLASS (mode) == MODE_INT
+          && GET_MODE (XEXP (rtl, 0)) == mode)
+        {
+          dw_loc_descr_ref l1jump, l1label;
+          dw_loc_descr_ref l2jump, l2label;
+
+          op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op0 == NULL)
+            break;
+          op1 = mem_loc_descriptor (const0_rtx, mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op1 == NULL)
+            break;
+          add_loc_descr (&op0, op1);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0));
+          l1label = new_loc_descr (DW_OP_dup, 0, 0);
+          add_loc_descr (&op0, l1label);
+          l2jump = new_loc_descr (DW_OP_bra, 0, 0);
+          add_loc_descr (&op0, l2jump);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_rot, 0, 0));
+          op1 = mem_loc_descriptor (const1_rtx, mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op1 == NULL)
+            break;
+          add_loc_descr (&op0, op1);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (GET_CODE (rtl) == POPCOUNT
+                                              ? DW_OP_plus : DW_OP_xor, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0));
+          op1 = mem_loc_descriptor (const1_rtx, mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          add_loc_descr (&op0, op1);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_shr, 0, 0));
+          l1jump = new_loc_descr (DW_OP_skip, 0, 0);
+          add_loc_descr (&op0, l1jump);
+          l2label = new_loc_descr (DW_OP_drop, 0, 0);
+          add_loc_descr (&op0, l2label);
+          l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
+          l1jump->dw_loc_oprnd1.v.val_loc = l1label;
+          l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
+          l2jump->dw_loc_oprnd1.v.val_loc = l2label;
+          mem_loc_result = op0;
+        }
+      break;
+
+    case BSWAP:
+      /* BSWAP (constS is initial shift count, either 56 or 24):
+             constS const0
+         L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
+             const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
+             DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
+             DW_OP_minus DW_OP_swap DW_OP_skip <L1>
+         L2: DW_OP_drop DW_OP_swap DW_OP_drop */
+      if (GET_MODE_CLASS (mode) == MODE_INT
+          && BITS_PER_UNIT == 8
+          && (GET_MODE_BITSIZE (mode) == 32
+              || GET_MODE_BITSIZE (mode) == 64))
+        {
+          dw_loc_descr_ref l1jump, l1label;
+          dw_loc_descr_ref l2jump, l2label;
+
+          op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op0 == NULL)
+            break;
+
+          op1 = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
+                                    mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op1 == NULL)
+            break;
+          add_loc_descr (&op0, op1);
+          op1 = mem_loc_descriptor (const0_rtx, mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op1 == NULL)
+            break;
+          add_loc_descr (&op0, op1);
+          l1label = new_loc_descr (DW_OP_pick, 2, 0);
+          add_loc_descr (&op0, l1label);
+          op1 = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
+                                    mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          add_loc_descr (&op0, op1);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_pick, 3, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_minus, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_shr, 0, 0));
+          op1 = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op1 == NULL)
+            break;
+          add_loc_descr (&op0, op1);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_pick, 2, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_or, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
+          op1 = mem_loc_descriptor (const0_rtx, mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          add_loc_descr (&op0, op1);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_eq, 0, 0));
+          l2jump = new_loc_descr (DW_OP_bra, 0, 0);
+          add_loc_descr (&op0, l2jump);
+          op1 = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          add_loc_descr (&op0, op1);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_minus, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0));
+          l1jump = new_loc_descr (DW_OP_skip, 0, 0);
+          add_loc_descr (&op0, l1jump);
+          l2label = new_loc_descr (DW_OP_drop, 0, 0);
+          add_loc_descr (&op0, l2label);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_drop, 0, 0));
+          l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
+          l1jump->dw_loc_oprnd1.v.val_loc = l1label;
+          l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
+          l2jump->dw_loc_oprnd1.v.val_loc = l2label;
+          mem_loc_result = op0;
+        }
+      break;
+
+    case ROTATE:
+    case ROTATERT:
+      /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
+             DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
+             [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
+             DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
+
+         ROTATERT is similar:
+             DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
+             DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
+             [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or
+          */
+      if (GET_MODE_CLASS (mode) == MODE_INT)
+        {
+          rtx rtlop1 = XEXP (rtl, 1);
+          dw_loc_descr_ref mask[2] = { NULL, NULL };
+          int i;
+
+          if (GET_MODE (rtlop1) != VOIDmode
+              && GET_MODE_BITSIZE (GET_MODE (rtlop1))
+                 < GET_MODE_BITSIZE (mode))
+            rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
+          op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
+                                    VAR_INIT_STATUS_INITIALIZED);
+          if (op0 == NULL || op1 == NULL)
+            break;
+          if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
+            for (i = 0; i < 2; i++)
+              {
+                if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
+                  mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
+                                                mode, mem_mode,
+                                                VAR_INIT_STATUS_INITIALIZED);
+                else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
+                  mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
+                                           ? DW_OP_const4u
+                                           : HOST_BITS_PER_WIDE_INT == 64
+                                           ? DW_OP_const8u : DW_OP_constu,
+                                           GET_MODE_MASK (mode), 0);
+                else
+                  mask[i] = NULL;
+                if (mask[i] == NULL)
+                  return NULL;
+                add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
+              }
+          add_loc_descr (&op0, op1);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_over, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_over, 0, 0));
+          if (GET_CODE (rtl) == ROTATERT)
+            {
+              add_loc_descr (&op0, new_loc_descr (DW_OP_neg, 0, 0));
+              add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst,
+                                                  GET_MODE_BITSIZE (mode), 0));
+            }
+          add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
+          if (mask[0] != NULL)
+            add_loc_descr (&op0, mask[0]);
+          add_loc_descr (&op0, new_loc_descr (DW_OP_rot, 0, 0));
+          if (mask[1] != NULL)
+            {
+              add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0));
+              add_loc_descr (&op0, mask[1]);
+              add_loc_descr (&op0, new_loc_descr (DW_OP_swap, 0, 0));
+            }
+          if (GET_CODE (rtl) == ROTATE)
+            {
+              add_loc_descr (&op0, new_loc_descr (DW_OP_neg, 0, 0));
+              add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst,
+                                                  GET_MODE_BITSIZE (mode), 0));
+            }
+          add_loc_descr (&op0, new_loc_descr (DW_OP_shr, 0, 0));
+          add_loc_descr (&op0, new_loc_descr (DW_OP_or, 0, 0));
+          mem_loc_result = op0;
+        }
+      break;
+
+    case COMPARE:
     case TRUNCATE:
       /* In theory, we could implement the above.  */
       /* DWARF cannot represent the unsigned compare operations
@@ -14856,7 +15277,6 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode,
     case US_ASHIFT:
     case SS_TRUNCATE:
     case US_TRUNCATE:
-    case UDIV:
     case UNORDERED:
     case ORDERED:
     case UNEQ:
@@ -14870,12 +15290,6 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode,
     case SAT_FRACT:
     case UNSIGNED_SAT_FRACT:
    case SQRT:
-    case BSWAP:
-    case FFS:
-    case CLZ:
-    case CTZ:
-    case POPCOUNT:
-    case PARITY:
     case ASM_OPERANDS:
     case VEC_MERGE:
     case VEC_SELECT:
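
Three small C models follow, as reading aids for the DWARF expansions above. They are editorial sketches, not GCC code; the function names, the use of uint64_t to stand in for an untyped DWARF stack entry, and the assumption of an 8-byte DWARF2_ADDR_SIZE are all illustrative. First, the ZERO_EXTEND hunk chooses between masking (the new DW_OP_and path) and the older shift-left/shift-right pair; both computations below yield the same zero-extended value.

#include <stdint.h>

/* Zero-extend the low IBITS bits of X by masking, as the DW_OP_and path
   does (the mask corresponds to GET_MODE_MASK (imode)).  */
uint64_t
zext_by_mask (uint64_t x, int ibits)
{
  uint64_t mask = ibits == 64 ? ~(uint64_t) 0 : (((uint64_t) 1 << ibits) - 1);
  return x & mask;
}

/* Zero-extend by the two-shift sequence (DW_OP_shl then logical DW_OP_shr),
   assuming a 64-bit stack entry; valid for 1 <= IBITS <= 64.  */
uint64_t
zext_by_shifts (uint64_t x, int ibits)
{
  int shift = 64 - ibits;
  return (x << shift) >> shift;
}

The patch prefers the mask form only when the mask still fits a short constant opcode, which is what the GET_MODE_SIZE <= 4 guard in the hunk checks.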
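Next, the CLZ variant of the CLZ/CTZ/FFS loop behaves like this C model, a sketch that assumes X only has bits below BITS set; clz_model and value_at_zero are illustrative names.

#include <stdint.h>

/* Model of the CLZ stack program: return VALUE_AT_ZERO (constV) for a zero
   input, otherwise shift left until the mode's MSB (constMSB) is set,
   counting each step as DW_OP_plus_uconst <1> does.  */
int
clz_model (uint64_t x, int bits, int value_at_zero)
{
  uint64_t msb = (uint64_t) 1 << (bits - 1);	/* constMSB */
  int count = 0;				/* const0 */

  if (x == 0)					/* DW_OP_dup DW_OP_bra <L1> */
    return value_at_zero;			/* constV */
  while ((x & msb) == 0)			/* L2: constMSB DW_OP_and DW_OP_bra <L3> */
    {
      x <<= 1;					/* const1 DW_OP_shl */
      count++;					/* DW_OP_plus_uconst <1> */
    }
  return count;					/* L3: DW_OP_drop leaves the count */
}

The CTZ and FFS programs differ only in testing the low bit, shifting right instead of left, and in the value pushed for a zero input.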
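Finally, the BSWAP stack program reduces to the following byte-reversal loop; bswap_model is an illustrative name, and bits is 32 or 64, matching the guard in the hunk.

#include <stdint.h>

/* Model of the BSWAP stack program: S starts at BITSIZE - 8 (constS) and
   steps down by 8; each round extracts one byte of X, shifts it to
   position S and ORs it into the accumulator, stopping once S reaches 0.  */
uint64_t
bswap_model (uint64_t x, int bits)
{
  int s = bits - 8;		/* constS */
  uint64_t acc = 0;		/* const0 */

  for (;;)
    {
      acc |= ((x >> (bits - 8 - s)) & 0xff) << s;
      if (s == 0)		/* const0 DW_OP_eq DW_OP_bra <L2> */
	break;
      s -= 8;			/* const8 DW_OP_minus */
    }
  return acc;			/* L2: DW_OP_drop DW_OP_swap DW_OP_drop */
}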