author | Kyrylo Tkachov <kyrylo.tkachov@arm.com> | 2016-06-14 13:39:03 +0000 |
---|---|---|
committer | Kyrylo Tkachov <ktkachov@gcc.gnu.org> | 2016-06-14 13:39:03 +0000 |
commit | 8c83f71d3b17a8bf136ca0243397dad75ea698f0 (patch) | |
tree | b5ebe211d89bb75e06aae3093f0f4a9f6892c9ce | |
parent | 124bed29851cb5ece3d1218c6113a0774ffc26a7 (diff) | |
[AArch64] Handle AND+ASHIFT form of UBFIZ correctly in costs
* config/aarch64/aarch64.c (aarch64_mask_and_shift_for_ubfiz_p):
New function.
(aarch64_rtx_costs): Use it. Rewrite CONST_INT_P (op1) case to handle
mask+shift version.
* config/aarch64/aarch64-protos.h (aarch64_mask_and_shift_for_ubfiz_p):
New prototype.
* config/aarch64/aarch64.md (*andim_ashift<mode>_bfiz): Replace
matching condition with aarch64_mask_and_shift_for_ubfiz_p.
From-SVN: r237440
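For illustration (not part of the commit): UBFIZ implements exactly the shift-then-mask idiom this patch teaches the cost model to recognise, provided the mask is a contiguous run of set bits whose lowest bit sits at the shift amount. A minimal C sketch (made-up function name) of the kind of source that the *andim_ashift<mode>_bfiz pattern covers:

```c
/* Illustrative sketch only.  With optimisation enabled, an AArch64 GCC
   that has this pattern can emit the whole expression as a single UBFIZ
   instead of LSL + AND: the mask 0xff0 is eight contiguous bits whose
   lowest bit lies at the shift amount 4.  */
unsigned long
shift_and_mask (unsigned long x)
{
  return (x << 4) & 0xff0;	/* roughly: ubfiz x0, x0, 4, 8 */
}
```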
-rw-r--r-- | gcc/ChangeLog | 11 |
-rw-r--r-- | gcc/config/aarch64/aarch64-protos.h | 1 |
-rw-r--r-- | gcc/config/aarch64/aarch64.c | 43 |
-rw-r--r-- | gcc/config/aarch64/aarch64.md | 4 |
4 files changed, 48 insertions, 11 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 9904443..6836663 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,14 @@
+2016-06-14  Kyrylo Tkachov  <kyrylo.tkachov@arm.com>
+
+	* config/aarch64/aarch64.c (aarch64_mask_and_shift_for_ubfiz_p):
+	New function.
+	(aarch64_rtx_costs): Use it.  Rewrite CONST_INT_P (op1) case to handle
+	mask+shift version.
+	* config/aarch64/aarch64-protos.h (aarch64_mask_and_shift_for_ubfiz_p):
+	New prototype.
+	* config/aarch64/aarch64.md (*andim_ashift<mode>_bfiz): Replace
+	matching condition with aarch64_mask_and_shift_for_ubfiz_p.
+
 2016-06-14  Richard Biener  <rguenther@suse.de>
 
 	PR tree-optimization/71522
diff --git a/gcc/config/aarch64/aarch64-protos.h b/gcc/config/aarch64/aarch64-protos.h
index 3e0a0a3..e8c2ac8 100644
--- a/gcc/config/aarch64/aarch64-protos.h
+++ b/gcc/config/aarch64/aarch64-protos.h
@@ -318,6 +318,7 @@ bool aarch64_is_noplt_call_p (rtx);
 bool aarch64_label_mentioned_p (rtx);
 void aarch64_declare_function_name (FILE *, const char*, tree);
 bool aarch64_legitimate_pic_operand_p (rtx);
+bool aarch64_mask_and_shift_for_ubfiz_p (machine_mode, rtx, rtx);
 bool aarch64_modes_tieable_p (machine_mode mode1, machine_mode mode2);
 bool aarch64_zero_extend_const_eq (machine_mode, rtx, machine_mode, rtx);
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index df7cb92..e91017b 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -6050,6 +6050,19 @@ aarch64_extend_bitfield_pattern_p (rtx x)
   return op;
 }
 
+/* Return true if the mask and a shift amount from an RTX of the form
+   (x << SHFT_AMNT) & MASK are valid to combine into a UBFIZ instruction of
+   mode MODE.  See the *andim_ashift<mode>_bfiz pattern.  */
+
+bool
+aarch64_mask_and_shift_for_ubfiz_p (machine_mode mode, rtx mask, rtx shft_amnt)
+{
+  return CONST_INT_P (mask) && CONST_INT_P (shft_amnt)
+	 && INTVAL (shft_amnt) < GET_MODE_BITSIZE (mode)
+	 && exact_log2 ((INTVAL (mask) >> INTVAL (shft_amnt)) + 1) >= 0
+	 && (INTVAL (mask) & ((1 << INTVAL (shft_amnt)) - 1)) == 0;
+}
+
 /* Calculate the cost of calculating X, storing it in *COST.  Result
    is true if the total cost of the operation has now been calculated.  */
 static bool
@@ -6624,17 +6637,31 @@ cost_plus:
       if (GET_MODE_CLASS (mode) == MODE_INT)
 	{
-	  /* We possibly get the immediate for free, this is not
-	     modelled.  */
-	  if (CONST_INT_P (op1)
-	      && aarch64_bitmask_imm (INTVAL (op1), mode))
+	  if (CONST_INT_P (op1))
 	    {
-	      *cost += rtx_cost (op0, mode, (enum rtx_code) code, 0, speed);
+	      /* We have a mask + shift version of a UBFIZ
+		 i.e. the *andim_ashift<mode>_bfiz pattern.  */
+	      if (GET_CODE (op0) == ASHIFT
+		  && aarch64_mask_and_shift_for_ubfiz_p (mode, op1,
+							 XEXP (op0, 1)))
+		{
+		  *cost += rtx_cost (XEXP (op0, 0), mode,
+				     (enum rtx_code) code, 0, speed);
+		  if (speed)
+		    *cost += extra_cost->alu.bfx;
 
-	      if (speed)
-		*cost += extra_cost->alu.logical;
+		  return true;
+		}
+	      else if (aarch64_bitmask_imm (INTVAL (op1), mode))
+		{
+		  /* We possibly get the immediate for free, this is not
+		     modelled.  */
+		  *cost += rtx_cost (op0, mode, (enum rtx_code) code, 0, speed);
+		  if (speed)
+		    *cost += extra_cost->alu.logical;
 
-	      return true;
+		  return true;
+		}
 	    }
 	  else
 	    {
diff --git a/gcc/config/aarch64/aarch64.md b/gcc/config/aarch64/aarch64.md
index 704e855..b4e6ba0 100644
--- a/gcc/config/aarch64/aarch64.md
+++ b/gcc/config/aarch64/aarch64.md
@@ -4380,9 +4380,7 @@
 	(and:GPI (ashift:GPI (match_operand:GPI 1 "register_operand" "r")
 			     (match_operand 2 "const_int_operand" "n"))
 		 (match_operand 3 "const_int_operand" "n")))]
-  "(INTVAL (operands[2]) < (<GPI:sizen>))
-   && exact_log2 ((INTVAL (operands[3]) >> INTVAL (operands[2])) + 1) >= 0
-   && (INTVAL (operands[3]) & ((1 << INTVAL (operands[2])) - 1)) == 0"
+  "aarch64_mask_and_shift_for_ubfiz_p (<MODE>mode, operands[3], operands[2])"
   "ubfiz\\t%<w>0, %<w>1, %2, %P3"
   [(set_attr "type" "bfm")]
 )
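The three conditions in the new predicate can be sanity-checked with plain integer arithmetic. Below is a standalone sketch (hypothetical helper name; it mirrors the logic above without GCC's rtx/INTVAL machinery), using the same example values as earlier:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical standalone mirror of aarch64_mask_and_shift_for_ubfiz_p:
   the mask must be a contiguous run of set bits whose lowest set bit
   lies exactly at the shift amount, and the shift must fit the mode.  */
static bool
ubfiz_mask_and_shift_ok (unsigned bitsize, uint64_t mask, unsigned shift)
{
  if (shift >= bitsize)
    return false;
  /* (mask >> shift) + 1 must be a power of two; this is the
     exact_log2 (...) >= 0 test in the patch.  */
  uint64_t field = (mask >> shift) + 1;
  bool contiguous = field != 0 && (field & (field - 1)) == 0;
  /* No mask bits may sit below the shift amount.  */
  bool aligned = (mask & ((UINT64_C (1) << shift) - 1)) == 0;
  return contiguous && aligned;
}

int
main (void)
{
  printf ("%d\n", ubfiz_mask_and_shift_ok (64, 0xff0, 4));	/* 1: UBFIZ applies */
  printf ("%d\n", ubfiz_mask_and_shift_ok (64, 0xff1, 4));	/* 0: bit 0 lies below the shift */
  return 0;
}
```

When the check succeeds, the cost model now charges the cheaper alu.bfx cost for the combined operation instead of a shift plus a logical immediate.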