| author | Kyrylo Tkachov <kyrylo.tkachov@arm.com> | 2015-04-30 17:03:26 +0000 |
|---|---|---|
| committer | Kyrylo Tkachov <ktkachov@gcc.gnu.org> | 2015-04-30 17:03:26 +0000 |
| commit | fb0cb7fa67a4e267715129fc06185f631904cd86 (patch) | |
| tree | ad3217dda400a9658457de3b3337e2df31f4cdf2 /gcc | |
| parent | 19261b995bae88efa98e73cf9cf002aa1a4163ff (diff) | |
[AArch64] Add alternative 'extr' pattern, calculate rtx cost properly
* config/aarch64/aarch64.md (*extr<mode>5_insn_alt): New pattern.
(*extrsi5_insn_uxtw_alt): Likewise.
* config/aarch64/aarch64.c (aarch64_extr_rtx_p): New function.
(aarch64_rtx_costs, IOR case): Use above to properly cost extr
operations.
From-SVN: r222639
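For background (this note and the example below are not part of the commit): the AArch64 `extr` instruction extracts a register-width field from the concatenation of two source registers, so it can implement an `ior` of two opposing shifts whose amounts sum to the mode width in a single instruction. A minimal illustrative C function of the shape the `*extr<mode>5_insn` patterns target, using an arbitrary 16/48 split and a hypothetical function name, is sketched below.

```c
/* Illustrative sketch only; not taken from the patch or its testsuite.
   The shift amounts sum to the type width (16 + 48 == 64), so the
   resulting (ior (ashift ...) (lshiftrt ...)) RTL is the shape matched
   by the *extr<mode>5_insn patterns.  The new _alt patterns cover the
   case where the ior operands appear in the opposite order, since there
   is no canonical ordering of ashift and lshiftrt inside an ior.  */
unsigned long long
combine_halves (unsigned long long a, unsigned long long b)
{
  return (a << 16) | (b >> 48);
}
```

With the patch, whichever operand order the combiner produces should match one of the two patterns and emit a single `extr`, and the `IOR` case of `aarch64_rtx_costs` now costs such an expression as its two shift operands plus one `extra_cost->alu.shift` instead of falling through to the generic logical-operation costing.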
Diffstat (limited to 'gcc')
| -rw-r--r-- | gcc/ChangeLog | 8 |
| -rw-r--r-- | gcc/config/aarch64/aarch64.c | 55 |
| -rw-r--r-- | gcc/config/aarch64/aarch64.md | 28 |
3 files changed, 91 insertions, 0 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 3710788..d24460f 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,13 @@
 2015-04-30  Kyrylo Tkachov  <kyrylo.tkachov@arm.com>
 
+	* config/aarch64/aarch64.md (*extr<mode>5_insn_alt): New pattern.
+	(*extrsi5_insn_uxtw_alt): Likewise.
+	* config/aarch64/aarch64.c (aarch64_extr_rtx_p): New function.
+	(aarch64_rtx_costs, IOR case): Use above to properly cost extr
+	operations.
+
+2015-04-30  Kyrylo Tkachov  <kyrylo.tkachov@arm.com>
+
 	* config/aarch64/aarch64.c (aarch64_rtx_costs):
 	Handle pattern for fabd in ABS case.
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index f1aec67..d7ef473 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -5445,6 +5445,51 @@ aarch64_frint_unspec_p (unsigned int u)
     }
 }
 
+/* Return true iff X is an rtx that will match an extr instruction
+   i.e. as described in the *extr<mode>5_insn family of patterns.
+   OP0 and OP1 will be set to the operands of the shifts involved
+   on success and will be NULL_RTX otherwise.  */
+
+static bool
+aarch64_extr_rtx_p (rtx x, rtx *res_op0, rtx *res_op1)
+{
+  rtx op0, op1;
+  machine_mode mode = GET_MODE (x);
+
+  *res_op0 = NULL_RTX;
+  *res_op1 = NULL_RTX;
+
+  if (GET_CODE (x) != IOR)
+    return false;
+
+  op0 = XEXP (x, 0);
+  op1 = XEXP (x, 1);
+
+  if ((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
+      || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
+    {
+      /* Canonicalise locally to ashift in op0, lshiftrt in op1.  */
+      if (GET_CODE (op1) == ASHIFT)
+        std::swap (op0, op1);
+
+      if (!CONST_INT_P (XEXP (op0, 1)) || !CONST_INT_P (XEXP (op1, 1)))
+        return false;
+
+      unsigned HOST_WIDE_INT shft_amnt_0 = UINTVAL (XEXP (op0, 1));
+      unsigned HOST_WIDE_INT shft_amnt_1 = UINTVAL (XEXP (op1, 1));
+
+      if (shft_amnt_0 < GET_MODE_BITSIZE (mode)
+          && shft_amnt_0 + shft_amnt_1 == GET_MODE_BITSIZE (mode))
+        {
+          *res_op0 = XEXP (op0, 0);
+          *res_op1 = XEXP (op1, 0);
+          return true;
+        }
+    }
+
+  return false;
+}
+
 /* Calculate the cost of calculating (if_then_else (OP0) (OP1) (OP2)),
    storing it in *COST.  Result is true if the total cost of the operation
    has now been calculated.  */
@@ -5977,6 +6022,16 @@ cost_plus:
 
           return true;
         }
+
+      if (aarch64_extr_rtx_p (x, &op0, &op1))
+        {
+          *cost += rtx_cost (op0, IOR, 0, speed)
+                   + rtx_cost (op1, IOR, 1, speed);
+          if (speed)
+            *cost += extra_cost->alu.shift;
+
+          return true;
+        }
     /* Fall through.  */
     case XOR:
     case AND:
diff --git a/gcc/config/aarch64/aarch64.md b/gcc/config/aarch64/aarch64.md
index 194bfd3..11c4709 100644
--- a/gcc/config/aarch64/aarch64.md
+++ b/gcc/config/aarch64/aarch64.md
@@ -3597,6 +3597,21 @@
   [(set_attr "type" "shift_imm")]
 )
 
+;; There are no canonicalisation rules for ashift and lshiftrt inside an ior
+;; so we have to match both orderings.
+(define_insn "*extr<mode>5_insn_alt"
+  [(set (match_operand:GPI 0 "register_operand" "=r")
+        (ior:GPI (lshiftrt:GPI (match_operand:GPI 2 "register_operand" "r")
+                               (match_operand 4 "const_int_operand" "n"))
+                 (ashift:GPI (match_operand:GPI 1 "register_operand" "r")
+                             (match_operand 3 "const_int_operand" "n"))))]
+  "UINTVAL (operands[3]) < GET_MODE_BITSIZE (<MODE>mode)
+   && (UINTVAL (operands[3]) + UINTVAL (operands[4])
+       == GET_MODE_BITSIZE (<MODE>mode))"
+  "extr\\t%<w>0, %<w>1, %<w>2, %4"
+  [(set_attr "type" "shift_imm")]
+)
+
 ;; zero_extend version of the above
 (define_insn "*extrsi5_insn_uxtw"
   [(set (match_operand:DI 0 "register_operand" "=r")
@@ -3611,6 +3626,19 @@
   [(set_attr "type" "shift_imm")]
 )
 
+(define_insn "*extrsi5_insn_uxtw_alt"
+  [(set (match_operand:DI 0 "register_operand" "=r")
+        (zero_extend:DI
+         (ior:SI (lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
+                              (match_operand 4 "const_int_operand" "n"))
+                 (ashift:SI (match_operand:SI 1 "register_operand" "r")
+                            (match_operand 3 "const_int_operand" "n")))))]
+  "UINTVAL (operands[3]) < 32 &&
+   (UINTVAL (operands[3]) + UINTVAL (operands[4]) == 32)"
+  "extr\\t%w0, %w1, %w2, %4"
+  [(set_attr "type" "shift_imm")]
+)
+
 (define_insn "*ror<mode>3_insn"
   [(set (match_operand:GPI 0 "register_operand" "=r")
         (rotate:GPI (match_operand:GPI 1 "register_operand" "r")