author     Kyrylo Tkachov <kyrylo.tkachov@arm.com>   2015-12-16 15:49:42 +0000
committer  Kyrylo Tkachov <ktkachov@gcc.gnu.org>     2015-12-16 15:49:42 +0000
commit     283b6c85430d825db1a0cfbd4f54a41d66721e31 (patch)
tree       a22a8a0307a6b40dea66cb671b888c5e439eb57f /gcc
parent     077067a5f30559fd97ffbf2d9f8f636442d8905a (diff)
[AArch64] Properly cost zero_extend+ashift forms of ubfi[xz]
        * config/aarch64/aarch64.c (aarch64_extend_bitfield_pattern_p):
        New function.
        (aarch64_rtx_costs, ZERO_EXTEND, SIGN_EXTEND cases): Use the above
        to handle extend+shift rtxes.

From-SVN: r231698
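[Background, not part of the commit message: the rtxes being costed here are a zero/sign extension to SImode or DImode wrapped around a shift of a QImode or HImode value, which the AArch64 backend implements as a single UBFIZ/SBFIZ/UBFX/SBFX instruction. As a purely illustrative example (the function name below is invented for this note), C source along these lines typically produces such an rtx and compiles to one ubfiz:

    unsigned int
    shift_then_widen (unsigned char x)
    {
      /* Shift inside the 8-bit type, then widen the 8-bit result.
         On AArch64 this is normally a single UBFIZ instruction.  */
      return (unsigned char) (x << 3);
    }

Before this patch the extend and the inner shift were in effect each given their own cost; with it, the whole expression is costed as one bfx-class ALU operation plus the cost of the shifted operand.]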
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog                  7
-rw-r--r--  gcc/config/aarch64/aarch64.c  62
2 files changed, 69 insertions, 0 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 75da80b..ef9db37 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,12 @@
2015-12-16 Kyrylo Tkachov <kyrylo.tkachov@arm.com>
+ * config/aarch64/aarch64.c (aarch64_extend_bitfield_pattern_p):
+ New function.
+ (aarch64_rtx_costs, ZERO_EXTEND, SIGN_EXTEND cases): Use the above
+ to handle extend+shift rtxes.
+
+2015-12-16 Kyrylo Tkachov <kyrylo.tkachov@arm.com>
+
PR target/68696
* config/aarch64/aarch64-simd.md (*aarch64_simd_bsl<mode>_alt):
New pattern.
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index 1e1b864d..e3e5b6b 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -6146,6 +6146,50 @@ aarch64_if_then_else_costs (rtx op0, rtx op1, rtx op2, int *cost, bool speed)
return false;
}
+/* Check whether X is a bitfield operation of the form shift + extend that
+ maps down to a UBFIZ/SBFIZ/UBFX/SBFX instruction. If so, return the
+ operand to which the bitfield operation is applied. Otherwise return
+ NULL_RTX. */
+
+static rtx
+aarch64_extend_bitfield_pattern_p (rtx x)
+{
+ rtx_code outer_code = GET_CODE (x);
+ machine_mode outer_mode = GET_MODE (x);
+
+ if (outer_code != ZERO_EXTEND && outer_code != SIGN_EXTEND
+ && outer_mode != SImode && outer_mode != DImode)
+ return NULL_RTX;
+
+ rtx inner = XEXP (x, 0);
+ rtx_code inner_code = GET_CODE (inner);
+ machine_mode inner_mode = GET_MODE (inner);
+ rtx op = NULL_RTX;
+
+ switch (inner_code)
+ {
+ case ASHIFT:
+ if (CONST_INT_P (XEXP (inner, 1))
+ && (inner_mode == QImode || inner_mode == HImode))
+ op = XEXP (inner, 0);
+ break;
+ case LSHIFTRT:
+ if (outer_code == ZERO_EXTEND && CONST_INT_P (XEXP (inner, 1))
+ && (inner_mode == QImode || inner_mode == HImode))
+ op = XEXP (inner, 0);
+ break;
+ case ASHIFTRT:
+ if (outer_code == SIGN_EXTEND && CONST_INT_P (XEXP (inner, 1))
+ && (inner_mode == QImode || inner_mode == HImode))
+ op = XEXP (inner, 0);
+ break;
+ default:
+ break;
+ }
+
+ return op;
+}
+
/* Calculate the cost of calculating X, storing it in *COST. Result
is true if the total cost of the operation has now been calculated. */
static bool
@@ -6837,6 +6881,15 @@ cost_plus:
return true;
}
+ op0 = aarch64_extend_bitfield_pattern_p (x);
+ if (op0)
+ {
+ *cost += rtx_cost (op0, mode, ZERO_EXTEND, 0, speed);
+ if (speed)
+ *cost += extra_cost->alu.bfx;
+ return true;
+ }
+
if (speed)
{
if (VECTOR_MODE_P (mode))
@@ -6868,6 +6921,15 @@ cost_plus:
return true;
}
+ op0 = aarch64_extend_bitfield_pattern_p (x);
+ if (op0)
+ {
+ *cost += rtx_cost (op0, mode, SIGN_EXTEND, 0, speed);
+ if (speed)
+ *cost += extra_cost->alu.bfx;
+ return true;
+ }
+
if (speed)
{
if (VECTOR_MODE_P (mode))
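
[Illustrative sketch, not part of the commit: the rtx shape that the new aarch64_extend_bitfield_pattern_p helper recognizes, built with GCC's internal rtl API. This assumes it is compiled inside the AArch64 backend (so rtl.h and the target headers are available); the function name and the choice of register and shift amount are arbitrary.

    #include "config.h"
    #include "system.h"
    #include "coretypes.h"
    #include "tm.h"
    #include "rtl.h"

    /* Build (zero_extend:SI (ashift:QI (reg:QI x0) (const_int 3))),
       i.e. the extend-of-narrow-shift form that maps to a single UBFIZ.  */
    void
    build_extend_shift_example (void)
    {
      rtx reg = gen_rtx_REG (QImode, R0_REGNUM);
      rtx shift = gen_rtx_ASHIFT (QImode, reg, GEN_INT (3));
      rtx ext = gen_rtx_ZERO_EXTEND (SImode, shift);

      /* aarch64_extend_bitfield_pattern_p (ext) returns `reg' here, so the
         ZERO_EXTEND case in aarch64_rtx_costs now adds one alu.bfx cost
         (when optimizing for speed) plus the cost of `reg', instead of
         recursing into the shift and the extend separately.  */
      (void) ext;
    }

The LSHIFTRT and ASHIFTRT cases of the helper behave the same way for the UBFX and SBFX forms, keyed on whether the outer code is ZERO_EXTEND or SIGN_EXTEND respectively.]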