author     Richard Henderson <richard.henderson@linaro.org>   2019-09-19 14:36:33 +0000
committer  Richard Henderson <rth@gcc.gnu.org>                2019-09-19 07:36:33 -0700
commit     b7e560deb37e38fb224a0cf108e15df4a717167a (patch)
tree       7f44a7e5a50a5b2a1d2d39a01338dd1aa2ba645f /gcc/config/aarch64
parent     4a2095ebace8534038ce2adf4ae94bfc854066c4 (diff)
aarch64: Tidy aarch64_split_compare_and_swap
With aarch64_track_speculation, we had extra code to do exactly what the
!strong_zero_p path already did.  The rest is reducing code duplication.

	* config/aarch64/aarch64.c (aarch64_split_compare_and_swap): Disable
	strong_zero_p for aarch64_track_speculation; unify some code paths;
	use aarch64_gen_compare_reg instead of open-coding.

From-SVN: r275966
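For context, a minimal user-level sketch of code that can reach this expansion.
The function name and the assumption that the compiler emits an LL/SC loop here
(rather than an LSE CAS instruction) are illustrative, not taken from the patch;
a strong compare-and-swap whose expected value is zero is what the
strong_zero_p condition detects.

    /* Hypothetical example, not part of the patch: a strong CAS against zero.
       If GCC splits this via aarch64_split_compare_and_swap, oldval is
       const0_rtx, so the strong_zero_p path applies and the retry loop can
       branch with CBNZ on the loaded value instead of a separate compare.  */
    #include <stdatomic.h>
    #include <stdbool.h>

    bool
    try_lock (_Atomic long *lock)
    {
      long expected = 0;	/* compare against zero -> strong_zero_p  */
      return atomic_compare_exchange_strong (lock, &expected, 1L);
    }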
Diffstat (limited to 'gcc/config/aarch64')
-rw-r--r--	gcc/config/aarch64/aarch64.c	50
1 file changed, 14 insertions(+), 36 deletions(-)
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index a5c4f55627..b937514 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -16955,13 +16955,11 @@ aarch64_emit_post_barrier (enum memmodel model)
void
aarch64_split_compare_and_swap (rtx operands[])
{
- rtx rval, mem, oldval, newval, scratch;
+ rtx rval, mem, oldval, newval, scratch, x, model_rtx;
machine_mode mode;
bool is_weak;
rtx_code_label *label1, *label2;
- rtx x, cond;
enum memmodel model;
- rtx model_rtx;
rval = operands[0];
mem = operands[1];
@@ -16982,7 +16980,8 @@ aarch64_split_compare_and_swap (rtx operands[])
CBNZ scratch, .label1
.label2:
CMP rval, 0. */
- bool strong_zero_p = !is_weak && oldval == const0_rtx && mode != TImode;
+ bool strong_zero_p = (!is_weak && !aarch64_track_speculation
+ && oldval == const0_rtx && mode != TImode);
label1 = NULL;
if (!is_weak)
@@ -16995,35 +16994,20 @@ aarch64_split_compare_and_swap (rtx operands[])
/* The initial load can be relaxed for a __sync operation since a final
barrier will be emitted to stop code hoisting. */
if (is_mm_sync (model))
- aarch64_emit_load_exclusive (mode, rval, mem,
- GEN_INT (MEMMODEL_RELAXED));
+ aarch64_emit_load_exclusive (mode, rval, mem, GEN_INT (MEMMODEL_RELAXED));
else
aarch64_emit_load_exclusive (mode, rval, mem, model_rtx);
if (strong_zero_p)
- {
- if (aarch64_track_speculation)
- {
- /* Emit an explicit compare instruction, so that we can correctly
- track the condition codes. */
- rtx cc_reg = aarch64_gen_compare_reg (NE, rval, const0_rtx);
- x = gen_rtx_NE (GET_MODE (cc_reg), cc_reg, const0_rtx);
- }
- else
- x = gen_rtx_NE (VOIDmode, rval, const0_rtx);
-
- x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
- gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
- aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
- }
+ x = gen_rtx_NE (VOIDmode, rval, const0_rtx);
else
{
- cond = aarch64_gen_compare_reg_maybe_ze (NE, rval, oldval, mode);
- x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
- x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
- gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
- aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
+ rtx cc_reg = aarch64_gen_compare_reg_maybe_ze (NE, rval, oldval, mode);
+ x = gen_rtx_NE (VOIDmode, cc_reg, const0_rtx);
}
+ x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
+ gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
+ aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
aarch64_emit_store_exclusive (mode, scratch, mem, newval, model_rtx);
@@ -17044,22 +17028,16 @@ aarch64_split_compare_and_swap (rtx operands[])
aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
}
else
- {
- cond = gen_rtx_REG (CCmode, CC_REGNUM);
- x = gen_rtx_COMPARE (CCmode, scratch, const0_rtx);
- emit_insn (gen_rtx_SET (cond, x));
- }
+ aarch64_gen_compare_reg (NE, scratch, const0_rtx);
emit_label (label2);
+
/* If we used a CBNZ in the exchange loop emit an explicit compare with RVAL
to set the condition flags. If this is not used it will be removed by
later passes. */
if (strong_zero_p)
- {
- cond = gen_rtx_REG (CCmode, CC_REGNUM);
- x = gen_rtx_COMPARE (CCmode, rval, const0_rtx);
- emit_insn (gen_rtx_SET (cond, x));
- }
+ aarch64_gen_compare_reg (NE, rval, const0_rtx);
+
/* Emit any final barrier needed for a __sync operation. */
if (is_mm_sync (model))
aarch64_emit_post_barrier (model);
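The core of the cleanup is replacing the open-coded CC-register compare with a
call to aarch64_gen_compare_reg.  The sketch below uses only names that already
appear in the diff; it is a paraphrase of the change, not code compilable
outside the GCC sources.

    /* Before (removed lines): build the CC-setting insn by hand.  */
    rtx cond = gen_rtx_REG (CCmode, CC_REGNUM);
    rtx x = gen_rtx_COMPARE (CCmode, scratch, const0_rtx);
    emit_insn (gen_rtx_SET (cond, x));

    /* After (added lines): let the helper emit the equivalent compare; the
       returned CC register is not needed here, only the emitted insn is.  */
    aarch64_gen_compare_reg (NE, scratch, const0_rtx);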