 gcc/config/arm/arm.c                   | 26 ++++++++++++++++++++++----
 gcc/config/arm/sync.md                 | 28 ++++++++++++++--------------
 gcc/config/arm/thumb1.md               | 15 +++++++++++++++
 gcc/testsuite/gcc.target/arm/pr99977.c |  6 ++++++
 4 files changed, 57 insertions(+), 18 deletions(-)
diff --git a/gcc/config/arm/arm.c b/gcc/config/arm/arm.c
index 352b2cd..6641e3f 100644
--- a/gcc/config/arm/arm.c
+++ b/gcc/config/arm/arm.c
@@ -30743,13 +30743,31 @@ arm_split_compare_and_swap (rtx operands[])
}
else
{
- emit_move_insn (neg_bval, const1_rtx);
cond = gen_rtx_NE (VOIDmode, rval, oldval);
if (thumb1_cmpneg_operand (oldval, SImode))
- emit_unlikely_jump (gen_cbranchsi4_scratch (neg_bval, rval, oldval,
- label2, cond));
+ {
+ rtx src = rval;
+ if (!satisfies_constraint_L (oldval))
+ {
+ gcc_assert (satisfies_constraint_J (oldval));
+
+ /* For such immediates, ADDS needs the source and destination regs
+ to be the same.
+
+ Normally this would be handled by RA, but this is all happening
+ after RA. */
+ emit_move_insn (neg_bval, rval);
+ src = neg_bval;
+ }
+
+ emit_unlikely_jump (gen_cbranchsi4_neg_late (neg_bval, src, oldval,
+ label2, cond));
+ }
else
- emit_unlikely_jump (gen_cbranchsi4_insn (cond, rval, oldval, label2));
+ {
+ emit_move_insn (neg_bval, const1_rtx);
+ emit_unlikely_jump (gen_cbranchsi4_insn (cond, rval, oldval, label2));
+ }
}
arm_emit_store_exclusive (mode, neg_bval, mem, newval, use_release);
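
Why the split needs the extra copy: in Thumb-1 state the arm backend's L constraint covers small constants (-7..7), whose negation fits the three-bit immediate of ADDS Rd, Rn, #imm3, while J covers -255..-1, which maps onto ADDS Rdn, #imm8, an encoding that reads and writes the same register. A minimal C sketch of that dispatch follows; the helper names are invented and the ranges are taken as assumptions from the arm constraint documentation, not lifted from GCC source.

/* Illustrative sketch only, not GCC source.  in_thumb1_L/in_thumb1_J
   are invented names modelling the arm port's L and J constraints in
   Thumb-1 state.  Comparing rval against a negative constant -n is
   done as ADDS of +n: the Z flag is set exactly when rval == -n.  */
#include <stdbool.h>

static bool in_thumb1_L (long v) { return v >= -7 && v <= 7; }    /* ADDS Rd, Rn, #imm3 */
static bool in_thumb1_J (long v) { return v >= -255 && v <= -1; } /* ADDS Rdn, #imm8   */

/* Mirrors the dispatch above: an L-range constant may use a scratch
   destination distinct from the source, but a J-range constant forces
   the source to be copied into the scratch first, because ADDS #imm8
   overwrites its input register.  */
static bool needs_src_copy (long oldval)
{
  return in_thumb1_J (oldval) && !in_thumb1_L (oldval);
}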
diff --git a/gcc/config/arm/sync.md b/gcc/config/arm/sync.md
index e4682c0..b9fa870 100644
--- a/gcc/config/arm/sync.md
+++ b/gcc/config/arm/sync.md
@@ -187,20 +187,20 @@
;; Constraints of this pattern must be at least as strict as those of the
;; cbranchsi operations in thumb1.md and aim to be as permissive.
(define_insn_and_split "@atomic_compare_and_swap<CCSI:arch><NARROW:mode>_1"
- [(set (match_operand:CCSI 0 "cc_register_operand" "=&c,&l,&l,&l") ;; bool out
+ [(set (match_operand:CCSI 0 "cc_register_operand" "=&c,&l,&l") ;; bool out
(unspec_volatile:CCSI [(const_int 0)] VUNSPEC_ATOMIC_CAS))
- (set (match_operand:SI 1 "s_register_operand" "=&r,&l,&0,&l*h") ;; val out
+ (set (match_operand:SI 1 "s_register_operand" "=&r,&l,&l*h") ;; val out
(zero_extend:SI
- (match_operand:NARROW 2 "mem_noofs_operand" "+Ua,Ua,Ua,Ua"))) ;; memory
+ (match_operand:NARROW 2 "mem_noofs_operand" "+Ua,Ua,Ua"))) ;; memory
(set (match_dup 2)
(unspec_volatile:NARROW
- [(match_operand:SI 3 "arm_add_operand" "rIL,lIL*h,J,*r") ;; expected
- (match_operand:NARROW 4 "s_register_operand" "r,r,r,r") ;; desired
+ [(match_operand:SI 3 "arm_add_operand" "rIL,lILJ*h,*r") ;; expected
+ (match_operand:NARROW 4 "s_register_operand" "r,r,r") ;; desired
(match_operand:SI 5 "const_int_operand") ;; is_weak
(match_operand:SI 6 "const_int_operand") ;; mod_s
(match_operand:SI 7 "const_int_operand")] ;; mod_f
VUNSPEC_ATOMIC_CAS))
- (clobber (match_scratch:SI 8 "=&r,X,X,X"))]
+ (clobber (match_scratch:SI 8 "=&r,X,X"))]
"<NARROW:sync_predtab>"
"#"
"&& reload_completed"
@@ -209,7 +209,7 @@
arm_split_compare_and_swap (operands);
DONE;
}
- [(set_attr "arch" "32,v8mb,v8mb,v8mb")])
+ [(set_attr "arch" "32,v8mb,v8mb")])
(define_mode_attr cas_cmp_operand
[(SI "arm_add_operand") (DI "cmpdi_operand")])
@@ -219,19 +219,19 @@
;; Constraints of this pattern must be at least as strict as those of the
;; cbranchsi operations in thumb1.md and aim to be as permissive.
(define_insn_and_split "@atomic_compare_and_swap<CCSI:arch><SIDI:mode>_1"
- [(set (match_operand:CCSI 0 "cc_register_operand" "=&c,&l,&l,&l") ;; bool out
+ [(set (match_operand:CCSI 0 "cc_register_operand" "=&c,&l,&l") ;; bool out
(unspec_volatile:CCSI [(const_int 0)] VUNSPEC_ATOMIC_CAS))
- (set (match_operand:SIDI 1 "s_register_operand" "=&r,&l,&0,&l*h") ;; val out
- (match_operand:SIDI 2 "mem_noofs_operand" "+Ua,Ua,Ua,Ua")) ;; memory
+ (set (match_operand:SIDI 1 "s_register_operand" "=&r,&l,&l*h") ;; val out
+ (match_operand:SIDI 2 "mem_noofs_operand" "+Ua,Ua,Ua")) ;; memory
(set (match_dup 2)
(unspec_volatile:SIDI
- [(match_operand:SIDI 3 "<SIDI:cas_cmp_operand>" "<SIDI:cas_cmp_str>,lIL*h,J,*r") ;; expect
- (match_operand:SIDI 4 "s_register_operand" "r,r,r,r") ;; desired
+ [(match_operand:SIDI 3 "<SIDI:cas_cmp_operand>" "<SIDI:cas_cmp_str>,lILJ*h,*r") ;; expect
+ (match_operand:SIDI 4 "s_register_operand" "r,r,r") ;; desired
(match_operand:SI 5 "const_int_operand") ;; is_weak
(match_operand:SI 6 "const_int_operand") ;; mod_s
(match_operand:SI 7 "const_int_operand")] ;; mod_f
VUNSPEC_ATOMIC_CAS))
- (clobber (match_scratch:SI 8 "=&r,X,X,X"))]
+ (clobber (match_scratch:SI 8 "=&r,X,X"))]
"<SIDI:sync_predtab>"
"#"
"&& reload_completed"
@@ -240,7 +240,7 @@
arm_split_compare_and_swap (operands);
DONE;
}
- [(set_attr "arch" "32,v8mb,v8mb,v8mb")])
+ [(set_attr "arch" "32,v8mb,v8mb")])
(define_insn_and_split "atomic_exchange<mode>"
[(set (match_operand:QHSD 0 "s_register_operand" "=&r,&r") ;; output
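
Both CAS patterns shrink from four constraint alternatives to three: the old third alternative, which tied the value output to operand 0 ("&0") and accepted only J-range constants, is removed, and J is folded into the low-register alternative (lILJ*h) so the post-reload splitter performs the copy itself. A hedged sketch of which alternative each negative "expected" constant lands in, assuming the L/J ranges noted above and mirroring the new testcase:

/* -1 is in the Thumb-1 L range (-7..7): comparable via ADDS #imm3,
   no copy of the loaded value is needed.  */
_Bool cas_neg1 (int *p) { return __sync_bool_compare_and_swap (p, -1, 2); }

/* -8 is only in the J range (-255..-1): the splitter now copies the
   loaded value into the scratch before the ADDS #imm8 comparison.  */
_Bool cas_neg8 (int *p) { return __sync_bool_compare_and_swap (p, -8, 2); }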
diff --git a/gcc/config/arm/thumb1.md b/gcc/config/arm/thumb1.md
index c98b59c..084ed65 100644
--- a/gcc/config/arm/thumb1.md
+++ b/gcc/config/arm/thumb1.md
@@ -1206,6 +1206,21 @@
(set_attr "type" "multiple")]
)
+;; An expander which makes use of the cbranchsi4_scratch insn, but can
+;; be used safely after RA.
+(define_expand "cbranchsi4_neg_late"
+ [(parallel [
+ (set (pc) (if_then_else
+ (match_operator 4 "arm_comparison_operator"
+ [(match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "thumb1_cmpneg_operand")])
+ (label_ref (match_operand 3 "" ""))
+ (pc)))
+ (clobber (match_operand:SI 0 "s_register_operand"))
+ ])]
+ "TARGET_THUMB1"
+)
+
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic compare_and_swap splitters in sync.md. These must be at least as
;; strict as the constraints here and aim to be as permissive.
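
The new expander exposes the same shape as cbranchsi4_scratch but takes the scratch register as an explicit operand rather than a match_scratch, so arm_split_compare_and_swap can instantiate it after reload with a register it already owns (neg_bval above). Outside the splitter, an ordinary comparison against a negative constant already reaches this insn; a minimal C illustration of that shape, with an invented function name and the codegen described only as an assumption:

/* On Thumb-1, the (x != -8) test is assumed to become an
   ADDS scratch, x, #8 (Z set exactly when x == -8) followed by a
   conditional branch; no register holding -8 is materialised.  */
void branch_on_neg (int x, void (*then_fn) (void))
{
  if (x != -8)
    then_fn ();
}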
diff --git a/gcc/testsuite/gcc.target/arm/pr99977.c b/gcc/testsuite/gcc.target/arm/pr99977.c
new file mode 100644
index 0000000..7911899
--- /dev/null
+++ b/gcc/testsuite/gcc.target/arm/pr99977.c
@@ -0,0 +1,6 @@
+/* { dg-do compile } */
+/* { dg-options "-march=armv8-m.base -mfloat-abi=soft -O2" } */
+_Bool f1(int *p) { return __sync_bool_compare_and_swap (p, -1, 2); }
+_Bool f2(int *p) { return __sync_bool_compare_and_swap (p, -8, 2); }
+int g1(int *p) { return __sync_val_compare_and_swap (p, -1, 2); }
+int g2(int *p) { return __sync_val_compare_and_swap (p, -8, 3); }
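
For completeness, a hedged runtime model of the builtins the test exercises, using the standard __sync semantics; this host-side check is not part of the committed testcase, which is compile-only:

#include <stdio.h>

int main (void)
{
  int v = -8;
  /* Expected value matches: the swap happens, the builtin returns true.  */
  _Bool ok = __sync_bool_compare_and_swap (&v, -8, 3);   /* v becomes 3 */
  /* Expected value does not match: no swap; the current value is returned.  */
  int old = __sync_val_compare_and_swap (&v, -1, 2);
  printf ("%d %d %d\n", (int) ok, v, old);               /* prints "1 3 3" */
  return 0;
}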