author     Pat Pannuto <pat.pannuto@gmail.com>      2017-01-11 23:50:19 -0500
committer  Corinna Vinschen <corinna@vinschen.de>   2017-01-25 13:32:09 +0100
commit     3ebc26958e6456befd95809faa6d88481f29bc89 (patch)
tree       36659b5751c161de672e7e3c4eabe364565f2f04 /newlib/libc/machine
parent     b219285f873cc79361355938bd2a994957b4a6ef (diff)
arm: Remove RETURN macro
LTO can re-order top-level assembly blocks, which can cause this
macro definition to appear after its use (or not at all), causing
compilation failures. On modern toolchains (armv4t and newer), assembly
should use `bx lr` in all cases, and linkers will transparently
convert it to `mov pc, lr` where needed, allowing us to simply remove
the macro.
(source: https://groups.google.com/forum/#!topic/comp.sys.arm/3l7fVGX-Wug
and verified empirically)
For the strcmp-armv4.S file, preserve this macro (now defined locally
in that file) to maximize backwards compatibility.
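
To make the failure mode concrete, here is a minimal sketch (hypothetical file,
not part of this commit; assumes an arm-none-eabi GCC built with LTO support).
The macro was defined by a top-level asm() in arm_asm.h while its uses sat in
other asm statements such as strcpy.c's; under -flto the compiler may re-order
top-level asm blocks or place them in different LTO partitions, so the .macro
definition can end up after (or missing from) the object that uses it:

    /* lto_return_repro.c -- hypothetical sketch of the failure mode.
       Build: arm-none-eabi-gcc -O2 -flto -c lto_return_repro.c

       Top-level asm defining an assembler macro, as arm_asm.h did.  */
    asm (".macro RETURN\n\t"
         "bx lr\n\t"
         ".endm");

    /* A function whose inline asm uses the macro, as strcpy.c's did.
       With LTO the definition above may be emitted after this use, or
       into a different partition, and the assembler then rejects
       RETURN as an unknown instruction.  */
    int
    use_return (int x)
    {
      asm ("RETURN");
      return x;
    }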
Diffstat (limited to 'newlib/libc/machine')
-rw-r--r--  newlib/libc/machine/arm/arm_asm.h          | 22
-rw-r--r--  newlib/libc/machine/arm/strcmp-arm-tiny.S  |  2
-rw-r--r--  newlib/libc/machine/arm/strcmp-armv4.S     | 12
-rw-r--r--  newlib/libc/machine/arm/strcmp-armv7m.S    |  6
-rw-r--r--  newlib/libc/machine/arm/strcpy.c           | 12
-rw-r--r--  newlib/libc/machine/arm/strlen-stub.c      |  2
6 files changed, 23 insertions(+), 33 deletions(-)
diff --git a/newlib/libc/machine/arm/arm_asm.h b/newlib/libc/machine/arm/arm_asm.h
index bf18c0a..2708057 100644
--- a/newlib/libc/machine/arm/arm_asm.h
+++ b/newlib/libc/machine/arm/arm_asm.h
@@ -60,26 +60,4 @@
 # define _ISA_THUMB_1
 #endif
 
-
-/* Now some macros for common instruction sequences. */
-#ifdef __ASSEMBLER__
-.macro RETURN cond=
-#if defined (_ISA_ARM_4T) || defined (_ISA_THUMB_1)
-	bx\cond	lr
-#else
-	mov\cond	pc, lr
-#endif
-.endm
-
-#else
-asm(".macro RETURN cond=\n\t"
-#if defined (_ISA_ARM_4T) || defined (_ISA_THUMB_1)
-    "bx\\cond lr\n\t"
-#else
-    "mov\\cond pc, lr\n\t"
-#endif
-    ".endm"
-    );
-#endif
-
 #endif /* ARM_ASM__H */
diff --git a/newlib/libc/machine/arm/strcmp-arm-tiny.S b/newlib/libc/machine/arm/strcmp-arm-tiny.S
index 6b6bd13..607a41d 100644
--- a/newlib/libc/machine/arm/strcmp-arm-tiny.S
+++ b/newlib/libc/machine/arm/strcmp-arm-tiny.S
@@ -42,6 +42,6 @@ def_fn strcmp
 	beq	1b
 2:
 	subs	r0, r2, r3
-	RETURN
+	bx	lr
 	.cfi_endproc
 	.size	strcmp, . - strcmp
diff --git a/newlib/libc/machine/arm/strcmp-armv4.S b/newlib/libc/machine/arm/strcmp-armv4.S
index 05e3df6..e8d0e24 100644
--- a/newlib/libc/machine/arm/strcmp-armv4.S
+++ b/newlib/libc/machine/arm/strcmp-armv4.S
@@ -43,6 +43,18 @@
 #define tmp1	r12
 #define syndrome	r12	/* Overlaps tmp1 */
 
+/* For armv4t and newer, toolchains will transparently convert
+   'bx lr' to 'mov pc, lr' if needed. GCC has deprecated support
+   for anything older than armv4t, but this should handle that
+   corner case in case anyone needs it anyway */
+.macro RETURN
+#if __ARM_ARCH <= 4 && __ARM_ARCH_ISA_THUMB == 0
+	mov	pc, lr
+#else
+	bx	lr
+#endif
+.endm
+
 	.arm
 def_fn	strcmp
 	.cfi_sections .debug_frame
diff --git a/newlib/libc/machine/arm/strcmp-armv7m.S b/newlib/libc/machine/arm/strcmp-armv7m.S
index 7b63049..cdb4912 100644
--- a/newlib/libc/machine/arm/strcmp-armv7m.S
+++ b/newlib/libc/machine/arm/strcmp-armv7m.S
@@ -106,7 +106,7 @@ def_fn strcmp
 	lsrs	result, result, #24
 	subs	result, result, data2
 #endif
-	RETURN
+	bx	lr
 
 #if 0
 
@@ -356,7 +356,7 @@ def_fn strcmp
 	ldmfd	sp!, {r5}
 	.cfi_restore 5
 	.cfi_def_cfa_offset 0
-	RETURN
+	bx	lr
 
 .Lstrcmp_tail:
 	.cfi_restore_state
@@ -373,6 +373,6 @@ def_fn strcmp
 	ldmfd	sp!, {r5}
 	.cfi_restore 5
 	.cfi_def_cfa_offset 0
-	RETURN
+	bx	lr
 	.cfi_endproc
 	.size	strcmp, . - strcmp
diff --git a/newlib/libc/machine/arm/strcpy.c b/newlib/libc/machine/arm/strcpy.c
index b90d5cf..6f358e4 100644
--- a/newlib/libc/machine/arm/strcpy.c
+++ b/newlib/libc/machine/arm/strcpy.c
@@ -108,7 +108,7 @@ strcpy (char* dst, const char* src)
 #ifndef __thumb2__
        "ldr r5, [sp], #4\n\t"
 #endif
-       "RETURN\n"
+       "bx lr\n"
 
        /* Strings have the same offset from word alignment,
          but it's not zero.  */
@@ -119,7 +119,7 @@ strcpy (char* dst, const char* src)
        "strb r2, [ip], #1\n\t"
        "cmp r2, #0\n\t"
        "it eq\n"
-       "RETURN eq\n"
+       "bxeq lr\n"
       "1:\n\t"
        "tst r1, #2\n\t"
        "beq 5b\n\t"
@@ -139,7 +139,7 @@ strcpy (char* dst, const char* src)
        "tstne r2, #0xff00\n\t"
 #endif
        "bne 5b\n\t"
-       "RETURN\n"
+       "bx lr\n"
 
        /* src and dst do not have a common word-alignement.  Fall back to
          byte copying.  */
@@ -148,7 +148,7 @@ strcpy (char* dst, const char* src)
        "strb r2, [ip], #1\n\t"
        "cmp r2, #0\n\t"
        "bne 4b\n\t"
-       "RETURN"
+       "bx lr\n\t"
 
 #elif !defined (__thumb__) || defined (__thumb2__)
        "mov r3, r0\n\t"
@@ -157,7 +157,7 @@ strcpy (char* dst, const char* src)
        "strb r2, [r3], #1\n\t"
        "cmp r2, #0\n\t"
        "bne 1b\n\t"
-       "RETURN"
+       "bx lr\n\t"
 #else
        "mov r3, r0\n\t"
        "1:\n\t"
@@ -167,7 +167,7 @@ strcpy (char* dst, const char* src)
        "add r3, r3, #1\n\t"
        "cmp r2, #0\n\t"
        "bne 1b\n\t"
-       "RETURN"
+       "bx lr\n\t"
 #endif
        );
 }
diff --git a/newlib/libc/machine/arm/strlen-stub.c b/newlib/libc/machine/arm/strlen-stub.c
index 69cfa3d..8f87cba 100644
--- a/newlib/libc/machine/arm/strlen-stub.c
+++ b/newlib/libc/machine/arm/strlen-stub.c
@@ -168,7 +168,7 @@ strlen (const char* str)
        "addne len, len, #1\n\t"
 # endif
 #endif
-       "RETURN");
+       "bx lr\n\t");
 }
 #endif
 #endif
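
A side note on the guard used by the RETURN macro preserved in strcmp-armv4.S:
__ARM_ARCH and __ARM_ARCH_ISA_THUMB are standard ACLE predefines, so the same
test can be checked from C. A hedged sketch (hypothetical helper, not part of
the commit; assumes arm-none-eabi-gcc):

    /* return_insn_check.c -- prints which branch of the preserved RETURN
       macro applies for the current -march setting (hypothetical helper).  */
    #include <stdio.h>

    int
    main (void)
    {
    #if __ARM_ARCH <= 4 && __ARM_ARCH_ISA_THUMB == 0
      /* armv4 without Thumb: bx is unavailable, fall back to mov pc, lr.  */
      puts ("RETURN would expand to: mov pc, lr");
    #else
      /* armv4t and newer: bx lr is valid and interworking-safe.  */
      puts ("RETURN would expand to: bx lr");
    #endif
      return 0;
    }

Compiling with -march=armv4 versus -march=armv4t exercises the two branches;
an undefined __ARM_ARCH_ISA_THUMB evaluates to 0 in the #if, so the test also
behaves sensibly on toolchains that do not define it.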