author    | Alexey Brodkin <abrodkin@synopsys.com> | 2024-08-20 15:10:40 +0300
committer | Jeff Johnston <jjohnstn@redhat.com>    | 2024-08-20 18:23:20 -0400
commit    | 16accfa08d4b8d35314f682f79b8e3386e7d268b (patch)
tree      | bc005218222de9cc39875fdf3d3356a391a8acf3
parent    | 3e9f6a005c8df720dbdd42bba92df8081ee722c2 (diff)
arc: Remove @ from symbol references in assembly
There's no semantic change; this only makes the same code
compilable with the MetaWare toolchain, whose assembler treats
@x as the complete symbol name rather than stripping the @.
Signed-off-by: Alexey Brodkin <abrodkin@synopsys.com>
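To illustrate the difference, a minimal sketch using an instruction and symbol taken from the crt0.S hunk below (exact MetaWare diagnostics not shown): GNU as for ARC resolves both spellings to the same symbol, while the MetaWare assembler reads the @-prefixed form as the literal symbol name.

	; Old spelling: GNU as resolves @__stack_top to the symbol __stack_top,
	; but MetaWare looks for a symbol literally named "@__stack_top".
	mov	sp, @__stack_top

	; New spelling: both toolchains resolve this to the same symbol.
	mov	sp, __stack_top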
-rw-r--r-- | libgloss/arc/crt0.S                    | 46
-rw-r--r-- | libgloss/arc/gcrt0.S                   |  2
-rw-r--r-- | newlib/libc/machine/arc/memcpy-archs.S | 34
-rw-r--r-- | newlib/libc/machine/arc/strcmp-archs.S |  2

4 files changed, 42 insertions(+), 42 deletions(-)
diff --git a/libgloss/arc/crt0.S b/libgloss/arc/crt0.S
index 36b9c25..04fe82d 100644
--- a/libgloss/arc/crt0.S
+++ b/libgloss/arc/crt0.S
@@ -106,11 +106,11 @@ IVT_ENTRY(IRQ_20) ; 20 0x50 80
 #ifdef __ARC601__
 ; Startup code for the ARC601 processor
 __start:
-	mov gp, @__SDATA_BEGIN__
-	mov sp, @__stack_top	; Point to top of stack
+	mov gp, __SDATA_BEGIN__
+	mov sp, __stack_top	; Point to top of stack
 	mov r5, 0	; Zero value
-	mov_s r2, @__sbss_start	; r2 = start of the bss section
-	sub r3, @_end, r2	; r3 = size of the bss section in bytes
+	mov_s r2, __sbss_start	; r2 = start of the bss section
+	sub r3, _end, r2	; r3 = size of the bss section in bytes
 	asr_s r3, r3
 	asr_s r3, r3	; r3 = size of bss in words
@@ -144,11 +144,11 @@ __start:
 #if defined (__ARC_CODE_DENSITY__)
 	;; Initialize jli_base
-	sr @__JLI_TABLE__,[jli_base]
+	sr __JLI_TABLE__,[jli_base]
 #endif
-	mov gp, @__SDATA_BEGIN__
-	mov_s r2, @__sbss_start	; r2 = start of the bss section
-	sub r3, @_end, r2	; r3 = size of the bss section in bytes
+	mov gp, __SDATA_BEGIN__
+	mov_s r2, __sbss_start	; r2 = start of the bss section
+	sub r3, _end, r2	; r3 = size of the bss section in bytes
 	; set up the loop counter register to the size (in words) of the bss section
 #if defined (__ARC_BARREL_SHIFTER__)
 	asr.f lp_count, r3, 2
@@ -158,19 +158,19 @@ __start:
 #endif
 #if defined (__ARC600__)
 	; loop to zero out the bss. Enter loop only if lp_count != 0
-	lpnz @.Lend_zbss
+	lpnz .Lend_zbss
 	add r3, pcl, 20
 	sr r3, [2]	; LP_END
 	; initialize stack pointer, and this instruction has 2 words
-	mov sp, @__stack_top
+	mov sp, __stack_top
 	mov_s r3, 0
 	st.ab r3, [r2, 4]	; zero out the word
 .Lend_zbss:
 #else
-	mov sp, @__stack_top	; initialize stack pointer
+	mov sp, __stack_top	; initialize stack pointer
 	mov_s r3,0
 	; loop to zero out the bss. Enter loop only if lp_count != 0
-	lpnz @.Lend_zbss
+	lpnz .Lend_zbss
 	st.ab r3,[r2, 4]	; zero out the word
 	nop
 .Lend_zbss:
@@ -220,30 +220,30 @@ __start:
 #endif	/* ARCv2 */
 	;; Call constructors
-	jl @_init
+	jl _init
 	;;; Setup fini routines to be called from exit
-	mov_s r0, @_fini
-	jl @atexit
+	mov_s r0, _fini
+	jl atexit
 #ifdef PROFILE_SUPPORT
 	/* Defined in gcrt0.S. */
-	mov r0,@__start
-	mov r1,@_etext
-	jl @_monstartup
+	mov r0,__start
+	mov r1,_etext
+	jl _monstartup
 #endif /* PROFILE_SUPPORT */
 	; branch to main
 	mov fp,0	; initialize frame pointer
-	jl @__setup_argv_and_call_main
+	jl __setup_argv_and_call_main
 #ifdef PROFILE_SUPPORT
 	mov r13, r0	; Save return code
-	jl @_mcleanup
+	jl _mcleanup
 	mov r0, r13
 #endif /* PROFILE_SUPPORT */
 	; r0 contains exit code
-	j @exit
+	j exit
 	.size __start, .-__start
 ;;; arc-main-helper.o object can be used to replace this function and
@@ -258,7 +258,7 @@ __setup_argv_and_call_main:
 	; Call main with argc = 0 and *argv[] = 0
 	mov r0, 0
 	mov r1, 0
-	jl @main
+	jl main
 	pop_s blink
 	j_s [blink]
@@ -275,5 +275,5 @@ _exit_halt:
 	nop
 	nop
 #endif
-	b @_exit_halt
+	b _exit_halt
 	.align 4
diff --git a/libgloss/arc/gcrt0.S b/libgloss/arc/gcrt0.S
index 0ce6b63..0526b99 100644
--- a/libgloss/arc/gcrt0.S
+++ b/libgloss/arc/gcrt0.S
@@ -58,7 +58,7 @@ __mcount:
 	push r11
 	push r12
 	mov r0,blink
-	jl @_mcount_internal
+	jl _mcount_internal
 	pop r12
 	pop r11
 	pop r10
diff --git a/newlib/libc/machine/arc/memcpy-archs.S b/newlib/libc/machine/arc/memcpy-archs.S
index 3c477a7..e8a2a7e 100644
--- a/newlib/libc/machine/arc/memcpy-archs.S
+++ b/newlib/libc/machine/arc/memcpy-archs.S
@@ -95,12 +95,12 @@ ENTRY (memcpy)
 	; if size <= 8
 	cmp r2, 8
-	bls.d @.Lsmallchunk
+	bls.d .Lsmallchunk
 	mov.f lp_count, r2
 	and.f r4, r0, 0x03
 	rsub lp_count, r4, 4
-	lpnz @.Laligndestination
+	lpnz .Laligndestination
 	; LOOP BEGIN
 	ldb.ab r5, [r1,1]
 	sub r2, r2, 1
@@ -109,12 +109,12 @@ ENTRY (memcpy)
 	; Check the alignment of the source
 	and.f r4, r1, 0x03
-	bnz.d @.Lsourceunaligned
+	bnz.d .Lsourceunaligned
 	; CASE 0: Both source and destination are 32bit aligned
 	; Convert len to Dwords, unfold x4
 	lsr.f lp_count, r2, ZOLSHFT
-	lpnz @.Lcopy32_64bytes
+	lpnz .Lcopy32_64bytes
 	; LOOP START
 	LOADX (r6, r1)
 	PREFETCH_READ (r1)
@@ -130,7 +130,7 @@ ENTRY (memcpy)
 	and.f lp_count, r2, ZOLAND	;Last remaining 31 bytes
 .Lsmallchunk:
-	lpnz @.Lcopyremainingbytes
+	lpnz .Lcopyremainingbytes
 	; LOOP START
 	ldb.ab r5, [r1,1]
 	stb.ab r5, [r3,1]
@@ -141,10 +141,10 @@ ENTRY (memcpy)
 .Lsourceunaligned:
 	cmp r4, 2
-	beq.d @.LunalignedOffby2
+	beq.d .LunalignedOffby2
 	sub r2, r2, 1
-	bhi.d @.LunalignedOffby3
+	bhi.d .LunalignedOffby3
 	ldb.ab r5, [r1, 1]
 	; CASE 1: The source is unaligned, off by 1
@@ -159,7 +159,7 @@ ENTRY (memcpy)
 	or r5, r5, r6
 	; Both src and dst are aligned
-	lpnz @.Lcopy8bytes_1
+	lpnz .Lcopy8bytes_1
 	; LOOP START
 	ld.ab r6, [r1, 4]
 	prefetch [r1, 28]	;Prefetch the next read location
@@ -186,7 +186,7 @@ ENTRY (memcpy)
 	stb.ab r5, [r3, 1]
 	and.f lp_count, r2, 0x07	;Last 8bytes
-	lpnz @.Lcopybytewise_1
+	lpnz .Lcopybytewise_1
 	; LOOP START
 	ldb.ab r6, [r1,1]
 	stb.ab r6, [r3,1]
@@ -204,7 +204,7 @@ ENTRY (memcpy)
 #ifdef __BIG_ENDIAN__
 	asl.nz r5, r5, 16
 #endif
-	lpnz @.Lcopy8bytes_2
+	lpnz .Lcopy8bytes_2
 	; LOOP START
 	ld.ab r6, [r1, 4]
 	prefetch [r1, 28]	;Prefetch the next read location
@@ -229,7 +229,7 @@ ENTRY (memcpy)
 	sth.ab r5, [r3, 2]
 	and.f lp_count, r2, 0x07	;Last 8bytes
-	lpnz @.Lcopybytewise_2
+	lpnz .Lcopybytewise_2
 	; LOOP START
 	ldb.ab r6, [r1,1]
 	stb.ab r6, [r3,1]
@@ -246,7 +246,7 @@ ENTRY (memcpy)
 #ifdef __BIG_ENDIAN__
 	asl.ne r5, r5, 24
 #endif
-	lpnz @.Lcopy8bytes_3
+	lpnz .Lcopy8bytes_3
 	; LOOP START
 	ld.ab r6, [r1, 4]
 	prefetch [r1, 28]	;Prefetch the next read location
@@ -271,7 +271,7 @@ ENTRY (memcpy)
 	stb.ab r5, [r3, 1]
 	and.f lp_count, r2, 0x07	;Last 8bytes
-	lpnz @.Lcopybytewise_3
+	lpnz .Lcopybytewise_3
 	; LOOP START
 	ldb.ab r6, [r1,1]
 	stb.ab r6, [r3,1]
@@ -294,12 +294,12 @@ ENTRY(memcpy)
 	;;; if size <= 8
 	cmp r2, 8
-	bls.d @.Lsmallchunk
+	bls.d .Lsmallchunk
 	mov.f lp_count, r2
 	;;; Convert len to Dwords, unfold x4
 	lsr.f lp_count, r2, ZOLSHFT
-	lpnz @.Lcopyfast
+	lpnz .Lcopyfast
 	;; LOOP START
 	LOADX (r6, r1)
 	PREFETCH_READ (r1)
@@ -316,7 +316,7 @@ ENTRY(memcpy)
 #ifdef __ARC_LL64__
 	and r2, r2, ZOLAND	;Remaining 31 bytes
 	lsr.f lp_count, r2, 3	;Convert to 64-bit words.
-	lpnz @.Lcopy64b
+	lpnz .Lcopy64b
 	;; LOOP START
 	ldd.ab r6,[r1,8]
 	std.ab r6,[r3,8]
@@ -328,7 +328,7 @@ ENTRY(memcpy)
 #endif
 .Lsmallchunk:
-	lpnz @.Lcopyremainingbytes
+	lpnz .Lcopyremainingbytes
 	;; LOOP START
 	ldb.ab r5, [r1,1]
 	stb.ab r5, [r3,1]
diff --git a/newlib/libc/machine/arc/strcmp-archs.S b/newlib/libc/machine/arc/strcmp-archs.S
index 543cebc..3226c37 100644
--- a/newlib/libc/machine/arc/strcmp-archs.S
+++ b/newlib/libc/machine/arc/strcmp-archs.S
@@ -40,7 +40,7 @@ ENTRY (strcmp)
 	or r2, r0, r1
 	bmsk_s r2, r2, 1
-	brne r2, 0, @.Lcharloop
+	brne r2, 0, .Lcharloop
 	; s1 and s2 are word aligned
 	ld.ab r2, [r0, 4]