author     m fally <marlene.fally@gmail.com>    2025-04-10 13:23:08 +0200
committer  Jeff Johnston <jjohnstn@redhat.com>  2025-04-11 16:48:58 -0400
commit     dde1d9a6f04cf7f6495034f62740a09e64588ebc
tree       1a7f684b9bc1fa4e1b7fd4dc0e70926d42b9d83b
parent     b7106be22824061c6164f9c3c544ddf023a1ee48
RISC-V: Size optimized versions: Replace add with addi
Replace add instructions with addi where applicable in the
size-optimized versions of memmove(), memset(), memcpy(), and
strcmp(). The assembler already accepts add with an immediate
operand and emits the addi encoding for it, so the generated code
is unchanged; the replacement is made purely for syntactic
accuracy.
Reviewed-by: Christian Herber <christian.herber@oss.nxp.com>
Signed-off-by: m fally <marlene.fally@gmail.com>
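
For reviewers unfamiliar with the alias: add only exists as an R-type
(register-register) instruction in the RISC-V ISA; when given an
immediate third operand, GNU as accepts it and emits the I-type addi
encoding instead. A minimal sketch of the equivalence (the demo label
is hypothetical, not part of the patch):

	.text
	.globl	demo
demo:
	# Alias form: the assembler rewrites add with an immediate
	# third operand into the I-type addi instruction.
	add	a2, a2, -1
	# Canonical form: names the I-type opcode directly, matching
	# what the ISA manual defines and what disassemblers print.
	addi	a2, a2, -1
	ret

Assembling this and disassembling it with objdump should show the
identical encoding (addi a2, a2, -1) for both lines, which is why the
patch can state that the functions themselves are unaffected.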
 newlib/libc/machine/riscv/memcpy-asm.S | 6 +++---
 newlib/libc/machine/riscv/memmove.S    | 2 +-
 newlib/libc/machine/riscv/memset.S     | 4 ++--
 newlib/libc/machine/riscv/strcmp.S     | 4 ++--
 4 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/newlib/libc/machine/riscv/memcpy-asm.S b/newlib/libc/machine/riscv/memcpy-asm.S
index e9fe381..2771285 100644
--- a/newlib/libc/machine/riscv/memcpy-asm.S
+++ b/newlib/libc/machine/riscv/memcpy-asm.S
@@ -20,9 +20,9 @@ memcpy:
 1:
 	lbu	a4, 0(a1)
 	sb	a4, 0(a3)
-	add	a2, a2, -1
-	add	a3, a3, 1
-	add	a1, a1, 1
+	addi	a2, a2, -1
+	addi	a3, a3, 1
+	addi	a1, a1, 1
 	bnez	a2, 1b
 
 2:
diff --git a/newlib/libc/machine/riscv/memmove.S b/newlib/libc/machine/riscv/memmove.S
index 6ecad9a..061472c 100644
--- a/newlib/libc/machine/riscv/memmove.S
+++ b/newlib/libc/machine/riscv/memmove.S
@@ -29,7 +29,7 @@ memmove:
 	add	a1, a1, a3
 .Lcopy:
 	lbu	a5, 0(a1)
-	add	a2, a2, -1	/* copy bytes as long as a2 (= the number of bytes to be copied) > 0. the increment is done here to relax the RAW dependency between load and store */
+	addi	a2, a2, -1	/* copy bytes as long as a2 (= the number of bytes to be copied) > 0. the increment is done here to relax the RAW dependency between load and store */
 	sb	a5, 0(a4)
 	bnez	a2, .Lincrement
 
diff --git a/newlib/libc/machine/riscv/memset.S b/newlib/libc/machine/riscv/memset.S
index 943835d..3d207e7 100644
--- a/newlib/libc/machine/riscv/memset.S
+++ b/newlib/libc/machine/riscv/memset.S
@@ -19,8 +19,8 @@ memset:
 
 .Lset:
 	sb	a1, 0(a3)
-	add	a2, a2, -1
-	add	a3, a3, 1
+	addi	a2, a2, -1
+	addi	a3, a3, 1
 	bnez	a2, .Lset
 
 .Ldone:
diff --git a/newlib/libc/machine/riscv/strcmp.S b/newlib/libc/machine/riscv/strcmp.S
index cc29b7b..5d3370f 100644
--- a/newlib/libc/machine/riscv/strcmp.S
+++ b/newlib/libc/machine/riscv/strcmp.S
@@ -19,8 +19,8 @@ strcmp:
 
 1:
 	lbu	a2, 0(a0)
 	lbu	a3, 0(a1)
-	add	a0, a0, 1
-	add	a1, a1, 1
+	addi	a0, a0, 1
+	addi	a1, a1, 1
 	bne	a2, a3, 2f
 	bnez	a2, 1b
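
A scheduling detail the memmove hunk preserves is worth spelling out:
as its inline comment notes, the counter decrement sits between the
dependent load and store on purpose. A sketch of the effect
(illustrative only, not part of the patch):

	# Back to back, sb needs a5 in the cycle immediately after lbu
	# produces it (a load-use RAW hazard on simple in-order pipelines):
	lbu	a5, 0(a1)
	sb	a5, 0(a4)

	# With the independent decrement scheduled in between, the
	# pipeline gets an extra cycle to make the loaded byte available:
	lbu	a5, 0(a1)
	addi	a2, a2, -1
	sb	a5, 0(a4)

Switching add to addi keeps this placement intact, so the
latency-hiding property of the original code is unchanged.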