author    Andreas Schwab <schwab@suse.de>  1999-06-14 00:59:40 +0000
committer Andreas Schwab <schwab@suse.de>  1999-06-14 00:59:40 +0000
commit    c2df6ff9c78224caf400e7ea9a1831086cc9523b (patch)
tree      eac0f32c29dfdec76beddb6dfade8bdb9eb3cc70 /sysdeps
parent    2e92188db9129756d7be46cfa0d53d900e3d1135 (diff)
download  glibc-c2df6ff9c78224caf400e7ea9a1831086cc9523b.zip
          glibc-c2df6ff9c78224caf400e7ea9a1831086cc9523b.tar.gz
          glibc-c2df6ff9c78224caf400e7ea9a1831086cc9523b.tar.bz2
New files, optimized for m68k.
Diffstat (limited to 'sysdeps')
-rw-r--r--  sysdeps/m68k/memchr.S     228
-rw-r--r--  sysdeps/m68k/rawmemchr.S  179
-rw-r--r--  sysdeps/m68k/strchr.S     257
-rw-r--r--  sysdeps/m68k/strchrnul.S  250
4 files changed, 914 insertions, 0 deletions
diff --git a/sysdeps/m68k/memchr.S b/sysdeps/m68k/memchr.S
new file mode 100644
index 0000000..a1599f8
--- /dev/null
+++ b/sysdeps/m68k/memchr.S
@@ -0,0 +1,228 @@
+/* memchr (str, ch, n) -- Return pointer to first occurrence of CH in the
+ first N bytes of STR.
+ For Motorola 68000.
+ Copyright (C) 1999 Free Software Foundation, Inc.
+ This file is part of the GNU C Library.
+ Contributed by Andreas Schwab <schwab@gnu.org>.
+
+ The GNU C Library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public License as
+ published by the Free Software Foundation; either version 2 of the
+ License, or (at your option) any later version.
+
+ The GNU C Library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public
+ License along with the GNU C Library; see the file COPYING.LIB. If not,
+ write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ Boston, MA 02111-1307, USA. */
+
+#include <sysdep.h>
+#include "asm-syntax.h"
+
+ TEXT
+ENTRY(memchr)
+ /* Save the callee-saved registers we use. */
+ moveml R(d2)-R(d4),MEM_PREDEC(sp)
+
+ /* Get string pointer, character and length. */
+ movel MEM_DISP(sp,16),R(a0)
+ moveb MEM_DISP(sp,23),R(d0)
+ movel MEM_DISP(sp,24),R(d4)
+
+ /* Check if at least four bytes left to search. */
+ moveql #4,R(d1)
+ cmpl R(d1),R(d4)
+ bcs L(L6)
+
+ /* Distribute the character to all bytes of a longword. */
+ movel R(d0),R(d1)
+ lsll #8,R(d1)
+ moveb R(d0),R(d1)
+ movel R(d1),R(d0)
+ swap R(d0)
+ movew R(d1),R(d0)
+
+ /* First search for the character one byte at a time until the
+ pointer is aligned to a longword boundary. */
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+ subql #1,R(d4)
+ beq L(L7)
+
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+ subql #1,R(d4)
+ beq L(L7)
+
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+ subql #1,R(d4)
+ beq L(L7)
+
+L(L1:)
+ /* Load the magic bits. Unlike the generic implementation we can
+ use the carry bit as the fourth hole. */
+ movel #0xfefefeff,R(d3)
+
+ /* We exit the loop if adding MAGIC_BITS to LONGWORD fails to
+ change any of the hole bits of LONGWORD.
+
+ 1) Is this safe? Will it catch all the zero bytes?
+ Suppose there is a byte with all zeros. Any carry bits
+ propagating from its left will fall into the hole at its
+ least significant bit and stop. Since there will be no
+ carry from its most significant bit, the LSB of the
+ byte to the left will be unchanged, and the zero will be
+ detected.
+
+ 2) Is this worthwhile? Will it ignore everything except
+ zero bytes? Suppose every byte of LONGWORD has a bit set
+ somewhere. There will be a carry into bit 8. If bit 8
+ is set, this will carry into bit 16. If bit 8 is clear,
+ one of bits 9-15 must be set, so there will be a carry
+ into bit 16. Similarly, there will be a carry into bit
+ 24. If one of bits 24-31 is set, there will be a carry
+ into bit 32 (=carry flag), so all of the hole bits will
+ be changed.
+
+ 3) But wait! Aren't we looking for C, not zero?
+ Good point. So what we do is XOR LONGWORD with a longword,
+ each of whose bytes is C. This turns each byte that is C
+ into a zero. */
+
+ /* Still at least 4 bytes to search? */
+ subql #4,R(d4)
+ bcs L(L6)
+
+L(L2:)
+ /* Get the longword in question. */
+ movel MEM_POSTINC(a0),R(d1)
+ /* XOR with the byte we search for. */
+ eorl R(d0),R(d1)
+
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not C. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit. */
+ bcc L(L8)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits. */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word equals
+ C. */
+ bne L(L8)
+
+ /* Still at least 4 bytes to search? */
+ subql #4,R(d4)
+ bcs L(L6)
+
+ /* Get the longword in question. */
+ movel MEM_POSTINC(a0),R(d1)
+ /* XOR with the byte we search for. */
+ eorl R(d0),R(d1)
+
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not C. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit. */
+ bcc L(L8)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word equals
+ C. */
+ bne L(L8)
+
+ /* Still at least 4 bytes to search? */
+ subql #4,R(d4)
+ bcc L(L2)
+
+L(L6:)
+ /* Search one byte at a time in the remaining less than 4 bytes. */
+ andw #3,R(d4)
+ beq L(L7)
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+ subqw #1,R(d4)
+ beq L(L7)
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+ subqw #1,R(d4)
+ beq L(L7)
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+
+L(L7:)
+ /* Return NULL. */
+ clrl R(d0)
+ movel R(d0),R(a0)
+ moveml MEM_POSTINC(sp),R(d2)-R(d4)
+ rts
+
+L(L8:)
+ /* We have a hit. Check to see which byte it was. First
+ compensate for the autoincrement in the loop. */
+ subql #4,R(a0)
+
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+ /* Otherwise the fourth byte must equal C. */
+L(L9:)
+ movel R(a0),R(d0)
+ moveml MEM_POSTINC(sp),R(d2)-R(d4)
+ rts
+END(memchr)
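
The longword scan above is easier to follow next to a C rendition. The sketch below is illustrative only (MAGIC, longword_has_match and memchr_sketch are names of my choosing, not glibc's): it mirrors the byte-broadcast of C, the byte-at-a-time alignment prologue, the 0xfefefeff carry trick, and the final byte-wise pass that locates the exact position. A 64-bit add stands in for the m68k carry flag that the assembly tests with bcc.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define MAGIC 0xfefefeffu  /* zero "holes" at bits 8, 16 and 24; the CPU carry
                              flag plays the part of the fourth hole */

/* Return nonzero if some byte of WORD equals the byte replicated in
   CHARMASK (pass 0 to test for a NUL byte).  */
static int
longword_has_match (uint32_t word, uint32_t charmask)
{
  uint32_t xored = word ^ charmask;         /* bytes equal to C become 0 */
  uint64_t sum = (uint64_t) xored + MAGIC;  /* 64-bit add so the carry is visible */
  if ((sum >> 32) == 0)                     /* no carry out of bit 31: top byte hit */
    return 1;
  uint32_t bits = ((uint32_t) sum ^ xored) | MAGIC;
  return bits + 1 != 0;                     /* an inner hole missed a carry: hit */
}

void *
memchr_sketch (const void *s, int c_in, size_t n)
{
  const unsigned char *p = s;
  unsigned char c = (unsigned char) c_in;
  uint32_t charmask = c | ((uint32_t) c << 8);
  charmask |= charmask << 16;               /* distribute C to all four bytes */

  /* Search byte-wise until P is aligned to a longword boundary.  */
  while (n > 0 && ((uintptr_t) p & 3) != 0)
    {
      if (*p == c)
        return (void *) p;
      p++, n--;
    }

  /* Test four bytes at a time with the magic-bits trick.  */
  while (n >= 4)
    {
      uint32_t word;
      memcpy (&word, p, 4);
      if (longword_has_match (word, charmask))
        break;                              /* the hit is inside this word */
      p += 4, n -= 4;
    }

  /* Locate the exact byte, or scan the short tail.  */
  while (n > 0)
    {
      if (*p == c)
        return (void *) p;
      p++, n--;
    }
  return NULL;
}
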
diff --git a/sysdeps/m68k/rawmemchr.S b/sysdeps/m68k/rawmemchr.S
new file mode 100644
index 0000000..74eb1ca
--- /dev/null
+++ b/sysdeps/m68k/rawmemchr.S
@@ -0,0 +1,179 @@
+/* rawmemchr (str, ch) -- Return pointer to first occurrence of CH in STR.
+ For Motorola 68000.
+ Copyright (C) 1999 Free Software Foundation, Inc.
+ This file is part of the GNU C Library.
+ Contributed by Andreas Schwab <schwab@gnu.org>.
+
+ The GNU C Library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public License as
+ published by the Free Software Foundation; either version 2 of the
+ License, or (at your option) any later version.
+
+ The GNU C Library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public
+ License along with the GNU C Library; see the file COPYING.LIB. If not,
+ write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ Boston, MA 02111-1307, USA. */
+
+#include <sysdep.h>
+#include "asm-syntax.h"
+
+ TEXT
+ENTRY(__rawmemchr)
+ /* Save the callee-saved registers we use. */
+ movel R(d2),MEM_PREDEC(sp)
+ movel R(d3),MEM_PREDEC(sp)
+
+ /* Get string pointer and character. */
+ movel MEM_DISP(sp,12),R(a0)
+ moveb MEM_DISP(sp,19),R(d0)
+
+ /* Distribute the character to all bytes of a longword. */
+ movel R(d0),R(d1)
+ lsll #8,R(d1)
+ moveb R(d0),R(d1)
+ movel R(d1),R(d0)
+ swap R(d0)
+ movew R(d1),R(d0)
+
+ /* First search for the character one byte at a time until the
+ pointer is aligned to a longword boundary. */
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+L(L1:)
+ /* Load the magic bits. Unlike the generic implementation we can
+ use the carry bit as the fourth hole. */
+ movel #0xfefefeff,R(d3)
+
+ /* We exit the loop if adding MAGIC_BITS to LONGWORD fails to
+ change any of the hole bits of LONGWORD.
+
+ 1) Is this safe? Will it catch all the zero bytes?
+ Suppose there is a byte with all zeros. Any carry bits
+ propagating from its left will fall into the hole at its
+ least significant bit and stop. Since there will be no
+ carry from its most significant bit, the LSB of the
+ byte to the left will be unchanged, and the zero will be
+ detected.
+
+ 2) Is this worthwhile? Will it ignore everything except
+ zero bytes? Suppose every byte of LONGWORD has a bit set
+ somewhere. There will be a carry into bit 8. If bit 8
+ is set, this will carry into bit 16. If bit 8 is clear,
+ one of bits 9-15 must be set, so there will be a carry
+ into bit 16. Similarly, there will be a carry into bit
+ 24. If one of bits 24-31 is set, there will be a carry
+ into bit 32 (=carry flag), so all of the hole bits will
+ be changed.
+
+ 3) But wait! Aren't we looking for C, not zero?
+ Good point. So what we do is XOR LONGWORD with a longword,
+ each of whose bytes is C. This turns each byte that is C
+ into a zero. */
+
+L(L2:)
+ /* Get the longword in question. */
+ movel MEM_POSTINC(a0),R(d1)
+ /* XOR with the byte we search for. */
+ eorl R(d0),R(d1)
+
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not C. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit. */
+ bcc L(L8)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits. */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word equals
+ C. */
+ bne L(L8)
+
+ /* Get the longword in question. */
+ movel MEM_POSTINC(a0),R(d1)
+ /* XOR with the byte we search for. */
+ eorl R(d0),R(d1)
+
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not C. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit. */
+ bcc L(L8)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word equals
+ C. */
+ beq L(L2)
+
+L(L8:)
+ /* We have a hit. Check to see which byte it was. First
+ compensate for the autoincrement in the loop. */
+ subql #4,R(a0)
+
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+ cmpb MEM(a0),R(d0)
+ beq L(L9)
+ addql #1,R(a0)
+
+ /* Otherwise the fourth byte must equal C. */
+L(L9:)
+ movel R(a0),R(d0)
+ movel MEM_POSTINC(sp),R(d3)
+ movel MEM_POSTINC(sp),R(d2)
+ rts
+END(__rawmemchr)
+
+weak_alias (__rawmemchr, rawmemchr)
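
For reference, __rawmemchr is the same scan without any length bookkeeping: the caller guarantees that CH occurs, so there is no count to decrement and no tail loop. A hedged C sketch, reusing the headers and the longword_has_match helper from the memchr sketch above (all names illustrative):

void *
rawmemchr_sketch (const void *s, int c_in)
{
  const unsigned char *p = s;
  unsigned char c = (unsigned char) c_in;
  uint32_t charmask = c | ((uint32_t) c << 8);
  charmask |= charmask << 16;

  while (((uintptr_t) p & 3) != 0)      /* align byte-wise */
    {
      if (*p == c)
        return (void *) p;
      p++;
    }
  for (;;)                              /* scan a longword at a time */
    {
      uint32_t word;
      memcpy (&word, p, 4);
      if (longword_has_match (word, charmask))
        break;
      p += 4;
    }
  while (*p != c)                       /* pinpoint the byte in the word */
    p++;
  return (void *) p;
}
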
diff --git a/sysdeps/m68k/strchr.S b/sysdeps/m68k/strchr.S
new file mode 100644
index 0000000..45d02d0
--- /dev/null
+++ b/sysdeps/m68k/strchr.S
@@ -0,0 +1,257 @@
+/* strchr (str, ch) -- Return pointer to first occurrence of CH in STR.
+ For Motorola 68000.
+ Copyright (C) 1999 Free Software Foundation, Inc.
+ This file is part of the GNU C Library.
+ Contributed by Andreas Schwab <schwab@gnu.org>.
+
+ The GNU C Library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public License as
+ published by the Free Software Foundation; either version 2 of the
+ License, or (at your option) any later version.
+
+ The GNU C Library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public
+ License along with the GNU C Library; see the file COPYING.LIB. If not,
+ write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ Boston, MA 02111-1307, USA. */
+
+#include <sysdep.h>
+#include "asm-syntax.h"
+
+ TEXT
+ENTRY(strchr)
+ /* Save the callee-saved registers we use. */
+ movel R(d2),MEM_PREDEC(sp)
+ movel R(d3),MEM_PREDEC(sp)
+
+ /* Get string pointer and character. */
+ movel MEM_DISP(sp,12),R(a0)
+ moveb MEM_DISP(sp,19),R(d0)
+
+ /* Distribute the character to all bytes of a longword. */
+ movel R(d0),R(d1)
+ lsll #8,R(d1)
+ moveb R(d0),R(d1)
+ movel R(d1),R(d0)
+ swap R(d0)
+ movew R(d1),R(d0)
+
+ /* First search for the character one byte at a time until the
+ pointer is aligned to a longword boundary. */
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L3)
+ addql #1,R(a0)
+
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L3)
+ addql #1,R(a0)
+
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L3)
+ addql #1,R(a0)
+
+L(L1:)
+ /* Load the magic bits. Unlike the generic implementation we can
+ use the carry bit as the fourth hole. */
+ movel #0xfefefeff,R(d3)
+
+ /* We exit the loop if adding MAGIC_BITS to LONGWORD fails to
+ change any of the hole bits of LONGWORD.
+
+ 1) Is this safe? Will it catch all the zero bytes?
+ Suppose there is a byte with all zeros. Any carry bits
+ propagating from its left will fall into the hole at its
+ least significant bit and stop. Since there will be no
+ carry from its most significant bit, the LSB of the
+ byte to the left will be unchanged, and the zero will be
+ detected.
+
+ 2) Is this worthwhile? Will it ignore everything except
+ zero bytes? Suppose every byte of LONGWORD has a bit set
+ somewhere. There will be a carry into bit 8. If bit 8
+ is set, this will carry into bit 16. If bit 8 is clear,
+ one of bits 9-15 must be set, so there will be a carry
+ into bit 16. Similarly, there will be a carry into bit
+ 24. If one of bits 24-31 is set, there will be a carry
+ into bit 32 (=carry flag), so all of the hole bits will
+ be changed.
+
+ 3) But wait! Aren't we looking for C, not zero?
+ Good point. So what we do is XOR LONGWORD with a longword,
+ each of whose bytes is C. This turns each byte that is C
+ into a zero. */
+
+L(L2:)
+ /* Get the longword in question. */
+ movel MEM_POSTINC(a0),R(d1)
+ /* XOR with the byte we search for. */
+ eorl R(d0),R(d1)
+
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not C. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit. */
+ bcc L(L8)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits. */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word equals
+ C. */
+ bne L(L8)
+
+ /* Next look for a NUL byte.
+ Restore original longword without reload. */
+ eorl R(d0),R(d1)
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not NUL. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit, and return NULL. */
+ bcc L(L3)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits. */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word was NUL
+ and we return NULL. Otherwise continue with the next longword. */
+ bne L(L3)
+
+ /* Get the longword in question. */
+ movel MEM_POSTINC(a0),R(d1)
+ /* XOR with the byte we search for. */
+ eorl R(d0),R(d1)
+
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not C. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit. */
+ bcc L(L8)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word equals
+ C. */
+ bne L(L8)
+
+ /* Next look for a NUL byte.
+ Restore original longword without reload. */
+ eorl R(d0),R(d1)
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not NUL. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit, and return NULL. */
+ bcc L(L3)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word was NUL
+ and we return NULL. Otherwise continue with the next longword. */
+ beq L(L2)
+
+L(L3:)
+ /* Return NULL. */
+ clrl R(d0)
+ movel R(d0),R(a0)
+ movel MEM_POSTINC(sp),R(d3)
+ movel MEM_POSTINC(sp),R(d2)
+ rts
+
+L(L8:)
+ /* We have a hit. Check to see which byte it was. First
+ compensate for the autoincrement in the loop. */
+ subql #4,R(a0)
+
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L3)
+ addql #1,R(a0)
+
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L3)
+ addql #1,R(a0)
+
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L3)
+ addql #1,R(a0)
+
+ /* Otherwise the fourth byte must equal C. */
+L(L9:)
+ movel R(a0),R(d0)
+ movel MEM_POSTINC(sp),R(d3)
+ movel MEM_POSTINC(sp),R(d2)
+ rts
+END(strchr)
+
+weak_alias (strchr, index)
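
strchr differs from the two routines above in that every longword is tested twice: once XOR'ed with the character mask (a hit means some byte equals C) and once for a NUL byte, which the assembly obtains cheaply by XOR-ing the mask back out of the register instead of reloading the word. A rough C equivalent follows, again reusing longword_has_match from the memchr sketch (a charmask of 0 tests for NUL); the trailing byte loop decides whether C or the terminator comes first inside the word. Names are illustrative.

char *
strchr_sketch (const char *s, int c_in)
{
  const unsigned char *p = (const unsigned char *) s;
  unsigned char c = (unsigned char) c_in;
  uint32_t charmask = c | ((uint32_t) c << 8);
  charmask |= charmask << 16;

  /* Align byte-wise, watching for both C and the terminator.  */
  while (((uintptr_t) p & 3) != 0)
    {
      if (*p == c)
        return (char *) p;
      if (*p == '\0')
        return NULL;
      p++;
    }
  /* Each longword is tested for a C byte and for a NUL byte.  */
  for (;;)
    {
      uint32_t word;
      memcpy (&word, p, 4);
      if (longword_has_match (word, charmask)
          || longword_has_match (word, 0))
        break;
      p += 4;
    }
  /* Walk the word to see which of C or the NUL comes first.  */
  for (;;)
    {
      if (*p == c)
        return (char *) p;
      if (*p == '\0')
        return NULL;
      p++;
    }
}
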
diff --git a/sysdeps/m68k/strchrnul.S b/sysdeps/m68k/strchrnul.S
new file mode 100644
index 0000000..45e7616
--- /dev/null
+++ b/sysdeps/m68k/strchrnul.S
@@ -0,0 +1,250 @@
+/* strchrnul (str, ch) -- Return pointer to first occurrence of CH in STR
+ or the final NUL byte.
+ For Motorola 68000.
+ Copyright (C) 1999 Free Software Foundation, Inc.
+ This file is part of the GNU C Library.
+ Contributed by Andreas Schwab <schwab@gnu.org>.
+
+ The GNU C Library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public License as
+ published by the Free Software Foundation; either version 2 of the
+ License, or (at your option) any later version.
+
+ The GNU C Library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public
+ License along with the GNU C Library; see the file COPYING.LIB. If not,
+ write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ Boston, MA 02111-1307, USA. */
+
+#include <sysdep.h>
+#include "asm-syntax.h"
+
+ TEXT
+ENTRY(__strchrnul)
+ /* Save the callee-saved registers we use. */
+ movel R(d2),MEM_PREDEC(sp)
+ movel R(d3),MEM_PREDEC(sp)
+
+ /* Get string pointer and character. */
+ movel MEM_DISP(sp,12),R(a0)
+ moveb MEM_DISP(sp,19),R(d0)
+
+ /* Distribute the character to all bytes of a longword. */
+ movel R(d0),R(d1)
+ lsll #8,R(d1)
+ moveb R(d0),R(d1)
+ movel R(d1),R(d0)
+ swap R(d0)
+ movew R(d1),R(d0)
+
+ /* First search for the character one byte at a time until the
+ pointer is aligned to a longword boundary. */
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L9)
+ addql #1,R(a0)
+
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L9)
+ addql #1,R(a0)
+
+ movel R(a0),R(d1)
+ andw #3,R(d1)
+ beq L(L1)
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L9)
+ addql #1,R(a0)
+
+L(L1:)
+ /* Load the magic bits. Unlike the generic implementation we can
+ use the carry bit as the fourth hole. */
+ movel #0xfefefeff,R(d3)
+
+ /* We exit the loop if adding MAGIC_BITS to LONGWORD fails to
+ change any of the hole bits of LONGWORD.
+
+ 1) Is this safe? Will it catch all the zero bytes?
+ Suppose there is a byte with all zeros. Any carry bits
+ propagating from its left will fall into the hole at its
+ least significant bit and stop. Since there will be no
+ carry from its most significant bit, the LSB of the
+ byte to the left will be unchanged, and the zero will be
+ detected.
+
+ 2) Is this worthwhile? Will it ignore everything except
+ zero bytes? Suppose every byte of LONGWORD has a bit set
+ somewhere. There will be a carry into bit 8. If bit 8
+ is set, this will carry into bit 16. If bit 8 is clear,
+ one of bits 9-15 must be set, so there will be a carry
+ into bit 16. Similarly, there will be a carry into bit
+ 24. If one of bits 24-31 is set, there will be a carry
+ into bit 32 (=carry flag), so all of the hole bits will
+ be changed.
+
+ 3) But wait! Aren't we looking for C, not zero?
+ Good point. So what we do is XOR LONGWORD with a longword,
+ each of whose bytes is C. This turns each byte that is C
+ into a zero. */
+
+L(L2:)
+ /* Get the longword in question. */
+ movel MEM_POSTINC(a0),R(d1)
+ /* XOR with the byte we search for. */
+ eorl R(d0),R(d1)
+
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not C. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit. */
+ bcc L(L8)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits. */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word equals
+ C. */
+ bne L(L8)
+
+ /* Next look for a NUL byte.
+ Restore original longword without reload. */
+ eorl R(d0),R(d1)
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not NUL. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit. */
+ bcc L(L8)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits. */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word was
+ NUL. Otherwise continue with the next longword. */
+ bne L(L8)
+
+ /* Get the longword in question. */
+ movel MEM_POSTINC(a0),R(d1)
+ /* XOR with the byte we search for. */
+ eorl R(d0),R(d1)
+
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not C. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit. */
+ bcc L(L8)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word equals
+ C. */
+ bne L(L8)
+
+ /* Next look for a NUL byte.
+ Restore original longword without reload. */
+ eorl R(d0),R(d1)
+ /* Add the magic value. We get carry bits reported for each byte
+ which is not NUL. */
+ movel R(d3),R(d2)
+ addl R(d1),R(d2)
+
+ /* Check the fourth carry bit before it is clobbered by the next
+ XOR. If it is not set we have a hit. */
+ bcc L(L8)
+
+ /* We are only interested in carry bits that change due to the
+ previous add, so remove original bits */
+ eorl R(d1),R(d2)
+
+ /* Now test for the other three overflow bits.
+ Set all non-carry bits. */
+ orl R(d3),R(d2)
+ /* Add 1 to get zero if all carry bits were set. */
+ addql #1,R(d2)
+
+ /* If we don't get zero then at least one byte of the word was
+ NUL. Otherwise continue with the next longword. */
+ beq L(L2)
+
+L(L8:)
+ /* We have a hit. Check to see which byte it was. First
+ compensate for the autoincrement in the loop. */
+ subql #4,R(a0)
+
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L9)
+ addql #1,R(a0)
+
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L9)
+ addql #1,R(a0)
+
+ moveb MEM(a0),R(d1)
+ cmpb R(d0),R(d1)
+ beq L(L9)
+ tstb R(d1)
+ beq L(L9)
+ addql #1,R(a0)
+
+ /* Otherwise the fourth byte must equal C or be NUL. */
+L(L9:)
+ movel R(a0),R(d0)
+ movel MEM_POSTINC(sp),R(d3)
+ movel MEM_POSTINC(sp),R(d2)
+ rts
+END(__strchrnul)
+
+weak_alias (__strchrnul, strchrnul)
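
__strchrnul runs the same double test as strchr, but both outcomes fall through to the same epilogue, so the routine simply returns wherever the scan stopped. The hedged sketch below (illustrative names, helper and headers as in the memchr sketch) also shows the usual layering: once strchrnul exists, strchr is a two-line wrapper that maps a stop at the terminator to NULL.

char *
strchrnul_sketch (const char *s, int c_in)
{
  const unsigned char *p = (const unsigned char *) s;
  unsigned char c = (unsigned char) c_in;
  uint32_t charmask = c | ((uint32_t) c << 8);
  charmask |= charmask << 16;

  while (((uintptr_t) p & 3) != 0)
    {
      if (*p == c || *p == '\0')
        return (char *) p;
      p++;
    }
  for (;;)
    {
      uint32_t word;
      memcpy (&word, p, 4);
      if (longword_has_match (word, charmask)
          || longword_has_match (word, 0))
        break;
      p += 4;
    }
  while (*p != c && *p != '\0')
    p++;
  return (char *) p;
}

/* With strchrnul in hand, strchr is a thin wrapper: NULL only when the
   scan stopped at the terminator rather than at C.  */
char *
strchr_via_strchrnul (const char *s, int c)
{
  char *r = strchrnul_sketch (s, c);
  return *r == (char) c ? r : NULL;
}
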