author     Sofiane Naci <sofiane.naci@arm.com>    2013-01-15 15:49:13 +0000
committer  Sofiane Naci <sofiane@gcc.gnu.org>     2013-01-15 15:49:13 +0000
commit     e0f8b6a0a070b748cf1666bc6eec6701453d8e2f (patch)
tree       3b8cdebc20d73d8521ee1a2d1c8850712ca88a08 /libgcc
parent     93aea671e273bb1a5166b51d307c656d81e00fe5 (diff)
[AARCH64] Fix __clear_cache.
From-SVN: r195203
Diffstat (limited to 'libgcc')
-rw-r--r--  libgcc/ChangeLog                       5
-rw-r--r--  libgcc/config/aarch64/sync-cache.c    10
2 files changed, 13 insertions, 2 deletions
diff --git a/libgcc/ChangeLog b/libgcc/ChangeLog
index 3f8337e..7750449 100644
--- a/libgcc/ChangeLog
+++ b/libgcc/ChangeLog
@@ -1,3 +1,8 @@
+2013-01-15  Sofiane Naci  <sofiane.naci@arm.com>
+
+	* config/aarch64/sync-cache.c (__aarch64_sync_cache_range): Update
+	loop start address for cache clearing.
+
 2013-01-14  Georg-Johann Lay  <avr@gjlay.de>
 
 	* config/avr/lib1funcs.S: Remove trailing blanks.
diff --git a/libgcc/config/aarch64/sync-cache.c b/libgcc/config/aarch64/sync-cache.c
index d7b621e..2512cb8 100644
--- a/libgcc/config/aarch64/sync-cache.c
+++ b/libgcc/config/aarch64/sync-cache.c
@@ -39,7 +39,10 @@ __aarch64_sync_cache_range (const void *base, const void *end)
      instruction cache fetches the updated data.  'end' is exclusive,
      as per the GNU definition of __clear_cache.  */
 
-  for (address = base; address < (const char *) end; address += dcache_lsize)
+  /* Make the start address of the loop cache aligned.  */
+  address = (const char*) ((unsigned long) base & ~ (dcache_lsize - 1));
+
+  for (address; address < (const char *) end; address += dcache_lsize)
     asm volatile ("dc\tcvau, %0"
                   :
                   : "r" (address)
@@ -47,7 +50,10 @@ __aarch64_sync_cache_range (const void *base, const void *end)
asm volatile ("dsb\tish" : : : "memory");
- for (address = base; address < (const char *) end; address += icache_lsize)
+ /* Make the start address of the loop cache aligned. */
+ address = (const char*) ((unsigned long) base & ~ (icache_lsize - 1));
+
+ for (address; address < (const char *) end; address += icache_lsize)
asm volatile ("ic\tivau, %0"
:
: "r" (address)