Module Name:    src
Committed By:   jmcneill
Date:           Tue May  5 12:47:16 UTC 2020

Modified Files:
        src/sys/external/bsd/compiler_rt/dist/lib/builtins: clear_cache.c

Log Message:
Align addresses to cache lines in __clear_cache for aarch64.

This corrects an issue where, if the start and end addresses fall in
different cache lines and the end address is not aligned to the cache
line size, the last cache line will not be invalidated properly.

Patch from compiler-rt upstream: https://reviews.llvm.org/rCRT323315


To generate a diff of this commit:
cvs rdiff -u -r1.3 -r1.4 \
    src/sys/external/bsd/compiler_rt/dist/lib/builtins/clear_cache.c

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.

Modified files:

Index: src/sys/external/bsd/compiler_rt/dist/lib/builtins/clear_cache.c
diff -u src/sys/external/bsd/compiler_rt/dist/lib/builtins/clear_cache.c:1.3 src/sys/external/bsd/compiler_rt/dist/lib/builtins/clear_cache.c:1.4
--- src/sys/external/bsd/compiler_rt/dist/lib/builtins/clear_cache.c:1.3	Fri Jun 16 21:22:14 2017
+++ src/sys/external/bsd/compiler_rt/dist/lib/builtins/clear_cache.c	Tue May  5 12:47:16 2020
@@ -143,12 +143,14 @@ void __clear_cache(void *start, void *en
    * uintptr_t in case this runs in an IPL32 environment.
    */
   const size_t dcache_line_size = 4 << ((ctr_el0 >> 16) & 15);
-  for (addr = xstart; addr < xend; addr += dcache_line_size)
+  for (addr = xstart & ~(dcache_line_size - 1); addr < xend;
+       addr += dcache_line_size)
     __asm __volatile("dc cvau, %0" :: "r"(addr));
   __asm __volatile("dsb ish");
 
   const size_t icache_line_size = 4 << ((ctr_el0 >> 0) & 15);
-  for (addr = xstart; addr < xend; addr += icache_line_size)
+  for (addr = xstart & ~(icache_line_size - 1); addr < xend;
+       addr += icache_line_size)
     __asm __volatile("ic ivau, %0" :: "r"(addr));
   __asm __volatile("isb sy");
 #elif defined(__sparc__)

Reply via email to