Invert the cache line mask when preparing an MVA. The previous expression ANDed the address with the mask itself, keeping only the low offset bits instead of clearing them; the result was never a line-aligned address. Apply the complement (`& ~mask`) so the address is aligned down to the cache line boundary and the maintenance operations target the intended line. --- bsps/aarch64/shared/cache/cache.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/bsps/aarch64/shared/cache/cache.c b/bsps/aarch64/shared/cache/cache.c index 9bdbe88c92..240cfb043a 100644 --- a/bsps/aarch64/shared/cache/cache.c +++ b/bsps/aarch64/shared/cache/cache.c @@ -43,7 +43,7 @@ #define AARCH64_CACHE_L1_DATA_LINE_MASK \ ( AARCH64_CACHE_L1_CPU_DATA_ALIGNMENT - 1 ) #define AARCH64_CACHE_PREPARE_MVA(mva) \ - ((const void *) (((size_t) (mva)) & AARCH64_CACHE_L1_DATA_LINE_MASK)) + ( (const void *) ( ( (size_t) mva ) & ~AARCH64_CACHE_L1_DATA_LINE_MASK ) ) static inline void AArch64_data_cache_clean_and_invalidate_line(const void *d_addr) -- 2.20.1 _______________________________________________ devel mailing list devel@rtems.org http://lists.rtems.org/mailman/listinfo/devel