Module Name:	src
Committed By:	matt
Date:		Thu Aug 16 16:49:11 UTC 2012
Modified Files:
	src/common/lib/libc/arch/arm/atomic: Makefile.inc atomic_cas_8.S
	    atomic_swap.S membar_ops.S
	src/sys/arch/arm/arm: lock_cas.S
	src/sys/arch/arm/include: mutex.h

Log Message:
Actually use the assembly versions of the atomic functions when compiling
for ARMv6 or ARMv7 CPUs.  Use atomic_cas_ptr instead of _lock_cas so that
the assembly version is picked up where it is available.

To generate a diff of this commit:
cvs rdiff -u -r1.8 -r1.9 src/common/lib/libc/arch/arm/atomic/Makefile.inc
cvs rdiff -u -r1.1 -r1.2 src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S
cvs rdiff -u -r1.2 -r1.3 src/common/lib/libc/arch/arm/atomic/atomic_swap.S \
    src/common/lib/libc/arch/arm/atomic/membar_ops.S
cvs rdiff -u -r1.7 -r1.8 src/sys/arch/arm/arm/lock_cas.S
cvs rdiff -u -r1.10 -r1.11 src/sys/arch/arm/include/mutex.h

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.
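For context, MUTEX_CAS is now built on atomic_cas_ulong(9), whose contract
is to return the value previously in memory; the swap happened only if that
value equals the expected one.  A minimal sketch of the idiom (illustrative
only, not taken from this commit; counter_inc() is a made-up name):

#include <sys/atomic.h>

/* Atomically increment a counter with a CAS retry loop. */
static inline void
counter_inc(volatile unsigned long *p)
{
	unsigned long old;

	do {
		old = *p;
	} while (atomic_cas_ulong(p, old, old + 1) != old);
}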
Modified files:

Index: src/common/lib/libc/arch/arm/atomic/Makefile.inc
diff -u src/common/lib/libc/arch/arm/atomic/Makefile.inc:1.8 src/common/lib/libc/arch/arm/atomic/Makefile.inc:1.9
--- src/common/lib/libc/arch/arm/atomic/Makefile.inc:1.8	Sun Jan 4 17:54:29 2009
+++ src/common/lib/libc/arch/arm/atomic/Makefile.inc	Thu Aug 16 16:49:10 2012
@@ -1,19 +1,40 @@
-#	$NetBSD: Makefile.inc,v 1.8 2009/01/04 17:54:29 pooka Exp $
+#	$NetBSD: Makefile.inc,v 1.9 2012/08/16 16:49:10 matt Exp $
+
+ARMV6=	${CPUFLAGS:M-march=armv7*} ${CPUFLAGS:M-mcpu=cortex*}
+ARMV6+=	${CPUFLAGS:M-march=armv6*} ${CPUFLAGS:M-mcpu=arm11*}
+ARMV6+=	${CFLAGS:M-march=armv7*:} ${CFLAGS:M-mcpu=cortex*}
+ARMV6+=	${CFLAGS:M-march=armv6*:} ${CFLAGS:M-mcpu=arm11*}
+ARMV6+=	${CPPFLAGS:M-march=armv7*:} ${CPPFLAGS:M-mcpu=cortex*}
+ARMV6+=	${CPPFLAGS:M-march=armv6*:} ${CPPFLAGS:M-mcpu=arm11*}
 
 .if defined(LIB) && (${LIB} == "kern" || ${LIB} == "c" || ${LIB} == "pthread" \
 	|| ${LIB} == "rump")
 
-SRCS+=	atomic_add_32_cas.c atomic_add_32_nv_cas.c atomic_and_32_cas.c \
-	atomic_and_32_nv_cas.c atomic_dec_32_cas.c atomic_dec_32_nv_cas.c \
-	atomic_inc_32_cas.c atomic_inc_32_nv_cas.c atomic_or_32_cas.c \
-	atomic_or_32_nv_cas.c atomic_swap_32_cas.c membar_ops_nop.c
+.if empty(ARMV6)
+SRCS.atomic+=	atomic_add_32_cas.c atomic_add_32_nv_cas.c \
+		atomic_and_32_cas.c atomic_and_32_nv_cas.c \
+		atomic_dec_32_cas.c atomic_dec_32_nv_cas.c \
+		atomic_inc_32_cas.c atomic_inc_32_nv_cas.c \
+		atomic_or_32_cas.c atomic_or_32_nv_cas.c \
+		atomic_swap_32_cas.c membar_ops_nop.c
+.else
+SRCS.atomic+=	atomic_add_32.S atomic_and_32.S atomic_cas_32.S \
+		atomic_dec_32.S atomic_inc_32.S atomic_or_32.S \
+		atomic_swap.S membar_ops.S
+.endif
 
 .endif
 
 .if defined(LIB) && (${LIB} == "c" || ${LIB} == "pthread")
 
-SRCS+=	atomic_init_testset.c
-SRCS+=	atomic_cas_up.S
+.if empty(ARMV6)
+SRCS.atomic+=	atomic_init_testset.c
+SRCS.atomic+=	atomic_cas_up.S
 CPPFLAGS+= -D__HAVE_ASM_ATOMIC_CAS_UP
+.else
+SRCS.atomic+=	atomic_init_cas.c
+.endif
 
 .endif
+
+SRCS+=	${SRCS.atomic}

Index: src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S
diff -u src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S:1.1 src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S:1.2
--- src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S:1.1	Tue Nov 18 15:22:56 2008
+++ src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S	Thu Aug 16 16:49:10 2012
@@ -1,4 +1,4 @@
-/*	$NetBSD: atomic_cas_8.S,v 1.1 2008/11/18 15:22:56 matt Exp $	*/
+/*	$NetBSD: atomic_cas_8.S,v 1.2 2012/08/16 16:49:10 matt Exp $	*/
 /*-
  * Copyright (c) 2008 The NetBSD Foundation, Inc.
  * All rights reserved.
@@ -27,37 +27,28 @@
  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  * POSSIBILITY OF SUCH DAMAGE.
  */
 
-#include <machine/asm.h>
-RCSID("$NetBSD: atomic_cas_8.S,v 1.1 2008/11/18 15:22:56 matt Exp $")
+#include "atomic_op_asm.h"
 
-ENTRY(atomic_cas_8)
-	XPUSH	{r4,r5}			/* we need some more registers */
-	and	r3, r0, #3		/* which byte do we replace? */
-#if __ARMEB__
-	eor	r3, r3, #3		/* bytes are reversed on BE */
-#endif
-	mov	r3, r3, lsl #3		/* multiply by 8 */
-	mov	r1, r1, lsl r3		/* mov old value to correct byte */
-	eor	r2, r1, r2, lsl r3	/* move new value to correct byte */
-/*	eor	r2, r2, r1 */		/* new value is now (old ^ new) */
-	mov	r5, #0xff		/* load mask */
-	mov	r5, r5, lsl r3		/* and move to correct byte */
-	mov	r3, r0			/* move pointer */
-
-1:	ldrex	r4, [r3]		/* load 32bit value */
-	and	r0, r4, r5		/* clear other bytes */
-	teq	r0, r1			/* equal old value? */
-	bne	2f			/* nope, bail. */
-	eor	r4, r4, r2		/* new == old ^ (old ^ new) */
-	strex	ip, r4, [r3]		/* attempt to store it */
+#if defined(_ARM_ARCH_6)
+/*
+ * ARMv6 has load-exclusive/store-exclusive which works for both user
+ * and kernel.
+ */
+ENTRY_NP(_atomic_cas_8)
+	mov	r3, r0			/* we need r0 for return value */
+1:
+	ldrexb	r0, [r3]		/* load old value */
+	teq	r0, r1			/* compare? */
+	RETc(ne)			/* return if different */
+	strexb	ip, r2, [r3]		/* store new value */
 	cmp	ip, #0			/* succeed? */
-	bne	1b			/* nope, try again. */
+	bne	1b			/* nope, try again. */
+	RET				/* yes, return. */
+	END(_atomic_cas_8)
+
+ATOMIC_OP_ALIAS(atomic_cas_8,_atomic_cas_8)
+STRONG_ALIAS(_atomic_cas_char,_atomic_cas_8)
+STRONG_ALIAS(_atomic_cas_uchar,_atomic_cas_8)
 
-2:	XPOP	{r4,r5}			/* don't need these anymore */
-	and	r1, r3, #3
-#if __ARMEB__
-	eor	r1, r1, #3
-#endif
-	mov	r0, r0, lsr r1		/* shift it back to lsb byte */
-	RET
+#endif /* _ARM_ARCH_6 */
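For reference, the new LDREXB/STREXB loop implements the usual strong
compare-and-swap contract for a single byte.  A rough C model of that
contract (illustrative only; the GCC __sync builtin is used here just to
express the semantics, the kernel uses the assembly above):

#include <stdint.h>

/* Return the old value; store 'new' only if the old value matched. */
static uint8_t
cas_8_model(volatile uint8_t *p, uint8_t expected, uint8_t new)
{
	return __sync_val_compare_and_swap(p, expected, new);
}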
Index: src/common/lib/libc/arch/arm/atomic/atomic_swap.S
diff -u src/common/lib/libc/arch/arm/atomic/atomic_swap.S:1.2 src/common/lib/libc/arch/arm/atomic/atomic_swap.S:1.3
--- src/common/lib/libc/arch/arm/atomic/atomic_swap.S:1.2	Sat Aug 16 07:12:40 2008
+++ src/common/lib/libc/arch/arm/atomic/atomic_swap.S	Thu Aug 16 16:49:10 2012
@@ -1,7 +1,7 @@
-/*	$NetBSD: atomic_swap.S,v 1.2 2008/08/16 07:12:40 matt Exp $	*/
+/*	$NetBSD: atomic_swap.S,v 1.3 2012/08/16 16:49:10 matt Exp $	*/
 
 /*-
- * Copyright (c) 2007 The NetBSD Foundation, Inc.
+ * Copyright (c) 2007,2012 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
@@ -15,13 +15,6 @@
  * 2. Redistributions in binary form must reproduce the above copyright
  *    notice, this list of conditions and the following disclaimer in the
  *    documentation and/or other materials provided with the distribution.
- * 3. All advertising materials mentioning features or use of this software
- *    must display the following acknowledgement:
- *        This product includes software developed by the NetBSD
- *        Foundation, Inc. and its contributors.
- * 4. Neither the name of The NetBSD Foundation nor the names of its
- *    contributors may be used to endorse or promote products derived
- *    from this software without specific prior written permission.
  *
  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
@@ -38,8 +31,31 @@
 
 #include "atomic_op_asm.h"
 
+/*
+ * While SWP{B} is sufficient on its own for pre-ARMv7 CPUs, on MP ARMv7
+ * cores SWP{B} is disabled since it is no longer atomic among multiple
+ * CPUs; it will actually raise an UNDEFINED exception.
+ *
+ * So we keep the LDREX/STREX loop shape but use a SWP instruction
+ * followed by a MOV (using a temporary register).  That gives a handler
+ * for the SWP UNDEFINED exception enough information to "patch" this
+ * instance of SWP with the correct LDREX/STREX forms.  (Note that this
+ * works even on "read-only" pages; if the page gets tossed, we will
+ * simply get another exception and fix it up again.)
+ */
+
 ENTRY_NP(_atomic_swap_32)
-	swp	r0, r1, [r0]
+	mov	r2, r0
+1:
+#ifdef _ARM_ARCH_6
+	ldrex	r0, [r2]
+	strex	r3, r1, [r2]
+#else
+	swp	r0, r1, [r2]
+	mov	r3, #0
+#endif
+	cmp	r3, #0
+	bne	1b
 	RET
 	END(_atomic_swap_32)
 ATOMIC_OP_ALIAS(atomic_swap_32,_atomic_swap_32)
@@ -51,11 +67,21 @@ STRONG_ALIAS(_atomic_swap_ulong,_atomic_
 STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32)
 
 ENTRY_NP(_atomic_swap_8)
-	swpb	r0, r1, [r0]
+	mov	r2, r0
+1:
+#ifdef _ARM_ARCH_6
+	ldrexb	r0, [r2]
+	strexb	r3, r1, [r2]
+#else
+	swpb	r0, r1, [r2]
+	mov	r3, #0
+#endif
+	cmp	r3, #0
+	bne	1b
 	RET
 	END(_atomic_swap_8)
 ATOMIC_OP_ALIAS(atomic_swap_8,_atomic_swap_8)
 ATOMIC_OP_ALIAS(atomic_swap_char,_atomic_swap_8)
 ATOMIC_OP_ALIAS(atomic_swap_uchar,_atomic_swap_8)
-STRONG_ALIAS(_atomic_swap_char,_atomic_swap_32)
-STRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_32)
+STRONG_ALIAS(_atomic_swap_char,_atomic_swap_8)
+STRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_8)
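Because atomic_swap_32(3) returns the previous contents of the word, it is
enough to build a classic test-and-set spinlock.  A hedged sketch (the
tas_lock()/tas_unlock() names and the lock layout are invented for
illustration; a real lock would also need the barriers from membar_ops.S
below):

#include <stdint.h>
#include <sys/atomic.h>

static void
tas_lock(volatile uint32_t *lock)
{
	/* The returned old value tells us whether we got the lock. */
	while (atomic_swap_32(lock, 1) != 0)
		continue;
}

static void
tas_unlock(volatile uint32_t *lock)
{
	atomic_swap_32(lock, 0);
}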
Index: src/common/lib/libc/arch/arm/atomic/membar_ops.S
diff -u src/common/lib/libc/arch/arm/atomic/membar_ops.S:1.2 src/common/lib/libc/arch/arm/atomic/membar_ops.S:1.3
--- src/common/lib/libc/arch/arm/atomic/membar_ops.S:1.2	Sat Aug 16 07:12:40 2008
+++ src/common/lib/libc/arch/arm/atomic/membar_ops.S	Thu Aug 16 16:49:10 2012
@@ -1,4 +1,4 @@
-/*	$NetBSD: membar_ops.S,v 1.2 2008/08/16 07:12:40 matt Exp $	*/
+/*	$NetBSD: membar_ops.S,v 1.3 2012/08/16 16:49:10 matt Exp $	*/
 /*-
  * Copyright (c) 2008 The NetBSD Foundation, Inc.
  * All rights reserved.
@@ -33,7 +33,11 @@
 #ifdef _ARM_ARCH_6
 
 ENTRY_NP(_membar_producer)
+#ifdef _ARM_ARCH_7
+	dsb
+#else
 	mcr	p15, 0, r0, c7, c10, 4	/* Data Synchronization Barrier */
+#endif
 	RET
 	END(_membar_producer)
 ATOMIC_OP_ALIAS(membar_producer,_membar_producer)
@@ -41,7 +45,11 @@ ATOMIC_OP_ALIAS(membar_write,_membar_pro
 STRONG_ALIAS(_membar_write,_membar_producer)
 
 ENTRY_NP(_membar_sync)
+#ifdef _ARM_ARCH_7
+	dmb
+#else
 	mcr	p15, 0, r0, c7, c10, 5	/* Data Memory Barrier */
+#endif
 	RET
 	END(_membar_sync)
 ATOMIC_OP_ALIAS(membar_sync,_membar_sync)
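The barriers being selected here are what membar_producer(3) and its
consumer-side counterpart membar_consumer(3) provide to order a payload
write against its "ready" flag.  A minimal sketch with invented variable
names:

#include <sys/atomic.h>

volatile int payload;
volatile int ready;

static void
publish(int value)
{
	payload = value;
	membar_producer();	/* payload must be visible before the flag */
	ready = 1;
}

static int
consume(void)
{
	while (!ready)
		continue;
	membar_consumer();	/* read the flag before reading the payload */
	return payload;
}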
Index: src/sys/arch/arm/arm/lock_cas.S
diff -u src/sys/arch/arm/arm/lock_cas.S:1.7 src/sys/arch/arm/arm/lock_cas.S:1.8
--- src/sys/arch/arm/arm/lock_cas.S:1.7	Wed Jul 7 01:17:26 2010
+++ src/sys/arch/arm/arm/lock_cas.S	Thu Aug 16 16:49:10 2012
@@ -1,4 +1,4 @@
-/*	$NetBSD: lock_cas.S,v 1.7 2010/07/07 01:17:26 chs Exp $	*/
+/*	$NetBSD: lock_cas.S,v 1.8 2012/08/16 16:49:10 matt Exp $	*/
 
 /*-
  * Copyright (c) 2007 The NetBSD Foundation, Inc.
@@ -50,6 +50,7 @@
 	.word	_C_LABEL(_lock_cas_fail)
 #endif /* ARM_LOCK_CAS_DEBUG */
 
+#ifndef _ARM_ARCH_6
 /*
  * _lock_cas:
  *
@@ -66,15 +67,6 @@
  */
 	.globl	_C_LABEL(_lock_cas_end)
 ENTRY_NP(_lock_cas)
-#ifdef _ARCH_ARM_6
-	mov	ip, r0
-1:	ldrex	r0, [ip]		/* eventual return value */
-	cmp	r1, r0
-	RETc(ne)
-	strex	r3, r2, [ip]
-	cmp	r3, #0
-	bne	1b
-#else
 	ldr	r3, [r0]
 	teq	r3, r1
 	streq	r2, [r0]
@@ -93,7 +85,6 @@ _C_LABEL(_lock_cas_end):
 #endif /* __ARMEB__ */
 	stmia	r3, {r1-r2}		/* store ev_count */
 #endif /* ARM_LOCK_CAS_DEBUG */
-#endif
 	RET
 	END(_lock_cas)
 
@@ -114,6 +105,7 @@ STRONG_ALIAS(_atomic_cas_uint_ni,_lock_c
 STRONG_ALIAS(atomic_cas_uint_ni,_lock_cas)
 STRONG_ALIAS(_atomic_cas_ptr_ni,_lock_cas)
 STRONG_ALIAS(atomic_cas_ptr_ni,_lock_cas)
+#endif /* !_ARM_ARCH_6 */
 
 #ifdef __PROG32
 #define	SAVE_REGS	stmfd	sp!, {r4-r6}

Index: src/sys/arch/arm/include/mutex.h
diff -u src/sys/arch/arm/include/mutex.h:1.10 src/sys/arch/arm/include/mutex.h:1.11
--- src/sys/arch/arm/include/mutex.h:1.10	Mon Apr 28 20:23:14 2008
+++ src/sys/arch/arm/include/mutex.h	Thu Aug 16 16:49:10 2012
@@ -1,4 +1,4 @@
-/*	$NetBSD: mutex.h,v 1.10 2008/04/28 20:23:14 martin Exp $	*/
+/*	$NetBSD: mutex.h,v 1.11 2012/08/16 16:49:10 matt Exp $	*/
 
 /*-
  * Copyright (c) 2002, 2007 The NetBSD Foundation, Inc.
@@ -90,11 +90,12 @@ struct kmutex {
  */
 #define	MUTEX_GIVE(mtx)			/* nothing */
 
-unsigned long	_lock_cas(volatile unsigned long *,
-    unsigned long, unsigned long);
-
 #define	MUTEX_CAS(p, o, n)		\
-    (_lock_cas((volatile unsigned long *)(p), (o), (n)) == (o))
+    (atomic_cas_ulong((volatile unsigned long *)(p), (o), (n)) == (o))
 
+#ifdef MULTIPROCESSOR
+#define	MUTEX_SMT_PAUSE()	__asm __volatile("wfe")
+#define	MUTEX_SMT_WAKE()	__asm __volatile("sev")
+#endif
 
 #endif	/* __MUTEX_PRIVATE */
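MUTEX_SMT_PAUSE() and MUTEX_SMT_WAKE() let a spin loop idle in WFE until
the lock holder signals with SEV instead of spinning hot.  Roughly, the
pattern these hooks enable looks like this (a sketch under the
MULTIPROCESSOR definitions above; spin_until_free() and release() are
invented names):

static void
spin_until_free(volatile unsigned long *owner)
{
	while (*owner != 0)
		MUTEX_SMT_PAUSE();	/* wfe: sleep until an event */
}

static void
release(volatile unsigned long *owner)
{
	*owner = 0;
	MUTEX_SMT_WAKE();		/* sev: wake spinning waiters */
}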