Module Name:    src
Committed By:   matt
Date:           Sun Aug 16 02:52:43 UTC 2009

Modified Files:
        src/sys/arch/mips/include [matt-nb5-mips64]: asm.h

Log Message:
Completely rework to support multiple ABIs.  Includes macros/ops to make
writing ABI-independent assembly much easier.  Add macros to handle PIC
for N32/N64 as well as O32/O64.


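As an illustration only (not part of the commit): a minimal sketch of a
C-callable routine written against the reworked macros, assuming the
conventional NetBSD prologue/epilogue ordering; the routine and callee
names (pic_stub, some_callee) are hypothetical.

    #include <mips/asm.h>

            .text
    NESTED(pic_stub, CALLFRAME_SIZ, ra)
            SETUP_GP                            # o32/o64 PIC: .cpload t9 (empty on n32/n64)
            PTR_SUBU sp, CALLFRAME_SIZ          # allocate the ABI-sized call frame
            SETUP_GP64(CALLFRAME_GP, pic_stub)  # n32/n64 PIC: .cpsetup (empty on o32/o64)
            SAVE_GP(CALLFRAME_GP)               # o32/o64 PIC: .cprestore (empty on n32/n64)
            REG_S   ra, CALLFRAME_RA(sp)        # register-width save of the return address
            PTR_L   t0, 0(a1)                   # pointer-width load (lw or ld, per ABI)
            PTR_S   t0, 0(a0)                   # pointer-width store (sw or sd, per ABI)
            PTR_LA  t9, _C_LABEL(some_callee)   # call through t9 so PIC code stays correct
            jalr    t9
            REG_L   ra, CALLFRAME_RA(sp)
            RESTORE_GP64                        # n32/n64 PIC: .cpreturn (empty on o32/o64)
            PTR_ADDU sp, CALLFRAME_SIZ
            jr      ra
    END(pic_stub)
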
To generate a diff of this commit:
cvs rdiff -u -r1.40 -r1.40.38.1 src/sys/arch/mips/include/asm.h

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.

Index: src/sys/arch/mips/include/asm.h
diff -u src/sys/arch/mips/include/asm.h:1.40 src/sys/arch/mips/include/asm.h:1.40.38.1
--- src/sys/arch/mips/include/asm.h:1.40	Wed Oct 17 19:55:36 2007
+++ src/sys/arch/mips/include/asm.h	Sun Aug 16 02:52:43 2009
@@ -1,4 +1,4 @@
-/*	$NetBSD: asm.h,v 1.40 2007/10/17 19:55:36 garbled Exp $	*/
+/*	$NetBSD: asm.h,v 1.40.38.1 2009/08/16 02:52:43 matt Exp $	*/
 
 /*
  * Copyright (c) 1992, 1993
@@ -52,7 +52,7 @@
  */
 
 #ifndef _MIPS_ASM_H
-#define _MIPS_ASM_H
+#define	_MIPS_ASM_H
 
 #include <machine/cdefs.h>	/* for API selection */
 #include <mips/regdef.h>
@@ -62,7 +62,7 @@
  * Must always be noreorder, must never use a macro instruction
  * Final addiu to t9 must always equal the size of this _KERN_MCOUNT
  */
-#define _KERN_MCOUNT						\
+#define	_KERN_MCOUNT						\
 	.set	push;						\
 	.set	noreorder;					\
 	.set	noat;						\
@@ -79,7 +79,7 @@
 	.set	pop;					
 
 #ifdef GPROF
-#define MCOUNT _KERN_MCOUNT
+#define	MCOUNT _KERN_MCOUNT
 #else
 #define	MCOUNT
 #endif
@@ -95,10 +95,10 @@
 #endif
 
 #ifdef USE_AENT
-#define AENT(x)				\
+#define	AENT(x)				\
 	.aent	x, 0
 #else
-#define AENT(x)
+#define	AENT(x)
 #endif
 
 /*
@@ -110,7 +110,7 @@
 /*
  * STRONG_ALIAS: create a strong alias.
  */
-#define STRONG_ALIAS(alias,sym)						\
+#define	STRONG_ALIAS(alias,sym)						\
 	.globl alias;							\
 	alias = sym
 
@@ -132,7 +132,7 @@
  *	- never use any register that callee-saved (S0-S8), and
  *	- not use any local stack storage.
  */
-#define LEAF(x)				\
+#define	LEAF(x)				\
 	.globl	_C_LABEL(x);		\
 	.ent	_C_LABEL(x), 0;		\
 _C_LABEL(x): ;				\
@@ -143,7 +143,7 @@
  * LEAF_NOPROFILE
  *	No profilable leaf routine.
  */
-#define LEAF_NOPROFILE(x)		\
+#define	LEAF_NOPROFILE(x)		\
 	.globl	_C_LABEL(x);		\
 	.ent	_C_LABEL(x), 0;		\
 _C_LABEL(x): ;				\
@@ -153,7 +153,7 @@
  * STATIC_LEAF
  *	Declare a local leaf function.
  */
-#define STATIC_LEAF(x)			\
+#define	STATIC_LEAF(x)			\
 	.ent	_C_LABEL(x), 0;		\
 _C_LABEL(x): ;				\
 	.frame sp, 0, ra;		\
@@ -163,7 +163,7 @@
  * XLEAF
  *	declare alternate entry to leaf routine
  */
-#define XLEAF(x)			\
+#define	XLEAF(x)			\
 	.globl	_C_LABEL(x);		\
 	AENT (_C_LABEL(x));		\
 _C_LABEL(x):
@@ -172,7 +172,7 @@
  * STATIC_XLEAF
  *	declare alternate entry to a static leaf routine
  */
-#define STATIC_XLEAF(x)			\
+#define	STATIC_XLEAF(x)			\
 	AENT (_C_LABEL(x));		\
 _C_LABEL(x):
 
@@ -181,7 +181,7 @@
  *	A function calls other functions and needs
  *	therefore stack space to save/restore registers.
  */
-#define NESTED(x, fsize, retpc)		\
+#define	NESTED(x, fsize, retpc)		\
 	.globl	_C_LABEL(x);		\
 	.ent	_C_LABEL(x), 0; 	\
 _C_LABEL(x): ;				\
@@ -192,7 +192,7 @@
  * NESTED_NOPROFILE(x)
  *	No profilable nested routine.
  */
-#define NESTED_NOPROFILE(x, fsize, retpc)	\
+#define	NESTED_NOPROFILE(x, fsize, retpc)	\
 	.globl	_C_LABEL(x);		\
 	.ent	_C_LABEL(x), 0;		\
 _C_LABEL(x): ;				\
@@ -202,7 +202,7 @@
  * XNESTED
  *	declare alternate entry point to nested routine.
  */
-#define XNESTED(x)			\
+#define	XNESTED(x)			\
 	.globl	_C_LABEL(x);		\
 	AENT (_C_LABEL(x));		\
 _C_LABEL(x):
@@ -211,19 +211,20 @@
  * END
  *	Mark end of a procedure.
  */
-#define END(x) \
-	.end _C_LABEL(x)
+#define	END(x) \
+	.end _C_LABEL(x);		\
+	.size _C_LABEL(x), . - _C_LABEL(x)
 
 /*
  * IMPORT -- import external symbol
  */
-#define IMPORT(sym, size)		\
+#define	IMPORT(sym, size)		\
 	.extern _C_LABEL(sym),size
 
 /*
  * EXPORT -- export definition of symbol
  */
-#define EXPORT(x)			\
+#define	EXPORT(x)			\
 	.globl	_C_LABEL(x);		\
 _C_LABEL(x):
 
@@ -232,16 +233,16 @@
  *	exception vector entrypoint
  *	XXX: regmask should be used to generate .mask
  */
-#define VECTOR(x, regmask)		\
+#define	VECTOR(x, regmask)		\
 	.ent	_C_LABEL(x),0;		\
 	EXPORT(x);			\
 
 #ifdef __STDC__
-#define VECTOR_END(x)			\
+#define	VECTOR_END(x)			\
 	EXPORT(x ## End);		\
 	END(x)
 #else
-#define VECTOR_END(x)			\
+#define	VECTOR_END(x)			\
 	EXPORT(x/**/End);		\
 	END(x)
 #endif
@@ -249,7 +250,7 @@
 /*
  * Macros to panic and printf from assembly language.
  */
-#define PANIC(msg)			\
+#define	PANIC(msg)			\
 	la	a0, 9f;			\
 	jal	_C_LABEL(panic);	\
 	nop;				\
@@ -266,28 +267,60 @@
 9:	.asciiz	msg;			\
 	.text
 
-#define ASMSTR(str)			\
+#define	ASMSTR(str)			\
 	.asciiz str;			\
 	.align	3
 
+#define	RCSID(name)	.pushsection ".ident"; .asciz name; .popsection
+
 /*
  * XXX retain dialects XXX
  */
-#define ALEAF(x)			XLEAF(x)
-#define NLEAF(x)			LEAF_NOPROFILE(x)
-#define NON_LEAF(x, fsize, retpc)	NESTED(x, fsize, retpc)
-#define NNON_LEAF(x, fsize, retpc)	NESTED_NOPROFILE(x, fsize, retpc)
+#define	ALEAF(x)			XLEAF(x)
+#define	NLEAF(x)			LEAF_NOPROFILE(x)
+#define	NON_LEAF(x, fsize, retpc)	NESTED(x, fsize, retpc)
+#define	NNON_LEAF(x, fsize, retpc)	NESTED_NOPROFILE(x, fsize, retpc)
+
+#if defined(__mips_o32)
+#define	SZREG	4
+#else
+#define	SZREG	8
+#endif
+
+#if defined(__mips_o32) || defined(__mips_o64)
+#define	ALSK	7		/* stack alignment */
+#define	ALMASK	-7		/* stack alignment */
+#define	SZFPREG	4
+#define	FPLD	lwc1
+#define	FPST	swc1
+#else
+#define	ALSK	15		/* stack alignment */
+#define	ALMASK	-15		/* stack alignment */
+#define	SZFPREG	8
+#define	FPLD	ldc1
+#define	FPST	sdc1
+#endif
 
 /*
  *  standard callframe {
- *  	register_t cf_args[4];		arg0 - arg3
+ *	register_t cf_pad[N];		o32 (N=2), n32/n64=(N=1)
+ *  	register_t cf_args[4];		arg0 - arg3 (only on o32 and o64)
+ *  	register_t cf_gp;		global pointer (only on n32 and n64)
  *  	register_t cf_sp;		frame pointer
  *  	register_t cf_ra;		return address
  *  };
  */
-#define	CALLFRAME_SIZ	(4 * (4 + 2))
-#define	CALLFRAME_SP	(4 * 4)
-#define	CALLFRAME_RA	(4 * 5)
+#if defined(__mips_o32)
+#define	CALLFRAME_SIZ	(SZREG * (4 + 4))
+#elif defined(__mips_o64)
+#define	CALLFRAME_SIZ	(SZREG * (4 + 4))
+#elif defined(__mips_n32) || defined(__mips_n64)
+#define	CALLFRAME_SIZ	(SZREG * 4)
+#endif
+#define	CALLFRAME_S0	(CALLFRAME_SIZ - 4 * SZREG)
+#define	CALLFRAME_GP	(CALLFRAME_SIZ - 3 * SZREG)
+#define	CALLFRAME_SP	(CALLFRAME_SIZ - 2 * SZREG)
+#define	CALLFRAME_RA	(CALLFRAME_SIZ - 1 * SZREG)
 
 /*
  * While it would be nice to be compatible with the SGI
@@ -298,22 +331,269 @@
  * assembler to prevent the assembler from generating 64-bit style
  * ABI calls.
  */
+#if defined(__mips_o32)
+#define	PTR_ADD		add
+#define	PTR_ADDI	addi
+#define	PTR_ADDU	addu
+#define	PTR_ADDIU	addiu
+#define	PTR_SUB		add
+#define	PTR_SUBI	subi
+#define	PTR_SUBU	subu
+#define	PTR_SUBIU	subu
+#define	PTR_L		lw
+#define	PTR_LA		la
+#define	PTR_S		sw
+#define	PTR_SLL		sll
+#define	PTR_SLLV	sllv
+#define	PTR_SRL		srl
+#define	PTR_SRLV	srlv
+#define	PTR_SRA		sra
+#define	PTR_SRAV	srav
+#define	PTR_LL		ll
+#define	PTR_SC		sc
+#define	PTR_WORD	.word
+#define	PTR_SCALESHIFT	2
+#endif /* __mips_o32 */
+
+#if defined(__mips_n32)
+#define	PTR_ADD		add
+#define	PTR_ADDI	addi
+#define	PTR_ADDU	add	/* no u */
+#define	PTR_ADDIU	addi	/* no u */
+#define	PTR_SUB		sub
+#define	PTR_SUBI	subi
+#define	PTR_SUBU	sub	/* no u */
+#define	PTR_SUBIU	sub	/* no u */
+#define	PTR_L		lw
+#define	PTR_LA		la
+#define	PTR_S		sw
+#define	PTR_SLL		sll
+#define	PTR_SLLV	sllv
+#define	PTR_SRL		srl
+#define	PTR_SRLV	srlv
+#define	PTR_SRA		sra
+#define	PTR_SRAV	srav
+#define	PTR_LL		ll
+#define	PTR_SC		sc
+#define	PTR_WORD	.word
+#define	PTR_SCALESHIFT	2
+#endif /* __mips_n32 */
+
+#if defined(__mips_n64) || defined(__mips_o64)
+#define	PTR_ADD		dadd
+#define	PTR_ADDI	daddi
+#define	PTR_ADDU	daddu
+#define	PTR_ADDIU	daddiu
+#define	PTR_SUB		dadd
+#define	PTR_SUBI	dsubi
+#define	PTR_SUBU	dsubu
+#define	PTR_SUBIU	dsubu
+#define	PTR_L		ld
+#define	PTR_LA		dla
+#define	PTR_S		sd
+#define	PTR_SLL		dsll
+#define	PTR_SLLV	dsllv
+#define	PTR_SRL		dsrl
+#define	PTR_SRLV	dsrlv
+#define	PTR_SRA		dsra
+#define	PTR_SRAV	dsrav
+#define	PTR_LL		lld
+#define	PTR_SC		scd
+#define	PTR_WORD	.dword
+#define	PTR_SCALESHIFT	3
+#endif /* __mips_n64 || __mips_o64 */
+
+#if _MIPS_SZINT == 32
+#define	INT_ADD		add
+#define	INT_ADDI	addi
+#define	INT_ADDU	addu
+#define	INT_ADDIU	addiu
+#define	INT_SUB		add
+#define	INT_SUBI	subi
+#define	INT_SUBU	subu
+#define	INT_SUBIU	subu
+#define	INT_L		lw
+#define	INT_LA		la
+#define	INT_S		sw
+#define	INT_SLL		sll
+#define	INT_SLLV	sllv
+#define	INT_SRL		srl
+#define	INT_SRLV	srlv
+#define	INT_SRA		sra
+#define	INT_SRAV	srav
+#define	INT_LL		ll
+#define	INT_SC		sc
+#else
+#define	INT_ADD		dadd
+#define	INT_ADDI	daddi
+#define	INT_ADDU	daddu
+#define	INT_ADDIU	daddiu
+#define	INT_SUB		dadd
+#define	INT_SUBI	dsubi
+#define	INT_SUBU	dsubu
+#define	INT_SUBIU	dsubu
+#define	INT_L		ld
+#define	INT_LA		dla
+#define	INT_S		sd
+#define	INT_SLL		dsll
+#define	INT_SLLV	dsllv
+#define	INT_SRL		dsrl
+#define	INT_SRLV	dsrlv
+#define	INT_SRA		dsra
+#define	INT_SRAV	dsrav
+#define	INT_LL		lld
+#define	INT_SC		scd
+#endif
+
+#if _MIPS_SZLONG == 32
+#define	LONG_ADD	add
+#define	LONG_ADDI	addi
+#define	LONG_ADDU	addu
+#define	LONG_ADDIU	addiu
+#define	LONG_SUB	add
+#define	LONG_SUBI	subi
+#define	LONG_SUBU	subu
+#define	LONG_SUBIU	subu
+#define	LONG_L		lw
+#define	LONG_LA		la
+#define	LONG_S		sw
+#define	LONG_SLL	sll
+#define	LONG_SLLV	sllv
+#define	LONG_SRL	srl
+#define	LONG_SRLV	srlv
+#define	LONG_SRA	sra
+#define	LONG_SRAV	srav
+#define	LONG_LL		ll
+#define	LONG_SC		sc
+#else
+#define	LONG_ADD	dadd
+#define	LONG_ADDI	daddi
+#define	LONG_ADDU	daddu
+#define	LONG_ADDIU	daddiu
+#define	LONG_SUB	dadd
+#define	LONG_SUBI	dsubi
+#define	LONG_SUBU	dsubu
+#define	LONG_SUBIU	dsubu
+#define	LONG_L		ld
+#define	LONG_LA		dla
+#define	LONG_S		sd
+#define	LONG_SLL	dsll
+#define	LONG_SLLV	dsllv
+#define	LONG_SRL	dsrl
+#define	LONG_SRLV	dsrlv
+#define	LONG_SRA	dsra
+#define	LONG_SRAV	dsrav
+#define	LONG_LL		lld
+#define	LONG_SC		scd
+#endif
+
+#if SZREG == 4
+#define	REG_L		lw
+#define	REG_S		sw
+#define	REG_LI		li
+#define	REG_SLL		sll
+#define	REG_SLLV	sllv
+#define	REG_SRL		srl
+#define	REG_SRLV	srlv
+#define	REG_SRA		sra
+#define	REG_SRAV	srav
+#else
+#define	REG_L		ld
+#define	REG_S		sd
+#define	REG_LI		dli
+#define	REG_SLL		dsll
+#define	REG_SLLV	dsllv
+#define	REG_SRL		dsrl
+#define	REG_SRLV	dsrlv
+#define	REG_SRA		dsra
+#define	REG_SRAV	dsrav
+#endif
 
-#if !defined(_MIPS_BSD_API) || _MIPS_BSD_API == _MIPS_BSD_API_LP32
-#define	REG_L	lw
-#define REG_S	sw
-#define	REG_LI	li
 #define	REG_PROLOGUE	.set push
 #define	REG_EPILOGUE	.set pop
-#define SZREG	4
+
+#if _MIPS_ISA == _MIPS_ISA_MIPS1 || _MIPS_ISA == _MIPS_ISA_MIPS2 || \
+    _MIPS_ISA == _MIPS_ISA_MIPS32 || _MIPS_ISA == _MIPS_ISA_MIPS32R2
+#define	MFC0		mfc0
+#define	MTC0		mtc0
+#endif
+#if _MIPS_ISA == _MIPS_ISA_MIPS3 || _MIPS_ISA == _MIPS_ISA_MIPS4 || \
+    _MIPS_ISA == _MIPS_ISA_MIPS5 || _MIPS_ISA == _MIPS_ISA_MIPS64 || \
+    _MIPS_ISA == _MIPS_ISA_MIPS64R2
+#define	MFC0		dmfc0
+#define	MTC0		dmtc0
+#endif
+
+#if defined(__mips_o32) || defined(__mips_o64)
+
+#ifdef __ABICALLS__
+#define	CPRESTORE(r)	.cprestore r
+#define	CPLOAD(r)	.cpload r
 #else
-#define	REG_L	ld
-#define REG_S	sd
-#define	REG_LI	dli
+#define	CPRESTORE(r)	/* not needed */
+#define	CPLOAD(r)	/* not needed */
+#endif
+
+#define	SETUP_GP	\
+			.set noreorder;				\
+			.cpload	t9;				\
+			.set reorder
+#define	SETUP_GPX(r)	\
+			.set noreorder;				\
+			move	r,ra;	/* save old ra */	\
+			bal	7f;				\
+			nop;					\
+		7:	.cpload	ra;				\
+			move	ra,r;				\
+			.set reorder
+#define	SETUP_GPX_L(r,lbl)	\
+			.set noreorder;				\
+			move	r,ra;	/* save old ra */	\
+			bal	lbl;				\
+			nop;					\
+		lbl:	.cpload	ra;				\
+			move	ra,r;				\
+			.set reorder
+#define	SAVE_GP(x)	.cprestore x
+
+#define	SETUP_GP64(a,b)		/* n32/n64 specific */
+#define	SETUP_GP64_R(a,b)		/* n32/n64 specific */
+#define	SETUP_GPX64(a,b)	/* n32/n64 specific */
+#define	SETUP_GPX64_L(a,b,c)	/* n32/n64 specific */
+#define	RESTORE_GP64		/* n32/n64 specific */
+#define	USE_ALT_CP(a)		/* n32/n64 specific */
+#endif /* __mips_o32 || __mips_o64 */
+
+#if defined(__mips_n32) || defined(__mips_n64)
 #define	REG_PROLOGUE	.set push ; .set mips3
 #define	REG_EPILOGUE	.set pop
-#define SZREG	8
-#endif	/* _MIPS_BSD_API */
+#endif
+
+#if defined(__mips_n32) || defined(__mips_n64)
+#define	SETUP_GP		/* o32 specific */
+#define	SETUP_GPX(r)		/* o32 specific */
+#define	SETUP_GPX_L(r,lbl)	/* o32 specific */
+#define	SAVE_GP(x)		/* o32 specific */
+#define	SETUP_GP64(a,b)		.cpsetup $t9, a, b
+#define	SETUP_GPX64(a,b)	\
+				move	b,ra;			\
+				.set noreorder;			\
+				bal	7f;			\
+				nop;				\
+			7:	.set reorder;			\
+				.cpsetup ra, a, 7b;		\
+				move	ra,b
+#define	SETUP_GPX64_L(a,b,c)	\
+				move	b,ra;			\
+				.set noreorder;			\
+				bal	c;			\
+				nop;				\
+			c:	.set reorder;			\
+				.cpsetup ra, a, c;		\
+				move	ra,b
+#define	RESTORE_GP64		.cpreturn
+#define	USE_ALT_CP(a)		.cplocal a
+#endif	/* __mips_n32 || __mips_n64 */
 
 /*
  * The DYNAMIC_STATUS_MASK option adds an additional masking operation
@@ -326,21 +606,21 @@
  * XXX this is only currently implemented for mips3.
  */
 #ifdef MIPS_DYNAMIC_STATUS_MASK
-#define DYNAMIC_STATUS_MASK(sr,scratch)	\
+#define	DYNAMIC_STATUS_MASK(sr,scratch)	\
 	lw	scratch, mips_dynamic_status_mask; \
 	and	sr, sr, scratch
 
-#define DYNAMIC_STATUS_MASK_TOUSER(sr,scratch1)		\
+#define	DYNAMIC_STATUS_MASK_TOUSER(sr,scratch1)		\
 	ori	sr, (MIPS_INT_MASK | MIPS_SR_INT_IE);	\
 	DYNAMIC_STATUS_MASK(sr,scratch1)
 #else
-#define DYNAMIC_STATUS_MASK(sr,scratch)
-#define DYNAMIC_STATUS_MASK_TOUSER(sr,scratch1)
+#define	DYNAMIC_STATUS_MASK(sr,scratch)
+#define	DYNAMIC_STATUS_MASK_TOUSER(sr,scratch1)
 #endif
 
 /* See lock_stubs.S. */
 #define	MIPS_LOCK_RAS_SIZE	128
 
-#define CPUVAR(off) _C_LABEL(cpu_info_store)+__CONCAT(CPU_INFO_,off)
+#define	CPUVAR(off) _C_LABEL(cpu_info_store)+__CONCAT(CPU_INFO_,off)
 
 #endif /* _MIPS_ASM_H */

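For reference (not part of the commit), working the new CALLFRAME_*
definitions through for each ABI gives:

    o32     (SZREG=4): CALLFRAME_SIZ = 4*(4+4) = 32;  S0 = 16, GP = 20, SP = 24, RA = 28
    o64     (SZREG=8): CALLFRAME_SIZ = 8*(4+4) = 64;  S0 = 32, GP = 40, SP = 48, RA = 56
    n32/n64 (SZREG=8): CALLFRAME_SIZ = 8*4     = 32;  S0 =  0, GP =  8, SP = 16, RA = 24

so the o32 standard frame grows from the previous 24 bytes (SP at 16,
RA at 20) to 32 bytes, with the SP and RA slots moving to 24 and 28.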