Hey everybody!

Paolo asked me to post my generic sharing patches here so that more
people can take a look at them before they are committed.  Here's the
next one in line: static field access.

The main part of this patch is the introduction of a new trampoline
call for initializing a generic class.  The reason we couldn't use the
existing class init trampoline is that it requires the vtable argument
to be known at method-compile time, which is not the case here.  Note
that so far this trampoline has only been implemented on x86 and
amd64.  The code should still work on other architectures, but it will
refuse to share a method that accesses static fields of generic
classes, because the trampoline isn't implemented there yet.
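
For anyone who wants to do the port for another architecture, the
arch-specific surface is small: define MONO_ARCH_VTABLE_REG in
mini-<arch>.h, teach mono_arch_create_trampoline_code () to handle
MONO_TRAMPOLINE_GENERIC_CLASS_INIT (the vtable argument arrives in
that register instead of being encoded after the call site), and
provide mono_arch_find_vtable () so the C-level trampoline can fish
the vtable out of the saved register array.  A rough, untested sketch
follows -- the register name is just a placeholder for whatever
scratch register the port picks (the patch below uses R11 on amd64
and EDX on x86):

/* mini-<arch>.h */
#define MONO_ARCH_VTABLE_REG  <scratch-reg>

/* mini-<arch>.c -- same shape as the x86/amd64 versions in the patch:
 * the trampoline saves all registers into the regs array, so the
 * vtable is simply read back from the MONO_ARCH_VTABLE_REG slot. */
MonoVTable*
mono_arch_find_vtable (gpointer *regs, guint8 *code)
{
	return (MonoVTable*) regs [MONO_ARCH_VTABLE_REG];
}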

Mark
Index: mini/inssel.brg
===================================================================
--- mini/inssel.brg	(revision 89679)
+++ mini/inssel.brg	(working copy)
@@ -921,6 +921,23 @@
 	mono_bblock_add_inst (s->cbb, tree);
 }
 
+stmt: OP_TRAMPCALL_VTABLE (reg) {
+	#ifdef MONO_ARCH_VTABLE_REG
+		MonoCallInst *call = (MonoCallInst*)tree;
+		int vtable_reg = mono_regstate_next_int (s->rs);
+
+		MONO_EMIT_NEW_UNALU (s, OP_MOVE, vtable_reg, state->left->reg1);
+
+		call->inst.opcode = OP_VOIDCALL;
+
+		mono_call_inst_add_outarg_reg (s, call, vtable_reg, MONO_ARCH_VTABLE_REG, FALSE);
+
+		mono_bblock_add_inst (s->cbb, &call->inst);
+	#else
+		g_assert_not_reached ();
+	#endif
+}
+
 stmt: OP_SAVE_LMF,
 stmt: OP_RESTORE_LMF {
 	mono_bblock_add_inst (s->cbb, tree);
Index: mini/mini.c
===================================================================
--- mini/mini.c	(revision 89680)
+++ mini/mini.c	(working copy)
@@ -152,6 +152,7 @@
 
 /* helper methods signature */
 static MonoMethodSignature *helper_sig_class_init_trampoline = NULL;
+static MonoMethodSignature *helper_sig_generic_class_init_trampoline = NULL;
 static MonoMethodSignature *helper_sig_domain_get = NULL;
 
 static guint32 default_opt = 0;
@@ -6569,6 +6570,8 @@
 		case CEE_STSFLD: {
 			MonoClassField *field;
 			gpointer addr = NULL;
+			gboolean shared_access = FALSE;
+			int relation;
 
 			CHECK_OPSIZE (5);
 			token = read32 (ip + 1);
@@ -6584,9 +6587,33 @@
 			if (!dont_verify && !cfg->skip_visibility && !mono_method_can_access_field (method, field))
 				FIELD_ACCESS_FAILURE;
 
-			if (cfg->generic_sharing_context && mono_class_check_context_used (klass))
-				GENERIC_SHARING_FAILURE (*ip);
+			/*
+			 * We can only support shared generic static
+			 * field access on architectures where the
+			 * trampoline code has been extended to handle
+			 * the generic class init.
+			 */
+#ifndef MONO_ARCH_VTABLE_REG
+			GENERIC_SHARING_FAILURE (*ip);
+#endif
 
+			if (cfg->generic_sharing_context) {
+				int context_used = mono_class_check_context_used (klass);
+
+				if (context_used & MONO_GENERIC_CONTEXT_USED_METHOD ||
+						klass->valuetype)
+					GENERIC_SHARING_FAILURE (*ip);
+
+				if (context_used) {
+					relation = mono_class_generic_class_relation (klass, method->klass, generic_context, NULL);
+
+					if (!(method->flags & METHOD_ATTRIBUTE_STATIC) /*&& relation != MINI_GENERIC_CLASS_RELATION_OTHER*/)
+						shared_access = TRUE;
+					else
+						GENERIC_SHARING_FAILURE (*ip);
+				}
+			}
+
 			g_assert (!(field->type->attrs & FIELD_ATTRIBUTE_LITERAL));
 
 			if ((*ip) == CEE_STSFLD)
@@ -6602,7 +6629,70 @@
 				addr = g_hash_table_lookup (cfg->domain->special_static_fields, field);
 			mono_domain_unlock (cfg->domain);
 
-			if ((cfg->opt & MONO_OPT_SHARED) || (cfg->compile_aot && addr)) {
+			if (shared_access) {
+				MonoInst *this, *rgctx, *static_data;
+
+				/*
+				g_print ("sharing static field access in %s.%s.%s - depth %d offset %d\n",
+					method->klass->name_space, method->klass->name, method->name,
+					depth, field->offset);
+				*/
+
+				if (mono_class_needs_cctor_run (klass, method)) {
+					MonoMethodSignature *sig = helper_sig_generic_class_init_trampoline;
+					MonoCallInst *call;
+					MonoInst *this, *vtable;
+
+					NEW_ARGLOAD (cfg, this, 0);
+
+					if (relation == MINI_GENERIC_CLASS_RELATION_SELF) {
+						MONO_INST_NEW (cfg, vtable, CEE_LDIND_I);
+						vtable->cil_code = ip;
+						vtable->inst_left = this;
+						vtable->type = STACK_PTR;
+						vtable->klass = klass;
+					} else {
+						MonoInst *rgctx = get_runtime_generic_context_from_this (cfg, this, ip);
+
+						vtable = get_runtime_generic_context_ptr (cfg, method, bblock, klass,
+							token, generic_context, rgctx, MINI_RGCTX_VTABLE, ip);
+					}
+
+					call = mono_emit_call_args (cfg, bblock, sig, NULL, FALSE, FALSE, ip, FALSE);
+					call->inst.opcode = OP_TRAMPCALL_VTABLE;
+					call->fptr = mono_get_trampoline_code (MONO_TRAMPOLINE_GENERIC_CLASS_INIT);
+
+					call->inst.inst_left = vtable;
+
+					mono_spill_call (cfg, bblock, call, sig, FALSE, ip, FALSE);
+				}
+
+				/*
+				 * The pointer we're computing here is
+				 *
+				 *   super_info.static_data + field->offset
+				 */
+
+				NEW_ARGLOAD (cfg, this, 0);
+				rgctx = get_runtime_generic_context_from_this (cfg, this, ip);
+				static_data = get_runtime_generic_context_ptr (cfg, method, bblock, klass,
+					token, generic_context, rgctx, MINI_RGCTX_STATIC_DATA, ip);
+
+				if (field->offset == 0) {
+					ins = static_data;
+				} else {
+					MonoInst *field_offset;
+
+					NEW_ICONST (cfg, field_offset, field->offset);
+
+					MONO_INST_NEW (cfg, ins, OP_PADD);
+					ins->cil_code = ip;
+					ins->inst_left = static_data;
+					ins->inst_right = field_offset;
+					ins->type = STACK_PTR;
+					ins->klass = klass;
+				}
+			} else if ((cfg->opt & MONO_OPT_SHARED) || (cfg->compile_aot && addr)) {
 				int temp;
 				MonoInst *iargs [2];
 				MonoInst *domain_var;
@@ -6684,7 +6774,7 @@
 			} else {
 				gboolean is_const = FALSE;
 				MonoVTable *vtable = mono_class_vtable (cfg->domain, klass);
-				if (!((cfg->opt & MONO_OPT_SHARED) || cfg->compile_aot) && 
+				if (!shared_access && !((cfg->opt & MONO_OPT_SHARED) || cfg->compile_aot) && 
 				    vtable->initialized && (field->type->attrs & FIELD_ATTRIBUTE_INIT_ONLY)) {
 					gpointer addr = (char*)vtable->data + field->offset;
 					int ro_type = field->type->type;
@@ -8308,7 +8398,8 @@
 	case OP_VCALL:
 	case OP_VCALLVIRT:
 	case OP_VOIDCALL:
-	case OP_VOIDCALLVIRT: {
+	case OP_VOIDCALLVIRT:
+	case OP_TRAMPCALL_VTABLE: {
 		MonoCallInst *call = (MonoCallInst*)tree;
 		if (call->method)
 			printf ("[%s]", call->method->name);
@@ -8400,6 +8491,7 @@
 {
 	helper_sig_domain_get = mono_create_icall_signature ("ptr");
 	helper_sig_class_init_trampoline = mono_create_icall_signature ("void");
+	helper_sig_generic_class_init_trampoline = mono_create_icall_signature ("void");
 }
 
 gconstpointer
@@ -8448,6 +8540,7 @@
 	mono_trampoline_code [MONO_TRAMPOLINE_GENERIC] = mono_arch_create_trampoline_code (MONO_TRAMPOLINE_GENERIC);
 	mono_trampoline_code [MONO_TRAMPOLINE_JUMP] = mono_arch_create_trampoline_code (MONO_TRAMPOLINE_JUMP);
 	mono_trampoline_code [MONO_TRAMPOLINE_CLASS_INIT] = mono_arch_create_trampoline_code (MONO_TRAMPOLINE_CLASS_INIT);
+	mono_trampoline_code [MONO_TRAMPOLINE_GENERIC_CLASS_INIT] = mono_arch_create_trampoline_code (MONO_TRAMPOLINE_GENERIC_CLASS_INIT);
 #ifdef MONO_ARCH_HAVE_PIC_AOT
 	mono_trampoline_code [MONO_TRAMPOLINE_AOT] = mono_arch_create_trampoline_code (MONO_TRAMPOLINE_AOT);
 	mono_trampoline_code [MONO_TRAMPOLINE_AOT_PLT] = mono_arch_create_trampoline_code (MONO_TRAMPOLINE_AOT_PLT);
@@ -8479,6 +8572,8 @@
 {
 	gpointer code, ptr;
 
+	g_assert (!vtable->klass->generic_container);
+
 	/* previously created trampoline code */
 	mono_domain_lock (vtable->domain);
 	ptr = 
Index: mini/mini.h
===================================================================
--- mini/mini.h	(revision 89680)
+++ mini/mini.h	(working copy)
@@ -513,6 +513,7 @@
 	MONO_TRAMPOLINE_GENERIC,
 	MONO_TRAMPOLINE_JUMP,
 	MONO_TRAMPOLINE_CLASS_INIT,
+	MONO_TRAMPOLINE_GENERIC_CLASS_INIT,
 	MONO_TRAMPOLINE_AOT,
 	MONO_TRAMPOLINE_AOT_PLT,
 	MONO_TRAMPOLINE_DELEGATE,
@@ -961,6 +962,7 @@
 gpointer          mono_aot_plt_trampoline (gssize *regs, guint8 *code, guint8 *token_info, 
 										   guint8* tramp) MONO_INTERNAL;
 void              mono_class_init_trampoline (gssize *regs, guint8 *code, MonoVTable *vtable, guint8 *tramp) MONO_INTERNAL;
+void              mono_generic_class_init_trampoline (gssize *regs, guint8 *code, gpointer dummy, guint8 *tramp) MONO_INTERNAL;
 gpointer          mono_debugger_create_notification_function (void) MONO_INTERNAL;
 
 
@@ -1057,6 +1059,7 @@
 void        mono_arch_emit_imt_argument         (MonoCompile *cfg, MonoCallInst *call) MONO_INTERNAL;
 MonoMethod* mono_arch_find_imt_method           (gpointer *regs, guint8 *code) MONO_INTERNAL;
 MonoObject* mono_arch_find_this_argument        (gpointer *regs, MonoMethod *method) MONO_INTERNAL;
+MonoVTable* mono_arch_find_vtable		(gpointer *regs, guint8 *code) MONO_INTERNAL;
 gpointer    mono_arch_build_imt_thunk           (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count) MONO_INTERNAL;
 
 /* Exception handling */
Index: mini/mini-amd64.c
===================================================================
--- mini/mini-amd64.c	(revision 89679)
+++ mini/mini-amd64.c	(working copy)
@@ -5729,6 +5729,12 @@
 }
 #endif
 
+MonoVTable*
+mono_arch_find_vtable (gpointer *regs, guint8 *code)
+{
+	return (MonoVTable*) regs [MONO_ARCH_VTABLE_REG];
+}
+
 MonoInst*
 mono_arch_get_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
 {
Index: mini/mini-amd64.h
===================================================================
--- mini/mini-amd64.h	(revision 89679)
+++ mini/mini-amd64.h	(working copy)
@@ -268,6 +268,7 @@
 #define MONO_ARCH_HAVE_CREATE_VARS 1
 #define MONO_ARCH_HAVE_IMT 1
 #define MONO_ARCH_IMT_REG AMD64_R11
+#define MONO_ARCH_VTABLE_REG AMD64_R11
 #define MONO_ARCH_COMMON_VTABLE_TRAMPOLINE 1
 
 #define MONO_ARCH_AOT_SUPPORTED 1
Index: mini/mini-x86.c
===================================================================
--- mini/mini-x86.c	(revision 89679)
+++ mini/mini-x86.c	(working copy)
@@ -4316,6 +4316,12 @@
 }
 #endif
 
+MonoVTable*
+mono_arch_find_vtable (gpointer *regs, guint8 *code)
+{
+	return (MonoVTable*) regs [MONO_ARCH_VTABLE_REG];
+}
+
 MonoInst*
 mono_arch_get_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
 {
Index: mini/mini-x86.h
===================================================================
--- mini/mini-x86.h	(revision 89679)
+++ mini/mini-x86.h	(working copy)
@@ -275,6 +275,7 @@
 #define MONO_ARCH_HAVE_CREATE_VARS 1
 #define MONO_ARCH_HAVE_IMT 1
 #define MONO_ARCH_IMT_REG X86_EDX
+#define MONO_ARCH_VTABLE_REG X86_EDX
 #define MONO_ARCH_COMMON_VTABLE_TRAMPOLINE 1
 
 #define MONO_ARCH_AOT_SUPPORTED 1
Index: mini/tramp-amd64.c
===================================================================
--- mini/tramp-amd64.c	(revision 89679)
+++ mini/tramp-amd64.c	(working copy)
@@ -190,8 +190,10 @@
 
 	offset = 0;
 
-	/* Pop the return address off the stack */
-	amd64_pop_reg (code, AMD64_R11);
+	if (tramp_type != MONO_TRAMPOLINE_GENERIC_CLASS_INIT) {
+		/* Pop the return address off the stack */
+		amd64_pop_reg (code, AMD64_R11);
+	}
 
 	/* 
 	 * Allocate a new stack frame
@@ -200,16 +202,18 @@
 	amd64_mov_reg_reg (code, AMD64_RBP, AMD64_RSP, 8);
 	amd64_alu_reg_imm (code, X86_SUB, AMD64_RSP, framesize);
 
-	offset += 8;
-	tramp_offset = - offset;
+	if (tramp_type != MONO_TRAMPOLINE_GENERIC_CLASS_INIT) {
+		offset += 8;
+		tramp_offset = - offset;
 
-	offset += 8;
-	method_offset = - offset;
+		offset += 8;
+		method_offset = - offset;
 
-	/* Compute the trampoline address from the return address */
-	/* 5 = length of amd64_call_membase () */
-	amd64_alu_reg_imm (code, X86_SUB, AMD64_R11, 5);
-	amd64_mov_membase_reg (code, AMD64_RBP, tramp_offset, AMD64_R11, 8);
+		/* Compute the trampoline address from the return address */
+		/* 5 = length of amd64_call_membase () */
+		amd64_alu_reg_imm (code, X86_SUB, AMD64_R11, 5);
+		amd64_mov_membase_reg (code, AMD64_RBP, tramp_offset, AMD64_R11, 8);
+	}
 
 	/* Save all registers */
 
@@ -222,22 +226,24 @@
 	for (i = 0; i < 8; ++i)
 		amd64_movsd_membase_reg (code, AMD64_RBP, saved_fpregs_offset + (i * 8), i);
 
-	/* Obtain the trampoline argument which is encoded in the instruction stream */
-	amd64_mov_reg_membase (code, AMD64_R11, AMD64_RBP, tramp_offset, 8);
-	amd64_mov_reg_membase (code, AMD64_RAX, AMD64_R11, 5, 1);
-	amd64_widen_reg (code, AMD64_RAX, AMD64_RAX, TRUE, FALSE);
-	amd64_alu_reg_imm_size (code, X86_CMP, AMD64_RAX, 4, 1);
-	br [0] = code;
-	x86_branch8 (code, X86_CC_NE, 6, FALSE);
-	/* 32 bit immediate */
-	amd64_mov_reg_membase (code, AMD64_R11, AMD64_R11, 6, 4);
-	br [1] = code;
-	x86_jump8 (code, 10);
-	/* 64 bit immediate */
-	mono_amd64_patch (br [0], code);
-	amd64_mov_reg_membase (code, AMD64_R11, AMD64_R11, 6, 8);
-	mono_amd64_patch (br [1], code);
-	amd64_mov_membase_reg (code, AMD64_RBP, method_offset, AMD64_R11, 8);
+	if (tramp_type != MONO_TRAMPOLINE_GENERIC_CLASS_INIT) {
+		/* Obtain the trampoline argument which is encoded in the instruction stream */
+		amd64_mov_reg_membase (code, AMD64_R11, AMD64_RBP, tramp_offset, 8);
+		amd64_mov_reg_membase (code, AMD64_RAX, AMD64_R11, 5, 1);
+		amd64_widen_reg (code, AMD64_RAX, AMD64_RAX, TRUE, FALSE);
+		amd64_alu_reg_imm_size (code, X86_CMP, AMD64_RAX, 4, 1);
+		br [0] = code;
+		x86_branch8 (code, X86_CC_NE, 6, FALSE);
+		/* 32 bit immediate */
+		amd64_mov_reg_membase (code, AMD64_R11, AMD64_R11, 6, 4);
+		br [1] = code;
+		x86_jump8 (code, 10);
+		/* 64 bit immediate */
+		mono_amd64_patch (br [0], code);
+		amd64_mov_reg_membase (code, AMD64_R11, AMD64_R11, 6, 8);
+		mono_amd64_patch (br [1], code);
+		amd64_mov_membase_reg (code, AMD64_RBP, method_offset, AMD64_R11, 8);
+	}
 
 	/* Save LMF begin */
 
@@ -257,9 +263,11 @@
 	amd64_mov_reg_reg (code, AMD64_R11, AMD64_RSP, 8);
 	amd64_alu_reg_imm (code, X86_ADD, AMD64_R11, framesize + 16);
 	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, rsp), AMD64_R11, 8);
-	/* Save method */
-	amd64_mov_reg_membase (code, AMD64_R11, AMD64_RBP, method_offset, 8);
-	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, method), AMD64_R11, 8);
+	if (tramp_type != MONO_TRAMPOLINE_GENERIC_CLASS_INIT) {
+		/* Save method */
+		amd64_mov_reg_membase (code, AMD64_R11, AMD64_RBP, method_offset, 8);
+		amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, method), AMD64_R11, 8);
+	}
 	/* Save callee saved regs */
 #ifdef PLATFORM_WIN32
 	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, rdi), AMD64_RDI, 8);
@@ -294,14 +302,22 @@
 	else
 		amd64_mov_reg_imm (code, AMD64_ARG_REG2, 0);
 
-	/* Arg3 is the method/vtable ptr */
-	amd64_mov_reg_membase (code, AMD64_ARG_REG3, AMD64_RBP, method_offset, 8);
+	/* Arg3 is the method ptr / dummy */
+	if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
+		amd64_mov_reg_imm (code, AMD64_ARG_REG3, 0);
+	else
+		amd64_mov_reg_membase (code, AMD64_ARG_REG3, AMD64_RBP, method_offset, 8);
 
 	/* Arg4 is the trampoline address */
-	amd64_mov_reg_membase (code, AMD64_ARG_REG4, AMD64_RBP, tramp_offset, 8);
+	if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
+		amd64_mov_reg_imm (code, AMD64_ARG_REG4, 0);
+	else
+		amd64_mov_reg_membase (code, AMD64_ARG_REG4, AMD64_RBP, tramp_offset, 8);
 
 	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT)
 		tramp = (guint8*)mono_class_init_trampoline;
+	else if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
+		tramp = (guint8*)mono_generic_class_init_trampoline;
 	else if (tramp_type == MONO_TRAMPOLINE_AOT)
 		tramp = (guint8*)mono_aot_trampoline;
 	else if (tramp_type == MONO_TRAMPOLINE_AOT_PLT)
@@ -331,7 +347,7 @@
 	/* Restore stack */
 	amd64_leave (code);
 
-	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT)
+	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT || tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
 		amd64_ret (code);
 	else
 		/* call the compiled method */
Index: mini/graph.c
===================================================================
--- mini/graph.c	(revision 89679)
+++ mini/graph.c	(working copy)
@@ -235,6 +235,7 @@
 	case OP_LCALL_REG:
 	case OP_VCALL_REG:
 	case OP_VOIDCALL_REG:
+	case OP_TRAMPCALL_VTABLE:
 		mono_print_label (fp, tree->inst_left);
 		break;
 	case CEE_BNE_UN:
Index: mini/mini-ops.h
===================================================================
--- mini/mini-ops.h	(revision 89679)
+++ mini/mini-ops.h	(working copy)
@@ -36,6 +36,7 @@
 MINI_OP(OP_VOIDCALLVIRT,	"voidcallvirt")
 MINI_OP(OP_VOIDCALL_REG,	"voidcall_reg")
 MINI_OP(OP_VOIDCALL_MEMBASE,	"voidcall_membase")
+MINI_OP(OP_TRAMPCALL_VTABLE,	"trampcall_vtable")
 MINI_OP(OP_FCALL,	"fcall")
 MINI_OP(OP_FCALLVIRT,	"fcallvirt")
 MINI_OP(OP_FCALL_REG,	"fcall_reg")
Index: mini/local-propagation.c
===================================================================
--- mini/local-propagation.c	(revision 89679)
+++ mini/local-propagation.c	(working copy)
@@ -716,7 +716,8 @@
 	case OP_VCALL:
 	case OP_VOIDCALL_REG:
 	case OP_VOIDCALLVIRT:
-	case OP_VOIDCALL: {
+	case OP_VOIDCALL:
+	case OP_TRAMPCALL_VTABLE: {
 		MonoCallInst *call = (MonoCallInst *)tree;
 		MonoMethodSignature *sig = call->signature;
 		int i, byref = FALSE;
Index: mini/mini-trampolines.c
===================================================================
--- mini/mini-trampolines.c	(revision 89679)
+++ mini/mini-trampolines.c	(working copy)
@@ -279,6 +279,24 @@
 	}
 }
 
+/**
+ * mono_generic_class_init_trampoline:
+ *
+ * This method calls mono_runtime_class_init () to run the static constructor
+ * for the type whose vtable is obtained via mono_arch_find_vtable ().
+ */
+void
+mono_generic_class_init_trampoline (gssize *regs, guint8 *code, gpointer dummy, guint8 *tramp)
+{
+	MonoVTable *vtable = mono_arch_find_vtable ((gpointer*)regs, code);
+
+	//g_print ("generic class init for class %s.%s\n", vtable->klass->name_space, vtable->klass->name);
+
+	mono_runtime_class_init (vtable);
+
+	//g_print ("done initing generic\n");
+}
+
 #ifdef MONO_ARCH_HAVE_CREATE_DELEGATE_TRAMPOLINE
 
 /**
Index: mini/aliasing.c
===================================================================
--- mini/aliasing.c	(revision 89679)
+++ mini/aliasing.c	(working copy)
@@ -167,7 +167,8 @@
 	case OP_VCALL:
 	case OP_VCALLVIRT:
 	case OP_VOIDCALL:
-	case OP_VOIDCALLVIRT: {
+	case OP_VOIDCALLVIRT:
+	case OP_TRAMPCALL_VTABLE: {
 		MonoCallInst *call = (MonoCallInst*)tree;
 		if (call->method)
 			printf ("[%s]", call->method->name);
Index: mini/tramp-x86.c
===================================================================
--- mini/tramp-x86.c	(revision 89679)
+++ mini/tramp-x86.c	(working copy)
@@ -190,6 +190,16 @@
 	 * and it is stored at: esp + pushed_args * sizeof (gpointer)
 	 * the ret address is at: esp + (pushed_args + 1) * sizeof (gpointer)
 	 */
+
+	/* If this is a generic class init the argument is not on the
+	 * stack yet but in MONO_ARCH_VTABLE_REG and is accessed by
+	 * the callee via the register array.  As a dummy argument we
+	 * pass it NULL, which we push here, and then everything that
+	 * follows works the same way.
+	 */
+	if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
+		x86_push_imm (buf, 0);
+
 	/* Put all registers into an array on the stack
 	 * If this code is changed, make sure to update the offset value in
 	 * mono_arch_find_this_argument () in mini-x86.c.
@@ -298,6 +308,8 @@
 
 	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT)
 		x86_call_code (buf, mono_class_init_trampoline);
+	else if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
+		x86_call_code (buf, mono_generic_class_init_trampoline);
 	else if (tramp_type == MONO_TRAMPOLINE_AOT)
 		x86_call_code (buf, mono_aot_trampoline);
 	else if (tramp_type == MONO_TRAMPOLINE_AOT_PLT)
@@ -338,7 +350,7 @@
 	/* Pop saved reg array + stack align + method ptr */
 	x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 10 * 4);
 
-	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT)
+	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT || tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
 		x86_ret (buf);
 	else
 		/* call the compiled method */