[PATCH] x86: Introduce workarounds for valgrind to work with jato.
Jato can be compiled with workarounds which make valgrind work with jato. To do so, define VALGRIND variable for make: make jato VALGRIND=y Currently workarounds eliminate class initialization from signal handler by unconditionally selecting calls to vm_class_ensure_init(). Signed-off-by: Tomek Grabiec tgrab...@gmail.com --- Makefile |5 + arch/x86/insn-selector.brg | 44 2 files changed, 49 insertions(+), 0 deletions(-) diff --git a/Makefile b/Makefile index 55ef9ba..f9c8d9b 100644 --- a/Makefile +++ b/Makefile @@ -177,6 +177,11 @@ INSTALL:= install DEFAULT_CFLAGS += $(ARCH_CFLAGS) -g -rdynamic -std=gnu99 -D_GNU_SOURCE -fstack-protector-all -D_FORTIFY_SOURCE=2 +ifdef VALGRIND +DEFAULT_CFLAGS += -DCONFIG_VALGRIND +MB_DEFINES += -DCONFIG_VALGRIND +endif + # XXX: Temporary hack -Vegard DEFAULT_CFLAGS += -DNOT_IMPLEMENTED='fprintf(stderr, %s:%d: warning: %s not implemented\n, __FILE__, __LINE__, __func__)' diff --git a/arch/x86/insn-selector.brg b/arch/x86/insn-selector.brg index 8522667..9c8dd70 100644 --- a/arch/x86/insn-selector.brg +++ b/arch/x86/insn-selector.brg @@ -1051,6 +1051,14 @@ reg: EXPR_CLASS_FIELD 1 vmc_state = vmc-state; vm_monitor_unlock(vmc-monitor); +%ifdef CONFIG_VALGRIND + select_insn(s, tree, imm_insn(INSN_PUSH_IMM, (unsigned long)vmc)); + select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)vm_class_ensure_init)); + method_args_cleanup(s, tree, 1); + + mov_insn = memdisp_reg_insn(INSN_MOV_MEMDISP_REG, + (unsigned long) vmc-static_values + vmf-offset, out); +%else if (vmc_state = VM_CLASS_INITIALIZING) { /* Class is already initialized; no need for fix-up. 
We also * don't want the fixup if we're already inside the @@ -1064,6 +1072,7 @@ reg: EXPR_CLASS_FIELD 1 /* XXX: Check return value */ add_getstatic_fixup_site(mov_insn, vmf, s->b_parent); } +%endif /* CONFIG_VALGRIND */ select_insn(s, tree, mov_insn); @@ -1097,6 +1106,18 @@ freg:EXPR_FLOAT_CLASS_FIELD 1 vmc_state = vmc->state; vm_monitor_unlock(vmc->monitor); +%ifdef CONFIG_VALGRIND + select_insn(s, tree, imm_insn(INSN_PUSH_IMM, (unsigned long)vmc)); + select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)vm_class_ensure_init)); + method_args_cleanup(s, tree, 1); + + if (expr->vm_type == J_FLOAT) + mov_insn = memdisp_reg_insn(INSN_MOV_MEMDISP_XMM, + (unsigned long) vmc->static_values + vmf->offset, out); + else + mov_insn = memdisp_reg_insn(INSN_MOV_64_MEMDISP_XMM, + (unsigned long) vmc->static_values + vmf->offset, out); +%else if (vmc_state >= VM_CLASS_INITIALIZING) { /* Class is already initialized; no need for fix-up. We also * don't want the fixup if we're already inside the @@ -1118,6 +1139,7 @@ freg: EXPR_FLOAT_CLASS_FIELD 1 /* XXX: Check return value */ add_getstatic_fixup_site(mov_insn, vmf, s->b_parent); } +%endif /* CONFIG_VALGRIND */ select_insn(s, tree, mov_insn); } @@ -1995,6 +2017,14 @@ stmt:STMT_STORE(EXPR_CLASS_FIELD, reg) vmc_state = vmc->state; vm_monitor_unlock(vmc->monitor); +%ifdef CONFIG_VALGRIND + select_insn(s, tree, imm_insn(INSN_PUSH_IMM, (unsigned long)vmc)); + select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)vm_class_ensure_init)); + method_args_cleanup(s, tree, 1); + + mov_insn = reg_memdisp_insn(INSN_MOV_REG_MEMDISP, + src, (unsigned long) vmc->static_values + vmf->offset); +%else if (vmc_state >= VM_CLASS_INITIALIZING) { /* Class is already initialized; no need for fix-up. 
We also * don't want the fixup if we're already inside the @@ -2008,6 +2038,7 @@ stmt: STMT_STORE(EXPR_CLASS_FIELD, reg) /* XXX: Check return value */ add_putstatic_fixup_site(mov_insn, vmf, s->b_parent); } +%endif /* CONFIG_VALGRIND */ select_insn(s, tree, mov_insn); @@ -2044,6 +2075,18 @@ stmt:STMT_STORE(EXPR_FLOAT_CLASS_FIELD, freg) vmc_state = vmc->state; vm_monitor_unlock(vmc->monitor); +%ifdef CONFIG_VALGRIND + select_insn(s, tree, imm_insn(INSN_PUSH_IMM, (unsigned long)vmc)); + select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)vm_class_ensure_init)); + method_args_cleanup(s, tree, 1); + + if (store_dest->vm_type == J_FLOAT) + mov_insn = reg_memdisp_insn(INSN_MOV_XMM_MEMDISP, + src, (unsigned long) vmc->static_values + vmf->offset); + else + mov_insn =
[PATCH] x86: fix writes below (%esp) which can be fixed at no cost
Signed-off-by: Tomek Grabiec tgrab...@gmail.com --- arch/x86/insn-selector.brg | 30 -- 1 files changed, 16 insertions(+), 14 deletions(-) diff --git a/arch/x86/insn-selector.brg b/arch/x86/insn-selector.brg index 2e7367f..85f2fa6 100644 --- a/arch/x86/insn-selector.brg +++ b/arch/x86/insn-selector.brg @@ -494,10 +494,11 @@ freg: OP_DREM(freg, freg) 1 select_insn(s, tree, reg_membase_insn(INSN_MOV_64_XMM_MEMBASE, state-right-reg1, esp, 8)); select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)fmod)); - method_args_cleanup(s, tree, 4); - select_insn(s, tree, membase_insn(INSN_FSTP_64_MEMBASE, esp, -8)); - select_insn(s, tree, membase_reg_insn(INSN_MOV_64_MEMBASE_XMM, esp, -8, state-reg1)); + select_insn(s, tree, membase_insn(INSN_FSTP_64_MEMBASE, esp, 0)); + select_insn(s, tree, membase_reg_insn(INSN_MOV_64_MEMBASE_XMM, esp, 0, state-reg1)); + + method_args_cleanup(s, tree, 4); } freg: OP_FREM(freg, freg) 1 @@ -514,10 +515,11 @@ freg: OP_FREM(freg, freg) 1 select_insn(s, tree, reg_membase_insn(INSN_MOV_XMM_MEMBASE, state-right-reg1, esp, 4)); select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)fmodf)); - method_args_cleanup(s, tree, 2); - select_insn(s, tree, membase_insn(INSN_FSTP_MEMBASE, esp, -4)); - select_insn(s, tree, membase_reg_insn(INSN_MOV_MEMBASE_XMM, esp, -4, state-reg1)); + select_insn(s, tree, membase_insn(INSN_FSTP_MEMBASE, esp, 0)); + select_insn(s, tree, membase_reg_insn(INSN_MOV_MEMBASE_XMM, esp, 0, state-reg1)); + + method_args_cleanup(s, tree, 2); } reg: OP_REM_64(reg, reg) 1 @@ -1823,16 +1825,16 @@ arg:EXPR_ARG(freg) size = get_vmtype_size(arg_expr-vm_type); + select_insn(s, tree, imm_reg_insn(INSN_SUB_IMM_REG, size, esp)); + if (arg_expr-vm_type == J_FLOAT) { select_insn(s, tree, - reg_membase_insn(INSN_MOV_XMM_MEMBASE, src, esp, -size)); + reg_membase_insn(INSN_MOV_XMM_MEMBASE, src, esp, 0)); } else { select_insn(s, tree, - reg_membase_insn(INSN_MOV_64_XMM_MEMBASE, src, esp, -size)); + reg_membase_insn(INSN_MOV_64_XMM_MEMBASE, 
src, esp, 0)); } - select_insn(s, tree, imm_reg_insn(INSN_SUB_IMM_REG, size, esp)); - state->reg1 = NULL; } %else @@ -1877,12 +1879,12 @@ arg:EXPR_ARG(freg) } else { int size = get_vmtype_size(arg_expr->vm_type); + select_insn(s, tree, imm_reg_insn(INSN_SUB_IMM_REG, size, esp)); + if (arg_expr->vm_type == J_FLOAT) - select_insn(s, tree, reg_membase_insn(INSN_MOV_XMM_MEMBASE, src, esp, -size)); + select_insn(s, tree, reg_membase_insn(INSN_MOV_XMM_MEMBASE, src, esp, 0)); else - select_insn(s, tree, reg_membase_insn(INSN_MOV_64_XMM_MEMBASE, src, esp, -size)); - - select_insn(s, tree, imm_reg_insn(INSN_SUB_IMM_REG, size, esp)); + select_insn(s, tree, reg_membase_insn(INSN_MOV_64_XMM_MEMBASE, src, esp, 0)); } state->reg1 = NULL; -- 1.6.3.3 -- Let Crystal Reports handle the reporting - Free Crystal Reports 2008 30-Day trial. Simplify your report design, integration and deployment - and focus on what you do best, core application coding. Discover what's new with Crystal Reports now. http://p.sf.net/sfu/bobj-july ___ Jatovm-devel mailing list Jatovm-devel@lists.sourceforge.net https://lists.sourceforge.net/lists/listinfo/jatovm-devel
Re: [PATCH] x86: Introduce workarounds for valgrind to work with jato.
Hi Tomek, On Mon, 2009-08-31 at 16:14 +0200, Tomek Grabiec wrote: Jato can be compiled with workarounds which make valgrind work with jato. To do so, define VALGRIND variable for make: make jato VALGRIND=y Currently workarounds eliminate class initialization from signal handler by unconditionally selecting calls to vm_class_ensure_init(). Signed-off-by: Tomek Grabiec tgrab...@gmail.com Can we turn this into -Xvalgrind command line option instead? We're still in early stages of development and are likely to use it a lot. An extra compile-time option will make debugging harder than necessary, I think. Pekka -- Let Crystal Reports handle the reporting - Free Crystal Reports 2008 30-Day trial. Simplify your report design, integration and deployment - and focus on what you do best, core application coding. Discover what's new with Crystal Reports now. http://p.sf.net/sfu/bobj-july ___ Jatovm-devel mailing list Jatovm-devel@lists.sourceforge.net https://lists.sourceforge.net/lists/listinfo/jatovm-devel
Re: [PATCH] x86: fix writes below (%esp) which can be fixed at no cost
Hi Tomek, On Mon, 2009-08-31 at 20:30 +0200, Tomek Grabiec wrote: Signed-off-by: Tomek Grabiec tgrab...@gmail.com --- arch/x86/insn-selector.brg | 30 -- 1 files changed, 16 insertions(+), 14 deletions(-) diff --git a/arch/x86/insn-selector.brg b/arch/x86/insn-selector.brg index 2e7367f..85f2fa6 100644 --- a/arch/x86/insn-selector.brg +++ b/arch/x86/insn-selector.brg @@ -494,10 +494,11 @@ freg: OP_DREM(freg, freg) 1 select_insn(s, tree, reg_membase_insn(INSN_MOV_64_XMM_MEMBASE, state-right-reg1, esp, 8)); select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)fmod)); - method_args_cleanup(s, tree, 4); - select_insn(s, tree, membase_insn(INSN_FSTP_64_MEMBASE, esp, -8)); - select_insn(s, tree, membase_reg_insn(INSN_MOV_64_MEMBASE_XMM, esp, -8, state-reg1)); + select_insn(s, tree, membase_insn(INSN_FSTP_64_MEMBASE, esp, 0)); + select_insn(s, tree, membase_reg_insn(INSN_MOV_64_MEMBASE_XMM, esp, 0, state-reg1)); + + method_args_cleanup(s, tree, 4); } freg:OP_FREM(freg, freg) 1 @@ -514,10 +515,11 @@ freg: OP_FREM(freg, freg) 1 select_insn(s, tree, reg_membase_insn(INSN_MOV_XMM_MEMBASE, state-right-reg1, esp, 4)); select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)fmodf)); - method_args_cleanup(s, tree, 2); - select_insn(s, tree, membase_insn(INSN_FSTP_MEMBASE, esp, -4)); - select_insn(s, tree, membase_reg_insn(INSN_MOV_MEMBASE_XMM, esp, -4, state-reg1)); + select_insn(s, tree, membase_insn(INSN_FSTP_MEMBASE, esp, 0)); + select_insn(s, tree, membase_reg_insn(INSN_MOV_MEMBASE_XMM, esp, 0, state-reg1)); + + method_args_cleanup(s, tree, 2); } reg: OP_REM_64(reg, reg) 1 As mentioned on the IRC: the first two hunks make the generated asm less readable so I think we should use get_scratch_slot() here. -- Let Crystal Reports handle the reporting - Free Crystal Reports 2008 30-Day trial. Simplify your report design, integration and deployment - and focus on what you do best, core application coding. Discover what's new with Crystal Reports now. 
http://p.sf.net/sfu/bobj-july ___ Jatovm-devel mailing list Jatovm-devel@lists.sourceforge.net https://lists.sourceforge.net/lists/listinfo/jatovm-devel