Jato can be compiled with workarounds which make valgrind
work with jato.
To do so, define VALGRIND variable for make:
make jato VALGRIND=y
Currently the workarounds eliminate class initialization
from the signal handler by unconditionally selecting
calls to vm_class_ensure_init().
Signed-off-by: Tomek Grabiec <tgrab...@gmail.com>
---
 Makefile                   |    5 +
 arch/x86/insn-selector.brg |   44 ++++++++++++++++++++++++++++++++++++++++
 2 files changed, 49 insertions(+), 0 deletions(-)
diff --git a/Makefile b/Makefile
index 55ef9ba..f9c8d9b 100644
--- a/Makefile
+++ b/Makefile
@@ -177,6 +177,11 @@ INSTALL:= install
DEFAULT_CFLAGS += $(ARCH_CFLAGS) -g -rdynamic -std=gnu99 -D_GNU_SOURCE -fstack-protector-all -D_FORTIFY_SOURCE=2
+ifdef VALGRIND
+DEFAULT_CFLAGS += -DCONFIG_VALGRIND
+MB_DEFINES += -DCONFIG_VALGRIND
+endif
+
# XXX: Temporary hack -Vegard
DEFAULT_CFLAGS += -DNOT_IMPLEMENTED='fprintf(stderr, "%s:%d: warning: %s not implemented\n", __FILE__, __LINE__, __func__)'
diff --git a/arch/x86/insn-selector.brg b/arch/x86/insn-selector.brg
index 8522667..9c8dd70 100644
--- a/arch/x86/insn-selector.brg
+++ b/arch/x86/insn-selector.brg
@@ -1051,6 +1051,14 @@ reg: EXPR_CLASS_FIELD 1
	vmc_state = vmc->state;
	vm_monitor_unlock(&vmc->monitor);
+%ifdef CONFIG_VALGRIND
+	select_insn(s, tree, imm_insn(INSN_PUSH_IMM, (unsigned long)vmc));
+	select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)vm_class_ensure_init));
+	method_args_cleanup(s, tree, 1);
+
+	mov_insn = memdisp_reg_insn(INSN_MOV_MEMDISP_REG,
+		(unsigned long) vmc->static_values + vmf->offset, out);
+%else
	if (vmc_state >= VM_CLASS_INITIALIZING) {
/* Class is already initialized; no need for fix-up. We also
* don't want the fixup if we're already inside the
@@ -1064,6 +1072,7 @@ reg: EXPR_CLASS_FIELD 1
/* XXX: Check return value */
		add_getstatic_fixup_site(mov_insn, vmf, s->b_parent);
}
+%endif /* CONFIG_VALGRIND */
select_insn(s, tree, mov_insn);
@@ -1097,6 +1106,18 @@ freg:EXPR_FLOAT_CLASS_FIELD 1
	vmc_state = vmc->state;
	vm_monitor_unlock(&vmc->monitor);
+%ifdef CONFIG_VALGRIND
+	select_insn(s, tree, imm_insn(INSN_PUSH_IMM, (unsigned long)vmc));
+	select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)vm_class_ensure_init));
+	method_args_cleanup(s, tree, 1);
+
+	if (expr->vm_type == J_FLOAT)
+		mov_insn = memdisp_reg_insn(INSN_MOV_MEMDISP_XMM,
+			(unsigned long) vmc->static_values + vmf->offset, out);
+	else
+		mov_insn = memdisp_reg_insn(INSN_MOV_64_MEMDISP_XMM,
+			(unsigned long) vmc->static_values + vmf->offset, out);
+%else
	if (vmc_state >= VM_CLASS_INITIALIZING) {
/* Class is already initialized; no need for fix-up. We also
* don't want the fixup if we're already inside the
@@ -1118,6 +1139,7 @@ freg: EXPR_FLOAT_CLASS_FIELD 1
/* XXX: Check return value */
		add_getstatic_fixup_site(mov_insn, vmf, s->b_parent);
}
+%endif /* CONFIG_VALGRIND */
select_insn(s, tree, mov_insn);
}
@@ -1995,6 +2017,14 @@ stmt:STMT_STORE(EXPR_CLASS_FIELD, reg)
	vmc_state = vmc->state;
	vm_monitor_unlock(&vmc->monitor);
+%ifdef CONFIG_VALGRIND
+	select_insn(s, tree, imm_insn(INSN_PUSH_IMM, (unsigned long)vmc));
+	select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)vm_class_ensure_init));
+	method_args_cleanup(s, tree, 1);
+
+	mov_insn = reg_memdisp_insn(INSN_MOV_REG_MEMDISP,
+		src, (unsigned long) vmc->static_values + vmf->offset);
+%else
	if (vmc_state >= VM_CLASS_INITIALIZING) {
/* Class is already initialized; no need for fix-up. We also
* don't want the fixup if we're already inside the
@@ -2008,6 +2038,7 @@ stmt: STMT_STORE(EXPR_CLASS_FIELD, reg)
/* XXX: Check return value */
		add_putstatic_fixup_site(mov_insn, vmf, s->b_parent);
}
+%endif /* CONFIG_VALGRIND */
select_insn(s, tree, mov_insn);
@@ -2044,6 +2075,18 @@ stmt:STMT_STORE(EXPR_FLOAT_CLASS_FIELD, freg)
	vmc_state = vmc->state;
	vm_monitor_unlock(&vmc->monitor);
+%ifdef CONFIG_VALGRIND
+	select_insn(s, tree, imm_insn(INSN_PUSH_IMM, (unsigned long)vmc));
+	select_insn(s, tree, rel_insn(INSN_CALL_REL, (unsigned long)vm_class_ensure_init));
+	method_args_cleanup(s, tree, 1);
+
+	if (store_dest->vm_type == J_FLOAT)
+		mov_insn = reg_memdisp_insn(INSN_MOV_XMM_MEMDISP,
+			src, (unsigned long) vmc->static_values + vmf->offset);
+	else
+ mov_insn =