This fixes class field handling on x86-64. The existing 32-bit
EXPR_CLASS_FIELD and STMT_STORE(EXPR_CLASS_FIELD, reg) rules are kept
under %ifdef CONFIG_X86_32; the new 64-bit variants select a single
64-bit move into a J_LONG register instead of a 32-bit register pair,
reusing the static_guard_page fixup for classes that have not been
initialized yet.

Signed-off-by: Eduard - Gabriel Munteanu <eduard.munte...@linux360.ro>
---
 arch/x86/insn-selector.brg |   85 ++++++++++++++++++++++++++++++++++++++++++++
 1 files changed, 85 insertions(+), 0 deletions(-)
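
A note for reviewers, since the guard-page dance is not obvious from the
diff alone: when the class has not reached VM_CLASS_INITIALIZING, the
selector emits the memory operand against static_guard_page and records a
fixup site. Touching the guard page faults, which (as I understand the
scheme) gives the VM the chance to run the class initializer and patch
the recorded sites to point at the real vmc->static_values + vmf->offset
slot. The stand-alone C sketch below models that flow; class_desc,
fixup_site, select_getstatic and resolve_fixups are illustrative names,
not Jato's actual API:

#include <stdint.h>
#include <stdio.h>

enum class_state { CLASS_LOADED, CLASS_INITIALIZING, CLASS_INITIALIZED };

struct class_desc {
        enum class_state state;
        long static_values[4];          /* backing store for statics */
};

struct fixup_site {
        uintptr_t *disp;                /* displacement operand to patch */
        struct class_desc *vmc;
        unsigned long offset;
};

static long guard_page[1];              /* stands in for static_guard_page */

static struct fixup_site sites[16];
static int nr_sites;

/* Mirrors the selection rule: initialized classes get the real field
 * address, everything else goes through the guard page and gets a
 * fixup site recorded (cf. add_getstatic_fixup_site). */
static void select_getstatic(uintptr_t *disp, struct class_desc *vmc,
                             unsigned long offset)
{
        if (vmc->state >= CLASS_INITIALIZING) {
                *disp = (uintptr_t) vmc->static_values + offset;
                return;
        }

        *disp = (uintptr_t) guard_page;
        sites[nr_sites++] = (struct fixup_site) {
                .disp = disp, .vmc = vmc, .offset = offset,
        };
}

/* Runs once the initializer has finished: patch every recorded site so
 * later executions hit the real static field directly. */
static void resolve_fixups(struct class_desc *vmc)
{
        for (int i = 0; i < nr_sites; i++) {
                if (sites[i].vmc != vmc)
                        continue;
                *sites[i].disp = (uintptr_t) vmc->static_values
                                 + sites[i].offset;
        }
}

int main(void)
{
        struct class_desc c = { .state = CLASS_LOADED };
        uintptr_t disp;

        select_getstatic(&disp, &c, 0); /* selected before <clinit> ran */
        printf("guarded: %d\n", disp == (uintptr_t) guard_page);

        c.state = CLASS_INITIALIZED;
        c.static_values[0] = 42;
        resolve_fixups(&c);

        printf("resolved: %ld\n", *(long *) disp);
        return 0;
}

The fixed-size sites[] table is only for brevity here; the real
add_getstatic_fixup_site() can fail, hence the XXX about checking its
return value in the patch below.
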
diff --git a/arch/x86/insn-selector.brg b/arch/x86/insn-selector.brg
index d1e8449..f9995fc 100644
--- a/arch/x86/insn-selector.brg
+++ b/arch/x86/insn-selector.brg
@@ -883,6 +883,7 @@ reg: OP_LE(reg, reg) 2
         binop_reg_reg_low(state, s, tree, INSN_CMP_REG_REG);
 }
 
+%ifdef CONFIG_X86_32
 reg: EXPR_CLASS_FIELD 1
 {
         struct expression *expr;
@@ -937,6 +938,46 @@ reg: EXPR_CLASS_FIELD 1
                                 state->reg2));
         }
 }
+%else
+reg: EXPR_CLASS_FIELD 1
+{
+        struct expression *expr;
+        struct var_info *out;
+        struct insn *mov_insn;
+
+        struct vm_field *vmf;
+        struct vm_class *vmc;
+        enum vm_class_state vmc_state;
+
+        expr = to_expr(tree);
+
+        out = get_var(s->b_parent, J_LONG);
+        state->reg1 = out;
+
+        vmf = expr->class_field;
+        vmc = vmf->class;
+
+        vm_monitor_lock(&vmc->monitor);
+        vmc_state = vmc->state;
+        vm_monitor_unlock(&vmc->monitor);
+
+        if (vmc_state >= VM_CLASS_INITIALIZING) {
+                /* Class is already initialized; no need for fix-up. We also
+                 * don't want the fixup if we're already inside the
+                 * initializer. */
+                mov_insn = memdisp_reg_insn(INSN_MOV_MEMDISP_REG,
+                        (unsigned long) vmc->static_values + vmf->offset, out);
+        } else {
+                mov_insn = memdisp_reg_insn(INSN_MOV_MEMDISP_REG,
+                        (unsigned long) static_guard_page, out);
+
+                /* XXX: Check return value */
+                add_getstatic_fixup_site(mov_insn, vmf, s->b_parent);
+        }
+
+        select_insn(s, tree, mov_insn);
+}
+%endif
 
 freg: EXPR_FLOAT_CLASS_FIELD 1
 {
@@ -1868,6 +1909,7 @@ stmt: STMT_EXPRESSION(freg)
 {
 }
 
+%ifdef CONFIG_X86_32
 stmt: STMT_STORE(EXPR_CLASS_FIELD, reg)
 {
         struct expression *store_dest;
@@ -1925,6 +1967,49 @@ stmt: STMT_STORE(EXPR_CLASS_FIELD, reg)
                         (unsigned long) vmc->static_values + vmf->offset + 4));
         }
 }
+%else
+stmt: STMT_STORE(EXPR_CLASS_FIELD, reg)
+{
+        struct expression *store_dest;
+        struct expression *store_src;
+        struct statement *stmt;
+        struct var_info *src;
+        struct insn *mov_insn;
+
+        struct vm_field *vmf;
+        struct vm_class *vmc;
+        enum vm_class_state vmc_state;
+
+        stmt = to_stmt(tree);
+        store_dest = to_expr(stmt->store_dest);
+        store_src = to_expr(stmt->store_src);
+
+        src = state->right->reg1;
+
+        vmf = store_dest->class_field;
+        vmc = vmf->class;
+
+        vm_monitor_lock(&vmc->monitor);
+        vmc_state = vmc->state;
+        vm_monitor_unlock(&vmc->monitor);
+
+        if (vmc_state >= VM_CLASS_INITIALIZING) {
+                /* Class is already initialized; no need for fix-up. We also
+                 * don't want the fixup if we're already inside the
+                 * initializer. */
+                mov_insn = reg_memdisp_insn(INSN_MOV_REG_MEMDISP,
+                        src, (unsigned long) vmc->static_values + vmf->offset);
+        } else {
+                mov_insn = reg_memdisp_insn(INSN_MOV_REG_MEMDISP,
+                        src, (unsigned long) static_guard_page);
+
+                /* XXX: Check return value */
+                add_putstatic_fixup_site(mov_insn, vmf, s->b_parent);
+        }
+
+        select_insn(s, tree, mov_insn);
+}
+%endif
 
 stmt: STMT_STORE(EXPR_FLOAT_CLASS_FIELD, freg)
 {
-- 
1.6.0.6
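
P.S.: The reason the rules are duplicated under %ifdef instead of shared:
for a J_LONG field the 32-bit code needs a register pair and two 4-byte
moves (see the vmf->offset + 4 context line above), while on x86-64 one
64-bit move into the J_LONG register from get_var() is enough. Below is a
rough stand-alone C model of that difference, assuming a little-endian
target; store_long_32/store_long_64 are made-up names, not Jato emitters:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* What the 32-bit rule effectively does for a long store: write the
 * low half at offset and the high half at offset + 4. */
static void store_long_32(void *statics, unsigned long offset, int64_t v)
{
        uint32_t lo = (uint32_t) v;
        uint32_t hi = (uint32_t) ((uint64_t) v >> 32);

        memcpy((char *) statics + offset, &lo, sizeof(lo));
        memcpy((char *) statics + offset + 4, &hi, sizeof(hi));
}

/* The 64-bit rule: a single 8-byte move (INSN_MOV_REG_MEMDISP). */
static void store_long_64(void *statics, unsigned long offset, int64_t v)
{
        memcpy((char *) statics + offset, &v, sizeof(v));
}

int main(void)
{
        int64_t a = 0, b = 0;

        store_long_32(&a, 0, 0x123456789abcdef0LL);
        store_long_64(&b, 0, 0x123456789abcdef0LL);

        /* Both layouts agree on a little-endian target like x86. */
        printf("match: %d\n", a == b);
        return 0;
}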