On 01/10/2014 11:07 AM, Tom Musta wrote:
> +#define MV_VSR(name, tcgop1, tcgop2, target, source)          \
> +static void gen_##name(DisasContext *ctx)                     \
> +{                                                             \
> +    if (xS(ctx->opcode) < 32) {                               \
> +        if (unlikely(!ctx->fpu_enabled)) {                    \
> +            gen_exception(ctx, POWERPC_EXCP_FPU);             \
> +            return;                                           \
> +        }                                                     \
> +    } else {                                                  \
> +        if (unlikely(!ctx->altivec_enabled)) {                \
> +            gen_exception(ctx, POWERPC_EXCP_VPU);             \
> +            return;                                           \
> +        }                                                     \
> +    }                                                         \
> +    TCGv_i64 tmp = tcg_temp_new_i64();                        \
> +    tcg_gen_##tcgop1(tmp, source);                            \
> +    tcg_gen_##tcgop2(target, tmp);                            \
> +    tcg_temp_free_i64(tmp);                                   \
> +}
> +
> +
> +MV_VSR(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
> +       cpu_vsrh(xS(ctx->opcode)))
> +MV_VSR(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
> +       cpu_gpr[rA(ctx->opcode)])
> +MV_VSR(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
> +       cpu_gpr[rA(ctx->opcode)])
> +#if defined(TARGET_PPC64)
> +MV_VSR(mfvsrd, mov_i64, mov_i64, cpu_gpr[rA(ctx->opcode)], \
> +       cpu_vsrh(xS(ctx->opcode)))
> +MV_VSR(mtvsrd, mov_i64, mov_i64, cpu_vsrh(xT(ctx->opcode)), \
> +       cpu_gpr[rA(ctx->opcode)])
> +#endif
Better to do this in one step:

  mfvsrwz:   tcg_gen_ext32u_tl
  mtvsrwa:   tcg_gen_ext_tl_i64
  mtvsrwz:   tcg_gen_extu_tl_i64
  m[tf]vsrd: tcg_gen_mov_i64


r~
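
For illustration, here is a minimal sketch of what the suggested single-op
form could look like for mfvsrwz and m[tf]vsrd. It is not the actual respin:
the MV_VSR_1OP macro name is made up here, and it assumes TARGET_PPC64 (so
TCGv and TCGv_i64 have the same width and no temporary or truncation is
needed), reusing the same guards and helpers (xS, xT, rA, cpu_vsrh,
gen_exception) as the patch quoted above.

#if defined(TARGET_PPC64)
/* Hypothetical one-op variant of the macro above: the whole move is a
 * single TCG operation, so no i64 temporary is allocated or freed. */
#define MV_VSR_1OP(name, tcgop, target, source)                 \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    tcg_gen_##tcgop(target, source);                            \
}

/* mfvsrwz: zero-extend the low word of VSR[XS] doubleword 0 into the GPR;
 * on a 64-bit target, ext32u_tl does this in one step. */
MV_VSR_1OP(mfvsrwz, ext32u_tl, cpu_gpr[rA(ctx->opcode)],
           cpu_vsrh(xS(ctx->opcode)))
/* m[tf]vsrd: plain 64-bit register-to-register moves. */
MV_VSR_1OP(mfvsrd, mov_i64, cpu_gpr[rA(ctx->opcode)],
           cpu_vsrh(xS(ctx->opcode)))
MV_VSR_1OP(mtvsrd, mov_i64, cpu_vsrh(xT(ctx->opcode)),
           cpu_gpr[rA(ctx->opcode)])
#endif

The mtvsrw[az] forms would collapse the same way to a single extension op;
which extension op fits depends on whether the target is 32- or 64-bit,
since the value has to be sign- or zero-extended from the low word of the
GPR into the 64-bit VSR doubleword.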