Generate sign-extended 32-bit constants with SETHI+XOR. Otherwise tidy
the routine to avoid the need for conditional compilation and code
duplication with movi_imm32.
---
 tcg/sparc/tcg-target.c |   27 +++++++++++++++------------
 1 files changed, 15 insertions(+), 12 deletions(-)
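
[Note: the SETHI+XOR trick relies on SETHI zero-extending its 22-bit
immediate into bits 10-31 while the XOR immediate sign-extends from 13
bits, so XORing SETHI of ~arg with (arg & 0x3ff) | -0x400 restores the
low 10 bits, recovers bits 10-31 of arg, and sets bits 32-63 to
all-ones. A minimal host-side sketch of the two-instruction sequence
follows; sethi_xor and the test values are illustrative only, not part
of the patch:

    #include <assert.h>
    #include <stdint.h>

    /* Models "sethi %hi(~arg), ret; xor ret, (arg & 0x3ff) | -0x400, ret".
       Valid for any negative 32-bit arg, the only values that reach
       this path in tcg_out_movi.  */
    static uint64_t sethi_xor(int32_t arg)
    {
        uint64_t ret = (uint32_t)~arg & 0xfffffc00u; /* sethi zero-extends */
        int64_t simm13 = (arg & 0x3ff) | -0x400;     /* xor sign-extends */
        return ret ^ (uint64_t)simm13;
    }

    int main(void)
    {
        int32_t tests[] = { -1, -0x400, -0x401, -0x12345678, INT32_MIN };
        for (unsigned i = 0; i < sizeof(tests) / sizeof(tests[0]); i++) {
            assert(sethi_xor(tests[i]) == (uint64_t)(int64_t)tests[i]);
        }
        return 0;
    }
]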
diff --git a/tcg/sparc/tcg-target.c b/tcg/sparc/tcg-target.c
index b480ed6..78b26c9 100644
--- a/tcg/sparc/tcg-target.c
+++ b/tcg/sparc/tcg-target.c
@@ -314,22 +314,25 @@ static inline void tcg_out_movi_imm32(TCGContext *s, int ret, uint32_t arg)
 static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                 int ret, tcg_target_long arg)
 {
-#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
-    if (!check_fit_tl(arg, 32) && (arg & ~0xffffffffULL) != 0) {
-        tcg_out_movi_imm32(s, TCG_REG_I4, arg >> 32);
+    /* All 32-bit constants, as well as 64-bit constants with
+       no high bits set go through movi_imm32.  */
+    if (TCG_TARGET_REG_BITS == 32
+        || type == TCG_TYPE_I32
+        || (arg & ~(tcg_target_long)0xffffffff) == 0) {
+        tcg_out_movi_imm32(s, ret, arg);
+    } else if (check_fit_tl(arg, 13)) {
+        /* A 13-bit constant sign-extended to 64-bits.  */
+        tcg_out_movi_imm13(s, ret, arg);
+    } else if (check_fit_tl(arg, 32)) {
+        /* A 32-bit constant sign-extended to 64-bits.  */
+        tcg_out_sethi(s, ret, ~arg);
+        tcg_out_arithi(s, ret, ret, (arg & 0x3ff) | -0x400, ARITH_XOR);
+    } else {
+        tcg_out_movi_imm32(s, TCG_REG_I4, arg >> (TCG_TARGET_REG_BITS / 2));
         tcg_out_arithi(s, TCG_REG_I4, TCG_REG_I4, 32, SHIFT_SLLX);
         tcg_out_movi_imm32(s, ret, arg);
         tcg_out_arith(s, ret, ret, TCG_REG_I4, ARITH_OR);
-    } else if (check_fit_tl(arg, 12))
-        tcg_out_movi_imm13(s, ret, arg);
-    else {
-        tcg_out_sethi(s, ret, arg);
-        if (arg & 0x3ff)
-            tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
     }
-#else
-    tcg_out_movi_imm32(s, ret, arg);
-#endif
 }
 
 static inline void tcg_out_ld_raw(TCGContext *s, int ret,
-- 
1.6.5.2