diff --git a/qemu/tcg/tcg-op.c b/qemu/tcg/tcg-op.c
index b5b3f626..66c639af 100644
--- a/qemu/tcg/tcg-op.c
+++ b/qemu/tcg/tcg-op.c
@@ -107,15 +107,18 @@ void tcg_gen_mb(TCGContext *ctx, TCGBar mb_type)
 
 /* 32 bit ops */
 
+void tcg_gen_movi_i32(TCGContext *s, TCGv_i32 ret, int32_t arg)
+{
+    tcg_gen_mov_i32(s, ret, tcg_constant_i32(s, arg));
+}
+
 void tcg_gen_addi_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
     /* some cases can be optimized here */
     if (arg2 == 0) {
         tcg_gen_mov_i32(s, ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(s, arg2);
-        tcg_gen_add_i32(s, ret, arg1, t0);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_add_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
     }
 }
 
@@ -125,9 +128,7 @@ void tcg_gen_subfi_i32(TCGContext *s, TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
         /* Don't recurse with tcg_gen_neg_i32. */
         tcg_gen_op2_i32(s, INDEX_op_neg_i32, ret, arg2);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(s, arg1);
-        tcg_gen_sub_i32(s, ret, t0, arg2);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_sub_i32(s, ret, tcg_constant_i32(s, arg1), arg2);
     }
 }
 
@@ -137,15 +138,12 @@ void tcg_gen_subi_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(s, ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(s, arg2);
-        tcg_gen_sub_i32(s, ret, arg1, t0);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_sub_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
     }
 }
 
 void tcg_gen_andi_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
-    TCGv_i32 t0;
     /* Some cases can be optimized here. */
     switch (arg2) {
     case 0:
@@ -168,9 +166,8 @@ void tcg_gen_andi_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
         }
         break;
     }
-    t0 = tcg_const_i32(s, arg2);
-    tcg_gen_and_i32(s, ret, arg1, t0);
-    tcg_temp_free_i32(s, t0);
+
+    tcg_gen_and_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
 }
 
 void tcg_gen_ori_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -181,9 +178,7 @@ void tcg_gen_ori_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     } else if (arg2 == 0) {
         tcg_gen_mov_i32(s, ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(s, arg2);
-        tcg_gen_or_i32(s, ret, arg1, t0);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_or_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
     }
 }
 
@@ -196,9 +191,7 @@ void tcg_gen_xori_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
         /* Don't recurse with tcg_gen_not_i32.  */
         tcg_gen_op2_i32(s, INDEX_op_not_i32, ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(s, arg2);
-        tcg_gen_xor_i32(s, ret, arg1, t0);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_xor_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
     }
 }
 
@@ -208,9 +201,7 @@ void tcg_gen_shli_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(s, ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(s, arg2);
-        tcg_gen_shl_i32(s, ret, arg1, t0);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_shl_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
     }
 }
 
@@ -220,9 +211,7 @@ void tcg_gen_shri_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(s, ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(s, arg2);
-        tcg_gen_shr_i32(s, ret, arg1, t0);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_shr_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
     }
 }
 
@@ -232,9 +221,7 @@ void tcg_gen_sari_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(s, ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(s, arg2);
-        tcg_gen_sar_i32(s, ret, arg1, t0);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_sar_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
     }
 }
 
@@ -253,9 +240,7 @@ void tcg_gen_brcondi_i32(TCGContext *s, TCGCond cond, TCGv_i32 arg1, int32_t arg
     if (cond == TCG_COND_ALWAYS) {
         tcg_gen_br(s, l);
     } else if (cond != TCG_COND_NEVER) {
-        TCGv_i32 t0 = tcg_const_i32(s, arg2);
-        tcg_gen_brcond_i32(s, cond, arg1, t0, l);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_brcond_i32(s, cond, arg1, tcg_constant_i32(s, arg2), l);
     }
 }
 
@@ -274,9 +259,7 @@ void tcg_gen_setcond_i32(TCGContext *s, TCGCond cond, TCGv_i32 ret,
 void tcg_gen_setcondi_i32(TCGContext *s, TCGCond cond, TCGv_i32 ret,
                           TCGv_i32 arg1, int32_t arg2)
 {
-    TCGv_i32 t0 = tcg_const_i32(s, arg2);
-    tcg_gen_setcond_i32(s, cond, ret, arg1, t0);
-    tcg_temp_free_i32(s, t0);
+    tcg_gen_setcond_i32(s, cond, ret, arg1, tcg_constant_i32(s, arg2));
 }
 
 void tcg_gen_muli_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -286,9 +269,7 @@ void tcg_gen_muli_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     } else if (is_power_of_2(arg2)) {
         tcg_gen_shli_i32(s, ret, arg1, ctz32(arg2));
     } else {
-        TCGv_i32 t0 = tcg_const_i32(s, arg2);
-        tcg_gen_mul_i32(s, ret, arg1, t0);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_mul_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
     }
 }
 
@@ -436,9 +417,7 @@ void tcg_gen_clz_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 
 void tcg_gen_clzi_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
 {
-    TCGv_i32 t = tcg_const_i32(s, arg2);
-    tcg_gen_clz_i32(s, ret, arg1, t);
-    tcg_temp_free_i32(s, t);
+    tcg_gen_clz_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
 }
 
 void tcg_gen_ctz_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
@@ -471,10 +450,9 @@ void tcg_gen_ctz_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
             tcg_gen_clzi_i32(s, t, t, 32);
             tcg_gen_xori_i32(s, t, t, 31);
         }
-        z = tcg_const_i32(s, 0);
+        z = tcg_constant_i32(s, 0);
         tcg_gen_movcond_i32(s, TCG_COND_EQ, ret, arg1, z, arg2, t);
         tcg_temp_free_i32(s, t);
-        tcg_temp_free_i32(s, z);
     } else {
         gen_helper_ctz_i32(s, ret, arg1, arg2);
     }
 }
 
@@ -490,9 +468,7 @@ void tcg_gen_ctzi_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
         tcg_gen_ctpop_i32(s, ret, t);
         tcg_temp_free_i32(s, t);
     } else {
-        TCGv_i32 t = tcg_const_i32(s, arg2);
-        tcg_gen_ctz_i32(s, ret, arg1, t);
-        tcg_temp_free_i32(s, t);
+        tcg_gen_ctz_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
     }
 }
@@ -550,9 +526,7 @@ void tcg_gen_rotli_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(s, ret, arg1);
     } else if (TCG_TARGET_HAS_rot_i32) {
-        TCGv_i32 t0 = tcg_const_i32(s, arg2);
-        tcg_gen_rotl_i32(s, ret, arg1, t0);
-        tcg_temp_free_i32(s, t0);
+        tcg_gen_rotl_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
     } else {
         TCGv_i32 t0, t1;
         t0 = tcg_temp_new_i32(s);
@@ -657,9 +631,8 @@ void tcg_gen_deposit_z_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg,
         tcg_gen_andi_i32(s, ret, arg, (1u << len) - 1);
     } else if (TCG_TARGET_HAS_deposit_i32
                && TCG_TARGET_deposit_i32_valid(ofs, len)) {
-        TCGv_i32 zero = tcg_const_i32(s, 0);
+        TCGv_i32 zero = tcg_constant_i32(s, 0);
         tcg_gen_op5ii_i32(s, INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
-        tcg_temp_free_i32(s, zero);
     } else {
         /* To help two-operand hosts we prefer to zero-extend first,
            which allows ARG to stay live.  */
@@ -1063,7 +1036,7 @@ void tcg_gen_bswap32_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg)
     } else {
         TCGv_i32 t0 = tcg_temp_new_i32(s);
         TCGv_i32 t1 = tcg_temp_new_i32(s);
-        TCGv_i32 t2 = tcg_const_i32(s, 0x00ff00ff);
+        TCGv_i32 t2 = tcg_constant_i32(s, 0x00ff00ff);
 
                                            /* arg = abcd */
         tcg_gen_shri_i32(s, t0, arg, 8);   /*  t0 = .abc */
@@ -1078,7 +1051,6 @@ void tcg_gen_bswap32_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg)
 
         tcg_temp_free_i32(s, t0);
         tcg_temp_free_i32(s, t1);
-        tcg_temp_free_i32(s, t2);
     }
 }
 
@@ -1125,8 +1097,15 @@ void tcg_gen_discard_i64(TCGContext *s, TCGv_i64 arg)
 
 void tcg_gen_mov_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg)
 {
-    tcg_gen_mov_i32(s, TCGV_LOW(s, ret), TCGV_LOW(s, arg));
-    tcg_gen_mov_i32(s, TCGV_HIGH(s, ret), TCGV_HIGH(s, arg));
+    TCGTemp *ts = tcgv_i64_temp(s, arg);
+
+    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
+    if (ts->kind == TEMP_CONST) {
+        tcg_gen_movi_i64(s, ret, ts->val);
+    } else {
+        tcg_gen_mov_i32(s, TCGV_LOW(s, ret), TCGV_LOW(s, arg));
+        tcg_gen_mov_i32(s, TCGV_HIGH(s, ret), TCGV_HIGH(s, arg));
+    }
 }
 
 void tcg_gen_movi_i64(TCGContext *s, TCGv_i64 ret, int64_t arg)
@@ -1248,6 +1227,14 @@ void tcg_gen_mul_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
     tcg_temp_free_i64(s, t0);
     tcg_temp_free_i32(s, t1);
 }
+
+#else
+
+void tcg_gen_movi_i64(TCGContext *s, TCGv_i64 ret, int64_t arg)
+{
+    tcg_gen_mov_i64(s, ret, tcg_constant_i64(s, arg));
+}
+
 #endif /* TCG_TARGET_REG_SIZE == 32 */
 
 void tcg_gen_addi_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
@@ -1255,10 +1242,12 @@ void tcg_gen_addi_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     /* some cases can be optimized here */
     if (arg2 == 0) {
         tcg_gen_mov_i64(s, ret, arg1);
+    } else if (TCG_TARGET_REG_BITS == 64) {
+        tcg_gen_add_i64(s, ret, arg1, tcg_constant_i64(s, arg2));
     } else {
-        TCGv_i64 t0 = tcg_const_i64(s, arg2);
-        tcg_gen_add_i64(s, ret, arg1, t0);
-        tcg_temp_free_i64(s, t0);
+        tcg_gen_add2_i32(s, TCGV_LOW(s, ret), TCGV_HIGH(s, ret),
+                         TCGV_LOW(s, arg1), TCGV_HIGH(s, arg1),
+                         tcg_constant_i32(s, arg2), tcg_constant_i32(s, arg2 >> 32));
     }
 }
 
@@ -1267,10 +1256,12 @@ void tcg_gen_subfi_i64(TCGContext *s, TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
     if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
         /* Don't recurse with tcg_gen_neg_i64.  */
         tcg_gen_op2_i64(s, INDEX_op_neg_i64, ret, arg2);
+    } else if (TCG_TARGET_REG_BITS == 64) {
+        tcg_gen_sub_i64(s, ret, tcg_constant_i64(s, arg1), arg2);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(s, arg1);
-        tcg_gen_sub_i64(s, ret, t0, arg2);
-        tcg_temp_free_i64(s, t0);
+        tcg_gen_sub2_i32(s, TCGV_LOW(s, ret), TCGV_HIGH(s, ret),
+                         tcg_constant_i32(s, arg1), tcg_constant_i32(s, arg1 >> 32),
+                         TCGV_LOW(s, arg2), TCGV_HIGH(s, arg2));
     }
 }
 
@@ -1279,17 +1270,17 @@ void tcg_gen_subi_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     /* some cases can be optimized here */
     if (arg2 == 0) {
         tcg_gen_mov_i64(s, ret, arg1);
+    } else if (TCG_TARGET_REG_BITS == 64) {
+        tcg_gen_sub_i64(s, ret, arg1, tcg_constant_i64(s, arg2));
     } else {
-        TCGv_i64 t0 = tcg_const_i64(s, arg2);
-        tcg_gen_sub_i64(s, ret, arg1, t0);
-        tcg_temp_free_i64(s, t0);
+        tcg_gen_sub2_i32(s, TCGV_LOW(s, ret), TCGV_HIGH(s, ret),
+                         TCGV_LOW(s, arg1), TCGV_HIGH(s, arg1),
+                         tcg_constant_i32(s, arg2), tcg_constant_i32(s, arg2 >> 32));
     }
 }
 
 void tcg_gen_andi_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
 {
-    TCGv_i64 t0;
-
     if (TCG_TARGET_REG_BITS == 32) {
         tcg_gen_andi_i32(s, TCGV_LOW(s, ret), TCGV_LOW(s, arg1), arg2);
         tcg_gen_andi_i32(s, TCGV_HIGH(s, ret), TCGV_HIGH(s, arg1), arg2 >> 32);
@@ -1324,9 +1315,8 @@ void tcg_gen_andi_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
         }
         break;
     }
-    t0 = tcg_const_i64(s, arg2);
-    tcg_gen_and_i64(s, ret, arg1, t0);
-    tcg_temp_free_i64(s, t0);
+
+    tcg_gen_and_i64(s, ret, arg1, tcg_constant_i64(s, arg2));
 }
 
 void tcg_gen_ori_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
@@ -1342,9 +1332,7 @@ void tcg_gen_ori_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     } else if (arg2 == 0) {
         tcg_gen_mov_i64(s, ret, arg1);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(s, arg2);
-        tcg_gen_or_i64(s, ret, arg1, t0);
-        tcg_temp_free_i64(s, t0);
+        tcg_gen_or_i64(s, ret, arg1, tcg_constant_i64(s, arg2));
     }
 }
 
@@ -1362,9 +1350,7 @@ void tcg_gen_xori_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
         /* Don't recurse with tcg_gen_not_i64.  */
         tcg_gen_op2_i64(s, INDEX_op_not_i64, ret, arg1);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(s, arg2);
-        tcg_gen_xor_i64(s, ret, arg1, t0);
-        tcg_temp_free_i64(s, t0);
+        tcg_gen_xor_i64(s, ret, arg1, tcg_constant_i64(s, arg2));
     }
 }
 
@@ -1427,9 +1413,7 @@ void tcg_gen_shli_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     } else if (arg2 == 0) {
         tcg_gen_mov_i64(s, ret, arg1);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(s, arg2);
-        tcg_gen_shl_i64(s, ret, arg1, t0);
-        tcg_temp_free_i64(s, t0);
+        tcg_gen_shl_i64(s, ret, arg1, tcg_constant_i64(s, arg2));
     }
 }
 
@@ -1441,9 +1425,7 @@ void tcg_gen_shri_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     } else if (arg2 == 0) {
         tcg_gen_mov_i64(s, ret, arg1);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(s, arg2);
-        tcg_gen_shr_i64(s, ret, arg1, t0);
-        tcg_temp_free_i64(s, t0);
+        tcg_gen_shr_i64(s, ret, arg1, tcg_constant_i64(s, arg2));
     }
 }
 
@@ -1455,9 +1437,7 @@ void tcg_gen_sari_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     } else if (arg2 == 0) {
         tcg_gen_mov_i64(s, ret, arg1);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(s, arg2);
-        tcg_gen_sar_i64(s, ret, arg1, t0);
-        tcg_temp_free_i64(s, t0);
+        tcg_gen_sar_i64(s, ret, arg1, tcg_constant_i64(s, arg2));
     }
 }
 
@@ -1480,12 +1460,17 @@ void tcg_gen_brcond_i64(TCGContext *s, TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg
 void tcg_gen_brcondi_i64(TCGContext *s, TCGCond cond, TCGv_i64 arg1,
                          int64_t arg2, TCGLabel *l)
 {
-    if (cond == TCG_COND_ALWAYS) {
+    if (TCG_TARGET_REG_BITS == 64) {
+        tcg_gen_brcond_i64(s, cond, arg1, tcg_constant_i64(s, arg2), l);
+    } else if (cond == TCG_COND_ALWAYS) {
         tcg_gen_br(s, l);
     } else if (cond != TCG_COND_NEVER) {
-        TCGv_i64 t0 = tcg_const_i64(s, arg2);
-        tcg_gen_brcond_i64(s, cond, arg1, t0, l);
-        tcg_temp_free_i64(s, t0);
+        l->refs++;
+        tcg_gen_op6ii_i32(s, INDEX_op_brcond2_i32,
+                          TCGV_LOW(s, arg1), TCGV_HIGH(s, arg1),
+                          tcg_constant_i32(s, arg2),
+                          tcg_constant_i32(s, arg2 >> 32),
+                          cond, label_arg(s, l));
     }
 }
 
@@ -1511,9 +1496,19 @@ void tcg_gen_setcond_i64(TCGContext *s, TCGCond cond, TCGv_i64 ret,
 void tcg_gen_setcondi_i64(TCGContext *s, TCGCond cond, TCGv_i64 ret,
                           TCGv_i64 arg1, int64_t arg2)
 {
-    TCGv_i64 t0 = tcg_const_i64(s, arg2);
-    tcg_gen_setcond_i64(s, cond, ret, arg1, t0);
-    tcg_temp_free_i64(s, t0);
+    if (TCG_TARGET_REG_BITS == 64) {
+        tcg_gen_setcond_i64(s, cond, ret, arg1, tcg_constant_i64(s, arg2));
+    } else if (cond == TCG_COND_ALWAYS) {
+        tcg_gen_movi_i64(s, ret, 1);
+    } else if (cond == TCG_COND_NEVER) {
+        tcg_gen_movi_i64(s, ret, 0);
+    } else {
+        tcg_gen_op6i_i32(s, INDEX_op_setcond2_i32, TCGV_LOW(s, ret),
+                         TCGV_LOW(s, arg1), TCGV_HIGH(s, arg1),
+                         tcg_constant_i32(s, arg2),
+                         tcg_constant_i32(s, arg2 >> 32), cond);
+        tcg_gen_movi_i32(s, TCGV_HIGH(s, ret), 0);
+    }
 }
 
 void tcg_gen_muli_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
@@ -1702,7 +1697,7 @@ void tcg_gen_bswap32_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg)
     } else {
         TCGv_i64 t0 = tcg_temp_new_i64(s);
         TCGv_i64 t1 = tcg_temp_new_i64(s);
-        TCGv_i64 t2 = tcg_const_i64(s, 0x00ff00ff);
+        TCGv_i64 t2 = tcg_constant_i64(s, 0x00ff00ff);
 
                                            /* arg = ....abcd */
         tcg_gen_shri_i64(s, t0, arg, 8);   /*  t0 = .....abc */
@@ -1718,7 +1713,6 @@ void tcg_gen_bswap32_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg)
 
         tcg_temp_free_i64(s, t0);
         tcg_temp_free_i64(s, t1);
-        tcg_temp_free_i64(s, t2);
     }
 }
 
@@ -1862,16 +1856,16 @@ void tcg_gen_clzi_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
     if (TCG_TARGET_REG_BITS == 32
         && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
-        TCGv_i32 t = tcg_const_i32(s, (uint32_t)arg2 - 32);
-        tcg_gen_clz_i32(s, t, TCGV_LOW(s, arg1), t);
+        TCGv_i32 t = tcg_temp_new_i32(s);
+        tcg_gen_clzi_i32(s, t, TCGV_LOW(s, arg1), arg2 - 32);
         tcg_gen_addi_i32(s, t, t, 32);
         tcg_gen_clz_i32(s, TCGV_LOW(s, ret), TCGV_HIGH(s, arg1), t);
         tcg_gen_movi_i32(s, TCGV_HIGH(s, ret), 0);
         tcg_temp_free_i32(s, t);
     } else {
-        TCGv_i64 t = tcg_const_i64(s, arg2);
-        tcg_gen_clz_i64(s, ret, arg1, t);
-        tcg_temp_free_i64(s, t);
+        TCGv_i64 t0 = tcg_const_i64(s, arg2);
+        tcg_gen_clz_i64(s, ret, arg1, t0);
+        tcg_temp_free_i64(s, t0);
     }
 }
 
@@ -1893,10 +1887,9 @@ void tcg_gen_ctz_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
             tcg_gen_clzi_i64(s, t, t, 64);
             tcg_gen_xori_i64(s, t, t, 63);
         }
-        z = tcg_const_i64(s, 0);
+        z = tcg_constant_i64(s, 0);
         tcg_gen_movcond_i64(s, TCG_COND_EQ, ret, arg1, z, arg2, t);
         tcg_temp_free_i64(s, t);
-        tcg_temp_free_i64(s, z);
     } else {
         gen_helper_ctz_i64(s, ret, arg1, arg2);
     }
 }
 
@@ -1907,8 +1900,8 @@ void tcg_gen_ctzi_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
     if (TCG_TARGET_REG_BITS == 32
         && TCG_TARGET_HAS_ctz_i32
         && arg2 <= 0xffffffffu) {
-        TCGv_i32 t32 = tcg_const_i32(s, (uint32_t)arg2 - 32);
-        tcg_gen_ctz_i32(s, t32, TCGV_HIGH(s, arg1), t32);
+        TCGv_i32 t32 = tcg_temp_new_i32(s);
+        tcg_gen_ctzi_i32(s, t32, TCGV_HIGH(s, arg1), arg2 - 32);
         tcg_gen_addi_i32(s, t32, t32, 32);
         tcg_gen_ctz_i32(s, TCGV_LOW(s, ret), TCGV_LOW(s, arg1), t32);
         tcg_gen_movi_i32(s, TCGV_HIGH(s, ret), 0);
@@ -1923,9 +1916,9 @@ void tcg_gen_ctzi_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
         tcg_gen_ctpop_i64(s, ret, t);
         tcg_temp_free_i64(s, t);
     } else {
-        TCGv_i64 t64 = tcg_const_i64(s, arg2);
-        tcg_gen_ctz_i64(s, ret, arg1, t64);
-        tcg_temp_free_i64(s, t64);
+        TCGv_i64 t0 = tcg_const_i64(s, arg2);
+        tcg_gen_ctz_i64(s, ret, arg1, t0);
+        tcg_temp_free_i64(s, t0);
     }
 }
 
@@ -1981,9 +1974,7 @@ void tcg_gen_rotli_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     if (arg2 == 0) {
        tcg_gen_mov_i64(s, ret, arg1);
     } else if (TCG_TARGET_HAS_rot_i64) {
-        TCGv_i64 t0 = tcg_const_i64(s, arg2);
-        tcg_gen_rotl_i64(s, ret, arg1, t0);
-        tcg_temp_free_i64(s, t0);
+        tcg_gen_rotl_i64(s, ret, arg1, tcg_constant_i64(s, arg2));
     } else {
         TCGv_i64 t0, t1;
         t0 = tcg_temp_new_i64(s);
@@ -2102,9 +2093,8 @@ void tcg_gen_deposit_z_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg,
         tcg_gen_andi_i64(s, ret, arg, (1ull << len) - 1);
     } else if (TCG_TARGET_HAS_deposit_i64
                && TCG_TARGET_deposit_i64_valid(ofs, len)) {
-        TCGv_i64 zero = tcg_const_i64(s, 0);
+        TCGv_i64 zero = tcg_constant_i64(s, 0);
         tcg_gen_op5ii_i64(s, INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
-        tcg_temp_free_i64(s, zero);
     } else {
         if (TCG_TARGET_REG_BITS == 32) {
             if (ofs >= 32) {
@@ -3116,9 +3106,8 @@ void tcg_gen_atomic_cmpxchg_i32(TCGContext *s,
 
 #ifdef CONFIG_SOFTMMU
         {
-            TCGv_i32 oi = tcg_const_i32(s, make_memop_idx(memop & ~MO_SIGN, idx));
-            gen(s, retv, s->tcg_env, addr, cmpv, newv, oi);
-            tcg_temp_free_i32(s, oi);
+            TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
+            gen(s, retv, s->tcg_env, addr, cmpv, newv, tcg_constant_i32(s, oi));
         }
 #else
         gen(s, retv, s->tcg_env, addr, cmpv, newv);
@@ -3162,9 +3151,8 @@ void tcg_gen_atomic_cmpxchg_i64(TCGContext *s,
 
 #ifdef CONFIG_SOFTMMU
         {
-            TCGv_i32 oi = tcg_const_i32(s, make_memop_idx(memop, idx));
-            gen(s, retv, s->tcg_env, addr, cmpv, newv, oi);
-            tcg_temp_free_i32(s, oi);
+            TCGMemOpIdx oi = make_memop_idx(memop, idx);
+            gen(s, retv, s->tcg_env, addr, cmpv, newv, tcg_constant_i32(s, oi));
         }
 #else
        gen(s, retv, s->tcg_env, addr, cmpv, newv);
@@ -3228,9 +3216,8 @@ static void do_atomic_op_i32(TCGContext *s,
 
 #ifdef CONFIG_SOFTMMU
     {
-        TCGv_i32 oi = tcg_const_i32(s, make_memop_idx(memop & ~MO_SIGN, idx));
-        gen(s, ret, s->tcg_env, addr, val, oi);
-        tcg_temp_free_i32(s, oi);
+        TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
+        gen(s, ret, s->tcg_env, addr, val, tcg_constant_i32(s, oi));
     }
 #else
     gen(s, ret, s->tcg_env, addr, val);
@@ -3276,9 +3263,8 @@ static void do_atomic_op_i64(TCGContext *s,
 
 #ifdef CONFIG_SOFTMMU
     {
-        TCGv_i32 oi = tcg_const_i32(s, make_memop_idx(memop & ~MO_SIGN, idx));
-        gen(s, ret, s->tcg_env, addr, val, oi);
-        tcg_temp_free_i32(s, oi);
+        TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
+        gen(s, ret, s->tcg_env, addr, val, tcg_constant_i32(s, oi));
     }
 #else
     gen(s, ret, s->tcg_env, addr, val);
diff --git a/qemu/tcg/tcg-op.h b/qemu/tcg/tcg-op.h
index 79647f99..bb644ea5 100644
--- a/qemu/tcg/tcg-op.h
+++ b/qemu/tcg/tcg-op.h
@@ -289,6 +289,7 @@ void tcg_gen_mb(TCGContext *, TCGBar);
 
 /* 32 bit ops */
 
+void tcg_gen_movi_i32(TCGContext *s, TCGv_i32 ret, int32_t arg);
 void tcg_gen_addi_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2);
 void tcg_gen_subfi_i32(TCGContext *s, TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2);
 void tcg_gen_subi_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2);
@@ -367,11 +368,6 @@ static inline void tcg_gen_mov_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg)
     }
 }
 
-static inline void tcg_gen_movi_i32(TCGContext *s, TCGv_i32 ret, int32_t arg)
-{
-    tcg_gen_op2i_i32(s, INDEX_op_movi_i32, ret, arg);
-}
-
 static inline void tcg_gen_ld8u_i32(TCGContext *s, TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
 {
     tcg_gen_ldst_op_i32(s, INDEX_op_ld8u_i32, ret, arg2, offset);
 }
@@ -477,6 +473,7 @@ static inline void tcg_gen_not_i32(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg)
 
 /* 64 bit ops */
 
+void tcg_gen_movi_i64(TCGContext *s, TCGv_i64 ret, int64_t arg);
 void tcg_gen_addi_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2);
 void tcg_gen_subfi_i64(TCGContext *s, TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2);
 void tcg_gen_subi_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2);
@@ -560,11 +557,6 @@ static inline void tcg_gen_mov_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg)
     }
 }
 
-static inline void tcg_gen_movi_i64(TCGContext *s, TCGv_i64 ret, int64_t arg)
-{
-    tcg_gen_op2i_i64(s, INDEX_op_movi_i64, ret, arg);
-}
-
 static inline void tcg_gen_ld8u_i64(TCGContext *s, TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
 {
@@ -708,7 +700,6 @@ static inline void tcg_gen_sub_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg1, T
 
 void tcg_gen_discard_i64(TCGContext *s, TCGv_i64 arg);
 void tcg_gen_mov_i64(TCGContext *s, TCGv_i64 ret, TCGv_i64 arg);
-void tcg_gen_movi_i64(TCGContext *s, TCGv_i64 ret, int64_t arg);
 void tcg_gen_ld8u_i64(TCGContext *s, TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset);
 void tcg_gen_ld8s_i64(TCGContext *s, TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset);
 void tcg_gen_ld16u_i64(TCGContext *s, TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset);
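
Note (not part of the patch): the conversion above replaces the allocate/initialize/free pattern of tcg_const_i32()/tcg_const_i64() with tcg_constant_i32()/tcg_constant_i64(), which return interned, read-only constant temps owned by the context; such temps must never be freed by the caller or used as an op destination. A minimal sketch of the idiom change, using a hypothetical frontend helper gen_addi_example() that is not part of this patch:

/* Hypothetical helper for illustration only; both idioms generate the same add. */
static void gen_addi_example(TCGContext *s, TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Old idiom: allocate a temporary, load the constant, and free it afterwards. */
    TCGv_i32 t0 = tcg_const_i32(s, arg2);
    tcg_gen_add_i32(s, ret, arg1, t0);
    tcg_temp_free_i32(s, t0);

    /* New idiom: tcg_constant_i32() returns a cached, read-only constant temp;
     * the caller never frees it and must not write to it. */
    tcg_gen_add_i32(s, ret, arg1, tcg_constant_i32(s, arg2));
}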