@@ -76,7 +76,6 @@ static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
 
 #define TCG_CT_CONST_S11 0x100
 #define TCG_CT_CONST_S13 0x200
-#define TCG_CT_CONST_ZERO 0x400
 
 #define ALL_GENERAL_REGS MAKE_64BIT_MASK(0, 32)
 
@@ -340,9 +339,7 @@ static bool tcg_target_const_match(int64_t val, int ct,
         val = (int32_t)val;
     }
 
-    if ((ct & TCG_CT_CONST_ZERO) && val == 0) {
-        return 1;
-    } else if ((ct & TCG_CT_CONST_S11) && check_fit_tl(val, 11)) {
+    if ((ct & TCG_CT_CONST_S11) && check_fit_tl(val, 11)) {
         return 1;
     } else if ((ct & TCG_CT_CONST_S13) && check_fit_tl(val, 13)) {
         return 1;
@@ -1579,7 +1576,7 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_st_i64:
     case INDEX_op_qemu_st_i32:
     case INDEX_op_qemu_st_i64:
-        return C_O0_I2(rZ, r);
+        return C_O0_I2(rz, r);
 
     case INDEX_op_add_i32:
     case INDEX_op_add_i64:
@@ -1611,22 +1608,22 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_setcond_i64:
     case INDEX_op_negsetcond_i32:
     case INDEX_op_negsetcond_i64:
-        return C_O1_I2(r, rZ, rJ);
+        return C_O1_I2(r, rz, rJ);
 
     case INDEX_op_brcond_i32:
     case INDEX_op_brcond_i64:
-        return C_O0_I2(rZ, rJ);
+        return C_O0_I2(rz, rJ);
     case INDEX_op_movcond_i32:
     case INDEX_op_movcond_i64:
-        return C_O1_I4(r, rZ, rJ, rI, 0);
+        return C_O1_I4(r, rz, rJ, rI, 0);
     case INDEX_op_add2_i32:
     case INDEX_op_add2_i64:
     case INDEX_op_sub2_i32:
     case INDEX_op_sub2_i64:
-        return C_O2_I4(r, r, rZ, rZ, rJ, rJ);
+        return C_O2_I4(r, r, rz, rz, rJ, rJ);
     case INDEX_op_mulu2_i32:
     case INDEX_op_muls2_i32:
-        return C_O2_I2(r, r, rZ, rJ);
+        return C_O2_I2(r, r, rz, rJ);
     case INDEX_op_muluh_i64:
         return C_O1_I2(r, r, r);
 
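--
The rZ/TCG_CT_CONST_ZERO removal above appears to rely on the generic "z"
operand constraint, under which a constant 0 is matched by common code and
emitted as the hardware zero register (%g0 on sparc64), so the backend no
longer needs its own zero case. A minimal C sketch of that idea follows; the
names used here (CT_CONST_ZERO, REG_ZERO, ct_const_match) are hypothetical
illustrations, not the actual QEMU API.

#include <stdbool.h>
#include <stdint.h>

#define CT_CONST_ZERO 0x1        /* operand constraint: constant 0 allowed */

enum { REG_ZERO = 0 };           /* e.g. %g0 on sparc64, always reads as 0 */

/* Sketch of a generic constraint check: a constant 0 satisfies "z" and is
 * folded onto the zero register, so no per-backend zero flag is needed. */
static bool ct_const_match(int64_t val, int ct, int *reg_out)
{
    if ((ct & CT_CONST_ZERO) && val == 0) {
        *reg_out = REG_ZERO;     /* use the zero register instead of a load */
        return true;
    }
    return false;
}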