@@ -2567,6 +2567,14 @@ static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
     }
 }
 
+static void gen_qemu_ld32u_i64(DisasContext *ctx, TCGv_i64 val, TCGv addr)
+{
+    TCGv tmp = tcg_temp_new();
+    gen_qemu_ld32u(ctx, tmp, addr);
+    tcg_gen_extu_tl_i64(val, tmp);
+    tcg_temp_free(tmp);
+}
+
 static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
 {
     if (unlikely(ctx->le_mode)) {
@@ -2616,6 +2624,14 @@ static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
     }
 }
 
+static void gen_qemu_st32_i64(DisasContext *ctx, TCGv_i64 val, TCGv addr)
+{
+    TCGv tmp = tcg_temp_new();
+    tcg_gen_trunc_i64_tl(tmp, val);
+    gen_qemu_st32(ctx, tmp, addr);
+    tcg_temp_free(tmp);
+}
+
 static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
 {
     if (unlikely(ctx->le_mode)) {
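
The two wrappers above are needed because TCGv tracks the target's word size: on a 32-bit target, TCGv and TCGv_i64 are distinct types, so a 32-bit guest access has to be widened or narrowed explicitly before it can feed a 64-bit VSR half. A minimal plain-C sketch of the values the wrappers compute (the byte-buffer accessors here are illustrative stand-ins for the guest memory path, not QEMU API):

#include <stdint.h>
#include <string.h>

/* What gen_qemu_ld32u_i64 computes: a 32-bit load, zero-extended to 64 bits. */
static uint64_t ld32u_i64(const uint8_t *mem)
{
    uint32_t w;
    memcpy(&w, mem, sizeof(w));   /* gen_qemu_ld32u */
    return (uint64_t)w;           /* tcg_gen_extu_tl_i64 */
}

/* What gen_qemu_st32_i64 computes: truncate to 32 bits, then store. */
static void st32_i64(uint8_t *mem, uint64_t val)
{
    uint32_t w = (uint32_t)val;   /* tcg_gen_trunc_i64_tl */
    memcpy(mem, &w, sizeof(w));   /* gen_qemu_st32 */
}
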
@@ -7048,13 +7064,14 @@ static void gen_lxvdsx(DisasContext *ctx)
     EA = tcg_temp_new();
     gen_addr_reg_index(ctx, EA);
     gen_qemu_ld64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
-    tcg_gen_mov_tl(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
+    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
     tcg_temp_free(EA);
 }
 
 static void gen_lxvw4x(DisasContext *ctx)
 {
-    TCGv EA, tmp;
+    TCGv EA;
+    TCGv_i64 tmp;
     TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
     TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
     if (unlikely(!ctx->vsx_enabled)) {
@@ -7063,21 +7080,22 @@ static void gen_lxvw4x(DisasContext *ctx)
     }
     gen_set_access_type(ctx, ACCESS_INT);
     EA = tcg_temp_new();
-    tmp = tcg_temp_new();
+    tmp = tcg_temp_new_i64();
+
     gen_addr_reg_index(ctx, EA);
-    gen_qemu_ld32u(ctx, tmp, EA);
+    gen_qemu_ld32u_i64(ctx, tmp, EA);
     tcg_gen_addi_tl(EA, EA, 4);
-    gen_qemu_ld32u(ctx, xth, EA);
+    gen_qemu_ld32u_i64(ctx, xth, EA);
     tcg_gen_deposit_i64(xth, xth, tmp, 32, 32);
 
     tcg_gen_addi_tl(EA, EA, 4);
-    gen_qemu_ld32u(ctx, tmp, EA);
+    gen_qemu_ld32u_i64(ctx, tmp, EA);
     tcg_gen_addi_tl(EA, EA, 4);
-    gen_qemu_ld32u(ctx, xtl, EA);
+    gen_qemu_ld32u_i64(ctx, xtl, EA);
     tcg_gen_deposit_i64(xtl, xtl, tmp, 32, 32);
 
     tcg_temp_free(EA);
-    tcg_temp_free(tmp);
+    tcg_temp_free_i64(tmp);
 }
 
 static void gen_stxsdx(DisasContext *ctx)
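
In gen_lxvw4x above, each doubleword of the target VSR is assembled from two 32-bit loads: the second load lands in the low half, and tcg_gen_deposit_i64(..., 32, 32) deposits the first word into the high half. A plain-C model of the standard TCG deposit semantics:

#include <stdint.h>

/* ret = arg1 with arg2's low `len` bits inserted at bit position `pos`. */
static uint64_t deposit64(uint64_t arg1, uint64_t arg2, unsigned pos, unsigned len)
{
    uint64_t mask = (len < 64 ? (1ULL << len) - 1 : ~0ULL) << pos;
    return (arg1 & ~mask) | ((arg2 << pos) & mask);
}

With pos = 32 and len = 32 this yields xth = (tmp << 32) | (xth & 0xffffffff), i.e. the first loaded word in the high half and the second in the low half.
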
@@ -7112,7 +7130,8 @@ static void gen_stxvd2x(DisasContext *ctx)
 
 static void gen_stxvw4x(DisasContext *ctx)
 {
-    TCGv EA, tmp;
+    TCGv_i64 tmp;
+    TCGv EA;
     if (unlikely(!ctx->vsx_enabled)) {
         gen_exception(ctx, POWERPC_EXCP_VSXU);
         return;
@@ -7120,21 +7139,21 @@ static void gen_stxvw4x(DisasContext *ctx)
     gen_set_access_type(ctx, ACCESS_INT);
     EA = tcg_temp_new();
     gen_addr_reg_index(ctx, EA);
-    tmp = tcg_temp_new();
+    tmp = tcg_temp_new_i64();
 
     tcg_gen_shri_i64(tmp, cpu_vsrh(xS(ctx->opcode)), 32);
-    gen_qemu_st32(ctx, tmp, EA);
+    gen_qemu_st32_i64(ctx, tmp, EA);
     tcg_gen_addi_tl(EA, EA, 4);
-    gen_qemu_st32(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
+    gen_qemu_st32_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
 
     tcg_gen_shri_i64(tmp, cpu_vsrl(xS(ctx->opcode)), 32);
     tcg_gen_addi_tl(EA, EA, 4);
-    gen_qemu_st32(ctx, tmp, EA);
+    gen_qemu_st32_i64(ctx, tmp, EA);
     tcg_gen_addi_tl(EA, EA, 4);
-    gen_qemu_st32(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
+    gen_qemu_st32_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
 
     tcg_temp_free(EA);
-    tcg_temp_free(tmp);
+    tcg_temp_free_i64(tmp);
 }
 
 static void gen_xxpermdi(DisasContext *ctx)
@@ -7171,8 +7190,8 @@ static void glue(gen_, name)(DisasContext * ctx) \
         gen_exception(ctx, POWERPC_EXCP_VSXU); \
         return; \
     } \
-    xb = tcg_temp_new(); \
-    sgm = tcg_temp_new(); \
+    xb = tcg_temp_new_i64(); \
+    sgm = tcg_temp_new_i64(); \
     tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode))); \
     tcg_gen_movi_i64(sgm, sgn_mask); \
     switch (op) { \
@@ -7189,18 +7208,18 @@ static void glue(gen_, name)(DisasContext * ctx) \
            break; \
        } \
        case OP_CPSGN: { \
-           TCGv_i64 xa = tcg_temp_new(); \
+           TCGv_i64 xa = tcg_temp_new_i64(); \
            tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_and_i64(xa, xa, sgm); \
            tcg_gen_andc_i64(xb, xb, sgm); \
            tcg_gen_or_i64(xb, xb, xa); \
-           tcg_temp_free(xa); \
+           tcg_temp_free_i64(xa); \
            break; \
        } \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb); \
-    tcg_temp_free(xb); \
-    tcg_temp_free(sgm); \
+    tcg_temp_free_i64(xb); \
+    tcg_temp_free_i64(sgm); \
 }
 
 VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
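
The OP_CPSGN arm of VSX_SCALAR_MOVE is the classic copy-sign mask trick: keep the sign bit of xA and the magnitude bits of xB. A plain-C sketch of the double-precision case, assuming SGN_MASK_DP is the top bit:

#include <stdint.h>

static uint64_t cpsgn_dp(uint64_t xa, uint64_t xb)
{
    const uint64_t sgm = 0x8000000000000000ULL;  /* sgn_mask */
    return (xa & sgm)      /* tcg_gen_and_i64(xa, xa, sgm)            */
         | (xb & ~sgm);    /* tcg_gen_andc_i64(xb, xb, sgm), then or  */
}
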
@@ -7216,9 +7235,9 @@ static void glue(gen_, name)(DisasContext * ctx) \
         gen_exception(ctx, POWERPC_EXCP_VSXU); \
         return; \
     } \
-    xbh = tcg_temp_new(); \
-    xbl = tcg_temp_new(); \
-    sgm = tcg_temp_new(); \
+    xbh = tcg_temp_new_i64(); \
+    xbl = tcg_temp_new_i64(); \
+    sgm = tcg_temp_new_i64(); \
     tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode))); \
     tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode))); \
     tcg_gen_movi_i64(sgm, sgn_mask); \
@@ -7239,8 +7258,8 @@ static void glue(gen_, name)(DisasContext * ctx) \
            break; \
        } \
        case OP_CPSGN: { \
-           TCGv_i64 xah = tcg_temp_new(); \
-           TCGv_i64 xal = tcg_temp_new(); \
+           TCGv_i64 xah = tcg_temp_new_i64(); \
+           TCGv_i64 xal = tcg_temp_new_i64(); \
            tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_and_i64(xah, xah, sgm); \
@@ -7249,16 +7268,16 @@ static void glue(gen_, name)(DisasContext * ctx) \
            tcg_gen_andc_i64(xbl, xbl, sgm); \
            tcg_gen_or_i64(xbh, xbh, xah); \
            tcg_gen_or_i64(xbl, xbl, xal); \
-           tcg_temp_free(xah); \
-           tcg_temp_free(xal); \
+           tcg_temp_free_i64(xah); \
+           tcg_temp_free_i64(xal); \
            break; \
        } \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh); \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl); \
-    tcg_temp_free(xbh); \
-    tcg_temp_free(xbl); \
-    tcg_temp_free(sgm); \
+    tcg_temp_free_i64(xbh); \
+    tcg_temp_free_i64(xbl); \
+    tcg_temp_free_i64(sgm); \
 }
 
 VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
@@ -7284,11 +7303,11 @@ static void glue(gen_, name)(DisasContext * ctx) \
            cpu_vsrl(xB(ctx->opcode))); \
 }
 
-VSX_LOGICAL(xxland, tcg_gen_and_tl)
-VSX_LOGICAL(xxlandc, tcg_gen_andc_tl)
-VSX_LOGICAL(xxlor, tcg_gen_or_tl)
-VSX_LOGICAL(xxlxor, tcg_gen_xor_tl)
-VSX_LOGICAL(xxlnor, tcg_gen_nor_tl)
+VSX_LOGICAL(xxland, tcg_gen_and_i64)
+VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
+VSX_LOGICAL(xxlor, tcg_gen_or_i64)
+VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
+VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
 
 #define VSX_XXMRG(name, high) \
 static void glue(gen_, name)(DisasContext * ctx) \
@@ -7298,10 +7317,10 @@ static void glue(gen_, name)(DisasContext * ctx) \
         gen_exception(ctx, POWERPC_EXCP_VSXU); \
         return; \
     } \
-    a0 = tcg_temp_new(); \
-    a1 = tcg_temp_new(); \
-    b0 = tcg_temp_new(); \
-    b1 = tcg_temp_new(); \
+    a0 = tcg_temp_new_i64(); \
+    a1 = tcg_temp_new_i64(); \
+    b0 = tcg_temp_new_i64(); \
+    b1 = tcg_temp_new_i64(); \
     if (high) { \
         tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
         tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
@@ -7319,10 +7338,10 @@ static void glue(gen_, name)(DisasContext * ctx) \
                         b0, a0, 32, 32); \
     tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), \
                         b1, a1, 32, 32); \
-    tcg_temp_free(a0); \
-    tcg_temp_free(a1); \
-    tcg_temp_free(b0); \
-    tcg_temp_free(b1); \
+    tcg_temp_free_i64(a0); \
+    tcg_temp_free_i64(a1); \
+    tcg_temp_free_i64(b0); \
+    tcg_temp_free_i64(b1); \
 }
 
 VSX_XXMRG(xxmrghw, 1)
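
For reference, the architectural effect of the xxmrghw path generated above: the two deposits interleave the high words of xA and xB. A plain-C model (the vsr128 struct is illustrative, not QEMU's register layout):

#include <stdint.h>

typedef struct { uint64_t hi, lo; } vsr128;  /* hi = words 0-1, lo = words 2-3 */

static vsr128 xxmrghw(vsr128 a, vsr128 b)
{
    vsr128 r;
    r.hi = (a.hi & 0xffffffff00000000ULL) | (b.hi >> 32);  /* a.w0 : b.w0 */
    r.lo = (a.hi << 32) | (uint32_t)b.hi;                  /* a.w1 : b.w1 */
    return r;
}
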
@@ -7335,9 +7354,9 @@ static void gen_xxsel(DisasContext * ctx)
         gen_exception(ctx, POWERPC_EXCP_VSXU);
         return;
     }
-    a = tcg_temp_new();
-    b = tcg_temp_new();
-    c = tcg_temp_new();
+    a = tcg_temp_new_i64();
+    b = tcg_temp_new_i64();
+    c = tcg_temp_new_i64();
 
     tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
     tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
@@ -7355,9 +7374,9 @@ static void gen_xxsel(DisasContext * ctx)
     tcg_gen_andc_i64(a, a, c);
     tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);
 
-    tcg_temp_free(a);
-    tcg_temp_free(b);
-    tcg_temp_free(c);
+    tcg_temp_free_i64(a);
+    tcg_temp_free_i64(b);
+    tcg_temp_free_i64(c);
 }
 
 static void gen_xxspltw(DisasContext *ctx)
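
gen_xxsel implements a pure bitwise select: per bit, xC chooses between xA and xB, applied once per VSR half. The one-liner it reduces to in plain C:

#include <stdint.h>

/* Per bit: take b where c is 1, else a. */
static uint64_t xxsel64(uint64_t a, uint64_t b, uint64_t c)
{
    return (a & ~c) | (b & c);   /* andc_i64 / and_i64 / or_i64 */
}
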
@@ -7372,8 +7391,8 @@ static void gen_xxspltw(DisasContext *ctx)
         return;
     }
 
-    b = tcg_temp_new();
-    b2 = tcg_temp_new();
+    b = tcg_temp_new_i64();
+    b2 = tcg_temp_new_i64();
 
     if (UIM(ctx->opcode) & 1) {
         tcg_gen_ext32u_i64(b, vsr);
@@ -7385,8 +7404,8 @@ static void gen_xxspltw(DisasContext *ctx)
     tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
     tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
 
-    tcg_temp_free(b);
-    tcg_temp_free(b2);
+    tcg_temp_free_i64(b);
+    tcg_temp_free_i64(b2);
 }
 
 static void gen_xxsldwi(DisasContext *ctx)
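
gen_xxspltw selects one 32-bit word of the source (UIM picks the doubleword and the half within it) and replicates it into all four word slots of the target. A plain-C sketch of the splat step visible above:

#include <stdint.h>

/* src is the selected source doubleword; low_word mirrors UIM & 1. */
static void xxspltw64(uint64_t *xt_hi, uint64_t *xt_lo, uint64_t src, int low_word)
{
    uint64_t b = low_word ? (src & 0xffffffffULL)   /* tcg_gen_ext32u_i64 */
                          : (src >> 32);            /* tcg_gen_shri_i64   */
    *xt_hi = (b << 32) | b;   /* b2 = b << 32; or_i64  */
    *xt_lo = *xt_hi;          /* mov_i64(vsrl, vsrh)   */
}
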
@@ -7396,8 +7415,8 @@ static void gen_xxsldwi(DisasContext *ctx)
         gen_exception(ctx, POWERPC_EXCP_VSXU);
         return;
     }
-    xth = tcg_temp_new();
-    xtl = tcg_temp_new();
+    xth = tcg_temp_new_i64();
+    xtl = tcg_temp_new_i64();
 
     switch (SHW(ctx->opcode)) {
         case 0: {
@@ -7406,7 +7425,7 @@ static void gen_xxsldwi(DisasContext *ctx)
             break;
         }
         case 1: {
-            TCGv_i64 t0 = tcg_temp_new();
+            TCGv_i64 t0 = tcg_temp_new_i64();
             tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
             tcg_gen_shli_i64(xth, xth, 32);
             tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
@@ -7417,7 +7436,7 @@ static void gen_xxsldwi(DisasContext *ctx)
             tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
             tcg_gen_shri_i64(t0, t0, 32);
             tcg_gen_or_i64(xtl, xtl, t0);
-            tcg_temp_free(t0);
+            tcg_temp_free_i64(t0);
             break;
         }
         case 2: {
@@ -7426,7 +7445,7 @@ static void gen_xxsldwi(DisasContext *ctx)
             break;
         }
         case 3: {
-            TCGv_i64 t0 = tcg_temp_new();
+            TCGv_i64 t0 = tcg_temp_new_i64();
             tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
             tcg_gen_shli_i64(xth, xth, 32);
             tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
@@ -7437,7 +7456,7 @@ static void gen_xxsldwi(DisasContext *ctx)
             tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
             tcg_gen_shri_i64(t0, t0, 32);
             tcg_gen_or_i64(xtl, xtl, t0);
-            tcg_temp_free(t0);
+            tcg_temp_free_i64(t0);
             break;
         }
     }
@@ -7445,8 +7464,8 @@ static void gen_xxsldwi(DisasContext *ctx)
     tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
     tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);
 
-    tcg_temp_free(xth);
-    tcg_temp_free(xtl);
+    tcg_temp_free_i64(xth);
+    tcg_temp_free_i64(xtl);
 }
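
Taken together, the four switch cases of gen_xxsldwi select a 128-bit window, at a word offset of SHW, from the 256-bit concatenation xA:xB. A plain-C model of all four cases, mirroring the shift-and-or sequences above:

#include <stdint.h>

static void xxsldwi(uint64_t *xth, uint64_t *xtl, uint64_t ah, uint64_t al,
                    uint64_t bh, uint64_t bl, int shw)
{
    switch (shw & 3) {
    case 0: *xth = ah;                      *xtl = al;                      break;
    case 1: *xth = (ah << 32) | (al >> 32); *xtl = (al << 32) | (bh >> 32); break;
    case 2: *xth = al;                      *xtl = bh;                      break;
    case 3: *xth = (al << 32) | (bh >> 32); *xtl = (bh << 32) | (bl >> 32); break;
    }
}
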