@@ -2516,7 +2516,7 @@ void tcg_expand_vec_op(TCGOpcode opc, TCGType type, unsigned vece,
                        TCGArg a0, ...)
 {
     va_list va;
-    TCGv_vec v0, v1, v2, t1, t2;
+    TCGv_vec v0, v1, v2, t1, t2, c1;
     TCGArg a2;
 
     va_start(va, a0);
@@ -2548,8 +2548,8 @@ void tcg_expand_vec_op(TCGOpcode opc, TCGType type, unsigned vece,
     case INDEX_op_rotlv_vec:
         t1 = tcg_temp_new_vec(type);
-        tcg_gen_dupi_vec(vece, t1, 8 << vece);
-        tcg_gen_sub_vec(vece, t1, v2, t1);
+        c1 = tcg_constant_vec(type, vece, 8 << vece);
+        tcg_gen_sub_vec(vece, t1, v2, c1);
         /* Right shifts are negative left shifts for AArch64. */
         vec_gen_3(INDEX_op_shlv_vec, type, vece, tcgv_vec_arg(t1),
                   tcgv_vec_arg(v1), tcgv_vec_arg(t1));
@@ -2562,9 +2562,9 @@ void tcg_expand_vec_op(TCGOpcode opc, TCGType type, unsigned vece,
     case INDEX_op_rotrv_vec:
         t1 = tcg_temp_new_vec(type);
         t2 = tcg_temp_new_vec(type);
+        c1 = tcg_constant_vec(type, vece, 8 << vece);
         tcg_gen_neg_vec(vece, t1, v2);
-        tcg_gen_dupi_vec(vece, t2, 8 << vece);
-        tcg_gen_add_vec(vece, t2, t1, t2);
+        tcg_gen_sub_vec(vece, t2, c1, v2);
         /* Right shifts are negative left shifts for AArch64. */
         vec_gen_3(INDEX_op_shlv_vec, type, vece, tcgv_vec_arg(t1),
                   tcgv_vec_arg(v1), tcgv_vec_arg(t1));