@@ -2861,6 +2861,9 @@ void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
 #endif
 #else
         gen_helper_exit_atomic(tcg_ctx.tcg_env);
+        /* Produce a result, so that we have a well-formed opcode stream
+           with respect to uses of the result in the (dead) code following.  */
+        tcg_gen_movi_i64(retv, 0);
 #endif /* CONFIG_ATOMIC64 */
     } else {
         TCGv_i32 c32 = tcg_temp_new_i32();
@@ -2966,6 +2969,9 @@ static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
 #endif
 #else
         gen_helper_exit_atomic(tcg_ctx.tcg_env);
+        /* Produce a result, so that we have a well-formed opcode stream
+           with respect to uses of the result in the (dead) code following.  */
+        tcg_gen_movi_i64(ret, 0);
 #endif /* CONFIG_ATOMIC64 */
     } else {
         TCGv_i32 v32 = tcg_temp_new_i32();