tci.c
/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif

#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif
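
/* The interpreter's entire register state: one slot per TCG target register.
 * TCG_AREG0 holds the CPU env pointer and TCG_REG_CALL_STACK the stack
 * pointer for the temporary buffer; tci_write_reg() below asserts that
 * neither is ever overwritten. */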
static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    tci_assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < ARRAY_SIZE(tci_reg));
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
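
/* Bytecode format decoded by the tci_read_*() helpers below (a sketch of
 * what the TCI backend emits): each instruction starts with a one-byte
 * TCGOpcode and a one-byte total instruction length, followed by its
 * operands.  A register operand is a single TCGReg index byte; conditions
 * also occupy one byte; immediates and labels are stored inline in host
 * byte order at the width of the corresponding tci_read_i*() call.  Where
 * either a register or a constant is allowed ("ri" operands), a reserved
 * index byte (TCG_CONST) signals that an inline immediate follows. */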

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
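
/* Example of the "ri" encoding (a sketch, assuming the layout described
 * above): for an add_i32 with a constant second source, the operand bytes
 * are <dest reg> <src reg> <TCG_CONST> followed by a 32-bit immediate;
 * tci_read_ri32() consumes the TCG_CONST byte plus the immediate and
 * returns the constant's value. */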

/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
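
/* With CONFIG_SOFTMMU, the access macros above expand to the MMU helpers,
 * which perform the softmmu TLB lookup; the (uintptr_t)tb_ptr argument is
 * the return address used to attribute a possible fault back to this
 * bytecode position.  In user mode there is no TLB: guest addresses are
 * translated with g2h() and accessed directly through the ld*_p and st*_p
 * primitives. */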

/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    tci_reg[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);
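
    /* Classic fetch/decode/execute loop: fetch the opcode and length bytes,
     * decode operands with the tci_read_*() helpers (each advances tb_ptr),
     * execute, and fall through to the next instruction.  Branch opcodes
     * repoint tb_ptr and restart the loop with "continue". */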
    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
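        /* A note on the helper-call convention visible in INDEX_op_call
         * below: arguments are taken from a fixed set of TCG registers
         * (R4 is skipped, presumably because it serves as
         * TCG_REG_CALL_STACK), and the 64-bit result is returned in R0,
         * or in the R1:R0 pair on 32-bit hosts. */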
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;
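            /* The tci_assert() in the st_i32/st_i64 cases enforces stack
             * discipline: sp_value points one past the end of tcg_temps, so
             * any store relative to the stack pointer must use a negative
             * offset. */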

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
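            /* Worked example for the deposit operation above: with position
             * tmp16 = 8 and length tmp8 = 4, the mask is
             * ((1 << 4) - 1) << 8 = 0xf00, so the low four bits of t2 are
             * shifted into bits 8..11 of the result and all other bits are
             * taken from t1. */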
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
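            /* The 32-bit displacement read above is the patchable slot of a
             * direct TB-to-TB jump; the TB-chaining code rewrites it with an
             * atomic store, which the atomic_read() here pairs with.  While
             * unpatched, the displacement is presumably 0, so execution
             * simply falls through to the next instruction. */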
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp32);
            break;
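            /* For all qemu_ld/st cases, oi is a TCGMemOpIdx read from the
             * bytecode: it packs the access size, signedness and endianness
             * together with the softmmu mmu_idx.  get_memop(oi) recovers the
             * memory-op flags, and the inner switch dispatches to the
             * matching qemu_ld_* or qemu_st_* access macro. */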
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}