/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
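
/* On a 32-bit host every 64-bit helper argument occupies two
 * tcg_target_ulong words, so the 6 logical input arguments checked above
 * need up to 12 machine words; a 64-bit host passes them directly. */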
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg16(tcg_target_ulong *regs, TCGReg index, uint16_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
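/* Example: tci_uint64(0x00000001, 0xdeadbeef) == 0x00000001deadbeefULL. */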
#endif

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}
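/* When the guest address space is wider than the host registers (a 64-bit
 * guest on a 32-bit host), the address is encoded as two register indexes:
 * low word first, then high word. */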

/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong
tci_read_ri(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(regs, r);
    }
    return value;
}
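/* A register-or-constant operand uses one byte as a register index; the
 * reserved index TCG_CONST signals that an immediate of the given width
 * follows in the bytecode stream instead. */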

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(regs, r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}
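/* A label is stored as an absolute host address within the translated
 * bytecode; the branch opcodes below cast it straight back to a uint8_t *
 * and continue interpreting from there. */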

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
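/* Example: tci_compare32(0xffffffff, 0, TCG_COND_LT) is true (signed,
 * -1 < 0), while tci_compare32(0xffffffff, 0, TCG_COND_LTU) is false. */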

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
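
/* These macros are only expanded inside tcg_qemu_tb_exec() and rely on the
 * local variables env, taddr, oi and tb_ptr being in scope at the point of
 * use: with a softmmu they call the slow-path helpers, otherwise they access
 * guest memory directly through g2h(). */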

/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);
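
    /* Each bytecode instruction starts with the opcode in byte 0 and the
     * total instruction size in byte 1, followed by its operands; the size
     * byte is only checked when CONFIG_DEBUG_TCG is enabled. */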
    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_ri(regs, &tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11),
                                          tci_read_reg(regs, TCG_REG_R12));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
            TODO();
            break;
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
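            /* Example: with t1 = 0xffffffff, t2 = 0, position tmp16 = 8 and
             * length tmp8 = 4, the mask is 0x00000f00 and the result is
             * 0xfffff0ff: bits [11:8] of t1 are replaced by bits [3:0] of t2. */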
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
            TODO();
            break;
        case INDEX_op_ld16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg16(regs, t0, *(uint16_t *)(t1 + t2));
            break;
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
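            /* The 32-bit value read above is a relative displacement that is
             * rewritten when this TB is chained to its successor, which is
             * why it is read atomically. */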
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}