2
0

tci.c 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219
  1. /*
  2. * Tiny Code Interpreter for QEMU
  3. *
  4. * Copyright (c) 2009, 2011 Stefan Weil
  5. *
  6. * This program is free software: you can redistribute it and/or modify
  7. * it under the terms of the GNU General Public License as published by
  8. * the Free Software Foundation, either version 2 of the License, or
  9. * (at your option) any later version.
  10. *
  11. * This program is distributed in the hope that it will be useful,
  12. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  14. * GNU General Public License for more details.
  15. *
  16. * You should have received a copy of the GNU General Public License
  17. * along with this program. If not, see <http://www.gnu.org/licenses/>.
  18. */
#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include <string.h>

#include "qemu-common.h"
#include "dyngen-exec.h"        /* env */
#include "exec-all.h"           /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"
/* Marker for missing code: report the source location and abort. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* The call dispatch below hard-codes 4 input arguments (passed in 8
   host words on 32-bit hosts); fail loudly if the TCG core changes. */
#if MAX_OPC_PARAM_IARGS != 4
# error Fix needed, number of supported input arguments changed!
#endif

#if TCG_TARGET_REG_BITS == 32
/* On 32-bit hosts each of the 4 (up to 64-bit) arguments occupies
   two host registers, hence 8 parameters. */
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

/* TCI can optionally use a global register variable for env. */
#if !defined(AREG0)
CPUArchState *env;
#endif

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
/* Address of the bytecode instruction currently being interpreted. */
uintptr_t tci_tb_ptr;
#endif

/* The interpreter's virtual register file. */
static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

#if !defined(CONFIG_TCG_PASS_AREG0)
/* Map the env-taking softmmu helper names onto the legacy __ld/__st
   helpers that take no env argument. */
# define helper_ldb_mmu(env, addr, mmu_idx) __ldb_mmu(addr, mmu_idx)
# define helper_ldw_mmu(env, addr, mmu_idx) __ldw_mmu(addr, mmu_idx)
# define helper_ldl_mmu(env, addr, mmu_idx) __ldl_mmu(addr, mmu_idx)
# define helper_ldq_mmu(env, addr, mmu_idx) __ldq_mmu(addr, mmu_idx)
# define helper_stb_mmu(env, addr, val, mmu_idx) __stb_mmu(addr, val, mmu_idx)
# define helper_stw_mmu(env, addr, val, mmu_idx) __stw_mmu(addr, val, mmu_idx)
# define helper_stl_mmu(env, addr, val, mmu_idx) __stl_mmu(addr, val, mmu_idx)
# define helper_stq_mmu(env, addr, val, mmu_idx) __stq_mmu(addr, val, mmu_idx)
#endif /* !CONFIG_TCG_PASS_AREG0 */
  67. static tcg_target_ulong tci_read_reg(TCGReg index)
  68. {
  69. assert(index < ARRAY_SIZE(tci_reg));
  70. return tci_reg[index];
  71. }
  72. #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
  73. static int8_t tci_read_reg8s(TCGReg index)
  74. {
  75. return (int8_t)tci_read_reg(index);
  76. }
  77. #endif
  78. #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
  79. static int16_t tci_read_reg16s(TCGReg index)
  80. {
  81. return (int16_t)tci_read_reg(index);
  82. }
  83. #endif
  84. #if TCG_TARGET_REG_BITS == 64
  85. static int32_t tci_read_reg32s(TCGReg index)
  86. {
  87. return (int32_t)tci_read_reg(index);
  88. }
  89. #endif
  90. static uint8_t tci_read_reg8(TCGReg index)
  91. {
  92. return (uint8_t)tci_read_reg(index);
  93. }
  94. static uint16_t tci_read_reg16(TCGReg index)
  95. {
  96. return (uint16_t)tci_read_reg(index);
  97. }
  98. static uint32_t tci_read_reg32(TCGReg index)
  99. {
  100. return (uint32_t)tci_read_reg(index);
  101. }
  102. #if TCG_TARGET_REG_BITS == 64
  103. static uint64_t tci_read_reg64(TCGReg index)
  104. {
  105. return tci_read_reg(index);
  106. }
  107. #endif
  108. static void tci_write_reg(TCGReg index, tcg_target_ulong value)
  109. {
  110. assert(index < ARRAY_SIZE(tci_reg));
  111. assert(index != TCG_AREG0);
  112. tci_reg[index] = value;
  113. }
  114. static void tci_write_reg8s(TCGReg index, int8_t value)
  115. {
  116. tci_write_reg(index, value);
  117. }
  118. static void tci_write_reg16s(TCGReg index, int16_t value)
  119. {
  120. tci_write_reg(index, value);
  121. }
  122. #if TCG_TARGET_REG_BITS == 64
  123. static void tci_write_reg32s(TCGReg index, int32_t value)
  124. {
  125. tci_write_reg(index, value);
  126. }
  127. #endif
  128. static void tci_write_reg8(TCGReg index, uint8_t value)
  129. {
  130. tci_write_reg(index, value);
  131. }
  132. static void tci_write_reg16(TCGReg index, uint16_t value)
  133. {
  134. tci_write_reg(index, value);
  135. }
  136. static void tci_write_reg32(TCGReg index, uint32_t value)
  137. {
  138. tci_write_reg(index, value);
  139. }
  140. #if TCG_TARGET_REG_BITS == 32
  141. static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
  142. uint64_t value)
  143. {
  144. tci_write_reg(low_index, value);
  145. tci_write_reg(high_index, value >> 32);
  146. }
  147. #elif TCG_TARGET_REG_BITS == 64
  148. static void tci_write_reg64(TCGReg index, uint64_t value)
  149. {
  150. tci_write_reg(index, value);
  151. }
  152. #endif
  153. #if TCG_TARGET_REG_BITS == 32
  154. /* Create a 64 bit value from two 32 bit values. */
  155. static uint64_t tci_uint64(uint32_t high, uint32_t low)
  156. {
  157. return ((uint64_t)high << 32) + low;
  158. }
  159. #endif
  160. /* Read constant (native size) from bytecode. */
  161. static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
  162. {
  163. tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
  164. *tb_ptr += sizeof(value);
  165. return value;
  166. }
  167. /* Read constant (32 bit) from bytecode. */
  168. static uint32_t tci_read_i32(uint8_t **tb_ptr)
  169. {
  170. uint32_t value = *(uint32_t *)(*tb_ptr);
  171. *tb_ptr += sizeof(value);
  172. return value;
  173. }
  174. #if TCG_TARGET_REG_BITS == 64
  175. /* Read constant (64 bit) from bytecode. */
  176. static uint64_t tci_read_i64(uint8_t **tb_ptr)
  177. {
  178. uint64_t value = *(uint64_t *)(*tb_ptr);
  179. *tb_ptr += sizeof(value);
  180. return value;
  181. }
  182. #endif
  183. /* Read indexed register (native size) from bytecode. */
  184. static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
  185. {
  186. tcg_target_ulong value = tci_read_reg(**tb_ptr);
  187. *tb_ptr += 1;
  188. return value;
  189. }
  190. /* Read indexed register (8 bit) from bytecode. */
  191. static uint8_t tci_read_r8(uint8_t **tb_ptr)
  192. {
  193. uint8_t value = tci_read_reg8(**tb_ptr);
  194. *tb_ptr += 1;
  195. return value;
  196. }
  197. #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
  198. /* Read indexed register (8 bit signed) from bytecode. */
  199. static int8_t tci_read_r8s(uint8_t **tb_ptr)
  200. {
  201. int8_t value = tci_read_reg8s(**tb_ptr);
  202. *tb_ptr += 1;
  203. return value;
  204. }
  205. #endif
  206. /* Read indexed register (16 bit) from bytecode. */
  207. static uint16_t tci_read_r16(uint8_t **tb_ptr)
  208. {
  209. uint16_t value = tci_read_reg16(**tb_ptr);
  210. *tb_ptr += 1;
  211. return value;
  212. }
  213. #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
  214. /* Read indexed register (16 bit signed) from bytecode. */
  215. static int16_t tci_read_r16s(uint8_t **tb_ptr)
  216. {
  217. int16_t value = tci_read_reg16s(**tb_ptr);
  218. *tb_ptr += 1;
  219. return value;
  220. }
  221. #endif
  222. /* Read indexed register (32 bit) from bytecode. */
  223. static uint32_t tci_read_r32(uint8_t **tb_ptr)
  224. {
  225. uint32_t value = tci_read_reg32(**tb_ptr);
  226. *tb_ptr += 1;
  227. return value;
  228. }
  229. #if TCG_TARGET_REG_BITS == 32
  230. /* Read two indexed registers (2 * 32 bit) from bytecode. */
  231. static uint64_t tci_read_r64(uint8_t **tb_ptr)
  232. {
  233. uint32_t low = tci_read_r32(tb_ptr);
  234. return tci_uint64(tci_read_r32(tb_ptr), low);
  235. }
  236. #elif TCG_TARGET_REG_BITS == 64
  237. /* Read indexed register (32 bit signed) from bytecode. */
  238. static int32_t tci_read_r32s(uint8_t **tb_ptr)
  239. {
  240. int32_t value = tci_read_reg32s(**tb_ptr);
  241. *tb_ptr += 1;
  242. return value;
  243. }
  244. /* Read indexed register (64 bit) from bytecode. */
  245. static uint64_t tci_read_r64(uint8_t **tb_ptr)
  246. {
  247. uint64_t value = tci_read_reg64(**tb_ptr);
  248. *tb_ptr += 1;
  249. return value;
  250. }
  251. #endif
  252. /* Read indexed register(s) with target address from bytecode. */
  253. static target_ulong tci_read_ulong(uint8_t **tb_ptr)
  254. {
  255. target_ulong taddr = tci_read_r(tb_ptr);
  256. #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
  257. taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
  258. #endif
  259. return taddr;
  260. }
  261. /* Read indexed register or constant (native size) from bytecode. */
  262. static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
  263. {
  264. tcg_target_ulong value;
  265. TCGReg r = **tb_ptr;
  266. *tb_ptr += 1;
  267. if (r == TCG_CONST) {
  268. value = tci_read_i(tb_ptr);
  269. } else {
  270. value = tci_read_reg(r);
  271. }
  272. return value;
  273. }
  274. /* Read indexed register or constant (32 bit) from bytecode. */
  275. static uint32_t tci_read_ri32(uint8_t **tb_ptr)
  276. {
  277. uint32_t value;
  278. TCGReg r = **tb_ptr;
  279. *tb_ptr += 1;
  280. if (r == TCG_CONST) {
  281. value = tci_read_i32(tb_ptr);
  282. } else {
  283. value = tci_read_reg32(r);
  284. }
  285. return value;
  286. }
  287. #if TCG_TARGET_REG_BITS == 32
  288. /* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
  289. static uint64_t tci_read_ri64(uint8_t **tb_ptr)
  290. {
  291. uint32_t low = tci_read_ri32(tb_ptr);
  292. return tci_uint64(tci_read_ri32(tb_ptr), low);
  293. }
  294. #elif TCG_TARGET_REG_BITS == 64
  295. /* Read indexed register or constant (64 bit) from bytecode. */
  296. static uint64_t tci_read_ri64(uint8_t **tb_ptr)
  297. {
  298. uint64_t value;
  299. TCGReg r = **tb_ptr;
  300. *tb_ptr += 1;
  301. if (r == TCG_CONST) {
  302. value = tci_read_i64(tb_ptr);
  303. } else {
  304. value = tci_read_reg64(r);
  305. }
  306. return value;
  307. }
  308. #endif
  309. static target_ulong tci_read_label(uint8_t **tb_ptr)
  310. {
  311. target_ulong label = tci_read_i(tb_ptr);
  312. assert(label != 0);
  313. return label;
  314. }
  315. static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
  316. {
  317. bool result = false;
  318. int32_t i0 = u0;
  319. int32_t i1 = u1;
  320. switch (condition) {
  321. case TCG_COND_EQ:
  322. result = (u0 == u1);
  323. break;
  324. case TCG_COND_NE:
  325. result = (u0 != u1);
  326. break;
  327. case TCG_COND_LT:
  328. result = (i0 < i1);
  329. break;
  330. case TCG_COND_GE:
  331. result = (i0 >= i1);
  332. break;
  333. case TCG_COND_LE:
  334. result = (i0 <= i1);
  335. break;
  336. case TCG_COND_GT:
  337. result = (i0 > i1);
  338. break;
  339. case TCG_COND_LTU:
  340. result = (u0 < u1);
  341. break;
  342. case TCG_COND_GEU:
  343. result = (u0 >= u1);
  344. break;
  345. case TCG_COND_LEU:
  346. result = (u0 <= u1);
  347. break;
  348. case TCG_COND_GTU:
  349. result = (u0 > u1);
  350. break;
  351. default:
  352. TODO();
  353. }
  354. return result;
  355. }
  356. static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
  357. {
  358. bool result = false;
  359. int64_t i0 = u0;
  360. int64_t i1 = u1;
  361. switch (condition) {
  362. case TCG_COND_EQ:
  363. result = (u0 == u1);
  364. break;
  365. case TCG_COND_NE:
  366. result = (u0 != u1);
  367. break;
  368. case TCG_COND_LT:
  369. result = (i0 < i1);
  370. break;
  371. case TCG_COND_GE:
  372. result = (i0 >= i1);
  373. break;
  374. case TCG_COND_LE:
  375. result = (i0 <= i1);
  376. break;
  377. case TCG_COND_GT:
  378. result = (i0 > i1);
  379. break;
  380. case TCG_COND_LTU:
  381. result = (u0 < u1);
  382. break;
  383. case TCG_COND_GEU:
  384. result = (u0 >= u1);
  385. break;
  386. case TCG_COND_LEU:
  387. result = (u0 <= u1);
  388. break;
  389. case TCG_COND_GTU:
  390. result = (u0 > u1);
  391. break;
  392. default:
  393. TODO();
  394. }
  395. return result;
  396. }
  397. /* Interpret pseudo code in tb. */
  398. tcg_target_ulong tcg_qemu_tb_exec(CPUArchState *cpustate, uint8_t *tb_ptr)
  399. {
  400. tcg_target_ulong next_tb = 0;
  401. env = cpustate;
  402. tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
  403. assert(tb_ptr);
  404. for (;;) {
  405. #if defined(GETPC)
  406. tci_tb_ptr = (uintptr_t)tb_ptr;
  407. #endif
  408. TCGOpcode opc = tb_ptr[0];
  409. #if !defined(NDEBUG)
  410. uint8_t op_size = tb_ptr[1];
  411. uint8_t *old_code_ptr = tb_ptr;
  412. #endif
  413. tcg_target_ulong t0;
  414. tcg_target_ulong t1;
  415. tcg_target_ulong t2;
  416. tcg_target_ulong label;
  417. TCGCond condition;
  418. target_ulong taddr;
  419. #ifndef CONFIG_SOFTMMU
  420. tcg_target_ulong host_addr;
  421. #endif
  422. uint8_t tmp8;
  423. uint16_t tmp16;
  424. uint32_t tmp32;
  425. uint64_t tmp64;
  426. #if TCG_TARGET_REG_BITS == 32
  427. uint64_t v64;
  428. #endif
  429. /* Skip opcode and size entry. */
  430. tb_ptr += 2;
  431. switch (opc) {
  432. case INDEX_op_end:
  433. case INDEX_op_nop:
  434. break;
  435. case INDEX_op_nop1:
  436. case INDEX_op_nop2:
  437. case INDEX_op_nop3:
  438. case INDEX_op_nopn:
  439. case INDEX_op_discard:
  440. TODO();
  441. break;
  442. case INDEX_op_set_label:
  443. TODO();
  444. break;
  445. case INDEX_op_call:
  446. t0 = tci_read_ri(&tb_ptr);
  447. #if TCG_TARGET_REG_BITS == 32
  448. tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
  449. tci_read_reg(TCG_REG_R1),
  450. tci_read_reg(TCG_REG_R2),
  451. tci_read_reg(TCG_REG_R3),
  452. tci_read_reg(TCG_REG_R5),
  453. tci_read_reg(TCG_REG_R6),
  454. tci_read_reg(TCG_REG_R7),
  455. tci_read_reg(TCG_REG_R8));
  456. tci_write_reg(TCG_REG_R0, tmp64);
  457. tci_write_reg(TCG_REG_R1, tmp64 >> 32);
  458. #else
  459. tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
  460. tci_read_reg(TCG_REG_R1),
  461. tci_read_reg(TCG_REG_R2),
  462. tci_read_reg(TCG_REG_R3));
  463. tci_write_reg(TCG_REG_R0, tmp64);
  464. #endif
  465. break;
  466. case INDEX_op_jmp:
  467. case INDEX_op_br:
  468. label = tci_read_label(&tb_ptr);
  469. assert(tb_ptr == old_code_ptr + op_size);
  470. tb_ptr = (uint8_t *)label;
  471. continue;
  472. case INDEX_op_setcond_i32:
  473. t0 = *tb_ptr++;
  474. t1 = tci_read_r32(&tb_ptr);
  475. t2 = tci_read_ri32(&tb_ptr);
  476. condition = *tb_ptr++;
  477. tci_write_reg32(t0, tci_compare32(t1, t2, condition));
  478. break;
  479. #if TCG_TARGET_REG_BITS == 32
  480. case INDEX_op_setcond2_i32:
  481. t0 = *tb_ptr++;
  482. tmp64 = tci_read_r64(&tb_ptr);
  483. v64 = tci_read_ri64(&tb_ptr);
  484. condition = *tb_ptr++;
  485. tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
  486. break;
  487. #elif TCG_TARGET_REG_BITS == 64
  488. case INDEX_op_setcond_i64:
  489. t0 = *tb_ptr++;
  490. t1 = tci_read_r64(&tb_ptr);
  491. t2 = tci_read_ri64(&tb_ptr);
  492. condition = *tb_ptr++;
  493. tci_write_reg64(t0, tci_compare64(t1, t2, condition));
  494. break;
  495. #endif
  496. case INDEX_op_mov_i32:
  497. t0 = *tb_ptr++;
  498. t1 = tci_read_r32(&tb_ptr);
  499. tci_write_reg32(t0, t1);
  500. break;
  501. case INDEX_op_movi_i32:
  502. t0 = *tb_ptr++;
  503. t1 = tci_read_i32(&tb_ptr);
  504. tci_write_reg32(t0, t1);
  505. break;
  506. /* Load/store operations (32 bit). */
  507. case INDEX_op_ld8u_i32:
  508. t0 = *tb_ptr++;
  509. t1 = tci_read_r(&tb_ptr);
  510. t2 = tci_read_i32(&tb_ptr);
  511. tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
  512. break;
  513. case INDEX_op_ld8s_i32:
  514. case INDEX_op_ld16u_i32:
  515. TODO();
  516. break;
  517. case INDEX_op_ld16s_i32:
  518. TODO();
  519. break;
  520. case INDEX_op_ld_i32:
  521. t0 = *tb_ptr++;
  522. t1 = tci_read_r(&tb_ptr);
  523. t2 = tci_read_i32(&tb_ptr);
  524. tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
  525. break;
  526. case INDEX_op_st8_i32:
  527. t0 = tci_read_r8(&tb_ptr);
  528. t1 = tci_read_r(&tb_ptr);
  529. t2 = tci_read_i32(&tb_ptr);
  530. *(uint8_t *)(t1 + t2) = t0;
  531. break;
  532. case INDEX_op_st16_i32:
  533. t0 = tci_read_r16(&tb_ptr);
  534. t1 = tci_read_r(&tb_ptr);
  535. t2 = tci_read_i32(&tb_ptr);
  536. *(uint16_t *)(t1 + t2) = t0;
  537. break;
  538. case INDEX_op_st_i32:
  539. t0 = tci_read_r32(&tb_ptr);
  540. t1 = tci_read_r(&tb_ptr);
  541. t2 = tci_read_i32(&tb_ptr);
  542. *(uint32_t *)(t1 + t2) = t0;
  543. break;
  544. /* Arithmetic operations (32 bit). */
  545. case INDEX_op_add_i32:
  546. t0 = *tb_ptr++;
  547. t1 = tci_read_ri32(&tb_ptr);
  548. t2 = tci_read_ri32(&tb_ptr);
  549. tci_write_reg32(t0, t1 + t2);
  550. break;
  551. case INDEX_op_sub_i32:
  552. t0 = *tb_ptr++;
  553. t1 = tci_read_ri32(&tb_ptr);
  554. t2 = tci_read_ri32(&tb_ptr);
  555. tci_write_reg32(t0, t1 - t2);
  556. break;
  557. case INDEX_op_mul_i32:
  558. t0 = *tb_ptr++;
  559. t1 = tci_read_ri32(&tb_ptr);
  560. t2 = tci_read_ri32(&tb_ptr);
  561. tci_write_reg32(t0, t1 * t2);
  562. break;
  563. #if TCG_TARGET_HAS_div_i32
  564. case INDEX_op_div_i32:
  565. t0 = *tb_ptr++;
  566. t1 = tci_read_ri32(&tb_ptr);
  567. t2 = tci_read_ri32(&tb_ptr);
  568. tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
  569. break;
  570. case INDEX_op_divu_i32:
  571. t0 = *tb_ptr++;
  572. t1 = tci_read_ri32(&tb_ptr);
  573. t2 = tci_read_ri32(&tb_ptr);
  574. tci_write_reg32(t0, t1 / t2);
  575. break;
  576. case INDEX_op_rem_i32:
  577. t0 = *tb_ptr++;
  578. t1 = tci_read_ri32(&tb_ptr);
  579. t2 = tci_read_ri32(&tb_ptr);
  580. tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
  581. break;
  582. case INDEX_op_remu_i32:
  583. t0 = *tb_ptr++;
  584. t1 = tci_read_ri32(&tb_ptr);
  585. t2 = tci_read_ri32(&tb_ptr);
  586. tci_write_reg32(t0, t1 % t2);
  587. break;
  588. #elif TCG_TARGET_HAS_div2_i32
  589. case INDEX_op_div2_i32:
  590. case INDEX_op_divu2_i32:
  591. TODO();
  592. break;
  593. #endif
  594. case INDEX_op_and_i32:
  595. t0 = *tb_ptr++;
  596. t1 = tci_read_ri32(&tb_ptr);
  597. t2 = tci_read_ri32(&tb_ptr);
  598. tci_write_reg32(t0, t1 & t2);
  599. break;
  600. case INDEX_op_or_i32:
  601. t0 = *tb_ptr++;
  602. t1 = tci_read_ri32(&tb_ptr);
  603. t2 = tci_read_ri32(&tb_ptr);
  604. tci_write_reg32(t0, t1 | t2);
  605. break;
  606. case INDEX_op_xor_i32:
  607. t0 = *tb_ptr++;
  608. t1 = tci_read_ri32(&tb_ptr);
  609. t2 = tci_read_ri32(&tb_ptr);
  610. tci_write_reg32(t0, t1 ^ t2);
  611. break;
  612. /* Shift/rotate operations (32 bit). */
  613. case INDEX_op_shl_i32:
  614. t0 = *tb_ptr++;
  615. t1 = tci_read_ri32(&tb_ptr);
  616. t2 = tci_read_ri32(&tb_ptr);
  617. tci_write_reg32(t0, t1 << t2);
  618. break;
  619. case INDEX_op_shr_i32:
  620. t0 = *tb_ptr++;
  621. t1 = tci_read_ri32(&tb_ptr);
  622. t2 = tci_read_ri32(&tb_ptr);
  623. tci_write_reg32(t0, t1 >> t2);
  624. break;
  625. case INDEX_op_sar_i32:
  626. t0 = *tb_ptr++;
  627. t1 = tci_read_ri32(&tb_ptr);
  628. t2 = tci_read_ri32(&tb_ptr);
  629. tci_write_reg32(t0, ((int32_t)t1 >> t2));
  630. break;
  631. #if TCG_TARGET_HAS_rot_i32
  632. case INDEX_op_rotl_i32:
  633. t0 = *tb_ptr++;
  634. t1 = tci_read_ri32(&tb_ptr);
  635. t2 = tci_read_ri32(&tb_ptr);
  636. tci_write_reg32(t0, (t1 << t2) | (t1 >> (32 - t2)));
  637. break;
  638. case INDEX_op_rotr_i32:
  639. t0 = *tb_ptr++;
  640. t1 = tci_read_ri32(&tb_ptr);
  641. t2 = tci_read_ri32(&tb_ptr);
  642. tci_write_reg32(t0, (t1 >> t2) | (t1 << (32 - t2)));
  643. break;
  644. #endif
  645. case INDEX_op_brcond_i32:
  646. t0 = tci_read_r32(&tb_ptr);
  647. t1 = tci_read_ri32(&tb_ptr);
  648. condition = *tb_ptr++;
  649. label = tci_read_label(&tb_ptr);
  650. if (tci_compare32(t0, t1, condition)) {
  651. assert(tb_ptr == old_code_ptr + op_size);
  652. tb_ptr = (uint8_t *)label;
  653. continue;
  654. }
  655. break;
  656. #if TCG_TARGET_REG_BITS == 32
  657. case INDEX_op_add2_i32:
  658. t0 = *tb_ptr++;
  659. t1 = *tb_ptr++;
  660. tmp64 = tci_read_r64(&tb_ptr);
  661. tmp64 += tci_read_r64(&tb_ptr);
  662. tci_write_reg64(t1, t0, tmp64);
  663. break;
  664. case INDEX_op_sub2_i32:
  665. t0 = *tb_ptr++;
  666. t1 = *tb_ptr++;
  667. tmp64 = tci_read_r64(&tb_ptr);
  668. tmp64 -= tci_read_r64(&tb_ptr);
  669. tci_write_reg64(t1, t0, tmp64);
  670. break;
  671. case INDEX_op_brcond2_i32:
  672. tmp64 = tci_read_r64(&tb_ptr);
  673. v64 = tci_read_ri64(&tb_ptr);
  674. condition = *tb_ptr++;
  675. label = tci_read_label(&tb_ptr);
  676. if (tci_compare64(tmp64, v64, condition)) {
  677. assert(tb_ptr == old_code_ptr + op_size);
  678. tb_ptr = (uint8_t *)label;
  679. continue;
  680. }
  681. break;
  682. case INDEX_op_mulu2_i32:
  683. t0 = *tb_ptr++;
  684. t1 = *tb_ptr++;
  685. t2 = tci_read_r32(&tb_ptr);
  686. tmp64 = tci_read_r32(&tb_ptr);
  687. tci_write_reg64(t1, t0, t2 * tmp64);
  688. break;
  689. #endif /* TCG_TARGET_REG_BITS == 32 */
  690. #if TCG_TARGET_HAS_ext8s_i32
  691. case INDEX_op_ext8s_i32:
  692. t0 = *tb_ptr++;
  693. t1 = tci_read_r8s(&tb_ptr);
  694. tci_write_reg32(t0, t1);
  695. break;
  696. #endif
  697. #if TCG_TARGET_HAS_ext16s_i32
  698. case INDEX_op_ext16s_i32:
  699. t0 = *tb_ptr++;
  700. t1 = tci_read_r16s(&tb_ptr);
  701. tci_write_reg32(t0, t1);
  702. break;
  703. #endif
  704. #if TCG_TARGET_HAS_ext8u_i32
  705. case INDEX_op_ext8u_i32:
  706. t0 = *tb_ptr++;
  707. t1 = tci_read_r8(&tb_ptr);
  708. tci_write_reg32(t0, t1);
  709. break;
  710. #endif
  711. #if TCG_TARGET_HAS_ext16u_i32
  712. case INDEX_op_ext16u_i32:
  713. t0 = *tb_ptr++;
  714. t1 = tci_read_r16(&tb_ptr);
  715. tci_write_reg32(t0, t1);
  716. break;
  717. #endif
  718. #if TCG_TARGET_HAS_bswap16_i32
  719. case INDEX_op_bswap16_i32:
  720. t0 = *tb_ptr++;
  721. t1 = tci_read_r16(&tb_ptr);
  722. tci_write_reg32(t0, bswap16(t1));
  723. break;
  724. #endif
  725. #if TCG_TARGET_HAS_bswap32_i32
  726. case INDEX_op_bswap32_i32:
  727. t0 = *tb_ptr++;
  728. t1 = tci_read_r32(&tb_ptr);
  729. tci_write_reg32(t0, bswap32(t1));
  730. break;
  731. #endif
  732. #if TCG_TARGET_HAS_not_i32
  733. case INDEX_op_not_i32:
  734. t0 = *tb_ptr++;
  735. t1 = tci_read_r32(&tb_ptr);
  736. tci_write_reg32(t0, ~t1);
  737. break;
  738. #endif
  739. #if TCG_TARGET_HAS_neg_i32
  740. case INDEX_op_neg_i32:
  741. t0 = *tb_ptr++;
  742. t1 = tci_read_r32(&tb_ptr);
  743. tci_write_reg32(t0, -t1);
  744. break;
  745. #endif
  746. #if TCG_TARGET_REG_BITS == 64
  747. case INDEX_op_mov_i64:
  748. t0 = *tb_ptr++;
  749. t1 = tci_read_r64(&tb_ptr);
  750. tci_write_reg64(t0, t1);
  751. break;
  752. case INDEX_op_movi_i64:
  753. t0 = *tb_ptr++;
  754. t1 = tci_read_i64(&tb_ptr);
  755. tci_write_reg64(t0, t1);
  756. break;
  757. /* Load/store operations (64 bit). */
  758. case INDEX_op_ld8u_i64:
  759. t0 = *tb_ptr++;
  760. t1 = tci_read_r(&tb_ptr);
  761. t2 = tci_read_i32(&tb_ptr);
  762. tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
  763. break;
  764. case INDEX_op_ld8s_i64:
  765. case INDEX_op_ld16u_i64:
  766. case INDEX_op_ld16s_i64:
  767. TODO();
  768. break;
  769. case INDEX_op_ld32u_i64:
  770. t0 = *tb_ptr++;
  771. t1 = tci_read_r(&tb_ptr);
  772. t2 = tci_read_i32(&tb_ptr);
  773. tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
  774. break;
  775. case INDEX_op_ld32s_i64:
  776. t0 = *tb_ptr++;
  777. t1 = tci_read_r(&tb_ptr);
  778. t2 = tci_read_i32(&tb_ptr);
  779. tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
  780. break;
  781. case INDEX_op_ld_i64:
  782. t0 = *tb_ptr++;
  783. t1 = tci_read_r(&tb_ptr);
  784. t2 = tci_read_i32(&tb_ptr);
  785. tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
  786. break;
  787. case INDEX_op_st8_i64:
  788. t0 = tci_read_r8(&tb_ptr);
  789. t1 = tci_read_r(&tb_ptr);
  790. t2 = tci_read_i32(&tb_ptr);
  791. *(uint8_t *)(t1 + t2) = t0;
  792. break;
  793. case INDEX_op_st16_i64:
  794. t0 = tci_read_r16(&tb_ptr);
  795. t1 = tci_read_r(&tb_ptr);
  796. t2 = tci_read_i32(&tb_ptr);
  797. *(uint16_t *)(t1 + t2) = t0;
  798. break;
  799. case INDEX_op_st32_i64:
  800. t0 = tci_read_r32(&tb_ptr);
  801. t1 = tci_read_r(&tb_ptr);
  802. t2 = tci_read_i32(&tb_ptr);
  803. *(uint32_t *)(t1 + t2) = t0;
  804. break;
  805. case INDEX_op_st_i64:
  806. t0 = tci_read_r64(&tb_ptr);
  807. t1 = tci_read_r(&tb_ptr);
  808. t2 = tci_read_i32(&tb_ptr);
  809. *(uint64_t *)(t1 + t2) = t0;
  810. break;
  811. /* Arithmetic operations (64 bit). */
  812. case INDEX_op_add_i64:
  813. t0 = *tb_ptr++;
  814. t1 = tci_read_ri64(&tb_ptr);
  815. t2 = tci_read_ri64(&tb_ptr);
  816. tci_write_reg64(t0, t1 + t2);
  817. break;
  818. case INDEX_op_sub_i64:
  819. t0 = *tb_ptr++;
  820. t1 = tci_read_ri64(&tb_ptr);
  821. t2 = tci_read_ri64(&tb_ptr);
  822. tci_write_reg64(t0, t1 - t2);
  823. break;
  824. case INDEX_op_mul_i64:
  825. t0 = *tb_ptr++;
  826. t1 = tci_read_ri64(&tb_ptr);
  827. t2 = tci_read_ri64(&tb_ptr);
  828. tci_write_reg64(t0, t1 * t2);
  829. break;
  830. #if TCG_TARGET_HAS_div_i64
  831. case INDEX_op_div_i64:
  832. case INDEX_op_divu_i64:
  833. case INDEX_op_rem_i64:
  834. case INDEX_op_remu_i64:
  835. TODO();
  836. break;
  837. #elif TCG_TARGET_HAS_div2_i64
  838. case INDEX_op_div2_i64:
  839. case INDEX_op_divu2_i64:
  840. TODO();
  841. break;
  842. #endif
  843. case INDEX_op_and_i64:
  844. t0 = *tb_ptr++;
  845. t1 = tci_read_ri64(&tb_ptr);
  846. t2 = tci_read_ri64(&tb_ptr);
  847. tci_write_reg64(t0, t1 & t2);
  848. break;
  849. case INDEX_op_or_i64:
  850. t0 = *tb_ptr++;
  851. t1 = tci_read_ri64(&tb_ptr);
  852. t2 = tci_read_ri64(&tb_ptr);
  853. tci_write_reg64(t0, t1 | t2);
  854. break;
  855. case INDEX_op_xor_i64:
  856. t0 = *tb_ptr++;
  857. t1 = tci_read_ri64(&tb_ptr);
  858. t2 = tci_read_ri64(&tb_ptr);
  859. tci_write_reg64(t0, t1 ^ t2);
  860. break;
  861. /* Shift/rotate operations (64 bit). */
  862. case INDEX_op_shl_i64:
  863. t0 = *tb_ptr++;
  864. t1 = tci_read_ri64(&tb_ptr);
  865. t2 = tci_read_ri64(&tb_ptr);
  866. tci_write_reg64(t0, t1 << t2);
  867. break;
  868. case INDEX_op_shr_i64:
  869. t0 = *tb_ptr++;
  870. t1 = tci_read_ri64(&tb_ptr);
  871. t2 = tci_read_ri64(&tb_ptr);
  872. tci_write_reg64(t0, t1 >> t2);
  873. break;
  874. case INDEX_op_sar_i64:
  875. t0 = *tb_ptr++;
  876. t1 = tci_read_ri64(&tb_ptr);
  877. t2 = tci_read_ri64(&tb_ptr);
  878. tci_write_reg64(t0, ((int64_t)t1 >> t2));
  879. break;
  880. #if TCG_TARGET_HAS_rot_i64
  881. case INDEX_op_rotl_i64:
  882. case INDEX_op_rotr_i64:
  883. TODO();
  884. break;
  885. #endif
  886. case INDEX_op_brcond_i64:
  887. t0 = tci_read_r64(&tb_ptr);
  888. t1 = tci_read_ri64(&tb_ptr);
  889. condition = *tb_ptr++;
  890. label = tci_read_label(&tb_ptr);
  891. if (tci_compare64(t0, t1, condition)) {
  892. assert(tb_ptr == old_code_ptr + op_size);
  893. tb_ptr = (uint8_t *)label;
  894. continue;
  895. }
  896. break;
  897. #if TCG_TARGET_HAS_ext8u_i64
  898. case INDEX_op_ext8u_i64:
  899. t0 = *tb_ptr++;
  900. t1 = tci_read_r8(&tb_ptr);
  901. tci_write_reg64(t0, t1);
  902. break;
  903. #endif
  904. #if TCG_TARGET_HAS_ext8s_i64
  905. case INDEX_op_ext8s_i64:
  906. t0 = *tb_ptr++;
  907. t1 = tci_read_r8s(&tb_ptr);
  908. tci_write_reg64(t0, t1);
  909. break;
  910. #endif
  911. #if TCG_TARGET_HAS_ext16s_i64
  912. case INDEX_op_ext16s_i64:
  913. t0 = *tb_ptr++;
  914. t1 = tci_read_r16s(&tb_ptr);
  915. tci_write_reg64(t0, t1);
  916. break;
  917. #endif
  918. #if TCG_TARGET_HAS_ext16u_i64
  919. case INDEX_op_ext16u_i64:
  920. t0 = *tb_ptr++;
  921. t1 = tci_read_r16(&tb_ptr);
  922. tci_write_reg64(t0, t1);
  923. break;
  924. #endif
  925. #if TCG_TARGET_HAS_ext32s_i64
  926. case INDEX_op_ext32s_i64:
  927. t0 = *tb_ptr++;
  928. t1 = tci_read_r32s(&tb_ptr);
  929. tci_write_reg64(t0, t1);
  930. break;
  931. #endif
  932. #if TCG_TARGET_HAS_ext32u_i64
  933. case INDEX_op_ext32u_i64:
  934. t0 = *tb_ptr++;
  935. t1 = tci_read_r32(&tb_ptr);
  936. tci_write_reg64(t0, t1);
  937. break;
  938. #endif
  939. #if TCG_TARGET_HAS_bswap16_i64
  940. case INDEX_op_bswap16_i64:
  941. TODO();
  942. t0 = *tb_ptr++;
  943. t1 = tci_read_r16(&tb_ptr);
  944. tci_write_reg64(t0, bswap16(t1));
  945. break;
  946. #endif
  947. #if TCG_TARGET_HAS_bswap32_i64
  948. case INDEX_op_bswap32_i64:
  949. t0 = *tb_ptr++;
  950. t1 = tci_read_r32(&tb_ptr);
  951. tci_write_reg64(t0, bswap32(t1));
  952. break;
  953. #endif
  954. #if TCG_TARGET_HAS_bswap64_i64
  955. case INDEX_op_bswap64_i64:
  956. TODO();
  957. t0 = *tb_ptr++;
  958. t1 = tci_read_r64(&tb_ptr);
  959. tci_write_reg64(t0, bswap64(t1));
  960. break;
  961. #endif
  962. #if TCG_TARGET_HAS_not_i64
  963. case INDEX_op_not_i64:
  964. t0 = *tb_ptr++;
  965. t1 = tci_read_r64(&tb_ptr);
  966. tci_write_reg64(t0, ~t1);
  967. break;
  968. #endif
  969. #if TCG_TARGET_HAS_neg_i64
  970. case INDEX_op_neg_i64:
  971. t0 = *tb_ptr++;
  972. t1 = tci_read_r64(&tb_ptr);
  973. tci_write_reg64(t0, -t1);
  974. break;
  975. #endif
  976. #endif /* TCG_TARGET_REG_BITS == 64 */
  977. /* QEMU specific operations. */
  978. #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
  979. case INDEX_op_debug_insn_start:
  980. TODO();
  981. break;
  982. #else
  983. case INDEX_op_debug_insn_start:
  984. TODO();
  985. break;
  986. #endif
  987. case INDEX_op_exit_tb:
  988. next_tb = *(uint64_t *)tb_ptr;
  989. goto exit;
  990. break;
  991. case INDEX_op_goto_tb:
  992. t0 = tci_read_i32(&tb_ptr);
  993. assert(tb_ptr == old_code_ptr + op_size);
  994. tb_ptr += (int32_t)t0;
  995. continue;
  996. case INDEX_op_qemu_ld8u:
  997. t0 = *tb_ptr++;
  998. taddr = tci_read_ulong(&tb_ptr);
  999. #ifdef CONFIG_SOFTMMU
  1000. tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
  1001. #else
  1002. host_addr = (tcg_target_ulong)taddr;
  1003. assert(taddr == host_addr);
  1004. tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
  1005. #endif
  1006. tci_write_reg8(t0, tmp8);
  1007. break;
  1008. case INDEX_op_qemu_ld8s:
  1009. t0 = *tb_ptr++;
  1010. taddr = tci_read_ulong(&tb_ptr);
  1011. #ifdef CONFIG_SOFTMMU
  1012. tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
  1013. #else
  1014. host_addr = (tcg_target_ulong)taddr;
  1015. assert(taddr == host_addr);
  1016. tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
  1017. #endif
  1018. tci_write_reg8s(t0, tmp8);
  1019. break;
  1020. case INDEX_op_qemu_ld16u:
  1021. t0 = *tb_ptr++;
  1022. taddr = tci_read_ulong(&tb_ptr);
  1023. #ifdef CONFIG_SOFTMMU
  1024. tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
  1025. #else
  1026. host_addr = (tcg_target_ulong)taddr;
  1027. assert(taddr == host_addr);
  1028. tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
  1029. #endif
  1030. tci_write_reg16(t0, tmp16);
  1031. break;
  1032. case INDEX_op_qemu_ld16s:
  1033. t0 = *tb_ptr++;
  1034. taddr = tci_read_ulong(&tb_ptr);
  1035. #ifdef CONFIG_SOFTMMU
  1036. tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
  1037. #else
  1038. host_addr = (tcg_target_ulong)taddr;
  1039. assert(taddr == host_addr);
  1040. tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
  1041. #endif
  1042. tci_write_reg16s(t0, tmp16);
  1043. break;
  1044. #if TCG_TARGET_REG_BITS == 64
  1045. case INDEX_op_qemu_ld32u:
  1046. t0 = *tb_ptr++;
  1047. taddr = tci_read_ulong(&tb_ptr);
  1048. #ifdef CONFIG_SOFTMMU
  1049. tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
  1050. #else
  1051. host_addr = (tcg_target_ulong)taddr;
  1052. assert(taddr == host_addr);
  1053. tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
  1054. #endif
  1055. tci_write_reg32(t0, tmp32);
  1056. break;
  1057. case INDEX_op_qemu_ld32s:
  1058. t0 = *tb_ptr++;
  1059. taddr = tci_read_ulong(&tb_ptr);
  1060. #ifdef CONFIG_SOFTMMU
  1061. tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
  1062. #else
  1063. host_addr = (tcg_target_ulong)taddr;
  1064. assert(taddr == host_addr);
  1065. tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
  1066. #endif
  1067. tci_write_reg32s(t0, tmp32);
  1068. break;
  1069. #endif /* TCG_TARGET_REG_BITS == 64 */
  1070. case INDEX_op_qemu_ld32:
  1071. t0 = *tb_ptr++;
  1072. taddr = tci_read_ulong(&tb_ptr);
  1073. #ifdef CONFIG_SOFTMMU
  1074. tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
  1075. #else
  1076. host_addr = (tcg_target_ulong)taddr;
  1077. assert(taddr == host_addr);
  1078. tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
  1079. #endif
  1080. tci_write_reg32(t0, tmp32);
  1081. break;
  1082. case INDEX_op_qemu_ld64:
  1083. t0 = *tb_ptr++;
  1084. #if TCG_TARGET_REG_BITS == 32
  1085. t1 = *tb_ptr++;
  1086. #endif
  1087. taddr = tci_read_ulong(&tb_ptr);
  1088. #ifdef CONFIG_SOFTMMU
  1089. tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
  1090. #else
  1091. host_addr = (tcg_target_ulong)taddr;
  1092. assert(taddr == host_addr);
  1093. tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
  1094. #endif
  1095. tci_write_reg(t0, tmp64);
  1096. #if TCG_TARGET_REG_BITS == 32
  1097. tci_write_reg(t1, tmp64 >> 32);
  1098. #endif
  1099. break;
  1100. case INDEX_op_qemu_st8:
  1101. t0 = tci_read_r8(&tb_ptr);
  1102. taddr = tci_read_ulong(&tb_ptr);
  1103. #ifdef CONFIG_SOFTMMU
  1104. t2 = tci_read_i(&tb_ptr);
  1105. helper_stb_mmu(env, taddr, t0, t2);
  1106. #else
  1107. host_addr = (tcg_target_ulong)taddr;
  1108. assert(taddr == host_addr);
  1109. *(uint8_t *)(host_addr + GUEST_BASE) = t0;
  1110. #endif
  1111. break;
  1112. case INDEX_op_qemu_st16:
  1113. t0 = tci_read_r16(&tb_ptr);
  1114. taddr = tci_read_ulong(&tb_ptr);
  1115. #ifdef CONFIG_SOFTMMU
  1116. t2 = tci_read_i(&tb_ptr);
  1117. helper_stw_mmu(env, taddr, t0, t2);
  1118. #else
  1119. host_addr = (tcg_target_ulong)taddr;
  1120. assert(taddr == host_addr);
  1121. *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
  1122. #endif
  1123. break;
  1124. case INDEX_op_qemu_st32:
  1125. t0 = tci_read_r32(&tb_ptr);
  1126. taddr = tci_read_ulong(&tb_ptr);
  1127. #ifdef CONFIG_SOFTMMU
  1128. t2 = tci_read_i(&tb_ptr);
  1129. helper_stl_mmu(env, taddr, t0, t2);
  1130. #else
  1131. host_addr = (tcg_target_ulong)taddr;
  1132. assert(taddr == host_addr);
  1133. *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
  1134. #endif
  1135. break;
  1136. case INDEX_op_qemu_st64:
  1137. tmp64 = tci_read_r64(&tb_ptr);
  1138. taddr = tci_read_ulong(&tb_ptr);
  1139. #ifdef CONFIG_SOFTMMU
  1140. t2 = tci_read_i(&tb_ptr);
  1141. helper_stq_mmu(env, taddr, tmp64, t2);
  1142. #else
  1143. host_addr = (tcg_target_ulong)taddr;
  1144. assert(taddr == host_addr);
  1145. *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
  1146. #endif
  1147. break;
  1148. default:
  1149. TODO();
  1150. break;
  1151. }
  1152. assert(tb_ptr == old_code_ptr + op_size);
  1153. }
  1154. exit:
  1155. return next_tb;
  1156. }