tci.c 36 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237
  1. /*
  2. * Tiny Code Interpreter for QEMU
  3. *
  4. * Copyright (c) 2009, 2011 Stefan Weil
  5. *
  6. * This program is free software: you can redistribute it and/or modify
  7. * it under the terms of the GNU General Public License as published by
  8. * the Free Software Foundation, either version 2 of the License, or
  9. * (at your option) any later version.
  10. *
  11. * This program is distributed in the hope that it will be useful,
  12. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  14. * GNU General Public License for more details.
  15. *
  16. * You should have received a copy of the GNU General Public License
  17. * along with this program. If not, see <http://www.gnu.org/licenses/>.
  18. */
#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include <string.h>

#include "qemu-common.h"
#include "exec/exec-all.h"      /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"
  27. /* Marker for missing code. */
  28. #define TODO() \
  29. do { \
  30. fprintf(stderr, "TODO %s:%u: %s()\n", \
  31. __FILE__, __LINE__, __func__); \
  32. tcg_abort(); \
  33. } while (0)
  34. #if MAX_OPC_PARAM_IARGS != 5
  35. # error Fix needed, number of supported input arguments changed!
  36. #endif
  37. #if TCG_TARGET_REG_BITS == 32
  38. typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
  39. tcg_target_ulong, tcg_target_ulong,
  40. tcg_target_ulong, tcg_target_ulong,
  41. tcg_target_ulong, tcg_target_ulong,
  42. tcg_target_ulong, tcg_target_ulong);
  43. #else
  44. typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
  45. tcg_target_ulong, tcg_target_ulong,
  46. tcg_target_ulong);
  47. #endif
  48. /* Targets which don't use GETPC also don't need tci_tb_ptr
  49. which makes them a little faster. */
  50. #if defined(GETPC)
  51. uintptr_t tci_tb_ptr;
  52. #endif
  53. static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];
  54. static tcg_target_ulong tci_read_reg(TCGReg index)
  55. {
  56. assert(index < ARRAY_SIZE(tci_reg));
  57. return tci_reg[index];
  58. }
  59. #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
  60. static int8_t tci_read_reg8s(TCGReg index)
  61. {
  62. return (int8_t)tci_read_reg(index);
  63. }
  64. #endif
  65. #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
  66. static int16_t tci_read_reg16s(TCGReg index)
  67. {
  68. return (int16_t)tci_read_reg(index);
  69. }
  70. #endif
  71. #if TCG_TARGET_REG_BITS == 64
  72. static int32_t tci_read_reg32s(TCGReg index)
  73. {
  74. return (int32_t)tci_read_reg(index);
  75. }
  76. #endif
  77. static uint8_t tci_read_reg8(TCGReg index)
  78. {
  79. return (uint8_t)tci_read_reg(index);
  80. }
  81. static uint16_t tci_read_reg16(TCGReg index)
  82. {
  83. return (uint16_t)tci_read_reg(index);
  84. }
  85. static uint32_t tci_read_reg32(TCGReg index)
  86. {
  87. return (uint32_t)tci_read_reg(index);
  88. }
  89. #if TCG_TARGET_REG_BITS == 64
  90. static uint64_t tci_read_reg64(TCGReg index)
  91. {
  92. return tci_read_reg(index);
  93. }
  94. #endif
/* Store 'value' into bytecode register 'index'.
 * TCG_AREG0 (the env pointer) and TCG_REG_CALL_STACK (the interpreter
 * stack pointer) are initialised once per translation-block execution
 * and must never be clobbered by generated code, hence the extra
 * asserts beyond the range check. */
static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}
  102. static void tci_write_reg8s(TCGReg index, int8_t value)
  103. {
  104. tci_write_reg(index, value);
  105. }
  106. static void tci_write_reg16s(TCGReg index, int16_t value)
  107. {
  108. tci_write_reg(index, value);
  109. }
  110. #if TCG_TARGET_REG_BITS == 64
  111. static void tci_write_reg32s(TCGReg index, int32_t value)
  112. {
  113. tci_write_reg(index, value);
  114. }
  115. #endif
  116. static void tci_write_reg8(TCGReg index, uint8_t value)
  117. {
  118. tci_write_reg(index, value);
  119. }
  120. static void tci_write_reg16(TCGReg index, uint16_t value)
  121. {
  122. tci_write_reg(index, value);
  123. }
  124. static void tci_write_reg32(TCGReg index, uint32_t value)
  125. {
  126. tci_write_reg(index, value);
  127. }
  128. #if TCG_TARGET_REG_BITS == 32
  129. static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
  130. uint64_t value)
  131. {
  132. tci_write_reg(low_index, value);
  133. tci_write_reg(high_index, value >> 32);
  134. }
  135. #elif TCG_TARGET_REG_BITS == 64
  136. static void tci_write_reg64(TCGReg index, uint64_t value)
  137. {
  138. tci_write_reg(index, value);
  139. }
  140. #endif
  141. #if TCG_TARGET_REG_BITS == 32
  142. /* Create a 64 bit value from two 32 bit values. */
  143. static uint64_t tci_uint64(uint32_t high, uint32_t low)
  144. {
  145. return ((uint64_t)high << 32) + low;
  146. }
  147. #endif
  148. /* Read constant (native size) from bytecode. */
  149. static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
  150. {
  151. tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
  152. *tb_ptr += sizeof(value);
  153. return value;
  154. }
  155. /* Read unsigned constant (32 bit) from bytecode. */
  156. static uint32_t tci_read_i32(uint8_t **tb_ptr)
  157. {
  158. uint32_t value = *(uint32_t *)(*tb_ptr);
  159. *tb_ptr += sizeof(value);
  160. return value;
  161. }
  162. /* Read signed constant (32 bit) from bytecode. */
  163. static int32_t tci_read_s32(uint8_t **tb_ptr)
  164. {
  165. int32_t value = *(int32_t *)(*tb_ptr);
  166. *tb_ptr += sizeof(value);
  167. return value;
  168. }
  169. #if TCG_TARGET_REG_BITS == 64
  170. /* Read constant (64 bit) from bytecode. */
  171. static uint64_t tci_read_i64(uint8_t **tb_ptr)
  172. {
  173. uint64_t value = *(uint64_t *)(*tb_ptr);
  174. *tb_ptr += sizeof(value);
  175. return value;
  176. }
  177. #endif
  178. /* Read indexed register (native size) from bytecode. */
  179. static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
  180. {
  181. tcg_target_ulong value = tci_read_reg(**tb_ptr);
  182. *tb_ptr += 1;
  183. return value;
  184. }
  185. /* Read indexed register (8 bit) from bytecode. */
  186. static uint8_t tci_read_r8(uint8_t **tb_ptr)
  187. {
  188. uint8_t value = tci_read_reg8(**tb_ptr);
  189. *tb_ptr += 1;
  190. return value;
  191. }
  192. #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
  193. /* Read indexed register (8 bit signed) from bytecode. */
  194. static int8_t tci_read_r8s(uint8_t **tb_ptr)
  195. {
  196. int8_t value = tci_read_reg8s(**tb_ptr);
  197. *tb_ptr += 1;
  198. return value;
  199. }
  200. #endif
  201. /* Read indexed register (16 bit) from bytecode. */
  202. static uint16_t tci_read_r16(uint8_t **tb_ptr)
  203. {
  204. uint16_t value = tci_read_reg16(**tb_ptr);
  205. *tb_ptr += 1;
  206. return value;
  207. }
  208. #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
  209. /* Read indexed register (16 bit signed) from bytecode. */
  210. static int16_t tci_read_r16s(uint8_t **tb_ptr)
  211. {
  212. int16_t value = tci_read_reg16s(**tb_ptr);
  213. *tb_ptr += 1;
  214. return value;
  215. }
  216. #endif
  217. /* Read indexed register (32 bit) from bytecode. */
  218. static uint32_t tci_read_r32(uint8_t **tb_ptr)
  219. {
  220. uint32_t value = tci_read_reg32(**tb_ptr);
  221. *tb_ptr += 1;
  222. return value;
  223. }
  224. #if TCG_TARGET_REG_BITS == 32
  225. /* Read two indexed registers (2 * 32 bit) from bytecode. */
/* Read two indexed registers (2 * 32 bit) from bytecode.
 * The low word is encoded first.  It must be fetched into a local
 * temporary: C leaves the evaluation order of function arguments
 * unspecified, so calling tci_read_r32 twice inside the tci_uint64
 * argument list would read the words in an unpredictable order. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
  231. #elif TCG_TARGET_REG_BITS == 64
  232. /* Read indexed register (32 bit signed) from bytecode. */
  233. static int32_t tci_read_r32s(uint8_t **tb_ptr)
  234. {
  235. int32_t value = tci_read_reg32s(**tb_ptr);
  236. *tb_ptr += 1;
  237. return value;
  238. }
  239. /* Read indexed register (64 bit) from bytecode. */
  240. static uint64_t tci_read_r64(uint8_t **tb_ptr)
  241. {
  242. uint64_t value = tci_read_reg64(**tb_ptr);
  243. *tb_ptr += 1;
  244. return value;
  245. }
  246. #endif
  247. /* Read indexed register(s) with target address from bytecode. */
/* Read indexed register(s) with target address from bytecode.
 * When the guest address is wider than a host register (64 bit guest
 * on a 32 bit host), the address is encoded as two registers: low
 * word first, then high word.  The reads are sequenced as separate
 * statements so the order is well defined. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    /* NOTE(review): the shift by 32 assumes TCG_TARGET_REG_BITS == 32
       whenever this branch is compiled in -- confirm */
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
  256. /* Read indexed register or constant (native size) from bytecode. */
  257. static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
  258. {
  259. tcg_target_ulong value;
  260. TCGReg r = **tb_ptr;
  261. *tb_ptr += 1;
  262. if (r == TCG_CONST) {
  263. value = tci_read_i(tb_ptr);
  264. } else {
  265. value = tci_read_reg(r);
  266. }
  267. return value;
  268. }
  269. /* Read indexed register or constant (32 bit) from bytecode. */
  270. static uint32_t tci_read_ri32(uint8_t **tb_ptr)
  271. {
  272. uint32_t value;
  273. TCGReg r = **tb_ptr;
  274. *tb_ptr += 1;
  275. if (r == TCG_CONST) {
  276. value = tci_read_i32(tb_ptr);
  277. } else {
  278. value = tci_read_reg32(r);
  279. }
  280. return value;
  281. }
  282. #if TCG_TARGET_REG_BITS == 32
  283. /* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
/* Read two indexed registers or constants (2 * 32 bit) from bytecode.
 * The low word is encoded first and must be fetched into a local
 * temporary, because argument evaluation order is unspecified in C. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
  289. #elif TCG_TARGET_REG_BITS == 64
  290. /* Read indexed register or constant (64 bit) from bytecode. */
  291. static uint64_t tci_read_ri64(uint8_t **tb_ptr)
  292. {
  293. uint64_t value;
  294. TCGReg r = **tb_ptr;
  295. *tb_ptr += 1;
  296. if (r == TCG_CONST) {
  297. value = tci_read_i64(tb_ptr);
  298. } else {
  299. value = tci_read_reg64(r);
  300. }
  301. return value;
  302. }
  303. #endif
  304. static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
  305. {
  306. tcg_target_ulong label = tci_read_i(tb_ptr);
  307. assert(label != 0);
  308. return label;
  309. }
  310. static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
  311. {
  312. bool result = false;
  313. int32_t i0 = u0;
  314. int32_t i1 = u1;
  315. switch (condition) {
  316. case TCG_COND_EQ:
  317. result = (u0 == u1);
  318. break;
  319. case TCG_COND_NE:
  320. result = (u0 != u1);
  321. break;
  322. case TCG_COND_LT:
  323. result = (i0 < i1);
  324. break;
  325. case TCG_COND_GE:
  326. result = (i0 >= i1);
  327. break;
  328. case TCG_COND_LE:
  329. result = (i0 <= i1);
  330. break;
  331. case TCG_COND_GT:
  332. result = (i0 > i1);
  333. break;
  334. case TCG_COND_LTU:
  335. result = (u0 < u1);
  336. break;
  337. case TCG_COND_GEU:
  338. result = (u0 >= u1);
  339. break;
  340. case TCG_COND_LEU:
  341. result = (u0 <= u1);
  342. break;
  343. case TCG_COND_GTU:
  344. result = (u0 > u1);
  345. break;
  346. default:
  347. TODO();
  348. }
  349. return result;
  350. }
  351. static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
  352. {
  353. bool result = false;
  354. int64_t i0 = u0;
  355. int64_t i1 = u1;
  356. switch (condition) {
  357. case TCG_COND_EQ:
  358. result = (u0 == u1);
  359. break;
  360. case TCG_COND_NE:
  361. result = (u0 != u1);
  362. break;
  363. case TCG_COND_LT:
  364. result = (i0 < i1);
  365. break;
  366. case TCG_COND_GE:
  367. result = (i0 >= i1);
  368. break;
  369. case TCG_COND_LE:
  370. result = (i0 <= i1);
  371. break;
  372. case TCG_COND_GT:
  373. result = (i0 > i1);
  374. break;
  375. case TCG_COND_LTU:
  376. result = (u0 < u1);
  377. break;
  378. case TCG_COND_GEU:
  379. result = (u0 >= u1);
  380. break;
  381. case TCG_COND_LEU:
  382. result = (u0 <= u1);
  383. break;
  384. case TCG_COND_GTU:
  385. result = (u0 > u1);
  386. break;
  387. default:
  388. TODO();
  389. }
  390. return result;
  391. }
  392. /* Interpret pseudo code in tb. */
  393. uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
  394. {
  395. long tcg_temps[CPU_TEMP_BUF_NLONGS];
  396. uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
  397. uintptr_t next_tb = 0;
  398. tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
  399. tci_reg[TCG_REG_CALL_STACK] = sp_value;
  400. assert(tb_ptr);
  401. for (;;) {
  402. TCGOpcode opc = tb_ptr[0];
  403. #if !defined(NDEBUG)
  404. uint8_t op_size = tb_ptr[1];
  405. uint8_t *old_code_ptr = tb_ptr;
  406. #endif
  407. tcg_target_ulong t0;
  408. tcg_target_ulong t1;
  409. tcg_target_ulong t2;
  410. tcg_target_ulong label;
  411. TCGCond condition;
  412. target_ulong taddr;
  413. #ifndef CONFIG_SOFTMMU
  414. tcg_target_ulong host_addr;
  415. #endif
  416. uint8_t tmp8;
  417. uint16_t tmp16;
  418. uint32_t tmp32;
  419. uint64_t tmp64;
  420. #if TCG_TARGET_REG_BITS == 32
  421. uint64_t v64;
  422. #endif
  423. #if defined(GETPC)
  424. tci_tb_ptr = (uintptr_t)tb_ptr;
  425. #endif
  426. /* Skip opcode and size entry. */
  427. tb_ptr += 2;
  428. switch (opc) {
  429. case INDEX_op_end:
  430. case INDEX_op_nop:
  431. break;
  432. case INDEX_op_nop1:
  433. case INDEX_op_nop2:
  434. case INDEX_op_nop3:
  435. case INDEX_op_nopn:
  436. case INDEX_op_discard:
  437. TODO();
  438. break;
  439. case INDEX_op_set_label:
  440. TODO();
  441. break;
  442. case INDEX_op_call:
  443. t0 = tci_read_ri(&tb_ptr);
  444. #if TCG_TARGET_REG_BITS == 32
  445. tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
  446. tci_read_reg(TCG_REG_R1),
  447. tci_read_reg(TCG_REG_R2),
  448. tci_read_reg(TCG_REG_R3),
  449. tci_read_reg(TCG_REG_R5),
  450. tci_read_reg(TCG_REG_R6),
  451. tci_read_reg(TCG_REG_R7),
  452. tci_read_reg(TCG_REG_R8),
  453. tci_read_reg(TCG_REG_R9),
  454. tci_read_reg(TCG_REG_R10));
  455. tci_write_reg(TCG_REG_R0, tmp64);
  456. tci_write_reg(TCG_REG_R1, tmp64 >> 32);
  457. #else
  458. tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
  459. tci_read_reg(TCG_REG_R1),
  460. tci_read_reg(TCG_REG_R2),
  461. tci_read_reg(TCG_REG_R3),
  462. tci_read_reg(TCG_REG_R5));
  463. tci_write_reg(TCG_REG_R0, tmp64);
  464. #endif
  465. break;
  466. case INDEX_op_br:
  467. label = tci_read_label(&tb_ptr);
  468. assert(tb_ptr == old_code_ptr + op_size);
  469. tb_ptr = (uint8_t *)label;
  470. continue;
  471. case INDEX_op_setcond_i32:
  472. t0 = *tb_ptr++;
  473. t1 = tci_read_r32(&tb_ptr);
  474. t2 = tci_read_ri32(&tb_ptr);
  475. condition = *tb_ptr++;
  476. tci_write_reg32(t0, tci_compare32(t1, t2, condition));
  477. break;
  478. #if TCG_TARGET_REG_BITS == 32
  479. case INDEX_op_setcond2_i32:
  480. t0 = *tb_ptr++;
  481. tmp64 = tci_read_r64(&tb_ptr);
  482. v64 = tci_read_ri64(&tb_ptr);
  483. condition = *tb_ptr++;
  484. tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
  485. break;
  486. #elif TCG_TARGET_REG_BITS == 64
  487. case INDEX_op_setcond_i64:
  488. t0 = *tb_ptr++;
  489. t1 = tci_read_r64(&tb_ptr);
  490. t2 = tci_read_ri64(&tb_ptr);
  491. condition = *tb_ptr++;
  492. tci_write_reg64(t0, tci_compare64(t1, t2, condition));
  493. break;
  494. #endif
  495. case INDEX_op_mov_i32:
  496. t0 = *tb_ptr++;
  497. t1 = tci_read_r32(&tb_ptr);
  498. tci_write_reg32(t0, t1);
  499. break;
  500. case INDEX_op_movi_i32:
  501. t0 = *tb_ptr++;
  502. t1 = tci_read_i32(&tb_ptr);
  503. tci_write_reg32(t0, t1);
  504. break;
  505. /* Load/store operations (32 bit). */
  506. case INDEX_op_ld8u_i32:
  507. t0 = *tb_ptr++;
  508. t1 = tci_read_r(&tb_ptr);
  509. t2 = tci_read_s32(&tb_ptr);
  510. tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
  511. break;
  512. case INDEX_op_ld8s_i32:
  513. case INDEX_op_ld16u_i32:
  514. TODO();
  515. break;
  516. case INDEX_op_ld16s_i32:
  517. TODO();
  518. break;
  519. case INDEX_op_ld_i32:
  520. t0 = *tb_ptr++;
  521. t1 = tci_read_r(&tb_ptr);
  522. t2 = tci_read_s32(&tb_ptr);
  523. tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
  524. break;
  525. case INDEX_op_st8_i32:
  526. t0 = tci_read_r8(&tb_ptr);
  527. t1 = tci_read_r(&tb_ptr);
  528. t2 = tci_read_s32(&tb_ptr);
  529. *(uint8_t *)(t1 + t2) = t0;
  530. break;
  531. case INDEX_op_st16_i32:
  532. t0 = tci_read_r16(&tb_ptr);
  533. t1 = tci_read_r(&tb_ptr);
  534. t2 = tci_read_s32(&tb_ptr);
  535. *(uint16_t *)(t1 + t2) = t0;
  536. break;
  537. case INDEX_op_st_i32:
  538. t0 = tci_read_r32(&tb_ptr);
  539. t1 = tci_read_r(&tb_ptr);
  540. t2 = tci_read_s32(&tb_ptr);
  541. assert(t1 != sp_value || (int32_t)t2 < 0);
  542. *(uint32_t *)(t1 + t2) = t0;
  543. break;
  544. /* Arithmetic operations (32 bit). */
  545. case INDEX_op_add_i32:
  546. t0 = *tb_ptr++;
  547. t1 = tci_read_ri32(&tb_ptr);
  548. t2 = tci_read_ri32(&tb_ptr);
  549. tci_write_reg32(t0, t1 + t2);
  550. break;
  551. case INDEX_op_sub_i32:
  552. t0 = *tb_ptr++;
  553. t1 = tci_read_ri32(&tb_ptr);
  554. t2 = tci_read_ri32(&tb_ptr);
  555. tci_write_reg32(t0, t1 - t2);
  556. break;
  557. case INDEX_op_mul_i32:
  558. t0 = *tb_ptr++;
  559. t1 = tci_read_ri32(&tb_ptr);
  560. t2 = tci_read_ri32(&tb_ptr);
  561. tci_write_reg32(t0, t1 * t2);
  562. break;
  563. #if TCG_TARGET_HAS_div_i32
  564. case INDEX_op_div_i32:
  565. t0 = *tb_ptr++;
  566. t1 = tci_read_ri32(&tb_ptr);
  567. t2 = tci_read_ri32(&tb_ptr);
  568. tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
  569. break;
  570. case INDEX_op_divu_i32:
  571. t0 = *tb_ptr++;
  572. t1 = tci_read_ri32(&tb_ptr);
  573. t2 = tci_read_ri32(&tb_ptr);
  574. tci_write_reg32(t0, t1 / t2);
  575. break;
  576. case INDEX_op_rem_i32:
  577. t0 = *tb_ptr++;
  578. t1 = tci_read_ri32(&tb_ptr);
  579. t2 = tci_read_ri32(&tb_ptr);
  580. tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
  581. break;
  582. case INDEX_op_remu_i32:
  583. t0 = *tb_ptr++;
  584. t1 = tci_read_ri32(&tb_ptr);
  585. t2 = tci_read_ri32(&tb_ptr);
  586. tci_write_reg32(t0, t1 % t2);
  587. break;
  588. #elif TCG_TARGET_HAS_div2_i32
  589. case INDEX_op_div2_i32:
  590. case INDEX_op_divu2_i32:
  591. TODO();
  592. break;
  593. #endif
  594. case INDEX_op_and_i32:
  595. t0 = *tb_ptr++;
  596. t1 = tci_read_ri32(&tb_ptr);
  597. t2 = tci_read_ri32(&tb_ptr);
  598. tci_write_reg32(t0, t1 & t2);
  599. break;
  600. case INDEX_op_or_i32:
  601. t0 = *tb_ptr++;
  602. t1 = tci_read_ri32(&tb_ptr);
  603. t2 = tci_read_ri32(&tb_ptr);
  604. tci_write_reg32(t0, t1 | t2);
  605. break;
  606. case INDEX_op_xor_i32:
  607. t0 = *tb_ptr++;
  608. t1 = tci_read_ri32(&tb_ptr);
  609. t2 = tci_read_ri32(&tb_ptr);
  610. tci_write_reg32(t0, t1 ^ t2);
  611. break;
  612. /* Shift/rotate operations (32 bit). */
  613. case INDEX_op_shl_i32:
  614. t0 = *tb_ptr++;
  615. t1 = tci_read_ri32(&tb_ptr);
  616. t2 = tci_read_ri32(&tb_ptr);
  617. tci_write_reg32(t0, t1 << t2);
  618. break;
  619. case INDEX_op_shr_i32:
  620. t0 = *tb_ptr++;
  621. t1 = tci_read_ri32(&tb_ptr);
  622. t2 = tci_read_ri32(&tb_ptr);
  623. tci_write_reg32(t0, t1 >> t2);
  624. break;
  625. case INDEX_op_sar_i32:
  626. t0 = *tb_ptr++;
  627. t1 = tci_read_ri32(&tb_ptr);
  628. t2 = tci_read_ri32(&tb_ptr);
  629. tci_write_reg32(t0, ((int32_t)t1 >> t2));
  630. break;
  631. #if TCG_TARGET_HAS_rot_i32
  632. case INDEX_op_rotl_i32:
  633. t0 = *tb_ptr++;
  634. t1 = tci_read_ri32(&tb_ptr);
  635. t2 = tci_read_ri32(&tb_ptr);
  636. tci_write_reg32(t0, rol32(t1, t2));
  637. break;
  638. case INDEX_op_rotr_i32:
  639. t0 = *tb_ptr++;
  640. t1 = tci_read_ri32(&tb_ptr);
  641. t2 = tci_read_ri32(&tb_ptr);
  642. tci_write_reg32(t0, ror32(t1, t2));
  643. break;
  644. #endif
  645. #if TCG_TARGET_HAS_deposit_i32
  646. case INDEX_op_deposit_i32:
  647. t0 = *tb_ptr++;
  648. t1 = tci_read_r32(&tb_ptr);
  649. t2 = tci_read_r32(&tb_ptr);
  650. tmp16 = *tb_ptr++;
  651. tmp8 = *tb_ptr++;
  652. tmp32 = (((1 << tmp8) - 1) << tmp16);
  653. tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
  654. break;
  655. #endif
  656. case INDEX_op_brcond_i32:
  657. t0 = tci_read_r32(&tb_ptr);
  658. t1 = tci_read_ri32(&tb_ptr);
  659. condition = *tb_ptr++;
  660. label = tci_read_label(&tb_ptr);
  661. if (tci_compare32(t0, t1, condition)) {
  662. assert(tb_ptr == old_code_ptr + op_size);
  663. tb_ptr = (uint8_t *)label;
  664. continue;
  665. }
  666. break;
  667. #if TCG_TARGET_REG_BITS == 32
  668. case INDEX_op_add2_i32:
  669. t0 = *tb_ptr++;
  670. t1 = *tb_ptr++;
  671. tmp64 = tci_read_r64(&tb_ptr);
  672. tmp64 += tci_read_r64(&tb_ptr);
  673. tci_write_reg64(t1, t0, tmp64);
  674. break;
  675. case INDEX_op_sub2_i32:
  676. t0 = *tb_ptr++;
  677. t1 = *tb_ptr++;
  678. tmp64 = tci_read_r64(&tb_ptr);
  679. tmp64 -= tci_read_r64(&tb_ptr);
  680. tci_write_reg64(t1, t0, tmp64);
  681. break;
  682. case INDEX_op_brcond2_i32:
  683. tmp64 = tci_read_r64(&tb_ptr);
  684. v64 = tci_read_ri64(&tb_ptr);
  685. condition = *tb_ptr++;
  686. label = tci_read_label(&tb_ptr);
  687. if (tci_compare64(tmp64, v64, condition)) {
  688. assert(tb_ptr == old_code_ptr + op_size);
  689. tb_ptr = (uint8_t *)label;
  690. continue;
  691. }
  692. break;
  693. case INDEX_op_mulu2_i32:
  694. t0 = *tb_ptr++;
  695. t1 = *tb_ptr++;
  696. t2 = tci_read_r32(&tb_ptr);
  697. tmp64 = tci_read_r32(&tb_ptr);
  698. tci_write_reg64(t1, t0, t2 * tmp64);
  699. break;
  700. #endif /* TCG_TARGET_REG_BITS == 32 */
  701. #if TCG_TARGET_HAS_ext8s_i32
  702. case INDEX_op_ext8s_i32:
  703. t0 = *tb_ptr++;
  704. t1 = tci_read_r8s(&tb_ptr);
  705. tci_write_reg32(t0, t1);
  706. break;
  707. #endif
  708. #if TCG_TARGET_HAS_ext16s_i32
  709. case INDEX_op_ext16s_i32:
  710. t0 = *tb_ptr++;
  711. t1 = tci_read_r16s(&tb_ptr);
  712. tci_write_reg32(t0, t1);
  713. break;
  714. #endif
  715. #if TCG_TARGET_HAS_ext8u_i32
  716. case INDEX_op_ext8u_i32:
  717. t0 = *tb_ptr++;
  718. t1 = tci_read_r8(&tb_ptr);
  719. tci_write_reg32(t0, t1);
  720. break;
  721. #endif
  722. #if TCG_TARGET_HAS_ext16u_i32
  723. case INDEX_op_ext16u_i32:
  724. t0 = *tb_ptr++;
  725. t1 = tci_read_r16(&tb_ptr);
  726. tci_write_reg32(t0, t1);
  727. break;
  728. #endif
  729. #if TCG_TARGET_HAS_bswap16_i32
  730. case INDEX_op_bswap16_i32:
  731. t0 = *tb_ptr++;
  732. t1 = tci_read_r16(&tb_ptr);
  733. tci_write_reg32(t0, bswap16(t1));
  734. break;
  735. #endif
  736. #if TCG_TARGET_HAS_bswap32_i32
  737. case INDEX_op_bswap32_i32:
  738. t0 = *tb_ptr++;
  739. t1 = tci_read_r32(&tb_ptr);
  740. tci_write_reg32(t0, bswap32(t1));
  741. break;
  742. #endif
  743. #if TCG_TARGET_HAS_not_i32
  744. case INDEX_op_not_i32:
  745. t0 = *tb_ptr++;
  746. t1 = tci_read_r32(&tb_ptr);
  747. tci_write_reg32(t0, ~t1);
  748. break;
  749. #endif
  750. #if TCG_TARGET_HAS_neg_i32
  751. case INDEX_op_neg_i32:
  752. t0 = *tb_ptr++;
  753. t1 = tci_read_r32(&tb_ptr);
  754. tci_write_reg32(t0, -t1);
  755. break;
  756. #endif
  757. #if TCG_TARGET_REG_BITS == 64
  758. case INDEX_op_mov_i64:
  759. t0 = *tb_ptr++;
  760. t1 = tci_read_r64(&tb_ptr);
  761. tci_write_reg64(t0, t1);
  762. break;
  763. case INDEX_op_movi_i64:
  764. t0 = *tb_ptr++;
  765. t1 = tci_read_i64(&tb_ptr);
  766. tci_write_reg64(t0, t1);
  767. break;
  768. /* Load/store operations (64 bit). */
  769. case INDEX_op_ld8u_i64:
  770. t0 = *tb_ptr++;
  771. t1 = tci_read_r(&tb_ptr);
  772. t2 = tci_read_s32(&tb_ptr);
  773. tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
  774. break;
  775. case INDEX_op_ld8s_i64:
  776. case INDEX_op_ld16u_i64:
  777. case INDEX_op_ld16s_i64:
  778. TODO();
  779. break;
  780. case INDEX_op_ld32u_i64:
  781. t0 = *tb_ptr++;
  782. t1 = tci_read_r(&tb_ptr);
  783. t2 = tci_read_s32(&tb_ptr);
  784. tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
  785. break;
  786. case INDEX_op_ld32s_i64:
  787. t0 = *tb_ptr++;
  788. t1 = tci_read_r(&tb_ptr);
  789. t2 = tci_read_s32(&tb_ptr);
  790. tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
  791. break;
  792. case INDEX_op_ld_i64:
  793. t0 = *tb_ptr++;
  794. t1 = tci_read_r(&tb_ptr);
  795. t2 = tci_read_s32(&tb_ptr);
  796. tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
  797. break;
  798. case INDEX_op_st8_i64:
  799. t0 = tci_read_r8(&tb_ptr);
  800. t1 = tci_read_r(&tb_ptr);
  801. t2 = tci_read_s32(&tb_ptr);
  802. *(uint8_t *)(t1 + t2) = t0;
  803. break;
  804. case INDEX_op_st16_i64:
  805. t0 = tci_read_r16(&tb_ptr);
  806. t1 = tci_read_r(&tb_ptr);
  807. t2 = tci_read_s32(&tb_ptr);
  808. *(uint16_t *)(t1 + t2) = t0;
  809. break;
  810. case INDEX_op_st32_i64:
  811. t0 = tci_read_r32(&tb_ptr);
  812. t1 = tci_read_r(&tb_ptr);
  813. t2 = tci_read_s32(&tb_ptr);
  814. *(uint32_t *)(t1 + t2) = t0;
  815. break;
  816. case INDEX_op_st_i64:
  817. t0 = tci_read_r64(&tb_ptr);
  818. t1 = tci_read_r(&tb_ptr);
  819. t2 = tci_read_s32(&tb_ptr);
  820. assert(t1 != sp_value || (int32_t)t2 < 0);
  821. *(uint64_t *)(t1 + t2) = t0;
  822. break;
  823. /* Arithmetic operations (64 bit). */
  824. case INDEX_op_add_i64:
  825. t0 = *tb_ptr++;
  826. t1 = tci_read_ri64(&tb_ptr);
  827. t2 = tci_read_ri64(&tb_ptr);
  828. tci_write_reg64(t0, t1 + t2);
  829. break;
  830. case INDEX_op_sub_i64:
  831. t0 = *tb_ptr++;
  832. t1 = tci_read_ri64(&tb_ptr);
  833. t2 = tci_read_ri64(&tb_ptr);
  834. tci_write_reg64(t0, t1 - t2);
  835. break;
  836. case INDEX_op_mul_i64:
  837. t0 = *tb_ptr++;
  838. t1 = tci_read_ri64(&tb_ptr);
  839. t2 = tci_read_ri64(&tb_ptr);
  840. tci_write_reg64(t0, t1 * t2);
  841. break;
  842. #if TCG_TARGET_HAS_div_i64
  843. case INDEX_op_div_i64:
  844. case INDEX_op_divu_i64:
  845. case INDEX_op_rem_i64:
  846. case INDEX_op_remu_i64:
  847. TODO();
  848. break;
  849. #elif TCG_TARGET_HAS_div2_i64
  850. case INDEX_op_div2_i64:
  851. case INDEX_op_divu2_i64:
  852. TODO();
  853. break;
  854. #endif
  855. case INDEX_op_and_i64:
  856. t0 = *tb_ptr++;
  857. t1 = tci_read_ri64(&tb_ptr);
  858. t2 = tci_read_ri64(&tb_ptr);
  859. tci_write_reg64(t0, t1 & t2);
  860. break;
  861. case INDEX_op_or_i64:
  862. t0 = *tb_ptr++;
  863. t1 = tci_read_ri64(&tb_ptr);
  864. t2 = tci_read_ri64(&tb_ptr);
  865. tci_write_reg64(t0, t1 | t2);
  866. break;
  867. case INDEX_op_xor_i64:
  868. t0 = *tb_ptr++;
  869. t1 = tci_read_ri64(&tb_ptr);
  870. t2 = tci_read_ri64(&tb_ptr);
  871. tci_write_reg64(t0, t1 ^ t2);
  872. break;
  873. /* Shift/rotate operations (64 bit). */
  874. case INDEX_op_shl_i64:
  875. t0 = *tb_ptr++;
  876. t1 = tci_read_ri64(&tb_ptr);
  877. t2 = tci_read_ri64(&tb_ptr);
  878. tci_write_reg64(t0, t1 << t2);
  879. break;
  880. case INDEX_op_shr_i64:
  881. t0 = *tb_ptr++;
  882. t1 = tci_read_ri64(&tb_ptr);
  883. t2 = tci_read_ri64(&tb_ptr);
  884. tci_write_reg64(t0, t1 >> t2);
  885. break;
  886. case INDEX_op_sar_i64:
  887. t0 = *tb_ptr++;
  888. t1 = tci_read_ri64(&tb_ptr);
  889. t2 = tci_read_ri64(&tb_ptr);
  890. tci_write_reg64(t0, ((int64_t)t1 >> t2));
  891. break;
  892. #if TCG_TARGET_HAS_rot_i64
  893. case INDEX_op_rotl_i64:
  894. t0 = *tb_ptr++;
  895. t1 = tci_read_ri64(&tb_ptr);
  896. t2 = tci_read_ri64(&tb_ptr);
  897. tci_write_reg64(t0, rol64(t1, t2));
  898. break;
  899. case INDEX_op_rotr_i64:
  900. t0 = *tb_ptr++;
  901. t1 = tci_read_ri64(&tb_ptr);
  902. t2 = tci_read_ri64(&tb_ptr);
  903. tci_write_reg64(t0, ror64(t1, t2));
  904. break;
  905. #endif
  906. #if TCG_TARGET_HAS_deposit_i64
  907. case INDEX_op_deposit_i64:
  908. t0 = *tb_ptr++;
  909. t1 = tci_read_r64(&tb_ptr);
  910. t2 = tci_read_r64(&tb_ptr);
  911. tmp16 = *tb_ptr++;
  912. tmp8 = *tb_ptr++;
  913. tmp64 = (((1ULL << tmp8) - 1) << tmp16);
  914. tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
  915. break;
  916. #endif
  917. case INDEX_op_brcond_i64:
  918. t0 = tci_read_r64(&tb_ptr);
  919. t1 = tci_read_ri64(&tb_ptr);
  920. condition = *tb_ptr++;
  921. label = tci_read_label(&tb_ptr);
  922. if (tci_compare64(t0, t1, condition)) {
  923. assert(tb_ptr == old_code_ptr + op_size);
  924. tb_ptr = (uint8_t *)label;
  925. continue;
  926. }
  927. break;
  928. #if TCG_TARGET_HAS_ext8u_i64
  929. case INDEX_op_ext8u_i64:
  930. t0 = *tb_ptr++;
  931. t1 = tci_read_r8(&tb_ptr);
  932. tci_write_reg64(t0, t1);
  933. break;
  934. #endif
  935. #if TCG_TARGET_HAS_ext8s_i64
  936. case INDEX_op_ext8s_i64:
  937. t0 = *tb_ptr++;
  938. t1 = tci_read_r8s(&tb_ptr);
  939. tci_write_reg64(t0, t1);
  940. break;
  941. #endif
  942. #if TCG_TARGET_HAS_ext16s_i64
  943. case INDEX_op_ext16s_i64:
  944. t0 = *tb_ptr++;
  945. t1 = tci_read_r16s(&tb_ptr);
  946. tci_write_reg64(t0, t1);
  947. break;
  948. #endif
  949. #if TCG_TARGET_HAS_ext16u_i64
  950. case INDEX_op_ext16u_i64:
  951. t0 = *tb_ptr++;
  952. t1 = tci_read_r16(&tb_ptr);
  953. tci_write_reg64(t0, t1);
  954. break;
  955. #endif
  956. #if TCG_TARGET_HAS_ext32s_i64
  957. case INDEX_op_ext32s_i64:
  958. t0 = *tb_ptr++;
  959. t1 = tci_read_r32s(&tb_ptr);
  960. tci_write_reg64(t0, t1);
  961. break;
  962. #endif
  963. #if TCG_TARGET_HAS_ext32u_i64
  964. case INDEX_op_ext32u_i64:
  965. t0 = *tb_ptr++;
  966. t1 = tci_read_r32(&tb_ptr);
  967. tci_write_reg64(t0, t1);
  968. break;
  969. #endif
  970. #if TCG_TARGET_HAS_bswap16_i64
  971. case INDEX_op_bswap16_i64:
  972. TODO();
  973. t0 = *tb_ptr++;
  974. t1 = tci_read_r16(&tb_ptr);
  975. tci_write_reg64(t0, bswap16(t1));
  976. break;
  977. #endif
  978. #if TCG_TARGET_HAS_bswap32_i64
  979. case INDEX_op_bswap32_i64:
  980. t0 = *tb_ptr++;
  981. t1 = tci_read_r32(&tb_ptr);
  982. tci_write_reg64(t0, bswap32(t1));
  983. break;
  984. #endif
  985. #if TCG_TARGET_HAS_bswap64_i64
  986. case INDEX_op_bswap64_i64:
  987. t0 = *tb_ptr++;
  988. t1 = tci_read_r64(&tb_ptr);
  989. tci_write_reg64(t0, bswap64(t1));
  990. break;
  991. #endif
  992. #if TCG_TARGET_HAS_not_i64
  993. case INDEX_op_not_i64:
  994. t0 = *tb_ptr++;
  995. t1 = tci_read_r64(&tb_ptr);
  996. tci_write_reg64(t0, ~t1);
  997. break;
  998. #endif
  999. #if TCG_TARGET_HAS_neg_i64
  1000. case INDEX_op_neg_i64:
  1001. t0 = *tb_ptr++;
  1002. t1 = tci_read_r64(&tb_ptr);
  1003. tci_write_reg64(t0, -t1);
  1004. break;
  1005. #endif
  1006. #endif /* TCG_TARGET_REG_BITS == 64 */
  1007. /* QEMU specific operations. */
  1008. #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
  1009. case INDEX_op_debug_insn_start:
  1010. TODO();
  1011. break;
  1012. #else
  1013. case INDEX_op_debug_insn_start:
  1014. TODO();
  1015. break;
  1016. #endif
  1017. case INDEX_op_exit_tb:
  1018. next_tb = *(uint64_t *)tb_ptr;
  1019. goto exit;
  1020. break;
  1021. case INDEX_op_goto_tb:
  1022. t0 = tci_read_i32(&tb_ptr);
  1023. assert(tb_ptr == old_code_ptr + op_size);
  1024. tb_ptr += (int32_t)t0;
  1025. continue;
  1026. case INDEX_op_qemu_ld8u:
  1027. t0 = *tb_ptr++;
  1028. taddr = tci_read_ulong(&tb_ptr);
  1029. #ifdef CONFIG_SOFTMMU
  1030. tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
  1031. #else
  1032. host_addr = (tcg_target_ulong)taddr;
  1033. tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
  1034. #endif
  1035. tci_write_reg8(t0, tmp8);
  1036. break;
  1037. case INDEX_op_qemu_ld8s:
  1038. t0 = *tb_ptr++;
  1039. taddr = tci_read_ulong(&tb_ptr);
  1040. #ifdef CONFIG_SOFTMMU
  1041. tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
  1042. #else
  1043. host_addr = (tcg_target_ulong)taddr;
  1044. tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
  1045. #endif
  1046. tci_write_reg8s(t0, tmp8);
  1047. break;
  1048. case INDEX_op_qemu_ld16u:
  1049. t0 = *tb_ptr++;
  1050. taddr = tci_read_ulong(&tb_ptr);
  1051. #ifdef CONFIG_SOFTMMU
  1052. tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
  1053. #else
  1054. host_addr = (tcg_target_ulong)taddr;
  1055. tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
  1056. #endif
  1057. tci_write_reg16(t0, tmp16);
  1058. break;
  1059. case INDEX_op_qemu_ld16s:
  1060. t0 = *tb_ptr++;
  1061. taddr = tci_read_ulong(&tb_ptr);
  1062. #ifdef CONFIG_SOFTMMU
  1063. tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
  1064. #else
  1065. host_addr = (tcg_target_ulong)taddr;
  1066. tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
  1067. #endif
  1068. tci_write_reg16s(t0, tmp16);
  1069. break;
  1070. #if TCG_TARGET_REG_BITS == 64
  1071. case INDEX_op_qemu_ld32u:
  1072. t0 = *tb_ptr++;
  1073. taddr = tci_read_ulong(&tb_ptr);
  1074. #ifdef CONFIG_SOFTMMU
  1075. tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
  1076. #else
  1077. host_addr = (tcg_target_ulong)taddr;
  1078. tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
  1079. #endif
  1080. tci_write_reg32(t0, tmp32);
  1081. break;
  1082. case INDEX_op_qemu_ld32s:
  1083. t0 = *tb_ptr++;
  1084. taddr = tci_read_ulong(&tb_ptr);
  1085. #ifdef CONFIG_SOFTMMU
  1086. tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
  1087. #else
  1088. host_addr = (tcg_target_ulong)taddr;
  1089. tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
  1090. #endif
  1091. tci_write_reg32s(t0, tmp32);
  1092. break;
  1093. #endif /* TCG_TARGET_REG_BITS == 64 */
  1094. case INDEX_op_qemu_ld32:
  1095. t0 = *tb_ptr++;
  1096. taddr = tci_read_ulong(&tb_ptr);
  1097. #ifdef CONFIG_SOFTMMU
  1098. tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
  1099. #else
  1100. host_addr = (tcg_target_ulong)taddr;
  1101. tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
  1102. #endif
  1103. tci_write_reg32(t0, tmp32);
  1104. break;
  1105. case INDEX_op_qemu_ld64:
  1106. t0 = *tb_ptr++;
  1107. #if TCG_TARGET_REG_BITS == 32
  1108. t1 = *tb_ptr++;
  1109. #endif
  1110. taddr = tci_read_ulong(&tb_ptr);
  1111. #ifdef CONFIG_SOFTMMU
  1112. tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
  1113. #else
  1114. host_addr = (tcg_target_ulong)taddr;
  1115. tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
  1116. #endif
  1117. tci_write_reg(t0, tmp64);
  1118. #if TCG_TARGET_REG_BITS == 32
  1119. tci_write_reg(t1, tmp64 >> 32);
  1120. #endif
  1121. break;
  1122. case INDEX_op_qemu_st8:
  1123. t0 = tci_read_r8(&tb_ptr);
  1124. taddr = tci_read_ulong(&tb_ptr);
  1125. #ifdef CONFIG_SOFTMMU
  1126. t2 = tci_read_i(&tb_ptr);
  1127. helper_stb_mmu(env, taddr, t0, t2);
  1128. #else
  1129. host_addr = (tcg_target_ulong)taddr;
  1130. *(uint8_t *)(host_addr + GUEST_BASE) = t0;
  1131. #endif
  1132. break;
  1133. case INDEX_op_qemu_st16:
  1134. t0 = tci_read_r16(&tb_ptr);
  1135. taddr = tci_read_ulong(&tb_ptr);
  1136. #ifdef CONFIG_SOFTMMU
  1137. t2 = tci_read_i(&tb_ptr);
  1138. helper_stw_mmu(env, taddr, t0, t2);
  1139. #else
  1140. host_addr = (tcg_target_ulong)taddr;
  1141. *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
  1142. #endif
  1143. break;
  1144. case INDEX_op_qemu_st32:
  1145. t0 = tci_read_r32(&tb_ptr);
  1146. taddr = tci_read_ulong(&tb_ptr);
  1147. #ifdef CONFIG_SOFTMMU
  1148. t2 = tci_read_i(&tb_ptr);
  1149. helper_stl_mmu(env, taddr, t0, t2);
  1150. #else
  1151. host_addr = (tcg_target_ulong)taddr;
  1152. *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
  1153. #endif
  1154. break;
  1155. case INDEX_op_qemu_st64:
  1156. tmp64 = tci_read_r64(&tb_ptr);
  1157. taddr = tci_read_ulong(&tb_ptr);
  1158. #ifdef CONFIG_SOFTMMU
  1159. t2 = tci_read_i(&tb_ptr);
  1160. helper_stq_mmu(env, taddr, tmp64, t2);
  1161. #else
  1162. host_addr = (tcg_target_ulong)taddr;
  1163. *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
  1164. #endif
  1165. break;
  1166. default:
  1167. TODO();
  1168. break;
  1169. }
  1170. assert(tb_ptr == old_code_ptr + op_size);
  1171. }
  1172. exit:
  1173. return next_tb;
  1174. }